Code example #1
0
File: file_views.py — Project: bring52405/bioshare
def preview_file(request, share, subpath):
    """Return a JSON page of lines from a text file inside a share.

    GET parameters:
        from      -- 1-based first line to return (default 1)
        for       -- number of lines to return (default 100)
        get_total -- if present, also include the file's total line count

    The response always carries the paging info plus a 'next' page
    pointer; on any read failure the 'content' field holds a friendly
    error message instead of file text.
    """
    from file_utils import get_lines, get_num_lines
    from_line = int(request.GET.get('from', 1))
    num_lines = int(request.GET.get('for', 100))
    file_path = os.path.join(share.get_path(), subpath)
    # Build the common payload once; only 'content' (and optionally
    # 'total') differ between the success and failure paths.
    response = {
        'share_id': share.id,
        'subpath': subpath,
        'from': from_line,
        'for': num_lines,
        'next': {
            'from': from_line + num_lines,
            'for': num_lines
        }
    }
    try:
        response['content'] = get_lines(file_path, from_line, from_line + num_lines - 1)
        if 'get_total' in request.GET:
            response['total'] = get_num_lines(file_path)
    except Exception:
        # Broad catch is deliberate: any failure (binary file, bad
        # encoding, missing file) degrades to a readable message.
        response['content'] = "Unable to preview file.  This file may not be a plain text file, or has unsupported characters."
    return json_response(response)
Code example #2
0
File: file_views.py — Project: amschaal/bioshare
def preview_file(request, share, subpath):
    """Serve a paged JSON preview of a text file inside a share."""
    from file_utils import get_lines, get_num_lines
    # Paging window: 1-based start line and page size from the query string.
    start = int(request.GET.get('from', 1))
    count = int(request.GET.get('for', 100))
    target = os.path.join(share.get_path(), subpath)
    payload = {
        'share_id': share.id,
        'subpath': subpath,
        'content': get_lines(target, start, start + count - 1),
        'from': start,
        'for': count,
        'next': {'from': start + count, 'for': count},
    }
    # Total line count is comparatively expensive; only computed on request.
    if 'get_total' in request.GET:
        payload['total'] = get_num_lines(target)
    return json_response(payload)
Code example #3
0
def main():
    print "Reading from file {}".format(IMPORT_FILEPATH)
    job_mapping_lines = files.get_lines(IMPORT_FILEPATH)

    print "Appending to file {}".format(EXPORT_FILEPATH)
    if RUN_IN_PARALLEL:
        utils.parallelize_with_param(get_and_write_packages_for_job,
                                     job_mapping_lines)
    else:
        for job_mapping_line in job_mapping_lines:
            get_and_write_packages_for_job(job_mapping_line)
Code example #4
0
def main():

    API_KEY = ""  # add your API key before running
    lines = file_utils.get_lines(
        "/Users/mirek/Desktop/delivered_packages_addresses.csv")
    histogram = {}
    done = 0
    for line in lines[1:]:  #skip header
        done += 1
        if done > 500:
            print "Finished"
            break

        entries = line.split(",")

        if "TB91849729SEA1" in entries:  # has a "," in line2 field, just skip it
            continue

        [
            tracking_num, from_line_1, from_line_2, from_city, from_state,
            from_zip, from_coord_long_raw, from_coord_lat_raw, to_line_1,
            to_line_2, to_city, to_state, to_zip, to_coord_long_raw,
            to_coord_lat_raw
        ] = entries
        # from_coord_lat = from_coord_lat_raw.replace('"','')
        # from_coord_long = from_coord_long_raw.replace('"','')
        # to_coord_lat = to_coord_long_raw.replace('"','')
        # to_coord_long = to_coord_long_raw.replace('"','')

        url_base = "https://maps.googleapis.com/maps/api/place/autocomplete/json?"
        params = "key={}&input={}, {}, {}".format(API_KEY, to_line_1, to_city,
                                                  to_state)
        raw_response = requests.get(url_base + params)
        response = raw_response.json()

        place_id = response["predictions"][0]["place_id"]
        types = response["predictions"][0]["types"]
        for t in types:
            if t not in histogram:
                histogram[t] = 0
            histogram[t] += 1

        print "{}: {}".format(tracking_num, types)

    print "Results"
    print json.dumps(histogram, indent=1)
Code example #5
0
def main(environ=api.UAT):

    failed_lines = []
    fixed_lines = []
    all_lines = file_utils.get_lines("/Users/mirek/driver_addresses.csv")
    # all_lines = file_utils.get_lines("/Users/mirek/temp_driver_address.csv")

    for address_str in all_lines[1:]:  #skip header
        print address_str

        address_line = address_str.split(",")
        recommended_address = get_valid_or_recommended_address(
            environ, address_line)
        if recommended_address:
            location = recommended_address["location"]
            fixed_line = address_line + [
                str(location["latitude"]),
                str(location["longitude"])
            ]
            fixed_lines.append(fixed_line)
        else:  # try cleaning things up
            [
                _id, first_name, last_name, email, address, city, state,
                zip_code, o_lat, o_lon
            ] = address_line
            clean_address = address
            for splitter in [" apt", " #", " unit"]:
                clean_address = clean_address.lower().split(splitter)[0]

            clean_address_line = [
                _id, first_name, last_name, email, clean_address, city, state,
                zip_code, o_lat, o_lon
            ]
            recommended_address = get_valid_or_recommended_address(
                environ, clean_address_line)
            if recommended_address:
                print "\t\tFixed address '{}' -> '{}".format(
                    address, clean_address)
                location = recommended_address["location"]
                fixed_line = address_line + [
                    str(location["latitude"]),
                    str(location["longitude"])
                ]
                fixed_lines.append(fixed_line)

            else:
                print "\t\tDidn't fix address '{}' -> '{}".format(
                    address, clean_address)
                failed_lines.append(address_line)

    print "exporting {} failed and {} fixed lines".format(
        len(failed_lines), len(fixed_lines))

    orig_header_str = all_lines[0]
    fixed_header_str = orig_header_str + ",new_latitude,new_longitude"
    export_lines(failed_lines,
                 "/Users/mirek/Desktop/failed_driver_addresses.csv",
                 orig_header_str)
    export_lines(fixed_lines,
                 "/Users/mirek/Desktop/fixed_driver_addresses.csv",
                 fixed_header_str)