Example #1
import json

def write_to_json(infile, json_file):
    # process the OSM file into JSON-serializable records
    data = process_map(infile)

    # write the records one by one, each followed by a newline
    with open(json_file, "w") as jf:
        for element in data:
            json.dump(element, jf, indent=2)
            jf.write("\n")
Example #2
from collections import defaultdict
import pprint

import audit
import data

def main():
    filename = 'san-francisco.osm'
    #filename = 'sf-sampler.osm'
    #print("\n*** Check the number of tags ***")
    #taglist = mapparser.count_tags(filename)
    #pprint.pprint(taglist)

    #print("\n*** Check k value of each tag ***")
    #keys = tags.process_map(filename)
    #pprint.pprint(keys)

    print("\n*** Audit stree types and city names ***")
    street_types = defaultdict(set)
    city_names = set()
    audit.audit(filename, street_types, city_names)
    pprint.pprint(dict(street_types))
    pprint.pprint(city_names)

    print("\n*** Convert data *** ")
    data.process_map(filename, False)
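The audit module itself is not shown in this example. A minimal sketch of how such an audit could walk the OSM XML and fill street_types and city_names in place, assuming standard addr:street / addr:city tags and a trailing-word regex (neither taken from the project's actual code), might look like:

import re
import xml.etree.ElementTree as ET

STREET_TYPE_RE = re.compile(r'\b\S+\.?$', re.IGNORECASE)

def audit_sketch(filename, street_types, city_names):
    # Stream the OSM XML so large extracts need not fit in memory;
    # street_types is a dict of sets, city_names a plain set.
    for _, elem in ET.iterparse(filename, events=("end",)):
        if elem.tag in ("node", "way"):
            for tag in elem.iter("tag"):
                key, value = tag.get("k"), tag.get("v", "")
                if key == "addr:street":
                    match = STREET_TYPE_RE.search(value)
                    if match:
                        street_types[match.group()].add(value)
                elif key == "addr:city":
                    city_names.add(value)
            elem.clear()  # release the processed element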
Example #3
    # get an idea of what kinds of fixes we should make
    print("\nTags issues:")
    pprint(tags.process_map(FILENAME))

    # get an idea of the unique users in the dataset
    print("\nUnique User Count:\n")
    print(users.process_map(FILENAME))

    # audit the data to see what changes still need to be made
    print("\nIdeas for audits that should be made:")
    pprint(audit.audit(FILENAME))

    # lastly, after examining the data, call data.py.
    print("\nExporting data to csv files:")
    data.process_map(FILENAME, validate=False)

    # If you want to load the CSV files from the terminal instead, comment out
    # the code below, as it loads all of the data into SQL from Python.

    # 0) Make sure the DB has been created by establishing a connection first:
    insert_data.create_connection(DB_FILE)

    # 1) split the drop query on ";" and execute each statement against the DB
    for drop in DROP_QUERY.split(";"):
        insert_data.update_db(drop, DB_FILE)

    # 2) perform the same for the create query
    for create in CREATE_QUERY.split(";"):
        insert_data.update_db(create, DB_FILE)
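The insert_data helpers used above are not included in the snippet. Based only on the calls shown (create_connection(DB_FILE) and update_db(query, DB_FILE), one statement per call), a minimal sqlite3-based sketch could be:

import sqlite3

def create_connection(db_file):
    # open (and implicitly create) the SQLite database file
    return sqlite3.connect(db_file)

def update_db(query, db_file):
    # execute a single DDL/DML statement against the database and commit;
    # matches the one-statement-per-call usage in the loops above
    conn = sqlite3.connect(db_file)
    try:
        conn.execute(query)
        conn.commit()
    finally:
        conn.close()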
Example #4
import project_data

def convert_map(input_file):
    # run the full conversion on the input OSM file
    project_data.process_map(input_file, True)