def upload_insarmaps_metadata(fileName):
    """Load pickled insarmaps metadata from fileName and upload it to the
    'pgis' database: one row in the area table plus the standalone
    attribute / plot-attribute tables.

    fileName -- path to a metadata.pickle file produced by the converter.
    Relies on the module-level dbUsername/dbPassword/dbHost credentials.
    """
    # NOTE(security): pickle.load executes arbitrary code if the file is
    # untrusted; metadata.pickle is assumed to be locally generated.
    # Binary mode is required: the file is serialized with
    # pickle.HIGHEST_PROTOCOL (a binary protocol).
    with open(fileName, "rb") as metadata_file:  # renamed: 'file' shadows a Python 2 builtin
        insarmapsMetadata = pickle.load(metadata_file)

    area = insarmapsMetadata["area"]
    project_name = insarmapsMetadata["project_name"]
    mid_long = insarmapsMetadata["mid_long"]
    mid_lat = insarmapsMetadata["mid_lat"]
    country = insarmapsMetadata["country"]
    region = insarmapsMetadata["region"]
    chunk_num = insarmapsMetadata["chunk_num"]
    attribute_keys = insarmapsMetadata["attribute_keys"]
    attribute_values = insarmapsMetadata["attribute_values"]
    string_dates_sql = insarmapsMetadata["string_dates_sql"]
    decimal_dates_sql = insarmapsMetadata["decimal_dates_sql"]
    attributes = insarmapsMetadata["attributes"]
    needed_attributes = insarmapsMetadata["needed_attributes"]

    attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis')
    attributesController.connect()
    attributesController.create_area_table_if_not_exists()
    attributesController.insert_dataset_into_area_table(
        area, project_name, mid_long, mid_lat, country, region, chunk_num,
        attribute_keys, attribute_values, string_dates_sql, decimal_dates_sql)

    # put attributes into the standalone attributes table
    for k in attributes:
        v = attributes[k]
        # convert numpy.int64 objects to native python types, otherwise
        # psycopg2 can't upload to db; needed because we use
        # pickle.HIGHEST_PROTOCOL to serialize with pickle now
        if isinstance(v, numpy.int64):
            v = v.item()
        if k in needed_attributes:
            attributesController.add_attribute(area, k, v)
        elif k == "plotAttributes":
            attributesController.add_plot_attribute(area, k, v)

    attributesController.close()
def upload_json(folder_path): global dbUsername, dbPassword, dbHost attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis') attributesController.connect() print "Clearing old dataset, if it is there" area_name = get_unavco_name(folder_path) attributesController.remove_dataset_if_there(area_name) attributesController.close() firstJsonFile = True for file in os.listdir(folder_path): # insert json file to pgsql using ogr2ogr file_extension = file.split(".")[1] if file != "metadata.pickle" and file_extension != "mbtiles": command = 'ogr2ogr -append -f "PostgreSQL" PG:"dbname=pgis host=' + dbHost + ' user='******' password='******'" --config PG_USE_COPY YES -nln "' + area_name + '" ' + folder_path + '/' + file # only provide layer creation options if this is the first file if firstJsonFile: command = 'ogr2ogr -lco LAUNDER=NO -append -f "PostgreSQL" PG:"dbname=pgis host=' + dbHost + ' user='******' password='******'" --config PG_USE_COPY YES -nln "' + area_name + '" ' + folder_path + '/' + file firstJsonFile = False res = os.system(command) if res != 0: sys.stderr.write( "Error inserting into the database. This is most often due to running out of Memory (RAM), or incorrect database credentials... quitting" ) sys.exit() print "Inserted " + file + " to db" # uploading metadata for area upload_insarmaps_metadata(folder_path + "/metadata.pickle") # create index print "Creating index on " + area_name attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis') attributesController.connect() attributesController.index_table_on(area_name, "p", None) attributesController.cluster_table_using(area_name, area_name + "_p_idx") attributesController.close()
def upload_insarmaps_metadata(fileName):
    """Load pickled insarmaps metadata from fileName and upload it to the
    'pgis' database: one row in the area table plus the standalone
    attribute / plot-attribute tables.

    fileName -- path to a metadata.pickle file produced by the converter.
    Relies on the module-level dbUsername/dbPassword/dbHost credentials.
    """
    # NOTE(security): cPickle.load executes arbitrary code if the file is
    # untrusted; metadata.pickle is assumed to be locally generated.
    # Open in binary mode ("rb", not "r"): pickle data is a byte stream and
    # text mode corrupts it on platforms that translate newlines.
    with open(fileName, "rb") as metadata_file:  # renamed: 'file' shadows a Python 2 builtin
        insarmapsMetadata = cPickle.load(metadata_file)

    area = insarmapsMetadata["area"]
    project_name = insarmapsMetadata["project_name"]
    mid_long = insarmapsMetadata["mid_long"]
    mid_lat = insarmapsMetadata["mid_lat"]
    country = insarmapsMetadata["country"]
    region = insarmapsMetadata["region"]
    chunk_num = insarmapsMetadata["chunk_num"]
    attribute_keys = insarmapsMetadata["attribute_keys"]
    attribute_values = insarmapsMetadata["attribute_values"]
    string_dates_sql = insarmapsMetadata["string_dates_sql"]
    decimal_dates_sql = insarmapsMetadata["decimal_dates_sql"]
    attributes = insarmapsMetadata["attributes"]
    needed_attributes = insarmapsMetadata["needed_attributes"]

    attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis')
    attributesController.connect()
    attributesController.create_area_table_if_not_exists()
    attributesController.insert_dataset_into_area_table(
        area, project_name, mid_long, mid_lat, country, region, chunk_num,
        attribute_keys, attribute_values, string_dates_sql, decimal_dates_sql)

    # put attributes into the standalone attributes table
    for k in attributes:
        v = attributes[k]
        if k in needed_attributes:
            attributesController.add_attribute(area, k, v)
        elif k == "plotAttributes":
            attributesController.add_plot_attribute(area, k, v)

    attributesController.close()
def upload_json(folder_path): global dbUsername, dbPassword, dbHost attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis') attributesController.connect() print "Clearing old dataset, if it is there" area_name = get_unavco_name(folder_path) attributesController.remove_dataset_if_there(area_name) attributesController.close() firstJsonFile = True for file in os.listdir(folder_path): # insert json file to pgsql using ogr2ogr file_extension = file.split(".")[1] if file != "metadata.pickle" and file_extension != "mbtiles": command = 'ogr2ogr -append -f "PostgreSQL" PG:"dbname=pgis host=' + dbHost + ' user='******' password='******'" --config PG_USE_COPY YES -nln "' + area_name + '" ' + folder_path + '/' + file # only provide layer creation options if this is the first file if firstJsonFile: command = 'ogr2ogr -lco LAUNDER=NO -append -f "PostgreSQL" PG:"dbname=pgis host=' + dbHost + ' user='******' password='******'" --config PG_USE_COPY YES -nln "' + area_name + '" ' + folder_path + '/' + file firstJsonFile = False res = os.system(command) if res != 0: sys.stderr.write("Error inserting into the database. This is most often due to running out of Memory (RAM), or incorrect database credentials... quitting") sys.exit() print "Inserted " + file + " to db" # uploading metadata for area upload_insarmaps_metadata(folder_path + "/metadata.pickle") # create index print "Creating index on " + area_name attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis') attributesController.connect() attributesController.index_table_on(area_name, "p", None) attributesController.close()
def upload_insarmaps_metadata(fileName):
    """Load pickled insarmaps metadata from fileName and upload it to the
    'pgis' database: one row in the area table plus the standalone
    attribute / plot-attribute tables.

    fileName -- path to a metadata.pickle file produced by the converter.
    Relies on the module-level dbUsername/dbPassword/dbHost credentials.
    """
    # NOTE(security): cPickle.load executes arbitrary code if the file is
    # untrusted; metadata.pickle is assumed to be locally generated.
    # Open in binary mode ("rb", not "r"): pickle data is a byte stream and
    # text mode corrupts it on platforms that translate newlines.
    with open(fileName, "rb") as metadata_file:  # renamed: 'file' shadows a Python 2 builtin
        insarmapsMetadata = cPickle.load(metadata_file)

    area = insarmapsMetadata["area"]
    project_name = insarmapsMetadata["project_name"]
    mid_long = insarmapsMetadata["mid_long"]
    mid_lat = insarmapsMetadata["mid_lat"]
    country = insarmapsMetadata["country"]
    region = insarmapsMetadata["region"]
    chunk_num = insarmapsMetadata["chunk_num"]
    attribute_keys = insarmapsMetadata["attribute_keys"]
    attribute_values = insarmapsMetadata["attribute_values"]
    string_dates_sql = insarmapsMetadata["string_dates_sql"]
    decimal_dates_sql = insarmapsMetadata["decimal_dates_sql"]
    attributes = insarmapsMetadata["attributes"]
    needed_attributes = insarmapsMetadata["needed_attributes"]

    attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis')
    attributesController.connect()
    attributesController.create_area_table_if_not_exists()
    attributesController.insert_dataset_into_area_table(area, project_name,
                                                        mid_long, mid_lat, country, region, chunk_num,
                                                        attribute_keys, attribute_values,
                                                        string_dates_sql, decimal_dates_sql)

    # put attributes into the standalone attributes table
    for k in attributes:
        v = attributes[k]
        if k in needed_attributes:
            attributesController.add_attribute(area, k, v)
        elif k == "plotAttributes":
            attributesController.add_plot_attribute(area, k, v)

    attributesController.close()
print e sys.exit() # put attributes in own table. TODO: remove old way of adding attributes # via array attributesController = InsarDatabaseController(dbUsername, dbPassword, dbHost, 'pgis') attributesController.connect() for k in attributes: v = attributes[k] if "POLYGON" in str(v): arr = v.split(",") s = "\," v = s.join(arr) attributesController.add_attribute(project_name, k, v) attributesController.close() # create index to speed up queries: print "Creating index" try: con = psycopg2.connect("dbname='pgis' user='******' host='" + dbHost + "' password='******'") cur = con.cursor() query = 'CREATE INDEX ON ' + area + ' (p)' cur.execute(query) con.commit() con.close() except Exception, e: print "error creating index on p" print e #sys.exit() Don't exit, as what if the index already exists print "Done creating index"