def create_schema():
    """Reset the db schema: drop every mapped table, then recreate them all."""
    session = sa_session.get_session()
    engine = session.bind
    # Drop first so create_all starts from a clean slate.
    sa_metadata.metadata.drop_all(engine)
    session.commit()
    sa_metadata.metadata.create_all(engine)
    session.commit()
def main():
    """Rebuild the cell tables (km100 / km1000 grids) from the habitat table.

    For each cell size, habitats are grouped by their cell-id column; each
    group's summed area, average depth, and unioned geometry become one Cell.
    """
    # Get db session.
    session = sa_session.get_session()
    # Clear cell tables (association table first, then the cell table).
    for t in [sa_cell.cell_habitat_table, sa_cell.cell_table]:
        session.execute(t.delete())
    session.commit()
    # For each type of cell...
    for cell_size in ["km100", "km1000"]:
        print >>sys.stderr, "Processing cells of size '%s'" % cell_size
        # Initialize list of cells.
        cells = []
        # Get cell ids: group by the per-size id column (e.g. Habitat.id_km100).
        cell_id_attr = getattr(Habitat, "id_%s" % cell_size)
        # Summed geographic area of the group's habitats (geography cast —
        # presumably yields square meters; TODO confirm geom is lon/lat).
        cell_area = func.sum(geo_func.area(func.geography(Habitat.geom))).label("cell_area")
        cell_depth = func.avg(Habitat.z).label("cell_depth")
        # NOTE(review): the label is applied to st_union, not to the outer
        # wkb() expression — looks like it was meant for the wkb result; verify.
        cell_geom_wkb = geo_func.wkb(func.st_union(Habitat.geom).label("cell_geom"))
        cell_infos = session.query(cell_id_attr, cell_area, cell_depth, cell_geom_wkb).group_by(cell_id_attr).all()
        # For each id, create cell and assign habitats.
        # (The loop targets below intentionally reuse/shadow the query-label
        # names bound above.)
        print >>sys.stderr, "Creating cells"
        cell_counter = 0
        for (cell_id, cell_area, cell_depth, cell_geom_wkb) in cell_infos:
            # Progress marker every 1000 cells.
            if (cell_counter % 1000) == 0:
                print >>sys.stderr, "%s..." % (cell_counter),
            cell_counter += 1
            # Get cell's habitats.
            cell_habitats = session.query(Habitat).filter(cell_id_attr == cell_id).all()
            # Format cell's geometry: parse the db's WKB bytes with shapely.
            cell_geom = wkb.loads("%s" % cell_geom_wkb)
            # Normalize to MultiPolygon so every cell shares one geometry type.
            if cell_geom.geom_type == "Polygon":
                cell_geom = MultiPolygon([(cell_geom.exterior.coords, cell_geom.interiors)])
            cell = Cell(
                type=cell_size,
                type_id=cell_id,
                geom=cell_geom.wkt,
                area=cell_area,
                depth=cell_depth,
                habitats=cell_habitats,
            )
            cells.append(cell)
        session.add_all(cells)
        session.commit()
def create_schema(): # Get db session. session = sa_session.get_session() # Drop/Add tables print "Resetting db schema" sa_metadata.metadata.drop_all(bind = session.connection()) sa_metadata.metadata.create_all(bind = session.connection()) session.commit()
def main():
    """Load fishing-effort rows from a csv file into the db.

    Usage: <script> <csv_file> <tag>
    Each csv row is resolved against the 100km cell grid and the gear table,
    then saved as an Effort tagged with *tag*.
    """
    # Get params from command line.
    if len(sys.argv) != 3:
        print >> sys.stderr, "Must provide csv file and id of effort set to be created in db."
        exit(1)
    csv_file = sys.argv[1]
    tag = sys.argv[2]
    # Setup DAOs.
    db_session = sa_session.get_session()
    effort_dao = SA_Effort_DAO(session=db_session)
    cell_dao = SA_Cell_DAO(session=db_session)
    # Get 100km cells and make id lookup.
    cells = cell_dao.get_cells(filters=[{'attr': 'type', 'op': '==', 'value': 'km100'}])
    cells_by_100km_id = {}
    for c in cells:
        cells_by_100km_id[c.type_id] = c
    # Make gears lookup.
    gears_by_id = {}
    for gear in db_session.query(Gear).all():
        gears_by_id[gear.id] = gear
    # Create efforts from csv rows.
    efforts = []
    # Fix: open the csv inside a context manager so the file handle is
    # always closed (the original opened it and never closed it).
    with open(csv_file, "rb") as csv_fh:
        csv_reader = csv.DictReader(csv_fh)
        r_counter = 0
        for r in csv_reader:
            r_counter += 1
            # Progress marker every 1000 rows.
            if (r_counter % 1000) == 0:
                print >> sys.stderr, r_counter
            # Get cell.
            cell = cells_by_100km_id[int(r['id_100'])]
            # Get gear (gear ids are "GC" + the csv's numeric gear code).
            gear_id = "GC%s" % r['gear_code']
            gear = gears_by_id[gear_id]
            # Create effort.
            efforts.append(Effort(
                cell=cell,
                time=r['year'],
                gear=gear,
                tag=tag,
                swept_area=float(r['A']),
                hours_fished=float(r['hours_fished'])
            ))
    # Save efforts.
    effort_dao.save_efforts(efforts)
def create_schema():
    """Drop the result table if it exists, then recreate it."""
    session = sa_session.get_session()
    engine = session.bind
    tables = [sa_result.table]
    # Drop whatever already exists.
    for table in tables:
        if table.exists(engine):
            table.drop(engine)
    # Create in reverse order (matters once dependent tables are listed).
    for table in reversed(tables):
        table.create(engine)
def create_schema():
    """Create the gear, effort, and effort-set tables."""
    session = sa_session.get_session()
    # Creation order respects FK dependencies (association table last).
    for table in (
        sa_gear.table,
        sa_effort.table,
        sa_effort_set.effort_set_table,
        sa_effort_set.effort_set_effort_table,
    ):
        table.create(session.bind)
def main():
    """Rebuild the gears table from the definitions in fishing_conf."""
    # Get DB session.
    session = sa_session.get_session()
    # Clear gears table.
    session.execute(sa_gear.table.delete())
    # Build one Gear object per gear definition in fishing_conf.
    gear_objs = [
        Gear(
            id=defn['id'],
            name=defn['name'],
            category=defn['category'],
        )
        for defn in fishing_conf.gear_definitions
    ]
    # Add gear objects to session and save to DB.
    session.add_all(gear_objs)
    session.commit()
def main():
    """Rebuild the features table from the vulnerability assessment csv."""
    # Read features from the vulnerability assessment.
    va_rows = sasi.util.va.read_va_from_csv(conf.conf["va_file"])
    va = VulnerabilityAssessment(rows=va_rows)
    features = va.get_features()
    # Get DB session.
    session = sa_session.get_session()
    # Clear features table.
    session.execute(sa_feature.table.delete())
    session.commit()
    # Create Feature objects.
    # note: might move this into the VA object itself later.
    feature_objs = [
        Feature(
            name=f["FEATURE"],
            id=f["FEATURE_CODE"],
            category=f["FEATURE_CLASS_CODE"],
        )
        for f in features.values()
    ]
    # Add feature objects to session and save to DB.
    session.add_all(feature_objs)
    session.commit()
def main():
    """Rebuild the habitat_type tables from the vulnerability assessment."""
    # Read habitat types and features from vulnerability assessment.
    va_rows = sasi.util.va.read_va_from_csv(conf.conf['va_file'])
    va = VulnerabilityAssessment(rows=va_rows)
    valid_habitat_types = va.get_habitats()
    features_by_habs = va.get_features_by_habitats()
    # Get DB session.
    session = sa_session.get_session()
    # Clear habitat_type tables (association table first).
    session.execute(sa_habitat_type.habitat_type_feature_table.delete())
    session.execute(sa_habitat_type.habitat_type_table.delete())
    session.commit()
    # Each valid habitat type key has the form "<substrate_id>,<energy>".
    for hab_key in valid_habitat_types:
        substrate_id, energy = hab_key.split(',')
        # Look up the substrate object.
        substrate = session.query(Substrate).filter(Substrate.id == substrate_id).one()
        # Collect this habitat's Feature objects across all categories.
        features = []
        for category_features in features_by_habs[hab_key].values():
            for feature_id in category_features:
                features.append(session.query(Feature).filter(Feature.id == feature_id).one())
        # Create the habitat_type object and stage it on the session.
        session.add(Habitat_Type(substrate=substrate, energy=energy, features=features))
    # Save to db.
    session.commit()
def main():
    """Rebuild the substrates table from the vulnerability assessment csv."""
    # Read substrates from the vulnerability assessment.
    va_rows = sasi.util.va.read_va_from_csv(conf.conf['va_file'])
    va = VulnerabilityAssessment(rows=va_rows)
    # Fix: removed an unused local (`f_by_h = va.get_features_by_habitats()`);
    # its result was never read.
    substrates = va.get_substrates()
    # Get DB session.
    session = sa_session.get_session()
    # Clear substrate table.
    session.execute(sa_substrate.table.delete())
    session.commit()
    # Create Substrate objects.
    # note: might move this into the VA object itself later.
    substrate_objs = [
        Substrate(name=s['SUBSTRATE'], id=s['SUBSTRATE_CODE'])
        for s in substrates.values()
    ]
    # Add substrate objects to session and save to DB.
    session.add_all(substrate_objs)
    session.commit()
@app.route('/get_map')
@crossdomain(origin='*')
@cache.cached(key_prefix=make_cache_key)
def get_map():
    """Render a result map via WMS and return it as a GIF response."""
    # Parse custom (non-WMS) parameters from the 'PARAMS' query arg.
    # NOTE(review): custom_parameters is parsed but not passed anywhere —
    # kept for behavior parity; confirm whether it is still needed.
    custom_parameters = []
    custom_parameters_json = request.args.get('PARAMS','[]')
    if custom_parameters_json:
        custom_parameters = json.loads(custom_parameters_json)
    # Remaining query args are forwarded as WMS parameters.
    wms_parameters = request.args.items()
    # The result field to render.
    result_field = json.loads(request.args.get('RESULT_FIELD','{}'))
    # Optional result filters.
    filters_json = request.args.get('FILTERS','[]')
    filters = json.loads(filters_json) if filters_json else []
    map_image = results_services.get_map(
        wms_parameters=wms_parameters,
        filters=filters,
        result_field=result_field,
    )
    # Return the image.
    return Response(map_image, mimetype='image/gif')

if __name__ == '__main__':
    session = sa_session.get_session()
    app.run(debug=True)
def test(self):
    """Smoke test: a db session can be obtained."""
    session = sa_session.get_session()
    # Fix: assertTrue replaces failUnless, a deprecated unittest alias.
    self.assertTrue(session)
def main():
    """Export one year of model results for one result tag to a shapefile.

    Fetches per-cell result values for the hard-coded tag/year, then writes
    one MultiPolygon record per cell via fiona.
    """
    output_dir = '/home/adorsk/projects/sasi/sasi_model/outputs/shapefiles'
    result_tag = 'gc30_all'
    t = 1999
    # Get result dao.
    db_session = sa_session.get_session()
    result_dao = SA_Result_DAO(session=db_session)
    # Get values by time, cell, and field for results.
    values_by_t_c_f = result_dao.get_values_by_t_c_f(filters=[
        {'attr': 'tag', 'op': '==', 'value': result_tag},
        {'attr': 'time', 'op': '==', 'value': t}
    ])
    #
    # Make fiona collection from result set.
    #
    # Define schema.
    geometry_type = 'MultiPolygon'
    schema = {
        'geometry': geometry_type,
        'properties': {
            'type_id': 'str',
            'hab_type': 'str'
        }
    }
    # Generic per-cell result fields; all stored as floats.
    generic_attrs = ['A', 'Y', 'X', 'Z', 'ZZ']
    for generic_attr in generic_attrs:
        schema['properties'][generic_attr] = 'float'
    # Write shpfile.
    filename = "%s/%s.%s.shp" % (output_dir, result_tag, t)
    driver = 'ESRI Shapefile'
    crs = {'init': "epsg:4326"}
    with collection(
        filename, "w",
        driver=driver,
        schema=schema,
        crs=crs
    ) as c:
        record_counter = 1
        for cell, cell_fields in values_by_t_c_f[t].items():
            # Progress marker every 1000 records.
            if (record_counter % 1000) == 0:
                print >> sys.stderr, "%s" % record_counter
            # Populate record properties: a "(id)" label per habitat type,
            # joined with ' & ' (set-based, so order is unspecified).
            habitat_types = set(["(%s)" % h.habitat_type.id for h in cell.habitats])
            properties = {
                'type_id': cell.type_id,
                'hab_type': ' & '.join(habitat_types)
            }
            # Missing generic fields default to 0.0.
            for generic_attr in generic_attrs:
                properties[generic_attr] = cell_fields.get(generic_attr, 0.0)
            # Populate record geometry from the cell's WKB bytes.
            wkb_geom = "%s" % cell.geom.geom_wkb
            geometry = GeomBuilder().build_wkb(wkb_geom)
            # Assemble the record.
            record = {
                'id': record_counter,
                'geometry': geometry,
                'properties': properties
            }
            # Write the record.
            c.write(record)
            record_counter += 1
def get_dao():
    """Return a result DAO bound to a fresh db session."""
    return SA_Result_DAO(session=sa_session.get_session())
def get_dao():
    """Return a habitat DAO bound to a fresh db session."""
    return SA_Habitat_DAO(session=sa_session.get_session())
def main():
    """Load habitat records from the SASI habitat shapefile into the db.

    Reads each shapefile feature, reprojects its geometry to EPSG:4326,
    resolves its habitat type (substrate + energy) against the db, writes
    Habitat rows, and finally recomputes each habitat's area in the db from
    the stored geometry.
    """
    # Get db session.
    session = sa_session.get_session()
    # Clear habitat tables.
    for t in [sa_habitat.table]:
        session.execute(t.delete())
    # Load shapefile.
    sf = ogr.Open(conf.conf['sasi_habitat_file'])
    # Get feature layer.
    layer = sf.GetLayer(0)
    # Get layer srs (kept for reference; transform uses target_srs only).
    layer_srs = layer.GetSpatialRef()
    # Set target srs to 4326 (default used by most GIS software).
    target_srs = ogr.osr.SpatialReference()
    target_srs.ImportFromEPSG(4326)
    # Get field names from the layer definition.
    layer_def = layer.GetLayerDefn()
    field_count = layer_def.GetFieldCount()
    fields = [layer_def.GetFieldDefn(i).GetName() for i in range(field_count)]
    # Initialize a list to hold habitat objects.
    habitats = []
    # For each cell feature...
    counter = 0
    features = [f for f in layer]
    for f in features:
        # Progress marker every 1000 features.
        if (counter % 1000) == 0:
            print >> sys.stderr, "%s" % (counter),
        counter += 1
        # Get feature attributes.
        f_attributes = {}
        for i in range(field_count):
            f_attributes[fields[i]] = f.GetField(i)
        # Skip blank rows.
        if (not f_attributes['SOURCE']):
            continue
        # Get feature geometry and transform to target_srs.
        # Fix: the original fetched the geometry ref twice; once suffices.
        ogr_g = f.GetGeometryRef()
        ogr_g.TransformTo(target_srs)
        # We convert each feature into a multipolygon, since
        # we may have a mix of normal polygons and multipolygons.
        geom = wkb.loads(ogr_g.ExportToWkb())
        if geom.geom_type == 'Polygon':
            geom = MultiPolygon([(geom.exterior.coords, geom.interiors)])
        # Get habitat_type's energy code.
        energy = energy_mappings.shp_to_va[f_attributes['Energy']]
        # Get habitat_type's substrate object (.one() also validates that the
        # substrate exists; the object itself is not used further).
        substrate_id = substrate_mappings.shp_to_va[f_attributes['TYPE_SUB'].strip()]
        substrate = session.query(Substrate).filter(Substrate.id == substrate_id).one()
        # Get habitat_type object matching substrate and energy.
        habitat_type = session.query(Habitat_Type).join(Habitat_Type.substrate).filter(Substrate.id == substrate_id).filter(Habitat_Type.energy == energy).one()
        # Make habitat object from feature data.
        r = Habitat(
            id_km100=f_attributes['100km_Id'],
            id_km1000=f_attributes['1000Km_Id'],
            id_vor=f_attributes['Vor_id'],
            z=f_attributes['z'],
            habitat_type=habitat_type,
            area=f_attributes['Area_Km'],
            geom=geom.wkt
        )
        habitats.append(r)
    print >> sys.stderr, "Writing habitats to db"
    session.add_all(habitats)
    session.commit()
    # Recompute areas from the stored geometries (geography cast — presumably
    # square meters; NOTE(review): this overwrites the shapefile's 'Area_Km'
    # value, so the units differ — confirm intended).
    print >> sys.stderr, "Calculating areas for habitats."
    habitat_area = geo_func.area(func.geography(Habitat.geom)).label('habitat_area')
    habitat_infos = session.query(Habitat, habitat_area).all()
    for (habitat, habitat_area) in habitat_infos:
        habitat.area = habitat_area
    session.commit()
    print >> sys.stderr, "done"