def costs(self, v):
    to_db_add = []
    # Get the query obj
    session = Session()
    q = session.query(Costs)
    # Need a new DataFrame instance here to avoid __setattr__ issues
    df = pd.DataFrame(v)
    for idx, row in df.iterrows():
        # Now invert the expanded dict back into a single JSONB column for storage
        res = q.filter(Costs.match_id == idx).first()
        if res:
            # Update the JSON blob, mapping NaN to None so it serializes as JSON null
            costs_new_or_updated = row.to_dict()
            for k, val in costs_new_or_updated.items():
                if val is None:
                    continue
                elif np.isnan(val):
                    val = None
                res._cost[k] = val
            # In-place mutation of a JSONB column is not change-tracked,
            # so explicitly flag the attribute as dirty
            sqlalchemy.orm.attributes.flag_modified(res, '_cost')
            session.add(res)
            session.commit()
        else:
            row = row.to_dict()
            costs = row.pop('_costs', {})
            for k, val in row.items():
                if val is not None and np.isnan(val):
                    val = None
                costs[k] = val
            cost = Costs(match_id=idx, _cost=costs)
            to_db_add.append(cost)
    if to_db_add:
        session.bulk_save_objects(to_db_add)
        session.commit()
    session.close()
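# The setter above maps pandas NaN values to None so they serialize as JSON
# null in the JSONB _cost column. A standalone sketch of that normalization
# (illustrative helper, not part of this module):
#
#   import numpy as np
#
#   def nan_to_none(d):
#       """Replace float NaN values with None, leaving other values intact."""
#       return {k: None if isinstance(val, float) and np.isnan(val) else val
#               for k, val in d.items()}
#
#   nan_to_none({'a': 1.0, 'b': float('nan')})  # {'a': 1.0, 'b': None}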
def create_network(self, nodes=None):
    cmds = 0
    session = Session()
    for res in session.query(Overlay):
        msg = json.dumps({'oid': res.id, 'time': time.time()})
        # If nodes are passed, process only those overlaps containing
        # the provided node(s)
        if nodes:
            for r in res.overlaps:
                if r in nodes:
                    self.redis_queue.rpush(
                        config['redis']['processing_queue'], msg)
                    cmds += 1
                    break
        else:
            self.redis_queue.rpush(config['redis']['processing_queue'], msg)
            cmds += 1
    script = 'acn_create_network'
    spawn_jobarr(script, cmds,
                 mem=config['cluster']['processing_memory'],
                 queue=config['cluster']['queue'],
                 env=config['python']['env_name'])
    session.close()
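# Sketch of the consumer side of the queue above (illustrative only; the
# actual worker is the acn_create_network cluster script, whose internals
# are not shown here). Each message carries the Overlay id to process:
#
#   import json
#   raw = redis_queue.lpop(config['redis']['processing_queue'])
#   job = json.loads(raw)
#   overlay_id = job['oid']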
def keypoints(self, kps):
    session = Session()
    io_keypoints.to_hdf(self.keypoint_file, keypoints=kps)
    res = session.query(Keypoints).filter(
        Keypoints.image_id == self['node_id']).first()
    if res is None:
        # Touching keypoint_file creates the missing Keypoints row; re-query
        # in this session so the commit below persists the change (a row
        # fetched via _from_db would be detached from this session)
        _ = self.keypoint_file
        res = session.query(Keypoints).filter(
            Keypoints.image_id == self['node_id']).first()
    res.nkeypoints = len(kps)
    session.commit()
    session.close()
def compute_overlaps(self):
    # Extract the exterior rings of all footprints, node them with ST_Union,
    # and polygonize the resulting arrangement into candidate overlap polygons
    query = """
    SELECT ST_AsEWKB(geom) AS geom FROM ST_Dump((
        SELECT ST_Polygonize(the_geom) AS the_geom FROM (
            SELECT ST_Union(the_geom) AS the_geom FROM (
                SELECT ST_ExteriorRing(footprint_latlon) AS the_geom
                FROM images) AS lines
        ) AS noded_lines
    ))"""
    session = Session()
    oquery = session.query(Overlay)
    iquery = session.query(Images)
    rows = []
    for q in self._engine.execute(query).fetchall():
        overlaps = []
        b = bytes(q['geom'])
        qgeom = shapely.wkb.loads(b)
        # Find all images whose footprints intersect this candidate polygon
        res = iquery.filter(Images.footprint_latlon.ST_Intersects(
            from_shape(qgeom, srid=949900)))
        for i in res:
            fgeom = to_shape(i.footprint_latlon)
            area = qgeom.intersection(fgeom).area
            # Skip sliver intersections caused by floating point noise
            if area < 1e-6:
                continue
            overlaps.append(i.id)
        o = Overlay(geom='SRID=949900;{}'.format(qgeom.wkt), overlaps=overlaps)
        res = oquery.filter(Overlay.overlaps == o.overlaps).first()
        if res is None:
            rows.append(o)
    session.bulk_save_objects(rows)
    session.commit()
    # Remove degenerate overlays that intersect at most one image
    res = oquery.filter(sqlalchemy.func.cardinality(Overlay.overlaps) <= 1)
    res.delete(synchronize_session=False)
    session.commit()
    session.close()
def fundamental_matrix(self, v):
    session = Session()
    # Query Edges directly; the else branch below constructs an Edges row,
    # so that is the table this setter targets
    res = session.query(Edges).\
          filter(Edges.source == self.source['node_id']).\
          filter(Edges.destination == self.destination['node_id']).first()
    if res:
        res.fundamental = v
    else:
        edge = Edges(source=self.source['node_id'],
                     destination=self.destination['node_id'],
                     fundamental=v)
        session.add(edge)
    session.commit()
    session.close()
def matches(self):
    session = Session()
    q = session.query(Matches)
    qf = q.filter(Matches.source == self.source['node_id'],
                  Matches.destination == self.destination['node_id'])
    odf = pd.read_sql(qf.statement, session.bind).set_index('id')
    # Re-wrap as a plain DataFrame so the returned object does not hold
    # references into the SQL result
    df = pd.DataFrame(odf.values, index=odf.index.values,
                      columns=odf.columns.values)
    df.index.name = 'id'
    # Explicit close to get the session cleaned up
    session.close()
    return DbDataFrame(df, parent=self, name='matches')
def ring(self, ring):
    # Setters need a single session and so should not make use of the
    # syntactic sugar _from_db
    session = Session()
    res = session.query(Edges).\
          filter(Edges.source == self.source['node_id']).\
          filter(Edges.destination == self.destination['node_id']).first()
    if res:
        res.ring = ring
    else:
        edge = Edges(source=self.source['node_id'],
                     destination=self.destination['node_id'],
                     ring=ring)
        session.add(edge)
    session.commit()
    session.close()
def _from_db(self, table_obj):
    """
    Generic database query to pull the row associated with this edge
    from an arbitrary table. We assume that the table has source and
    destination columns matching this edge's source and destination
    node ids.

    Parameters
    ----------
    table_obj : object
                The declared table class (from db.model)
    """
    session = Session()
    # Execute with .first() before closing so a row, not an unexecuted
    # Query bound to a closed session, is returned
    res = session.query(table_obj).\
          filter(table_obj.source == self.source['node_id']).\
          filter(table_obj.destination == self.destination['node_id']).first()
    session.close()
    return res
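# A minimal usage sketch for the edge-level _from_db helper above
# (illustrative; `edge` is assumed to be a NetworkEdge instance):
#
#   res = edge._from_db(Edges)
#   if res is not None:
#       print(res.source, res.destination)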
def matches(self, v):
    to_db_add = []
    to_db_update = []
    df = pd.DataFrame(v)
    df.index.name = v.index.name
    # Get the query obj
    session = Session()
    q = session.query(Matches)
    for idx, row in df.iterrows():
        # Determine if this is an update or the addition of a new row
        if hasattr(row, 'id'):
            res = q.filter(Matches.id == row.id).first()
            match_id = row.id
        elif v.index.name == 'id':
            res = q.filter(Matches.id == row.name).first()
            match_id = row.name
        else:
            res = None
        if res:
            # Update: coerce numpy scalars to native Python types so the
            # DBAPI driver can adapt them
            mapping = {'id': match_id}
            for index in row.index:
                row_val = row[index]
                if isinstance(row_val, (int, np.integer)):
                    row_val = int(row_val)
                elif isinstance(row_val, (float, np.floating)):
                    row_val = float(row_val)
                elif isinstance(row_val, WKBElement):
                    continue
                mapping[index] = row_val
            to_db_update.append(mapping)
        else:
            match = Matches(source=int(row.source),
                            source_idx=int(row.source_idx),
                            destination=int(row.destination),
                            destination_idx=int(row.destination_idx))
            to_db_add.append(match)
    if to_db_add:
        session.bulk_save_objects(to_db_add)
    if to_db_update:
        session.bulk_update_mappings(Matches, to_db_update)
    session.commit()
    session.close()
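# bulk_update_mappings hands values straight to the DBAPI driver, which
# cannot adapt numpy scalar types; hence the int()/float() coercion above.
# A standalone sketch of the same idea (illustrative helper name):
#
#   import numpy as np
#
#   def to_native(val):
#       """Coerce numpy scalars to builtin Python types."""
#       if isinstance(val, np.integer):
#           return int(val)
#       if isinstance(val, np.floating):
#           return float(val)
#       return val
#
#   to_native(np.int64(5))    # 5
#   to_native(np.float32(2))  # 2.0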
def __init__(self, *args, parent=None, **kwargs):
    # If this is the first time that the image is seen, add it to the DB
    if parent is None:
        self.parent = Parent(config)
    else:
        self.parent = parent
    # Create a session to work in
    session = Session()
    # For now, just use the path to determine if the node/image is in the DB
    res = session.query(Images).filter(
        Images.path == kwargs['image_path']).first()
    exists = False
    if res:
        exists = True
        kwargs['node_id'] = res.id
    session.close()
    super(NetworkNode, self).__init__(*args, **kwargs)
    if exists is False:
        # Create the camera entry
        try:
            self._camera = create_camera(self.geodata)
            serialized_camera = self._camera.getModelState()
            cam = Cameras(camera=serialized_camera)
        except Exception:
            # Camera creation can fail; persist the image without one
            cam = None
        kpspath = io_keypoints.create_output_path(self.geodata.file_name)
        # Create the keypoints entry
        kps = Keypoints(path=kpspath, nkeypoints=0)
        # Create the image
        i = Images(name=kwargs['image_name'],
                   path=kwargs['image_path'],
                   footprint_latlon=self.footprint,
                   cameras=cam,
                   keypoints=kps)
        session = Session()
        session.add(i)
        session.commit()
        session.close()
    self.job_status = defaultdict(dict)
def _from_db(self, table_obj, key='image_id'):
    """
    Generic database query to pull the row associated with this node
    from an arbitrary table. We assume that the row id matches the node_id.

    Parameters
    ----------
    table_obj : object
                The declared table class (from db.model)

    key : str
          The name of the column to compare this object's node_id with. For
          most tables this will be the default, 'image_id', because
          'image_id' is the foreign key in the DB. For the Images table
          (the parent table), the key is simply 'id'.
    """
    if 'node_id' not in self.keys():
        return
    session = Session()
    res = session.query(table_obj).filter(
        getattr(table_obj, key) == self['node_id']).first()
    session.close()
    return res
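# Usage sketch (illustrative; `node` is assumed to be a NetworkNode): pull
# this node's Keypoints row, which keys on the default 'image_id', and its
# parent Images row, which keys on 'id':
#
#   kp_row = node._from_db(Keypoints)
#   img_row = node._from_db(Images, key='id')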
def masks(self, v):
    def dict_check(d):
        # Recursively replace NaN values with None so the masks
        # serialize cleanly to the DB
        for k, val in d.items():
            if isinstance(val, dict):
                dict_check(val)
            elif val is None:
                continue
            elif np.isnan(val):
                d[k] = None
    df = pd.DataFrame(v)
    session = Session()
    res = session.query(Edges).\
          filter(Edges.source == self.source['node_id']).\
          filter(Edges.destination == self.destination['node_id']).first()
    if res:
        as_dict = df.to_dict()
        dict_check(as_dict)
        # Update the masks
        res.masks = as_dict
        session.add(res)
        session.commit()
    session.close()
def matches(self):
    session = Session()
    session.query(Matches).filter(
        Matches.source == self.source['node_id'],
        Matches.destination == self.destination['node_id']).delete()
    session.commit()
    session.close()