def observation_generator(self, sql, sql_args):
    """Build Observation instances from an arbitrary observation query.

    Note: despite the historical name, this is not a lazy generator — it
    runs the query eagerly and returns a fully-populated list.

    :param sql:
        A SQL statement which must return rows describing observations
        (columns: obstory_id, obstory_name, obsTime, publicId, obsType, uid).
    :param sql_args:
        Any variables required to populate the query provided in 'sql'.
    :return:
        A list of Observation instances built from the supplied SQL, each
        with its metadata, file records and like-count populated.
    """
    self.con.execute(sql, sql_args)
    results = self.con.fetchall()
    output = []
    for result in results:
        observation = mp.Observation(obstory_id=result['obstory_id'],
                                     obstory_name=result['obstory_name'],
                                     obs_time=result['obsTime'],
                                     obs_id=result['publicId'],
                                     obs_type=result['obsType'])

        # Look up observation metadata. Use a distinct variable name so the
        # 'sql' parameter is not clobbered inside the loop.
        meta_sql = """SELECT f.metaKey, stringValue, floatValue
                      FROM archive_metadata m
                      INNER JOIN archive_metadataFields f ON m.fieldId = f.uid
                      WHERE m.observationId = %s"""
        self.con.execute(meta_sql, (result['uid'],))
        for item in self.con.fetchall():
            # Each metadata value lives in exactly one of the two columns
            value = first_non_null([item['stringValue'], item['floatValue']])
            observation.meta.append(mp.Meta(item['metaKey'], value))

        # Fetch the file records attached to this observation
        file_sql = "SELECT f.repositoryFname FROM archive_files f WHERE f.observationId=%s"
        self.con.execute(file_sql, (result['uid'],))
        for item in self.con.fetchall():
            observation.file_records.append(
                self.db.get_file(item['repositoryFname']))

        # Count votes ("likes") for this observation
        self.con.execute(
            "SELECT COUNT(*) FROM archive_obs_likes WHERE observationId="
            "(SELECT uid FROM archive_observations WHERE publicId=%s);",
            (result['publicId'],))
        observation.likes = self.con.fetchone()['COUNT(*)']
        output.append(observation)
    return output
def obsgroup_generator(self, sql, sql_args):
    """Build ObservationGroup instances from an arbitrary group query.

    Note: despite the historical name, this is not a lazy generator — it
    runs the query eagerly and returns a fully-populated list.

    :param sql:
        A SQL statement which must return rows describing observation groups
        (columns: publicId, title, time, setAtTime, semanticType, setByUser, uid).
    :param sql_args:
        Any variables required to populate the query provided in 'sql'.
    :return:
        A list of ObservationGroup instances built from the supplied SQL,
        each with its metadata and member observations populated.
    """
    self.con.execute(sql, sql_args)
    results = self.con.fetchall()
    output = []
    for result in results:
        obs_group = mp.ObservationGroup(
            group_id=result['publicId'],
            title=result['title'],
            obs_time=result['time'],
            set_time=result['setAtTime'],
            semantic_type=result['semanticType'],
            user_id=result['setByUser'])

        # Look up observation group metadata. Use a distinct variable name so
        # the 'sql' parameter is not clobbered inside the loop.
        meta_sql = """SELECT f.metaKey, stringValue, floatValue
                      FROM archive_metadata m
                      INNER JOIN archive_metadataFields f ON m.fieldId = f.uid
                      WHERE m.groupId = %s"""
        self.con.execute(meta_sql, (result['uid'],))
        for item in self.con.fetchall():
            # Each metadata value lives in exactly one of the two columns
            value = first_non_null([item['stringValue'], item['floatValue']])
            obs_group.meta.append(mp.Meta(item['metaKey'], value))

        # Fetch the observations that are members of this group
        member_sql = """SELECT o.publicId
                        FROM archive_obs_group_members m
                        INNER JOIN archive_observations o ON m.observationId = o.uid
                        WHERE m.groupId = %s"""
        self.con.execute(member_sql, (result['uid'],))
        for item in self.con.fetchall():
            obs_group.obs_records.append(
                self.db.get_observation(item['publicId']))
        output.append(obs_group)
    return output
def file_generator(self, sql, sql_args):
    """Build FileRecord instances from an arbitrary file query.

    Note: despite the historical name, this is not a lazy generator — it
    runs the query eagerly and returns a fully-populated list.

    :param sql:
        A SQL statement which must return rows describing files
        (columns: obstory_id, obstory_name, observationId, repositoryFname,
        fileTime, fileSize, fileName, mimeType, fileMD5, semanticType, uid).
    :param sql_args:
        Any variables required to populate the query provided in 'sql'.
    :return:
        A list of FileRecord instances built from the supplied SQL, each
        with its metadata populated.
    """
    self.con.execute(sql, sql_args)
    results = self.con.fetchall()
    output = []
    for result in results:
        file_record = mp.FileRecord(
            obstory_id=result['obstory_id'],
            obstory_name=result['obstory_name'],
            observation_id=result['observationId'],
            repository_fname=result['repositoryFname'],
            file_time=result['fileTime'],
            file_size=result['fileSize'],
            file_name=result['fileName'],
            mime_type=result['mimeType'],
            file_md5=result['fileMD5'],
            semantic_type=result['semanticType'])

        # Look up file metadata. Use a distinct variable name so the 'sql'
        # parameter is not clobbered inside the loop.
        meta_sql = """SELECT f.metaKey, stringValue, floatValue
                      FROM archive_metadata m
                      INNER JOIN archive_metadataFields f ON m.fieldId = f.uid
                      WHERE m.fileId = %s"""
        self.con.execute(meta_sql, (result['uid'],))
        for item in self.con.fetchall():
            # Each metadata value lives in exactly one of the two columns
            value = first_non_null([item['stringValue'], item['floatValue']])
            file_record.meta.append(mp.Meta(item['metaKey'], value))
        output.append(file_record)
    return output
def metadata_to_object_list(db_handle, obs_time, obs_id, meta_dict):
    """Convert a metadata dictionary into a list of Meta objects.

    Short values (anything that is not a string, or strings under ~64kB) are
    stored directly as metadata. Longer strings are written to a temporary
    file and registered with the database as a file record attached to the
    observation instead.

    :param db_handle:
        Database handle exposing register_file().
    :param obs_time:
        Timestamp associated with the observation; used as the file time for
        any long values registered as files.
    :param obs_id:
        Public ID of the observation that long values are attached to.
    :param meta_dict:
        Dictionary mapping metadata field names to values.
    :return:
        A list of Meta objects for the short values; long values produce no
        list entry (they are registered as files as a side effect).
    """
    metadata_objs = []
    for meta_field, value in meta_dict.items():
        # Short string fields get stored as string metadata (up to 64kB, or just under)
        if not isinstance(value, str) or len(value) < 65500:
            metadata_objs.append(
                mp.Meta("meteorpi:" + meta_field, value))
        # Long strings are turned into separate files
        else:
            fname = os.path.join("/tmp", str(uuid.uuid4()))
            # Use a context manager so the file is flushed and closed before
            # it is handed to register_file
            with open(fname, "w") as out_file:
                out_file.write(value)
            db_handle.register_file(file_path=fname,
                                    mime_type="application/json",
                                    semantic_type=meta_field,
                                    file_time=obs_time,
                                    file_meta=[],
                                    observation_id=obs_id,
                                    # NOTE(review): 'user' is not defined in this
                                    # function — presumably a module-level global;
                                    # verify, or pass it in explicitly.
                                    user_id=user)
    return metadata_objs
events = cur.fetchall() print "Fetched %8d events" % len(events) for item in events: obstory_id = item[2].strip() utc = item[3] / 1000.0 meta_list = [] if item[0] in eventMetaDict: for meta in eventMetaDict[item[0]]: if meta[2] is not None: meta_val = meta[2] elif meta[3] is not None: meta_val = meta[3] / 1000.0 else: meta_val = meta[4] meta_list.append(mp.Meta(meta[1], meta_val)) obs = db.register_observation(obstory_name=obstory_id, user_id="migrate", obs_time=utc, obs_type="movingObject", obs_meta=meta_list) event_public_ids[item[0]] = obs.obs_id db.commit() # Migrate files with closing(con.trans()) as transaction: with closing(transaction.cursor()) as cur: cur.execute("""SELECT internalID, cameraID, fileID, mimeType, fileName, semanticType, fileTime, fileOffset, fileSize, statusID, md5Hex FROM t_file;""") files = cur.fetchall()