def find_data_entries(self, dataset, offset, limit, start_time=None, end_time=None):
    """Retrieve one page of data entries for *dataset* from the repository.

    :param dataset: dataset object; its ``repository_id`` is used for the
        query and its ``id`` is stamped onto each returned entry
    :param offset: zero-based index of the first result to return
    :param limit: maximum number of results to return
    :param start_time: optional lower bound on entry timestamp (formatted
        via ``dam.format_time`` before querying)
    :param end_time: optional upper bound on entry timestamp
    :return: ``SearchResults`` of ``DataEntry`` objects plus the paging
        metadata (offset/limit/count) reported by the repository
    :raise PersistenceError: if the underlying DAM call fails
    """
    try:
        with self.connection() as repo:
            # Only format bounds that were actually supplied; None means
            # "unbounded" and is passed straight through.
            start_time = dam.format_time(start_time) if start_time is not None else None
            end_time = dam.format_time(end_time) if end_time is not None else None
            dam_objs = repo.retrieve_tuples(
                "data",
                dataset=dataset.repository_id,
                offset=offset,
                limit=limit,
                startTime=start_time,
                endTime=end_time)
    except dam.DAMException as e:
        logger.exception("Exception while getting data entries")
        # Chain the original exception so the DAM failure isn't lost.
        raise PersistenceError("Error getting data entries: %s" % e) from e
    ret = []
    for dam_obj in dam_objs["results"]:
        data_entry = DataEntry()
        data_entry.id = dam_obj["metadata"]["id"]
        data_entry.dataset = dataset.id
        data_entry.timestamp = parse_timestamp(dam_obj["metadata"]["time"])
        # Copy the raw attribute payload onto the entry object.
        self._copy_attrs(dam_obj["data"], data_entry)
        ret.append(data_entry)
    return SearchResults(ret, dam_objs["offset"], dam_objs["limit"], dam_objs["count"])
def persist_data_entry(self, dataset, schema, data_entry, cwd):
    """Validate and persist *data_entry* into *dataset*.

    :param dataset: dataset object (``repository_id`` is used for ingest,
        ``id`` for the final lookup)
    :param schema: schema whose ``attrs`` the entry data is validated against
    :param data_entry: entry carrying ``timestamp`` and a ``data`` dict
    :param cwd: working directory passed to attribute persistence
    :return: the freshly persisted entry, re-read via ``get_data_entry``
    :raise PersistenceError: on duplicate entries or any other DAM failure
    """
    # Check the attributes are actually in the schema
    self.validate_schema(data_entry.data, schema.attrs)
    try:
        with self.connection() as repo:
            dam_obs = {
                "dam_type": "Observation",
                "dataset": dataset.repository_id,
                "time": dam.format_time(data_entry.timestamp)
            }
            dam_obs = repo.ingest(dam_obs, lock=True)
            try:
                self._persist_attributes(dam_obs, data_entry.data, cwd)
            finally:
                # Always release the ingest lock, even when attribute
                # persistence fails — otherwise the observation stays
                # locked in the repository.
                repo.unlock(dam_obs["id"])
            self.mark_for_reset(dam_obs["id"])
            return self.get_data_entry(dataset.id, dam_obs["id"])
    except dam.DuplicateEntityException as e:
        logger.exception("Exception while persisting data entry")
        raise PersistenceError(
            "Error persisting data entry: DuplicateEntityException %s" % e) from e
    except dam.DAMException as e:
        logger.exception("Exception while persisting data entry")
        raise PersistenceError("Error persisting data entry: %s" % e) from e
def find_data_entries(self, dataset, offset, limit, start_time=None, end_time=None):
    """Look up a page of entries in *dataset*, optionally bounded by time.

    Returns a ``SearchResults`` wrapping ``DataEntry`` objects together with
    the repository's paging info; raises ``PersistenceError`` when the
    repository query fails.
    """
    try:
        with self.connection() as repo:
            # A None bound means "unbounded"; only real values get formatted.
            if start_time is not None:
                start_time = dam.format_time(start_time)
            if end_time is not None:
                end_time = dam.format_time(end_time)
            dam_objs = repo.retrieve_tuples(
                "data", dataset=dataset.repository_id, offset=offset,
                limit=limit, startTime=start_time, endTime=end_time)
    except dam.DAMException as e:
        logger.exception("Exception while getting data entries")
        raise PersistenceError("Error getting data entries: %s"%(str(e)))

    entries = []
    for raw in dam_objs["results"]:
        entry = DataEntry()
        entry.id = raw["metadata"]["id"]
        entry.dataset = dataset.id
        entry.timestamp = parse_timestamp(raw["metadata"]["time"])
        # Attach the raw attribute payload to the entry.
        self._copy_attrs(raw["data"], entry)
        entries.append(entry)

    return SearchResults(entries, dam_objs["offset"], dam_objs["limit"],
                         dam_objs["count"])
def persist_data_entry(self, dataset, schema, data_entry, cwd):
    """Validate *data_entry* against *schema* and store it in *dataset*.

    :param dataset: dataset object (``repository_id`` used for ingest,
        ``id`` for the final read-back)
    :param schema: schema whose ``attrs`` the entry data must conform to
    :param data_entry: entry providing ``timestamp`` and a ``data`` dict
    :param cwd: working directory handed to attribute persistence
    :return: the persisted entry, re-fetched via ``get_data_entry``
    :raise PersistenceError: on duplicates or any other DAM failure
    """
    # Check the attributes are actually in the schema
    self.validate_schema(data_entry.data, schema.attrs)
    try:
        with self.connection() as repo:
            observation = {
                "dam_type": "Observation",
                "dataset": dataset.repository_id,
                "time": dam.format_time(data_entry.timestamp),
            }
            observation = repo.ingest(observation, lock=True)
            try:
                self._persist_attributes(observation, data_entry.data, cwd)
            finally:
                # Release the lock taken by ingest() on both the success
                # and failure paths; without this a failed attribute write
                # would leave the observation permanently locked.
                repo.unlock(observation["id"])
            self.mark_for_reset(observation["id"])
            return self.get_data_entry(dataset.id, observation["id"])
    except dam.DuplicateEntityException as e:
        logger.exception("Exception while persisting data entry")
        raise PersistenceError("Error persisting data entry: DuplicateEntityException %s"%(str(e)))
    except dam.DAMException as e:
        logger.exception("Exception while persisting data entry")
        raise PersistenceError("Error persisting data entry: %s"%(str(e)))