def find_data_entries(self, dataset, offset, limit, start_time=None, end_time=None):
    """Retrieve a page of data entries for a dataset, optionally bounded by time."""
    try:
        with self.connection() as repo:
            # Convert the optional datetime bounds into the repository's time format.
            start_time = dam.format_time(start_time) if start_time is not None else None
            end_time = dam.format_time(end_time) if end_time is not None else None
            dam_objs = repo.retrieve_tuples("data", dataset=dataset.repository_id,
                    offset=offset, limit=limit, startTime=start_time, endTime=end_time)
    except dam.DAMException as e:
        logger.exception("Exception while getting data entries")
        raise PersistenceError("Error getting data entries: %s" % str(e))
    # Map each repository tuple onto a DataEntry domain object.
    ret = []
    for dam_obj in dam_objs["results"]:
        data_entry = DataEntry()
        data_entry.id = dam_obj["metadata"]["id"]
        data_entry.dataset = dataset.id
        data_entry.timestamp = parse_timestamp(dam_obj["metadata"]["time"])
        self._copy_attrs(dam_obj["data"], data_entry)
        ret.append(data_entry)
    return SearchResults(ret, dam_objs["offset"], dam_objs["limit"], dam_objs["count"])
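# Usage sketch (hypothetical names: ``service`` is an instance of this
# persistence class and ``dataset`` a Dataset already loaded through it;
# assumes ``SearchResults`` exposes the wrapped list as ``results``):
#
#     from datetime import datetime
#
#     page = service.find_data_entries(dataset, offset=0, limit=20,
#             start_time=datetime(2013, 1, 1), end_time=datetime(2013, 2, 1))
#     for entry in page.results:
#         print(entry.id, entry.timestamp)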
def _create_data_entry(self, obs, schema):
    """Internal method for creating the DataEntry domain object
    from a database observation.
    """
    entry = DataEntry()
    entry.dataset = obs.dataset
    entry.id = obs.id
    entry.timestamp = obs.timestamp
    for attr in obs.attrs:
        # File-typed attributes store a path on disk; wrap it in a FileObject
        # so callers get a file handle rather than a raw path string.
        if isinstance(schema.attrs[attr.name], FileDataType):
            entry[attr.name] = FileObject(f_path=attr.value)
        else:
            entry[attr.name] = attr.value
    return entry
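# ``_copy_attrs`` is used above but defined elsewhere in this class. A minimal
# sketch of its assumed behaviour (copying the repository ``data`` dict onto
# the domain object), not the actual implementation:
#
#     def _copy_attrs(self, data, data_entry):
#         for name, value in data.items():
#             data_entry[name] = value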
def get_data_entry(self, dataset_id, data_entry_id):
    """Fetch a single DataEntry by id, or None if no such entry exists."""
    try:
        with self.connection() as repo:
            dam_obj = repo.getTuples(data_entry_id)
    except dam.DAMException as e:
        logger.exception("Exception while getting data entry")
        raise PersistenceError("Error getting data entry: %s" % str(e))
    # No matching tuple means the entry does not exist.
    if dam_obj is None or len(dam_obj) == 0:
        return None
    dam_obj = dam_obj[0]
    data_entry = DataEntry()
    data_entry.id = data_entry_id
    data_entry.dataset = dataset_id
    data_entry.timestamp = parse_timestamp(dam_obj["metadata"]["time"])
    self._copy_attrs(dam_obj["data"], data_entry)
    return data_entry
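# Illustrative call (hypothetical ids; ``service`` as in the sketch above):
#
#     entry = service.get_data_entry(dataset_id=1, data_entry_id="abc123")
#     if entry is None:
#         pass  # no tuple is stored under that id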