def save(self, data):
    data = utils.to_list(data)
    if len(data) == 0:
        return []

    group = self._refine_group_from_data(self.GROUP, data)
    return Key(self._save(group, data))
def remove_records(self, key):
    self.__make_connection()
    # delete_many removes every document whose _id is in the given key list
    # and reports how many were actually deleted.
    return self.__collection.delete_many(
        {'_id': {'$in': utils.to_list(key)}}).deleted_count
def activate_records(self, key):
    self.__make_connection()
    return self.__collection.update_many(
        {'_id': {'$in': utils.to_list(key)}},
        {'$set': {'completed': False}})
def mark_complete(self, key):
    self.__make_connection()
    return self.__collection.update_many(
        {'_id': {'$in': utils.to_list(key)}},
        {'$set': {'completed': True}})
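# A standalone pymongo sketch of the completion-flag toggling performed by
# activate_records()/mark_complete() above. The client, database, and
# collection names are placeholders, not the repository's real configuration.
from pymongo import MongoClient


def set_completed(collection, keys, completed):
    # update_many matches every document whose _id is in 'keys' and sets the
    # flag in a single round trip; it returns an UpdateResult.
    return collection.update_many(
        {'_id': {'$in': list(keys)}},
        {'$set': {'completed': completed}})


if __name__ == '__main__':
    collection = MongoClient()['example_db']['example_collection']
    result = set_completed(collection, [1, 2, 3], True)
    print(result.modified_count, 'documents marked complete')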
def remove_dataset(self, datasets):
    datasets = utils.to_list(datasets)
    with HDF5File() as file:
        group = file[self.GROUP]
        for dataset in self._get_datasets(group):
            if dataset in datasets:
                self._remove_index_data(
                    self.GROUP, Key(list(group[dataset].attrs.keys())))
                del group[dataset]

    return True
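# A bare h5py sketch of the deletion pattern in remove_dataset() above,
# without the HDF5File wrapper or the index bookkeeping. The file path,
# group name, and dataset names are placeholders.
import h5py


def delete_datasets(path, group_name, names):
    removed_keys = []
    with h5py.File(path, 'r+') as file:
        group = file[group_name]
        for name in list(group.keys()):
            if name in names:
                # Record the attribute keys before unlinking, mirroring how
                # remove_dataset() clears its index entries first.
                removed_keys.extend(group[name].attrs.keys())
                del group[name]

    return removed_keys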
def save(self, data):
    self.__make_connection()
    try:
        return self.__collection.insert_many(
            utils.to_list(data), ordered=False).inserted_ids
    except BulkWriteError as err:
        # Collect the documents that were rejected (typically duplicate keys)
        # and dump them to the data directory for inspection.
        self.failed_instances.extend([
            failed_instance['op']
            for failed_instance in err.details['writeErrors']
        ])
        print(
            f'Failed to write {len(self.failed_instances)} {self.COLLECTION}s! '
            'These are probably duplicates, but dumping to data directory anyway...',
            flush=True)
        utils.save_json(
            json.loads(json_util.dumps(self.failed_instances)),
            utils.rotate_file(
                os.path.join(config.DATA_DIR, f'{self.COLLECTION}.json')))
        self.failed_instances = []
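# A minimal, self-contained sketch of the same duplicate-tolerant bulk insert
# pattern used by save() above, assuming a local MongoDB instance and a unique
# index on '_id'. The database and collection names are placeholders.
from pymongo import MongoClient
from pymongo.errors import BulkWriteError


def insert_ignoring_duplicates(docs):
    collection = MongoClient()['example_db']['example_collection']
    try:
        # ordered=False keeps inserting past duplicate-key failures instead of
        # aborting the whole batch on the first error.
        return collection.insert_many(docs, ordered=False).inserted_ids
    except BulkWriteError as err:
        # Each entry in 'writeErrors' carries the offending document under 'op'.
        failed = [e['op'] for e in err.details['writeErrors']]
        print(f'{len(failed)} documents were rejected (likely duplicates).',
              flush=True)
        return failed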
def test_to_list(data, length):
    ls_data = utils.to_list(data)

    assert isinstance(ls_data, (list, tuple))
    assert all(map(lambda x: isinstance(x, dict), ls_data))
    assert len(ls_data) == length
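# A hedged sketch of how test_to_list might be parametrized. The (data, length)
# pairs below are illustrative assumptions, not the project's actual fixtures,
# and 'utils' is assumed to be imported as in the surrounding test module.
import pytest


@pytest.mark.parametrize('data,length', [
    ({'_id': 1}, 1),                 # a single dict becomes a one-item list
    ([{'_id': 1}, {'_id': 2}], 2),   # a list passes through with its length intact
])
def test_to_list_example(data, length):
    ls_data = utils.to_list(data)
    assert len(ls_data) == length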
def __init__(self, impl=None):
    self.impl = impl or rxns.create_intramolecular_reactions()
    self.impl = utils.to_list(self.impl)
def __init__(self, impl=None):
    self.impl = impl or rxns.create_intermolecular_reactions()
    self.impl = utils.to_list(self.impl)
    self.backbones = list(
        map(lambda x: x.mol, repo.create_backbone_repository().load()))