def remove(graphs_cancellation_ids):
    """Remove Graph Cancellation events with given Ids.

    Args:
        graphs_cancellation_ids (list of ObjectID): List of Graph IDs to remove
    """
    query = {'_id': {'$in': graphs_cancellation_ids}}
    get_db_connector().graphs_cancellations.delete_many(query)
def remove(runs_cancellation_ids):
    """Remove Run Cancellation events with given Ids.

    Args:
        runs_cancellation_ids (list of ObjectID): List of Run IDs to remove
    """
    collection = get_db_connector()[Collections.RUN_CANCELLATIONS]
    collection.delete_many({'_id': {'$in': runs_cancellation_ids}})
def get_graph_cancellations():
    """Get all Graph Cancellation events.

    Return:
        (list of GraphCancellation) All cancellation events in the collection.
    """
    return [
        GraphCancellation.from_dict(doc)
        for doc in get_db_connector().graphs_cancellations.find()
    ]
def save(self, force=False):
    """Save Object in the database.

    Args:
        force (bool): Write even when the object is not marked dirty.

    Return:
        (bool) Always True.

    Raises:
        ClassNotSavable: if the class declares no DB_COLLECTION.
    """
    collection_name = self.__class__.DB_COLLECTION
    if not collection_name:
        raise ClassNotSavable("Class `{}` is not savable.".format(
            self.__class__.__name__))
    if force or self.is_dirty():
        timestamp = datetime.datetime.utcnow()
        document = self.to_dict()
        document["update_date"] = timestamp
        collection = getattr(get_db_connector(), collection_name)
        # Upsert: keep the original insertion_date, overwrite everything else.
        collection.find_one_and_update(
            {'_id': document['_id']},
            {
                "$setOnInsert": {"insertion_date": timestamp},
                "$set": document,
            },
            upsert=True,
        )
        self._dirty = False
    return True
def get_run_cancellations():
    """Get all Run Cancellation events.

    Return:
        (list of RunCancellation) All cancellation events in the collection.
    """
    cursor = get_db_connector()[Collections.RUN_CANCELLATIONS].find()
    return [RunCancellation.from_dict(doc) for doc in cursor]
def get_db_graphs(author, search=None, per_page=20, offset=0, status=None, recent=False):
    """Get subset of the Graphs.

    Args:
        author (ObjectId): Author of the Graphs
        search (str): Search pattern
        per_page (int): Number of Graphs per page
        offset (int): Offset
        status (str, None): Graph Running Status filter
        recent (bool): Sort by update date instead of insertion date

    Return:
        (list of dicts) List of Graphs in dict format
    """
    and_query = GraphCollectionManager._get_basic_query(
        author=author,
        search=search,
        status=status,
    )
    order_by = 'update_date' if recent else 'insertion_date'
    cursor = get_db_connector().graphs.find({'$and': and_query})
    cursor = cursor.sort(order_by, -1).skip(offset).limit(per_page)
    return list(cursor)
def get_db_nodes(author, status=None, base_node_names=None, search=None, per_page=20, offset=0):
    """Get subset of the Nodes.

    Args:
        author (ObjectId): Author of the Nodes
        status (str, None): Node Running Status
        base_node_names (str, list of str, None): Node Running Status
        search (str, None): Search pattern
        per_page (int): Number of Nodes per page
        offset (int): Offset

    Return:
        (list of dict) List of Nodes in dict format
    """
    and_query = NodeCollectionManager._get_basic_query(
        author=author,
        status=status,
        base_node_names=base_node_names,
        search=search)
    cursor = get_db_connector().nodes.find(
        {'$and': and_query}).sort('insertion_date', -1).skip(offset).limit(per_page)

    def _annotate(node_dict):
        # Only the author may edit; everyone else sees the node read-only.
        node_dict['_readonly'] = (author != to_object_id(node_dict['author']))
        return node_dict

    return [_annotate(node_dict) for node_dict in cursor]
def get_db_node(self, node_id, user_id=None):
    """Get dict representation of the Node.

    Args:
        node_id (ObjectId, str): Node ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict, None) dict representation of the Node, or None if not found
    """
    res = get_db_connector()[self.collection].find_one(
        {'_id': to_object_id(node_id)})
    if not res:
        return res
    # Normalize user_id as well: a str user_id would never compare equal to
    # the ObjectId stored in `author`, marking every node read-only.
    res['_readonly'] = (to_object_id(user_id) != to_object_id(res['author']))

    # Pull the embedded sub-node dicts out of the special `_nodes` parameter.
    sub_nodes_dicts = None
    for parameter in res['parameters']:
        if parameter['name'] == '_nodes':
            sub_nodes_dicts = parameter['value']['value']
            break

    # TODO join collections using database capabilities
    if self.collection == Collections.RUNS:
        self._update_sub_nodes_fields(sub_nodes_dicts, '_id',
                                      _PROPERTIES_TO_GET_FROM_SUBS)
    self._update_sub_nodes_fields(
        sub_nodes_dicts,
        'original_node_id',
        ['node_status'],
        reference_collection=Collections.TEMPLATES)
    return res
def get_db_nodes_by_ids(ids):
    """Find all the Nodes with a given IDs.

    Args:
        ids (list of ObjectID): Node Ids

    Return:
        (list of dict) Matching Nodes in dict format
    """
    return list(get_db_connector().nodes.find({
        '_id': {'$in': list(ids)},
    }))
def get_list(start_datetime=None, end_datetime=None, non_protected_only=False):
    """List of NodeCache objects.

    Args:
        start_datetime (datetime, None): Start datetime or None if selecting from beginning
        end_datetime (datetime, None): End datetime or None if selecting until now
        non_protected_only (bool): Restrict the query to non-protected entries

    Return:
        Iterator on the list of dict-like objects
    """
    query = NodeCacheManager._make_query(
        start_datetime, end_datetime, non_protected_only)
    return get_db_connector().node_cache.find(query)
def get_worker_states():
    """Return the latest state per worker, ordered by worker id.

    States are scanned newest-first, so the first state seen for each
    worker_id is its most recent one.
    """
    cursor = getattr(get_db_connector(), Collections.WORKER_STATE) \
        .find({}).sort('insertion_date', -1)
    latest_by_worker = {}
    for state in cursor:
        worker_id = state['worker_id']
        if worker_id not in latest_by_worker:
            latest_by_worker[worker_id] = state
    return sorted(latest_by_worker.values(), key=lambda s: s['worker_id'])
def get_graphs(graph_running_status):
    """Find all the Graphs with a given graph_running_status.

    Args:
        graph_running_status (str): Graph Running Status

    Return:
        (list of Graph) Matching graphs as Graph objects
    """
    cursor = get_db_connector().graphs.find(
        {'graph_running_status': graph_running_status})
    return [Graph.from_dict(graph_dict) for graph_dict in cursor]
def get_db_graph(graph_id):
    """Get dict representation of the Graph.

    Args:
        graph_id (ObjectId, str): Graph ID

    Return:
        (dict) dict representation of the Graph
    """
    graph_dict = get_db_connector().graphs.find_one(
        {'_id': to_object_id(graph_id)})
    return GraphCollectionManager._update_node_statuses(graph_dict)
def get_db_objects_by_ids(self, ids, collection=None):
    """Find all the Objects with a given IDs.

    Args:
        ids (list of ObjectID): Object Ids
        collection (str, None): Collection name; defaults to self.collection

    Return:
        (list of dict) Matching objects in dict format
    """
    target_collection = collection if collection else self.collection
    cursor = get_db_connector()[target_collection].find(
        {'_id': {'$in': list(ids)}})
    return list(cursor)
def find_user_by_name(username):
    """Find User.

    Args:
        username (str): Username

    Return:
        (User) User object or None
    """
    users_collection = getattr(get_db_connector(), User.DB_COLLECTION)
    user_dict = users_collection.find_one({'username': username})
    return User(user_dict) if user_dict else None
def get_db_node(node_id, user_id=None):
    """Get dict representation of the Node.

    Args:
        node_id (ObjectId, str): Node ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict, None) dict representation of the Node, or None if not found
    """
    res = get_db_connector().nodes.find_one({'_id': to_object_id(node_id)})
    if res:
        # Normalize user_id as well: a str user_id would never compare equal
        # to the ObjectId stored in `author`, marking every node read-only.
        res['_readonly'] = (to_object_id(user_id) != to_object_id(res['author']))
    return res
def load(cls, _id):
    """Load object from db.

    Args:
        _id (str, ObjectId): ID of the object in DB

    Raises:
        DBObjectNotFound: if no object with the given ID exists.
    """
    collection = getattr(get_db_connector(), cls.DB_COLLECTION)
    obj_dict = collection.find_one({'_id': ObjectId(_id)})
    if not obj_dict:
        raise DBObjectNotFound(
            'Object `{_id}` not found in `{collection}` collection'.format(
                _id=_id,
                collection=cls.DB_COLLECTION,
            ))
    return cls.from_dict(obj_dict)
def get_db_graphs_count(author, search=None):
    """Get number of the Graphs that satisfy given conditions.

    Args:
        author (ObjectId): Author of the Graphs
        search (str): Search pattern

    Return:
        (int) Number of Graphs found.
    """
    and_query = GraphCollectionManager._get_basic_query(
        author=author,
        search=search,
    )
    # `Collection.count` is deprecated (removed in pymongo 4);
    # `count_documents` takes the same filter and returns an int.
    return get_db_connector().graphs.count_documents({'$and': and_query})
def get_db_graph(graph_id, user_id=None):
    """Get dict representation of the Graph.

    Args:
        graph_id (ObjectId, str): Graph ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict) dict representation of the Graph
    """
    graph_dict = get_db_connector().graphs.find_one(
        {'_id': to_object_id(graph_id)})
    res = GraphCollectionManager._update_node_statuses(graph_dict)
    if not res:
        return res
    # Only the author may edit; both sides normalized to ObjectId.
    res['_readonly'] = (
        to_object_id(user_id) != to_object_id(res['author']))
    return res
def get_db_object(self, object_id, user_id=None):
    """Get dict representation of an Object.

    Args:
        object_id (ObjectId, str): Object ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict, None) dict representation of the Object, or None if not found
    """
    res = get_db_connector()[self.collection].find_one(
        {'_id': to_object_id(object_id)})
    if not res:
        return res
    # Normalize user_id as well: a str user_id would never compare equal to
    # the ObjectId stored in `author`, marking every object read-only.
    res['_readonly'] = (to_object_id(user_id) != to_object_id(res['author']))
    return res
def get(node, user_id):
    """Pull NodeCache if exists.

    Args:
        node (Node): Node object
        user_id (ObjectId, str): User ID

    Return:
        (NodeCache) NodeCache or None
    """
    cache_key = NodeCache.generate_key(node, user_id)
    # Newest matching entry only.
    cursor = get_db_connector().node_cache.find(
        {'key': cache_key}).sort('insertion_date', -1).limit(1)
    for cache_dict in cursor:
        return NodeCache.from_dict(cache_dict)
    return None
def get(node):
    """Pull NodeCache if exists.

    Args:
        node (Node): Node object

    Return:
        (NodeCache) NodeCache or None
    """
    cache_key = NodeCache.generate_key(node)
    query = {
        'key': cache_key,
        # Entries flagged `removed` are pending clean-up; skip them.
        'removed': {'$ne': True},
    }
    # Newest matching entry only.
    cursor = get_db_connector().node_cache.find(query) \
        .sort('insertion_date', -1).limit(1)
    for cache_dict in cursor:
        return NodeCache.from_dict(cache_dict)
    return None
def pick_node(self, kinds):
    """Atomically claim one runnable node of the given kinds.

    Finds a node whose kind is in `kinds` and whose running status is READY
    or IN_QUEUE, flips the status to RUNNING in the same find-and-update
    operation, and returns the updated document (None if nothing matched).

    Args:
        kinds (list of str): Acceptable node kinds
    """
    query = {
        '$and': [
            {'kind': {'$in': kinds}},
            {'node_running_status': {'$in': [
                NodeRunningStatus.READY,
                NodeRunningStatus.IN_QUEUE,
            ]}},
        ],
    }
    update = {'$set': {'node_running_status': NodeRunningStatus.RUNNING}}
    return get_db_connector()[self.collection].find_one_and_update(
        query, update, return_document=ReturnDocument.AFTER)
def get_db_nodes_count(author, status=None, base_node_names=None, search=None):
    """Get number of the Nodes with given conditions.

    Args:
        author (ObjectId): Author of the Nodes
        status (str, None): Node Running Status
        base_node_names (str, list of str, None): Node Running Status
        search (str, None): Search pattern

    Return:
        (int) Number of Nodes
    """
    and_query = NodeCollectionManager._get_basic_query(
        author=author,
        status=status,
        base_node_names=base_node_names,
        search=search,
    )
    # `Collection.count` is deprecated (removed in pymongo 4);
    # `count_documents` takes the same filter and returns an int.
    return get_db_connector().nodes.count_documents({'$and': and_query})
def get_db_graphs(search='', per_page=20, offset=0, status=None):
    """Get subset of the Graphs.

    Args:
        search (str): Search pattern; may embed parameters such as
            `author:...`, `order:...`, `desc:...` (parsed by
            parse_search_string)
        per_page (int): Number of Graphs per page
        offset (int): Offset
        status (str, list of str, None): Graph Running Status filter

    Return:
        (dict, None) Aggregation facet: `metadata` holds the total count,
        `list` holds the requested page of Graphs in dict format.
    """
    if status and isinstance(status, str):
        status = [status]
    aggregate_list = []
    search_parameters, search_string = parse_search_string(search)
    # Match on status and free-text search
    and_query = {}
    if status:
        and_query['graph_running_status'] = {'$in': status}
    if search_string:
        and_query['$text'] = {'$search': search_string}
    if len(and_query):
        aggregate_list.append({"$match": and_query})
    # Join with users to resolve the author
    aggregate_list.append({
        '$lookup': {
            'from': 'users',
            'localField': 'author',
            'foreignField': '_id',
            'as': '_user'
        }
    })
    # rm password hash before it leaves the database layer
    aggregate_list.append({"$project": {
        "_user.password_hash": 0,
    }})
    # Match username from the `author:` search parameter (post-lookup)
    and_query = {}
    if 'author' in search_parameters:
        and_query['_user.username'] = search_parameters['author']
    if len(and_query):
        aggregate_list.append({"$match": and_query})
    # sort: `order:` picks the key, `desc:` the direction (default: newest first)
    sort_key = search_parameters.get('order', 'insertion_date')
    try:
        sort_order = -1 if strtobool(search_parameters.get('desc', '1')) else 1
    except ValueError:
        # Unparseable `desc:` value falls back to descending
        sort_order = -1
    aggregate_list.append({"$sort": {sort_key: sort_order}})
    # counts and pagination in a single $facet stage
    aggregate_list.append({
        '$facet': {
            "metadata": [{
                "$count": "total"
            }],
            "list": [{
                "$skip": int(offset)
            }, {
                "$limit": int(per_page)
            }],
        }
    })
    return next(get_db_connector().graphs.aggregate(aggregate_list), None)
def clean_up():
    """Remove NodeCache objects with flag `removed` set.

    Return:
        (DeleteResult) Result of the bulk deletion.
    """
    # `Collection.remove` is deprecated (removed in pymongo 4);
    # `delete_many` performs the same bulk deletion.
    return get_db_connector().node_cache.delete_many({'removed': True})
def find_users():
    """Return a cursor over all documents in the users collection."""
    users_collection = getattr(get_db_connector(), User.DB_COLLECTION)
    return users_collection.find({})
def get_db_nodes(status='', base_node_names=None, search='', per_page=20, offset=0, user_id=None):
    """Get subset of the Nodes.

    Args:
        status (str, list of str, None): Node Running Status
        base_node_names (str, list of str, None): Base node name filter
        search (str, None): Search pattern; may embed parameters such as
            `author:...` and `sort:...` (parsed by parse_search_string)
        per_page (int): Number of Nodes per page
        offset (int): Offset
        user_id (str, ObjectId, None): Viewer ID used to compute `_readonly`

    Return:
        (dict, None) Aggregation facet: `metadata` holds the total count,
        `list` holds the requested page of Nodes in dict format.
    """
    # `basestring` is Python-2-only; `str` matches the check used by the
    # graph variant of this function elsewhere in this module.
    if status and isinstance(status, str):
        status = [status]
    if base_node_names and isinstance(base_node_names, str):
        base_node_names = [base_node_names]

    aggregate_list = []
    search_parameters, search_string = parse_search_string(search)
    # Match on base node name, status and free-text search
    and_query = {}
    if base_node_names:
        and_query['base_node_name'] = {'$in': base_node_names}
    if status:
        and_query['node_status'] = {'$in': status}
    if search_string:
        and_query['$text'] = {'$search': search_string}
    if len(and_query):
        aggregate_list.append({"$match": and_query})
    # Join with users to resolve the author
    aggregate_list.append({
        '$lookup': {
            'from': 'users',
            'localField': 'author',
            'foreignField': '_id',
            'as': '_user'
        }
    })
    # rm password hash before it leaves the database layer
    aggregate_list.append({"$project": {
        "_user.password_hash": 0,
    }})
    # Match username from the `author:` search parameter (post-lookup)
    and_query = {}
    if 'author' in search_parameters:
        and_query['_user.username'] = search_parameters['author']
    if len(and_query):
        aggregate_list.append({"$match": and_query})
    # sort: optional `sort:starred` first, then newest first
    sort_dict = OrderedDict()
    if 'sort' in search_parameters:
        # TODO more sort options
        if search_parameters['sort'] == 'starred':
            sort_dict['starred'] = -1
    sort_dict['insertion_date'] = -1
    aggregate_list.append({"$sort": sort_dict})
    # Only the author may edit; everyone else sees the node read-only
    aggregate_list.append({
        "$addFields": {
            '_readonly': {
                '$ne': ["$author", to_object_id(user_id)]
            },
        }
    })
    # counts and pagination in a single $facet stage
    aggregate_list.append({
        '$facet': {
            "metadata": [{
                "$count": "total"
            }],
            "list": [{
                "$skip": int(offset)
            }, {
                "$limit": int(per_page)
            }],
        }
    })
    return next(get_db_connector().nodes.aggregate(aggregate_list), None)
def get_master_state():
    """Return the most recent MasterState document, or None if none exist."""
    states = getattr(get_db_connector(), MasterState.DB_COLLECTION) \
        .find({}).sort('insertion_date', -1).limit(1)
    # `Cursor.count()` is deprecated (removed in pymongo 4) and cost an extra
    # round trip; pull the single document straight from the cursor instead.
    return next(states, None)