def exec(self): Log.i(TAG, "Executing DataPipelineProcess.") database = self.data.get('Database') mainColName = self.data.get('Collection') targetColName = self.data.get('to') fieldAddress = self.data.get('fieldRef') mainCollection = Provider.get_collection(mainColName, database) targetCollection = Provider.get_collection(targetColName, database) references = mainCollection.read(parser.parser(f"{fieldAddress}")) ref_key = DataPipelineProcess.getLastFieldSegment(fieldAddress) print(mainCollection.read(parser.parser(self.data.get('query')))) for reference in references: if type(reference.get(ref_key)) is list: for ref in reference.get(ref_key): self.output.append(targetCollection .findById(ref)) else: self.output = targetCollection \ .findById(reference.get(ref_key)) acknowledgement = { "Ack": self.output, "addr": self.data.get('addr') } return acknowledgement
def get_collection(cls, col_name: str, db_name):
    if Collections.collection_list.get(db_name) is not None:
        database = Collections.collection_list.get(db_name)
        result_col = None
        for collection in database:
            if col_name == collection.col_name:
                result_col = collection
                break
        if result_col is not None:
            print("Getting collection from RAM")
            return result_col
        else:
            # Not cached in RAM: fetch from disk or create a new collection.
            print("Fetching or creating new collection")
            query = f"""
                    time_stamp,
                    db_name &eq "{db_name}",
                    col_name &eq "{col_name}"
                    """
            result = Collections.meta_collection.readOne(parser.parser(query))
            if not result:
                print("Creating new collection: not in RAM and not on disk")
                return Collections.create_new_collection(col_name, db_name)
            else:
                result = result[0]
                print("Getting collection from disk")
                result = initializer.getCollection(
                    config.DATABASE_PATH + getPathSeparator() +
                    str(result.get("time_stamp")) + ".col")
                Collections.add_collection(result)
                return result
    else:
        # No entry for this database in RAM: resolve via the meta collection.
        query = f"""
                time_stamp,
                db_name &eq "{db_name}",
                col_name &eq "{col_name}"
                """
        result = Collections.meta_collection.readOne(parser.parser(query))
        if not result:
            print("Creating new collection: no database found")
            return Collections.create_new_collection(col_name, db_name)
        else:
            result = result[0]
            print("Getting collection from disk: no database entry in RAM")
            result = initializer.getCollection(
                config.DATABASE_PATH + getPathSeparator() +
                str(result.get('time_stamp')) + ".col")
            Collections.add_collection(result)
            return result
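# Sketch of how the meta record resolved above maps to the on-disk file, using
# only what get_collection() already does: the projected "time_stamp" value plus
# the ".col" suffix under config.DATABASE_PATH. The record below is a
# hypothetical readOne() result, not real data.
meta_record = {"time_stamp": 1650000000000}  # hypothetical
collection_path = (config.DATABASE_PATH + getPathSeparator() +
                   str(meta_record.get("time_stamp")) + ".col")  # e.g. ".../1650000000000.col"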
def exec(self): Log.i(TAG, "Executing ReadProcess.") db_name, col_name, query = BaseRIDUProcess.meta_separator( self.data.meta_data) col = Collections.get_collection(col_name, db_name) query_object = parser.parser(query) return {"Ack": col.read(query_object), "addr": self.data.addr}
def __getCollectionNameForDisk(collection: Collection) -> str: query = f" time_stamp,db_name &eq \"{collection.parent}\", col_name &eq \"{collection.col_name}\"" Log.d(TAG, "Searching collection name for disk") data = Provider.meta_collection.readOne(parser.parser(query)) Log.d(TAG, f"Collection name for disk is {data}") Log.d(TAG, f"{data.get('time_stamp')}.col") return str(data.get("time_stamp"))
def testViewQuery(self):
    query = """
            name,
            email,
            age &eq 18
            """
    view = ["name", "email"]
    query_obj = parser.parser(query)
    self.assertEqual(query_obj.view, view)
def testSelectiveQuery(self):
    query = """
            name,
            email,
            age &eq 18
            """
    field = 'age'
    query_obj = parser.parser(query)
    self.assertEqual(query_obj.selective[0]['field'], field)
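# A combined check in the same style as the two tests above; it relies only on
# the parser behaviour those tests already assert (query_obj.view and
# query_obj.selective[0]['field']).
def testViewAndSelectiveQuery(self):
    query = """
            name,
            email,
            age &eq 18
            """
    query_obj = parser.parser(query)
    self.assertEqual(query_obj.view, ["name", "email"])
    self.assertEqual(query_obj.selective[0]['field'], 'age')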
def get_collection(cls, col_name: str, db_name):
    if Provider.collection_list.get(db_name) is not None:
        database = Provider.collection_list.get(db_name)
        result_col = None
        for collection in database:
            if col_name == collection.col_name:
                result_col = collection
                break
        if result_col is not None:
            Log.i(TAG, "Getting collection from RAM")
            return result_col
        else:
            # Not cached in RAM: fetch from disk or create a new collection.
            Log.i(TAG, "Fetching or creating new collection")
            query = f"""
                    time_stamp,
                    db_name &eq "{db_name}",
                    col_name &eq "{col_name}"
                    """
            result = Provider.meta_collection.readOne(parser(query))
            if not result:
                Log.i(TAG, "Creating new collection")
                return Provider.create_new_collection(col_name, db_name)
            else:
                Log.i(TAG, "Getting collection from disk")
                result = loadCollection(
                    config.DATABASE_PATH + getPathSeparator() +
                    str(result.get("time_stamp")) + ".col")
                Provider.add_collection(result)
                return result
    else:
        # No entry for this database in RAM: resolve via the meta collection.
        query = f"""
                time_stamp,
                db_name &eq "{db_name}",
                col_name &eq "{col_name}"
                """
        result = Provider.meta_collection.readOne(parser(query))
        if not result:
            Log.i(TAG, "Creating new collection")
            return Provider.create_new_collection(col_name, db_name)
        else:
            Log.i(TAG, "Getting collection from disk")
            result = loadCollection(
                config.DATABASE_PATH + getPathSeparator() +
                str(result.get('time_stamp')) + ".col")
            Provider.add_collection(result)
            return result
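# Minimal usage sketch for Provider.get_collection() above: the first call may
# load the collection from disk or create it, the second is served from RAM.
# "app_db" and "users" are placeholder names, not part of the project.
users = Provider.get_collection("users", "app_db")        # disk, new, or RAM
users_again = Provider.get_collection("users", "app_db")  # now resolved from RAM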
def exec(self): Log.i(TAG, "Executing UpdateProcess.") db_name, col_name, query = Collection.meta_separator(self.data.meta_data) col = Provider.get_collection(col_name, db_name) query_object = parser.parser(query) acknowledgement = { "Ack": col.updateAll(query_object, self.data.user_data), "addr": self.data.addr } Event.emmit('col-change', col) return acknowledgement
def getCollection(database: str) -> [str]: collections = [] records = Provider.meta_collection.read(parser(f"col_name, db_name &eq \"{database}\"")) for record in records: collections.append(record['col_name']) return collections
def getDatabases() -> [str]: databases = [] records = Provider.meta_collection.read(parser("db_name")) for record in records: databases.append(record['db_name']) return databases