def onCollectionChange(collection: Collection):
    """Handle a collection-change event by queueing persistence work.

    Renders system processes for the changed collection and for the
    meta collection, puts them on the event loop's system queue, then
    makes sure the loop is running.
    """
    Log.i(TAG, "Event -> Collection Changed")
    # The changed collection is persisted first, then the meta collection
    # that tracks it — same order as rendering each separately.
    for source in (collection, Provider.meta_collection):
        for process in renderProcess(source):
            loop_runner.loop.system_process.put(process)
    manage_loop_status()
def run(self):
    """Drive the event loop, draining both queues until shouldContinue() is false."""
    Log.i(TAG, "EventLoop Started")
    while self.shouldContinue():
        # Flag the loop as busy for the duration of one drain pass.
        self.isRunning = True
        for drain in (self.loop.execute_query_process,
                      self.loop.execute_sys_process):
            drain()
        self.isRunning = False
def exec(self):
    """Resolve cross-collection references for a data-pipeline request.

    Reads reference documents from the main collection, follows the id(s)
    stored under the last segment of 'fieldRef' into the target collection,
    and returns an acknowledgement addressed back to the requesting client.
    """
    Log.i(TAG, "Executing DataPipelineProcess.")
    database = self.data.get('Database')
    main_col_name = self.data.get('Collection')
    target_col_name = self.data.get('to')
    field_address = self.data.get('fieldRef')

    main_collection = Provider.get_collection(main_col_name, database)
    target_collection = Provider.get_collection(target_col_name, database)

    references = main_collection.read(parser.parser(f"{field_address}"))
    ref_key = DataPipelineProcess.getLastFieldSegment(field_address)
    # NOTE(review): a stray debug print of the 'query' read was removed here;
    # it dumped query results to stdout on every pipeline execution.
    for reference in references:
        ref_value = reference.get(ref_key)
        if isinstance(ref_value, list):
            # One-to-many reference: resolve every id in the list.
            for ref in ref_value:
                self.output.append(target_collection.findById(ref))
        else:
            # Single reference: output becomes the one resolved document.
            # NOTE(review): this overwrites any previously accumulated list —
            # confirm mixed list/scalar references are not expected.
            self.output = target_collection.findById(ref_value)
    acknowledgement = {
        "Ack": self.output,
        "addr": self.data.get('addr')
    }
    return acknowledgement
def listen(self, **kwargs):
    """Accept clients and route their JSON payloads to the engine via events.

    Binds the server socket, then for each accepted client reads UTF-8
    JSON payloads and emits 'request' or 'req_sub' events depending on the
    payload's 'type' field. A literal 'exit' message closes the client;
    any error closes the connection and returns to accepting.
    """
    super().bind((self.host, self.port))
    Log.i(TAG, f"Server is listening on port {self.port}")
    Event.on('on_task_complete', self.send_ack)
    while True:
        super().listen(1)
        client, addr = super().accept()
        Log.d(TAG, f"Client connected with {addr}")
        self.clients.append({
            "client": client,
            "addr": addr
        })
        while True:
            try:
                raw_query = str(client.recv(1024).decode("UTF-8"))
                Log.d(TAG, f"Received data from client {addr}")
                Log.d(TAG, f"Data -> {raw_query}")
                if raw_query.lower() == 'exit':
                    client.close()
                    break
                json_query = json.loads(raw_query)
                json_query['addr'] = str(addr)
                # .get avoids a KeyError (was json_query['type']) when a
                # client sends a payload without a 'type' field.
                query_type = json_query.get('type')
                if query_type == 'Request':
                    Event.emmit('request', json.dumps(json_query))
                    Log.d(TAG, f"Client is requesting for RIDU operation")
                elif query_type == 'Subscription':
                    Log.d(TAG, f"Client is requesting for subscription")
                    Event.emmit('req_sub', json.dumps(json_query))
                # code to communicate with hyperlite engine
            except Exception as err:
                Log.e(TAG, f"Connection broken -> {err}")
                client.close()
                break
def exec(self):
    """Run a read query against the addressed collection and wrap the result."""
    Log.i(TAG, "Executing ReadProcess.")
    db_name, col_name, query = BaseRIDUProcess.meta_separator(
        self.data.meta_data)
    target = Collections.get_collection(col_name, db_name)
    parsed_query = parser.parser(query)
    return {
        "Ack": target.read(parsed_query),
        "addr": self.data.addr
    }
def execute_sys_process(self):
    """Drain the system-process queue, executing each queued process once."""
    queue = self.system_process
    for _ in range(queue.qsize()):
        Log.i(
            TAG,
            f"Executing 1 system process from total {queue.qsize()} process"
        )
        print("System Task Ack : " + str(queue.get().exec()))
def execute_query_process(self):
    """Drain the query queue, emitting each result as a task-complete event."""
    queue = self.query_processes
    for _ in range(queue.qsize()):
        Log.i(
            TAG,
            f"Executing 1 query process from total {queue.qsize()} process"
        )
        Event.emmit("on_task_complete", queue.get().exec())
def exec(self):
    """Look up a single record by id and return it in an acknowledgement."""
    Log.i(TAG, "Executing ReadByIdProcess.")
    db_name, col_name, object_id = Collection.meta_separator(
        self.data.meta_data)
    target = Provider.get_collection(col_name, db_name)
    return {
        "Ack": target.findById(object_id),
        "addr": self.data.addr
    }
def renderProviderProcess(data):
    """Build the provider process matching data['show'], or None if unknown."""
    Log.i(TAG, f"Rendering {data['show']} Provider Process...")
    factories = {
        "Databases": DatabasedProviderProcess,
        "Collections": CollectionProviderProcess,
    }
    factory = factories.get(data['show'])
    return factory(data) if factory is not None else None
def exec(self):
    """Insert a batch of records, announce the change, return the ack."""
    Log.i(TAG, "Executing InsertAllProcess.")
    db_name, col_name = BaseRIDUProcess.meta_separator(self.data.meta_data)
    target = Provider.get_collection(col_name, db_name)
    ack = {
        "Ack": target.insertAll(self.data.user_data),
        "addr": self.data.addr
    }
    # Let subscribers (persistence, live queries) know the collection changed.
    Event.emmit('col-change', target)
    return ack
def exec(self):
    """Update one record by id, announce the change, return the ack."""
    Log.i(TAG, "Executing UpdateOneProcess.")
    db_name, col_name, object_id = Collection.meta_separator(
        self.data.meta_data)
    target = Provider.get_collection(col_name, db_name)
    ack = {
        "Ack": target.updateOne(object_id, self.data.user_data),
        "addr": self.data.addr
    }
    # Let subscribers (persistence, live queries) know the collection changed.
    Event.emmit('col-change', target)
    return ack
def exec(self):
    """Delete a record by id, announce the change, return the ack."""
    Log.i(TAG, "Executing DeleteProcess.")
    db_name, col_name, object_id = Collection.meta_separator(
        self.data.meta_data)
    target = Collections.get_collection(col_name, db_name)
    ack = {
        "Ack": target.delete(object_id),
        "addr": self.data.addr
    }
    # Let subscribers (persistence, live queries) know the collection changed.
    Event.emmit('col-change', target)
    return ack
def exec(self):
    """Update every record matching the query, announce the change, return the ack."""
    Log.i(TAG, "Executing UpdateProcess.")
    db_name, col_name, query = Collection.meta_separator(
        self.data.meta_data)
    target = Collections.get_collection(col_name, db_name)
    parsed_query = parser.parser(query)
    ack = {
        "Ack": target.updateAll(parsed_query, self.data.user_data),
        "addr": self.data.addr
    }
    # Let subscribers (persistence, live queries) know the collection changed.
    Event.emmit('col-change', target)
    return ack
def exec(self):
    """Insert one record, announce the change, return the ack."""
    Log.i(TAG, "Executing InsertProcess.")
    db_name, col_name = BaseRIDUProcess.meta_separator(self.data.meta_data)
    Log.d(TAG, f"{db_name, col_name}")
    target = Collections.get_collection(col_name, db_name)
    Log.d(TAG, "col obj fetched.")
    ack = {
        "Ack": target.insert(self.data.user_data),
        "addr": self.data.addr
    }
    # Let subscribers (persistence, live queries) know the collection changed.
    Event.emmit('col-change', target)
    return ack
def initMe():
    """Bootstrap: log environment info, then load or create the meta collection."""
    Log.c(TAG, "Starting Hyperlite Database")
    Log.w(TAG, f"We are running on {config.PLATFORM} Operating System")
    Log.i(TAG, f"Database files can be found on {config.DATABASE_PATH} ")
    if os.path.exists(config.COLLECTION_PATH):
        # Meta collection already persisted — load it from disk.
        Collections.meta_collection = initializer.getCollection(
            config.COLLECTION_PATH)
        Log.i(TAG, "Meta collection found on disk")
    else:
        # First run: start from an empty in-memory meta collection.
        Collections.meta_collection = Collection("hyperlite.col", "MetaData")
        Log.w(
            TAG,
            "Meta collection file not found so creating new meta collection")
def renderRIDUProcess(parsed_data):
    """Map parsed_data.request_type to its process instance; None if unsupported."""
    Log.i(TAG, "Rendering Process...")
    process_by_type = {
        'Read': ReadProcess,
        'Update': UpdateProcess,
        'Insert': InsertProcess,
        'Delete': DeleteProcess,
        'ReadById': ReadByIdProcess,
        'ReadOne': ReadOneProcess,
        'UpdateOne': UpdateOneProcess,
    }
    process_cls = process_by_type.get(parsed_data.request_type)
    if process_cls is None:
        Log.e(TAG, "No compatible request_type found.")
        return None
    return process_cls(parsed_data)
def manage_loop_status():
    """Restart the event loop if it is not currently running."""
    if loop_runner.isRunning:
        return
    Log.i(TAG, "EventLoop is stopped, Rerunning EventLoop...")
    Event.emmit('loop-rerun')
def __init__(self, parsed_data):
    """Keep the parsed request for later execution by exec()."""
    self.data = parsed_data
    Log.i(TAG, "DeleteProcess created.")
def __init__(self, parsed_data):
    """Keep the parsed request for later execution by exec()."""
    self.data = parsed_data
    Log.i(TAG, "InsertProcess created.")
def __init__(self, data):
    """Keep the pipeline request and start with an empty output accumulator."""
    self.data = data
    self.output = []
    Log.i(TAG, "DataPipelineProcess created.")
def __init__(self, parsed_data):
    """Keep the parsed request for later execution by exec()."""
    self.data = parsed_data
    Log.i(TAG, "ReadByIdProcess created.")
def get_collection(cls, col_name: str, db_name):
    """Return the collection named col_name in db_name.

    Resolution order: in-RAM registry first; otherwise look the collection
    up in the meta collection and load it from disk, creating a brand-new
    collection when no metadata record exists.

    The original implementation duplicated the whole lookup/load/create
    branch verbatim in two places; it is factored into one nested helper.
    """

    def _load_or_create():
        # Consult the meta collection for the collection's on-disk identity.
        query = f"""
        time_stamp,
        db_name &eq "{db_name}",
        col_name &eq "{col_name}"
        """
        result = Provider.meta_collection.readOne(parser(query))
        if not result:
            Log.i(TAG, "Creating new collection")
            return Provider.create_new_collection(col_name, db_name)
        Log.i(TAG, "Getting collection from disk")
        # Collections are stored on disk keyed by their creation timestamp.
        loaded = loadCollection(
            config.DATABASE_PATH + getPathSeparator() +
            str(result.get("time_stamp")) + ".col")
        Provider.add_collection(loaded)
        return loaded

    database = Provider.collection_list.get(db_name)
    if database is not None:
        for collection in database:
            if col_name == collection.col_name:
                Log.i(TAG, "Getting collection from Ram")
                return collection
        # Known database but unknown collection: fall back to disk/create.
        Log.i(TAG, "Fetching or create new Collection")
    return _load_or_create()
def __init__(self):
    """Create the loop, record its initial run state, and hook the rerun event."""
    self.loop = EventLoop()
    self.isRunning: bool = self.shouldContinue()
    # Allow manage_loop_status() to restart us via the event bus.
    Event.on('loop-rerun', self.run)
    Log.i(TAG, "EventLoop Ready")
def doctor():
    """Ensure the database directory exists, creating it when missing."""
    if os.path.exists(config.DATABASE_PATH):
        return
    Log.w(TAG, "Database directory does not exist")
    os.makedirs(config.DATABASE_PATH)
    Log.i(TAG, "Database directory Created")
def listen(self, **kwargs):
    """Accept clients and dispatch their JSON payloads as engine events.

    Each payload's 'type' selects the event emitted: 'Request' -> 'request',
    'Subscription' -> 'req_sub', 'Pipeline' -> 'req_pipe',
    'Provider' -> 'req_provider'. A literal 'exit' message closes the
    client; a connection reset also removes it from the client registry.
    """
    super().bind((self.host, self.port))
    Log.i(TAG, f"Server is listening on port {self.port}")
    Event.on('on_task_complete', self.send_ack)
    while True:
        super().listen(1)
        client, addr = super().accept()
        Log.d(TAG, f"Client connected with {addr}")
        clientObj = {"client": client, "addr": addr}
        self.clients.append(clientObj)
        while True:
            try:
                raw_query = str(
                    client.recv(1024 * 1024 * 1024).decode("UTF-8"))
                Log.d(TAG, f"Received data from client {addr}")
                if raw_query.lower() == 'exit':
                    client.close()
                    break
                json_query = json.loads(raw_query)
                json_query['addr'] = str(addr)
                # .get avoids a KeyError (was json_query['type']) when a
                # client omits the 'type' field entirely.
                query_type = json_query.get('type')
                if query_type == 'Request':
                    Event.emmit('request', json.dumps(json_query))
                    Log.d(TAG, f"Client is requesting for RIDU operation")
                elif query_type == 'Subscription':
                    Log.d(TAG, f"Client is requesting for subscription")
                    Event.emmit('req_sub', json.dumps(json_query))
                elif query_type == 'Pipeline':
                    Log.d(TAG, f"Client is requesting for Data Pipeline")
                    Event.emmit('req_pipe', json_query)
                elif query_type == 'Provider':
                    Log.d(TAG, f"Client is requesting for Provider Component")
                    Event.emmit('req_provider', json_query)
                # code to communicate with hyperlite engine
            except ConnectionResetError as err:
                Log.e(TAG, f"Connection Reset -> {err}")
                client.close()
                Log.d(TAG, f"{self.clients}")
                self.clients.remove(clientObj)
                Log.i(TAG, "Client removed from Clients list")
                Log.d(
                    TAG,
                    f"Connected clients -> {self.clients if len(self.clients) != 0 else 'No Clients'}"
                )
                break
            except Exception as err:
                # NOTE(review): on generic failures the client stays in
                # self.clients and its socket is left open — confirm whether
                # it should be closed and removed here as it is on resets.
                Log.e(TAG, f"Connection broken -> {err}")
                break
def send_ack(self, ack):
    """Send ack['Ack'] (JSON-encoded, UTF-8) to the client matching ack['addr']."""
    target_addr = ack["addr"]
    for entry in self.clients:
        if str(entry["addr"]) == target_addr:
            Log.i(TAG, "Ack has send to client")
            entry["client"].send(json.dumps(ack["Ack"]).encode("UTF-8"))