def process_webhooks():
    """
    Searches for webhook files and posts the result to the webhook URL once it is available.
    """
    # List of webhooks left to send.
    webhooks_left = glob.glob(os.path.join(_utils.RAM_DIR, "*webhook"))
    if not webhooks_left:
        return

    for webhook_f in webhooks_left:
        try:
            # Check whether the corresponding result file exists yet.
            unique_id = os.path.basename(webhook_f).split(".")[0]
            res_path = glob.glob(os.path.join(_utils.RAM_DIR, f"{unique_id}*res"))
            if not res_path:
                continue
            res_path = res_path[0]

            try:
                pred = pickle.load(open(res_path, "rb"))
            except:
                # Predictions are not written yet; try again on the next pass.
                continue

            try:
                # If the result has non-JSON-serializable values, fall back to str().
                json.dumps(pred)
            except:
                pred = str(pred)

            pred = {"prediction": pred, "success": True, "unique_id": unique_id}
            webhook_url = open(webhook_f).read().strip()

            # Try 3 times with timeout=5 seconds.
            for _ in range(3):
                try:
                    requests.post(webhook_url, json=pred, timeout=5)
                    _utils.logger.info(f"webhook success: {unique_id}")
                    _utils.logger.info(f"{unique_id} with url {webhook_url} processed.")
                    break
                except Exception as ex:
                    _utils.logger.warn(ex)
                    _utils.logger.warn(
                        f"webhook failed for {unique_id} with url {webhook_url} in try {_}"
                    )

            # Files are deleted after success or after 3 failed attempts.
            _utils.cleanup(unique_id)
        except Exception as exc:
            try:
                unique_id = os.path.basename(webhook_f).split(".")[0]
                _utils.cleanup(unique_id)
            except Exception as exx:
                _utils.logger.warn(exx, exc_info=True)
                _utils.logger.warn(f"Failed to cleanup {webhook_f}")
            _utils.logger.exception(exc, exc_info=True)
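# Illustrative sketch (not part of this module): a minimal Falcon resource a
# client could run to receive the payload that process_webhooks() POSTs above,
# i.e. {"prediction": ..., "success": True, "unique_id": ...}. The class name
# and route below are assumptions for illustration only. Note that the sender
# does not check the HTTP status code; it retries only on connection errors
# and timeouts.
import falcon


class ExampleWebhookReceiver(object):
    def on_post(self, req, resp):
        payload = req.media  # keys: "prediction", "success", "unique_id"
        print(f"result for {payload['unique_id']}: {payload['prediction']}")
        resp.status = falcon.HTTP_200


# Usage (route is an assumption):
# app = falcon.API()
# app.add_route("/my_hook", ExampleWebhookReceiver())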
def on_post(self, req, resp):
    try:
        unique_id = req.media["unique_id"]
        _utils.logger.info(f"unique_id: {unique_id} Result request received.")
        res_path = os.path.join(_utils.RAM_DIR, unique_id + ".res")
        res_path_disk = os.path.join(_utils.DISK_DIR, unique_id + ".res")
        try:
            # The result may still be in RAM or already moved to disk.
            try:
                pred = pickle.load(open(res_path, "rb"))
            except:
                pred = pickle.load(open(res_path_disk, "rb"))

            # If the result has non-JSON-serializable values, fall back to str().
            try:
                response = json.dumps({"prediction": pred, "success": True})
            except:
                response = json.dumps({"prediction": str(pred), "success": True})

            _utils.cleanup(unique_id)
            _utils.logger.info(f"unique_id: {unique_id} cleaned up.")
            resp.body = response
            resp.status = falcon.HTTP_200
        except:
            # No result yet: either the request is still being processed
            # (its input file is present) or the unique_id is unknown.
            if not glob.glob(os.path.join(_utils.RAM_DIR, unique_id + ".inp*")):
                _utils.logger.info(f"unique_id: {unique_id} does not exist.")
                resp.body = json.dumps(
                    {
                        "success": None,
                        "reason": f"{unique_id} does not exist. You might have already accessed its result.",
                    }
                )
                resp.status = falcon.HTTP_200
            else:
                _utils.logger.info(f"unique_id: {unique_id} processing.")
                resp.body = json.dumps({"success": None, "reason": "processing"})
                resp.status = falcon.HTTP_200
    except Exception as ex:
        try:
            _utils.cleanup(unique_id)
        except:
            pass
        _utils.logger.exception(ex, exc_info=True)
        resp.body = json.dumps({"success": False, "reason": str(ex)})
        resp.status = falcon.HTTP_400
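# Illustrative sketch (not part of this module): a client-side helper that
# polls the result resource above, assuming it is mounted at "/result" (the
# actual route is an assumption). "success" stays None while the request is
# still processing; an unknown unique_id also returns None, so "reason" is
# checked as well to avoid polling forever.
import time

import requests


def example_fetch_result(base_url, unique_id, poll_interval=1):
    while True:
        res = requests.post(
            f"{base_url}/result", json={"unique_id": unique_id}
        ).json()
        if res["success"] is not None or "does not exist" in res.get("reason", ""):
            return res
        time.sleep(poll_interval)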
def on_post(self, req, resp):
    try:
        unique_id = _utils.get_uuid()
        webhook = req.media.get("webhook")
        _utils.write_webhook(unique_id, webhook)
        if _utils.MAX_PER_CLIENT_BATCH and (
            len(req.media["data"]) > _utils.MAX_PER_CLIENT_BATCH
        ):
            resp.body, resp.status = (
                json.dumps(
                    {
                        "success": False,
                        "reason": f"Maximum number of examples allowed in client batch is {_utils.MAX_PER_CLIENT_BATCH}",
                    }
                ),
                falcon.HTTP_200,
            )
        else:
            if isinstance(req.media["data"], list):
                handle_json_request(unique_id, req.media["data"])
                req.media.clear()
                resp.body = json.dumps({"success": True, "unique_id": unique_id})
                resp.status = falcon.HTTP_200
            elif isinstance(req.media["data"], dict):
                handle_file_dict_request(unique_id, req.media["data"])
                req.media.clear()
                resp.body = json.dumps({"success": True, "unique_id": unique_id})
                resp.status = falcon.HTTP_200
            else:
                resp.body, resp.status = (
                    json.dumps({"success": False, "reason": "invalid request"}),
                    falcon.HTTP_400,
                )
    except Exception as ex:
        try:
            _utils.cleanup(unique_id)
        except:
            pass
        _utils.logger.exception(ex, exc_info=True)
        resp.body = json.dumps({"success": False, "reason": str(ex)})
        resp.status = falcon.HTTP_400
def wait_and_read_pred(res_path, unique_id):
    """
    Waits for and reads the pickled result at res_path.

    :param res_path: the result pickle file path to watch.
    :param unique_id: unique_id used in cleanup.

    :return response: JSON string with keys "success" and "prediction"/"reason".
    :return status: HTTP status code.
    """
    # Keep track of start_time for TIMEOUT implementation.
    _utils.logger.info(f"unique_id: {unique_id} waiting for {res_path}")
    start_time = time.time()

    # Default response and status.
    response, status = (
        json.dumps({"success": False, "reason": "timeout"}),
        falcon.HTTP_503,
    )
    while True:
        try:
            # If the pickle doesn't exist yet, the while loop continues.
            pred = pickle.load(open(res_path, "rb"))
            try:
                response = json.dumps({"prediction": pred, "success": True})
            except:
                # If the result has non-JSON-serializable values, fall back to str().
                _utils.logger.info(
                    f"unique_id: {unique_id} could not json serialize the result."
                )
                response = json.dumps({"prediction": str(pred), "success": True})
            status = falcon.HTTP_200
            break
        except:
            # Stop in case of timeout.
            if time.time() - start_time >= _utils.TIMEOUT:
                _utils.logger.warn(
                    f"unique_id: {unique_id} timed out, with timeout {_utils.TIMEOUT}"
                )
                break
            time.sleep(_utils.SYNC_RESULT_POLING_SLEEP)

    # Since this is the last step in /sync, delete all files related to this unique_id.
    _utils.cleanup(unique_id)
    _utils.logger.info(f"unique_id: {unique_id} cleaned up.")
    return response, status
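# Illustrative sketch (an assumption about the worker side, not code from this
# module): wait_and_read_pred() treats any pickle.load() failure as "result not
# written yet", which also papers over partially written files. A writer can
# avoid that window entirely by dumping to a temporary file and renaming it
# into place, since rename is atomic on POSIX within one filesystem.
import os
import pickle


def example_write_result(res_path, pred):
    tmp_path = res_path + ".tmp"  # ".tmp" suffix keeps it out of the "*res" globs
    with open(tmp_path, "wb") as f:
        pickle.dump(pred, f)
    os.rename(tmp_path, res_path)  # readers see either no file or a complete one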
def on_post(self, req, resp):
    try:
        if ONLY_ASYNC:
            resp.body, resp.status = (
                json.dumps(
                    {
                        "success": False,
                        "reason": "ONLY_ASYNC is set to True on this server.",
                    }
                ),
                falcon.HTTP_200,
            )
        else:
            priority = 8
            try:
                priority = int(req.media["priority"])
            except:
                pass

            unique_id = _utils.get_uuid(priority=priority)
            res_path = None
            if _utils.MAX_PER_CLIENT_BATCH and (
                len(req.media["data"]) > _utils.MAX_PER_CLIENT_BATCH
            ):
                resp.body, resp.status = (
                    json.dumps(
                        {
                            "success": False,
                            "reason": f"Maximum number of examples allowed in client batch is {_utils.MAX_PER_CLIENT_BATCH}",
                        }
                    ),
                    falcon.HTTP_200,
                )
            else:
                if isinstance(req.media["data"], list):
                    res_path = handle_json_request(unique_id, req.media["data"])
                elif isinstance(req.media["data"], dict):
                    res_path = handle_file_dict_request(
                        unique_id, req.media["data"]
                    )
                else:
                    resp.body, resp.status = (
                        json.dumps({"success": False, "reason": "invalid request"}),
                        falcon.HTTP_400,
                    )

            if res_path:
                req.media.clear()
                resp.body, resp.status = wait_and_read_pred(res_path, unique_id)
    except Exception as ex:
        try:
            _utils.cleanup(unique_id)
        except:
            pass
        _utils.logger.exception(ex, exc_info=True)
        resp.body = json.dumps({"success": False, "reason": str(ex)})
        resp.status = falcon.HTTP_400
def on_post(self, req, resp):
    try:
        unique_id = _utils.get_uuid()
        _utils.logger.info(f"unique_id: {unique_id} Async request received.")
        webhook = req.media.get("webhook")
        _utils.write_webhook(unique_id, webhook)
        if _utils.MAX_PER_CLIENT_BATCH and (
            len(req.media["data"]) > _utils.MAX_PER_CLIENT_BATCH
        ):
            _utils.logger.info(
                f'unique_id: {unique_id} has batch of size {len(req.media["data"])}. MAX_PER_CLIENT_BATCH: {_utils.MAX_PER_CLIENT_BATCH}'
            )
            resp.body, resp.status = (
                json.dumps(
                    {
                        "success": False,
                        "reason": f"Maximum number of examples allowed in client batch is {_utils.MAX_PER_CLIENT_BATCH}",
                    }
                ),
                falcon.HTTP_200,
            )
        elif len(req.media["data"]) == 0:
            _utils.logger.info(f"unique_id: {unique_id} has empty batch.")
            resp.body, resp.status = (
                json.dumps({"prediction": [], "success": True}),
                falcon.HTTP_200,
            )
        else:
            if isinstance(req.media["data"], list):
                if _utils.FILE_MODE:
                    _utils.logger.info(
                        f"unique_id: {unique_id} is a JSON input. Expecting FILE input."
                    )
                    resp.body = json.dumps(
                        {"success": False, "reason": "Expecting FILE input"}
                    )
                    resp.status = falcon.HTTP_400
                else:
                    handle_json_request(unique_id, req.media["data"])
                    req.media.clear()
                    resp.body = json.dumps(
                        {"success": True, "unique_id": unique_id}
                    )
                    resp.status = falcon.HTTP_200
            elif isinstance(req.media["data"], dict):
                if not _utils.FILE_MODE:
                    _utils.logger.info(
                        f"unique_id: {unique_id} is a FILE input. Expecting JSON input."
                    )
                    resp.body = json.dumps(
                        {"success": False, "reason": "Expecting JSON input"}
                    )
                    resp.status = falcon.HTTP_400
                else:
                    handle_file_dict_request(unique_id, req.media["data"])
                    req.media.clear()
                    resp.body = json.dumps(
                        {"success": True, "unique_id": unique_id}
                    )
                    resp.status = falcon.HTTP_200
            else:
                resp.body, resp.status = (
                    json.dumps({"success": False, "reason": "invalid request"}),
                    falcon.HTTP_400,
                )
    except Exception as ex:
        try:
            _utils.cleanup(unique_id)
        except:
            pass
        _utils.logger.exception(ex, exc_info=True)
        resp.body = json.dumps({"success": False, "reason": str(ex)})
        resp.status = falcon.HTTP_400
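# Illustrative sketch (not part of this module): submitting an async request,
# assuming this resource is mounted at "/async" (route and base_url are
# assumptions). The server replies immediately with a unique_id; the result is
# fetched later from the result endpoint, or pushed to the optional "webhook"
# URL by process_webhooks().
import requests


def example_submit_async(base_url, data, webhook=None):
    body = {"data": data}
    if webhook:
        body["webhook"] = webhook
    res = requests.post(f"{base_url}/async", json=body).json()
    return res["unique_id"]  # a KeyError here means the submission failed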
def on_post(self, req, resp):
    try:
        if ONLY_ASYNC:
            resp.body, resp.status = (
                json.dumps(
                    {
                        "success": False,
                        "reason": "ONLY_ASYNC is set to True on this server.",
                    }
                ),
                falcon.HTTP_200,
            )
        else:
            priority = 8
            try:
                priority = int(req.media["priority"])
            except:
                pass

            unique_id = _utils.get_uuid(priority=priority)
            _utils.logger.info(f"unique_id: {unique_id} Sync request received.")
            res_path = None
            if _utils.MAX_PER_CLIENT_BATCH and (
                len(req.media["data"]) > _utils.MAX_PER_CLIENT_BATCH
            ):
                _utils.logger.info(
                    f'unique_id: {unique_id} has batch of size {len(req.media["data"])}. MAX_PER_CLIENT_BATCH: {_utils.MAX_PER_CLIENT_BATCH}'
                )
                resp.body, resp.status = (
                    json.dumps(
                        {
                            "success": False,
                            "reason": f"Maximum number of examples allowed in client batch is {_utils.MAX_PER_CLIENT_BATCH}",
                        }
                    ),
                    falcon.HTTP_200,
                )
            elif len(req.media["data"]) == 0:
                _utils.logger.info(f"unique_id: {unique_id} has empty batch.")
                resp.body, resp.status = (
                    json.dumps({"prediction": [], "success": True}),
                    falcon.HTTP_200,
                )
            else:
                if isinstance(req.media["data"], list):
                    if _utils.FILE_MODE:
                        _utils.logger.info(
                            f"unique_id: {unique_id} is a JSON input. Expecting FILE input."
                        )
                        resp.body = json.dumps(
                            {"success": False, "reason": "Expecting FILE input"}
                        )
                        resp.status = falcon.HTTP_400
                    else:
                        res_path = handle_json_request(unique_id, req.media["data"])
                elif isinstance(req.media["data"], dict):
                    if not _utils.FILE_MODE:
                        _utils.logger.info(
                            f"unique_id: {unique_id} is a FILE input. Expecting JSON input."
                        )
                        resp.body = json.dumps(
                            {"success": False, "reason": "Expecting JSON input"}
                        )
                        resp.status = falcon.HTTP_400
                    else:
                        res_path = handle_file_dict_request(
                            unique_id, req.media["data"]
                        )
                else:
                    resp.body, resp.status = (
                        json.dumps({"success": False, "reason": "invalid request"}),
                        falcon.HTTP_400,
                    )

            if res_path:
                req.media.clear()
                resp.body, resp.status = wait_and_read_pred(res_path, unique_id)
    except Exception as ex:
        try:
            _utils.cleanup(unique_id)
        except:
            pass
        _utils.logger.exception(ex, exc_info=True)
        resp.body = json.dumps({"success": False, "reason": str(ex)})
        resp.status = falcon.HTTP_400
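# Illustrative sketch (not part of this module): a blocking sync call, assuming
# this resource is mounted at "/sync" (route and base_url are assumptions).
# The call returns once the prediction is ready, or with a 503 "timeout"
# response after _utils.TIMEOUT seconds. "priority" is forwarded to
# _utils.get_uuid(); how it orders requests is decided outside this section.
import requests


def example_sync_infer(base_url, data, priority=8):
    return requests.post(
        f"{base_url}/sync", json={"data": data, "priority": priority}
    ).json()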