def __init__(self, logger=None, task_id=None, server_address=None, agent_token=None,
             ignore_errors=False, ignore_task_id=False):
    """Set up the app service: identity, API clients, work queue, event loop and signal handling.

    :param logger: logger to use; falls back to the module's default_logger.
    :param task_id: task identifier; falls back to the TASK_ID env var.
    :param server_address: server URL; falls back to the SERVER_ADDRESS env var.
    :param agent_token: agent auth token; falls back to the AGENT_TOKEN env var.
    :param ignore_errors: when True, callback errors do not shut the app down.
    :param ignore_task_id: forwarded to Api.from_env / user API construction.
    """
    self._ignore_task_id = ignore_task_id
    self.logger = take_with_default(logger, default_logger)
    self._ignore_errors = ignore_errors
    # BUGFIX: look up env vars lazily. The previous code passed os.environ[...]
    # as the eagerly-evaluated default, so a missing env var raised KeyError
    # even when the caller supplied an explicit value.
    self.task_id = task_id if task_id is not None else os.environ["TASK_ID"]
    self.server_address = server_address if server_address is not None else os.environ[SERVER_ADDRESS]
    self.agent_token = agent_token if agent_token is not None else os.environ[AGENT_TOKEN]
    self.public_api = Api.from_env(ignore_task_id=self._ignore_task_id)
    self._app_url = self.public_api.app.get_url(self.task_id)
    self._session_dir = "/sessions/{}".format(self.task_id)
    self.api = AgentAPI(token=self.agent_token, server_address=self.server_address, ext_logger=self.logger)
    self.api.add_to_metadata('x-task-id', str(self.task_id))
    self.callbacks = {}
    self.processing_queue = queue.Queue()  # (maxsize=self.QUEUE_MAX_SIZE)
    self.logger.debug('App is created', extra={"task_id": self.task_id, "server_address": self.server_address})
    self._ignore_stop_for_debug = False
    self._error = None
    self.stop_event = asyncio.Event()
    self.executor = concurrent.futures.ThreadPoolExecutor()
    self.loop = asyncio.get_event_loop()
    # May want to catch other signals too (Unix-only: SIGHUP/SIGQUIT are not on Windows).
    signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT, signal.SIGQUIT)
    for s in signals:
        # s=s binds the current signal value, avoiding the late-binding closure pitfall.
        self.loop.add_signal_handler(s, lambda s=s: asyncio.create_task(self._shutdown(signal=s)))
    # comment out the line below to see how unhandled exceptions behave
    self.loop.set_exception_handler(self.handle_exception)
def init_docker_image(self):
    """Resolve the docker image for this app: ensure the repo is present, pull the
    module config from the ecosystem, then delegate to the base implementation."""
    self.download_or_get_repo()
    public_api = Api(self.info['server_address'], self.info['api_token'])
    ecosystem_module_id = self.info["appInfo"]["moduleId"]
    self.logger.info("APP moduleId == {} in ecosystem".format(ecosystem_module_id))
    # The config is taken from the ecosystem entry, not from the local config.json.
    module_info = public_api.app.get_info(ecosystem_module_id)
    self.app_config = module_info["config"]
    self.read_dockerimage_from_config()
    super().init_docker_image()
def _load_data_if_required(self, event_obj):
    """Attach the input image/frame to an inference event, then enqueue it for processing.

    On any failure, a {'success': False, 'error': ...} message is sent back to the
    requester asynchronously instead of raising.
    """
    try:
        req_id = event_obj[REQUEST_ID]
        event_data = event_obj[DATA]
        request_type = event_data.get(REQUEST_TYPE, INFERENCE)
        if request_type == INFERENCE:
            frame_info = event_data.get(VIDEO, None)
            if frame_info is None:
                # For inference we need to download an image and add it to the event data.
                image_hash = event_data.get(IMAGE_HASH)
                if image_hash is None:
                    # No hash supplied: image bytes come with the request itself.
                    img_data = self._load_arbitrary_image(req_id)
                else:
                    src_node_token = event_obj[DATA].get(
                        'src_node_token', '')
                    img_data = self._load_image_from_sly(
                        req_id, image_hash, src_node_token)
                event_data['image_arr'] = img_data
                self.logger.trace('Input image is obtained.',
                                  extra={REQUEST_ID: req_id})
            else:
                # download frame (cached by video id + frame index to avoid re-downloading)
                video_id = frame_info[VIDEO_ID]
                frame_index = frame_info[FRAME_INDEX]
                image_uniq_key = "video_{}_frame_{}.png".format(
                    video_id, frame_index)
                img_data = self.image_cache.get(image_uniq_key)
                if img_data is None:
                    # Cache miss: fetch the frame via the public API using the request's token.
                    api_token = event_data['api_token']
                    public_api = Api(self.server_address, api_token,
                                     retry_count=20, external_logger=self.logger)
                    img_data = public_api.video.frame.download_np(
                        video_id, frame_index)
                    self.image_cache.add(image_uniq_key, img_data)
                event_data['image_arr'] = img_data
                self.logger.trace('Frame is obtained.',
                                  extra={REQUEST_ID: req_id})
        # Non-inference requests pass through unchanged; inference requests now carry 'image_arr'.
        self.final_processing_queue.put(item=(event_data, req_id))
    except Exception as e:
        res_msg = {}
        self.logger.error(traceback.format_exc(), exc_info=True,
                          extra={'exc_str': str(e)})
        res_msg.update({
            'success': False,
            'error': json.dumps(traceback.format_exc())
        })
        # Report the failure back to the requester off-thread.
        self.thread_pool.submit(function_wrapper_nofail,
                                self._send_data, res_msg, req_id)  # skip errors
def handle_message_sync(self, request_msg):
    """Dispatch one incoming request message to the registered callback.

    STOP_COMMAND gets special treatment: the stop event is always set; if no user
    callback is registered, _default_stop runs and the app shuts down; if a callback
    is registered, it runs before shutdown. Shutdown is skipped entirely while
    _ignore_stop_for_debug is True. Unknown commands raise KeyError (logged, not fatal);
    any other exception shuts the app down unless _ignore_errors is set.
    """
    try:
        state = request_msg.get(STATE, None)
        context = request_msg.get(CONTEXT, None)
        command = request_msg["command"]
        user_api_token = request_msg["api_token"]
        # API client acting on behalf of the requesting user, not the agent.
        user_public_api = Api(self.server_address, user_api_token, retry_count=5,
                              external_logger=self.logger, ignore_task_id=self._ignore_task_id)
        if command == STOP_COMMAND:
            self.logger.info("APP receives stop signal from user")
            self.stop_event.set()
        # STOP without a user-registered handler: run the default stop and shut down.
        if command == STOP_COMMAND and command not in self.callbacks:
            _default_stop(user_public_api, self.task_id, context, state, self.logger)
            if self._ignore_stop_for_debug is False:
                #self.stop()
                asyncio.run_coroutine_threadsafe(self._shutdown(), self.loop)
                return
            else:
                self.logger.info("STOP event is ignored ...")
        elif command in AppService.DEFAULT_EVENTS and command not in self.callbacks:
            raise KeyError("App received default command {!r}. Use decorator \"callback\" to handle it."
                           .format(command))
        elif command not in self.callbacks:
            raise KeyError("App received unhandled command {!r}. Use decorator \"callback\" to handle it."
                           .format(command))
        # Reaching here means the command has a registered callback.
        if command == STOP_COMMAND:
            # User-defined stop handler runs first, then the app shuts down.
            if self._ignore_stop_for_debug is False:
                self.callbacks[command](api=user_public_api,
                                        task_id=self.task_id,
                                        context=context,
                                        state=state,
                                        app_logger=self.logger)
                asyncio.run_coroutine_threadsafe(self._shutdown(), self.loop)
                return
            else:
                self.logger.info("STOP event is ignored ...")
        else:
            self.callbacks[command](api=user_public_api,
                                    task_id=self.task_id,
                                    context=context,
                                    state=state,
                                    app_logger=self.logger)
    except KeyError as e:
        # Unknown/unhandled command: log and keep serving.
        self.logger.error(e, exc_info=False)
    except Exception as e:
        self.logger.error(traceback.format_exc(), exc_info=True, extra={'exc_str': repr(e)})
        if self._ignore_errors is False:
            self.logger.info("App will be stopped due to error")
            #asyncio.create_task(self._shutdown(error=e))
            asyncio.run_coroutine_threadsafe(self._shutdown(error=e), self.loop)
def download_tar(github_url, tar_path, github_token=None, version="master", log_progress=True):
    """Download a GitHub repo archive ({version}.tar.gz) to tar_path, streaming to disk.

    :param github_url: repository URL (".git" suffix optional).
    :param tar_path: destination file path; parent dirs are created.
    :param github_token: optional token sent as an Authorization header.
    :param version: branch/tag/commit to archive (default "master").
    :param log_progress: when True, report download progress.
    :raises: re-raises HTTP errors via Api._raise_for_status on non-200 responses.
    """
    headers = {}
    if github_token is not None:
        headers = {"Authorization": "token {}".format(github_token)}
    ensure_base_path(tar_path)
    if ".git" not in github_url:
        github_url += ".git"
    tar_url = github_url.replace(".git", "/archive/{}.tar.gz".format(version))
    r = requests.get(tar_url, headers=headers, stream=True)
    if r.status_code != requests.codes.ok:
        Api._raise_for_status(r)
    # BUGFIX: size the progress bar from the Content-Length header. Touching
    # r.content here would read the whole archive into memory, defeating
    # stream=True. (GitHub may omit the header; 0 then means "unknown total".)
    total_kb = int(r.headers.get('Content-Length', 0)) / 1024
    progress = Progress("Downloading (KB)", total_kb)
    with open(tar_path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=8192):
            f.write(chunk)
            # BUGFIX: honor log_progress — it was previously accepted but ignored.
            if log_progress:
                progress.iters_done_report(len(chunk) / 1024)
def download_or_get_repo(self):
    """Populate self.dir_task_src with the app's git repo, reusing the version cache
    for pinned versions and re-downloading "master" every time."""
    repo_url = self.app_info["githubUrl"]
    app_version = self.app_info.get("version", "master")

    # Pinned versions are cached under APPS_STORAGE_DIR/<owner-stripped repo path>/<version>.
    cache_dir = None
    if app_version != "master":
        repo_parts = Path(repo_url.replace(".git", "")).parts[1:]
        cache_dir = os.path.join(constants.APPS_STORAGE_DIR(), *repo_parts, app_version)

    # Cache hit: just copy the cached tree into the task dir.
    if cache_dir is not None and sly.fs.dir_exists(cache_dir):
        self.logger.info("Git repo already exists")
        shutil.copytree(cache_dir, self.dir_task_src)
        return

    self.logger.info("Git repo will be downloaded")
    api = Api(self.info['server_address'], self.info['api_token'])
    archive_path = os.path.join(self.dir_task_src, 'repo.tar.gz')
    api.app.download_git_archive(self.app_info["moduleId"],
                                 self.app_info["id"],
                                 app_version,
                                 archive_path,
                                 log_progress=True,
                                 ext_logger=self.logger)
    with tarfile.open(archive_path) as archive:
        archive.extractall(self.dir_task_src)

    # The tarball wraps everything in one top-level directory; flatten it into dir_task_src.
    subdirs = get_subdirs(self.dir_task_src)
    if len(subdirs) != 1:
        raise RuntimeError(
            "Repo is downloaded and extracted, but resulting directory not found"
        )
    wrapper_dir = os.path.join(self.dir_task_src, subdirs[0])
    for entry in os.listdir(wrapper_dir):
        shutil.move(os.path.join(wrapper_dir, entry),
                    os.path.join(self.dir_task_src, entry))
    remove_dir(wrapper_dir)
    silent_remove(archive_path)

    # Seed the cache so later tasks with the same pinned version skip the download.
    if cache_dir is not None:
        shutil.copytree(self.dir_task_src, cache_dir)