def post_resource():
    """Store the uploaded file stream as a resource and return its id as JSON.

    Any failure is logged and converted into a fail response instead of a
    raw 500, since this is a request-boundary handler.
    """
    try:
        payload = {
            'status': ResourcePostStatus.SUCCESS,
            'resource_id': upload_file_stream(request.files['data'])
        }
        return JSONEncoder().encode(payload)
    except Exception as e:
        app.logger.error(e)
        return _make_fail_response('Internal error: "{}"'.format(str(e)))
def _postprocess_outputs(self, outputs):
    """Upload each produced output file and attach its resource id to the node.

    Args:
        outputs: mapping of output name -> local filename produced by the job.

    Raises:
        IOError: if an expected output file does not exist on disk.
        AssertionError: if the node does not declare exactly one output
            with the given name.
    """
    for key, filename in outputs.items():
        # Fail fast on a missing file before touching node metadata.
        if not os.path.exists(filename):
            raise IOError(
                "Output `{}` (filename: `{}`) does not exist".format(
                    key, filename))
        matching_outputs = [o for o in self.node.outputs if o.name == key]
        # Original message claimed "more than 1" even when zero outputs
        # matched; report the actual count instead.
        assert len(matching_outputs) == 1, \
            "Expected exactly 1 output with the name `{}`, found {}".format(
                key, len(matching_outputs))
        # Let the resource type rewrite/convert the file before upload
        # (e.g. archiving a directory) -- behavior defined by the resource class.
        filename = resource_manager[
            matching_outputs[0].file_type].postprocess_output(filename)
        with open(filename, 'rb') as f:
            self.node.get_output_by_name(key).resource_id = upload_file_stream(f)
def upload_logs(self, final=False):
    """Upload any log files that grew since the last call.

    Args:
        final: when True, marks the logs as finalized so that later calls
            become no-ops.

    The whole scan runs under the class-level lock so that a periodic
    uploader and the final upload cannot race each other.
    """
    with BaseBash.logs_lock:
        if self.final_logs_uploaded:
            return
        self.final_logs_uploaded = final
        for key, filename in self.logs.items():
            if key not in self.logs_sizes:
                # the logs have not been initialized yet
                continue
            if not os.path.exists(filename):
                continue
            # stat once and reuse: the original stat-ed the file twice,
            # which is redundant and racy if the file grows in between.
            file_size = os.stat(filename).st_size
            if file_size == self.logs_sizes[key]:
                # unchanged since the last upload
                continue
            log = self.node.get_log_by_name(key)
            self.logs_sizes[key] = file_size
            with open(filename, 'rb') as f:
                # resource_id should be None if the file has not been uploaded yet
                # otherwise assign it
                log.resource_id = upload_file_stream(f, log.resource_id)
def execute_job(self, executor):
    """Run a single job via `executor` and persist the resulting node state.

    The node's running status is derived from the executor's return status
    (or from the kill list); the node is always saved to the RUNS collection
    and deregistered from the executor map, even on failure.
    """
    try:
        try:
            # Default to FAILED so any exception raised before run()
            # completes still leaves `status` well-defined.
            status = JobReturnStatus.FAILED
            # Fresh unique working directory per run.
            executor.workdir = os.path.join('/tmp', str(uuid.uuid1()))
            executor.init_workdir()
            # NOTE(review): TickThread presumably runs periodic side work
            # while the job executes -- confirm in its definition.
            with TickThread(executor):
                status = executor.run()
        except Exception:
            try:
                # Attach the full traceback to the node's `worker` log so
                # the failure is visible outside this process.
                f = six.BytesIO()
                f.write(traceback.format_exc().encode())
                executor.node.get_log_by_name('worker').resource_id = upload_file_stream(f)
                logging.error(traceback.format_exc())
            except Exception:
                # This case of `except` has happened before due to I/O failure
                logging.critical(traceback.format_exc())
                raise
        finally:
            executor.clean_up()
        logging.info('Node {node_id} `{title}` finished with status `{status}`'.format(
            node_id=executor.node._id,
            title=executor.node.title,
            status=status,
        ))
        if status == JobReturnStatus.SUCCESS:
            executor.node.node_running_status = NodeRunningStatus.SUCCESS
        elif executor.node._id in self._killed_run_ids:
            # A non-successful run that was explicitly killed is reported
            # as CANCELED (and removed from the kill list) rather than FAILED.
            self._killed_run_ids.remove(executor.node._id)
            executor.node.node_running_status = NodeRunningStatus.CANCELED
        elif status == JobReturnStatus.FAILED:
            executor.node.node_running_status = NodeRunningStatus.FAILED
        else:
            raise Exception("Unknown return status value: `{}`".format(status))
    except Exception as e:
        logging.warning('Execution failed: {}'.format(e))
        executor.node.node_running_status = NodeRunningStatus.FAILED
    finally:
        # Always persist the node state and drop the executor registration,
        # regardless of how the run ended.
        executor.node.save(collection=Collections.RUNS)
        with self._run_id_to_executor_lock:
            del self._run_id_to_executor[executor.node._id]
def upload_file():
    """Store the uploaded stream as a resource and wrap it in a STATIC file node.

    Reads `title`, `description`, `file_type` and `node_kind` from the form
    data. Returns a JSON payload with the resource id and the created node,
    or a 400 fail response for a malformed request / unknown file type.
    """
    # `assert` is stripped under `python -O` and turns a client error into a
    # 500; validate explicitly and answer with a 400 instead.
    if len(request.files) != 1:
        return make_fail_response(
            'Expected exactly one file in the request'), 400
    title = request.form.get('title', '{title}')
    description = request.form.get('description', '{description}')
    file_type = request.form.get('file_type', FILE_KIND)
    node_kind = request.form.get('node_kind', 'basic-file')
    app.logger.debug(request)
    if file_type not in RESOURCE_TYPES:
        app.logger.debug(file_type)
        app.logger.debug(RESOURCE_TYPES)
        return make_fail_response(
            'Unknown file type `{}`'.format(file_type)), 400
    resource_id = upload_file_stream(request.files['data'])
    # `file_node` (renamed from `file`, which shadowed the builtin) is a
    # static, already-READY node holding a single output.
    file_node = plynx.db.node.Node.from_dict({
        'title': title,
        'description': description,
        'kind': node_kind,
        'node_running_status': NodeRunningStatus.STATIC,
        'node_status': NodeStatus.READY,
    })
    file_node.outputs.append(
        plynx.db.node.Output.from_dict({
            'name': 'file',
            'file_type': file_type,
            'values': [resource_id],
        }))
    file_node.author = g.user._id
    file_node.save()
    return JSONEncoder().encode({
        'status': ResourcePostStatus.SUCCESS,
        'resource_id': resource_id,
        'node': file_node.to_dict(),
    })
def upload_file():
    """Store the uploaded stream as a resource on a fresh default file node.

    Returns a JSON payload with the resource id and the saved node, or a
    400 fail response for a malformed request / unknown file type.
    """
    # `assert` is stripped under `python -O` and turns a client error into a
    # 500; validate explicitly and answer with a 400 instead.
    if len(request.files) != 1:
        return make_fail_response(
            'Expected exactly one file in the request'), 400
    title = request.form.get('title', '{title}')
    description = request.form.get('description', '{description}')
    file_type = request.form.get('file_type', FileCls.NAME)
    if file_type not in RESOURCE_TYPES:
        return make_fail_response(
            'Unknown file type `{}`'.format(file_type)), 400
    resource_id = upload_file_stream(request.files['data'])
    # `file_node` (renamed from `file`, which shadowed the builtin) starts
    # from the class default and gets the upload attached to its first output.
    file_node = FileNodeClass.get_default()
    file_node.author = g.user._id
    file_node.title = title
    file_node.description = description
    file_node.outputs[0].resource_id = resource_id
    file_node.outputs[0].file_type = file_type
    file_node.save()
    return JSONEncoder().encode({
        'status': ResourcePostStatus.SUCCESS,
        'resource_id': resource_id,
        'node': file_node.to_dict(),
    })
def upload_logs(self, final=False):
    """Upload any log files that grew since the last call.

    Args:
        final: when True, marks the logs as finalized so that later calls
            become no-ops.

    Returns:
        bool: True if at least one log was (re-)uploaded, False otherwise.
    """
    is_dirty = False
    with self.logs_lock:
        if self.final_logs_uploaded:
            return is_dirty
        self.final_logs_uploaded = final
        for key, filename in self.logs.items():
            if key not in self.logs_sizes:
                # the logs have not been initialized yet
                continue
            if not os.path.exists(filename):
                continue
            # stat once and reuse: the original stat-ed the file twice,
            # which is redundant and racy if the file grows in between.
            file_size = os.stat(filename).st_size
            if file_size == self.logs_sizes[key]:
                # unchanged since the last upload
                continue
            is_dirty = True
            log = self.node.get_log_by_name(key)
            self.logs_sizes[key] = file_size
            with open(filename, 'rb') as f:
                # resource_id should be None if the file has not been uploaded yet
                # otherwise assign it
                previous_id = log.values[0] if len(log.values) > 0 else None
                log.values = [upload_file_stream(f, previous_id)]
    return is_dirty
def _postprocess_outputs(self, outputs):
    """Upload each produced output file and record its value on the node.

    Args:
        outputs: mapping of output name -> local filename produced by the job.

    Raises:
        IOError: if an expected output file does not exist on disk.
        AssertionError: if the node does not declare exactly one output
            with the given name.
    """
    for key, filename in outputs.items():
        logging.info("Uploading output `{}` - `{}`".format(key, filename))
        # Fail fast on a missing file before touching node metadata.
        if not os.path.exists(filename):
            raise IOError(
                "Output `{}` (filename: `{}`) does not exist".format(
                    key, filename))
        logging.info('path exists')
        matching_outputs = [o for o in self.node.outputs if o.name == key]
        # Original message claimed "more than 1" even when zero outputs
        # matched; report the actual count instead.
        assert len(matching_outputs) == 1, \
            "Expected exactly 1 output with the name `{}`, found {}".format(
                key, len(matching_outputs))
        # Let the resource class rewrite/convert the file before upload.
        filename = self._resource_manager.kind_to_resource_class[
            matching_outputs[0].file_type].postprocess_output(filename)
        logging.info(filename)
        with open(filename, 'rb') as f:
            self.node.get_output_by_name(key).values = [
                upload_file_stream(f)
            ]
        logging.info(self.node.get_output_by_name(key).to_dict())
def _postprocess_logs(self, logs):
    """Upload every existing, non-empty log file and attach its resource id
    to the corresponding log entry on the node.
    """
    for name, path in logs.items():
        # Skip logs that were never created or stayed empty.
        if not os.path.exists(path):
            continue
        if os.stat(path).st_size == 0:
            continue
        with open(path, 'rb') as stream:
            self.node.get_log_by_name(name).resource_id = upload_file_stream(stream)
def _postprocess_outputs(self, outputs):
    """Upload every output file that exists on disk and attach its resource
    id to the corresponding output entry on the node.
    """
    for name, path in outputs.items():
        # Missing outputs are silently skipped in this variant.
        if not os.path.exists(path):
            continue
        with open(path, 'rb') as stream:
            self.node.get_output_by_name(name).resource_id = upload_file_stream(stream)
def post_resource():
    """Store the uploaded file stream as a resource and return its id as JSON."""
    uploaded_id = upload_file_stream(request.files['data'])
    payload = {
        'status': ResourcePostStatus.SUCCESS,
        'resource_id': uploaded_id
    }
    return JSONEncoder().encode(payload)
def _run_worker(self):
    """Main worker loop: poll the master for jobs, run them, report results.

    Each iteration opens a fresh socket to the master and acts on the
    current `_run_status`:
      * IDLE    -> request a job; if one is assigned, run it to completion.
      * RUNNING -> invalid here (the run happens inline above), raise.
      * SUCCESS/FAILED -> report the finished job; on acknowledgement,
        go back to IDLE.
    The loop sleeps RUNNER_TIMEOUT between iterations and exits when
    `_stop_event` is set.
    """
    while not self._stop_event.is_set():
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                if self._run_status == RunStatus.IDLE:
                    # Idle: ask the master for a new job.
                    sock.connect((self._host, self._port))
                    message = WorkerMessage(
                        worker_id=self._worker_id,
                        run_status=self._run_status,
                        message_type=WorkerMessageType.GET_JOB,
                        body=None,
                        graph_id=None)
                    send_msg(sock, message)
                    master_message = recv_msg(sock)
                    logging.debug(
                        "Asked for a job; Received mesage: {}".format(
                            master_message))
                    if master_message and master_message.message_type == MasterMessageType.SET_JOB:
                        logging.info(
                            "Got the job: graph_id=`{graph_id}` job_id=`{job_id}`"
                            .format(
                                graph_id=master_message.graph_id,
                                job_id=master_message.job.node._id,
                            ))
                        self._job_killed = False
                        self._started_at = datetime.datetime.now()
                        self._job = master_message.job
                        self._graph_id = master_message.graph_id
                        self._set_run_status(RunStatus.RUNNING)
                        # Optional per-node timeout, read from the node's
                        # `_timeout` parameter when present.
                        timeout_parameter = self._job.node.get_parameter_by_name(
                            '_timeout', throw=False)
                        if timeout_parameter:
                            self._node_timeout = int(
                                timeout_parameter.value)
                        else:
                            self._node_timeout = None
                        try:
                            self._job.init_workdir()
                            status = self._job.run()
                        except Exception:
                            try:
                                # Record the failure and attach the traceback
                                # to the node's `worker` log.
                                status = JobReturnStatus.FAILED
                                f = six.BytesIO()
                                f.write(traceback.format_exc().encode())
                                self._job.node.get_log_by_name(
                                    'worker'
                                ).resource_id = upload_file_stream(f)
                            except Exception:
                                # Even reporting the failure failed
                                # (e.g. I/O error): log and shut down.
                                logging.critical(traceback.format_exc())
                                self.stop()
                        finally:
                            self._job.clean_up()
                        self._job_killed = True
                        if status == JobReturnStatus.SUCCESS:
                            self._set_run_status(RunStatus.SUCCESS)
                        elif status == JobReturnStatus.FAILED:
                            self._set_run_status(RunStatus.FAILED)
                        logging.info(
                            "Worker(`{worker_id}`) finished with status {status}"
                            .format(
                                worker_id=self._worker_id,
                                status=self._run_status,
                            ))
                elif self._run_status == RunStatus.RUNNING:
                    # The job runs inline in the IDLE branch above, so the
                    # loop should never observe RUNNING here.
                    raise RunningPipelineException(
                        "Not supposed to have this state")
                elif self._run_status in [
                        RunStatus.SUCCESS, RunStatus.FAILED
                ]:
                    # A finished job needs to be reported to the master.
                    sock.connect((self._host, self._port))
                    if self._run_status == RunStatus.SUCCESS:
                        status = WorkerMessageType.JOB_FINISHED_SUCCESS
                    elif self._run_status == RunStatus.FAILED:
                        status = WorkerMessageType.JOB_FINISHED_FAILED
                    message = WorkerMessage(worker_id=self._worker_id,
                                            run_status=self._run_status,
                                            message_type=status,
                                            body=self._job,
                                            graph_id=self._graph_id)
                    send_msg(sock, message)
                    master_message = recv_msg(sock)
                    # Only return to IDLE once the master acknowledged the
                    # result; otherwise retry on the next iteration.
                    if master_message and master_message.message_type == MasterMessageType.AKNOWLEDGE:
                        self._set_run_status(RunStatus.IDLE)
            finally:
                sock.close()
        except socket.error:
            # Transient connection failure: retry on the next tick.
            pass
        except Exception:
            self.stop()
            raise
        self._stop_event.wait(timeout=Worker.RUNNER_TIMEOUT)
    logging.info("Exit {}".format(self._run_worker.__name__))
def post_resource():
    """Store the uploaded file stream as a resource and answer with its id."""
    uploaded_id = upload_file_stream(request.files['data'])
    return make_success_response({'resource_id': uploaded_id})