Example #1
0
    def _send_ok_response_to_mq(self, job_id, status_code, status_msg, result):
        """
        Send a success response back to the MLCP (Machine Learning Control Panel) via the message queue.
        :param job_id: the job identifier used for monitoring via the MLCP.
        :param status_code: result status of the flow run.
        :param status_msg: result message of the flow run.
        :param result: response of the flow run - included in the queue message only if JSON-serializable.
        :return: None
        """
        response_obj = {
            "job_id": job_id,
            "status_code": status_code,
            "status_msg": status_msg,
            "result": result
        }
        try:
            # `result` may hold arbitrary objects; json.dumps raises TypeError
            # (or ValueError for invalid float values) when it cannot serialize.
            # Catch only those instead of a blanket Exception so real bugs
            # (e.g. NameError) are not silently swallowed.
            response_json = json.dumps(response_obj)
        except (TypeError, ValueError) as error:
            print(error)
            # best-effort: drop the unserializable result so the status
            # still reaches the MLCP
            response_obj['result'] = {}
            response_json = json.dumps(response_obj)

        message_queue_instance.send_message(
            settings['queues']['send_queue_name'], response_json)
Example #2
0
File: main.py  Project: nerav-doshi/mlapp
 def _dispatch_jobs_to_mq(self, configurations):
     """
     Send configurations to the queue to be picked up later by a listening Application/Worker.
     :param configurations: list of configurations to be sent
     :return: None
     """
     for configuration in configurations:
         # Serialize exactly once. The original code called json.dumps on the
         # already-dumped string, so receivers got a JSON-encoded string of a
         # JSON string instead of a JSON object.
         response_json = json.dumps(configuration)
         message_queue_instance.send_message(settings['queues']['send_queue_name'], response_json)
Example #3
0
File: main.py  Project: nerav-doshi/mlapp
 def _send_error_response_to_mq(self, job_id, status_code, status_msg):
     """
     Report a failed flow run back to the MLCP (Machine Learning Control Panel) via the message queue.
     :param job_id: the job identifier used for monitoring via the MLCP.
     :param status_code: error status of the flow run.
     :param status_msg: error message of the flow run.
     :return: None
     """
     # build the error payload, then serialize and enqueue it
     payload = {
         "job_id": job_id,
         "status_code": status_code,
         "status_msg": status_msg
     }
     message_queue_instance.send_message(settings['queues']['send_queue_name'], json.dumps(payload))
Example #4
0
File: main.py  Project: nerav-doshi/mlapp
    def run_msg_sender(self, asset_name, config_path, config_name=None):
        """
        Endpoint of the ML App Library to be used in a local environment.
        Sends a local configuration file from your local computer to be run in
        an outside Application/Worker via the message queue.
        :param asset_name: name of the asset to be run
        :param config_path: path to configuration file
        :param config_name: in case configuration file is python looks for variable in this name as the configuration
        """
        # Best-effort: try the path as a JSON file first; on any failure fall
        # back to treating it as a python config module.
        try:
            message_to_send = read_json_file(config_path)
        except Exception:
            message_to_send = self._read_py_file(asset_name, config_path, config_name)

        # tag the message with a fresh job id so the run can be monitored
        job_id = str(uuid.uuid4())
        message_to_send['job_id'] = job_id

        target_queue = settings['queues']['listen_queue_names'][0]
        message_queue_instance.send_message(target_queue, json.dumps(message_to_send))
        print("Message Sent (job_id: " + job_id + "): ", asset_name, config_path)