Example #1
0
def write_finalized_job(workflow_instance_data, worker_output_data, worker_id, connection):
    """Persist a worker's finalized job and announce it on the finalized queue.

    The enriched payload from ``prepare_write_output`` is stored in a
    per-(workflow, instance, worker) sorted set scored by the current time
    (so range reads come back in completion order), and a small summary
    message is pushed onto the global finalized queue.

    :param workflow_instance_data: current instance payload (dict)
    :param worker_output_data: output produced by the worker
    :param worker_id: id of the worker that finished the job
    :param connection: Redis connection
    """
    # NOTE: removed a stale commented-out duplicate of this call.
    new_workflow_instance_data = prepare_write_output(
        workflow_instance_data, worker_output_data, worker_id
    )
    instance = new_workflow_instance_data['workflow_instance']
    connection.zadd(
        REDIS_KEY_INSTANCE_WORKER_FINALIZED_JOBS % (
            instance['workflow'], instance['id'], worker_id
        ),
        time(),
        to_json(new_workflow_instance_data)
    )
    connection.lpush(
        REDIS_KEY_QUEUE_FINALIZED,
        to_json({
            'workflow': instance['workflow'],
            'workflow_instance_id': instance['id'],
            'workflow_instance_name': instance.get('name', 'untitled'),
            # TODO: be able to always return a valid meta_worker
            'meta_worker': instance.get('meta_worker', 'not sure'),
            'worker_id': worker_id
        })
    )
Example #2
0
    def setUpClass(cls):
        """Stage config + workflow fixtures under /tmp and boot a test client."""
        base = '/tmp/tests_poliglo_server'
        cls.test_base_path = base
        cls.config_path = _write_file(base, 'config.json', to_json(CONFIG))
        cls.workflows_path = os.path.join(base, 'workflows')

        # One JSON fixture file per workflow definition.
        for item in WORKFLOWS:
            filename = 'workflow_' + item.get('id') + '.json'
            _write_file(cls.workflows_path, filename, to_json(item))

        poliglo_server.CONFIG = poliglo_server.load_config(cls.config_path)
        poliglo_server.DEBUG = True
        poliglo_server.WORKFLOWS = poliglo_server.load_workflows(cls.workflows_path)
        cls.app = poliglo_server.app.test_client()
Example #3
0
 def test_get_one_workflow_instance(self):
     """Creating an instance and fetching it by id returns its name."""
     create_resp = self.app.post(
         '/workflows/workflow_1/workflow_instances',
         data=to_json({'name': 'Script 1 - 1'}),
         headers={'content-type': 'application/json'}
     )
     instance_id = json_loads(create_resp.data).get('id')
     fetch_resp = self.app.get('/workflow_instances/' + instance_id)
     fetched = json_loads(fetch_resp.data)
     self.assertEqual('Script 1 - 1', fetched.get('name'))
def process(specific_info, data, *args):
    """Park the incoming job on its waiting sorted set and emit no output.

    The job is scored by the current time; an empty list signals that no
    data should flow to downstream workers yet.
    """
    inputs = poliglo.get_inputs(data, specific_info)
    connection = args[0].get('connection')
    process_info = data['process']
    queue_key = get_waiting_queue_name(
        process_info['id'], process_info['worker_id'], inputs['wait_jobs_from']
    )
    connection.zadd(queue_key, time(), to_json(data))
    return []
Example #5
0
 def test_get_workflow_instance_status_pending(self):
     """A freshly created instance reports the 'pending' status."""
     create_resp = self.app.post(
         '/workflows/workflow_1/workflow_instances',
         data=to_json({'name': 'Script 1 - 1'}),
         headers={'content-type': 'application/json'}
     )
     instance_id = json_loads(create_resp.data).get('id')
     status_resp = self.app.get('/workflow_instances/' + instance_id + '/status')
     payload = json_loads(status_resp.data)
     self.assertEqual('pending', payload.get('status'))
Example #6
0
    def test_workflow_get_workflow_instance_one_exists(self):
        """Listing instances after one POST yields exactly that instance."""
        url = '/workflows/workflow_1/workflow_instances'
        self.app.post(
            url,
            data=to_json({'name': 'Script 1 - 1'}),
            headers={'content-type': 'application/json'}
        )
        listing = json_loads(self.app.get(url).data)
        self.assertEqual(1, len(listing))
        self.assertEqual('Script 1 - 1', listing[0].get('name'))
 def _setup_config(cls):
     """Build a minimal Redis/server config dict and persist it to /tmp.

     Sets ``cls.config`` and ``cls.config_path`` as side effects so the
     test class can point the server at the written file.
     """
     cls.config = {
         "all": {
             "REDIS_HOST": "127.0.0.1",
             "REDIS_PORT": 6379,
             "REDIS_DB": 5,
             "POLIGLO_SERVER_URL": "http://localhost:9016"
         }
     }
     cls.config_path = "/tmp/config.json"
     # Context manager closes the handle deterministically; the previous
     # open(...).write(...) form leaked it until garbage collection.
     with open(cls.config_path, 'w') as config_file:
         config_file.write(to_json(cls.config))
Example #8
0
def get_workflow_instance_outputs(workflow_instance_id):
    """Return a JSON array of the final worker's output for each finalized job."""
    connection = get_connection(CONFIG.get('all'))
    instance_data = _get_workflow_instance(connection, workflow_instance_id)
    workflow_id = instance_data.get('type')
    workflow = _get_workflow(workflow_id)
    # TODO: Manage multiple final nodes
    last_worker_id = _find_last_worker_id(workflow, workflow.get('start_worker_id'))[0]
    redis_key = REDIS_KEY_INSTANCE_WORKER_FINALIZED_JOBS % (
        workflow_id, workflow_instance_id, last_worker_id
    )
    outputs = []
    for raw in connection.zrange(redis_key, 0, -1):
        outputs.append(json_loads(raw).get('workers_output', {}).get(last_worker_id))
    return to_json(outputs)
Example #9
0
    def test_workflow_get_workflow_instance_one_exists(self):
        """After creating one instance, GET on the collection returns it."""
        url = '/workflows/workflow_1/workflow_instances'
        post_body = to_json({'name': 'Script 1 - 1'})
        self.app.post(url, data=post_body,
                      headers={'content-type':'application/json'})

        instances = json_loads(self.app.get(url).data)
        self.assertEqual(1, len(instances))
        self.assertEqual('Script 1 - 1', instances[0].get('name'))
Example #10
0
    def setUpClass(cls):
        """Write config + workflow fixtures to /tmp and create a test client."""
        cls.test_base_path = '/tmp/tests_poliglo_server'
        cls.config_path = _write_file(cls.test_base_path, 'config.json', to_json(CONFIG))
        cls.workflows_path = os.path.join(cls.test_base_path, 'workflows')

        # Serialize each workflow definition into its own JSON file.
        for wf in WORKFLOWS:
            wf_filename = 'workflow_' + wf.get('id') + '.json'
            _write_file(cls.workflows_path, wf_filename, to_json(wf))

        poliglo_server.CONFIG = poliglo_server.load_config(cls.config_path)
        poliglo_server.DEBUG = True
        poliglo_server.WORKFLOWS = poliglo_server.load_workflows(cls.workflows_path)
        cls.app = poliglo_server.app.test_client()
Example #11
0
def write_one_output(connection, output_meta_worker, output_worker_id, workflow_instance_data):
    """Register a fresh job id on the instance and hand it to the next worker.

    Mutates ``workflow_instance_data`` in place (jobs_ids, worker_id,
    meta_worker) before serializing it onto the next worker's queue.
    """
    job_id = str(uuid4())
    instance = workflow_instance_data['workflow_instance']
    # Build a new list rather than appending, matching the original's
    # copy-then-assign behavior for jobs_ids.
    workflow_instance_data['jobs_ids'] = workflow_instance_data['jobs_ids'] + [job_id]
    instance['worker_id'] = output_worker_id
    instance['meta_worker'] = output_meta_worker
    add_new_job_id(
        connection, instance['workflow'], instance['id'], output_worker_id, job_id
    )

    add_data_to_next_worker(connection, output_meta_worker, to_json(workflow_instance_data))
    def _setup_script(cls):
        """Write the wait_jobs integration-test script definition to disk.

        Builds a four-worker pipeline (generate -> filter -> wait -> count)
        and serializes it to ``cls.script_path``; sets ``cls.script_path``
        as a side effect.
        """
        script = {
            "id": "test_wait_jobs",
            "name": "test_wait_jobs",
            "start_worker_id": "generate_numbers_1",
            "workers": [
                {
                    "worker_type": "generate_numbers",
                    "id": "generate_numbers_1",
                    "default_inputs": {
                        "numbers_range": [0, 10],
                        "sleep": 0
                    },
                    "next_workers": ["filter_1"]
                },
                {
                    "worker_type": "filter",
                    "id": "filter_1",
                    # min of 1000 filters out everything from [0, 10),
                    # forcing the wait_jobs worker to see zero passed jobs.
                    "default_inputs": {
                        "min": 1000
                    },
                    "next_workers": ["wait_jobs_1"]
                },
                {
                    "worker_type": "wait_jobs",
                    "id": "wait_jobs_1",
                    "default_inputs": {
                        "wait_jobs_from": ["generate_numbers_1", "filter_1", "wait_jobs_1"]
                    },
                    "next_workers": ["count_numbers_1"]
                },
                {
                    "id": "count_numbers_1",
                    "worker_type": "count_numbers",
                    "next_workers": []
                }
            ]
        }

        cls.script_path = "/tmp/wait_jobs_test_scripts"
        if not os.path.exists(cls.script_path):
            os.makedirs(cls.script_path)
        # Context manager closes the handle; the previous open(...).write(...)
        # form leaked the file object.
        with open(cls.script_path + "/script_test_wait_jobs.json", 'w') as script_file:
            script_file.write(to_json(script))
Example #13
0
    def _setup_workflow(cls):
        """Write the wait_jobs integration-test workflow definition to disk.

        Mirrors ``_setup_script`` but in the workers-as-dict format; sets
        ``cls.workflow_path`` as a side effect.
        """
        workflow = {
            "id": "test_wait_jobs",
            "name": "test_wait_jobs",
            "start_worker_id": "generate_numbers_1",
            "workers": {
                "generate_numbers_1": {
                    "meta_worker": "generate_numbers",
                    "default_inputs": {
                        "numbers_range": [0, 10],
                        "sleep": 0
                    },
                    "next_workers": ["filter_1"]
                },
                "filter_1": {
                    "meta_worker": "filter",
                    # min of 1000 rejects all numbers in [0, 10), so the
                    # wait_jobs worker observes zero passed jobs.
                    "default_inputs": {
                        "min": 1000
                    },
                    "next_workers": ["wait_jobs_1"]
                },
                "wait_jobs_1": {
                    "meta_worker": "wait_jobs",
                    "default_inputs": {
                        "wait_jobs_from": ["generate_numbers_1", "filter_1", "wait_jobs_1"]
                    },
                    "next_workers": ["count_numbers_1"]
                },
                "count_numbers_1": {
                    "meta_worker": "count_numbers",
                    "next_workers": []
                }
            }
        }

        cls.workflow_path = "/tmp/wait_jobs_test_workflows"
        if not os.path.exists(cls.workflow_path):
            os.makedirs(cls.workflow_path)
        # Context manager closes the handle; the previous open(...).write(...)
        # form leaked the file object.
        with open(cls.workflow_path + "/workflow_test_wait_jobs.json", 'w') as workflow_file:
            workflow_file.write(to_json(workflow))
Example #14
0
def stats_add_new_instance(connection, workflow, workflow_instance_info):
    """Record a new instance in the per-workflow sorted set, scored by now."""
    redis_key = REDIS_KEY_INSTANCES % workflow
    serialized = to_json(workflow_instance_info)
    connection.zadd(redis_key, time(), serialized)
Example #15
0
        })
    )

def write_error_job(connection, worker_id, raw_data, error):
    """Persist a failed job (error + traceback) into a Redis errors sorted set.

    Best-effort by design: if ``raw_data`` is not valid JSON (or lacks the
    expected instance keys), a fallback payload keyed under 'unknown' is
    stored; if the enriched payload itself cannot be serialized, a
    unicode-coerced copy is stored instead. The error is never dropped.

    :param connection: Redis connection
    :param worker_id: id of the worker that raised the error
    :param raw_data: the raw (expected-JSON) job payload
    :param error: the exception or message to record
    """
    metric_name = 'errors'
    try:
        workflow_instance_data = json_loads(raw_data)
        if not workflow_instance_data.get('workers_error'):
            workflow_instance_data['workers_error'] = {}
        workflow_instance_data['workers_error'][worker_id] = {
            'error': str(error), 'traceback': traceback.format_exc()
        }
        metric_name = REDIS_KEY_INSTANCE_WORKER_ERRORS % (
            workflow_instance_data['workflow_instance']['workflow'],
            workflow_instance_data['workflow_instance']['id'],
            worker_id
        )
    # Deliberate catch-all: error reporting must not raise. The bound
    # exception variable was unused, so it is no longer captured.
    except Exception:
        workflow_instance_data = {'workers_error': {}, 'raw_data': raw_data}
        workflow_instance_data['workers_error'][worker_id] = {
            'error': 'cannot json_loads', 'traceback': traceback.format_exc()
        }
        metric_name = REDIS_KEY_INSTANCE_WORKER_ERRORS % (
            'unknown', 'unknown', worker_id
        )
    try:
        json_encoded = to_json(workflow_instance_data)
    except Exception:  # e.g. non-serializable values; retry after coercion
        json_encoded = to_json(convert_object_to_unicode(workflow_instance_data))
    connection.zadd(metric_name, time(), json_encoded)