def test_get_one_workflow_instance(self):
    """Creating a workflow instance and fetching it by id returns its name."""
    create_response = self.app.post(
        '/workflows/workflow_1/workflow_instances',
        data=to_json({'name': 'Script 1 - 1'}),
        headers={'content-type': 'application/json'})
    instance_id = json_loads(create_response.data).get('id')
    fetch_response = self.app.get('/workflow_instances/' + instance_id)
    fetched_instance = json_loads(fetch_response.data)
    self.assertEqual('Script 1 - 1', fetched_instance.get('name'))
def test_get_workflow_instance_status_pending(self):
    """A freshly created workflow instance reports status 'pending'."""
    created = self.app.post(
        '/workflows/workflow_1/workflow_instances',
        data=to_json({'name': 'Script 1 - 1'}),
        headers={'content-type': 'application/json'})
    instance_id = json_loads(created.data).get('id')
    status_response = self.app.get(
        '/workflow_instances/' + instance_id + '/status')
    status_payload = json_loads(status_response.data)
    self.assertEqual('pending', status_payload.get('status'))
def test_reuse_already_created_workflow_instance(self):
    """Starting the same workflow instance twice reuses the original instance id."""
    _, first_raw = self.connection.lpush.call_args[0]
    first_payload = json_loads(first_raw)
    # Start again with identical parameters; the existing instance should be reused.
    poliglo.start.start_workflow_instance(
        self.connection, self.workflow, self.start_meta_worker,
        self.start_worker_id, self.workflow_instance_name, self.initial_data)
    _, second_raw = self.connection.lpush.call_args[0]
    second_payload = json_loads(second_raw)
    self.assertEqual(
        select_dict_el(first_payload, 'workflow_instance.id'),
        select_dict_el(second_payload, 'workflow_instance.id'))
def test_get_meta_worker_workflow_set_outputs_type(self):
    """filter_1 in workflow_1 lists 'write' as its next worker type."""
    response = self.app.get('/meta_workers/filter/workflows')
    workflows_by_meta_worker = json_loads(response.data)
    filter_entry = workflows_by_meta_worker['workflow_1']['filter_1']
    self.assertEqual(['write'], filter_entry.get('__next_workers_types'))
def test_get_all_workflows(self):
    """GET /workflows returns both configured workflows."""
    response = self.app.get('/workflows')
    all_workflows = json_loads(response.data)
    self.assertEqual(2, len(all_workflows))
    listed_names = sorted(entry.get('name') for entry in all_workflows)
    self.assertEqual(sorted(['Script 1', 'Script 2']), listed_names)
def test_inputs_set(self):
    """The queued job targets the start worker's queue and carries the initial inputs."""
    queue_name, raw_payload = self.connection.lpush.call_args[0]
    payload = json_loads(raw_payload)
    expected_queue = poliglo.variables.REDIS_KEY_QUEUE % self.start_meta_worker
    self.assertEqual(expected_queue, queue_name)
    self.assertEqual(self.initial_data, payload['inputs'])
def prepare_worker(master_mind_url, meta_worker):
    """Fetch a meta-worker's workflow definitions and open its backend connection.

    Returns a (worker_workflows, connection) tuple.
    """
    workflows_url = POLIGLO_SERVER_URL_WORKER_WORKFLOWS % (master_mind_url, meta_worker)
    _, _, body = make_request(workflows_url)
    workflows = json_loads(body)
    config = get_config(master_mind_url, meta_worker)
    return workflows, get_connection(config)
def test_workflow_get_workflow_instance_one_exists(self):
    """Listing instances after creating one returns exactly that instance."""
    instances_url = '/workflows/workflow_1/workflow_instances'
    self.app.post(
        instances_url,
        data=to_json({'name': 'Script 1 - 1'}),
        headers={'content-type': 'application/json'})
    listing = json_loads(self.app.get(instances_url).data)
    self.assertEqual(1, len(listing))
    self.assertEqual('Script 1 - 1', listing[0].get('name'))
def test_workflow_get_workflow_instance_one_exists(self):
    """After one POST, the instances listing holds a single matching entry."""
    endpoint = '/workflows/workflow_1/workflow_instances'
    post_body = to_json({'name': 'Script 1 - 1'})
    self.app.post(endpoint, data=post_body,
                  headers={'content-type': 'application/json'})
    instances = json_loads(self.app.get(endpoint).data)
    self.assertEqual(1, len(instances))
    first_instance = instances[0]
    self.assertEqual('Script 1 - 1', first_instance.get('name'))
def test_get_meta_worker_workflows(self):
    """The filter meta-worker is attached to the expected workers per workflow."""
    response = self.app.get('/meta_workers/filter/workflows')
    meta_worker_workflows = json_loads(response.data)
    # list(...) keeps the comparison valid on Python 3, where dict.keys()
    # returns a view object that never equals a plain list; on Python 2 it
    # is a no-op (keys() is already a list).
    self.assertEqual(['filter_1'], list(meta_worker_workflows['workflow_1'].keys()))
    self.assertEqual(
        sorted(['filter_1', 'filter_2']),
        sorted(meta_worker_workflows['workflow_2'].keys()))
    self.assertEqual(
        WORKFLOWS[1]['workers']['filter_1']['default_inputs'],
        meta_worker_workflows['workflow_2']['filter_1']['default_inputs'])
def get_workflow_instance_outputs(workflow_instance_id):
    """Collect the finalized outputs of a workflow instance's last worker as JSON."""
    connection = get_connection(CONFIG.get('all'))
    instance_data = _get_workflow_instance(connection, workflow_instance_id)
    workflow_id = instance_data.get('type')
    workflow = _get_workflow(workflow_id)
    # TODO: Manage multiple final nodes
    last_worker_id = _find_last_worker_id(workflow, workflow.get('start_worker_id'))[0]
    finalized_key = REDIS_KEY_INSTANCE_WORKER_FINALIZED_JOBS % (
        workflow_id, workflow_instance_id, last_worker_id)
    outputs = []
    for raw_job in connection.zrange(finalized_key, 0, -1):
        job = json_loads(raw_job)
        outputs.append(job.get('workers_output', {}).get(last_worker_id))
    return to_json(outputs)
def test_get_meta_worker_workflows(self):
    """The filter meta-worker is wired to the expected workers in each workflow."""
    response = self.app.get('/meta_workers/filter/workflows')
    meta_worker_workflows = json_loads(response.data)
    self.assertEqual(
        ['filter_1'],
        # list(...) keeps this valid on Python 3, where .keys() is a view
        # that never equals a plain list; no-op on Python 2.
        list(meta_worker_workflows['workflow_1'].keys())
    )
    self.assertEqual(
        sorted(['filter_1', 'filter_2']),
        sorted(meta_worker_workflows['workflow_2'].keys())
    )
    self.assertEqual(
        WORKFLOWS[1]['workers']['filter_1']['default_inputs'],
        meta_worker_workflows['workflow_2']['filter_1']['default_inputs']
    )
def write_error_job(connection, worker_id, raw_data, error):
    """Attach a worker failure to the job payload and derive its error metric key.

    Tries to parse raw_data as JSON and record the error (message + current
    traceback) under workers_error[worker_id]; if parsing fails for any reason,
    builds a minimal payload wrapping the raw data instead and falls back to an
    'unknown' workflow/instance in the metric key.

    NOTE(review): `connection` and the final `metric_name` are unused within
    this view of the function — presumably consumed further down in the
    original file; confirm before refactoring them away.
    """
    metric_name = 'errors'
    try:
        workflow_instance_data = json_loads(raw_data)
        if not workflow_instance_data.get('workers_error'):
            workflow_instance_data['workers_error'] = {}
        workflow_instance_data['workers_error'][worker_id] = {
            'error': str(error),
            'traceback': traceback.format_exc()
        }
        metric_name = REDIS_KEY_INSTANCE_WORKER_ERRORS % (
            workflow_instance_data['workflow_instance']['workflow'],
            workflow_instance_data['workflow_instance']['id'],
            worker_id
        )
    # Was `except Exception, e:` — Python-2-only syntax, and `e` was never
    # used; the bare form is equivalent and valid on both Python 2 and 3.
    except Exception:
        workflow_instance_data = {'workers_error': {}, 'raw_data': raw_data}
        workflow_instance_data['workers_error'][worker_id] = {
            'error': 'cannot json_loads',
            'traceback': traceback.format_exc()
        }
        metric_name = REDIS_KEY_INSTANCE_WORKER_ERRORS % (
            'unknown', 'unknown', worker_id
        )
def test_get_meta_worker_workflow_set_outputs_type(self):
    """workflow_1's filter_1 advertises 'write' as its next worker type."""
    raw = self.app.get('/meta_workers/filter/workflows').data
    by_workflow = json_loads(raw)
    next_types = by_workflow['workflow_1']['filter_1'].get('__next_workers_types')
    self.assertEqual(['write'], next_types)
def test_get_meta_worker_config(self):
    """The filter meta-worker config is the global config overlaid with its own."""
    response = self.app.get('/meta_workers/filter/config')
    config = json_loads(response.data)
    # Copy before updating: CONFIG.get('all') returns the shared dict itself,
    # and mutating it in place would leak the 'filter' keys into the global
    # CONFIG seen by every other test.
    expected_config = dict(CONFIG.get('all'))
    expected_config.update(CONFIG.get('filter'))
    self.assertEqual(expected_config, config)
def test_workflow_get_workflow_instance_empty(self):
    """With no instances created, the listing for workflow_1 is empty."""
    listing = json_loads(
        self.app.get('/workflows/workflow_1/workflow_instances').data)
    self.assertEqual(0, len(listing))
def test_get_one_workflow(self):
    """Fetching workflow_1 by id returns its configured name."""
    raw = self.app.get('/workflows/workflow_1').data
    self.assertEqual('Script 1', json_loads(raw).get('name'))
def test_get_all_workflows_grouped(self):
    """Grouped listing buckets both workflows under 'No group'."""
    response = self.app.get('/workflows?by_group=1')
    workflows = json_loads(response.data)
    # list(...) keeps the comparison valid on Python 3, where dict.keys()
    # returns a view object that never equals a plain list; no-op on Python 2.
    self.assertEqual(['No group'], list(workflows.keys()))
    self.assertEqual(2, len(workflows['No group']))
def test_get_meta_workers(self):
    """The meta-workers listing contains exactly the four registered workers."""
    response = self.app.get('/meta_workers')
    listed = sorted(json_loads(response.data))
    expected = sorted(["filter", "write", "send_to_s3", "add_one"])
    self.assertEqual(expected, listed)
def test_inital_worker_output(self):
    """The queued job stores the initial data under the 'initial' worker output."""
    _, queued_raw = self.connection.lpush.call_args[0]
    queued_job = json_loads(queued_raw)
    self.assertEqual(self.initial_data, queued_job['workers_output']['initial'])
def get_config(master_mind_url, meta_worker):
    """Fetch and decode a meta-worker's configuration from the master mind server."""
    config_url = POLIGLO_SERVER_URL_WORKER_CONFIG % (master_mind_url, meta_worker)
    _, _, body = make_request(config_url)
    return json_loads(body)
def test_set_jobs_ids(self):
    """Starting an instance queues a job carrying exactly one job id."""
    _, queued_raw = self.connection.lpush.call_args[0]
    queued_job = json_loads(queued_raw)
    self.assertEqual(1, len(queued_job['jobs_ids']))
def get_job_data(raw_data, encoding='utf-8'):
    """Decode a raw job payload into a Python object.

    raw_data may be a byte string or already-decoded text; byte strings are
    decoded with `encoding` first. Checking `isinstance(raw_data, bytes)`
    replaces the Python-2-only `not isinstance(raw_data, unicode)` test:
    on Python 2 `bytes is str`, so behavior is unchanged, and the function
    now also works on Python 3 (where `unicode` is a NameError).
    """
    data_to_loads = raw_data
    if isinstance(raw_data, bytes):
        data_to_loads = raw_data.decode(encoding)
    return json_loads(data_to_loads)