def test_super_trigger_consume_subscription(self):
    subscription = self.dart.await_subscription_generation(self.subscription.id)
    self.assertEqual(subscription.data.state, SubscriptionState.ACTIVE)

    tr_args = {'subscription_id': self.subscription.id, 'unconsumed_data_size_in_bytes': 49524}
    tr = Trigger(data=TriggerData('test-trigger', 'subscription_batch', None, tr_args, TriggerState.ACTIVE))
    self.trigger = self.dart.save_trigger(tr)

    st_args = {'fire_after': 'ALL', 'completed_trigger_ids': [self.trigger.id]}
    st = Trigger(data=TriggerData('test-super-trigger', 'super', [self.workflow.id], st_args, TriggerState.ACTIVE))
    super_trigger = self.dart.save_trigger(st)

    wf_instances = self.dart.await_workflow_completion(self.workflow.id, num_instances=3)
    for wfi in wf_instances:
        self.assertEqual(wfi.data.state, WorkflowInstanceState.COMPLETED)

    stats = self.dart.get_subscription_element_stats(self.subscription.id)
    ses = SubscriptionElementStats(SubscriptionElementState.CONSUMED, 3, 152875004 + 834620 + 49524)
    self.assertEqual([s.to_dict() for s in stats], [ses.to_dict()])

    self.dart.delete_trigger(super_trigger.id)
    self.dart.delete_trigger(self.trigger.id)
def test_trigger_schema(self):
    args = {'completed_workflow_id': 'ABC123'}
    state = None
    tr = Trigger(data=TriggerData('test-trigger', 'workflow_completion', ['ABC123'], args, state=state))
    obj_before = tr.to_dict()
    tr = default_and_validate(tr, trigger_schema(workflow_completion_trigger.params_json_schema))
    # state should be defaulted to INACTIVE
    self.assertNotEqual(obj_before, tr.to_dict())
def setUp(self):
    dart = Dart(host='localhost', port=5000)
    """ :type dart: dart.client.python.dart_client.Dart """
    self.dart = dart

    dst_args = {'action_sleep_time_in_seconds': 0}
    dst0 = Datastore(data=DatastoreData('test-datastore0', 'no_op_engine', args=dst_args, state=DatastoreState.TEMPLATE))
    self.datastore0 = self.dart.save_datastore(dst0)
    dst1 = Datastore(data=DatastoreData('test-datastore1', 'no_op_engine', args=dst_args, state=DatastoreState.TEMPLATE))
    self.datastore1 = self.dart.save_datastore(dst1)

    wf0 = Workflow(data=WorkflowData('test-workflow0', self.datastore0.id, state=WorkflowState.ACTIVE))
    self.workflow0 = self.dart.save_workflow(wf0, self.datastore0.id)
    wf1 = Workflow(data=WorkflowData('test-workflow1', self.datastore1.id, state=WorkflowState.ACTIVE))
    self.workflow1 = self.dart.save_workflow(wf1, self.datastore1.id)

    a00 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
    a01 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
    self.action00, self.action01 = self.dart.save_actions([a00, a01], workflow_id=self.workflow0.id)

    a10 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
    a11 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
    self.action10, self.action11 = self.dart.save_actions([a10, a11], workflow_id=self.workflow1.id)

    tr_args = {'completed_workflow_id': self.workflow0.id}
    tr = Trigger(data=TriggerData('test-trigger', 'workflow_completion', None, tr_args, TriggerState.ACTIVE))
    self.trigger = self.dart.save_trigger(tr)

    st_args = {'fire_after': 'ALL', 'completed_trigger_ids': [self.trigger.id]}
    st = Trigger(data=TriggerData('test-super-trigger', 'super', [self.workflow1.id], st_args, TriggerState.ACTIVE))
    self.super_trigger = self.dart.save_trigger(st)
def _resolve_and_save_trigger(self, entity_id, entity_map, actual_entities_by_node_id, actual_entities_by_unsaved_id):
    actual_id, unsaved_id = self._resolve(EntityType.trigger, entity_id, entity_map, actual_entities_by_unsaved_id)
    if actual_id:
        return actual_id
    node_id = self._node_id(EntityType.trigger, unsaved_id)
    trigger = Trigger.from_dict(entity_map['unsaved_entities'][node_id])
    assert isinstance(trigger, Trigger)
    if trigger.data.args and trigger.data.args.get('completed_workflow_id'):
        trigger.data.args['completed_workflow_id'] = self._resolve_and_save_workflow(
            trigger.data.args['completed_workflow_id'], entity_map, actual_entities_by_node_id, actual_entities_by_unsaved_id)
    if trigger.data.args and trigger.data.args.get('event_id'):
        trigger.data.args['event_id'] = self._resolve_and_save_event(
            trigger.data.args['event_id'], entity_map, actual_entities_by_node_id, actual_entities_by_unsaved_id)
    if trigger.data.args and trigger.data.args.get('subscription_id'):
        trigger.data.args['subscription_id'] = self._resolve_and_save_subscription(
            trigger.data.args['subscription_id'], entity_map, actual_entities_by_node_id, actual_entities_by_unsaved_id)
    if trigger.data.workflow_ids:
        wf_ids = set()
        for wf_id in trigger.data.workflow_ids:
            wf_ids.add(self._resolve_and_save_workflow(wf_id, entity_map, actual_entities_by_node_id, actual_entities_by_unsaved_id))
        trigger.data.workflow_ids = list(wf_ids)
    if trigger.data.args.get('completed_trigger_ids'):
        t_ids = set()
        for t_id in trigger.data.args['completed_trigger_ids']:
            t_ids.add(self._resolve_and_save_trigger(t_id, entity_map, actual_entities_by_node_id, actual_entities_by_unsaved_id))
        trigger.data.args['completed_trigger_ids'] = list(t_ids)
    trigger = self._trigger_service.save_trigger(trigger, commit_and_initialize=False, flush=True)
    actual_entities_by_node_id[node_id] = trigger
    actual_entities_by_unsaved_id[unsaved_id] = trigger
    return trigger.id
def post_trigger():
    trigger = Trigger.from_dict(request.get_json())
    return {'results': trigger_service().save_trigger(trigger=trigger, user_id=current_user.email).to_dict()}
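# A minimal client-side sketch of what a raw HTTP call into an endpoint like post_trigger()
# above could look like. The route path '/api/1/trigger', the port, and the serialized 'ACTIVE'
# state string are illustrative assumptions; in practice the dart client's save_trigger (used
# throughout these snippets) wraps this call, and the 'results' key matches the return above.
import requests

trigger_body = {
    'data': {
        'name': 'example-trigger',                       # hypothetical trigger name
        'trigger_type_name': 'workflow_completion',
        'workflow_ids': ['<workflow-id>'],               # placeholder id for illustration
        'args': {'completed_workflow_id': '<completed-workflow-id>'},
        'state': 'ACTIVE',
    }
}
response = requests.post('http://localhost:5000/api/1/trigger', json=trigger_body)
response.raise_for_status()
print response.json()['results']['id']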
def test_trigger_schema_invalid(self):
    with self.assertRaises(DartValidationException) as context:
        name = None
        args = {'completed_workflow_id': 'ABC123'}
        tr = Trigger(data=TriggerData(name, 'workflow_completion', ['ABC123'], args))
        # should fail because the name is missing
        default_and_validate(tr, trigger_schema(workflow_completion_trigger.params_json_schema))
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_crud(self):
    args = {'completed_workflow_id': self.workflow.id}
    tr = Trigger(data=TriggerData('test-trigger', 'workflow_completion', [self.workflow.id], args))
    posted_tr = self.dart.save_trigger(tr)
    self.assertEqual(posted_tr.data.to_dict(), tr.data.to_dict())

    trigger = self.dart.get_trigger(posted_tr.id)
    self.assertEqual(posted_tr.to_dict(), trigger.to_dict())

    self.dart.delete_trigger(trigger.id)
    try:
        self.dart.get_trigger(trigger.id)
    except DartRequestException as e:
        self.assertEqual(e.response.status_code, 404)
        return
    self.fail('trigger should have been missing after delete!')
def test_crud(self):
    args = {'completed_workflow_id': self.workflow.id}
    tr = Trigger(data=TriggerData('test-trigger', 'workflow_completion', [self.workflow.id], args))
    posted_tr = self.dart.save_trigger(tr)
    tr.data.user_id = posted_tr.data.user_id

    # the saved trigger's tags are populated with the workflow id, so clear tags on both sides for this comparison
    posted_tr_data = posted_tr.data.to_dict()
    posted_tr_data['tags'] = []
    tr_data = tr.data.to_dict()
    tr_data['tags'] = []
    self.assertEqual(posted_tr_data, tr_data)

    trigger = self.dart.get_trigger(posted_tr.id)
    self.assertEqual(posted_tr.to_dict(), trigger.to_dict())

    self.dart.delete_trigger(trigger.id)
    try:
        self.dart.get_trigger(trigger.id)
    except DartRequestException as e:
        self.assertEqual(e.response.status_code, 404)
        return
    self.fail('trigger should have been missing after delete!')
def add_no_op_engine_sub_graphs(config):
    engine_config = config['engines']['no_op_engine']
    opts = engine_config['options']
    dart = Dart(opts['dart_host'], opts['dart_port'], opts['dart_api_version'])
    assert isinstance(dart, Dart)
    _logger.info('saving no_op_engine sub_graphs')

    engine_id = None
    for e in dart.get_engines():
        if e.data.name == 'no_op_engine':
            engine_id = e.id
    if not engine_id:
        raise Exception('no_op_engine was not found')

    subgraph_definitions = [
        SubGraphDefinition(data=SubGraphDefinitionData(
            name='workflow chaining demo',
            description='demonstrate workflow chaining',
            engine_name='no_op_engine',
            related_type=EntityType.datastore,
            related_is_a=Relationship.PARENT,
            workflows=[
                Workflow(id=Ref.workflow(1), data=WorkflowData(
                    name='no-op-workflow-chaining-wf1',
                    datastore_id=Ref.parent(),
                    engine_name='no_op_engine',
                    state=WorkflowState.ACTIVE,
                )),
                Workflow(id=Ref.workflow(2), data=WorkflowData(
                    name='no-op-workflow-chaining-wf2',
                    datastore_id=Ref.parent(),
                    engine_name='no_op_engine',
                    state=WorkflowState.ACTIVE,
                )),
            ],
            actions=[
                Action(id=Ref.action(1), data=ActionData(
                    name=NoOpActionTypes.action_that_succeeds.name,
                    engine_name='no_op_engine',
                    action_type_name=NoOpActionTypes.action_that_succeeds.name,
                    workflow_id=Ref.workflow(1),
                    order_idx=1,
                    state=ActionState.TEMPLATE,
                )),
                Action(id=Ref.action(2), data=ActionData(
                    name=NoOpActionTypes.action_that_succeeds.name,
                    action_type_name=NoOpActionTypes.action_that_succeeds.name,
                    engine_name='no_op_engine',
                    workflow_id=Ref.workflow(1),
                    order_idx=2,
                    state=ActionState.TEMPLATE,
                )),
                Action(id=Ref.action(3), data=ActionData(
                    name=NoOpActionTypes.action_that_succeeds.name,
                    action_type_name=NoOpActionTypes.action_that_succeeds.name,
                    engine_name='no_op_engine',
                    workflow_id=Ref.workflow(1),
                    order_idx=3,
                    state=ActionState.TEMPLATE,
                )),
                Action(id=Ref.action(4), data=ActionData(
                    name=NoOpActionTypes.action_that_succeeds.name,
                    action_type_name=NoOpActionTypes.action_that_succeeds.name,
                    engine_name='no_op_engine',
                    workflow_id=Ref.workflow(1),
                    order_idx=4,
                    state=ActionState.TEMPLATE,
                )),
                Action(id=Ref.action(5), data=ActionData(
                    name=NoOpActionTypes.action_that_succeeds.name,
                    action_type_name=NoOpActionTypes.action_that_succeeds.name,
                    engine_name='no_op_engine',
                    workflow_id=Ref.workflow(2),
                    order_idx=1,
                    state=ActionState.TEMPLATE,
                )),
                Action(id=Ref.action(6), data=ActionData(
                    name=NoOpActionTypes.action_that_succeeds.name,
                    action_type_name=NoOpActionTypes.action_that_succeeds.name,
                    engine_name='no_op_engine',
                    workflow_id=Ref.workflow(2),
                    order_idx=2,
                    state=ActionState.TEMPLATE,
                )),
                Action(id=Ref.action(7), data=ActionData(
                    name=NoOpActionTypes.action_that_fails.name,
                    action_type_name=NoOpActionTypes.action_that_fails.name,
                    engine_name='no_op_engine',
                    workflow_id=Ref.workflow(2),
                    order_idx=3,
                    state=ActionState.TEMPLATE,
                )),
            ],
            triggers=[
                Trigger(id=Ref.trigger(1), data=TriggerData(
                    name='no-op-trigger-workflow-completion',
                    trigger_type_name=workflow_completion_trigger.name,
                    workflow_ids=[Ref.workflow(2)],
                    state=TriggerState.ACTIVE,
                    args={'completed_workflow_id': Ref.workflow(1)})),
            ],
        ))
    ]
    for e in subgraph_definitions:
        s = dart.save_subgraph_definition(e, engine_id)
        _logger.info('created subgraph_definition: %s' % s.id)
def post_trigger():
    trigger = Trigger.from_dict(request.get_json())
    return {'results': trigger_service().save_trigger(trigger).to_dict()}
        on_failure_email=['*****@*****.**'],
        on_success_email=['*****@*****.**'],
        on_started_email=['*****@*****.**'],
    )),
    datastore_id=datastore.id)
print 'created workflow: %s' % workflow.id

wf_actions = dart.save_actions(actions=[
    Action(data=ActionData('consume_subscription', 'consume_subscription', state=ActionState.TEMPLATE, args={
        'subscription_id': subscription.id,
        'target_file_format': FileFormat.PARQUET,
        'target_row_format': RowFormat.NONE,
        'target_compression': Compression.SNAPPY,
    })),
], workflow_id=workflow.id)
print 'created workflow action: %s' % wf_actions[0].id

trigger = dart.save_trigger(Trigger(data=TriggerData(
    name='rmn_direct_trigger_DW-3307',
    trigger_type_name='subscription_batch',
    workflow_ids=[workflow.id],
    args={
        'subscription_id': subscription.id,
        'unconsumed_data_size_in_bytes': 16000000
    })))
print 'created trigger: %s' % trigger.id
        on_failure_email=['*****@*****.**'],
        on_success_email=['*****@*****.**'],
        on_started_email=['*****@*****.**'],
    )),
    datastore_id=datastore.id)
print 'created workflow: %s' % workflow.id

a2 = dart.save_actions(actions=[
    Action(data=ActionData('consume_subscription', 'consume_subscription', state=ActionState.TEMPLATE, args={
        'subscription_id': subscription.id,
        'target_file_format': FileFormat.PARQUET,
        'target_row_format': RowFormat.NONE,
        'target_compression': Compression.SNAPPY,
    })),
], workflow_id=workflow.id)[0]
print 'created workflow action: %s' % a2.id

trigger = dart.save_trigger(Trigger(data=TriggerData(
    name='owen_eu_parquet_trigger_DW-3213_v3',
    trigger_type_name='subscription_batch',
    workflow_ids=[workflow.id],
    args={
        'subscription_id': subscription.id,
        'unconsumed_data_size_in_bytes': 16000000
    })))
print 'created trigger: %s' % trigger.id
def patch_trigger(trigger):
    """ :type trigger: dart.model.trigger.Trigger """
    p = JsonPatch(request.get_json())
    return update_trigger(trigger, Trigger.from_dict(p.apply(trigger.to_dict())))
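# A minimal sketch of the kind of JSON Patch body patch_trigger() above could receive, applied
# the same way with the jsonpatch library. The '/data/state' path and the trigger_dict shape are
# illustrative assumptions, inferred from the Trigger(data=TriggerData(...)) constructors in
# these snippets rather than from the actual serialized model.
from jsonpatch import JsonPatch

trigger_dict = {'id': 'T123', 'data': {'name': 'test-trigger', 'state': 'ACTIVE'}}  # hypothetical shape
patch = JsonPatch([{'op': 'replace', 'path': '/data/state', 'value': 'INACTIVE'}])
patched = patch.apply(trigger_dict)  # returns a new dict; trigger_dict is left untouched
assert patched['data']['state'] == 'INACTIVE'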
            datastore.id,
            state=WorkflowState.ACTIVE,
            on_failure_email=['*****@*****.**'],
            on_success_email=['*****@*****.**'],
            on_started_email=['*****@*****.**'],
        )
    ),
    datastore.id)
print 'created workflow: %s' % workflow.id

a0, a1 = dart.save_actions([
    Action(data=ActionData('start_datastore', 'start_datastore', state=ActionState.TEMPLATE)),
    Action(data=ActionData('load_dataset', 'load_dataset', state=ActionState.TEMPLATE, args={
        'dataset_id': dataset.id,
        's3_path_start_prefix_inclusive': 's3://example-bucket/prd/beacon/native_app/v2/parquet/snappy/createdpartition=2015-06-27',
    })),
], workflow_id=workflow.id)
print 'created action: %s' % a0.id
print 'created action: %s' % a1.id

event = dart.save_event(Event(data=EventData('beacon_native_app_to_parquet_emr_job_completion', state=EventState.ACTIVE)))
print 'created event: %s' % event.id

trigger = dart.save_trigger(Trigger(data=TriggerData(
    'beacon_native_app_to_parquet_emr_job_completion_trigger',
    'event',
    [workflow.id],
    {'event_id': event.id})))
print 'created trigger: %s' % trigger.id
def _get_engineless_static_subgraphs_by_related_type(graph_entity_service):
    sub_graph_map = {}

    d_entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Dataset(id=Ref.dataset(1), data=DatasetData(None, None, None, None, None, columns=[], partitions=[]))
    ])
    e_entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Event(id=Ref.event(1), data=EventData('event'))
    ])
    sub_graph_map[None] = [
        SubGraph(
            name='dataset',
            description='create a new dataset entity',
            related_type=None,
            related_is_a=None,
            graph=graph_entity_service.to_graph(None, d_entity_models),
            entity_map=graph_entity_service.to_entity_map(d_entity_models),
            icon='⬟',
        ),
        SubGraph(
            name='event',
            description='create a new event entity',
            related_type=None,
            related_is_a=None,
            graph=graph_entity_service.to_graph(None, e_entity_models),
            entity_map=graph_entity_service.to_entity_map(e_entity_models),
            icon='★',
        ),
    ]

    entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Subscription(id=Ref.subscription(1), data=SubscriptionData('subscription', Ref.parent()))
    ])
    sub_graph_map[EntityType.dataset] = [
        SubGraph(
            name='subscription',
            description='create a new subscription entity',
            related_type=EntityType.dataset,
            related_is_a=Relationship.PARENT,
            graph=graph_entity_service.to_graph(None, entity_models),
            entity_map=graph_entity_service.to_entity_map(entity_models),
            icon='⬢',
        ),
    ]

    entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Trigger(id=Ref.trigger(1), data=TriggerData(
            name='%s_trigger' % event_trigger.name,
            trigger_type_name=event_trigger.name,
            state=TriggerState.INACTIVE,
            workflow_ids=[],
            args={'event_id': Ref.parent()}))
    ])
    sub_graph_map[EntityType.event] = [
        SubGraph(
            name='event trigger',
            description='create a new event trigger entity',
            related_type=EntityType.event,
            related_is_a=Relationship.PARENT,
            graph=graph_entity_service.to_graph(None, entity_models),
            entity_map=graph_entity_service.to_entity_map(entity_models),
            icon='▼',
        ),
    ]

    entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Trigger(id=Ref.trigger(1), data=TriggerData(
            name='%s_trigger' % subscription_batch_trigger.name,
            trigger_type_name=subscription_batch_trigger.name,
            state=TriggerState.INACTIVE,
            workflow_ids=[],
            args={
                'subscription_id': Ref.parent(),
                'unconsumed_data_size_in_bytes': 1000000
            }))
    ])
    sub_graph_map[EntityType.subscription] = [
        SubGraph(
            name='subscription batch trigger',
            description='create a new subscription batch trigger entity',
            related_type=EntityType.subscription,
            related_is_a=Relationship.PARENT,
            graph=graph_entity_service.to_graph(None, entity_models),
            entity_map=graph_entity_service.to_entity_map(entity_models),
            icon='▼',
        ),
    ]
    return sub_graph_map
    )),
    datastore_id=datastore.id)
print 'created workflow: %s' % workflow.id

a2 = dart.save_actions(actions=[
    Action(data=ActionData('consume_subscription', 'consume_subscription', state=ActionState.TEMPLATE, args={
        'subscription_id': subscription.id,
        'target_file_format': FileFormat.PARQUET,
        'target_row_format': RowFormat.NONE,
        'target_compression': Compression.SNAPPY,
    })),
], workflow_id=workflow.id)[0]
print 'created workflow action: %s' % a2.id

trigger = dart.save_trigger(Trigger(data=TriggerData(
    name='owen_eu_parquet_trigger_DW-3411_v1',
    trigger_type_name='subscription_batch',
    workflow_ids=[workflow.id],
    args={
        'subscription_id': subscription.id,
        'unconsumed_data_size_in_bytes': 16000000
    },
    state=TriggerState.ACTIVE,
)))
print 'created trigger: %s' % trigger.id
def put_trigger(trigger):
    """ :type trigger: dart.model.trigger.Trigger """
    updated_trigger = Trigger.from_dict(request.get_json())
    trigger = trigger_service().update_trigger_state(trigger, updated_trigger.data.state)
    return {'results': trigger.to_dict()}
def setUp(self):
    dart = Dart(host='localhost', port=5000)
    """ :type dart: dart.client.python.dart_client.Dart """
    self.dart = dart

    cs = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
    df = DataFormat(FileFormat.PARQUET, RowFormat.NONE)
    dataset_data = DatasetData('test-dataset0', 'test_dataset_table0', 's3://test/dataset/0/%s' + random_id(), df, cs)
    self.dataset0 = self.dart.save_dataset(Dataset(data=dataset_data))

    cs = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
    df = DataFormat(FileFormat.PARQUET, RowFormat.NONE)
    dataset1_location = 's3://test/dataset/1/%s' + random_id()
    dataset_data = DatasetData('test-dataset1', 'test_dataset_table1', dataset1_location, df, cs)
    self.dataset1 = self.dart.save_dataset(Dataset(data=dataset_data))

    cs = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
    df = DataFormat(FileFormat.PARQUET, RowFormat.NONE)
    dataset_data = DatasetData('test-dataset2-no-show', 'test_dataset_table2', 's3://test/dataset/2/%s' + random_id(), df, cs)
    self.dataset2 = self.dart.save_dataset(Dataset(data=dataset_data))

    s = Subscription(data=SubscriptionData('test-subscription0', self.dataset0.id))
    self.subscription0 = self.dart.save_subscription(s)
    s = Subscription(data=SubscriptionData('test-subscription2-no-show', self.dataset2.id))
    self.subscription2 = self.dart.save_subscription(s)

    dst_args = {'action_sleep_time_in_seconds': 0}
    dst = Datastore(data=DatastoreData('test-datastore0', 'no_op_engine', args=dst_args, state=DatastoreState.TEMPLATE))
    self.datastore0 = self.dart.save_datastore(dst)
    dst = Datastore(data=DatastoreData('test-datastore1', 'no_op_engine', args=dst_args, state=DatastoreState.TEMPLATE))
    self.datastore1 = self.dart.save_datastore(dst)
    dst = Datastore(data=DatastoreData('test-datastore2-no-show', 'no_op_engine', args=dst_args, state=DatastoreState.ACTIVE))
    self.datastore2 = self.dart.save_datastore(dst)

    wf0 = Workflow(data=WorkflowData('test-workflow0', self.datastore0.id, state=WorkflowState.ACTIVE))
    self.workflow0 = self.dart.save_workflow(wf0, self.datastore0.id)
    wf1 = Workflow(data=WorkflowData('test-workflow1', self.datastore1.id, state=WorkflowState.ACTIVE))
    self.workflow1 = self.dart.save_workflow(wf1, self.datastore1.id)
    wf2 = Workflow(data=WorkflowData('test-workflow2-no-show', self.datastore2.id, state=WorkflowState.ACTIVE))
    self.workflow2 = self.dart.save_workflow(wf2, self.datastore2.id)

    a_args = {'source_hdfs_path': 'hdfs:///user/hive/warehouse/test', 'destination_s3_path': dataset1_location}
    a00 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
    a01 = Action(data=ActionData(NoOpActionTypes.consume_subscription.name, NoOpActionTypes.consume_subscription.name, {'subscription_id': self.subscription0.id}, state=ActionState.TEMPLATE))
    a02 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
    a03 = Action(data=ActionData(NoOpActionTypes.copy_hdfs_to_s3_action.name, NoOpActionTypes.copy_hdfs_to_s3_action.name, a_args, state=ActionState.TEMPLATE))
    a04 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
    self.action00, self.action01, self.action02, self.action03, self.action04 = \
        self.dart.save_actions([a00, a01, a02, a03, a04], workflow_id=self.workflow0.id)

    a10 = Action(data=ActionData(NoOpActionTypes.load_dataset.name, NoOpActionTypes.load_dataset.name, {'dataset_id': self.dataset1.id}, state=ActionState.TEMPLATE))
    self.action10 = self.dart.save_actions([a10], workflow_id=self.workflow1.id)

    a20 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.HAS_NEVER_RUN))
    a21 = Action(data=ActionData(NoOpActionTypes.load_dataset.name, NoOpActionTypes.load_dataset.name, {'dataset_id': self.dataset2.id}, state=ActionState.TEMPLATE))
    self.action20 = self.dart.save_actions([a20], datastore_id=self.datastore2.id)
    self.action21 = self.dart.save_actions([a21], workflow_id=self.workflow2.id)

    self.event1 = self.dart.save_event(Event(data=EventData('test-event1', state=EventState.ACTIVE)))
    self.event2 = self.dart.save_event(Event(data=EventData('test-event2-no-show', state=EventState.ACTIVE)))

    tr_args = {'event_id': self.event1.id}
    tr = Trigger(data=TriggerData('test-event-trigger1', 'event', [self.workflow1.id], tr_args, TriggerState.ACTIVE))
    self.event_trigger1 = self.dart.save_trigger(tr)

    tr_args = {'event_id': self.event2.id}
    tr = Trigger(data=TriggerData('test-event-trigger2-no-show', 'event', [self.workflow2.id], tr_args, TriggerState.ACTIVE))
    self.event_trigger2 = self.dart.save_trigger(tr)

    st_args = {'fire_after': 'ALL', 'completed_trigger_ids': [self.event_trigger1.id]}
    st = Trigger(data=TriggerData('test-super-trigger1', 'super', None, st_args, TriggerState.ACTIVE))
    self.super_trigger1 = self.dart.save_trigger(st)

    st_args = {'fire_after': 'ANY', 'completed_trigger_ids': [self.super_trigger1.id]}
    st = Trigger(data=TriggerData('test-super-trigger2', 'super', [self.workflow1.id], st_args, TriggerState.ACTIVE))
    self.super_trigger2 = self.dart.save_trigger(st)
def put_trigger(trigger):
    """ :type trigger: dart.model.trigger.Trigger """
    return update_trigger(trigger, Trigger.from_dict(request.get_json()))
def _get_static_subgraphs_by_related_type(engine, graph_entity_service):
    engine_name = engine.data.name
    sub_graph_map = {EntityType.workflow: []}

    for action_type in engine.data.supported_action_types:
        entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
            Action(id=Ref.action(1), data=ActionData(
                name=action_type.name,
                action_type_name=action_type.name,
                engine_name=engine_name,
                workflow_id=Ref.parent(),
                state=ActionState.TEMPLATE,
                args={} if action_type.params_json_schema else None))
        ])
        sub_graph_map[EntityType.workflow].append(
            SubGraph(
                name=action_type.name,
                description=action_type.description,
                related_type=EntityType.workflow,
                related_is_a=Relationship.PARENT,
                graph=graph_entity_service.to_graph(None, entity_models),
                entity_map=graph_entity_service.to_entity_map(entity_models),
                icon='●',
            ))

    entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Trigger(id=Ref.trigger(1), data=TriggerData(
            name='%s_trigger' % workflow_completion_trigger.name,
            trigger_type_name=workflow_completion_trigger.name,
            state=TriggerState.INACTIVE,
            workflow_ids=[],
            args={'completed_workflow_id': Ref.parent()}))
    ])
    sub_graph_map[EntityType.workflow].extend([
        SubGraph(
            name='workflow completion trigger',
            description='create a new workflow_completion trigger entity',
            related_type=EntityType.workflow,
            related_is_a=Relationship.PARENT,
            graph=graph_entity_service.to_graph(None, entity_models),
            entity_map=graph_entity_service.to_entity_map(entity_models),
            icon='▼',
        ),
    ])

    entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Trigger(id=Ref.trigger(1), data=TriggerData(
            name='%s_trigger' % scheduled_trigger.name,
            trigger_type_name=scheduled_trigger.name,
            state=TriggerState.INACTIVE,
            workflow_ids=[Ref.child()],
        ))
    ])
    sub_graph_map[EntityType.workflow].extend([
        SubGraph(
            name='scheduled trigger',
            description='create a new scheduled trigger entity',
            related_type=EntityType.workflow,
            related_is_a=Relationship.CHILD,
            graph=graph_entity_service.to_graph(None, entity_models),
            entity_map=graph_entity_service.to_entity_map(entity_models),
            icon='▼',
        ),
    ])

    entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Trigger(id=Ref.trigger(1), data=TriggerData(
            name='%s_trigger' % super_trigger.name,
            trigger_type_name=super_trigger.name,
            state=TriggerState.INACTIVE,
            workflow_ids=[Ref.child()],
        ))
    ])
    sub_graph_map[EntityType.workflow].extend([
        SubGraph(
            name='super trigger',
            description='create a new super trigger entity',
            related_type=EntityType.workflow,
            related_is_a=Relationship.CHILD,
            graph=graph_entity_service.to_graph(None, entity_models),
            entity_map=graph_entity_service.to_entity_map(entity_models),
            icon='▼',
        ),
    ])

    entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
        Workflow(id=Ref.workflow(1), data=WorkflowData(
            name='workflow',
            datastore_id=Ref.parent(),
            engine_name=engine_name,
            state=WorkflowState.INACTIVE))
    ])
    sub_graph_map[EntityType.datastore] = [
        SubGraph(
            name='workflow',
            description='create a new workflow entity',
            related_type=EntityType.datastore,
            related_is_a=Relationship.PARENT,
            graph=graph_entity_service.to_graph(None, entity_models),
            entity_map=graph_entity_service.to_entity_map(entity_models),
            icon='◆',
        )
    ]

    for action_type in engine.data.supported_action_types:
        entity_models = graph_entity_service.to_entity_models_with_randomized_ids([
            Action(id=Ref.action(1), data=ActionData(
                name=action_type.name,
                action_type_name=action_type.name,
                engine_name=engine_name,
                datastore_id=Ref.parent(),
                state=ActionState.HAS_NEVER_RUN,
                args={} if action_type.params_json_schema else None))
        ])
        sub_graph_map[EntityType.datastore].append(
            SubGraph(
                name=action_type.name,
                description=action_type.description,
                related_type=EntityType.datastore,
                related_is_a=Relationship.PARENT,
                graph=graph_entity_service.to_graph(None, entity_models),
                entity_map=graph_entity_service.to_entity_map(entity_models),
                icon='●',
            ))
    return sub_graph_map