def test_workflow_schema_invalid(self):
    """A workflow with a missing name must fail schema validation."""
    with self.assertRaises(DartValidationException) as context:
        # name is intentionally None to trigger the failure
        invalid_wf = Workflow(data=WorkflowData(None, 'ABC123'))
        default_and_validate(invalid_wf, workflow_schema())
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_event_schema_invalid(self):
    """An event with a missing name must fail schema validation."""
    with self.assertRaises(DartValidationException) as context:
        # name is intentionally None to trigger the failure
        invalid_event = Event(data=EventData(None))
        default_and_validate(invalid_event, event_schema())
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_trigger_schema_invalid(self):
    """A trigger with a missing name must fail schema validation."""
    with self.assertRaises(DartValidationException) as context:
        trigger_args = {'completed_workflow_id': 'ABC123'}
        # name is intentionally None to trigger the failure
        invalid = Trigger(data=TriggerData(None, 'workflow_completion', ['ABC123'], trigger_args))
        default_and_validate(invalid, trigger_schema(workflow_completion_trigger.params_json_schema))
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_action_schema_invalid(self):
    """An action missing its required destination_s3_path must fail validation."""
    with self.assertRaises(DartValidationException) as context:
        # 'destination_s3_path' is deliberately omitted; the schema requires it
        params = {'source_hdfs_path': 'hdfs:///user/hive/warehouse/dtest4'}
        bad_action = Action(data=ActionData('copy_hdfs_to_s3', 'copy_hdfs_to_s3', params, engine_name='no_op_engine'))
        default_and_validate(bad_action, action_schema(NoOpActionTypes.copy_hdfs_to_s3_action.params_json_schema)).to_dict()
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_datastore_schema_invalid(self):
    """A datastore with an unrecognized args key must fail validation."""
    with self.assertRaises(DartValidationException) as context:
        # misspelled key ('..._yo') should be rejected by the options schema
        bad_args = {'data_to_freespace_ratio_yo': 0}
        datastore = Datastore(data=DatastoreData('test-datastore', 'fake_engine', args=bad_args))
        default_and_validate(datastore, datastore_schema(self.options_json_schema))
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_dataset_schema_invalid(self):
    """A dataset with no location must fail schema validation."""
    with self.assertRaises(DartValidationException) as context:
        cols = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
        fmt = DataFormat(FileFormat.PARQUET, RowFormat.NONE)
        # location is intentionally None; the schema requires it
        bad_ds = Dataset(data=DatasetData('test-dataset', 'test_dataset_table', None, fmt, cols))
        default_and_validate(bad_ds, dataset_schema())
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_subscription_schema_invalid(self):
    """A subscription with a missing name must fail schema validation."""
    with self.assertRaises(DartValidationException) as context:
        s3_start = 's3://my-test-bucket/impala/impala'
        s3_end = 's3://my-test-bucket/impala/install'
        path_regex = '.*\\.rpm'
        # name is intentionally None to trigger the failure
        bad_sub = Subscription(data=SubscriptionData(None, 'ABC123', s3_start, s3_end, path_regex))
        default_and_validate(bad_sub, subscription_schema())
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_subscription_schema_invalid(self):
    """Schema validation must reject a subscription whose name is absent."""
    with self.assertRaises(DartValidationException) as context:
        start_path = 's3://my-test-bucket/impala/impala'
        end_path = 's3://my-test-bucket/impala/install'
        rpm_regex = '.*\\.rpm'
        # a None name violates the subscription schema
        sub = Subscription(data=SubscriptionData(None, 'ABC123', start_path, end_path, rpm_regex))
        default_and_validate(sub, subscription_schema())
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_trigger_schema_invalid(self):
    """Schema validation must reject a trigger whose name is absent."""
    with self.assertRaises(DartValidationException) as context:
        completion_args = {'completed_workflow_id': 'ABC123'}
        # a None name violates the trigger schema
        bad_trigger = Trigger(data=TriggerData(None, 'workflow_completion', ['ABC123'], completion_args))
        schema = trigger_schema(workflow_completion_trigger.params_json_schema)
        default_and_validate(bad_trigger, schema)
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_event_schema(self):
    """Validation should default a None state (to INACTIVE), changing the dict."""
    event = Event(data=EventData('test-event', state=None))
    before = event.to_dict()
    event = default_and_validate(event, event_schema())
    # defaulting must have altered the serialized form
    self.assertNotEqual(before, event.to_dict())
def save_trigger(self, trigger, commit_and_initialize=True, flush=False):
    """Validate and persist a trigger, then let its processor initialize it.

    :type trigger: dart.model.trigger.Trigger
    :param commit_and_initialize: when True, commit the row and run the
        processor's initialize_trigger; on failure the row is deleted again.
    :param flush: when True, flush the session (assigns DB state without commit).
    :raises DartValidationException: for manual/unknown trigger types or
        schema validation failures.
    """
    trigger_type_name = trigger.data.trigger_type_name
    # manual triggers are fired on demand and are never persisted
    if trigger_type_name == self._manual_trigger_processor.trigger_type().name:
        raise DartValidationException('manual triggers cannot be saved')
    trigger_processor = self._trigger_processors.get(trigger_type_name)
    if not trigger_processor:
        raise DartValidationException('unknown trigger_type_name: %s' % trigger_type_name)
    assert isinstance(trigger_processor, TriggerProcessor)
    # apply defaults + validation using the processor-specific params schema
    trigger = default_and_validate(trigger, trigger_schema(trigger_processor.trigger_type().params_json_schema))
    trigger_dao = TriggerDao()
    trigger_dao.id = random_id()
    trigger_dao.data = trigger.data.to_dict()
    db.session.add(trigger_dao)
    if flush:
        db.session.flush()
    trigger = trigger_dao.to_model()
    if commit_and_initialize:
        db.session.commit()
        trigger = trigger_dao.to_model()
        try:
            trigger_processor.initialize_trigger(trigger, self)
        except:
            # initialization failed: remove the just-inserted row so no
            # orphan trigger remains, then re-raise the original error
            db.session.delete(trigger_dao)
            db.session.commit()
            raise
    return trigger
def default_and_validate_trigger(self, trigger):
    """Apply defaults/validation for the trigger's registered processor schema."""
    processor = self._trigger_processors.get(trigger.data.trigger_type_name)
    schema = trigger_schema(processor.trigger_type().params_json_schema)
    return default_and_validate(trigger, schema)
def test_workflow_schema(self):
    """Validation should default a None state (to INACTIVE), changing the dict."""
    workflow = Workflow(data=WorkflowData('test-workflow', 'ABC123', state=None))
    before = workflow.to_dict()
    workflow = default_and_validate(workflow, workflow_schema())
    # defaulting must have altered the serialized form
    self.assertNotEqual(before, workflow.to_dict())
def save_trigger(self, trigger, commit_and_initialize=True, flush=False):
    """Validate and persist a trigger, then let its processor initialize it.

    :type trigger: dart.model.trigger.Trigger
    :param commit_and_initialize: when True, commit the row and run the
        processor's initialize_trigger; on failure the row is deleted again.
    :param flush: when True, flush the session (assigns DB state without commit).
    :raises DartValidationException: for manual/unknown trigger types or
        schema validation failures.
    """
    trigger_type_name = trigger.data.trigger_type_name
    # manual triggers are fired on demand and are never persisted
    if trigger_type_name == self._manual_trigger_processor.trigger_type(
    ).name:
        raise DartValidationException('manual triggers cannot be saved')
    trigger_processor = self._trigger_processors.get(trigger_type_name)
    if not trigger_processor:
        raise DartValidationException('unknown trigger_type_name: %s' %
                                      trigger_type_name)
    assert isinstance(trigger_processor, TriggerProcessor)
    # apply defaults + validation using the processor-specific params schema
    trigger = default_and_validate(
        trigger,
        trigger_schema(
            trigger_processor.trigger_type().params_json_schema))
    trigger_dao = TriggerDao()
    trigger_dao.id = random_id()
    trigger_dao.data = trigger.data.to_dict()
    db.session.add(trigger_dao)
    if flush:
        db.session.flush()
    trigger = trigger_dao.to_model()
    if commit_and_initialize:
        db.session.commit()
        trigger = trigger_dao.to_model()
        try:
            trigger_processor.initialize_trigger(trigger, self)
        except:
            # initialization failed: remove the just-inserted row so no
            # orphan trigger remains, then re-raise the original error
            db.session.delete(trigger_dao)
            db.session.commit()
            raise
    return trigger
def test_action_schema_invalid(self):
    """Validation must reject an action lacking the required destination_s3_path."""
    with self.assertRaises(DartValidationException) as context:
        # only the source path is supplied; destination_s3_path is required
        incomplete_args = {'source_hdfs_path': 'hdfs:///user/hive/warehouse/dtest4'}
        action = Action(data=ActionData('copy_hdfs_to_s3', 'copy_hdfs_to_s3', incomplete_args, engine_name='no_op_engine'))
        schema = action_schema(NoOpActionTypes.copy_hdfs_to_s3_action.params_json_schema)
        default_and_validate(action, schema).to_dict()
    self.assertTrue(isinstance(context.exception, DartValidationException))
def test_workflow_schema(self):
    """Defaulting should replace a None workflow state, so the dicts differ."""
    wf = Workflow(data=WorkflowData('test-workflow', 'ABC123', state=None))
    serialized_before = wf.to_dict()
    validated = default_and_validate(wf, workflow_schema())
    # the defaulted state (INACTIVE) makes the two serializations unequal
    self.assertNotEqual(serialized_before, validated.to_dict())
def update_event(event, name, description, state):
    """Validate *event*, apply the new field values, and persist only the diff.

    :type event: dart.model.event.Event
    """
    original = event.copy()
    updated = default_and_validate(event, event_schema())
    updated.data.name = name
    updated.data.description = description
    updated.data.state = state
    return patch_difference(EventDao, original, updated)
def test_trigger_schema(self):
    """Validation should default a None state (to INACTIVE), changing the dict."""
    completion_args = {'completed_workflow_id': 'ABC123'}
    trigger = Trigger(data=TriggerData('test-trigger', 'workflow_completion', ['ABC123'], completion_args, state=None))
    before = trigger.to_dict()
    trigger = default_and_validate(trigger, trigger_schema(workflow_completion_trigger.params_json_schema))
    # defaulting must have altered the serialized form
    self.assertNotEqual(before, trigger.to_dict())
def update_subscription_name(subscription, name):
    """Rename a subscription and persist only the difference.

    :type subscription: dart.model.subscription.Subscription
    """
    original = subscription.copy()
    validated = default_and_validate(subscription, subscription_schema())
    validated.data.name = name
    return patch_difference(SubscriptionDao, original, validated)
def test_dataset_schema(self):
    """Validation should default a None num_header_rows (to 0), changing the dict."""
    cols = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
    # num_header_rows left as None so the schema fills in the default
    fmt = DataFormat(FileFormat.PARQUET, RowFormat.NONE, None)
    ds = Dataset(data=DatasetData('test-dataset', 'test_dataset_table', 's3://bucket/prefix', fmt, cols))
    before = ds.to_dict()
    after = default_and_validate(ds, dataset_schema()).to_dict()
    self.assertNotEqual(before, after)
def test_subscription_schema(self):
    """Validation should default a None state (to INACTIVE), changing the dict."""
    s3_start = 's3://my-test-bucket/impala/impala'
    s3_end = 's3://my-test-bucket/impala/install'
    path_regex = '.*\\.rpm'
    sub = Subscription(data=SubscriptionData('test-subscription', 'ABC123', s3_start, s3_end, path_regex, state=None))
    before = sub.to_dict()
    sub = default_and_validate(sub, subscription_schema())
    # defaulting must have altered the serialized form
    self.assertNotEqual(before, sub.to_dict())
def default_and_validate_action(self, action, action_type=None):
    """Resolve the action type (when not given) and validate against its schema.

    :raises DartValidationException: if the action type cannot be resolved
    """
    if not action_type:
        # look the type up among the engine's supported action types
        engine = self._engine_service.get_engine_by_name(action.data.engine_name)
        types_by_name = {t.name: t for t in engine.data.supported_action_types}
        action_type = types_by_name.get(action.data.action_type_name)
    if not action_type:
        raise DartValidationException('unknown action: "%s"' % action.data.action_type_name)
    assert isinstance(action_type, ActionType)
    return default_and_validate(action, action_schema(action_type.params_json_schema))
def test_action_schema(self):
    """Validation should default many unset fields, changing the serialized form."""
    copy_args = {
        'source_hdfs_path': 'hdfs:///user/hive/warehouse/dtest4',
        'destination_s3_path': 's3://fake-bucket/dart_testing',
    }
    action = Action(data=ActionData('copy_hdfs_to_s3', 'copy_hdfs_to_s3', copy_args,
                                    engine_name='no_op_engine', last_in_workflow=None))
    before = action.to_dict()
    after = default_and_validate(action, action_schema(NoOpActionTypes.copy_hdfs_to_s3_action.params_json_schema)).to_dict()
    self.assertNotEqual(before, after)
def update_engine(self, engine, updated_engine):
    """Validate *updated_engine* and persist it, re-registering its ECS task
    definition when it changed (or when it exists but was never registered).

    :type engine: dart.model.engine.Engine
    :type updated_engine: dart.model.engine.Engine
    """
    updated_engine = default_and_validate(updated_engine, engine_schema())
    self._validate_ecs_task_definition(updated_engine.data.ecs_task_definition)
    # diff old vs. new task definitions to detect a change
    p = jsonpatch.make_patch(engine.data.ecs_task_definition, updated_engine.data.ecs_task_definition)
    # also re-register if a definition exists but no ARN was ever recorded
    prior_arn_missing = not engine.data.ecs_task_definition_arn and updated_engine.data.ecs_task_definition
    if len(p.patch) > 0 or prior_arn_missing:
        # drop the stale registration before registering the new definition
        self._deregister_task_definition(engine.data.ecs_task_definition_arn)
        updated_engine.data.ecs_task_definition_arn = self._register_ecs_task_definition(updated_engine)
    return self.update_engine_data(engine.id, updated_engine.data)
def save_trigger(self, trigger, commit_and_initialize=True, flush=False, user_id=None): wf_uuid = uuid.uuid4().hex # to avoid uuid serialization issues trigger.data.tags = trigger.data.tags if (trigger.data.tags) else [] trigger.data.tags.append(wf_uuid) if user_id: trigger.data.user_id = user_id """ :type trigger: dart.model.trigger.Trigger """ trigger_type_name = trigger.data.trigger_type_name if trigger_type_name == self._manual_trigger_processor.trigger_type( ).name: raise DartValidationException('manual triggers cannot be saved') if trigger_type_name == self._retry_trigger_processor.trigger_type( ).name: raise DartValidationException('retry triggers cannot be saved') trigger_processor = self._trigger_processors.get(trigger_type_name) if not trigger_processor: raise DartValidationException('unknown trigger_type_name: %s' % trigger_type_name) assert isinstance(trigger_processor, TriggerProcessor) trigger = default_and_validate( trigger, trigger_schema( trigger_processor.trigger_type().params_json_schema)) trigger_dao = TriggerDao() trigger_dao.id = random_id() if trigger_type_name == 'subscription_batch': sub = self._subscription_service.get_subscription( trigger.data.args['subscription_id']) if sub.data.nudge_id: response = self.update_nudge_with_trigger( sub.data.nudge_id, trigger.data.args['unconsumed_data_size_in_bytes'], trigger_dao.id, trigger.data.trigger_type_name) assert (response.status_code == 200) trigger_dao.data = trigger.data.to_dict() db.session.add(trigger_dao) if flush: db.session.flush() trigger = trigger_dao.to_model() if commit_and_initialize: db.session.commit() trigger = trigger_dao.to_model() try: trigger_processor.initialize_trigger(trigger, self) except: db.session.delete(trigger_dao) db.session.commit() raise return trigger
def test_subscription_schema(self):
    """Defaulting should replace a None subscription state, so the dicts differ."""
    start_path = 's3://my-test-bucket/impala/impala'
    end_path = 's3://my-test-bucket/impala/install'
    rpm_regex = '.*\\.rpm'
    subscription = Subscription(data=SubscriptionData('test-subscription', 'ABC123',
                                                      start_path, end_path, rpm_regex, state=None))
    serialized_before = subscription.to_dict()
    subscription = default_and_validate(subscription, subscription_schema())
    # the defaulted state (INACTIVE) makes the serializations unequal
    self.assertNotEqual(serialized_before, subscription.to_dict())
def default_and_validate_action(self, action, action_type=None):
    """Resolve the action type if needed, normalize empty args, then validate.

    :raises DartValidationException: if the action type cannot be resolved
    """
    if not action_type:
        # find the type among the engine's supported action types
        engine = self._engine_service.get_engine_by_name(action.data.engine_name)
        supported = {t.name: t for t in engine.data.supported_action_types}
        action_type = supported.get(action.data.action_type_name)
    if not action_type:
        raise DartValidationException('unknown action: "%s"' % action.data.action_type_name)
    assert isinstance(action_type, ActionType)
    if not action.data.args:
        # normalize missing/empty args so schema defaulting can populate them
        action.data.args = {}
    return default_and_validate(action, action_schema(action_type.params_json_schema))
def test_trigger_schema(self):
    """Defaulting should replace a None trigger state, so the dicts differ."""
    args_for_trigger = {'completed_workflow_id': 'ABC123'}
    trigger = Trigger(data=TriggerData('test-trigger', 'workflow_completion',
                                       ['ABC123'], args_for_trigger, state=None))
    serialized_before = trigger.to_dict()
    schema = trigger_schema(workflow_completion_trigger.params_json_schema)
    trigger = default_and_validate(trigger, schema)
    # the defaulted state (INACTIVE) makes the serializations unequal
    self.assertNotEqual(serialized_before, trigger.to_dict())
def test_datastore_schema(self):
    """Validation defaults fields; purge_secrets strips and collects secrets."""
    datastore = Datastore(data=DatastoreData('test-datastore', 'fake_engine',
                                             args={'data_to_freespace_ratio': 0, 'secret': 'hi'}))
    before = datastore.to_dict()
    schema = datastore_schema(self.options_json_schema)
    datastore = default_and_validate(datastore, schema)
    after = datastore.to_dict()
    # defaulting changes the serialized form
    self.assertNotEqual(before, after)
    # the secret value survives validation...
    self.assertEqual(after['data']['args']['secret'], 'hi')
    collected = {}
    purge_secrets(after, schema, collected)
    # ...but purge_secrets removes it from the dict and collects it separately
    self.assertEqual(after['data']['args'].get('secret'), None)
    self.assertEqual(collected, {'secret': 'hi'})
def save_event(event, commit=True, flush=False):
    """Validate an event and insert it as a new row.

    :type event: dart.model.event.Event
    :param commit: commit the transaction after the insert
    :param flush: flush the session before (optionally) committing
    """
    event = default_and_validate(event, event_schema())
    dao = EventDao()
    dao.id = random_id()
    dao.data = event.data.to_dict()
    db.session.add(dao)
    if flush:
        db.session.flush()
    if commit:
        db.session.commit()
    return dao.to_model()
def save_subgraph_definition(subgraph_definition, engine, trigger_schemas):
    """Validate and persist a subgraph definition for *engine*.

    :type engine: dart.model.engine.Engine
    :type subgraph_definition: dart.model.graph.SubGraphDefinition
    """
    # build the composite schema from the engine's action types and options
    action_schemas = [action_schema(t.params_json_schema) for t in engine.data.supported_action_types]
    ds_schema = datastore_schema(engine.data.options_json_schema)
    combined_schema = subgraph_definition_schema(trigger_schemas, action_schemas, ds_schema)
    subgraph_definition = default_and_validate(subgraph_definition, combined_schema)
    dao = SubGraphDefinitionDao()
    dao.id = random_id()
    dao.data = subgraph_definition.data.to_dict()
    # tag the stored definition with its owning engine
    dao.data['engine_name'] = engine.data.name
    db.session.add(dao)
    db.session.commit()
    return dao.to_model()
def save_workflow(workflow, commit=True, flush=False):
    """Validate a workflow and insert it as a new row.

    :type workflow: dart.model.workflow.Workflow
    :param commit: commit the transaction after the insert
    :param flush: flush the session before (optionally) committing
    """
    workflow = default_and_validate(workflow, workflow_schema())
    dao = WorkflowDao()
    dao.id = random_id()
    dao.data = workflow.data.to_dict()
    db.session.add(dao)
    if flush:
        db.session.flush()
    if commit:
        db.session.commit()
    return dao.to_model()
def update_engine(self, engine, updated_engine):
    """Validate *updated_engine* and persist it, re-registering its ECS task
    definition when it changed (or when it exists but was never registered).

    :type engine: dart.model.engine.Engine
    :type updated_engine: dart.model.engine.Engine
    """
    updated_engine = default_and_validate(updated_engine, engine_schema())
    self._validate_ecs_task_definition(
        updated_engine.data.ecs_task_definition)
    # diff old vs. new task definitions to detect a change
    p = jsonpatch.make_patch(engine.data.ecs_task_definition,
                             updated_engine.data.ecs_task_definition)
    # also re-register if a definition exists but no ARN was ever recorded
    prior_arn_missing = not engine.data.ecs_task_definition_arn and updated_engine.data.ecs_task_definition
    if len(p.patch) > 0 or prior_arn_missing:
        # drop the stale registration before registering the new definition
        self._deregister_task_definition(
            engine.data.ecs_task_definition_arn)
        updated_engine.data.ecs_task_definition_arn = self._register_ecs_task_definition(
            updated_engine)
    return self.update_engine_data(engine.id, updated_engine.data)
def save_subscription(self, subscription, commit_and_generate=True, flush=False):
    """Validate a subscription, mark it ACTIVE, and insert it.

    :type subscription: dart.model.subscription.Subscription
    """
    subscription = default_and_validate(subscription, subscription_schema())
    dao = SubscriptionDao()
    dao.id = random_id()
    # new subscriptions are always persisted in the ACTIVE state
    subscription.data.state = SubscriptionState.ACTIVE
    dao.data = subscription.data.to_dict()
    db.session.add(dao)
    if flush:
        db.session.flush()
    subscription = dao.to_model()
    if commit_and_generate:
        db.session.commit()
        subscription = dao.to_model()
    return subscription
def test_action_schema(self):
    """Defaulting should populate many unset action fields, so the dicts differ."""
    action = Action(data=ActionData(
        'copy_hdfs_to_s3', 'copy_hdfs_to_s3',
        {
            'source_hdfs_path': 'hdfs:///user/hive/warehouse/dtest4',
            'destination_s3_path': 's3://fake-bucket/dart_testing',
        },
        engine_name='no_op_engine',
        last_in_workflow=None,
    ))
    serialized_before = action.to_dict()
    schema = action_schema(NoOpActionTypes.copy_hdfs_to_s3_action.params_json_schema)
    serialized_after = default_and_validate(action, schema).to_dict()
    self.assertNotEqual(serialized_before, serialized_after)
def save_trigger(self, trigger, commit_and_initialize=True, flush=False, user_id=None): wf_uuid = uuid.uuid4().hex # to avoid uuid serialization issues trigger.data.tags = trigger.data.tags if (trigger.data.tags) else [] trigger.data.tags.append(wf_uuid) if user_id: trigger.data.user_id = user_id """ :type trigger: dart.model.trigger.Trigger """ trigger_type_name = trigger.data.trigger_type_name if trigger_type_name == self._manual_trigger_processor.trigger_type( ).name: raise DartValidationException('manual triggers cannot be saved') if trigger_type_name == self._retry_trigger_processor.trigger_type( ).name: raise DartValidationException('retry triggers cannot be saved') trigger_processor = self._trigger_processors.get(trigger_type_name) if not trigger_processor: raise DartValidationException('unknown trigger_type_name: %s' % trigger_type_name) assert isinstance(trigger_processor, TriggerProcessor) trigger = default_and_validate( trigger, trigger_schema( trigger_processor.trigger_type().params_json_schema)) trigger_dao = TriggerDao() trigger_dao.id = random_id() trigger_dao.data = trigger.data.to_dict() db.session.add(trigger_dao) if flush: db.session.flush() trigger = trigger_dao.to_model() if commit_and_initialize: db.session.commit() trigger = trigger_dao.to_model() try: trigger_processor.initialize_trigger(trigger, self) except: db.session.delete(trigger_dao) db.session.commit() raise return trigger
def save_subgraph_definition(subgraph_definition, engine, trigger_schemas):
    """Validate a subgraph definition against the engine's composite schema
    and persist it.

    :type engine: dart.model.engine.Engine
    :type subgraph_definition: dart.model.graph.SubGraphDefinition
    """
    # one action schema per supported action type, plus the datastore schema
    per_action_schemas = [
        action_schema(at.params_json_schema)
        for at in engine.data.supported_action_types
    ]
    datastore_json_schema = datastore_schema(engine.data.options_json_schema)
    full_schema = subgraph_definition_schema(trigger_schemas, per_action_schemas, datastore_json_schema)
    subgraph_definition = default_and_validate(subgraph_definition, full_schema)
    definition_dao = SubGraphDefinitionDao()
    definition_dao.id = random_id()
    definition_dao.data = subgraph_definition.data.to_dict()
    # record which engine owns this definition
    definition_dao.data['engine_name'] = engine.data.name
    db.session.add(definition_dao)
    db.session.commit()
    return definition_dao.to_model()
def test_datastore_schema(self):
    """Defaulting changes the datastore dict; purge_secrets extracts 'secret'."""
    args_with_secret = {'data_to_freespace_ratio': 0, 'secret': 'hi'}
    datastore = Datastore(data=DatastoreData('test-datastore', 'fake_engine', args=args_with_secret))
    serialized_before = datastore.to_dict()
    options_schema = datastore_schema(self.options_json_schema)
    datastore = default_and_validate(datastore, options_schema)
    serialized_after = datastore.to_dict()
    self.assertNotEqual(serialized_before, serialized_after)
    # validation keeps the secret in place
    self.assertEqual(serialized_after['data']['args']['secret'], 'hi')
    extracted = {}
    purge_secrets(serialized_after, options_schema, extracted)
    # purging removes the secret from the dict and returns it via *extracted*
    self.assertEqual(serialized_after['data']['args'].get('secret'), None)
    self.assertEqual(extracted, {'secret': 'hi'})
def save_subscription(self, subscription, commit_and_generate=True, flush=False):
    """Validate a subscription, mark it QUEUED, insert it, and (optionally)
    kick off element generation.

    :type subscription: dart.model.subscription.Subscription
    """
    subscription = default_and_validate(subscription, subscription_schema())
    dao = SubscriptionDao()
    dao.id = random_id()
    # new subscriptions start QUEUED with their enqueue time recorded
    subscription.data.state = SubscriptionState.QUEUED
    subscription.data.queued_time = datetime.now()
    dao.data = subscription.data.to_dict()
    db.session.add(dao)
    if flush:
        db.session.flush()
    subscription = dao.to_model()
    if commit_and_generate:
        db.session.commit()
        subscription = dao.to_model()
        # generation happens only after the row is durably committed
        self._subscription_proxy.generate_subscription_elements(subscription)
    return subscription
def save_trigger(self, trigger, commit_and_initialize=True, flush=False, user_id=None): wf_uuid = uuid.uuid4().hex # to avoid uuid serialization issues trigger.data.tags = trigger.data.tags if (trigger.data.tags) else [] trigger.data.tags.append(wf_uuid) if user_id: trigger.data.user_id = user_id """ :type trigger: dart.model.trigger.Trigger """ trigger_type_name = trigger.data.trigger_type_name if trigger_type_name == self._manual_trigger_processor.trigger_type().name: raise DartValidationException('manual triggers cannot be saved') if trigger_type_name == self._retry_trigger_processor.trigger_type().name: raise DartValidationException('retry triggers cannot be saved') trigger_processor = self._trigger_processors.get(trigger_type_name) if not trigger_processor: raise DartValidationException('unknown trigger_type_name: %s' % trigger_type_name) assert isinstance(trigger_processor, TriggerProcessor) trigger = default_and_validate(trigger, trigger_schema(trigger_processor.trigger_type().params_json_schema)) trigger_dao = TriggerDao() trigger_dao.id = random_id() if trigger_type_name == 'subscription_batch': sub = self._subscription_service.get_subscription(trigger.data.args['subscription_id']) if sub.data.nudge_id: response = self.update_nudge_with_trigger(sub.data.nudge_id, trigger.data.args['unconsumed_data_size_in_bytes'], trigger_dao.id, trigger.data.trigger_type_name) assert(response.status_code == 200) trigger_dao.data = trigger.data.to_dict() db.session.add(trigger_dao) if flush: db.session.flush() trigger = trigger_dao.to_model() if commit_and_initialize: db.session.commit() trigger = trigger_dao.to_model() try: trigger_processor.initialize_trigger(trigger, self) except: db.session.delete(trigger_dao) db.session.commit() raise return trigger
def save_engine(self, engine):
    """Validate and insert an engine, then register its ECS task definition.

    :type engine: dart.model.engine.Engine
    :raises DartValidationException: if the engine name is already taken
    """
    engine = default_and_validate(engine, engine_schema())
    self._validate_ecs_task_definition(engine.data.ecs_task_definition)
    engine_dao = EngineDao()
    engine_dao.id = random_id()
    engine_dao.name = engine.data.name
    engine_dao.data = engine.data.to_dict()
    db.session.add(engine_dao)
    try:
        db.session.commit()
        engine = engine_dao.to_model()
        # register with ECS only after the row committed, then persist the ARN
        engine.data.ecs_task_definition_arn = self._register_ecs_task_definition(engine)
        return self.update_engine_data(engine.id, engine.data)
    except SqlAlchemyIntegrityError as e:
        # 23505 = postgres unique_violation (duplicate engine name)
        if hasattr(e, 'orig') and isinstance(e.orig, PostgresIntegrityError) and e.orig.pgcode == '23505':
            raise DartValidationException('name already exists: %s' % engine.data.name)
        raise e
def save_dataset(dataset, commit=True, flush=False):
    """Validate and insert a dataset, normalizing its location.

    :type dataset: dart.model.dataset.Dataset
    :raises DartValidationException: if the dataset name already exists
    """
    dataset = default_and_validate(dataset, dataset_schema())
    dao = DatasetDao()
    dao.id = random_id()
    dao.name = dataset.data.name
    # strip any trailing slash so locations compare consistently
    dataset.data.location = dataset.data.location.rstrip('/')
    dao.data = dataset.data.to_dict()
    db.session.add(dao)
    try:
        if flush:
            db.session.flush()
        if commit:
            db.session.commit()
        return dao.to_model()
    except SqlAlchemyIntegrityError as e:
        # 23505 = postgres unique_violation (duplicate dataset name)
        if hasattr(e, 'orig') and isinstance(e.orig, PostgresIntegrityError) and e.orig.pgcode == '23505':
            raise DartValidationException('name already exists: %s' % dataset.data.name)
        raise e
def save_subscription(self, subscription, commit_and_generate=True, flush=False):
    """Persist a validated subscription in the QUEUED state; optionally commit
    and start generating its elements.

    :type subscription: dart.model.subscription.Subscription
    """
    subscription = default_and_validate(subscription, subscription_schema())
    subscription_dao = SubscriptionDao()
    subscription_dao.id = random_id()
    # record the QUEUED state and enqueue timestamp before serializing
    subscription.data.state = SubscriptionState.QUEUED
    subscription.data.queued_time = datetime.now()
    subscription_dao.data = subscription.data.to_dict()
    db.session.add(subscription_dao)
    if flush:
        db.session.flush()
    subscription = subscription_dao.to_model()
    if commit_and_generate:
        db.session.commit()
        subscription = subscription_dao.to_model()
        # only a committed subscription is handed off for element generation
        self._subscription_proxy.generate_subscription_elements(subscription)
    return subscription
def save_trigger(self, trigger, commit_and_initialize=True, flush=False, user_id=None): wf_uuid = uuid.uuid4().hex # to avoid uuid serialization issues trigger.data.tags = trigger.data.tags if (trigger.data.tags) else [] trigger.data.tags.append(wf_uuid) if user_id: trigger.data.user_id = user_id """ :type trigger: dart.model.trigger.Trigger """ trigger_type_name = trigger.data.trigger_type_name if trigger_type_name == self._manual_trigger_processor.trigger_type().name: raise DartValidationException('manual triggers cannot be saved') if trigger_type_name == self._retry_trigger_processor.trigger_type().name: raise DartValidationException('retry triggers cannot be saved') trigger_processor = self._trigger_processors.get(trigger_type_name) if not trigger_processor: raise DartValidationException('unknown trigger_type_name: %s' % trigger_type_name) assert isinstance(trigger_processor, TriggerProcessor) trigger = default_and_validate(trigger, trigger_schema(trigger_processor.trigger_type().params_json_schema)) trigger_dao = TriggerDao() trigger_dao.id = random_id() trigger_dao.data = trigger.data.to_dict() db.session.add(trigger_dao) if flush: db.session.flush() trigger = trigger_dao.to_model() if commit_and_initialize: db.session.commit() trigger = trigger_dao.to_model() try: trigger_processor.initialize_trigger(trigger, self) except: db.session.delete(trigger_dao) db.session.commit() raise return trigger
def save_dataset(dataset, commit=True, flush=False):
    """Persist a validated dataset, stripping any trailing '/' from location.

    :type dataset: dart.model.dataset.Dataset
    :raises DartValidationException: when the dataset name is already in use
    """
    dataset = default_and_validate(dataset, dataset_schema())
    dataset_dao = DatasetDao()
    dataset_dao.id = random_id()
    dataset_dao.name = dataset.data.name
    # normalize the location so equivalent paths serialize identically
    dataset.data.location = dataset.data.location.rstrip('/')
    dataset_dao.data = dataset.data.to_dict()
    db.session.add(dataset_dao)
    try:
        if flush:
            db.session.flush()
        if commit:
            db.session.commit()
        dataset = dataset_dao.to_model()
        return dataset
    except SqlAlchemyIntegrityError as e:
        # map postgres unique_violation (23505) to a validation error
        is_dup = hasattr(e, 'orig') and isinstance(e.orig, PostgresIntegrityError) and e.orig.pgcode == '23505'
        if is_dup:
            raise DartValidationException('name already exists: %s' % dataset.data.name)
        raise e
def save_engine(self, engine):
    """Validate and insert an engine, then register its ECS task definition.

    :type engine: dart.model.engine.Engine
    :raises DartValidationException: if the engine name is already taken
    """
    engine = default_and_validate(engine, engine_schema())
    self._validate_ecs_task_definition(engine.data.ecs_task_definition)
    engine_dao = EngineDao()
    engine_dao.id = random_id()
    engine_dao.name = engine.data.name
    engine_dao.data = engine.data.to_dict()
    db.session.add(engine_dao)
    try:
        db.session.commit()
        engine = engine_dao.to_model()
        # register with ECS only after the row committed, then persist the ARN
        engine.data.ecs_task_definition_arn = self._register_ecs_task_definition(
            engine)
        return self.update_engine_data(engine.id, engine.data)
    except SqlAlchemyIntegrityError as e:
        # 23505 = postgres unique_violation (duplicate engine name)
        if hasattr(e, 'orig') and isinstance(
                e.orig, PostgresIntegrityError) and e.orig.pgcode == '23505':
            raise DartValidationException('name already exists: %s' %
                                          engine.data.name)
        raise e
def default_and_validate_trigger(self, trigger):
    """Default and validate *trigger* against its processor's params schema."""
    trigger_processor = self._trigger_processors.get(trigger.data.trigger_type_name)
    return default_and_validate(
        trigger,
        trigger_schema(trigger_processor.trigger_type().params_json_schema),
    )
def default_and_validate_event(event):
    """Apply event-schema defaults and validation to *event*."""
    schema = event_schema()
    return default_and_validate(event, schema)
def default_and_validate_subscription(subscription):
    """Apply subscription-schema defaults and validation to *subscription*."""
    schema = subscription_schema()
    return default_and_validate(subscription, schema)
def default_and_validate_workflow(workflow):
    """Apply workflow-schema defaults and validation to *workflow*."""
    schema = workflow_schema()
    return default_and_validate(workflow, schema)
def default_and_validate_datastore(self, datastore, schema=None):
    """Validate *datastore* against *schema*, deriving one when not supplied."""
    if not schema:
        schema = self.get_schema(datastore)
    return default_and_validate(datastore, schema)