def subgraph_definition_schema(trigger_schemas, action_schemas, datastore_schema):
    """Return the JSON schema for a subgraph definition.

    :param trigger_schemas: list of schemas accepted (anyOf) for trigger entries
    :param action_schemas: list of schemas accepted (anyOf) for action entries
    :param datastore_schema: schema applied to each datastore entry
    """
    def entity_array(item_schema):
        # Every entity collection shares the same shape: a tabarray-rendered
        # list defaulting to empty, titled by each element's name.
        return {
            'x-schema-form': {'type': 'tabarray', 'title': "{{ value.name }}"},
            'type': 'array',
            'items': item_schema,
            'default': [],
        }

    return base_schema({
        'type': 'object',
        'properties': {
            'name': {'type': 'string'},
            'description': {'type': 'string'},
            'engine_name': {'type': 'string'},
            'related_type': {'type': 'string', 'maxLength': 50},
            'related_is_a': {'type': 'string', 'maxLength': 50},
            'actions': entity_array({'anyOf': action_schemas}),
            'datastores': entity_array(datastore_schema),
            'datasets': entity_array(dataset_schema()),
            'events': entity_array(event_schema()),
            'subscriptions': entity_array(subscription_schema()),
            'triggers': entity_array({'anyOf': trigger_schemas}),
            'workflows': entity_array(workflow_schema()),
            'icon': {'type': ['string', 'null']},
            'md_icon': {'type': ['string', 'null']},
        },
        'additionalProperties': False,
        'required': ['name', 'engine_name', 'related_type', 'related_is_a'],
    })
def test_dataset_schema(self):
    """Defaulting a valid dataset should change it (num_header_rows gets filled in)."""
    cols = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
    data_format = DataFormat(FileFormat.PARQUET, RowFormat.NONE, None)
    dataset = Dataset(data=DatasetData('test-dataset', 'test_dataset_table', 's3://bucket/prefix', data_format, cols))
    before = dataset.to_dict()
    after = default_and_validate(dataset, dataset_schema()).to_dict()
    # num_header_rows should have been defaulted to 0, making these unequal
    self.assertNotEqual(before, after)
def test_dataset_schema_invalid(self):
    """Validation must reject a dataset that has no location."""
    with self.assertRaises(DartValidationException) as context:
        cols = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
        data_format = DataFormat(FileFormat.PARQUET, RowFormat.NONE)
        dataset = Dataset(data=DatasetData('test-dataset', 'test_dataset_table', None, data_format, cols))
        # should fail because location is required
        default_and_validate(dataset, dataset_schema())
    self.assertTrue(isinstance(context.exception, DartValidationException))
def save_dataset(dataset, commit=True, flush=False):
    """ :type dataset: dart.model.dataset.Dataset """
    dataset = default_and_validate(dataset, dataset_schema())

    dao = DatasetDao()
    dao.id = random_id()
    dao.name = dataset.data.name
    # normalize the location so a trailing slash never leaks into storage
    dataset.data.location = dataset.data.location.rstrip('/')
    dao.data = dataset.data.to_dict()
    db.session.add(dao)
    try:
        if flush:
            db.session.flush()
        if commit:
            db.session.commit()
        return dao.to_model()
    except SqlAlchemyIntegrityError as e:
        # 23505 is the postgres unique-violation code: surface it as a
        # validation error on the dataset name instead of a raw DB error
        duplicate_name = hasattr(e, 'orig') and isinstance(e.orig, PostgresIntegrityError) and e.orig.pgcode == '23505'
        if duplicate_name:
            raise DartValidationException('name already exists: %s' % dataset.data.name)
        raise e
def main(): data = { 'definitions': { 'Action': action_schema(None), 'ActionContext': action_context_schema(), 'ActionResult': action_result_schema(), 'Dataset': dataset_schema(), 'Datastore': datastore_schema(None), 'Engine': engine_schema(), 'ErrorResult': error_result_schema(), 'Event': event_schema(), 'Filter': filter_schema(), 'GraphEntityIdentifier': graph_entity_identifier_schema(), 'GraphEntity': graph_entity_schema(), 'JSONPatch': json_patch_schema(), 'JSONSchema': json_schema_schema(), 'OKResult': ok_result_schema(), 'OrderBy': order_by_schema(), 'SubGraph': sub_graph_schema(), 'SubGraphDefinition': { 'type': 'object' }, #subgraph_definition_schema([{'type': 'object'}], [{'type': 'object'}], {'type': 'object'}), 'Subscription': subscription_schema(), 'Trigger': trigger_schema({'type': 'object'}), 'TriggerType': trigger_type_schema(), 'Workflow': workflow_schema(), 'WorkflowInstance': workflow_instance_schema() } } fix_up(data, data, [None]) print dump(data, Dumper=Dumper, default_style=None, default_flow_style=False, explicit_start=False, explicit_end=False) return 0
def save_dataset(dataset, commit=True, flush=False):
    """ :type dataset: dart.model.dataset.Dataset """
    dataset = default_and_validate(dataset, dataset_schema())
    # strip any trailing slash from the location before persisting
    dataset.data.location = dataset.data.location.rstrip('/')

    dataset_dao = DatasetDao()
    dataset_dao.id = random_id()
    dataset_dao.name = dataset.data.name
    dataset_dao.data = dataset.data.to_dict()
    db.session.add(dataset_dao)
    try:
        if flush:
            db.session.flush()
        if commit:
            db.session.commit()
        return dataset_dao.to_model()
    except SqlAlchemyIntegrityError as e:
        # postgres pgcode 23505 == unique constraint violation on the name
        if hasattr(e, 'orig') and isinstance(e.orig, PostgresIntegrityError) and e.orig.pgcode == '23505':
            raise DartValidationException('name already exists: %s' % dataset.data.name)
        raise e
def export_swagger_definitions(out):
    """Write swagger YAML definitions for every dart model and response type to *out*."""
    data = {
        'definitions': {
            # actions
            'Action': action_schema({'type': 'object', 'x-nullable': True}),
            'ActionResponse': object_response_schema('Action'),
            'ActionsResponse': array_response_schema('Action'),
            'PagedActionsResponse': paged_response_schema('Action'),
            'ActionContext': action_context_schema(),
            'ActionContextResponse': object_response_schema('ActionContext'),
            'ActionResult': action_result_schema(),
            # datasets
            'Dataset': dataset_schema(),
            'DatasetResponse': object_response_schema('Dataset'),
            'PagedDatasetsResponse': paged_response_schema('Dataset'),
            # datastores
            'Datastore': datastore_schema({'type': 'object', 'x-nullable': True}),
            'DatastoreResponse': object_response_schema('Datastore'),
            'PagedDatastoresResponse': paged_response_schema('Datastore'),
            # engines
            'Engine': engine_schema(),
            'EngineResponse': object_response_schema('Engine'),
            'PagedEnginesResponse': paged_response_schema('Engine'),
            'ErrorResponse': error_response_schema(),
            # events
            'Event': event_schema(),
            'EventResponse': object_response_schema('Event'),
            'PagedEventsResponse': paged_response_schema('Event'),
            'Filter': filter_schema(),
            # graph entities
            'GraphEntityIdentifier': graph_entity_identifier_schema(),
            'GraphEntityIdentifierResponse': object_response_schema('GraphEntityIdentifier'),
            'GraphEntityIdentifiersResponse': array_response_schema('GraphEntityIdentifier'),
            'GraphEntity': graph_entity_schema(),
            'GraphEntityResponse': object_response_schema('GraphEntity'),
            # JSON helpers
            'JSONPatch': json_patch_schema(),
            'JSONSchema': json_schema_schema(),
            'JSONSchemaResponse': object_response_schema('JSONSchema'),
            # generic object responses
            'ObjectResponse': object_response_schema('object'),
            'ObjectsResponse': array_response_schema('object'),
            'PagedObjectsResponse': paged_response_schema('object'),
            'OKResponse': ok_response_schema(),
            'OrderBy': order_by_schema(),
            # subgraphs
            'Subgraph': sub_graph_schema(),
            'SubgraphResponse': object_response_schema('Subgraph'),
            # subgraph_definition_schema([{'type': 'object'}], [{'type': 'object'}], {'type': 'object'}),
            'SubgraphDefinition': {'type': 'object'},
            'SubgraphDefinitionResponse': object_response_schema('SubgraphDefinition'),
            # subscriptions
            'Subscription': subscription_schema(),
            'SubscriptionResponse': object_response_schema('Subscription'),
            'PagedSubscriptionsResponse': paged_response_schema('Subscription'),
            'SubscriptionElement': subscription_element_schema(),
            'PagedSubscriptionElementsResponse': paged_response_schema('SubscriptionElement'),
            # triggers
            'Trigger': trigger_schema({'type': 'object'}),
            'TriggerResponse': object_response_schema('Trigger'),
            'PagedTriggersResponse': paged_response_schema('Trigger'),
            'TriggerType': trigger_type_schema(),
            'PagedTriggerTypesResponse': paged_response_schema('TriggerType'),
            # workflows
            'Workflow': workflow_schema(),
            'WorkflowResponse': object_response_schema('Workflow'),
            'PagedWorkflowsResponse': paged_response_schema('Workflow'),
            'WorkflowInstance': workflow_instance_schema(),
            'WorkflowInstanceResponse': object_response_schema('WorkflowInstance'),
            'PagedWorkflowInstancesResponse': paged_response_schema('WorkflowInstance'),
        }
    }
    fix_up(data, data, [None])
    dump(data, out, Dumper=Dumper, default_style=None, default_flow_style=False, explicit_start=False, explicit_end=False)
def get_dataset_json_schema():
    """Return the dataset JSON schema wrapped in a 'results' envelope."""
    schema = dataset_schema()
    return {'results': schema}
def export_swagger_definitions(out):
    """Dump swagger YAML definitions for all dart models and their response wrappers to *out*."""
    nullable = {'type': 'object', 'x-nullable': True}
    definitions = {}
    definitions['Action'] = action_schema(nullable)
    definitions['ActionResponse'] = object_response_schema('Action')
    definitions['ActionsResponse'] = array_response_schema('Action')
    definitions['PagedActionsResponse'] = paged_response_schema('Action')
    definitions['ActionContext'] = action_context_schema()
    definitions['ActionContextResponse'] = object_response_schema('ActionContext')
    definitions['ActionResult'] = action_result_schema()
    definitions['Dataset'] = dataset_schema()
    definitions['DatasetResponse'] = object_response_schema('Dataset')
    definitions['PagedDatasetsResponse'] = paged_response_schema('Dataset')
    # datastore_schema takes its own nullable placeholder (fresh dict, not shared)
    definitions['Datastore'] = datastore_schema({'type': 'object', 'x-nullable': True})
    definitions['DatastoreResponse'] = object_response_schema('Datastore')
    definitions['PagedDatastoresResponse'] = paged_response_schema('Datastore')
    definitions['Engine'] = engine_schema()
    definitions['EngineResponse'] = object_response_schema('Engine')
    definitions['PagedEnginesResponse'] = paged_response_schema('Engine')
    definitions['ErrorResponse'] = error_response_schema()
    definitions['Event'] = event_schema()
    definitions['EventResponse'] = object_response_schema('Event')
    definitions['PagedEventsResponse'] = paged_response_schema('Event')
    definitions['Filter'] = filter_schema()
    definitions['GraphEntityIdentifier'] = graph_entity_identifier_schema()
    definitions['GraphEntityIdentifierResponse'] = object_response_schema('GraphEntityIdentifier')
    definitions['GraphEntityIdentifiersResponse'] = array_response_schema('GraphEntityIdentifier')
    definitions['GraphEntity'] = graph_entity_schema()
    definitions['GraphEntityResponse'] = object_response_schema('GraphEntity')
    definitions['JSONPatch'] = json_patch_schema()
    definitions['JSONSchema'] = json_schema_schema()
    definitions['JSONSchemaResponse'] = object_response_schema('JSONSchema')
    definitions['ObjectResponse'] = object_response_schema('object')
    definitions['ObjectsResponse'] = array_response_schema('object')
    definitions['PagedObjectsResponse'] = paged_response_schema('object')
    definitions['OKResponse'] = ok_response_schema()
    definitions['OrderBy'] = order_by_schema()
    definitions['Subgraph'] = sub_graph_schema()
    definitions['SubgraphResponse'] = object_response_schema('Subgraph')
    # subgraph_definition_schema([{'type': 'object'}], [{'type': 'object'}], {'type': 'object'}),
    definitions['SubgraphDefinition'] = {'type': 'object'}
    definitions['SubgraphDefinitionResponse'] = object_response_schema('SubgraphDefinition')
    definitions['Subscription'] = subscription_schema()
    definitions['SubscriptionResponse'] = object_response_schema('Subscription')
    definitions['PagedSubscriptionsResponse'] = paged_response_schema('Subscription')
    definitions['SubscriptionElement'] = subscription_element_schema()
    definitions['PagedSubscriptionElementsResponse'] = paged_response_schema('SubscriptionElement')
    definitions['Trigger'] = trigger_schema({'type': 'object'})
    definitions['TriggerResponse'] = object_response_schema('Trigger')
    definitions['PagedTriggersResponse'] = paged_response_schema('Trigger')
    definitions['TriggerType'] = trigger_type_schema()
    definitions['PagedTriggerTypesResponse'] = paged_response_schema('TriggerType')
    definitions['Workflow'] = workflow_schema()
    definitions['WorkflowResponse'] = object_response_schema('Workflow')
    definitions['PagedWorkflowsResponse'] = paged_response_schema('Workflow')
    definitions['WorkflowInstance'] = workflow_instance_schema()
    definitions['WorkflowInstanceResponse'] = object_response_schema('WorkflowInstance')
    definitions['PagedWorkflowInstancesResponse'] = paged_response_schema('WorkflowInstance')

    data = {'definitions': definitions}
    fix_up(data, data, [None])
    dump(data, out, Dumper=Dumper, default_style=None, default_flow_style=False, explicit_start=False, explicit_end=False)
def update_dataset(self, dataset_id, dataset):
    """Apply defaults, validate *dataset*, and persist its data under *dataset_id*."""
    validated = default_and_validate(dataset, dataset_schema())
    return self.update_dataset_data(dataset_id, validated.data)
def _query_dataset_query(self, filters):
    """Build a DatasetDao query ordered by update time, applying each filter in turn."""
    query = DatasetDao.query.order_by(DatasetDao.updated)
    for dataset_filter in filters:
        query = self._filter_service.apply_filter(dataset_filter, query, DatasetDao, [dataset_schema()])
    return query