class DatasetSchemaV3(CreatorMixinSchemaV3, EntitySchemaV3):
    """Dataset schema."""

    class Meta:
        """Meta class."""

        rdf_type = schema.Dataset
        model = Dataset
        unknown = EXCLUDE

    creators = fields.Nested(schema.creator, PersonSchemaV3, many=True)
    date_created = fields.DateTime(schema.dateCreated, missing=None)
    date_published = fields.DateTime(schema.datePublished, missing=None)
    description = fields.String(schema.description, missing=None)
    files = fields.Nested(schema.hasPart, DatasetFileSchemaV3, many=True)
    identifier = fields.String(schema.identifier)
    in_language = fields.Nested(schema.inLanguage, LanguageSchemaV5, missing=None)
    keywords = fields.List(schema.keywords, fields.String())
    license = fields.Uri(schema.license, missing=None, allow_none=True)
    name = fields.String(schema.alternateName, missing=None)
    same_as = fields.Nested(schema.sameAs, UrlSchemaV5, missing=None)
    tags = fields.Nested(schema.subjectOf, DatasetTagSchemaV5, many=True)
    title = fields.String(schema.name)
    url = fields.String(schema.url)
    version = fields.String(schema.version, missing=None)

    @pre_load
    def fix_files_context(self, data, **kwargs):
        """Fix DatasetFile context for _label and external fields."""
        from renku.core.utils.migrate import migrate_types

        data = migrate_types(data)

        if "@context" not in data:
            return data

        context = data["@context"]
        if not isinstance(context, dict) or "files" not in context:
            return data

        context.setdefault("rdfs", "http://www.w3.org/2000/01/rdf-schema#")
        context.setdefault("_label", "rdfs:label")

        files = data["@context"]["files"]
        if not isinstance(files, dict) or "@context" not in files:
            return data

        context = files["@context"]
        context.setdefault("rdfs", "http://www.w3.org/2000/01/rdf-schema#")
        context.setdefault("_label", "rdfs:label")
        context.setdefault("external", "renku:external")
        context.setdefault("renku", "https://swissdatasciencecenter.github.io/renku-ontology#")

        return data
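# Rough usage sketch (not part of the original module): it assumes this schema is
# importable alongside the renku package and calamus/marshmallow. It illustrates how
# the fix_files_context pre-load hook back-fills missing "@context" entries (rdfs,
# _label, external, renku) on an old dataset document before loading. Calling the
# hook directly is only for demonstration; a real DatasetSchemaV3().load(...) call
# invokes it automatically.
if __name__ == "__main__":
    old_metadata = {
        "@context": {
            "files": {"@context": {"schema": "http://schema.org/"}},
        },
    }

    patched = DatasetSchemaV3().fix_files_context(old_metadata)
    assert patched["@context"]["files"]["@context"]["_label"] == "rdfs:label"
    assert patched["@context"]["files"]["@context"]["external"] == "renku:external"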
class ProjectSchemaV3(JsonLDSchema):
    """Project Schema."""

    class Meta:
        """Meta class."""

        rdf_type = [prov.Location, schema.Project]
        model = Project
        unknown = EXCLUDE

    _id = fields.Id(missing=None)
    name = fields.String(schema.name, missing=None)
    created = fields.DateTime(schema.dateCreated, missing=None)
    updated = fields.DateTime(schema.dateUpdated, missing=None)
    version = fields.String(schema.schemaVersion, missing=1)
    creator = fields.Nested(schema.creator, PersonSchemaV3, missing=None)
class DatasetFileSchema(EntitySchema):
    """DatasetFile schema."""

    class Meta:
        """Meta class."""

        rdf_type = schema.DigitalDocument
        model = DatasetFile
        unknown = EXCLUDE

    added = fields.DateTime(schema.dateCreated, format="iso", extra_formats=("%Y-%m-%d",))
    name = fields.String(schema.name, missing=None)
    url = fields.String(schema.url, missing=None)
    based_on = Nested(schema.isBasedOn, "DatasetFileSchema", missing=None, propagate_client=False)
    external = fields.Boolean(renku.external, missing=False)
    source = fields.String(renku.source, missing=None)

    @pre_dump
    def fix_datetimes(self, obj, many=False, **kwargs):
        """Pre dump hook."""
        if many:
            return [self.fix_datetimes(o, many=False, **kwargs) for o in obj]
        obj.added = self._fix_timezone(obj.added)
        return obj
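# Sketch of the kind of normalization the fix_datetimes pre-dump hooks rely on.
# The real _fix_timezone comes from a shared schema base class that is not shown
# here; this standalone, hypothetical stand-in only illustrates the assumed
# behaviour: leave None and timezone-aware values alone, and attach the local
# timezone to naive datetimes so dumped ISO strings carry an offset.
from datetime import datetime, timezone


def _fix_timezone_sketch(value):
    """Hypothetical stand-in for the _fix_timezone mixin method (illustration only)."""
    if value is None or value.tzinfo is not None:
        return value
    local_tz = datetime.now(timezone.utc).astimezone().tzinfo
    return value.replace(tzinfo=local_tz)


assert _fix_timezone_sketch(None) is None
assert _fix_timezone_sketch(datetime(2020, 1, 31)).tzinfo is not None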
class DatasetTagSchema(JsonLDSchema):
    """DatasetTag schema."""

    class Meta:
        """Meta class."""

        rdf_type = schema.PublicationEvent
        model = DatasetTag
        unknown = EXCLUDE

    name = fields.String(schema.name)
    description = fields.String(schema.description)
    commit = fields.String(schema.location)
    created = fields.DateTime(schema.startDate, missing=None, format="iso", extra_formats=("%Y-%m-%d",))
    dataset = fields.String(schema.about)
    _id = fields.Id(init_name="id")

    @pre_dump
    def fix_datetimes(self, obj, many=False, **kwargs):
        """Pre dump hook."""
        if many:
            return [self.fix_datetimes(o, many=False, **kwargs) for o in obj]
        object.__setattr__(obj, "created", self._fix_timezone(obj.created))
        return obj
class DatasetSchema(EntitySchema, CreatorMixinSchema):
    """Dataset schema."""

    class Meta:
        """Meta class."""

        rdf_type = schema.Dataset
        model = Dataset
        unknown = EXCLUDE

    _id = fields.Id(init_name="id", missing=None)
    _label = fields.String(rdfs.label, init_name="label", missing=None)
    date_published = fields.DateTime(
        schema.datePublished,
        missing=None,
        allow_none=True,
        format="%Y-%m-%d",
        extra_formats=("iso", "%Y-%m-%dT%H:%M:%S"),
    )
    description = fields.String(schema.description, missing=None)
    identifier = fields.String(schema.identifier)
    in_language = Nested(schema.inLanguage, LanguageSchema, missing=None)
    keywords = fields.List(schema.keywords, fields.String(), missing=None, allow_none=True)
    license = fields.Uri(schema.license, missing=None, allow_none=True)
    title = fields.String(schema.name)
    url = fields.String(schema.url)
    version = fields.String(schema.version, missing=None)
    date_created = fields.DateTime(
        schema.dateCreated, missing=None, allow_none=True, format="iso", extra_formats=("%Y-%m-%d",)
    )
    files = Nested(schema.hasPart, DatasetFileSchema, many=True)
    tags = Nested(schema.subjectOf, DatasetTagSchema, many=True)
    same_as = Nested(schema.sameAs, UrlSchema, missing=None)
    name = fields.String(schema.alternateName)

    @pre_dump
    def fix_datetimes(self, obj, many=False, **kwargs):
        """Pre dump hook."""
        if many:
            return [self.fix_datetimes(o, many=False, **kwargs) for o in obj]
        obj.date_published = self._fix_timezone(obj.date_published)
        obj.date_created = self._fix_timezone(obj.date_created)
        return obj
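# Shape sketch (illustrative only, not produced by running the code): roughly how
# the field definitions above map dataset attributes to schema.org terms when the
# schema is dumped to JSON-LD. All values are invented placeholders, the "@type"
# list is abbreviated (the entity base schema contributes further types), and the
# exact "@id"/"@context" layout depends on calamus serialization settings.
_EXAMPLE_DATASET_JSONLD_SKETCH = {
    "@type": ["http://schema.org/Dataset"],                 # rdf_type from Meta (plus base types)
    "http://schema.org/name": "My dataset title",           # title
    "http://schema.org/alternateName": "my-dataset",        # name (slug)
    "http://schema.org/datePublished": "2020-01-31",        # "%Y-%m-%d" primary format
    "http://schema.org/dateCreated": "2020-01-31T10:00:00+00:00",  # "iso" primary format
    "http://schema.org/hasPart": [],                         # files, DatasetFileSchema entries
    "http://schema.org/subjectOf": [],                       # tags, DatasetTagSchema entries
}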
class DatasetTagSchemaV5(JsonLDSchema):
    """DatasetTag schema."""

    class Meta:
        """Meta class."""

        rdf_type = schema.PublicationEvent
        model = DatasetTag
        unknown = EXCLUDE

    _id = fields.Id()
    commit = fields.String(schema.location)
    created = fields.DateTime(schema.startDate, missing=None)
    dataset = fields.String(schema.about)
    description = fields.String(schema.description)
    name = fields.String(schema.name)
class DatasetFileSchemaV3(EntitySchemaV3):
    """DatasetFile schema."""

    class Meta:
        """Meta class."""

        rdf_type = schema.DigitalDocument
        model = DatasetFile
        unknown = EXCLUDE

    added = fields.DateTime(schema.dateCreated)
    based_on = fields.Nested(schema.isBasedOn, "DatasetFileSchemaV3", missing=None)
    name = fields.String(schema.name, missing=None)
    url = fields.String(schema.url, missing=None)
    external = fields.Boolean(renku.external, missing=False)
class ProjectSchema(JsonLDSchema):
    """Project Schema."""

    class Meta:
        """Meta class."""

        rdf_type = [schema.Project, prov.Location]
        model = Project
        unknown = EXCLUDE

    name = fields.String(schema.name, missing=None)
    created = fields.DateTime(schema.dateCreated, missing=None, format="iso", extra_formats=("%Y-%m-%d",))
    version = fields.String(schema.schemaVersion, missing=1)
    agent_version = fields.String(schema.agent, missing="pre-0.11.0")
    creator = Nested(schema.creator, PersonSchema, missing=None)
    _id = fields.Id(init_name="id", missing=None)

    @pre_dump
    def fix_datetimes(self, obj, many=False, **kwargs):
        """Pre dump hook."""
        if many:
            return [self.fix_datetimes(o, many=False, **kwargs) for o in obj]
        obj.created = self._fix_timezone(obj.created)
        return obj
class DatasetSchema(EntitySchema, CreatorMixinSchema):
    """Dataset schema."""

    class Meta:
        """Meta class."""

        rdf_type = schema.Dataset
        model = Dataset
        unknown = EXCLUDE

    _id = fields.Id(init_name='id', missing=None)
    _label = fields.String(rdfs.label, init_name='label', missing=None)
    date_published = fields.DateTime(schema.datePublished, missing=None)
    description = fields.String(schema.description, missing=None)
    identifier = fields.String(schema.identifier)
    in_language = fields.Nested(
        schema.inLanguage, LanguageSchema, missing=None
    )
    keywords = fields.List(schema.keywords, fields.String())
    license = fields.Uri(schema.license, missing=None, allow_none=True)
    name = fields.String(schema.name)
    url = fields.String(schema.url)
    version = fields.String(schema.version, missing=None)
    created = fields.DateTime(schema.dateCreated, missing=None)
    files = fields.Nested(schema.hasPart, DatasetFileSchema, many=True)
    tags = fields.Nested(schema.subjectOf, DatasetTagSchema, many=True)
    same_as = fields.Nested(schema.sameAs, UrlSchema, missing=None)
    short_name = fields.String(schema.alternateName)

    @pre_load
    def fix_files_context(self, data, **kwargs):
        """Fix DatasetFile context for _label and external fields."""
        context = None

        if '@context' not in data:
            return data

        context = data['@context']
        if not isinstance(context, dict) or 'files' not in context:
            return data

        context.setdefault('rdfs', 'http://www.w3.org/2000/01/rdf-schema#')

        files = data['@context']['files']
        if not isinstance(files, dict) or '@context' not in files:
            return data

        context = files['@context']
        context.setdefault('rdfs', 'http://www.w3.org/2000/01/rdf-schema#')
        context.setdefault('_label', 'rdfs:label')
        context.setdefault('external', 'renku:external')
        context.setdefault(
            'renku', 'https://swissdatasciencecenter.github.io/renku-ontology#'
        )

        return data

    @pre_load
    def migrate_types(self, data, **kwargs):
        """Fix types."""
        from renku.core.utils.migrate import migrate_types

        return migrate_types(data)