class Request(BASE, ModelBase): """Represents a pre-cache request from other service""" __tablename__ = 'requests' request_id = Column(BigInteger().with_variant(Integer, "sqlite"), Sequence('REQUEST_ID_SEQ', schema=DEFAULT_SCHEMA_NAME), primary_key=True) scope = Column(String(SCOPE_LENGTH)) name = Column(String(NAME_LENGTH)) requester = Column(String(20)) request_type = Column(EnumWithValue(RequestType)) transform_tag = Column(String(10)) priority = Column(Integer()) status = Column(EnumWithValue(RequestStatus)) locking = Column(EnumWithValue(RequestLocking)) workload_id = Column(Integer()) created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow) updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow) accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow) expired_at = Column("expired_at", DateTime) errors = Column(JSON()) request_metadata = Column(JSON()) processing_metadata = Column(JSON()) _table_args = (PrimaryKeyConstraint('request_id', name='_REQUESTS_PK'), CheckConstraint('status IS NOT NULL', name='REQ_STATUS_ID_NN'), UniqueConstraint('name', 'scope', 'requester', 'request_type', 'transform_tag', 'workload_id', name='REQUESTS_NAME_SCOPE_UQ '), Index('REQUESTS_SCOPE_NAME_IDX', 'scope', 'name', 'workload_id'), Index('REQUESTS_STATUS_PRIO_IDX', 'status', 'priority', 'request_id'))
class Message(BASE, ModelBase): """Represents the event messages""" __tablename__ = 'messages' msg_id = Column(BigInteger().with_variant(Integer, "sqlite"), Sequence('MESSAGE_ID_SEQ', schema=DEFAULT_SCHEMA_NAME), primary_key=True) msg_type = Column(EnumWithValue(MessageType)) status = Column(EnumWithValue(MessageStatus)) substatus = Column(Integer()) locking = Column(EnumWithValue(MessageLocking)) source = Column(EnumWithValue(MessageSource)) destination = Column(EnumWithValue(MessageDestination)) request_id = Column(BigInteger().with_variant(Integer, "sqlite")) workload_id = Column(Integer()) transform_id = Column(Integer()) processing_id = Column(Integer()) num_contents = Column(Integer()) created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow) updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow) msg_content = Column(JSON()) _table_args = (PrimaryKeyConstraint('msg_id', name='MESSAGES_PK'), Index('MESSAGES_TYPE_ST_IDX', 'msg_type', 'status', 'destination', 'request_id'), Index('MESSAGES_TYPE_ST_TF_IDX', 'msg_type', 'status', 'destination', 'transform_id'), Index('MESSAGES_TYPE_ST_PR_IDX', 'msg_type', 'status', 'destination', 'processing_id'))
class Workprogress(BASE, ModelBase):
    """Represents a workprogress which monitors the progress of a workflow"""
    __tablename__ = 'workprogresses'
    workprogress_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                             Sequence('WORKPROGRESS_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                             primary_key=True)
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    workload_id = Column(Integer())
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    # requester = Column(String(20))
    # request_type = Column(EnumWithValue(RequestType))
    # transform_tag = Column(String(20))
    # workload_id = Column(Integer())
    priority = Column(Integer())
    status = Column(EnumWithValue(WorkprogressStatus))
    substatus = Column(EnumWithValue(WorkprogressStatus), default=0)
    locking = Column(EnumWithValue(WorkprogressLocking))
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    errors = Column(JSON())
    workprogress_metadata = Column(JSON())
    processing_metadata = Column(JSON())

    _table_args = (PrimaryKeyConstraint('workprogress_id', name='WORKPROGRESS_PK'),
                   ForeignKeyConstraint(['request_id'], ['requests.request_id'], name='REQ2WORKPROGRESS_REQ_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='WORKPROGRESS_STATUS_ID_NN'),
                   # UniqueConstraint('name', 'scope', 'requester', 'request_type', 'transform_tag', 'workload_id', name='REQUESTS_NAME_SCOPE_UQ'),
                   Index('WORKPROGRESS_SCOPE_NAME_IDX', 'workprogress_id', 'request_id', 'name', 'scope'),
                   Index('WORKPROGRESS_STATUS_PRIO_IDX', 'status', 'priority', 'workprogress_id', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class Collection(BASE, ModelBase):
    """Represents a collection"""
    __tablename__ = 'collections'
    coll_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                     Sequence('COLLECTION_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                     primary_key=True)
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    workload_id = Column(Integer())
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    coll_type = Column(EnumWithValue(CollectionType))
    relation_type = Column(EnumWithValue(CollectionRelationType))
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    bytes = Column(Integer())
    status = Column(EnumWithValue(CollectionStatus))
    substatus = Column(EnumWithValue(CollectionStatus), default=0)
    locking = Column(EnumWithValue(CollectionLocking))
    total_files = Column(Integer())
    storage_id = Column(Integer())
    new_files = Column(Integer())
    processed_files = Column(Integer())
    processing_files = Column(Integer())
    processing_id = Column(Integer())
    retries = Column(Integer(), default=0)
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    coll_metadata = Column(JSON())

    _table_args = (PrimaryKeyConstraint('coll_id', name='COLLECTIONS_PK'),
                   UniqueConstraint('name', 'scope', 'transform_id', 'relation_type', name='COLLECTIONS_NAME_SCOPE_UQ'),
                   ForeignKeyConstraint(['transform_id'], ['transforms.transform_id'], name='COLLECTIONS_TRANSFORM_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='COLLECTIONS_STATUS_ID_NN'),
                   CheckConstraint('transform_id IS NOT NULL', name='COLLECTIONS_TRANSFORM_ID_NN'),
                   Index('COLLECTIONS_STATUS_RELAT_IDX', 'status', 'relation_type'),
                   Index('COLLECTIONS_TRANSFORM_IDX', 'transform_id', 'coll_id'),
                   Index('COLLECTIONS_STATUS_UPDATED_IDX', 'status', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class Content(BASE, ModelBase):
    """Represents a content"""
    __tablename__ = 'contents'
    content_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                        Sequence('CONTENT_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                        primary_key=True)
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    coll_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    map_id = Column(BigInteger().with_variant(Integer, "sqlite"), default=0)
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    min_id = Column(Integer())
    max_id = Column(Integer())
    content_type = Column(EnumWithValue(ContentType))
    status = Column(EnumWithValue(ContentStatus))
    substatus = Column(EnumWithValue(ContentStatus))
    locking = Column(EnumWithValue(ContentLocking))
    bytes = Column(Integer())
    md5 = Column(String(32))
    adler32 = Column(String(8))
    processing_id = Column(Integer())
    storage_id = Column(Integer())
    retries = Column(Integer(), default=0)
    path = Column(String(4000))
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    content_metadata = Column(JSON())

    _table_args = (PrimaryKeyConstraint('content_id', name='CONTENTS_PK'),
                   # UniqueConstraint('name', 'scope', 'coll_id', 'content_type', 'min_id', 'max_id', name='CONTENT_SCOPE_NAME_UQ'),
                   # UniqueConstraint('name', 'scope', 'coll_id', 'min_id', 'max_id', name='CONTENT_SCOPE_NAME_UQ'),
                   # UniqueConstraint('content_id', 'coll_id', name='CONTENTS_UQ'),
                   UniqueConstraint('transform_id', 'coll_id', 'map_id', name='CONTENT_ID_UQ'),
                   ForeignKeyConstraint(['transform_id'], ['transforms.transform_id'], name='CONTENTS_TRANSFORM_ID_FK'),
                   ForeignKeyConstraint(['coll_id'], ['collections.coll_id'], name='CONTENTS_COLL_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='CONTENTS_STATUS_ID_NN'),
                   CheckConstraint('coll_id IS NOT NULL', name='CONTENTS_COLL_ID_NN'),
                   Index('CONTENTS_STATUS_UPDATED_IDX', 'status', 'locking', 'updated_at', 'created_at'))
class Processing(BASE, ModelBase):
    """Represents a processing"""
    __tablename__ = 'processings'
    processing_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                           Sequence('PROCESSING_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                           primary_key=True)
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    status = Column(EnumWithValue(ProcessingStatus))
    substatus = Column(EnumWithValue(ProcessingStatus))
    locking = Column(EnumWithValue(ProcessingLocking))
    submitter = Column(String(20))
    submitted_id = Column(Integer())
    granularity = Column(Integer())
    granularity_type = Column(EnumWithValue(GranularityType))
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    submitted_at = Column("submitted_at", DateTime)
    finished_at = Column("finished_at", DateTime)
    expired_at = Column("expired_at", DateTime)
    processing_metadata = Column(JSON())
    output_metadata = Column(JSON())

    _table_args = (PrimaryKeyConstraint('processing_id', name='PROCESSINGS_PK'),
                   ForeignKeyConstraint(['transform_id'], ['transforms.transform_id'], name='PROCESSINGS_TRANSFORM_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='PROCESSINGS_STATUS_ID_NN'),
                   CheckConstraint('transform_id IS NOT NULL', name='PROCESSINGS_TRANSFORM_ID_NN'),
                   Index('PROCESSINGS_STATUS_UPDATED_IDX', 'status', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class Transform(BASE, ModelBase):
    """Represents a transform"""
    __tablename__ = 'transforms'
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                          Sequence('TRANSFORM_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                          primary_key=True)
    transform_type = Column(EnumWithValue(TransformType))
    transform_tag = Column(String(20))
    priority = Column(Integer())
    safe2get_output_from_input = Column(Integer())
    status = Column(EnumWithValue(TransformStatus))
    substatus = Column(Integer())
    locking = Column(EnumWithValue(TransformLocking))
    retries = Column(Integer(), default=0)
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    started_at = Column("started_at", DateTime)
    finished_at = Column("finished_at", DateTime)
    expired_at = Column("expired_at", DateTime)
    transform_metadata = Column(JSON())

    _table_args = (PrimaryKeyConstraint('transform_id', name='TRANSFORMS_PK'),
                   CheckConstraint('status IS NOT NULL', name='TRANSFORMS_STATUS_ID_NN'),
                   Index('TRANSFORMS_TYPE_TAG_IDX', 'transform_type', 'transform_tag', 'transform_id'),
                   Index('TRANSFORMS_STATUS_UPDATED_IDX', 'status', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class Processing(BASE, ModelBase):
    """Represents a processing"""
    __tablename__ = 'processings'
    processing_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                           Sequence('PROCESSING_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                           primary_key=True)
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    workload_id = Column(Integer())
    status = Column(EnumWithValue(ProcessingStatus))
    substatus = Column(EnumWithValue(ProcessingStatus), default=0)
    locking = Column(EnumWithValue(ProcessingLocking))
    submitter = Column(String(20))
    submitted_id = Column(Integer())
    granularity = Column(Integer())
    granularity_type = Column(EnumWithValue(GranularityType))
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    submitted_at = Column("submitted_at", DateTime)
    finished_at = Column("finished_at", DateTime)
    expired_at = Column("expired_at", DateTime)
    # The processing object lives in 'processing_metadata'; its mutable
    # .metadata is stored separately in 'running_metadata' as 'processing_data'.
    _processing_metadata = Column('processing_metadata', JSON())
    _running_metadata = Column('running_metadata', JSON())
    output_metadata = Column(JSON())

    @property
    def processing_metadata(self):
        # Re-attach the separately stored processing data before returning.
        if self._processing_metadata and 'processing' in self._processing_metadata:
            proc = self._processing_metadata['processing']
            proc_data = None
            if self._running_metadata and 'processing_data' in self._running_metadata:
                proc_data = self._running_metadata['processing_data']
            if proc is not None and proc_data is not None:
                proc.metadata = proc_data
            self._processing_metadata['processing'] = proc
        return self._processing_metadata

    @processing_metadata.setter
    def processing_metadata(self, processing_metadata):
        if self._processing_metadata is None:
            self._processing_metadata = processing_metadata
        if self._running_metadata is None:
            self._running_metadata = {}
        if processing_metadata and 'processing' in processing_metadata:
            proc = processing_metadata['processing']
            self._running_metadata['processing_data'] = proc.metadata

    @property
    def running_metadata(self):
        return self._running_metadata

    @running_metadata.setter
    def running_metadata(self, running_metadata):
        if self._running_metadata is None:
            self._running_metadata = {}
        if running_metadata:
            for k in running_metadata:
                if k != 'processing_data':
                    self._running_metadata[k] = running_metadata[k]

    def _items_extend(self):
        return [('processing_metadata', self.processing_metadata),
                ('running_metadata', self.running_metadata)]

    def update(self, values, flush=True, session=None):
        # Route metadata updates to the backing columns before delegating to ModelBase.update.
        if values and 'processing_metadata' in values and 'processing' in values['processing_metadata']:
            proc = values['processing_metadata']['processing']
            if proc is not None:
                if 'running_metadata' not in values:
                    values['running_metadata'] = {}
                values['running_metadata']['processing_data'] = proc.metadata
        if values and 'processing_metadata' in values:
            del values['processing_metadata']
        if values and 'running_metadata' in values:
            values['_running_metadata'] = values['running_metadata']
            del values['running_metadata']
        super(Processing, self).update(values, flush, session)

    _table_args = (PrimaryKeyConstraint('processing_id', name='PROCESSINGS_PK'),
                   ForeignKeyConstraint(['transform_id'], ['transforms.transform_id'], name='PROCESSINGS_TRANSFORM_ID_FK'),
                   CheckConstraint('status IS NOT NULL', name='PROCESSINGS_STATUS_ID_NN'),
                   CheckConstraint('transform_id IS NOT NULL', name='PROCESSINGS_TRANSFORM_ID_NN'),
                   Index('PROCESSINGS_STATUS_UPDATED_IDX', 'status', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class Transform(BASE, ModelBase):
    """Represents a transform"""
    __tablename__ = 'transforms'
    transform_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                          Sequence('TRANSFORM_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                          primary_key=True)
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"))
    workload_id = Column(Integer())
    transform_type = Column(EnumWithValue(TransformType))
    transform_tag = Column(String(20))
    priority = Column(Integer())
    safe2get_output_from_input = Column(Integer())
    status = Column(EnumWithValue(TransformStatus))
    substatus = Column(EnumWithValue(TransformStatus), default=0)
    locking = Column(EnumWithValue(TransformLocking))
    retries = Column(Integer(), default=0)
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    started_at = Column("started_at", DateTime)
    finished_at = Column("finished_at", DateTime)
    expired_at = Column("expired_at", DateTime)
    _transform_metadata = Column('transform_metadata', JSON())
    _running_metadata = Column('running_metadata', JSON())

    @property
    def transform_metadata(self):
        if self._transform_metadata and 'work' in self._transform_metadata:
            work = self._transform_metadata['work']
            work_data = None
            if self._running_metadata and 'work_data' in self._running_metadata:
                work_data = self._running_metadata['work_data']
            if work is not None and work_data is not None:
                work.metadata = work_data
            self._transform_metadata['work'] = work
        return self._transform_metadata

    @transform_metadata.setter
    def transform_metadata(self, transform_metadata):
        if self._transform_metadata is None:
            self._transform_metadata = transform_metadata
        if self._running_metadata is None:
            self._running_metadata = {}
        if transform_metadata and 'work' in transform_metadata:
            work = transform_metadata['work']
            self._running_metadata['work_data'] = work.metadata

    @property
    def running_metadata(self):
        return self._running_metadata

    @running_metadata.setter
    def running_metadata(self, running_metadata):
        if self._running_metadata is None:
            self._running_metadata = {}
        if running_metadata:
            for k in running_metadata:
                if k != 'work_data':
                    self._running_metadata[k] = running_metadata[k]

    def _items_extend(self):
        return [('transform_metadata', self.transform_metadata),
                ('running_metadata', self.running_metadata)]

    def update(self, values, flush=True, session=None):
        if values and 'transform_metadata' in values and 'work' in values['transform_metadata']:
            work = values['transform_metadata']['work']
            if work is not None:
                if 'running_metadata' not in values:
                    values['running_metadata'] = {}
                values['running_metadata']['work_data'] = work.metadata
        if values and 'transform_metadata' in values:
            del values['transform_metadata']
        if values and 'running_metadata' in values:
            values['_running_metadata'] = values['running_metadata']
            del values['running_metadata']
        super(Transform, self).update(values, flush, session)

    _table_args = (PrimaryKeyConstraint('transform_id', name='TRANSFORMS_PK'),
                   CheckConstraint('status IS NOT NULL', name='TRANSFORMS_STATUS_ID_NN'),
                   Index('TRANSFORMS_TYPE_TAG_IDX', 'transform_type', 'transform_tag', 'transform_id'),
                   Index('TRANSFORMS_STATUS_UPDATED_IDX', 'status', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
class Request(BASE, ModelBase):
    """Represents a pre-cache request from other service"""
    __tablename__ = 'requests'
    request_id = Column(BigInteger().with_variant(Integer, "sqlite"),
                        Sequence('REQUEST_ID_SEQ', schema=DEFAULT_SCHEMA_NAME),
                        primary_key=True)
    scope = Column(String(SCOPE_LENGTH))
    name = Column(String(NAME_LENGTH))
    requester = Column(String(20))
    request_type = Column(EnumWithValue(RequestType))
    transform_tag = Column(String(20))
    workload_id = Column(Integer())
    priority = Column(Integer())
    status = Column(EnumWithValue(RequestStatus))
    substatus = Column(EnumWithValue(RequestStatus), default=0)
    locking = Column(EnumWithValue(RequestLocking))
    created_at = Column("created_at", DateTime, default=datetime.datetime.utcnow)
    updated_at = Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    next_poll_at = Column("next_poll_at", DateTime, default=datetime.datetime.utcnow)
    accessed_at = Column("accessed_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    expired_at = Column("expired_at", DateTime)
    errors = Column(JSON())
    _request_metadata = Column('request_metadata', JSON())
    _processing_metadata = Column('processing_metadata', JSON())

    @property
    def request_metadata(self):
        if self._request_metadata and 'workflow' in self._request_metadata:
            workflow = self._request_metadata['workflow']
            workflow_data = None
            if self._processing_metadata and 'workflow_data' in self._processing_metadata:
                workflow_data = self._processing_metadata['workflow_data']
            if workflow is not None and workflow_data is not None:
                workflow.metadata = workflow_data
            self._request_metadata['workflow'] = workflow
        return self._request_metadata

    @request_metadata.setter
    def request_metadata(self, request_metadata):
        if self._request_metadata is None:
            self._request_metadata = request_metadata
        if self._processing_metadata is None:
            self._processing_metadata = {}
        if request_metadata and 'workflow' in request_metadata:
            workflow = request_metadata['workflow']
            self._processing_metadata['workflow_data'] = workflow.metadata

    @property
    def processing_metadata(self):
        return self._processing_metadata

    @processing_metadata.setter
    def processing_metadata(self, processing_metadata):
        if self._processing_metadata is None:
            self._processing_metadata = {}
        if processing_metadata:
            for k in processing_metadata:
                if k != 'workflow_data':
                    self._processing_metadata[k] = processing_metadata[k]

    def _items_extend(self):
        return [('request_metadata', self.request_metadata),
                ('processing_metadata', self.processing_metadata)]

    def update(self, values, flush=True, session=None):
        if values and 'request_metadata' in values and 'workflow' in values['request_metadata']:
            workflow = values['request_metadata']['workflow']
            if workflow is not None:
                if 'processing_metadata' not in values:
                    values['processing_metadata'] = {}
                values['processing_metadata']['workflow_data'] = workflow.metadata
        if values and 'request_metadata' in values:
            del values['request_metadata']
        if values and 'processing_metadata' in values:
            values['_processing_metadata'] = values['processing_metadata']
            del values['processing_metadata']
        super(Request, self).update(values, flush, session)

    _table_args = (PrimaryKeyConstraint('request_id', name='REQUESTS_PK'),
                   CheckConstraint('status IS NOT NULL', name='REQUESTS_STATUS_ID_NN'),
                   # UniqueConstraint('name', 'scope', 'requester', 'request_type', 'transform_tag', 'workload_id', name='REQUESTS_NAME_SCOPE_UQ'),
                   Index('REQUESTS_SCOPE_NAME_IDX', 'workload_id', 'request_id', 'name', 'scope'),
                   Index('REQUESTS_STATUS_PRIO_IDX', 'status', 'priority', 'workload_id', 'request_id', 'locking', 'updated_at', 'next_poll_at', 'created_at'))
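# ---------------------------------------------------------------------------
# Illustration only, not part of the schema: the reworked Processing, Transform
# and Request models above all follow the same pattern, splitting the attached
# object's mutable ``.metadata`` out of the main JSON column and into a
# companion column ('running_metadata' or 'processing_metadata'), so that
# frequent state updates rewrite a smaller JSON blob. The sketch below shows
# the intended behaviour of the Request properties with a stand-in workflow
# object; the real iDDS workflow class is not assumed here.
if __name__ == '__main__':
    class _FakeWorkflow(object):
        """Stand-in for a workflow object carrying a mutable ``metadata`` dict."""
        def __init__(self):
            self.metadata = {'step': 0}

    req = Request()
    wf = _FakeWorkflow()
    # Setting request_metadata keeps the workflow in request_metadata and
    # copies its mutable metadata into processing_metadata['workflow_data'].
    req.request_metadata = {'workflow': wf}
    assert req.processing_metadata['workflow_data'] == {'step': 0}
    # Reading request_metadata re-attaches the stored data to the workflow.
    wf.metadata = None
    assert req.request_metadata['workflow'].metadata == {'step': 0}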