def generic_constraint(self, name, table_name, type_, schema=None, **kw):
    """Build a constraint of the requested kind on *table_name* and return it.

    ``type_`` selects the constraint class; ``None`` yields a generic
    ``Constraint``.  An unknown ``type_`` raises ``TypeError``.
    """
    target = self.table(table_name, schema=schema)
    builders = {
        "foreignkey": lambda name: sa_schema.ForeignKeyConstraint([], [], name=name),
        "primary": sa_schema.PrimaryKeyConstraint,
        "unique": sa_schema.UniqueConstraint,
        "check": lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        builder = builders[type_]
    except KeyError as ke:
        # raise_ is the py2/py3 exception-chaining compatibility shim.
        raise_(
            TypeError(
                "'type' can be one of %s"
                % ", ".join(sorted(repr(x) for x in builders))
            ),
            from_=ke,
        )
    else:
        constraint = builder(name=name)
    target.append_constraint(constraint)
    return constraint
class ResidenceOccupancy(Base):
    """Association between a User and the Residence they occupy.

    Both foreign keys are composite and carry a community id so the CHECK
    constraint can enforce that the user and residence belong to the same
    community.
    """
    __tablename__ = "residence_occupancy"
    __table_args__ = (
        sql_schema.ForeignKeyConstraint(["user_id", "u_community_id"],
                                        ["user.id", "user.community_id"]),
        sql_schema.ForeignKeyConstraint(
            ["residence_id", "r_community_id"],
            ["residence.id", "residence.community_id"],
        ),
        # Occupant and residence must be in the same community.
        sql_schema.CheckConstraint("u_community_id = r_community_id"),
    )
    residence_id = Column(sql_types.Integer, nullable=False, primary_key=True)
    user_id = Column(sql_types.Integer, nullable=False, primary_key=True)
    r_community_id = Column(sql_types.Integer, nullable=False)
    u_community_id = Column(sql_types.Integer, nullable=False)
    relationship_name = Column(sql_types.String(40), nullable=False,
                               default="Occupant")
    is_owner = Column(sql_types.Boolean, nullable=False, default=False)

    # Read-only convenience accessors to the joined rows.
    residence = orm.relationship("Residence", lazy=True, viewonly=True)
    user = orm.relationship("User", lazy=True, viewonly=True)

    def __repr__(self):
        return "ResidenceOccupancy()"
def generic_constraint(self, name: Optional[str], table_name: str, type_: Optional[str], schema: Optional[str] = None, **kw) -> Any:
    """Construct a constraint of kind *type_* attached to *table_name*.

    Raises ``TypeError`` (chained from the lookup ``KeyError``) when
    ``type_`` is not a recognized kind.
    """
    target = self.table(table_name, schema=schema)
    builders: Dict[Optional[str], Any] = {
        "foreignkey": lambda name: sa_schema.ForeignKeyConstraint([], [], name=name),
        "primary": sa_schema.PrimaryKeyConstraint,
        "unique": sa_schema.UniqueConstraint,
        "check": lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        builder = builders[type_]
    except KeyError as ke:
        raise TypeError(
            "'type' can be one of %s"
            % ", ".join(sorted(repr(x) for x in builders))
        ) from ke
    else:
        constraint = builder(name=name)
    target.append_constraint(constraint)
    return constraint
def drop_constraint(self, name, tablename, type=None):
    """Drop a constraint of the given name, typically via DROP CONSTRAINT.

    :param name: name of the constraint.
    :param tablename: tablename.
    :param type: optional, required on MySQL.  can be
     'foreignkey', 'primary', 'unique', or 'check'.

    .. versionadded:: 0.3.6 'primary' qualifier to enable
       dropping of MySQL primary key constraints.

    """
    target = self._table(tablename)
    # Map the user-facing keyword to a constraint factory; ``None`` yields
    # a generic Constraint placeholder.
    factories = {
        'foreignkey': lambda name: schema.ForeignKeyConstraint([], [], name=name),
        'primary': schema.PrimaryKeyConstraint,
        'unique': schema.UniqueConstraint,
        'check': lambda name: schema.CheckConstraint("", name=name),
        None: schema.Constraint,
    }
    try:
        make_constraint = factories[type]
    except KeyError:
        raise TypeError("'type' can be one of %s" %
                        ", ".join(sorted(repr(x) for x in factories)))
    constraint = make_constraint(name=name)
    target.append_constraint(constraint)
    self.impl.drop_constraint(constraint)
class ProfileServiceSource(Base):
    """Link table between a profile service and a directory source.

    Composite FKs carry the tenant uuid so the CHECK constraint can
    ensure both sides belong to the same tenant; deleting either parent
    cascades to this row.
    """
    __tablename__ = 'dird_profile_service_source'
    __table_args__ = (
        schema.ForeignKeyConstraint(
            ['profile_service_uuid', 'profile_tenant_uuid'],
            [
                'dird_profile_service.uuid',
                'dird_profile_service.profile_tenant_uuid'
            ],
            ondelete='CASCADE',
            name='dird_profile_service_source_profile_service_uuid_tenant_fkey',
        ),
        schema.ForeignKeyConstraint(
            ['source_uuid', 'source_tenant_uuid'],
            ['dird_source.uuid', 'dird_source.tenant_uuid'],
            ondelete='CASCADE',
            name='dird_profile_service_source_source_uuid_tenant_fkey',
        ),
        # Both linked rows must live in the same tenant.
        schema.CheckConstraint('profile_tenant_uuid = source_tenant_uuid'),
    )

    profile_service_uuid = Column(String(UUID_LENGTH), primary_key=True)
    profile_tenant_uuid = Column(String(UUID_LENGTH))
    source_uuid = Column(String(UUID_LENGTH), primary_key=True)
    source_tenant_uuid = Column(String(UUID_LENGTH))

    sources = relationship('Source')
class Profile(Base):
    """A directory profile, unique per (name, tenant).

    The (uuid, tenant_uuid) unique pair lets child tables reference the
    profile with a composite FK; the display FK is tenant-checked and set
    to NULL when the display is deleted.
    """
    __tablename__ = 'dird_profile'
    __table_args__ = (
        schema.UniqueConstraint('uuid', 'tenant_uuid'),
        schema.UniqueConstraint('name', 'tenant_uuid'),
        schema.ForeignKeyConstraint(
            ['display_uuid', 'display_tenant_uuid'],
            ['dird_display.uuid', 'dird_display.tenant_uuid'],
            ondelete='SET NULL',
            name='dird_profile_display_uuid_tenant_fkey',
        ),
        # The referenced display must belong to the profile's tenant.
        schema.CheckConstraint('tenant_uuid = display_tenant_uuid'),
    )

    uuid = Column(String(UUID_LENGTH),
                  server_default=text('uuid_generate_v4()'),
                  primary_key=True)
    tenant_uuid = Column(String(UUID_LENGTH),
                         ForeignKey('dird_tenant.uuid', ondelete='CASCADE'))
    name = Column(Text(), nullable=False)
    display_tenant_uuid = Column(String(UUID_LENGTH))
    display_uuid = Column(String(UUID_LENGTH))

    display = relationship('Display')
    services = relationship('ProfileService')
def _check_constraint(self, name, source, condition, schema=None, **kw):
    """Return a named CheckConstraint bound to a placeholder table.

    The single-column table gives the constraint a parent so it can be
    compiled; the 'x' column itself is never used.
    """
    placeholder = sa_schema.Table(
        source,
        self._metadata(),
        sa_schema.Column('x', Integer),
        schema=schema,
    )
    constraint = sa_schema.CheckConstraint(condition, name=name, **kw)
    placeholder.append_constraint(constraint)
    return constraint
def _set_table(self, column, table):
    """Attach a CHECK constraint restricting *column* to 0/1 on *table*.

    The ``_create_rule`` defers to ``self._should_create_constraint`` so
    dialects with a native boolean type can skip the constraint.
    """
    constraint = schema.CheckConstraint(
        column.in_([0, 1]),
        name=self.name,
        _create_rule=util.portable_instancemethod(
            self._should_create_constraint),
    )
    table.append_constraint(constraint)
class Phonebook(Base):
    """A named, per-tenant phonebook; names are unique within a tenant
    and must be non-empty."""
    __tablename__ = 'dird_phonebook'
    __table_args__ = (
        schema.UniqueConstraint('name', 'tenant_uuid'),
        schema.CheckConstraint("name != ''"),
    )

    id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False)
    description = Column(Text)
    tenant_uuid = Column(String(UUID_LENGTH), ForeignKey('dird_tenant.uuid'))
class BillingCharge(Base):
    """A single charge billed to a user or a residence (never both).

    Monetary values are stored as integer cents in ``base_amount``; the
    ``amount`` hybrid exposes them as Decimal dollars.
    """
    __tablename__ = "billing_charge"
    __table_args__ = (
        sql_schema.ForeignKeyConstraint(["user_id", "u_community_id"],
                                        ["user.id", "user.community_id"]),
        sql_schema.ForeignKeyConstraint(
            ["residence_id", "r_community_id"],
            ["residence.id", "residence.community_id"],
        ),
        # Charge must stay within one community.
        sql_schema.CheckConstraint("u_community_id = r_community_id"),
        # Target either a residence or a user, not both.
        sql_schema.CheckConstraint("residence_id is null or user_id is null"),
    )
    id = Column(sql_types.Integer, primary_key=True)
    residence_id = Column(sql_types.Integer, nullable=True)
    user_id = Column(sql_types.Integer, nullable=True)
    r_community_id = Column(sql_types.Integer, nullable=True)
    u_community_id = Column(sql_types.Integer, nullable=True)
    name = Column(sql_types.String(200), nullable=False)
    # Integer cents; see the ``amount`` hybrid below.
    base_amount = Column(sql_types.Integer, nullable=False)
    paid = Column(sql_types.Boolean, nullable=False)
    charge_date = Column(sql_types.Date, nullable=False)
    due_date = Column(sql_types.Date, nullable=False)

    def __repr__(self):
        return (f"BillingCharge("
                f"name={self.name},"
                f"amount={self.amount},"
                f"paid={self.paid},"
                f"charge_date={self.charge_date},"
                f"due_date={self.due_date},"
                f")")

    @hybrid_property
    def amount(self):
        # Convert stored cents to Decimal dollars (read-only: no setter).
        return decimal.Decimal(".01") * self.base_amount

    @property
    def formatted_amount(self):
        return f"${self.amount}"
def drop_constraint(self, name, table_name, type_=None, schema=None):
    """Drop a constraint of the given name, typically via DROP CONSTRAINT.

    :param name: name of the constraint.
    :param table_name: table name.

     .. versionchanged:: 0.5.0
        The ``tablename`` parameter is now named ``table_name``.
        As this is a positional argument, the old name is no
        longer present.

    :param ``type_``: optional, required on MySQL.  can be
     'foreignkey', 'primary', 'unique', or 'check'.

     .. versionchanged:: 0.5.0
        The ``type`` parameter is now named ``type_``.  The old name
        ``type`` will remain for backwards compatibility.

    .. versionadded:: 0.3.6 'primary' qualifier to enable
       dropping of MySQL primary key constraints.

    :param schema: Optional schema name to operate within.

    .. versionadded:: 0.4.0

    """
    target = self._table(table_name, schema=schema)
    # Factory per user-facing kind keyword; ``None`` yields a generic
    # Constraint placeholder.
    factories = {
        'foreignkey': lambda name: sa_schema.ForeignKeyConstraint([], [], name=name),
        'primary': sa_schema.PrimaryKeyConstraint,
        'unique': sa_schema.UniqueConstraint,
        'check': lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        make_constraint = factories[type_]
    except KeyError:
        raise TypeError("'type' can be one of %s" %
                        ", ".join(sorted(repr(x) for x in factories)))
    constraint = make_constraint(name=name)
    target.append_constraint(constraint)
    self.impl.drop_constraint(constraint)
def check_constraint(self, name: Optional[str], source: str, condition: Union["TextClause", "ColumnElement[Any]"], schema: Optional[str] = None, **kw) -> "CheckConstraint":
    """Return a named CheckConstraint for table *source*.

    :param name: constraint name (may be ``None`` for an unnamed constraint).
    :param source: name of the table the constraint belongs to.
    :param condition: SQL expression or text clause for the CHECK body.
    :param schema: optional schema qualifier for the table.
    :return: the new :class:`CheckConstraint`, already attached to a
        placeholder table.
    """
    # A single-column placeholder Table gives the constraint a parent so
    # it can be compiled; the 'x' column itself is never used.
    # Fix: the return annotation was ``Union["CheckConstraint"]`` — a
    # degenerate one-member Union; a plain forward reference is the
    # equivalent, idiomatic form.
    t = sa_schema.Table(
        source,
        self.metadata(),
        sa_schema.Column("x", Integer),
        schema=schema,
    )
    ck = sa_schema.CheckConstraint(condition, name=name, **kw)
    t.append_constraint(ck)
    return ck
def generic_constraint(self, name, table_name, type_, schema=None, **kw):
    """Create and attach a constraint of kind *type_*, returning it.

    Unknown kinds raise ``TypeError`` listing the accepted keywords.
    """
    target = self.table(table_name, schema=schema)
    builders = {
        'foreignkey': lambda name: sa_schema.ForeignKeyConstraint([], [], name=name),
        'primary': sa_schema.PrimaryKeyConstraint,
        'unique': sa_schema.UniqueConstraint,
        'check': lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        builder = builders[type_]
    except KeyError:
        raise TypeError("'type' can be one of %s" %
                        ", ".join(sorted(repr(x) for x in builders)))
    else:
        constraint = builder(name=name)
    target.append_constraint(constraint)
    return constraint
class TaskGroupTask(WithContact, Titled, Described, RelativeTimeboxed,
                    Slugged, Indexed, db.Model):
    """Workflow TaskGroupTask model."""

    __tablename__ = 'task_group_tasks'
    # DB-level guard mirroring validate_end_date below.
    _extra_table_args = (schema.CheckConstraint('start_date <= end_date'), )
    _title_uniqueness = False
    # Set by validate_end_date when start_date is assigned, so the
    # start/end comparison only fires once both dates were touched.
    _start_changed = False

    @classmethod
    def default_task_type(cls):
        # Default value for the task_type column.
        return "text"

    @classmethod
    def generate_slug_prefix_for(cls, obj):
        return "TASK"

    task_group_id = db.Column(
        db.Integer,
        db.ForeignKey('task_groups.id', ondelete="CASCADE"),
        nullable=False,
    )
    sort_index = db.Column(db.String(length=250), default="", nullable=False)
    object_approval = db.Column(db.Boolean, nullable=False, default=False)
    task_type = db.Column(db.String(length=250), default=default_task_type,
                          nullable=False)
    response_options = db.Column(JsonType(), nullable=False, default=[])

    VALID_TASK_TYPES = ['text', 'menu', 'checkbox']

    @orm.validates('task_type')
    def validate_task_type(self, key, value):
        # pylint: disable=unused-argument
        """Coerce a missing task_type to the default; reject unknown values."""
        if value is None:
            value = self.default_task_type()
        if value not in self.VALID_TASK_TYPES:
            raise ValueError(u"Invalid type '{}'".format(value))
        return value

    def validate_date(self, value):
        """Normalize datetimes to dates and repair two-digit years.

        NOTE(review): the century arithmetic relies on integer division
        (Python 2 semantics); under Python 3 ``/`` would produce a float
        -- confirm the target runtime before reuse.
        """
        if isinstance(value, datetime):
            value = value.date()
        if value is not None and value.year <= 1900:
            # Years typed as e.g. "15" parse as year 15; shift them into
            # the current century.
            current_century = date.today().year / 100 * 100
            year = current_century + value.year % 100
            return date(year, value.month, value.day)
        return value

    @orm.validates("start_date", "end_date")
    def validate_end_date(self, key, value):
        """Validate a date value and reject start_date > end_date."""
        value = self.validate_date(value)
        if key == "start_date":
            self._start_changed = True
        if key == "end_date" and self._start_changed and self.start_date > value:
            self._start_changed = False
            raise ValueError("Start date can not be after end date.")
        return value

    _publish_attrs = [
        'task_group',
        'sort_index',
        'relative_start_month',
        'relative_start_day',
        'relative_end_month',
        'relative_end_day',
        'object_approval',
        'task_type',
        'response_options'
    ]
    _sanitize_html = []
    # Import/export column aliases and help text.
    _aliases = {
        "title": "Summary",
        "description": {
            "display_name": "Task Description",
            "handler_key": "task_description",
        },
        "contact": {
            "display_name": "Assignee",
            "mandatory": True,
        },
        "secondary_contact": None,
        "start_date": None,
        "end_date": None,
        "task_group": {
            "display_name": "Task Group",
            "mandatory": True,
            "filter_by": "_filter_by_task_group",
        },
        "relative_start_date": {
            "display_name": "Start",
            "mandatory": True,
            "description": ("Enter the task start date in the following format:\n"
                            "'mm/dd/yyyy' for one time workflows\n"
                            "'#' for weekly workflows (where # represents day "
                            "of the week & Monday = day 1)\n"
                            "'dd' for monthly workflows\n"
                            "'mmm/mmm/mmm/mmm dd' for monthly workflows "
                            "e.g. feb/may/aug/nov 17\n"
                            "'mm/dd' for yearly workflows"),
        },
        "relative_end_date": {
            "display_name": "End",
            "mandatory": True,
            "description": ("Enter the task end date in the following format:\n"
                            "'mm/dd/yyyy' for one time workflows\n"
                            "'#' for weekly workflows (where # represents day "
                            "of the week & Monday = day 1)\n"
                            "'dd' for monthly workflows\n"
                            "'mmm/mmm/mmm/mmm dd' for monthly workflows "
                            "e.g. feb/may/aug/nov 17\n"
                            "'mm/dd' for yearly workflows"),
        },
        "task_type": {
            "display_name": "Task Type",
            "mandatory": True,
            "description": ("Accepted values are:"
                            "\n'Rich Text'\n'Dropdown'\n'Checkbox'"),
        }
    }

    @classmethod
    def _filter_by_task_group(cls, predicate):
        """Import filter: match tasks whose group slug or title satisfies
        *predicate*."""
        return TaskGroup.query.filter((TaskGroup.id == cls.task_group_id) & (
            predicate(TaskGroup.slug) | predicate(TaskGroup.title))).exists()

    @classmethod
    def eager_query(cls):
        query = super(TaskGroupTask, cls).eager_query()
        return query.options(orm.subqueryload('task_group'), )

    def _display_name(self):
        return self.title + '<->' + self.task_group.display_name

    def copy(self, _other=None, **kwargs):
        """Copy this task; clone_people keeps the original assignee,
        otherwise the current user becomes the contact."""
        columns = [
            'title', 'description', 'task_group', 'sort_index',
            'relative_start_month', 'relative_start_day',
            'relative_end_month', 'relative_end_day', 'start_date',
            'end_date', 'contact', 'modified_by', 'task_type',
            'response_options',
        ]

        contact = None
        if kwargs.get('clone_people', False):
            contact = self.contact
        else:
            contact = get_current_user()

        kwargs['modified_by'] = get_current_user()

        target = self.copy_into(_other, columns, contact=contact, **kwargs)
        return target
def _check_constraint(self, name, source, condition, **kw):
    """Attach a named CHECK constraint to a throwaway single-column table.

    The placeholder table only exists so the constraint has a parent to
    compile against; its 'x' column is never used.
    """
    placeholder = schema.Table(
        source,
        schema.MetaData(),
        schema.Column('x', Integer),
    )
    constraint = schema.CheckConstraint(condition, name=name, **kw)
    placeholder.append_constraint(constraint)
    return constraint
class RecurringCharge(Base):
    """A template that periodically materializes BillingCharge rows.

    Targets either a residence or a user (never both) inside a single
    community.  Monetary values are integer cents in ``base_amount``;
    ``amount`` exposes them as Decimal dollars.
    """
    __tablename__ = "recurring_charge"
    __table_args__ = (
        sql_schema.ForeignKeyConstraint(["user_id", "u_community_id"],
                                        ["user.id", "user.community_id"]),
        sql_schema.ForeignKeyConstraint(
            ["residence_id", "r_community_id"],
            ["residence.id", "residence.community_id"],
        ),
        # Charge must stay within one community.
        sql_schema.CheckConstraint("u_community_id = r_community_id"),
        # Target either a residence or a user, not both.
        sql_schema.CheckConstraint("residence_id is null or user_id is null"),
    )
    id = Column(sql_types.Integer, primary_key=True)
    residence_id = Column(sql_types.Integer, nullable=True)
    user_id = Column(sql_types.Integer, nullable=True)
    r_community_id = Column(sql_types.Integer, nullable=True)
    u_community_id = Column(sql_types.Integer, nullable=True)
    name = Column(sql_types.String(200), nullable=False)
    # Integer cents; see the ``amount`` hybrid below.
    base_amount = Column(sql_types.Integer, nullable=False)
    frequency = Column(sql_types.Enum(Frequency), nullable=False)
    # Charge every ``frequency_skip`` units of ``frequency`` (e.g. every
    # 2 months).
    frequency_skip = Column(sql_types.Integer, nullable=False, default=1)
    grace_period = Column(sql_types.Interval, nullable=False)
    next_charge = Column(sql_types.Date, nullable=False)

    def __repr__(self):
        return (f"RecurringCharge("
                f"name={self.name},"
                f"amount={self.amount},"
                f"frequency={self.frequency},"
                f"frequency_skip={self.frequency_skip},"
                f"grace_period={self.grace_period},"
                f"next_charge={self.next_charge},"
                f")")

    @hybrid_property
    def amount(self):
        # Convert stored cents to Decimal dollars (read-only: no setter).
        return decimal.Decimal(".01") * self.base_amount

    @property
    def formatted_amount(self):
        return f"${self.amount}"

    def create_charge(self):
        """Materialize the pending occurrence as a new BillingCharge.

        Fix: BillingCharge.amount is a read-only hybrid derived from
        base_amount, so constructing with ``amount=`` raised; pass the
        underlying integer cents instead.

        NOTE(review): the new charge does not carry over residence_id /
        user_id -- confirm whether the caller links it to the target.
        """
        new_charge = BillingCharge(
            name=self.name,
            base_amount=self.base_amount,
            paid=False,
            charge_date=self.next_charge,
            due_date=self.next_charge + self.grace_period,
        )
        return new_charge

    def find_next_date(self):
        """Return the charge date following ``next_charge``.

        Returns ``None`` for frequencies not handled below.
        """
        if self.frequency == Frequency.YEARLY:
            return self.next_charge.replace(
                year=self.next_charge.year + self.frequency_skip)
        elif self.frequency == Frequency.MONTHLY:
            # Fix: the original used ``month % 12``, which produces the
            # invalid month 0 (and over-advances the year) whenever the
            # raw month total hits a multiple of 12 (e.g. December + 1).
            # 0-based month arithmetic keeps months in 1..12.
            total_months = self.next_charge.month - 1 + self.frequency_skip
            return self.next_charge.replace(
                year=self.next_charge.year + total_months // 12,
                month=total_months % 12 + 1)
        elif self.frequency == Frequency.DAILY:
            return self.next_charge + datetime.timedelta(
                days=self.frequency_skip)
class TaskGroupTask(roleable.Roleable,
                    relationship.Relatable,
                    mixins.Titled,
                    mixins.Described,
                    base.ContextRBAC,
                    mixins.Slugged,
                    mixins.Timeboxed,
                    Indexed,
                    db.Model):
    """Workflow TaskGroupTask model."""

    __tablename__ = 'task_group_tasks'
    # DB-level guard; start/end ordering is also checked elsewhere
    # (see validate_date's docstring).
    _extra_table_args = (
        schema.CheckConstraint('start_date <= end_date'),
    )
    _title_uniqueness = False
    _start_changed = False

    @classmethod
    def default_task_type(cls):
        # Default value for the task_type column.
        return cls.TEXT

    @classmethod
    def generate_slug_prefix(cls):
        return "TASK"

    task_group_id = db.Column(
        db.Integer,
        db.ForeignKey('task_groups.id', ondelete="CASCADE"),
        nullable=False,
    )
    object_approval = db.Column(
        db.Boolean,
        nullable=False,
        default=False)
    task_type = db.Column(
        db.String(length=250),
        default=default_task_type,
        nullable=False)
    response_options = db.Column(
        JsonType(),
        nullable=False,
        default=[])
    relative_start_day = deferred(
        db.Column(db.Integer, default=None),
        "TaskGroupTask"
    )
    relative_end_day = deferred(
        db.Column(db.Integer, default=None),
        "TaskGroupTask"
    )

    # This parameter is overridden by workflow backref, but is here to ensure
    # pylint does not complain
    _task_group = None

    @hybrid.hybrid_property
    def task_group(self):
        """Getter for task group foreign key."""
        return self._task_group

    @task_group.setter
    def task_group(self, task_group):
        """Setter for task group foreign key."""
        # The first assignment also creates the Relationship row linking
        # the task to its group.
        if not self._task_group and task_group:
            relationship.Relationship(source=task_group, destination=self)
        self._task_group = task_group

    TEXT = 'text'
    MENU = 'menu'
    CHECKBOX = 'checkbox'
    VALID_TASK_TYPES = [TEXT, MENU, CHECKBOX]

    @orm.validates('task_type')
    def validate_task_type(self, key, value):
        # pylint: disable=unused-argument
        """Coerce a missing task_type to the default; reject unknown values."""
        if value is None:
            value = self.default_task_type()
        if value not in self.VALID_TASK_TYPES:
            raise ValueError(u"Invalid type '{}'".format(value))
        return value

    # pylint: disable=unused-argument
    @orm.validates("start_date", "end_date")
    def validate_date(self, key, value):
        """Validate the date value itself; the start/end ordering relative
        to each other is checked with the 'before_insert' hook.

        NOTE(review): the century arithmetic relies on integer division
        (Python 2 semantics; see ``iteritems`` use in ``copy`` below);
        under Python 3 ``/`` would yield a float -- confirm the runtime.
        """
        if value is None:
            return
        if isinstance(value, datetime.datetime):
            value = value.date()
        if value < datetime.date(100, 1, 1):
            # Two-digit years parse as years < 100; shift into the
            # current century.
            current_century = datetime.date.today().year / 100
            return datetime.date(value.year + current_century * 100,
                                 value.month,
                                 value.day)
        return value

    _api_attrs = reflection.ApiAttributes(
        'task_group',
        'object_approval',
        'task_type',
        'response_options',
        reflection.Attribute('view_start_date', update=False, create=False),
        reflection.Attribute('view_end_date', update=False, create=False),
    )
    _sanitize_html = []
    # Import/export column aliases and help text.
    _aliases = {
        "title": "Summary",
        "description": {
            "display_name": "Task Description",
            "handler_key": "task_description",
        },
        "start_date": {
            "display_name": "Start Date",
            "mandatory": True,
            "description": (
                "Enter the task start date\nin the following format:\n"
                "'mm/dd/yyyy'"
            ),
        },
        "end_date": {
            "display_name": "End Date",
            "mandatory": True,
            "description": (
                "Enter the task end date\nin the following format:\n"
                "'mm/dd/yyyy'"
            ),
        },
        "task_group": {
            "display_name": "Task Group",
            "mandatory": True,
            "filter_by": "_filter_by_task_group",
        },
        "task_type": {
            "display_name": "Task Type",
            "mandatory": True,
            "description": ("Accepted values are:"
                            "\n'Rich Text'\n'Dropdown'\n'Checkbox'"),
        }
    }

    @property
    def workflow(self):
        """Property which returns parent workflow object."""
        return self.task_group.workflow

    @classmethod
    def _filter_by_task_group(cls, predicate):
        """Import filter: match tasks whose group slug or title satisfies
        *predicate*."""
        return TaskGroup.query.filter(
            (TaskGroup.id == cls.task_group_id) &
            (predicate(TaskGroup.slug) | predicate(TaskGroup.title))
        ).exists()

    def _get_view_date(self, date):
        # Returns None unless the task is attached to a workflow.
        if date and self.task_group and self.task_group.workflow:
            return self.task_group.workflow.calc_next_adjusted_date(date)

    @simple_property
    def view_start_date(self):
        return self._get_view_date(self.start_date)

    @simple_property
    def view_end_date(self):
        return self._get_view_date(self.end_date)

    @classmethod
    def _populate_query(cls, query):
        return query.options(
            orm.Load(cls).joinedload("task_group")
               .undefer_group("TaskGroup_complete"),
            orm.Load(cls).joinedload("task_group")
               .joinedload("workflow")
               .undefer_group("Workflow_complete"),
        )

    @classmethod
    def eager_query(cls, **kwargs):
        return cls._populate_query(super(TaskGroupTask, cls).eager_query(**kwargs))

    def _display_name(self):
        return self.title + '<->' + self.task_group.display_name

    def copy(self, _other=None, **kwargs):
        """Copy the task; clone_people keeps the original assignees,
        otherwise the current user becomes the single 'Task Assignees'
        role holder."""
        columns = ['title', 'description', 'task_group', 'start_date',
                   'end_date', 'access_control_list', 'modified_by',
                   'task_type', 'response_options']

        if kwargs.get('clone_people', False):
            access_control_list = [
                {"ac_role_id": acl.ac_role_id, "person": {"id": person.id}}
                for person, acl in self.access_control_list
            ]
        else:
            # Invert the role map to look up the id for 'Task Assignees'.
            role_id = {
                v: k
                for (k, v) in role.get_custom_roles_for(self.type).iteritems()
            }['Task Assignees']
            access_control_list = [
                {"ac_role_id": role_id, "person": {"id": get_current_user().id}}
            ]
        kwargs['modified_by'] = get_current_user()
        return self.copy_into(_other, columns,
                              access_control_list=access_control_list,
                              **kwargs)
def createDB(self):
    """
    Step through and create the DB structure, relationships and constraints
    **Note that order matters here, have to define a Table before you can
    link to it**

    TODO this can/should all be redone using the new syntax and relations
    see: http://docs.sqlalchemy.org/en/rel_0_7/orm/relationships.html# for
    some examples.

    NOTE: if one stops using sqlite then change file_id, logging_id and
    file_logging_id to BigIntegers (sqlite doesn't know BigInteger)
    """
    self.init_db(self.user, self.password, self.db_name)
    metadata = self.metadata

    # Mission: top of the hierarchy; holds the directory layout for the data.
    data_table = schema.Table('mission', metadata,
                              schema.Column('mission_id', types.Integer,
                                            autoincrement=True,
                                            primary_key=True,
                                            nullable=False),
                              schema.Column('mission_name', types.String(20),
                                            nullable=False, unique=True),
                              schema.Column('rootdir', types.String(150),
                                            nullable=False, ),
                              schema.Column('incoming_dir', types.String(150),
                                            nullable=False, ),
                              schema.Column('codedir', types.String(150),
                                            nullable=True, ),
                              schema.Column('inspectordir', types.String(150),
                                            nullable=True, ),
                              schema.Column('errordir', types.String(150),
                                            nullable=True, ),
                              extend_existing=True)

    # Satellite: unique per (name, mission).
    data_table = schema.Table(
        'satellite', metadata,
        schema.Column('satellite_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False),
        schema.Column('satellite_name', types.String(20),
                      nullable=False),  # hmm long enough?
        schema.Column('mission_id', types.Integer,
                      schema.ForeignKey('mission.mission_id'),
                      nullable=False, ),
        schema.UniqueConstraint('satellite_name', 'mission_id',
                                name='unique_pairs_satellite'),
        extend_existing=True)

    # Instrument: unique per (name, satellite).
    data_table = schema.Table(
        'instrument', metadata,
        schema.Column('instrument_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False),
        schema.Column('instrument_name', types.String(20),
                      nullable=False),  # hmm long enough?
        schema.Column('satellite_id', types.Integer,
                      schema.ForeignKey('satellite.satellite_id'),
                      nullable=False, ),
        schema.UniqueConstraint('instrument_name', 'satellite_id',
                                name='unique_pairs_instrument'),
        extend_existing=True)

    # Product: a data product type produced by an instrument.
    data_table = schema.Table(
        'product', metadata,
        schema.Column('product_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False, index=True),
        schema.Column('product_name', types.String(100), nullable=False,
                      index=True),  # hmm long enough?
        schema.Column('instrument_id', types.Integer,
                      schema.ForeignKey('instrument.instrument_id'),
                      nullable=False, ),
        schema.Column('relative_path', types.String(100),
                      nullable=False),  # hmm long enough?
        schema.Column('level', types.Float, nullable=False),
        schema.Column('format', types.Text,
                      nullable=False),  # hmm long enough?
        schema.Column('product_description', types.Text,
                      nullable=True),  # hmm long enough?
        schema.UniqueConstraint('product_name', 'instrument_id',
                                'relative_path',
                                name='unique_triplet_product'),
        extend_existing=True)

    # Many-to-many link: which instruments feed which products.
    data_table = schema.Table(
        'instrumentproductlink', metadata,
        schema.Column('instrument_id', types.Integer,
                      schema.ForeignKey('instrument.instrument_id'),
                      nullable=False),
        schema.Column('product_id', types.Integer,
                      schema.ForeignKey('product.product_id'),
                      nullable=False),
        schema.PrimaryKeyConstraint('instrument_id', 'product_id'),
        extend_existing=True)

    # Process: a processing step that (optionally) emits one output product.
    data_table = schema.Table(
        'process', metadata,
        schema.Column('process_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False, index=True),
        schema.Column('process_name', types.String(50),
                      nullable=False),  # hmm long enough?
        schema.Column('output_product', types.Integer,
                      schema.ForeignKey('product.product_id'),
                      nullable=True, index=True),
        schema.Column('output_timebase', types.String(10),
                      nullable=True, index=True),
        schema.Column('extra_params', types.Text, nullable=True),
        schema.UniqueConstraint('process_name', 'output_product'),
        extend_existing=True)

    # Link table: input products consumed by a process.
    data_table = schema.Table(
        'productprocesslink', metadata,
        schema.Column('process_id', types.Integer,
                      schema.ForeignKey('process.process_id'),
                      nullable=False),
        schema.Column('input_product_id', types.Integer,
                      schema.ForeignKey('product.product_id'),
                      nullable=False),
        schema.Column('optional', types.Boolean, nullable=False),
        # schema.Column('yesterday', types.Integer, nullable=False),
        # schema.Column('tomorrow', types.Integer, nullable=False),
        schema.PrimaryKeyConstraint('process_id', 'input_product_id'),
        extend_existing=True)

    # File: the central table; each row is one data file on disk.
    data_table = schema.Table(
        'file', metadata,
        # this was a bigint, sqlalchemy doesn't seem to like this... think here
        schema.Column('file_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False, index=True),
        schema.Column('filename', types.String(250), nullable=False,
                      unique=True, index=True),
        schema.Column('utc_file_date', types.Date, nullable=True, index=True),
        schema.Column('utc_start_time', types.DateTime, nullable=True,
                      index=True),
        schema.Column('utc_stop_time', types.DateTime, nullable=True,
                      index=True),
        schema.Column('data_level', types.Float, nullable=False, index=True),
        schema.Column('interface_version', types.SmallInteger,
                      nullable=False),
        schema.Column('quality_version', types.SmallInteger, nullable=False),
        schema.Column('revision_version', types.SmallInteger, nullable=False),
        schema.Column('verbose_provenance', types.Text, nullable=True),
        schema.Column('check_date', types.DateTime, nullable=True),
        schema.Column('quality_comment', types.Text, nullable=True),
        schema.Column('caveats', types.Text, nullable=True),
        schema.Column('file_create_date', types.DateTime, nullable=False),
        schema.Column('met_start_time', types.Float, nullable=True),
        schema.Column('met_stop_time', types.Float, nullable=True),
        schema.Column('exists_on_disk', types.Boolean, nullable=False),
        schema.Column('quality_checked', types.Boolean, nullable=True,
                      default=False),
        schema.Column('product_id', types.Integer,
                      schema.ForeignKey('product.product_id'),
                      nullable=False),
        schema.Column('shasum', types.String(40), nullable=True),
        schema.Column('process_keywords', types.Text, nullable=True),
        # Every file needs at least one of UTC/MET start and stop times.
        schema.CheckConstraint(
            'utc_stop_time is not NULL OR met_stop_time is not NULL'),
        schema.CheckConstraint(
            'utc_start_time is not NULL OR met_start_time is not NULL'),
        schema.CheckConstraint(
            'met_start_time <= met_stop_time'),  # in case of one entry
        schema.CheckConstraint(
            'utc_start_time <= utc_stop_time'),  # in case of one entry
        schema.CheckConstraint('interface_version >= 1'),
        schema.UniqueConstraint('utc_file_date', 'product_id',
                                'interface_version', 'quality_comment',
                                'revision_version', name='Unique file tuple'),
        extend_existing=True)
    # Composite index covering the common file lookup pattern.
    schema.Index('ix_file_big', data_table.columns['filename'],
                 data_table.columns['utc_file_date'],
                 data_table.columns['utc_start_time'],
                 data_table.columns['utc_stop_time'], unique=True)

    # Unix-epoch mirror of a file's start/stop times.
    data_table = schema.Table(
        'unixtime', metadata,
        schema.Column('file_id', types.Integer,
                      schema.ForeignKey('file.file_id'),
                      primary_key=True, index=True),
        schema.Column('unix_start', types.Integer, index=True),
        schema.Column('unix_stop', types.Integer, index=True),
        schema.CheckConstraint('unix_start <= unix_stop'),
    )

    # Provenance link: which source files produced which resulting files.
    data_table = schema.Table(
        'filefilelink', metadata,
        schema.Column('source_file', types.Integer,
                      schema.ForeignKey('file.file_id'),
                      nullable=False, index=True),
        schema.Column('resulting_file', types.Integer,
                      schema.ForeignKey('file.file_id'),
                      nullable=False, index=True),
        schema.PrimaryKeyConstraint('source_file', 'resulting_file'),
        schema.CheckConstraint('source_file <> resulting_file'),
        # TODO this is supposed to be more general than !=
        extend_existing=True)

    # Code: a versioned piece of processing code tied to a process.
    data_table = schema.Table(
        'code', metadata,
        schema.Column('code_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False, index=True),
        schema.Column('filename', types.String(250), nullable=False,
                      unique=False),
        schema.Column('relative_path', types.String(100), nullable=False),
        schema.Column('code_start_date', types.Date, nullable=False),
        schema.Column('code_stop_date', types.Date, nullable=False),
        schema.Column('code_description', types.Text, nullable=False),
        schema.Column('process_id', types.Integer,
                      schema.ForeignKey('process.process_id'),
                      nullable=False, index=True),
        schema.Column('interface_version', types.SmallInteger,
                      nullable=False),
        schema.Column('quality_version', types.SmallInteger, nullable=False),
        schema.Column('revision_version', types.SmallInteger, nullable=False),
        schema.Column('output_interface_version', types.SmallInteger,
                      nullable=False),
        schema.Column('active_code', types.Boolean, nullable=False,
                      default=False),
        schema.Column('date_written', types.Date, nullable=False),
        schema.Column('shasum', types.String(40), nullable=True),
        schema.Column('newest_version', types.Boolean, nullable=False),
        schema.Column('arguments', types.Text, nullable=True),
        schema.Column('ram', types.Float, nullable=True),  # amount of ram used in Gigs
        schema.Column('cpu', types.SmallInteger, nullable=True),  # number of cpus used
        schema.CheckConstraint('code_start_date <= code_stop_date'),
        schema.CheckConstraint('interface_version >= 1'),
        schema.CheckConstraint('output_interface_version >= 1'),
        extend_existing=True)

    # Queue of files waiting to be (re)processed.
    data_table = schema.Table(
        'processqueue', metadata,
        schema.Column('file_id', types.Integer,
                      schema.ForeignKey('file.file_id'),
                      primary_key=True, nullable=False,
                      unique=True, index=True),
        schema.Column('version_bump', types.SmallInteger, nullable=True),
        schema.Column('instrument_id', types.Integer,
                      schema.ForeignKey('instrument.instrument_id'),
                      nullable=False),
        schema.CheckConstraint('version_bump is NULL or version_bump < 3'),
        extend_existing=True)

    # Provenance link: which code produced which resulting file.
    data_table = schema.Table(
        'filecodelink', metadata,
        schema.Column('resulting_file', types.Integer,
                      schema.ForeignKey('file.file_id'),
                      nullable=False),
        schema.Column('source_code', types.Integer,
                      schema.ForeignKey('code.code_id'),
                      nullable=False),
        schema.PrimaryKeyConstraint('resulting_file', 'source_code'),
        extend_existing=True)

    # Release: tags files as part of a named data release.
    data_table = schema.Table('release', metadata,
                              schema.Column('file_id', types.Integer,
                                            schema.ForeignKey('file.file_id'),
                                            nullable=False, ),
                              schema.Column('release_num', types.String(20),
                                            nullable=False),
                              schema.PrimaryKeyConstraint('file_id',
                                                          'release_num'),
                              extend_existing=True)

    # Tracks per-host process ids for currently running processing jobs.
    data_table = schema.Table(
        'processpidlink', metadata,
        schema.Column('ppl_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False),
        schema.Column('pid', types.Integer, nullable=True),
        schema.Column('hostname', types.String(100), nullable=True),
        schema.Column('process_id', types.Integer,
                      schema.ForeignKey('process.process_id'),
                      nullable=True),
        schema.Column('currentlyprocessing', types.Boolean, nullable=True,
                      default='f'),
        schema.Column('start_time', types.DateTime, nullable=True,
                      default=func.now()),
        schema.Column('end_time', types.DateTime, nullable=True,
                      default=func.now()))

    # One row per processing run (who/where/when) for a mission.
    data_table = schema.Table(
        'logging', metadata,
        schema.Column('logging_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False),
        schema.Column('currently_processing', types.Boolean, nullable=False,
                      default=False),
        schema.Column('pid', types.Integer, nullable=True),
        schema.Column('processing_start_time', types.DateTime,
                      nullable=False),  # might have to be a TIMESTAMP
        schema.Column('processing_end_time', types.DateTime, nullable=True),
        schema.Column('comment', types.Text, nullable=True),
        schema.Column('mission_id', types.Integer,
                      schema.ForeignKey('mission.mission_id'),
                      nullable=False),
        schema.Column('user', types.String(30), nullable=False),
        schema.Column('hostname', types.String(100), nullable=False),
        # schema.PrimaryKeyConstraint('logging_id'),
        schema.CheckConstraint(
            'processing_start_time < processing_end_time'),
        extend_existing=True)

    # Per-file detail rows for a logging run.
    data_table = schema.Table(
        'logging_file', metadata,
        schema.Column('logging_file_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False),
        schema.Column('logging_id', types.Integer,
                      schema.ForeignKey('logging.logging_id'),
                      nullable=False),
        schema.Column('file_id', types.Integer,
                      schema.ForeignKey('file.file_id'),
                      nullable=False),
        schema.Column('code_id', types.Integer,
                      schema.ForeignKey('code.code_id'),
                      nullable=False),
        schema.Column('comments', types.Text, nullable=True),
        # schema.PrimaryKeyConstraint('logging_file_id'),
        extend_existing=True)

    # Inspector: versioned code that identifies incoming files of a product.
    data_table = schema.Table(
        'inspector', metadata,
        schema.Column('inspector_id', types.Integer, autoincrement=True,
                      primary_key=True, nullable=False, index=True),
        schema.Column('filename', types.String(250), nullable=False,
                      unique=False),
        schema.Column('relative_path', types.String(250), nullable=False),
        schema.Column('description', types.Text, nullable=False),
        schema.Column('interface_version', types.SmallInteger,
                      nullable=False),
        schema.Column('quality_version', types.SmallInteger, nullable=False),
        schema.Column('revision_version', types.SmallInteger, nullable=False),
        schema.Column('output_interface_version', types.SmallInteger,
                      nullable=False),
        schema.Column('active_code', types.Boolean, nullable=False,
                      default=False, index=True),
        schema.Column('date_written', types.Date, nullable=False),
        schema.Column('shasum', types.String(40), nullable=True),
        schema.Column('newest_version', types.Boolean, nullable=False,
                      index=True),
        schema.Column('arguments', types.Text, nullable=True),
        schema.Column('product', types.Integer,
                      schema.ForeignKey('product.product_id'),
                      nullable=False),
        schema.CheckConstraint('interface_version >= 1'),
        schema.CheckConstraint('output_interface_version >= 1'),
        extend_existing=True)

    # TODO move this out so that the user chooses the db type
    # engine = create_engine('postgres:///' + self.filename, echo=False)
    # metadata.bind = engine

    metadata.create_all(checkfirst=True)