class Synthese(DB.Model):
    """Declarative model mapping the ``gn_synthese.synthese`` table.

    One row is a single synthesised observation: source/dataset links,
    nomenclature ids, counts, taxon reference, proofs, altitude/depth,
    geometries (WGS84 plus a locally-projected copy) and audit metadata.
    """

    __tablename__ = "synthese"
    __table_args__ = {"schema": "gn_synthese"}

    id_synthese = DB.Column(DB.Integer, primary_key=True)
    # SINP permanent identifiers for the observation and its group.
    unique_id_sinp = DB.Column(UUID(as_uuid=True))
    unique_id_sinp_grp = DB.Column(UUID(as_uuid=True))
    # Provenance: producing source and that source's own primary-key value.
    id_source = DB.Column(DB.Integer)
    entity_source_pk_value = DB.Column(DB.Integer)
    id_dataset = DB.Column(DB.Integer)
    # Nomenclature ids (plain integers here; no FK declared on this model).
    id_nomenclature_grp_typ = DB.Column(DB.Integer)
    grp_method = DB.Column(DB.Unicode)
    id_nomenclature_obs_technique = DB.Column(DB.Integer)
    id_nomenclature_bio_status = DB.Column(DB.Integer)
    id_nomenclature_bio_condition = DB.Column(DB.Integer)
    id_nomenclature_naturalness = DB.Column(DB.Integer)
    id_nomenclature_exist_proof = DB.Column(DB.Integer)
    id_nomenclature_valid_status = DB.Column(DB.Integer)
    id_nomenclature_diffusion_level = DB.Column(DB.Integer)
    id_nomenclature_life_stage = DB.Column(DB.Integer)
    id_nomenclature_sex = DB.Column(DB.Integer)
    id_nomenclature_obj_count = DB.Column(DB.Integer)
    id_nomenclature_type_count = DB.Column(DB.Integer)
    id_nomenclature_sensitivity = DB.Column(DB.Integer)
    id_nomenclature_observation_status = DB.Column(DB.Integer)
    id_nomenclature_blurring = DB.Column(DB.Integer)
    id_nomenclature_source_status = DB.Column(DB.Integer)
    id_nomenclature_behaviour = DB.Column(DB.Integer)
    # Counting information.
    count_min = DB.Column(DB.Integer)
    count_max = DB.Column(DB.Integer)
    # Taxon: TAXREF id, the name as cited, and the TAXREF version used.
    cd_nom = DB.Column(DB.Integer)
    nom_cite = DB.Column(DB.Unicode)
    meta_v_taxref = DB.Column(DB.Unicode)
    # Proof references (sample number / digital / physical).
    sample_number_proof = DB.Column(DB.Unicode)
    digital_proof = DB.Column(DB.Unicode)
    non_digital_proof = DB.Column(DB.Unicode)
    altitude_min = DB.Column(DB.Integer)
    altitude_max = DB.Column(DB.Integer)
    depth_min = DB.Column(DB.Integer)
    depth_max = DB.Column(DB.Integer)
    precision = DB.Column(DB.Integer)
    # Geometries: WGS84 geometry and point, plus a copy in the instance's
    # local SRID (read from app config at class-definition time).
    the_geom_4326 = DB.Column(Geometry("GEOMETRY", 4326))
    the_geom_point = DB.Column(Geometry("GEOMETRY", 4326))
    the_geom_local = DB.Column(
        Geometry("GEOMETRY", current_app.config["LOCAL_SRID"]))
    place_name = DB.Column(DB.Unicode)
    date_min = DB.Column(DB.DateTime)
    date_max = DB.Column(DB.DateTime)
    validator = DB.Column(DB.Unicode)
    validation_comment = DB.Column(DB.Unicode)
    observers = DB.Column(DB.Unicode)
    determiner = DB.Column(DB.Unicode)
    id_digitiser = DB.Column(DB.Integer)
    id_nomenclature_determination_method = DB.Column(DB.Integer)
    comment_context = DB.Column(DB.Unicode)
    comment_description = DB.Column(DB.Unicode)
    additional_data = DB.Column(JSONB)
    # Audit metadata.
    meta_validation_date = DB.Column(DB.DateTime)
    meta_create_date = DB.Column(DB.DateTime)
    meta_update_date = DB.Column(DB.DateTime)
    last_action = DB.Column(DB.Unicode)

    def get_geofeature(self, recursif=True, columns=None):
        """Return this row as a GeoJSON feature built on ``the_geom_4326``.

        :param recursif: forwarded to ``as_geofeature`` (serialize relations).
        :param columns: optional restriction of the serialized columns.
        """
        return self.as_geofeature("the_geom_4326",
                                  "id_synthese",
                                  recursif,
                                  columns=columns)
class Employee(Base):
    """Minimal employee record keyed by a PostgreSQL UUID."""

    __tablename__ = 'employees'

    # NOTE: the column attribute is named ``uuid``; inside this class body it
    # shadows any module-level ``uuid`` import.
    uuid = Column(UUID(as_uuid=True), primary_key=True)
    name = Column(String)
class Base(db.Model):
    """Abstract declarative base adding a UUID primary key and audit timestamps."""

    __abstract__ = True

    # Client-side default: a fresh UUID4 per row.
    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # NOTE(review): ``utcnow()`` is invoked once, at class-definition time.
    # This is only correct if it returns a SQL expression (e.g. a custom
    # FunctionElement evaluated per-insert), not a plain datetime — confirm
    # against utcnow's definition.
    created_on = db.Column(db.DateTime, default=utcnow())
    updated_on = db.Column(db.DateTime, default=utcnow(), onupdate=utcnow())
from . import metadata
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy import DateTime, text
from pydantic import BaseModel
import pydantic as pyd
import datetime
import typing

# Raw event payloads received from Nextflow's weblog endpoint.
nextflow_event = sa.Table(
    "nextflow_events",
    metadata,
    # Server-generated UUID pk (uuid_generate_v4 requires the uuid-ossp extension).
    sa.Column('event_id', UUID, primary_key=True,
              server_default=sa.text("uuid_generate_v4()")),
    sa.Column('run_name', sa.String()),
    sa.Column('run_id', UUID()),
    sa.Column("utc_time", sa.DateTime(timezone=True)),
    sa.Column("event", sa.String()),
    # Free-form JSON blobs as sent by Nextflow.
    sa.Column("trace", JSONB()),
    sa.Column("metadata", JSONB())
)

# Per-task trace rows, linked back to the originating event.
# NOTE(review): this table definition is truncated in the visible source —
# the remaining columns and closing parenthesis live outside this chunk.
nextflow_traces = sa.Table(
    "nextflow_traces",
    metadata,
    sa.Column("uuid", UUID, primary_key=True,
              server_default=sa.text("uuid_generate_v4()")),
    sa.Column("events_uuid", UUID(), sa.ForeignKey("nextflow_events.event_id"),
              index=True),
    sa.Column("env", sa.String()),
def load_dialect_impl(self, dialect):
    """Pick the concrete column type for *dialect*.

    PostgreSQL gets its native ``UUID`` type; every other backend stores
    the value as a 32-character string (``CHAR(32)``).

    See :meth:`sqlalchemy.types.TypeDecorator.load_dialect_impl`.
    """
    impl = UUID() if dialect.name == "postgresql" else CHAR(32)
    return dialect.type_descriptor(impl)
class Lot(Thing):
    """A named grouping of devices; lot nesting is tracked through ``Path`` rows."""

    id = db.Column(UUID(as_uuid=True), primary_key=True)  # uuid is generated on init by default
    name = db.Column(CIText(), nullable=False)
    description = db.Column(CIText())
    description.comment = """A comment about the lot."""
    closed = db.Column(db.Boolean, default=False, nullable=False)
    closed.comment = """A closed lot cannot be modified anymore."""
    # Direct child devices (many-to-many through LotDevice).
    devices = db.relationship(Device,
                              backref=db.backref('lots', lazy=True, collection_class=set),
                              secondary=lambda: LotDevice.__table__,
                              lazy=True,
                              collection_class=set)
    """The **children** devices that the lot has.

    Note that the lot can have more devices, if they are inside
    descendant lots.
    """
    parents = db.relationship(lambda: Lot,
                              viewonly=True,
                              lazy=True,
                              collection_class=set,
                              secondary=lambda: LotParent.__table__,
                              primaryjoin=lambda: Lot.id == LotParent.child_id,
                              secondaryjoin=lambda: LotParent.parent_id == Lot.id,
                              cascade='refresh-expire',  # propagate changes outside ORM
                              backref=db.backref('children',
                                                 viewonly=True,
                                                 lazy=True,
                                                 cascade='refresh-expire',
                                                 collection_class=set))
    """The parent lots."""
    all_devices = db.relationship(Device,
                                  viewonly=True,
                                  lazy=True,
                                  collection_class=set,
                                  secondary=lambda: LotDeviceDescendants.__table__,
                                  primaryjoin=lambda: Lot.id == LotDeviceDescendants.ancestor_lot_id,
                                  secondaryjoin=lambda: LotDeviceDescendants.device_id == Device.id)
    """All devices, including components, inside this lot and its descendants.
    """
    amount = db.Column(db.Integer, check_range('amount', min=0, max=100), default=0)
    # Ownership defaults to the current request user (flask.g).
    owner_id = db.Column(UUID(as_uuid=True),
                         db.ForeignKey(User.id),
                         nullable=False,
                         default=lambda: g.user.id)
    owner = db.relationship(User, primaryjoin=owner_id == User.id)
    transfer_state = db.Column(IntEnum(TransferState), default=TransferState.Initial, nullable=False)
    transfer_state.comment = TransferState.__doc__
    receiver_address = db.Column(CIText(),
                                 db.ForeignKey(User.email),
                                 nullable=False,
                                 default=lambda: g.user.email)
    receiver = db.relationship(User, primaryjoin=receiver_address == User.email)

    def __init__(self, name: str, closed: bool = closed.default.arg, description: str = None) -> None:
        """Initialize a lot with a fresh UUID and register its Path edge.

        :param name: lot name (required).
        :param closed: whether the lot starts closed.
        :param description: optional free-text comment.
        """
        super().__init__(id=uuid.uuid4(), name=name, closed=closed, description=description)
        Path(self)  # Lots have always one edge per default.

    @property
    def type(self) -> str:
        # Type name derived from the concrete class.
        return self.__class__.__name__

    @property
    def url(self) -> urlutils.URL:
        """The URL where to GET this action."""
        return urlutils.URL(url_for_resource(Lot, item_id=self.id))

    @property
    def descendants(self):
        # Query expression matching every lot below this one.
        return self.descendantsq(self.id)

    @property
    def is_temporary(self):
        # A lot without an associated trade is considered temporary.
        return False if self.trade else True

    @classmethod
    def descendantsq(cls, id):
        # ltree lquery: any Path whose path contains this lot's converted id.
        _id = UUIDLtree.convert(id)
        return (cls.id == Path.lot_id) & Path.path.lquery(exp.cast('*.{}.*'.format(_id), LQUERY))

    @classmethod
    def roots(cls):
        """Gets the lots that are not under any other lot."""
        return cls.query.join(cls.paths).filter(db.func.nlevel(Path.path) == 1)

    def add_children(self, *children):
        """Add children lots to this lot.

        This operation is highly costly as it forces refreshing
        many models in session.

        :param children: ``Lot`` instances or bare ``uuid.UUID`` ids.
        """
        for child in children:
            if isinstance(child, Lot):
                Path.add(self.id, child.id)
                db.session.refresh(child)
            else:
                assert isinstance(child, uuid.UUID)
                Path.add(self.id, child)
        # We need to refresh the models involved in this operation
        # outside the session / ORM control so the models
        # that have relationships to this model
        # with the cascade 'refresh-expire' can welcome the changes
        db.session.refresh(self)

    def remove_children(self, *children):
        """Remove children lots from this lot.

        This operation is highly costly as it forces refreshing
        many models in session.
        """
        for child in children:
            if isinstance(child, Lot):
                Path.delete(self.id, child.id)
                db.session.refresh(child)
            else:
                assert isinstance(child, uuid.UUID)
                Path.delete(self.id, child)
        db.session.refresh(self)

    def delete(self):
        """Deletes the lot.

        This method removes the children lots and children devices
        orphan from this lot and then marks this lot for deletion.
        """
        self.remove_children(*self.children)
        db.session.delete(self)

    def _refresh_models_with_relationships_to_lots(self):
        # Expire every Device/Lot/Path in the session so objects related via
        # the 'refresh-expire' cascade pick up out-of-ORM path changes.
        session = db.Session.object_session(self)
        for model in session:
            if isinstance(model, (Device, Lot, Path)):
                session.expire(model)

    def __contains__(self, child: Union['Lot', Device]):
        """Membership test for lots (via Path) and devices (via descendants view)."""
        if isinstance(child, Lot):
            return Path.has_lot(self.id, child.id)
        elif isinstance(child, Device):
            device = db.session.query(LotDeviceDescendants) \
                .filter(LotDeviceDescendants.device_id == child.id) \
                .filter(LotDeviceDescendants.ancestor_lot_id == self.id) \
                .one_or_none()
            return device
        else:
            raise TypeError('Lot only contains devices and lots, not {}'.format(child.__class__))

    def __repr__(self) -> str:
        return '<Lot {0.name} devices={0.devices!r}>'.format(self)
class MuscleGroupModel(BaseModel):
    """Lookup table of muscle groups, keyed by a client-generated UUID."""

    __tablename__ = 'muscle_groups'

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
    name = Column(String(100), nullable=False)
return '<Photo %r> [%r] %r' % (self.uuid, self.status, self.url)  # NOTE(review): orphaned fragment — its enclosing method/class lies outside this chunk.

class Thumbnail(object):
    """Plain class for thumbnail rows; presumably mapped classically to
    ``photo_thumbnails`` elsewhere — confirm against the mapper setup."""

    query = db_session.query_property()

    def __init__(self, photo_uuid=None, width=None, height=None, url=None):
        self.photo_uuid = photo_uuid
        self.width = width
        self.height = height
        self.url = url

# Classical (non-declarative) table definitions.
photos = Table(
    'photos', metadata,
    Column('uuid', UUID(as_uuid=True), primary_key=True),
    Column('url', String),
    Column('status', String),
    Column('created_at', DateTime)
)

thumbnails = Table(
    'photo_thumbnails', metadata,
    # Client-side UUID default; created_at is set server-side via now().
    Column('uuid', UUID(as_uuid=True), primary_key=True, default=uuid4),
    Column('photo_uuid', UUID(as_uuid=True), ForeignKey('photos.uuid')),
    Column('width', Integer),
    Column('height', Integer),
    Column('url', String),
    Column('created_at', DateTime, server_default=func.now())
)
class UserModel(db.Model):
    """User account record.

    The primary key is a server-generated UUID (``uuid_generate_v4()``;
    requires the uuid-ossp PostgreSQL extension).

    Fixes applied: removed debug ``print`` calls that wrote passwords and
    password hashes to stdout (a credential leak), removed commented-out
    dead code, and renamed the misnamed ``self`` first parameter of the
    ``check_password_database`` classmethod to ``cls``.
    """

    __tablename__ = 'users'

    id = db.Column(UUID(as_uuid=True), unique=True, primary_key=True,
                   server_default=sa_text("uuid_generate_v4()"))
    username = db.Column(db.String(80), unique=True)
    password = db.Column(db.String(128))  # stores the bcrypt hash, not plaintext
    email = db.Column(db.String(80), unique=True)
    first_name = db.Column(db.String(80))
    last_name = db.Column(db.String(80))
    gender = db.Column(db.String(80))
    date_of_birth = db.Column(db.String(80))
    profile_picture_uri = db.Column(db.String(80))

    def __init__(self, username, email, password, first_name, last_name):
        """Create a user; optional profile fields stay unset until assigned."""
        self.username = username
        self.password = password
        self.email = email
        self.first_name = first_name
        self.last_name = last_name

    def __str__(self):
        return "User(id='%s')" % self.id

    def save_to_db(self):
        """Add this user to the session and commit."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Delete this user from the session and commit."""
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def find_by_email(cls, email):
        """Return the first user with *email*, or None."""
        return cls.query.filter_by(email=email).first()

    @classmethod
    def find_by_uuid(cls, _uuid):
        """Return the user whose id equals *_uuid*, or None."""
        return cls.query.filter_by(id=_uuid).first()

    @classmethod
    def check_password_database(cls, password_hash, password):
        """Return True if *password* matches the stored bcrypt *password_hash*."""
        return bcrypt.check_password_hash(password_hash, password)
# Core user accounts.
users_table = sqlalchemy.Table(
    "users",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
    sqlalchemy.Column("email", sqlalchemy.String(40), unique=True, index=True),
    sqlalchemy.Column("name", sqlalchemy.String(100)),
    sqlalchemy.Column("hashed_password", sqlalchemy.String()),
    sqlalchemy.Column(
        "is_active",
        sqlalchemy.Boolean(),
        # New users are active unless explicitly deactivated.
        server_default=sqlalchemy.sql.expression.true(),
        nullable=False,
    ),
)

# Auth tokens: opaque UUID values generated server-side
# (uuid_generate_v4() requires the uuid-ossp extension).
tokens_table = sqlalchemy.Table(
    "tokens",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
    sqlalchemy.Column(
        "token",
        UUID(as_uuid=False),
        server_default=sqlalchemy.text("uuid_generate_v4()"),
        unique=True,
        nullable=False,
        index=True,
    ),
    sqlalchemy.Column("expires", sqlalchemy.DateTime()),
    sqlalchemy.Column("user_id", sqlalchemy.ForeignKey("users.id")),
)
class ContractedWorkPayment(Base, AuditMixin):
    """Payment tracking for one piece of contracted work on an application.

    Holds the interim and final payment submissions, their evidence-of-cost
    and final-report documents, the status-change history and the reporting
    fields collected per work type.
    """

    __tablename__ = 'contracted_work_payment'

    def __init__(self, application, contracted_work_payment_status_code,
                 contracted_work_payment_code, **kwargs):
        """Create the payment and record its initial status change."""
        super(ContractedWorkPayment, self).__init__(**kwargs)
        initial_status = ContractedWorkPaymentStatusChange(
            contracted_work_payment=self,
            application=application,
            contracted_work_payment_status_code=contracted_work_payment_status_code,
            contracted_work_payment_code=contracted_work_payment_code)
        self.status_changes.append(initial_status)

    contracted_work_payment_id = db.Column(db.Integer, primary_key=True)
    application_guid = db.Column(UUID(as_uuid=True), db.ForeignKey('application.guid'), nullable=False)
    work_id = db.Column(db.String, unique=True, nullable=False)

    # Submitted amounts / effort, tracked separately for interim and final.
    interim_actual_cost = db.Column(db.Numeric(14, 2))
    final_actual_cost = db.Column(db.Numeric(14, 2))
    interim_paid_amount = db.Column(db.Numeric(14, 2))
    final_paid_amount = db.Column(db.Numeric(14, 2))
    interim_total_hours_worked_to_date = db.Column(db.Numeric(14, 2))
    final_total_hours_worked_to_date = db.Column(db.Numeric(14, 2))
    interim_number_of_workers = db.Column(db.Integer)
    final_number_of_workers = db.Column(db.Integer)
    work_completion_date = db.Column(db.Date)
    interim_submitter_name = db.Column(db.String)
    final_submitter_name = db.Column(db.String)

    # Evidence-of-cost and final-report documents (at most one of each).
    interim_eoc_application_document_guid = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey('application_document.application_document_guid'),
        unique=True)
    final_eoc_application_document_guid = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey('application_document.application_document_guid'),
        unique=True)
    final_report_application_document_guid = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey('application_document.application_document_guid'),
        unique=True)
    interim_eoc_document = db.relationship(
        'ApplicationDocument',
        lazy='selectin',
        foreign_keys=[interim_eoc_application_document_guid])
    final_eoc_document = db.relationship(
        'ApplicationDocument',
        lazy='selectin',
        foreign_keys=[final_eoc_application_document_guid])
    interim_report = db.Column(db.String)
    final_report_document = db.relationship(
        'ApplicationDocument',
        lazy='selectin',
        foreign_keys=[final_report_application_document_guid])

    # Status-change history, newest first (ordered by change_timestamp desc).
    status_changes = db.relationship(
        'ContractedWorkPaymentStatusChange',
        lazy='selectin',
        order_by='desc(ContractedWorkPaymentStatusChange.change_timestamp)')
    payment_documents = db.relationship(
        'PaymentDocument',
        lazy='selectin',
        secondary='payment_document_contracted_work_payment_xref')

    # Auditing
    audit_ind = db.Column(db.Boolean)
    audit_user = db.Column(db.String)
    audit_timestamp = db.Column(db.DateTime)

    # General Reporting
    surface_landowner = db.Column(db.String)
    reclamation_was_achieved = db.Column(db.Boolean)

    # Abandonment Reporting
    abandonment_downhole_completed = db.Column(db.Boolean)
    abandonment_cut_and_capped_completed = db.Column(db.Boolean)
    abandonment_equipment_decommissioning_completed = db.Column(db.Boolean)
    abandonment_notice_of_operations_submitted = db.Column(db.Boolean)
    abandonment_was_pipeline_abandoned = db.Column(db.Boolean)
    abandonment_metres_of_pipeline_abandoned = db.Column(db.Integer)

    # PSI and DSI Reporting
    site_investigation_type_of_document_submitted = db.Column(db.String)
    site_investigation_concerns_identified = db.Column(db.Boolean)

    # Remediation Reporting
    remediation_identified_contamination_meets_standards = db.Column(db.Boolean)
    remediation_type_of_document_submitted = db.Column(db.String)
    remediation_reclaimed_to_meet_cor_p1_requirements = db.Column(db.Boolean)

    # Reclamation Reporting
    reclamation_reclaimed_to_meet_cor_p2_requirements = db.Column(db.Boolean)
    reclamation_surface_reclamation_criteria_met = db.Column(db.Boolean)

    @hybrid_property
    def has_interim_prfs(self):
        # True if any active interim payment-request form exists.
        if self.payment_documents:
            return any(
                doc.active_ind and doc.payment_document_code == 'INTERIM_PRF'
                for doc in self.payment_documents)
        return False

    @hybrid_property
    def has_final_prfs(self):
        # True if any active final payment-request form exists.
        if self.payment_documents:
            return any(
                doc.active_ind and doc.payment_document_code == 'FINAL_PRF'
                for doc in self.payment_documents)
        return False

    @hybrid_property
    def interim_payment_status_code(self):
        # Defaults to INFORMATION_REQUIRED when no interim change exists.
        if self.interim_payment_status:
            return self.interim_payment_status.contracted_work_payment_status_code
        else:
            return 'INFORMATION_REQUIRED'

    @hybrid_property
    def interim_payment_status(self):
        # Most recent interim change (status_changes is ordered newest first).
        if self.interim_payment_status_changes:
            return self.interim_payment_status_changes[0]

    @hybrid_property
    def interim_payment_status_changes(self):
        return [
            status for status in self.status_changes
            if status.contracted_work_payment_code == 'INTERIM'
        ]

    @hybrid_property
    def interim_payment_submission_date(self):
        # Oldest interim change, i.e. the original submission timestamp.
        if self.interim_payment_status_changes:
            return self.interim_payment_status_changes[-1].change_timestamp

    @hybrid_property
    def final_payment_status_code(self):
        if self.final_payment_status:
            return self.final_payment_status.contracted_work_payment_status_code
        else:
            return 'INFORMATION_REQUIRED'

    @hybrid_property
    def final_payment_status(self):
        if self.final_payment_status_changes:
            return self.final_payment_status_changes[0]

    @hybrid_property
    def final_payment_status_changes(self):
        return [
            status for status in self.status_changes
            if status.contracted_work_payment_code == 'FINAL'
        ]

    @hybrid_property
    def final_payment_submission_date(self):
        if self.final_payment_status_changes:
            return self.final_payment_status_changes[-1].change_timestamp

    @hybrid_property
    def review_deadlines(self):
        """Interim/final review deadlines: submission date + 90 days.

        Returns sentinel values for payments that were never submitted
        (not applicable) or already have a PRF issued (paid).
        """
        review_deadlines = {
            'interim': REVIEW_DEADLINE_NOT_APPLICABLE,
            'final': REVIEW_DEADLINE_NOT_APPLICABLE
        }

        interim_payment_submission_date = self.interim_payment_submission_date
        final_payment_submission_date = self.final_payment_submission_date

        # No payment information has been submitted
        if interim_payment_submission_date is None and final_payment_submission_date is None:
            return review_deadlines

        # We don't need to review payments that have already had at least one PRF issued
        if interim_payment_submission_date and self.has_interim_prfs:
            interim_payment_submission_date = None
            review_deadlines['interim'] = REVIEW_DEADLINE_PAID

        if final_payment_submission_date and self.has_final_prfs:
            final_payment_submission_date = None
            review_deadlines['final'] = REVIEW_DEADLINE_PAID

        # Both interim and final have been submitted and have completed the payment process
        if interim_payment_submission_date is None and final_payment_submission_date is None:
            return review_deadlines

        days_to_review = timedelta(days=90)
        if interim_payment_submission_date:
            interim_deadline = interim_payment_submission_date + days_to_review
            review_deadlines['interim'] = interim_deadline

        if final_payment_submission_date:
            final_deadline = final_payment_submission_date + days_to_review
            review_deadlines['final'] = final_deadline

        return review_deadlines

    def __repr__(self):
        return f'<{self.__class__.__name__} {self.contracted_work_payment_id} {self.application_guid} {self.work_id}>'

    @classmethod
    def find_by_application_guid(cls, application_guid):
        """Return all payments attached to an application."""
        return cls.query.filter_by(application_guid=application_guid).all()

    @classmethod
    def find_by_work_id(cls, work_id):
        """Return the single payment for *work_id*, or None."""
        return cls.query.filter_by(work_id=work_id).one_or_none()
class Mine(AuditMixin, Base):
    """Core mine record plus related location/status/permit/document data,
    with JSON serializers and lookup helpers."""

    __tablename__ = 'mine'

    mine_guid = db.Column(UUID(as_uuid=True), primary_key=True, server_default=FetchedValue())
    mine_no = db.Column(db.String(10))
    mine_name = db.Column(db.String(60), nullable=False)
    mine_note = db.Column(db.String(300), default='')
    major_mine_ind = db.Column(db.Boolean, nullable=False, default=False)
    # Soft-delete flag: the lookup helpers below filter on deleted_ind=False.
    deleted_ind = db.Column(db.Boolean, nullable=False, server_default=FetchedValue())
    mine_region = db.Column(db.String(2), db.ForeignKey('mine_region_code.mine_region_code'))
    ohsc_ind = db.Column(db.Boolean, nullable=False, server_default=FetchedValue())
    union_ind = db.Column(db.Boolean, nullable=False, server_default=FetchedValue())

    # Relationships
    # Almost always used and 1:1, so these are joined
    mine_location = db.relationship('MineLocation', backref='mine', uselist=False, lazy='joined')
    mine_status = db.relationship('MineStatus', backref='mine', order_by='desc(MineStatus.update_timestamp)', lazy='joined')
    mine_tailings_storage_facilities = db.relationship(
        'MineTailingsStorageFacility',
        backref='mine',
        order_by='desc(MineTailingsStorageFacility.mine_tailings_storage_facility_name)',
        lazy='joined')
    # Almost always used, but faster to use selectin to load related data
    mine_permit = db.relationship('Permit', backref='mine', order_by='desc(Permit.create_timestamp)', lazy='selectin')
    mine_type = db.relationship(
        'MineType',
        backref='mine',
        order_by='desc(MineType.update_timestamp)',
        primaryjoin="and_(MineType.mine_guid == Mine.mine_guid, MineType.active_ind==True)",
        lazy='selectin')
    # Not always desired, set to lazy load using select
    mineral_tenure_xref = db.relationship('MineralTenureXref', backref='mine', lazy='select')
    mine_expected_documents = db.relationship(
        'MineExpectedDocument',
        primaryjoin="and_(MineExpectedDocument.mine_guid == Mine.mine_guid, MineExpectedDocument.active_ind==True)",
        backref='mine',
        order_by='desc(MineExpectedDocument.due_date)',
        lazy='select')
    mine_party_appt = db.relationship('MinePartyAppointment', backref="mine", lazy='select')
    mine_incidents = db.relationship('MineIncident', backref="mine", lazy='select')

    def __repr__(self):
        return '<Mine %r>' % self.mine_guid

    def json(self):
        """Full JSON representation, including related collections."""
        return {
            'mine_guid': str(self.mine_guid),
            'mine_name': self.mine_name,
            'mine_no': self.mine_no,
            'mine_note': self.mine_note,
            'major_mine_ind': self.major_mine_ind,
            'region_code': self.mine_region,
            'mineral_tenure_xref': [item.json() for item in self.mineral_tenure_xref],
            'mine_location': self.mine_location.json() if self.mine_location else None,
            # Exploration permits must, always, and exclusively have an X as the
            # second character, and we would like this to be returned last:
            'mine_permit': [
                item.json() for item in self.mine_permit
                if item.permit_no.lower()[1] != 'x'
            ] + [
                item.json() for item in self.mine_permit
                if item.permit_no.lower()[1] == 'x'
            ],
            'mine_status': [item.json() for item in self.mine_status],
            'mine_tailings_storage_facility':
            [item.json() for item in self.mine_tailings_storage_facilities],
            'mine_expected_documents':
            [item.json() for item in self.mine_expected_documents],
            'mine_type': [item.json() for item in self.active(self.mine_type)],
            'verified_status': self.verified_status.json() if self.verified_status else None,
        }

    def json_for_list(self):
        """Lighter JSON used by list endpoints."""
        return {
            'mine_guid': str(self.mine_guid),
            'mine_name': self.mine_name,
            'mine_no': self.mine_no,
            'mine_note': self.mine_note,
            'major_mine_ind': self.major_mine_ind,
            'region_code': self.mine_region,
            'mine_permit': [item.json_for_list() for item in self.mine_permit],
            'mine_status': [item.json() for item in self.mine_status],
            'mine_tailings_storage_facility':
            [item.json() for item in self.mine_tailings_storage_facilities],
            'mine_type': [item.json() for item in self.active(self.mine_type)],
            'verified_status': self.verified_status.json() if self.verified_status else None,
        }

    def json_for_map(self):
        """Minimal JSON for map views: identity fields plus location."""
        return {
            'mine_guid': str(self.mine_guid),
            'mine_name': self.mine_name,
            'mine_no': self.mine_no,
            'mine_note': self.mine_note,
            'major_mine_ind': self.major_mine_ind,
            'region_code': self.mine_region,
            'mine_location': self.mine_location.json() if self.mine_location else None
        }

    def json_by_name(self):
        return {
            'mine_guid': str(self.mine_guid),
            'mine_name': self.mine_name,
            'mine_no': self.mine_no
        }

    def json_by_location(self):
        # this will get cleaned up when mine_location and mine are merged
        result = {'mine_guid': str(self.mine_guid)}
        if self.mine_location:
            result['latitude'] = str(self.mine_location.latitude) if self.mine_location.latitude else ''
            result['longitude'] = str(self.mine_location.longitude) if self.mine_location.longitude else ''
        else:
            result['latitude'] = ''
            result['longitude'] = ''
        return result

    def json_by_permit(self):
        return {
            'mine_guid': str(self.mine_guid),
            'mine_permit': [item.json() for item in self.mine_permit]
        }

    @staticmethod
    def active(records):
        """Filter *records* down to those flagged active."""
        return list(filter(lambda x: x.active_ind, records))

    @classmethod
    def find_by_mine_guid(cls, _id):
        """Find a non-deleted mine by guid string; None if malformed.

        NOTE(review): only ValueError is caught, but uuid.UUID raises
        AttributeError/TypeError for non-string input — confirm callers
        always pass a str.
        """
        try:
            uuid.UUID(_id, version=4)
            return cls.query.filter_by(mine_guid=_id).filter_by(
                deleted_ind=False).first()
        except ValueError:
            return None

    @classmethod
    def find_by_mine_no(cls, _id):
        """Find a non-deleted mine by mine number."""
        return cls.query.filter_by(mine_no=_id).filter_by(
            deleted_ind=False).first()

    @classmethod
    def find_by_mine_name(cls, term=None):
        """Case-insensitive name search, capped at 50 results."""
        MINE_LIST_RESULT_LIMIT = 50
        if term:
            name_filter = Mine.mine_name.ilike('%{}%'.format(term))
            mines_q = Mine.query.filter(name_filter).filter_by(
                deleted_ind=False)
            mines = mines_q.limit(MINE_LIST_RESULT_LIMIT).all()
        else:
            mines = Mine.query.limit(MINE_LIST_RESULT_LIMIT).all()
        return mines

    @classmethod
    def find_by_name_no_permit(cls, term=None):
        """Search by name, mine number or permit number (50-result cap).

        NOTE(review): the permit-number branch does not filter on
        deleted_ind — confirm whether that is intentional.
        """
        MINE_LIST_RESULT_LIMIT = 50
        if term:
            name_filter = Mine.mine_name.ilike('%{}%'.format(term))
            number_filter = Mine.mine_no.ilike('%{}%'.format(term))
            permit_filter = Permit.permit_no.ilike('%{}%'.format(term))
            mines_q = Mine.query.filter(name_filter | number_filter).filter_by(
                deleted_ind=False)
            permit_q = Mine.query.join(Permit).filter(permit_filter)
            mines = mines_q.union(permit_q).limit(MINE_LIST_RESULT_LIMIT).all()
        else:
            mines = Mine.query.limit(MINE_LIST_RESULT_LIMIT).all()
        return mines

    @classmethod
    def find_all_major_mines(cls):
        """Return all non-deleted major mines."""
        return cls.query.filter_by(major_mine_ind=True).filter_by(
            deleted_ind=False).all()

    @classmethod
    def find_by_mine_no_or_guid(cls, _id):
        """Look up by guid first, then fall back to mine number."""
        result = cls.find_by_mine_guid(_id)
        if result is None:
            result = cls.find_by_mine_no(_id)
        return result

    @classmethod
    def create_mine(cls,
                    mine_no,
                    mine_name,
                    mine_category,
                    mine_region,
                    add_to_session=True,
                    ohsc_ind=None,
                    union_ind=None):
        """Build a new Mine with a client-generated guid; optionally stage it."""
        mine = cls(mine_guid=uuid.uuid4(),
                   mine_no=mine_no,
                   mine_name=mine_name,
                   major_mine_ind=mine_category,
                   mine_region=mine_region,
                   ohsc_ind=ohsc_ind,
                   union_ind=union_ind)
        if add_to_session:
            mine.save(commit=False)
        return mine

    @validates('mine_name')
    def validate_mine_name(self, key, mine_name):
        # Required; 60 chars matches the column length.
        if not mine_name:
            raise AssertionError('No mine name provided.')
        if len(mine_name) > 60:
            raise AssertionError('Mine name must not exceed 60 characters.')
        return mine_name

    @validates('mine_note')
    def validate_mine_note(self, key, mine_note):
        # Optional; normalised to '' and capped at the column length (300).
        mine_note = mine_note if mine_note else ''
        if len(mine_note) > 300:
            raise AssertionError('Mine note must not exceed 300 characters.')
        return mine_note

    @validates('mine_no')
    def validate_mine_no(self, key, mine_no):
        mine_no = mine_no if mine_no else ''
        if mine_no and len(mine_no) > 10:
            raise AssertionError('Mine number must not exceed 10 characters.')
        return mine_no

    @validates('mine_region')
    def validate_mine_region(self, key, mine_region):
        if not mine_region:
            raise AssertionError('No mine region code provided.')
        if len(mine_region) > 2:
            raise AssertionError('Invalid region code')
        return mine_region
class SyntheseOneRecord(VSyntheseDecodeNomenclatures):
    """
    Model for displaying detailed information about one synthese observation.

    Inherits from the VSyntheseDecodeNomenclatures model for all decoded
    nomenclatures.
    """

    __tablename__ = "synthese"
    __table_args__ = {"schema": "gn_synthese", "extend_existing": True}

    id_synthese = DB.Column(
        DB.Integer,
        ForeignKey("gn_synthese.v_synthese_decode_nomenclatures.id_synthese"),
        primary_key=True,
    )
    unique_id_sinp = DB.Column(UUID(as_uuid=True))
    id_source = DB.Column(DB.Integer)
    id_dataset = DB.Column(DB.Integer)
    cd_hab = DB.Column(DB.Integer, ForeignKey(Habref.cd_hab))

    habitat = DB.relationship(Habref, lazy="joined")
    # Join conditions are declared explicitly (primaryjoin/foreign_keys)
    # rather than relying on FK introspection.
    source = DB.relationship(
        "TSources",
        primaryjoin=(TSources.id_source == id_source),
        foreign_keys=[id_source],
    )
    areas = DB.relationship(
        "LAreas",
        secondary=corAreaSynthese,
        primaryjoin=(corAreaSynthese.c.id_synthese == id_synthese),
        secondaryjoin=(corAreaSynthese.c.id_area == LAreas.id_area),
        foreign_keys=[
            corAreaSynthese.c.id_synthese, corAreaSynthese.c.id_area
        ],
    )
    datasets = DB.relationship(
        "TDatasets",
        primaryjoin=(TDatasets.id_dataset == id_dataset),
        foreign_keys=[id_dataset],
    )
    # Acquisition framework reached through the datasets table.
    acquisition_framework = DB.relationship(
        "TAcquisitionFramework",
        uselist=False,
        secondary=TDatasets.__table__,
        primaryjoin=(TDatasets.id_dataset == id_dataset),
        secondaryjoin=(TDatasets.id_acquisition_framework ==
                       TAcquisitionFramework.id_acquisition_framework),
    )
    cor_observers = DB.relationship(
        "User",
        uselist=True,
        secondary=CorObserverSynthese.__table__,
        primaryjoin=(CorObserverSynthese.id_synthese == id_synthese),
        secondaryjoin=(User.id_role == CorObserverSynthese.id_role),
    )
    # Validation history and media are attached via the SINP UUID.
    validations = DB.relationship(
        "TValidations",
        primaryjoin=(TValidations.uuid_attached_row == unique_id_sinp),
        foreign_keys=[unique_id_sinp],
        uselist=True,
    )
    medias = DB.relationship(
        TMedias,
        primaryjoin=(unique_id_sinp == TMedias.uuid_attached_row),
        foreign_keys=[TMedias.uuid_attached_row],
    )
class VSyntheseForWebApp(DB.Model):
    """Model mapping ``gn_synthese.v_synthese_for_web_app``.

    Flattens one synthese observation (taxon names, dataset info,
    nomenclature ids, geometry) for consumption by the web application.

    Fix: ``id_nomenclature_valid_status`` was declared twice; the redundant
    second declaration has been removed (identical definition, so the
    mapped schema is unchanged).
    """

    __tablename__ = "v_synthese_for_web_app"
    __table_args__ = {"schema": "gn_synthese"}

    id_synthese = DB.Column(
        DB.Integer,
        ForeignKey("gn_synthese.v_synthese_decode_nomenclatures.id_synthese"),
        primary_key=True,
    )
    unique_id_sinp = DB.Column(UUID(as_uuid=True))
    unique_id_sinp_grp = DB.Column(UUID(as_uuid=True))
    id_source = DB.Column(DB.Integer)
    entity_source_pk_value = DB.Column(DB.Integer)
    id_dataset = DB.Column(DB.Integer)
    dataset_name = DB.Column(DB.Integer)
    id_acquisition_framework = DB.Column(DB.Integer)
    count_min = DB.Column(DB.Integer)
    count_max = DB.Column(DB.Integer)
    # Taxon reference and denormalized names.
    cd_nom = DB.Column(DB.Integer)
    cd_ref = DB.Column(DB.Unicode)
    nom_cite = DB.Column(DB.Unicode)
    nom_valide = DB.Column(DB.Unicode)
    nom_vern = DB.Column(DB.Unicode)
    lb_nom = DB.Column(DB.Unicode)
    meta_v_taxref = DB.Column(DB.Unicode)
    sample_number_proof = DB.Column(DB.Unicode)
    digital_proof = DB.Column(DB.Unicode)
    non_digital_proof = DB.Column(DB.Unicode)
    altitude_min = DB.Column(DB.Integer)
    altitude_max = DB.Column(DB.Integer)
    depth_min = DB.Column(DB.Integer)
    depth_max = DB.Column(DB.Integer)
    place_name = DB.Column(DB.Unicode)
    the_geom_4326 = DB.Column(Geometry("GEOMETRY", 4326))
    date_min = DB.Column(DB.DateTime)
    date_max = DB.Column(DB.DateTime)
    validator = DB.Column(DB.Unicode)
    validation_comment = DB.Column(DB.Unicode)
    observers = DB.Column(DB.Unicode)
    determiner = DB.Column(DB.Unicode)
    id_digitiser = DB.Column(DB.Integer)
    comment_context = DB.Column(DB.Unicode)
    comment_description = DB.Column(DB.Unicode)
    meta_validation_date = DB.Column(DB.DateTime)
    meta_create_date = DB.Column(DB.DateTime)
    meta_update_date = DB.Column(DB.DateTime)
    last_action = DB.Column(DB.Unicode)
    # Nomenclature ids.
    id_nomenclature_geo_object_nature = DB.Column(DB.Integer)
    id_nomenclature_info_geo_type = DB.Column(DB.Integer)
    id_nomenclature_grp_typ = DB.Column(DB.Integer)
    grp_method = DB.Column(DB.Unicode)
    id_nomenclature_obs_technique = DB.Column(DB.Integer)
    id_nomenclature_bio_status = DB.Column(DB.Integer)
    id_nomenclature_bio_condition = DB.Column(DB.Integer)
    id_nomenclature_naturalness = DB.Column(DB.Integer)
    id_nomenclature_exist_proof = DB.Column(DB.Integer)
    id_nomenclature_valid_status = DB.Column(DB.Integer)
    id_nomenclature_diffusion_level = DB.Column(DB.Integer)
    id_nomenclature_life_stage = DB.Column(DB.Integer)
    id_nomenclature_sex = DB.Column(DB.Integer)
    id_nomenclature_obj_count = DB.Column(DB.Integer)
    id_nomenclature_type_count = DB.Column(DB.Integer)
    id_nomenclature_sensitivity = DB.Column(DB.Integer)
    id_nomenclature_observation_status = DB.Column(DB.Integer)
    id_nomenclature_blurring = DB.Column(DB.Integer)
    id_nomenclature_source_status = DB.Column(DB.Integer)
    id_nomenclature_behaviour = DB.Column(DB.Integer)
    reference_biblio = DB.Column(DB.Unicode)
    name_source = DB.Column(DB.Unicode)
    url_source = DB.Column(DB.Unicode)
    st_asgeojson = DB.Column(DB.Unicode)

    def get_geofeature(self, recursif=False, columns=()):
        """Return this row as a GeoJSON feature built on ``the_geom_4326``.

        :param recursif: forwarded to ``as_geofeature`` (serialize relations).
        :param columns: optional restriction of the serialized columns.
        """
        return self.as_geofeature("the_geom_4326",
                                  "id_synthese",
                                  recursif,
                                  columns=columns)
class Item(BaseMetadata, LocalRolesMixin, Mixin, VersionMixin, Base):
    """Model class to be used as base for all first level class models."""

    __tablename__ = 'items'

    # Materialized ancestor chain: UUIDs of every parent object plus this
    # object's own id, maintained by create() below.
    path = sa.Column(ARRAY(UUID(as_uuid=True)), nullable=False, index=True,
                     info={
                         'colanderalchemy': {
                             'title': 'Path',
                             'missing': colander.drop,
                             'typ': colander.List
                         }
                     })
    """List of all parent objects including itself."""

    # Discriminator column used by the polymorphic mapper configured in
    # __mapper_args__.
    type = sa.Column(sa.String(50), index=True,
                     info={
                         'colanderalchemy': {
                             'title': 'type',
                             'missing': colander.drop,
                             'typ': colander.String
                         }
                     })
    """Polymorphic type."""

    @declared_attr
    def __mapper_args__(cls) -> dict:
        """Return polymorphic identity."""
        # Each subclass maps to its lower-cased class name; only the base
        # Item class declares the discriminator column.
        cls_name = cls.__name__.lower()
        args = {
            'polymorphic_identity': cls_name,
        }
        if cls_name == 'item':
            args['polymorphic_on'] = cls.type
        return args

    @classmethod
    def create(cls, payload: dict) -> 'Item':
        """Factory that creates a new instance of this object.

        :param payload: Dictionary containing attributes and values
        :type payload: dict
        """
        # we are going to change the payload so we need to avoid side effects
        payload = deepcopy(payload)
        # add local roles can_view using payload, actors and special attribute from the class
        can_view = payload.get('can_view', [])
        payload['can_view'] = list(
            set(can_view).union(cls._default_can_view()))
        # Pull actor roles out of the payload; they are applied after flush
        # via update() so local-role bookkeeping happens on a persisted row.
        actors_data = {
            actor: payload.pop(actor)
            for actor in cls.__actors__ if actor in payload
        }
        # Ensure an id exists and normalize a string id into a UUID.
        obj_id = payload.setdefault('id', uuid.uuid4())
        if isinstance(obj_id, str):
            obj_id = uuid.UUID(obj_id)
        # look for a parent id get the parent instance
        parent_attr = getattr(cls, '__parent_attr__', None)
        path = []
        parent_id = payload.get(parent_attr, None) if parent_attr else None
        if parent_id:
            parent = Item.get(parent_id)
            path = list(parent.path)
        # The object's own id always terminates its path.
        path.append(obj_id)
        payload['path'] = path
        # create and add to the session the new instance
        obj = cls(**payload)
        session = obj.__session__
        session.add(obj)
        session.flush()
        # add local roles using update method
        if actors_data:
            obj.update(actors_data)
        # TODO: fire object created event here?
        return obj

    def update(self, values: dict):
        """Update the object with given values.

        This implementation take care of update local role attributes.

        :param values: Dictionary containing attributes and values
        :type values: dict
        """
        actors = self.__class__.__actors__
        for key, value in values.items():
            if key not in actors:
                setattr(self, key, value)
            else:
                # Actor keys are role names, not plain attributes.
                set_local_roles_by_role_name(self, key, value)

    def to_dict(self, excludes: Attributes = None,
                includes: Attributes = None) -> dict:
        """Return a dictionary with fields and values used by this Class.

        :param excludes: attributes to exclude from dict representation.
        :param includes: attributes to include from dict representation.
        :returns: Dictionary with fields and values used by this Class
        """
        data = super().to_dict(excludes=excludes, includes=includes)
        # Group principal ids by role name, preserving the query's ordering.
        roles = {}
        for lr in self._all_local_roles.all():
            principal_id = lr.principal_id
            if lr.role_name not in roles:
                roles[lr.role_name] = [principal_id]
            else:
                roles[lr.role_name].append(principal_id)
        data['_roles'] = roles
        return data

    @declared_attr
    def _all_local_roles(cls):
        """All local roles for this Item using all parent objects in path."""
        # Matches any LocalRole whose item_id appears anywhere in this
        # item's path, so inherited (parent) roles are included.
        return sa.orm.relationship(
            'LocalRole',
            foreign_keys='LocalRole.item_id',
            primaryjoin='LocalRole.item_id==any_(Item.path)',
            order_by='asc(LocalRole.role_name)',
            cascade='all, delete-orphan',
            lazy='dynamic',
            info={
                'colanderalchemy': {
                    'title': 'All local roles: including from parent objects.',
                    'missing': colander.drop,
                }
            })

    def __repr__(self) -> str:
        """Representation model Item."""
        template = "<{0}(id='{1}' state='{2}' created='{3}' updated='{4}' type='{5}')>"
        return template.format(self.__class__.__name__, self.id, self.state,
                               self.created_at, self.updated_at, self.type)
from sqlalchemy import Table, Column, Enum, DateTime, Boolean, Text, ForeignKey from sqlalchemy.dialects.postgresql import UUID from . import TaskTypes from . import metadata # Tasks are stored in DB upon creation of a task object through the # Mediator, task status is updated upon completion of said task. tasks = Table( "tasks", metadata, Column("id", UUID(as_uuid=True), primary_key=True, nullable=False), Column("owner", UUID(as_uuid=True), ForeignKey('users.id', ondelete="CASCADE"), nullable=False), Column("task_type", Enum(TaskTypes)), Column("time_start", DateTime), Column("time_finished", DateTime), Column("error", Text()), Column("success", Boolean))
from sqlalchemy import Column, ForeignKey, Table from sqlalchemy.dialects.postgresql import UUID from db.database import Base analysis_module_type_directive_mapping = Table( "analysis_module_type_directive_mapping", Base.metadata, Column( "analysis_module_type_uuid", UUID(as_uuid=True), ForeignKey("analysis_module_type.uuid", ondelete="CASCADE"), index=True, primary_key=True, ), Column( "directive_uuid", UUID(as_uuid=True), ForeignKey("node_directive.uuid"), primary_key=True, ), )
class Kyc(Base): documentName = db.Column(db.String(80)) documentUrl = db.Column(db.String(200)) user = db.Column(UUID(as_uuid=True), db.ForeignKey('user.uid'))
class TStationsOcchab(ReleveCruvedAutorization):
    """Occhab station (``pr_occhab.t_stations``): a surveyed location with
    its habitats, observers and dataset, plus CRUVED authorization support
    inherited from ReleveCruvedAutorization."""

    __tablename__ = "t_stations"
    __table_args__ = {"schema": "pr_occhab"}

    id_station = DB.Column(DB.Integer, primary_key=True)
    # SINP UUID generated server-side when not provided.
    unique_id_sinp_station = DB.Column(
        UUID(as_uuid=True), default=select([func.uuid_generate_v4()]))
    id_dataset = DB.Column(
        DB.Integer, ForeignKey('gn_meta.t_datasets.id_dataset'))
    date_min = DB.Column(DB.DateTime)
    date_max = DB.Column(DB.DateTime)
    observers_txt = DB.Column(DB.Unicode)
    station_name = DB.Column(DB.Unicode)
    is_habitat_complex = DB.Column(DB.Boolean)
    id_nomenclature_exposure = DB.Column(
        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
    altitude_min = DB.Column(DB.Integer)
    altitude_max = DB.Column(DB.Integer)
    depth_min = DB.Column(DB.Integer)
    depth_max = DB.Column(DB.Integer)
    area = DB.Column(DB.Float)
    id_nomenclature_area_surface_calculation = DB.Column(
        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
    id_nomenclature_geographic_object = DB.Column(
        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
    comment = DB.Column(DB.Unicode)
    id_digitiser = DB.Column(DB.Integer)
    # BUG FIX: SRID was 4626, a typo — WGS84 lon/lat is EPSG:4326, which is
    # what every other geometry column in this file uses.
    geom_4326 = DB.Column(Geometry("GEOMETRY", 4326))

    t_habitats = relationship(
        "THabitatsOcchab", lazy="joined", cascade="all, delete-orphan")
    dataset = relationship("TDatasets", lazy="joined")
    observers = DB.relationship(
        User,
        lazy="joined",
        secondary=CorStationObserverOccHab.__table__,
        primaryjoin=(CorStationObserverOccHab.id_station == id_station),
        secondaryjoin=(CorStationObserverOccHab.id_role == User.id_role),
        foreign_keys=[
            CorStationObserverOccHab.id_station,
            CorStationObserverOccHab.id_role,
        ],
    )

    # overright the constructor
    # to inherit of ReleModel, the constructor must define some mandatory attribute
    def __init__(self, *args, **kwargs):
        super(TStationsOcchab, self).__init__(*args, **kwargs)
        # Aliases required by the CRUVED base model for permission checks.
        self.observer_rel = getattr(self, 'observers')
        self.dataset_rel = getattr(self, 'dataset')
        self.id_digitiser_col = getattr(self, 'id_digitiser')
        self.id_dataset_col = getattr(self, 'id_dataset')

    def get_geofeature(self, recursif=True):
        """Return the station as a GeoJSON feature built from ``geom_4326``,
        serializing the listed relationships."""
        return self.as_geofeature(
            "geom_4326",
            "id_station",
            recursif,
            relationships=[
                'observers',
                't_habitats',
                'habref',
                'dataset',
            ])
class Notification(Base): description = db.Column(db.String(200)) sent_status = db.Column(db.BOOLEAN, default=True) user = db.Column(UUID(as_uuid=True)) orderRelated = db.Column(UUID(as_uuid=True), db.ForeignKey('user.uid'))
class Host(db.Model):
    """Inventory host record keyed by canonical facts (insights_id, fqdn, …)."""

    __tablename__ = "hosts"
    # These Index entries are essentially place holders so that the
    # alembic autogenerate functionality does not try to remove the indexes
    __table_args__ = (
        Index("idxinsightsid", text("(canonical_facts ->> 'insights_id')")),
        Index("idxgincanonicalfacts", "canonical_facts"),
        Index("idxaccount", "account"),
    )

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    account = db.Column(db.String(10))
    # DB-side default fills display_name when the row is saved without one.
    display_name = db.Column(db.String(200), default=_set_display_name_on_save)
    ansible_host = db.Column(db.String(255))
    created_on = db.Column(db.DateTime, default=datetime.utcnow)
    modified_on = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow
    )
    facts = db.Column(JSONB)
    tags = db.Column(JSONB)
    canonical_facts = db.Column(JSONB)
    system_profile_facts = db.Column(JSONB)

    # NOTE(review): display_name=display_name and account=account bind the
    # class-level Column objects as defaults, evaluated in the class body.
    # Callers (see from_json) always pass these explicitly; calling the
    # constructor without them would pass a Column object — confirm this is
    # intentional before relying on the defaults.
    def __init__(
        self,
        canonical_facts,
        display_name=display_name,
        ansible_host=None,
        account=account,
        facts=None,
        system_profile_facts=None,
    ):
        # At least one canonical fact is mandatory for identity resolution.
        if not canonical_facts:
            raise InventoryException(title="Invalid request",
                                     detail="At least one of the canonical "
                                     "fact fields must be present.")
        self.canonical_facts = canonical_facts

        if display_name:
            # Only set the display_name field if input the display_name has
            # been set...this will make it so that the "default" logic will
            # get called during the save to fill in an empty display_name
            self.display_name = display_name
        self._update_ansible_host(ansible_host)
        self.account = account
        self.facts = facts
        self.system_profile_facts = system_profile_facts or {}

    @classmethod
    def from_json(cls, d):
        """Alternate constructor: build a Host from a request-shaped dict."""
        canonical_facts = CanonicalFacts.from_json(d)
        facts = Facts.from_json(d.get("facts"))
        return cls(
            canonical_facts,
            d.get("display_name", None),
            d.get("ansible_host"),
            d.get("account"),
            facts,
            d.get("system_profile", {}),
        )

    def to_json(self):
        """Serialize the host (canonical facts flattened into the top level)."""
        json_dict = CanonicalFacts.to_json(self.canonical_facts)
        json_dict["id"] = str(self.id)
        json_dict["account"] = self.account
        json_dict["display_name"] = self.display_name
        json_dict["ansible_host"] = self.ansible_host
        json_dict["facts"] = Facts.to_json(self.facts)
        # Timestamps are naive UTC; "Z" suffix is appended manually.
        json_dict["created"] = self.created_on.isoformat()+"Z"
        json_dict["updated"] = self.modified_on.isoformat()+"Z"
        return json_dict

    def to_system_profile_json(self):
        """Serialize only the id and the system profile facts."""
        json_dict = {"id": str(self.id),
                     "system_profile": self.system_profile_facts or {}
                     }
        return json_dict

    def save(self):
        # Adds to the session only; committing is the caller's concern.
        db.session.add(self)

    def update(self, input_host):
        """Merge all updatable fields from another Host instance."""
        self.update_canonical_facts(input_host.canonical_facts)
        self.update_display_name(input_host.display_name)
        self._update_ansible_host(input_host.ansible_host)
        self.update_facts(input_host.facts)

    def patch(self, patch_data):
        """Apply a partial update (ansible_host / display_name only).

        :raises InventoryException: if the patch document is empty.
        """
        logger.debug("patching host (id=%s) with data: %s" % (self.id, patch_data))

        if not patch_data:
            raise InventoryException(title="Bad Request",
                                     detail="Patch json document cannot be empty.")

        self._update_ansible_host(patch_data.get("ansible_host"))
        self.update_display_name(patch_data.get("display_name"))

    def _update_ansible_host(self, ansible_host):
        if ansible_host is not None:
            # Allow a user to clear out the ansible host with an empty string
            self.ansible_host = ansible_host

    def update_display_name(self, input_display_name):
        if input_display_name:
            self.display_name = input_display_name
        elif not self.display_name:
            # This is the case where the display_name is not set on the
            # existing host record and the input host does not have it set
            if "fqdn" in self.canonical_facts:
                self.display_name = self.canonical_facts["fqdn"]
            else:
                # NOTE(review): self.id is a UUID, not a str — presumably
                # coerced by the column type on save; confirm.
                self.display_name = self.id

    def update_canonical_facts(self, canonical_facts):
        logger.debug(("Updating host's (id=%s) canonical_facts (%s)"
                      " with input canonical_facts=%s")
                     % (self.id, self.canonical_facts, canonical_facts))
        self.canonical_facts.update(canonical_facts)
        logger.debug("Host (id=%s) has updated canonical_facts (%s)"
                     % (self.id, self.canonical_facts))
        # JSONB mutation is invisible to the ORM; flag it explicitly.
        orm.attributes.flag_modified(self, "canonical_facts")

    def update_facts(self, facts_dict):
        if facts_dict:
            if not self.facts:
                # No existing facts: take the input wholesale.
                self.facts = facts_dict
                return

            for input_namespace, input_facts in facts_dict.items():
                self.replace_facts_in_namespace(input_namespace, input_facts)

    def replace_facts_in_namespace(self, namespace, facts_dict):
        self.facts[namespace] = facts_dict
        orm.attributes.flag_modified(self, "facts")

    def merge_facts_in_namespace(self, namespace, facts_dict):
        if not facts_dict:
            return

        if self.facts[namespace]:
            # Input facts win on key collisions.
            self.facts[namespace] = {**self.facts[namespace], **facts_dict}
        else:
            # The value currently stored in the namespace is None so replace it
            self.facts[namespace] = facts_dict
        orm.attributes.flag_modified(self, "facts")

    def _update_system_profile(self, input_system_profile):
        logger.debug("Updating host's (id=%s) system profile" % (self.id))
        if not self.system_profile_facts:
            self.system_profile_facts = input_system_profile
        else:
            # Update the fields that were passed in
            self.system_profile_facts = {**self.system_profile_facts,
                                         **input_system_profile}
        orm.attributes.flag_modified(self, "system_profile_facts")

    def __repr__(self):
        tmpl = "<Host id='%s' account='%s' display_name='%s' canonical_facts=%s>"
        return tmpl % (
            self.id,
            self.account,
            self.display_name,
            self.canonical_facts,
        )
class Study_day(CommonModel): __tablename__ = 'study_day' id_study_day = db.Column(UUID(as_uuid=True)) Schedule_id = db.Column(UUID(as_uuid=True), db.ForeignKey("schedule.id")) Schedule = db.relationship("Schedule")
class Poll(base):
    """The model for a Poll."""

    __tablename__ = 'poll'

    id = Column(Integer, primary_key=True)
    uuid = Column(UUID(as_uuid=True), unique=True, nullable=False,
                  server_default=text('gen_random_uuid()'))
    created_at = Column(DateTime, server_default=func.now(), nullable=False)
    updated_at = Column(DateTime, server_default=func.now(),
                        onupdate=func.now(), nullable=False)

    # Options
    name = Column(String)
    description = Column(String)
    locale = Column(String, server_default='english')
    poll_type = Column(String, nullable=False)
    anonymous = Column(Boolean, nullable=False)
    number_of_votes = Column(Integer)
    allow_new_options = Column(Boolean, nullable=False, default=False)
    option_sorting = Column(String, nullable=False)
    user_sorting = Column(String, nullable=False)
    results_visible = Column(Boolean, nullable=False, default=True)
    show_percentage = Column(Boolean, nullable=False, default=True)
    european_date_format = Column(Boolean, nullable=False, default=False)

    # Flags
    created = Column(Boolean, nullable=False, default=False)
    closed = Column(Boolean, nullable=False, default=False)
    due_date = Column(DateTime, nullable=True)
    next_notification = Column(DateTime, nullable=True)

    # Chat state variables
    expected_input = Column(String)
    in_settings = Column(Boolean, nullable=False, default=False)
    current_date = Column(Date, server_default=func.now(), nullable=False)

    # OneToOne
    user_id = Column(BigInteger,
                     ForeignKey('user.id', ondelete='cascade', name='user'),
                     nullable=False, index=True)
    user = relationship('User', foreign_keys='Poll.user_id')

    # OneToMany
    options = relationship('PollOption', order_by='asc(PollOption.id)',
                           lazy='joined', passive_deletes='all')
    votes = relationship('Vote', passive_deletes=True)
    references = relationship('Reference', lazy='joined',
                              passive_deletes='all')
    notifications = relationship('Notification', passive_deletes='all')

    def __init__(self, user):
        """Create a new poll with single-vote, chronological defaults."""
        self.user = user
        self.poll_type = PollType.single_vote.name
        self.anonymous = False
        self.results_visible = True
        self.user_sorting = UserSorting.user_chrono.name
        self.option_sorting = OptionSorting.option_chrono.name

    def __repr__(self):
        """Print as string."""
        return f'Poll with Id: {self.id}, name: {self.name}'

    def should_show_result(self):
        """Determine, whether this results of this poll should be shown."""
        return self.results_visible or self.closed

    def has_date_option(self):
        """Check whether this poll has a date option."""
        return any(option.is_date for option in self.options)

    def get_formatted_due_date(self):
        """Get the formatted date, honouring the European-format flag."""
        if self.european_date_format:
            return self.due_date.strftime('%d.%m.%Y %H:%M UTC')

        return self.due_date.strftime('%Y-%m-%d %H:%M UTC')

    def set_due_date(self, date):
        """Set the due date and the next notification.

        :param date: the new due date, or ``None`` to clear it.
        """
        # BUG FIX: passing None previously crashed below with a TypeError
        # (comparing datetime with None); clearing the due date now also
        # clears any pending notification.
        if date is None:
            self.due_date = None
            self.next_notification = None
            return

        now = datetime.now()
        self.due_date = date
        # Schedule the next reminder at the largest remaining interval
        # (7 days, 1 day, 6 hours) before the due date.
        if now < self.due_date - timedelta(days=7):
            self.next_notification = self.due_date - timedelta(days=7)
        elif now < self.due_date - timedelta(days=1):
            self.next_notification = self.due_date - timedelta(days=1)
        elif now < self.due_date - timedelta(hours=6):
            self.next_notification = self.due_date - timedelta(hours=6)
        else:
            self.next_notification = self.due_date

    def clone(self, session):
        """Create a clone from the current poll.

        Copies all option settings and the poll's options; votes, references
        and notifications are NOT copied. The clone is added to *session*.
        """
        poll = Poll(self.user)
        poll.created = True
        session.add(poll)

        poll.name = self.name
        poll.description = self.description
        poll.poll_type = self.poll_type
        poll.anonymous = self.anonymous
        poll.number_of_votes = self.number_of_votes
        poll.allow_new_options = self.allow_new_options
        poll.option_sorting = self.option_sorting
        poll.user_sorting = self.user_sorting
        poll.results_visible = self.results_visible
        poll.show_percentage = self.show_percentage

        # Local import avoids a circular dependency at module load time.
        from pollbot.models import PollOption
        for option in self.options:
            new_option = PollOption(poll, option.name)
            session.add(new_option)

        return poll
class Study_class(CommonModel): __tablename__ = 'study_class' id_study_class = db.Column(UUID(as_uuid=True)) class_name = db.Column(String(20)) Schedule_id = db.Column(UUID(as_uuid=True), db.ForeignKey("schedule.id")) Schedule = db.relationship("Schedule")
class MineStatus(AuditMixin, Base):
    """Join entity tying a mine to an entry of the status taxonomy
    (MineStatusXref), with effective/expiry dating and an active flag."""

    __tablename__ = 'mine_status'

    mine_status_guid = db.Column(
        UUID(as_uuid=True), primary_key=True, server_default=FetchedValue())
    mine_guid = db.Column(UUID(as_uuid=True), db.ForeignKey('mine.mine_guid'))
    mine_status_xref_guid = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey('mine_status_xref.mine_status_xref_guid'))
    status_date = db.Column(db.DateTime, nullable=True, default=None)
    effective_date = db.Column(
        db.DateTime, nullable=False, default=datetime.utcnow)
    expiry_date = db.Column(
        db.DateTime, nullable=False,
        default=datetime.strptime('9999-12-31', '%Y-%m-%d'))
    active_ind = db.Column(
        db.Boolean, nullable=False, server_default=FetchedValue())
    mine_status_xref = db.relationship('MineStatusXref', lazy='joined')
    status_description = association_proxy('mine_status_xref', 'description')

    # (code attribute, related-object attribute) pairs on the xref row,
    # ordered from the most general status level to the most specific.
    _STATUS_LEVELS = (
        ('mine_operation_status_code', 'mine_operation_status'),
        ('mine_operation_status_reason_code', 'mine_operation_status_reason'),
        ('mine_operation_status_sub_reason_code',
         'mine_operation_status_sub_reason'),
    )

    @hybrid_property
    def status_values(self):
        """Machine codes of every populated status level, general first."""
        xref = self.mine_status_xref
        return [
            getattr(xref, code_attr)
            for code_attr, _ in self._STATUS_LEVELS
            if getattr(xref, code_attr)
        ]

    @hybrid_property
    def status_labels(self):
        """Human-readable descriptions for every populated status level."""
        xref = self.mine_status_xref
        return [
            getattr(xref, related_attr).description
            for code_attr, related_attr in self._STATUS_LEVELS
            if getattr(xref, code_attr)
        ]

    def __repr__(self):
        return '<MineStatus %r>' % self.mine_status_guid

    def validate_status_code_exists(self, mine_status_xref, mine_status_code,
                                    code_or_description):
        """Look up a code/description in a nested status mapping.

        Returns the stored value, or ``None`` when either key is absent.
        """
        try:
            return mine_status_xref[mine_status_code][code_or_description]
        except KeyError:
            return None

    def json(self, show_mgr=True):
        """Serialize this status row for API responses."""
        return {
            'mine_status_guid': str(self.mine_status_guid),
            'mine_guid': str(self.mine_guid),
            'mine_status_xref_guid':
            str(self.mine_status_xref.mine_status_xref_guid),
            'status_values': self.status_values,
            'status_labels': self.status_labels,
            'effective_date': self.effective_date.isoformat(),
            'expiry_date':
            self.expiry_date.isoformat()
            if self.expiry_date is not None else None
        }

    @classmethod
    def find_by_mine_status_guid(cls, _id):
        """Fetch a single status row by its own guid."""
        return cls.query.filter_by(mine_status_guid=_id).first()

    @classmethod
    def find_by_mine_guid(cls, _id):
        """Fetch the active status row for a mine, if any."""
        return cls.query.filter_by(mine_guid=_id, active_ind=True).first()
def Id(): return Column( UUID(as_uuid=True), primary_key=True, server_default=sqlalchemy.text("uuid_generate_v4()"), )
def load_dialect_impl(self, dialect): if dialect.name == 'postgresql': return dialect.type_descriptor(UUID()) else: return dialect.type_descriptor(CHAR(32))
class Rooms(Base, BaseModel): __tablename__ = 'rooms' room_id = sq.Column(sq.String(32), primary_key=True) created_at = sq.Column(sq.TIMESTAMP) end_time = sq.Column(sq.TIMESTAMP) creator = sq.Column(UUID())
def updated_by(cls): return db.Column(UUID(as_uuid=True), db.ForeignKey('users.id'), default=current_user_id, onupdate=current_user_id)
class HistoricalSystemProfile(db.Model):
    """Point-in-time snapshot of a host's system profile, keyed by its own
    generated id and linked back to the inventory host."""

    __tablename__ = "historical_system_profiles"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    account = db.Column(db.String(10), nullable=False)
    inventory_id = db.Column(UUID(as_uuid=True), index=True)
    created_on = db.Column(db.DateTime, default=datetime.utcnow)
    system_profile = db.Column(JSONB)
    captured_on = db.Column(db.DateTime, default=datetime.utcnow)

    def __init__(self, system_profile, inventory_id, account):
        self.inventory_id = inventory_id
        self.account = account
        self.system_profile = system_profile
        # set the ID here so we can override the system profile's id with the historical profile
        generated_id = str(uuid.uuid4())
        self.id = generated_id
        self.system_profile["id"] = generated_id
        # set this now and not at commit time. It is needed as the fallback value for captured_on.
        self.created_on = datetime.utcnow()
        self.captured_on = self._get_captured_date()

    def _get_captured_date(self):
        """Return the UTC-aware capture timestamp: the profile's own
        ``captured_date`` when present, otherwise ``created_on``."""
        captured_dt = self.created_on
        if self.system_profile.get("captured_date", None):
            captured_dt = dateutil.parser.parse(
                self.system_profile["captured_date"])
        return self._get_utc_aware_dt(captured_dt)

    def _get_utc_aware_dt(self, datetime_in):
        """
        https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive

        assume UTC if no timezone exists for captured_date. This field is
        read from `date --utc` on the client system; some records are in
        UTC but don't have a TZ due to a bug I introduced that's since been
        fixed.
        """
        if (datetime_in.tzinfo is None
                or datetime_in.tzinfo.utcoffset(datetime_in) is None):
            return pytz.utc.localize(datetime_in)
        else:
            return datetime_in

    @property
    def display_name(self):
        return self.system_profile["display_name"]

    @property
    def captured_date(self):
        """UTC-aware capture timestamp of this snapshot."""
        # CONSISTENCY FIX: this property previously duplicated the body of
        # _get_captured_date verbatim; it now delegates to it.
        return self._get_captured_date()

    def to_json(self):
        """Serialize the snapshot for API responses."""
        created_dt = self._get_utc_aware_dt(self.created_on)
        json_dict = {}
        json_dict["id"] = str(self.id)
        json_dict["account"] = self.account
        # NOTE(review): unlike "id", inventory_id is not str()-wrapped —
        # a raw UUID is not JSON-serializable by the stdlib encoder; confirm
        # the serializer in use handles it (or whether str() is intended).
        json_dict["inventory_id"] = self.inventory_id
        json_dict["created"] = created_dt.isoformat()
        json_dict["system_profile"] = self.system_profile
        json_dict["display_name"] = self.display_name
        json_dict["captured_date"] = self.captured_date
        return json_dict