コード例 #1
0
ファイル: test_defaults.py プロジェクト: xen/asyncpgsa
# Default values for the columns of the ``users`` table defined below.
# A mix of plain values and a callable so both kinds of SQLAlchemy
# ``default`` are exercised.
t_int_enum_default = MyIntEnum.ITEM_1
t_datetime_default = datetime(2017, 1, 1)
t_date_default = date(2017, 1, 1)
# Callable default: re-evaluated on every insert (unlike the constants above).
t_date_2_default = lambda: date(2017, 2, 1)
t_interval_default = timedelta(seconds=60)
t_boolean_default = True

users = Table(
    'users', metadata, Column('id', PG_UUID, unique=True, default=uuid4),
    Column('name', types.String(60), nullable=False, default=name_default),
    Column('t_list',
           types.ARRAY(types.String(60)),
           nullable=False,
           default=t_list_default),
    Column('t_enum',
           types.Enum(MyEnum),
           nullable=False,
           default=t_enum_default),
    Column('t_int_enum',
           types.Enum(MyIntEnum),
           nullable=False,
           default=t_int_enum_default),
    Column('t_datetime',
           types.DateTime(),
           nullable=False,
           default=t_datetime_default),
    Column('t_date', types.DateTime(), nullable=False, default=t_date_default),
    Column('t_date_2',
           types.DateTime(),
           nullable=False,
           default=t_date_2_default),
コード例 #2
0
class Section(db.Model):
    """ A model for sections.

    NOTE:   Currently forces the instructor to have an account first before
            sections can be stored.

    TODO:   Interface with the Attendance table here not in Users
    """

    __tablename__ = "sections"
    id = Column(db.Integer, primary_key=True)
    # External (Berkeley) section identifier, distinct from the surrogate id.
    section_id = Column(db.String(255), nullable=False, unique=True)
    section_type = Column(types.Enum(SectionType), nullable=False)
    instructor_id = Column(db.Integer, ForeignKey('users.id'))
    # Pickled recurrence rule describing this section's meeting dates.
    date_rule = Column(db.PickleType, nullable=False)
    location = Column(db.String(255), nullable=False)

    # Relationships
    assistants = db.relationship("Enrollment")
    attendance = db.relationship("Attendance")
    instructor = db.relationship("User", back_populates="sections")

    def get_enrolled_assistants(self):
        """ Return all lab assistants assigned to this section. """
        return self.assistants

    def get_attendance_by_date(self, date):
        """ Returns the attendance for this section on DATE. """
        return [row for row in self.attendance if row.date == date]

    def is_valid_date(self, date):
        """ Returns true if DATE is a valid class date for this section. """
        return date_in_rule(date, self.date_rule)

    @transaction
    def add_section(self):
        """ Add this section to the session, rejecting duplicate ids.

        Raises TypeError when a section with the same section_id exists.
        """
        section = Section.lookup_by_section_id(self.section_id)
        if section is not None:
            logger.warning("Cannot add duplicate section {0}".format(self.section_id))
            raise TypeError("Cannot add duplicate section")
        db.session.add(self)

    @staticmethod
    def lookup_by_section_id(section_id):
        """ Returns the section with the Berkeley id SECTION_ID. """
        return Section.query.filter_by(section_id=section_id).one_or_none()

    @staticmethod
    def lookup_by_instructor_id(instructor_id):
        """ Returns a list of sections associated with the instructors id. """
        return Section.query.filter_by(instructor_id=instructor_id)

    @staticmethod
    @transaction
    def load_sections_from_csv(contents):
        """ Populates the Section table from CONTENTS. Expects contents to be
        a list of dicts where each element is a row in the table.

        Raises TypeError when required columns are missing or when any
        instructor listed in CONTENTS has no user account.
        """
        if not check_sections_csv(contents):
            raise TypeError("Missing necessary columns")

        not_added = set()
        for entry in contents:
            section = Section.lookup_by_section_id(entry['section_id'])
            if section is None:
                user = User.lookup_by_sid(entry['instructor_id'])
                if user is None:
                    not_added.add(entry['instructor_id'])
                else:
                    logger.info("CALLING(load_sections_from_csv) creating section "
                        + entry['section_id']
                    )
                    date_rule = generate_rrule(entry['start_date'], entry['start_time'])
                    section = Section(section_id=entry['section_id'],
                        section_type=entry['section_type'],
                        instructor_id=user.id,
                        date_rule=date_rule,
                        location=entry['location']
                    )
                    db.session.add(section)
        if len(not_added) > 0:
            # BUG FIX: ``not_added`` is a set; the old code concatenated it
            # directly to a str, raising TypeError and masking the intended
            # error message. Format the ids into a string explicitly.
            missing = ", ".join(sorted(str(i) for i in not_added))
            logger.warning("CALLING(load_sections_from_csv) missing instructors " + missing)
            raise TypeError("Instructors do not have an account! " + missing)
コード例 #3
0
class Sample(Model, PriorityMixin):
    """A sequencing sample tied to a customer and an application version."""

    age_at_sampling = Column(types.FLOAT)
    application_version_id = Column(ForeignKey("application_version.id"), nullable=False)
    application_version = orm.relationship(
        ApplicationVersion, foreign_keys=[application_version_id]
    )
    capture_kit = Column(types.String(64))
    comment = Column(types.Text)
    control = Column(types.Enum(*CONTROL_OPTIONS))
    created_at = Column(types.DateTime, default=dt.datetime.now)
    customer_id = Column(ForeignKey("customer.id", ondelete="CASCADE"), nullable=False)
    customer = orm.relationship("Customer", foreign_keys=[customer_id])
    delivered_at = Column(types.DateTime)
    deliveries = orm.relationship(Delivery, backref="sample")
    downsampled_to = Column(types.BigInteger)
    from_sample = Column(types.String(128))
    id = Column(types.Integer, primary_key=True)
    internal_id = Column(types.String(32), nullable=False, unique=True)
    invoice_id = Column(ForeignKey("invoice.id"))
    invoiced_at = Column(types.DateTime)  # DEPRECATED
    _is_external = Column("is_external", types.Boolean)  # DEPRECATED
    is_tumour = Column(types.Boolean, default=False)
    loqusdb_id = Column(types.String(64))
    name = Column(types.String(128), nullable=False)
    no_invoice = Column(types.Boolean, default=False)
    order = Column(types.String(64))
    ordered_at = Column(types.DateTime, nullable=False)
    organism_id = Column(ForeignKey("organism.id"))
    organism = orm.relationship("Organism", foreign_keys=[organism_id])
    # Comma-separated storage; exposed as lists via the properties below.
    _phenotype_groups = Column(types.Text)
    _phenotype_terms = Column(types.Text)
    prepared_at = Column(types.DateTime)

    priority = Column(types.Enum(Priority), default=Priority.standard, nullable=False)
    reads = Column(types.BigInteger, default=0)
    received_at = Column(types.DateTime)
    reference_genome = Column(types.String(255))
    sequence_start = Column(types.DateTime)
    sequenced_at = Column(types.DateTime)
    sex = Column(types.Enum(*SEX_OPTIONS), nullable=False)
    subject_id = Column(types.String(128))
    ticket_number = Column(types.Integer)
    time_point = Column(types.Integer)

    def __str__(self) -> str:
        return f"{self.internal_id} ({self.name})"

    @property
    @deprecated(
        version="1.4.0",
        message="This field is deprecated, use sample.application_version.application.is_external",
    )
    def is_external(self):
        """Return if this is an externally sequenced sample."""
        return self._is_external

    @property
    def sequencing_qc(self) -> bool:
        """Return sequencing qc passed or failed."""
        application = self.application_version.application
        # Express priority needs to be analyzed at a lower threshold for primary analysis
        if self.priority == Priority.express:
            one_half_of_target_reads = application.target_reads / 2
            return self.reads >= one_half_of_target_reads
        # NOTE(review): strict ``>`` here vs ``>=`` on the express branch — confirm intended.
        return self.reads > application.expected_reads

    @property
    def phenotype_groups(self) -> List[str]:
        """Return a list of phenotype_groups."""
        return self._phenotype_groups.split(",") if self._phenotype_groups else []

    @phenotype_groups.setter
    def phenotype_groups(self, phenotype_term_list: List[str]):
        # Stored as one comma-separated text column; None when list is empty.
        self._phenotype_groups = ",".join(phenotype_term_list) if phenotype_term_list else None

    @property
    def phenotype_terms(self) -> List[str]:
        """Return a list of phenotype_terms."""
        return self._phenotype_terms.split(",") if self._phenotype_terms else []

    @phenotype_terms.setter
    def phenotype_terms(self, phenotype_term_list: List[str]):
        self._phenotype_terms = ",".join(phenotype_term_list) if phenotype_term_list else None

    @property
    def state(self) -> str:
        """Get the current sample state."""
        # Checked most-advanced first: delivered > sequenced > sequencing > received.
        if self.delivered_at:
            return f"Delivered {self.delivered_at.date()}"
        if self.sequenced_at:
            return f"Sequenced {self.sequenced_at.date()}"
        if self.sequence_start:
            return f"Sequencing {self.sequence_start.date()}"
        if self.received_at:
            return f"Received {self.received_at.date()}"

        return f"Ordered {self.ordered_at.date()}"

    def to_dict(self, links: bool = False, flowcells: bool = False) -> dict:
        """Represent as dictionary"""
        data = super(Sample, self).to_dict()
        # priority_human presumably provided by PriorityMixin — confirm.
        data["priority"] = self.priority_human
        data["customer"] = self.customer.to_dict()
        data["application_version"] = self.application_version.to_dict()
        data["application"] = self.application_version.application.to_dict()
        if links:
            data["links"] = [link_obj.to_dict(family=True, parents=True) for link_obj in self.links]
        if flowcells:
            data["flowcells"] = [flowcell_obj.to_dict() for flowcell_obj in self.flowcells]
        return data
コード例 #4
0
# coding: utf-8

from __future__ import unicode_literals

from sqlalchemy import schema, types, ForeignKey
from sqlalchemy.orm import relationship
from ..base import Model
from ..audit import AuditModel
from ..id_generator import id_generator, date_generator


# Allowed check categories (in order): pre-flight check, post-flight check,
# periodic check, line check.
checkType = types.Enum('航前检查', '航后检查', '定期检查', '航线检查')


class MaintenanceRecord(Model, AuditModel):
    "维护保养记录的模型定义"
    __tablename__ = 'maintenance_record'

    def _id_generator():
        return id_generator('WHJL', MaintenanceRecord, 'recordNum')

    id = schema.Column(types.Integer, primary_key=True)
    # 编号
    recordNum = schema.Column(types.String(255), default=_id_generator)
    # 机型
    planeType = schema.Column(types.String(255))
    # 飞机注册号
    jihao = schema.Column(types.String(255))
    # 检查类型
    checkType = schema.Column(checkType, default='航前检查')
    # 检查地点
コード例 #5
0
class Job(SystemModel):
    """Persistent record of one job execution and its state machine."""

    __tablename__ = "job"

    id = Column(types.Integer, primary_key=True)
    job_id = Column(types.String)
    # Unique identifier for this particular run.
    run_id = Column(GUID, nullable=False, default=uuid.uuid4)
    state = Column(types.Enum(State, name="job_state"))
    started_at = Column(types.DateTime)
    ended_at = Column(types.DateTime)
    # Mutable JSON payload; MutableDict makes in-place updates dirty the row.
    payload = Column(MutableDict.as_mutable(JSONEncodedDict))
    payload_flags = Column(IntFlag, default=0)
    trigger = Column(types.String, default=current_trigger)

    def __init__(self, **kwargs):
        # New jobs start IDLE with an empty payload unless told otherwise.
        kwargs["state"] = kwargs.get("state", State.IDLE)
        kwargs["payload"] = kwargs.get("payload", {})
        super().__init__(**kwargs)

    def is_running(self):
        """Return True while the job is in the RUNNING state."""
        return self.state is State.RUNNING

    def has_error(self):
        """Return True if the job ended in the FAIL state."""
        return self.state is State.FAIL

    def is_complete(self):
        """Return True once the job reached a terminal state."""
        return self.state in (State.SUCCESS, State.FAIL)

    def is_success(self):
        """Return True if the job finished successfully."""
        return self.state is State.SUCCESS

    def can_transit(self, state: State) -> bool:
        """Return True when moving to *state* is allowed from the current state."""
        if self.state is state:
            return True

        return state.name in self.state.transitions()

    # BUG FIX: the return annotation was the tuple literal ``(State, State)``,
    # which is not a valid type annotation (PEP 484); use a lazy string form.
    def transit(self, state: State) -> "tuple[State, State]":
        """Move to *state* and return the (old, new) transition pair.

        Transiting to the current state is a no-op. Raises
        ImpossibleTransitionError for a disallowed transition.
        """
        transition = (self.state, state)

        if not self.can_transit(state):
            raise ImpossibleTransitionError(transition)

        if self.state is state:
            return transition

        self.state = state

        return transition

    @contextmanager
    def run(self, session):
        """Context manager wrapping one job execution.

        Marks the job RUNNING on entry and SUCCESS on a clean exit; on any
        exception it records FAIL (stashing the error in the payload) and
        re-raises. Every state change is persisted via ``save``.
        """
        try:
            self.start()
            self.save(session)

            yield

            self.success()
            self.save(session)
        except Exception as err:
            logging.error(err)
            self.fail(error=err)
            self.save(session)
            raise

    def start(self):
        """Record the start time and move to RUNNING."""
        self.started_at = datetime.utcnow()
        self.transit(State.RUNNING)

    def fail(self, error=None):
        """Record the end time, move to FAIL, and store *error* if given."""
        self.ended_at = datetime.utcnow()
        self.transit(State.FAIL)
        if error:
            self.payload.update({"error": str(error)})

    def success(self):
        """Record the end time and move to SUCCESS."""
        self.ended_at = datetime.utcnow()
        self.transit(State.SUCCESS)

    def __repr__(self):
        return (
            "<Job(id='%s', job_id='%s', state='%s', started_at='%s', ended_at='%s')>"
            %
            (self.id, self.job_id, self.state, self.started_at, self.ended_at))

    def save(self, session):
        """Add the job to *session*, commit, and return self for chaining."""
        session.add(self)
        session.commit()

        return self
コード例 #6
0
class RecurringCharge(Base):
    """A charge applied repeatedly on a fixed schedule (daily/monthly/yearly)."""

    __tablename__ = "recurring_charge"
    __table_args__ = (
        sql_schema.ForeignKeyConstraint(["user_id", "u_community_id"],
                                        ["user.id", "user.community_id"]),
        sql_schema.ForeignKeyConstraint(
            ["residence_id", "r_community_id"],
            ["residence.id", "residence.community_id"],
        ),
        # Both composite FKs must point into the same community.
        sql_schema.CheckConstraint("u_community_id = r_community_id"),
        # A charge targets either a residence or a user, never both.
        sql_schema.CheckConstraint("residence_id is null or user_id is null"),
    )
    id = Column(sql_types.Integer, primary_key=True)
    residence_id = Column(sql_types.Integer, nullable=True)
    user_id = Column(sql_types.Integer, nullable=True)
    r_community_id = Column(sql_types.Integer, nullable=True)
    u_community_id = Column(sql_types.Integer, nullable=True)

    name = Column(sql_types.String(200), nullable=False)
    # Amount in hundredths (cents); see the ``amount`` hybrid property.
    base_amount = Column(sql_types.Integer, nullable=False)
    frequency = Column(sql_types.Enum(Frequency), nullable=False)
    # Charge every N periods (e.g. 2 with MONTHLY means bimonthly).
    frequency_skip = Column(sql_types.Integer, nullable=False, default=1)
    grace_period = Column(sql_types.Interval, nullable=False)
    next_charge = Column(sql_types.Date, nullable=False)

    def __repr__(self):
        return (f"RecurringCharge("
                f"name={self.name},"
                f"amount={self.amount},"
                f"frequency={self.frequency},"
                f"frequency_skip={self.frequency_skip},"
                f"grace_period={self.grace_period},"
                f"next_charge={self.next_charge},"
                f")")

    @hybrid_property
    def amount(self):
        """Charge amount as a Decimal (``base_amount`` is stored in cents)."""
        return decimal.Decimal(".01") * self.base_amount

    @property
    def formatted_amount(self):
        """Dollar-formatted amount, e.g. ``$12.34``."""
        return f"${self.amount}"

    def create_charge(self):
        """Build (without persisting) the BillingCharge due at ``next_charge``."""
        new_charge = BillingCharge(
            name=self.name,
            amount=self.amount,
            paid=False,
            charge_date=self.next_charge,
            due_date=self.next_charge + self.grace_period,
        )
        return new_charge

    def find_next_date(self):
        """Return the date of the charge after ``next_charge``.

        Returns None for an unrecognized frequency. NOTE(review): when the
        target month/year is shorter than ``next_charge.day`` (e.g. Jan 31
        plus one month), ``date.replace`` raises ValueError — confirm the
        caller handles or avoids that case.
        """
        if self.frequency == Frequency.YEARLY:
            return self.next_charge.replace(year=self.next_charge.year +
                                            self.frequency_skip)
        elif self.frequency == Frequency.MONTHLY:
            # BUG FIX: the old code computed ``month + skip`` and used
            # ``next_month % 12`` directly, which produced month 0
            # (ValueError) whenever the target month was December.
            # Work with a zero-based month index instead.
            months = self.next_charge.month - 1 + self.frequency_skip
            return self.next_charge.replace(year=self.next_charge.year +
                                            months // 12,
                                            month=months % 12 + 1)
        elif self.frequency == Frequency.DAILY:
            return self.next_charge + datetime.timedelta(
                days=self.frequency_skip)
コード例 #7
0
ファイル: hh_data2.py プロジェクト: dckc/hh-office
class Insurance(IntId, Audited, Base):
    """Insurance policy details for a client.

    Field numbers in the comments refer to user_print_file_spec.csv
    (claim-form layout).
    """
    __tablename__ = 'Insurance'
    # BUG FIX: was ``__table_args`` (missing trailing underscores), so
    # SQLAlchemy silently ignored the mysql_engine option.
    __table_args__ = dict(mysql_engine='InnoDB')

    Carrier_id = Column(INTEGER(),
           ForeignKey('Carrier.id', ondelete="CASCADE"),
           nullable=False)
    carrier = orm.relationship('Carrier')

    # for office use, reports, etc
    notice = Column(TextLine)
    details = Column(TEXT())

    deductible = Column(TextLine)
    copay = Column(Money)
    deductible_met = Column(BOOLEAN(), server_default=text('0'))

    # Field 1 from user_print_file_spec.csv
    # NOTE(review): server_default=text('Group Health Plan') emits the raw,
    # unquoted string into DDL — verify the generated schema is as intended.
    payer_type = Column(types.Enum('Medicare',
                                   'Medicaid',
                                   'Group Health Plan',
                                   'Other'), nullable=False,
                        server_default=text('Group Health Plan'))
    id_number = Column(types.String(30), nullable=False)
    # Field 2
    # Column('patient_name', types.String(30), nullable=False),
    Client_id = Column(INTEGER(),
           ForeignKey('Client.id', ondelete="CASCADE"),
           nullable=False)
    client = orm.relationship('Client')
    # Field 3
    #Column('patient_dob', types.Date, nullable=False),
    patient_sex = Column(types.Enum('M', 'F'), nullable=False)
    # Field 4
    insured_name = Column(types.String(30))
    # Field 5: see Client
    # Field 6
    patient_rel = Column(types.Enum('Self', 'Spouse', 'Child', 'Other'),
           nullable=False)
    # Field 7
    insured_address = Column(types.String(30))
    insured_city = Column(types.String(24))
    insured_state = Column(types.String(3))
    insured_zip = Column(types.String(12))
    insured_phone = Column(types.String(15))
    # Field 8
    patient_status = Column(types.Enum('Single', 'Married', 'Other'))
    patient_status2 = Column(types.Enum('Employed',
                                        'Full Time Student',
                                        'Part Time Student'))
    # skip 10
    # Field 11
    insured_policy = Column(types.String(30))
    # Field 11a
    insured_dob = Column(types.Date)
    insured_sex = Column(types.Enum('M', 'F'))
    # 12, 13 are blank; skip 14-18; 19 is reserved
    # 20 is computed per-claim
    # Field 21
    dx1 = Column(types.String(8), ForeignKey('Diagnosis.icd9'), nullable=False)
    dx2 = Column(types.String(8), ForeignKey('Diagnosis.icd9'))
    # Field 23
    approval = Column(TEXT())
コード例 #8
0
def define_harvester_tables():
    """Define the harvest_* tables and map the Harvest* classes onto them.

    Binds every table to the module-level ``metadata``, assigns the table
    objects to the module-level globals declared below, wires up classic
    ``mapper``/``relation`` mappings, and registers a ``before_insert``
    listener for HarvestObject.
    """

    global harvest_source_table
    global harvest_job_table
    global harvest_object_table
    global harvest_object_extra_table
    global harvest_gather_error_table
    global harvest_object_error_table
    global harvest_log_table

    harvest_source_table = Table(
        'harvest_source',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('url', types.UnicodeText, nullable=False),
        Column('title', types.UnicodeText, default=u''),
        Column('description', types.UnicodeText, default=u''),
        Column('config', types.UnicodeText, default=u''),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
        Column('type', types.UnicodeText, nullable=False),
        Column('active', types.Boolean, default=True),
        Column('user_id', types.UnicodeText, default=u''),
        Column('publisher_id', types.UnicodeText, default=u''),
        Column('frequency', types.UnicodeText, default=u'MANUAL'),
        Column('next_run', types.DateTime),
    )
    # Was harvesting_job
    harvest_job_table = Table(
        'harvest_job',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
        Column('gather_started', types.DateTime),
        Column('gather_finished', types.DateTime),
        Column('finished', types.DateTime),
        Column('source_id', types.UnicodeText,
               ForeignKey('harvest_source.id')),
        # status: New, Running, Finished
        Column('status', types.UnicodeText, default=u'New', nullable=False),
    )
    # A harvest_object contains a representation of one dataset during a
    # particular harvest
    harvest_object_table = Table(
        'harvest_object',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        # The guid is the 'identity' of the dataset, according to the source.
        # So if you reharvest it, then the harvester knows which dataset to
        # update because of this identity. The identity needs to be unique
        # within this CKAN.
        Column('guid', types.UnicodeText, default=u''),
        # When you harvest a dataset multiple times, only the latest
        # successfully imported harvest_object should be flagged 'current'.
        # The import_stage usually reads and writes it.
        Column('current', types.Boolean, default=False),
        Column('gathered', types.DateTime, default=datetime.datetime.utcnow),
        Column('fetch_started', types.DateTime),
        Column('content', types.UnicodeText, nullable=True),
        Column('fetch_finished', types.DateTime),
        Column('import_started', types.DateTime),
        Column('import_finished', types.DateTime),
        # state: WAITING, FETCH, IMPORT, COMPLETE, ERROR
        Column('state', types.UnicodeText, default=u'WAITING'),
        Column('metadata_modified_date', types.DateTime),
        Column('retry_times', types.Integer, default=0),
        Column('harvest_job_id', types.UnicodeText,
               ForeignKey('harvest_job.id')),
        Column('harvest_source_id', types.UnicodeText,
               ForeignKey('harvest_source.id')),
        Column('package_id',
               types.UnicodeText,
               ForeignKey('package.id', deferrable=True),
               nullable=True),
        # report_status: 'added', 'updated', 'not modified', 'deleted', 'errored'
        Column('report_status', types.UnicodeText, nullable=True),
        Index('harvest_job_id_idx', 'harvest_job_id'),
    )

    # New table
    harvest_object_extra_table = Table(
        'harvest_object_extra',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_object_id', types.UnicodeText,
               ForeignKey('harvest_object.id')),
        Column('key', types.UnicodeText),
        Column('value', types.UnicodeText),
    )

    # New table
    harvest_gather_error_table = Table(
        'harvest_gather_error',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_job_id', types.UnicodeText,
               ForeignKey('harvest_job.id')),
        Column('message', types.UnicodeText),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )
    # New table
    harvest_object_error_table = Table(
        'harvest_object_error',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_object_id', types.UnicodeText,
               ForeignKey('harvest_object.id')),
        Column('message', types.UnicodeText),
        Column('stage', types.UnicodeText),
        Column('line', types.Integer),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )
    # Harvest Log table
    harvest_log_table = Table(
        'harvest_log',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('content', types.UnicodeText, nullable=False),
        Column(
            'level',
            types.Enum('DEBUG',
                       'INFO',
                       'WARNING',
                       'ERROR',
                       'CRITICAL',
                       name='log_level')),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )

    # Classic mappings: tie the domain classes to the tables defined above.
    mapper(
        HarvestSource,
        harvest_source_table,
        properties={
            'jobs':
            relation(
                HarvestJob,
                lazy=True,
                backref=u'source',
                order_by=harvest_job_table.c.created,
            ),
        },
    )

    mapper(
        HarvestJob,
        harvest_job_table,
    )

    mapper(
        HarvestObject,
        harvest_object_table,
        properties={
            'package': relation(
                Package,
                lazy=True,
                backref='harvest_objects',
            ),
            'job': relation(
                HarvestJob,
                lazy=True,
                backref=u'objects',
            ),
            'source': relation(
                HarvestSource,
                lazy=True,
                backref=u'objects',
            ),
        },
    )

    mapper(
        HarvestGatherError,
        harvest_gather_error_table,
        properties={
            'job': relation(HarvestJob, backref='gather_errors'),
        },
    )

    mapper(
        HarvestObjectError,
        harvest_object_error_table,
        properties={
            'object':
            relation(HarvestObject,
                     backref=backref('errors', cascade='all,delete-orphan')),
        },
    )

    mapper(
        HarvestObjectExtra,
        harvest_object_extra_table,
        properties={
            'object':
            relation(HarvestObject,
                     backref=backref('extras', cascade='all,delete-orphan')),
        },
    )

    mapper(
        HarvestLog,
        harvest_log_table,
    )

    event.listen(HarvestObject, 'before_insert',
                 harvest_object_before_insert_listener)
コード例 #9
0
ファイル: column.py プロジェクト: AnyBlok/AnyBlok
 def __init__(self, *args, **kwargs):
     """Build the column, requiring an ``enum_cls`` keyword argument.

     The enum class is removed from ``kwargs`` before delegating the
     remaining arguments to the parent constructor.
     """
     enum_class = kwargs.pop('enum_cls')
     self.enum_cls = enum_class
     self.sqlalchemy_type = types.Enum(enum_class)
     super(Enum, self).__init__(*args, **kwargs)
コード例 #10
0
# coding: utf-8

from __future__ import unicode_literals

from sqlalchemy import schema, types, ForeignKey
from sqlalchemy.orm import relationship

from ..base import Model
from ..audit import AuditModel
from ..id_generator import id_generator, date_generator

# Fault severity categories: '一般故障' = ordinary fault, '严重故障' = serious fault.
fault_type = types.Enum('一般故障', '严重故障')


class ExamineRepairRecord(Model, AuditModel):
    """Model definition for troubleshooting/repair records."""
    __tablename__ = 'examine_repair_record'

    def _id_generator():
        # Record numbers are generated with the 'PGJX' prefix.
        return id_generator('PGJX', ExamineRepairRecord, 'recordNum')

    id = schema.Column(types.Integer, primary_key=True)
    # Record number, auto-generated on insert via _id_generator.
    recordNum = schema.Column(types.String(255), default=_id_generator)
    # Fault severity; defaults to '一般故障' (ordinary fault).
    faultType = schema.Column(fault_type, nullable=False, default='一般故障')
    # Aircraft model.
    planeType = schema.Column(types.String(255), nullable=False)
    # Aircraft registration number.
    jihao = schema.Column(types.String(255))
    # Fault date, stored as a string produced by date_generator.
    faultDate = schema.Column(types.String(255),
                              default=date_generator,
                              nullable=False)
    # Location where the fault occurred. (sic: 'Adress' kept — it is the column name.)
    faultAdress = schema.Column(types.String(255))
    # Person who prepared the report.
    reportsMaker = schema.Column(types.String(255))
コード例 #11
0
ファイル: User.py プロジェクト: SBillion/timetableasy
def init_db():
    """Define the ``user`` table, its association tables, and the User mapping.

    SQLAlchemy and the related models are imported inside the function
    (NOTE(review): presumably to avoid import cycles at module load — confirm).
    """

    # Database definition
    from sqlalchemy import types, orm
    from sqlalchemy.schema import Column, Table, Sequence, ForeignKey
    from sqlalchemy.orm import relationship, backref, relation, mapper
    # Dependencies
    from Planning import Planning
    from Class import Class
    from Cursus import Cursus
    from Campus import Campus

    t_user = Table(
        'user',
        db.metadata,
        Column('id',
               types.Integer,
               Sequence('user_seq_id', optional=True),
               nullable=False,
               primary_key=True),
        Column('name', types.VARCHAR(255), nullable=False),
        Column('firstname', types.VARCHAR(255), nullable=False),
        Column('login', types.VARCHAR(64), nullable=False, unique=True),
        Column('password', types.VARCHAR(255), nullable=False),
        Column('email', types.VARCHAR(255), nullable=False),
        Column('type',
               types.Enum('admin', 'manager', 'teacher', 'student'),
               nullable=False),
        Column('id_planning',
               types.Integer,
               ForeignKey('planning.id'),
               nullable=False),
        Column('id_class', types.Integer, ForeignKey('class.id')),
    )

    # Association table linking users and cursus.
    t_user_cursus = Table(
        'user_cursus',
        db.metadata,
        Column('id_user', types.Integer, ForeignKey('user.id'),
               nullable=False),
        Column('id_cursus',
               types.Integer,
               ForeignKey('cursus.id'),
               nullable=False),
    )

    # Association table linking users and campus.
    t_user_campus = Table(
        'user_campus',
        db.metadata,
        Column('id_user', types.Integer, ForeignKey('user.id'),
               nullable=False),
        Column('id_campus',
               types.Integer,
               ForeignKey('campus.id'),
               nullable=False),
    )

    # Association table linking teachers and the campus they teach at.
    t_teacher_campus = Table(
        'teacher_campus',
        db.metadata,
        Column('id_user', types.Integer, ForeignKey('user.id'),
               nullable=False),
        Column('id_campus',
               types.Integer,
               ForeignKey('campus.id'),
               nullable=False),
    )

    # Association table linking users and classes (class managers).
    t_user_class = Table(
        'user_class',
        db.metadata,
        Column('id_user', types.Integer, ForeignKey('user.id'),
               nullable=False),
        Column('id_class',
               types.Integer,
               ForeignKey('class.id'),
               nullable=False),
    )

    # Classic mapping: one relationship per role the user can play.
    mapper(User,
           t_user,
           properties={
               'planning':
               relationship(Planning,
                            backref=backref('type_user', uselist=False)),
               'student_class':
               relationship(Class, backref="students"),
               'cursus':
               relationship(Cursus,
                            secondary=t_user_cursus,
                            backref='managers'),
               'campus':
               relationship(Campus,
                            secondary=t_user_campus,
                            backref='managers'),
               'manager_class':
               relationship(Class, secondary=t_user_class, backref='managers'),
               'teacher_campus':
               relationship(Campus,
                            secondary=t_teacher_campus,
                            backref='teachers'),
           })
コード例 #12
0
ファイル: models.py プロジェクト: mobyle2-legacy/apex
class AuthUser(Base):
    """ Table name: auth_users

::

    id = Column(types.Integer(), primary_key=True)
    login = Column(Unicode(80), default=u'', index=True)
    username = Column(Unicode(80), default=u'', index=True)
    _password = Column('password', Unicode(80), default=u'')
    email = Column(Unicode(80), default=u'', index=True)
    active = Column(types.Enum(u'Y',u'N',u'D'), default=u'Y')
    """
    __tablename__ = 'auth_users'
    __table_args__ = {"sqlite_autoincrement": True}

    id = Column(types.Integer(), primary_key=True)
    login = Column(Unicode(80), default=u'', index=True)
    username = Column(Unicode(80), default=u'', index=True)
    _password = Column('password', Unicode(80), default=u'')
    email = Column(Unicode(80), default=u'', index=True)
    active = Column(types.Enum(u'Y', u'N', u'D', name=u"active"), default=u'Y')

    groups = relationship('AuthGroup', secondary=user_group_table, \
                      backref='auth_users')

    last_events = relationship('AuthUserLog', \
                         order_by='AuthUserLog.time.desc()')
    login_log = relationship('AuthUserLog', \
                         order_by='AuthUserLog.id')
    """
    Fix this to use association_proxy
    groups = association_proxy('user_group_table', 'authgroup')
    """
    @property
    def last_logins(self):
        """ Returns login ('L') events, most recent first (last_events
        is ordered by time descending). """
        return [a for a in self.last_events if a.event == 'L']

    @property
    def last_login(self):
        """ Returns the most recent login event, or None if the user has
        never logged in. """
        if self.last_logins:
            return self.last_logins[0]

    def _set_password(self, password):
        # bcrypt encodes its own salt into the stored hash
        self._password = BCRYPTPasswordManager().encode(password, rounds=12)

    def _get_password(self):
        return self._password

    password = synonym('_password', descriptor=property(_get_password, \
                       _set_password))

    def in_group(self, group):
        """
        Returns True or False if the user is or isn't in the group.
        """
        return group in [g.name for g in self.groups]

    @classmethod
    def get_by_id(cls, id):
        """
        Returns AuthUser object or None by id

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_id(1)
        """
        return DBSession.query(cls).filter(cls.id == id).first()

    @classmethod
    def get_by_login(cls, login):
        """
        Returns AuthUser object or None by login

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_login('$G$1023001')
        """
        return DBSession.query(cls).filter(cls.login == login).first()

    @classmethod
    def get_by_username(cls, username):
        """
        Returns AuthUser object or None by username

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_username('username')
        """
        return DBSession.query(cls).filter(cls.username == username).first()

    @classmethod
    def get_by_email(cls, email):
        """
        Returns AuthUser object or None by email

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_email('*****@*****.**')
        """
        return DBSession.query(cls).filter(cls.email == email).first()

    @classmethod
    def check_password(cls, **kwargs):
        """
        Looks up the user by the ``id`` or ``username`` keyword argument and
        checks the ``password`` keyword argument against the stored bcrypt
        hash. Returns True on a match, False otherwise (including when no
        matching user is found or no lookup key was supplied).
        """
        # Initialize so we don't hit an UnboundLocalError when the caller
        # passes neither 'id' nor 'username'.
        user = None
        # dict.has_key() was removed in Python 3; use the ``in`` operator.
        if 'id' in kwargs:
            user = cls.get_by_id(kwargs['id'])
        if 'username' in kwargs:
            user = cls.get_by_username(kwargs['username'])

        if not user:
            return False
        if BCRYPTPasswordManager().check(user.password, kwargs['password']):
            return True
        else:
            return False

    def get_profile(self, request=None):
        """
        Returns AuthUser.profile object, creates record if it doesn't exist.

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_id(1)
           profile = user.get_profile(request)

        in **development.ini**

        .. code-block:: python

           apex.auth_profile =
        """
        if not request:
            request = get_current_request()

        auth_profile = request.registry.settings.get('apex.auth_profile')
        if auth_profile:
            resolver = DottedNameResolver(auth_profile.split('.')[0])
            profile_cls = resolver.resolve(auth_profile)
            return get_or_create(DBSession, profile_cls, user_id=self.id)
コード例 #13
0
ファイル: models.py プロジェクト: nycynik/apex
class AuthUser(Base):
    """ Table name: auth_users

::

    id = Column(types.Integer(), primary_key=True)
    login = Column(Unicode(80), default=u'', index=True)
    _password = Column('password', Unicode(80), default=u'')
    email = Column(Unicode(80), default=u'', index=True)
    active = Column(types.Enum(u'Y',u'N',u'D'), default=u'Y')
    """
    __tablename__ = 'auth_users'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(types.Integer(), primary_key=True)
    auth_id = Column(types.Integer, ForeignKey(AuthID.id), index=True)
    provider = Column(Unicode(80), default=u'local', index=True)
    login = Column(Unicode(80), default=u'', index=True)
    salt = Column(Unicode(24))
    _password = Column('password', Unicode(80), default=u'')
    email = Column(Unicode(80), default=u'', index=True)
    created = Column(types.DateTime(), default=func.now())
    active = Column(types.Enum(u'Y', u'N', u'D', name=u'active'), default=u'Y')

    # need unique index on auth_id, provider, login
    # create unique index ilp on auth_users (auth_id,login,provider);
    # how do we handle same auth on multiple ids?

    def _set_password(self, password):
        # A fresh per-user salt is appended to the password before bcrypt
        # hashing (bcrypt also embeds its own salt in the stored hash).
        self.salt = self.get_salt(24)
        password = password + self.salt
        self._password = BCRYPTPasswordManager().encode(password, rounds=12)

    def _get_password(self):
        return self._password

    password = synonym('_password', descriptor=property(_get_password, \
                       _set_password))

    def get_salt(self, length):
        """ Returns a pseudo-random salt string of LENGTH hex characters.

        NOTE(review): the salt is derived from the ``random`` module, which
        is not cryptographically secure; consider ``secrets`` instead.
        """
        m = hashlib.sha256()
        word = ''

        for i in list(range(length)):
            word += random.choice(string.ascii_letters)

        m.update(word.encode('utf-8'))

        return str(m.hexdigest()[:length])

    @classmethod
    def get_by_id(cls, id):
        """ 
        Returns AuthUser object or None by id

        .. code-block:: python

           from apex.models import AuthID

           user = AuthID.get_by_id(1)
        """
        return DBSession.query(cls).filter(cls.id == id).first()

    @classmethod
    def get_by_login(cls, login):
        """ 
        Returns AuthUser object or None by login

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_login('login')
        """
        return DBSession.query(cls).filter(cls.login == login).first()

    @classmethod
    def get_by_email(cls, email):
        """ 
        Returns AuthUser object or None by email

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_email('*****@*****.**')
        """
        return DBSession.query(cls).filter(cls.email == email).first()

    @classmethod
    def check_password(cls, **kwargs):
        """
        Looks up the user by the ``id`` or ``login`` keyword argument and
        verifies the ``password`` keyword argument against the stored
        salted bcrypt hash. Falls back to the pluggable
        ``apex.fallback_auth`` checker when configured. Returns True or
        False.
        """
        # Initialize so we don't raise UnboundLocalError when the caller
        # passes neither 'id' nor 'login'.
        user = None
        if 'id' in kwargs:
            user = cls.get_by_id(kwargs['id'])
        if 'login' in kwargs:
            user = cls.get_by_login(kwargs['login'])

        if not user:
            return False
        try:
            if BCRYPTPasswordManager().check(
                    user.password, '%s%s' % (kwargs['password'], user.salt)):
                return True
        except TypeError:
            # stored password or salt is None (e.g. external provider)
            pass

        request = get_current_request()
        fallback_auth = request.registry.settings.get('apex.fallback_auth')
        if fallback_auth:
            resolver = DottedNameResolver(fallback_auth.split('.', 1)[0])
            fallback = resolver.resolve(fallback_auth)
            return fallback().check(DBSession, request, user, \
                       kwargs['password'])

        return False
コード例 #14
0
class User(db.Model, UserMixin):
    """ A model for Users. Note all arguments for SECTION_ID takes in the
    Berkeley assigned alphanumeric ID.
    """

    __tablename__ = "users"
    id = Column(db.Integer, primary_key=True)
    sid = Column(db.Integer, unique=True)
    gid = Column(db.Integer, nullable=False, unique=True)
    name = Column(db.String(255))
    email = Column(db.String(255), nullable=False)
    access = Column(types.Enum(AccessLevel), nullable=False)

    # Relationships
    sections = db.relationship("Section")
    enrolled = db.relationship("Enrollment")

    def get_sections_instructed(self):
        """ Returns all sections the user is teaching. """
        if self.access == AccessLevel.STAFF or self.access == AccessLevel.ADMIN:
            return self.sections
        return []

    def get_sections_enrolled(self):
        """ Returns a list of all sections the user is enrolled in. """
        results = set()
        if self.access == AccessLevel.ASSISTANT:
            for entry in self.enrolled:
                s = Section.query.filter_by(id=entry.section_id).one_or_none()
                if s is not None:
                    results.add(s)
                else:
                    logger.warning("Error getting enrolled section: no such section exists {0}".format(entry.section_id))
        return list(results)

    def get_all_attendances(self):
        """ Returns all attendance entries for user. """
        if self.access == AccessLevel.ASSISTANT:
            return Attendance.query.filter_by(assistant_id=self.id).all()
        return []

    def mark_unmarked(self, section_id, date):
        """ Marks the assistant as unmarked from SECTION_ID on DATE. """
        self.mark_attendance(section_id, date, AttendanceType.UNMARKED)

    def mark_present(self, section_id, date):
        """ Marks the assistant as present from SECTION_ID on DATE. """
        self.mark_attendance(section_id, date, AttendanceType.PRESENT)

    def mark_absent(self, section_id, date):
        """ Marks the assistant as absent from SECTION_ID on DATE. """
        self.mark_attendance(section_id, date, AttendanceType.ABSENT)

    @transaction
    def mark_attendance(self, section_id, date, attend):
        """ Marks the assistant as ATTEND from SECTION_ID on DATE. If element
        with SECTION_ID and DATE already exists, then updates the attendance
        to ATTEND.

        Raises TypeError for staff members, unknown sections, or invalid
        dates.
        """
        if self.access != AccessLevel.ASSISTANT:
            logger.info("Set attendance error for {0}: staff member".format(self.name))
            raise TypeError("Cannot set attendance for staff")
        section = Section.lookup_by_section_id(section_id)
        if section is None:
            logger.info("Set attendance error {0}: no such section {1}".format(self.name, section_id))
            raise TypeError("No section found!")
        if not section.is_valid_date(date):
            logger.info("Set attendance error for {0}: wrong date {1}".format(
                self.name,
                date
            ))
            raise TypeError("Cannot set attendance for section on {0}".format(date))
        # Only record a mark timestamp when actually marking (not unmarking).
        mark = None
        if attend != AttendanceType.UNMARKED:
            mark = datetime.now()
        elem = Attendance.lookup_by_assistant_section_date(self.id, section.id, date)
        if elem is None:
            elem = Attendance(assistant_id=self.id,
                mark_date=mark,
                section_id=section.id,
                section_date=date,
                attendance_type=attend
            )
            db.session.add(elem)
        else:
            elem.attendance_type = attend

    @transaction
    def enroll(self, section_id):
        """ Enrolls an assistant in a section. Note SECTION_ID is the Berkeley
        assigned section id.

        Raises TypeError for staff members or unknown sections. Enrolling
        twice is a no-op.
        """
        section = Section.lookup_by_section_id(section_id)
        if self.access != AccessLevel.ASSISTANT:
            logger.info("Enrolling {0} to {1} error: cannot enroll staff member".format(
                self.name,
                section_id
            ))
            raise TypeError("Cannot enroll staff member")
        if section is None:
            logger.info("Enrolling {0} to {1} error: section does not exist".format(
                self.name,
                section_id
            ))
            raise TypeError("Section does not exist")
        enrollment = Enrollment.lookup_by_assistant_section(self.id, section.id)
        if enrollment is None:
            enrollment = Enrollment(user_id=self.id, section_id=section.id)
            db.session.add(enrollment)

    @staticmethod
    def all_assistants():
        """ Returns all lab assistants. """
        return User.query.filter_by(access=AccessLevel.ASSISTANT).all()

    @staticmethod
    def all_staff():
        """ Returns all staff members (including admins). """
        # Filter on the mapped column (User.access). The original referenced
        # the non-existent ``db.users`` attribute, which raised at call time,
        # and returned a bare Query instead of a list like its siblings.
        return User.query.filter(or_(User.access == AccessLevel.STAFF,
                                     User.access == AccessLevel.ADMIN)).all()

    @staticmethod
    def all_admin():
        """ Returns all admins. """
        return User.query.filter_by(access=AccessLevel.ADMIN).all()

    @staticmethod
    def lookup_by_google(google_id):
        """ Gets a user with the google assigned user id. """
        return User.query.filter_by(gid=google_id).one_or_none()

    @staticmethod
    def lookup_by_id(user_id):
        """ Gets the User id by the primary key. """
        return User.query.get(user_id)

    @staticmethod
    def lookup_by_sid(student_id):
        """ Gets a user by the associated Berkeley student id. """
        return User.query.filter_by(sid=student_id).one_or_none()
0
ファイル: ecdkg.py プロジェクト: wizfmsGithub/pydkg
class ECDKG(db.Base):
    """ One run of the elliptic-curve distributed key generation protocol
    for a single decryption condition. The instance advances through
    ECDKGPhase as signed messages from the other participants are
    processed and verified; mismatching or unverifiable data results in a
    complaint being recorded against the offending participant.
    """
    __tablename__ = 'ecdkg'

    node_id = Column(types.Integer, ForeignKey('ecdkg_node.id'))
    node = relationship('ECDKGNode', back_populates='protocol_instances')

    decryption_condition = Column(types.String(32), index=True)
    phase = Column(types.Enum(ECDKGPhase), nullable=False, default=ECDKGPhase.uninitialized)
    threshold = Column(types.Integer)
    encryption_key = Column(db.CurvePoint)
    decryption_key = Column(db.PrivateValue)
    participants = relationship('ECDKGParticipant', back_populates='ecdkg')

    secret_poly1 = Column(db.Polynomial)
    secret_poly2 = Column(db.Polynomial)
    verification_points = Column(db.CurvePointTuple)
    encryption_key_vector = Column(db.CurvePointTuple)

    __table_args__ = (UniqueConstraint('node_id', 'decryption_condition'),)

    def init(self):
        """ Creates participant records, picks the threshold and the two
        secret polynomials, and advances to the key distribution phase. """
        for addr in networking.channels.keys():
            self.get_or_create_participant_by_address(addr)

        # everyone should agree on participants
        self.threshold = math.ceil(THRESHOLD_FACTOR * (len(self.participants)+1))

        spoly1 = random_polynomial(self.threshold)
        spoly2 = random_polynomial(self.threshold)

        self.secret_poly1 = spoly1
        self.secret_poly2 = spoly2

        self.encryption_key_vector = tuple(secp256k1.multiply(secp256k1.G, coeff) for coeff in self.secret_poly1)

        self.verification_points = tuple(
            secp256k1.add(secp256k1.multiply(secp256k1.G, a), secp256k1.multiply(G2, b))
            for a, b in zip(spoly1, spoly2)
        )

        self.phase = ECDKGPhase.key_distribution

    def process_advance_to_phase(self, target_phase: ECDKGPhase):
        """ Advances (never regresses) the protocol phase and persists it. """
        if self.phase < target_phase:
            self.phase = target_phase
            db.Session.commit()

    def process_secret_shares(self, sender_address: int, secret_shares: (int, int), signature: 'rsv triplet'):
        """ Verifies the signature on a pair of secret shares from
        SENDER_ADDRESS and stores them; records a complaint if shares that
        differ from previously received ones arrive. """
        own_address = self.node.address
        participant = self.get_participant_by_address(sender_address)
        share1, share2 = secret_shares

        msg_bytes = (
            b'SECRETSHARES' +
            self.decryption_condition.encode() +
            util.address_to_bytes(own_address) +
            util.private_value_to_bytes(share1) +
            util.private_value_to_bytes(share2)
        )

        recovered_address = util.address_from_message_and_signature(msg_bytes, signature)

        if sender_address != recovered_address:
            raise ValueError(
                'sender address {:040x} does not match recovered address {:040x}'
                .format(sender_address, recovered_address)
            )

        if participant.secret_share1 is None and participant.secret_share2 is None:
            participant.secret_share1 = share1
            participant.secret_share2 = share2
            participant.shares_signature = signature

            db.Session.commit()

            # shares can only be verified once the verification points arrive
            if participant.verification_points is not None:
                self.process_secret_share_verification(sender_address)

        elif participant.secret_share1 != share1 or participant.secret_share2 != share2:
            participant.get_or_create_complaint_by_complainer_address(own_address)
            raise ValueError(
                '{:040x} sent shares for {} which do not match: {} != {}'
                .format(
                    sender_address,
                    self.decryption_condition,
                    (participant.secret_share1, participant.secret_share2),
                    (share1, share2),
                )
            )

    def process_verification_points(self, sender_address: int, verification_points: tuple, signature: 'rsv triplet'):
        """ Verifies the signature on a participant's verification points
        and stores them; complains on conflicting resubmissions. """
        own_address = self.node.address
        participant = self.get_participant_by_address(sender_address)

        msg_bytes = (
            b'VERIFICATIONPOINTS' +
            self.decryption_condition.encode() +
            util.curve_point_tuple_to_bytes(verification_points)
        )

        recovered_address = util.address_from_message_and_signature(msg_bytes, signature)

        if sender_address != recovered_address:
            raise ValueError(
                'sender address {:040x} does not match recovered address {:040x}'
                .format(sender_address, recovered_address)
            )

        if participant.verification_points is None:
            participant.verification_points = verification_points
            participant.verification_points_signature = signature

            db.Session.commit()

            # verify previously received shares now that points are known
            if participant.secret_share1 is not None and participant.secret_share2 is not None:
                self.process_secret_share_verification(sender_address)

        elif participant.verification_points != verification_points:
            participant.get_or_create_complaint_by_complainer_address(own_address)
            raise ValueError(
                '{:040x} sent verification points for {} which do not match: {} != {}'
                .format(
                    sender_address,
                    self.decryption_condition,
                    participant.verification_points,
                    verification_points,
                )
            )

    def process_secret_share_verification(self, address: int):
        """ Checks a participant's secret shares against their verification
        points (Pedersen-style commitment check); complains on mismatch. """
        own_address = self.node.address
        participant = self.get_participant_by_address(address)

        share1 = participant.secret_share1
        share2 = participant.secret_share2

        vlhs = secp256k1.add(secp256k1.multiply(secp256k1.G, share1),
                             secp256k1.multiply(G2, share2))
        vrhs = functools.reduce(
            secp256k1.add,
            (secp256k1.multiply(ps, pow(own_address, k, secp256k1.N))
                for k, ps in enumerate(participant.verification_points)))

        if vlhs == vrhs:
            return

        participant.get_or_create_complaint_by_complainer_address(own_address)

    def process_encryption_key_vector(self,
                                      sender_address: int,
                                      encryption_key_vector: tuple,
                                      signature: 'rsv triplet'):
        """ Verifies and stores a participant's encryption key vector;
        once all vectors are present, assembles the shared encryption key.
        Complains on mismatch with the previously sent secret share. """
        own_address = self.node.address
        participant = self.get_participant_by_address(sender_address)

        msg_bytes = (
            b'ENCRYPTIONKEYPART' +
            self.decryption_condition.encode() +
            util.curve_point_tuple_to_bytes(encryption_key_vector)
        )

        recovered_address = util.address_from_message_and_signature(msg_bytes, signature)

        if sender_address != recovered_address:
            raise ValueError(
                'sender address {:040x} does not match recovered address {:040x}'
                .format(sender_address, recovered_address)
            )

        if participant.encryption_key_vector is None:
            lhs = secp256k1.multiply(secp256k1.G, participant.secret_share1)
            rhs = functools.reduce(
                secp256k1.add,
                (secp256k1.multiply(ps, pow(own_address, k, secp256k1.N))
                    for k, ps in enumerate(encryption_key_vector)))
            if lhs != rhs:
                participant.get_or_create_complaint_by_complainer_address(own_address)
                raise ValueError(
                    '{:040x} sent enc key vector which does not match previously sent secret share'
                    .format(sender_address)
                )

            participant.encryption_key_vector = encryption_key_vector
            participant.encryption_key_vector_signature = signature

            if all(p.encryption_key_vector is not None for p in self.participants):
                self.encryption_key = functools.reduce(
                    secp256k1.add,
                    (p.encryption_key_vector[0] for p in self.participants),
                    self.encryption_key_vector[0]
                )

            db.Session.commit()
        elif participant.encryption_key_vector != encryption_key_vector:
            participant.get_or_create_complaint_by_complainer_address(own_address)
            raise ValueError(
                '{:040x} sent encryption key part for {} which do not match: {} != {}'
                .format(
                    sender_address,
                    self.decryption_condition,
                    participant.encryption_key_vector,
                    encryption_key_vector,
                )
            )

    def process_decryption_key_part(self,
                                    sender_address: int,
                                    decryption_key_part: int,
                                    signature: 'rsv triplet'):
        """ Verifies and stores a participant's decryption key part; once
        all parts are present, assembles the full decryption key.
        Complains on mismatch with the previously sent enc key vector. """
        # BUGFIX: own_address was never defined in this method (unlike its
        # sibling methods), so the complaint branches below raised NameError
        # instead of recording the complaint.
        own_address = self.node.address
        participant = self.get_participant_by_address(sender_address)

        msg_bytes = (
            b'DECRYPTIONKEYPART' +
            self.decryption_condition.encode() +
            util.private_value_to_bytes(decryption_key_part)
        )

        recovered_address = util.address_from_message_and_signature(msg_bytes, signature)

        if sender_address != recovered_address:
            raise ValueError(
                'sender address {:040x} does not match recovered address {:040x}'
                .format(sender_address, recovered_address)
            )

        if participant.decryption_key_part is None:
            if secp256k1.multiply(secp256k1.G, decryption_key_part) != participant.encryption_key_vector[0]:
                participant.get_or_create_complaint_by_complainer_address(own_address)
                raise ValueError(
                    '{:040x} sent dec key part which does not match previously sent enc key vector'
                    .format(sender_address)
                )

            participant.decryption_key_part = decryption_key_part
            participant.decryption_key_part_signature = signature

            if all(p.decryption_key_part is not None for p in self.participants):
                self.decryption_key = (
                    sum(p.decryption_key_part for p in self.participants) +
                    self.secret_poly1[0]
                ) % secp256k1.N

            db.Session.commit()
        elif participant.decryption_key_part != decryption_key_part:
            participant.get_or_create_complaint_by_complainer_address(own_address)
            raise ValueError(
                '{:040x} sent decryption key part for {} which do not match: {} != {}'
                .format(
                    sender_address,
                    self.decryption_condition,
                    participant.decryption_key_part,
                    decryption_key_part,
                )
            )

    async def run_until_phase(self, target_phase: ECDKGPhase):
        """ Drives the protocol forward by dispatching to the
        handle_<phase>_phase coroutine until TARGET_PHASE is reached. """
        while self.phase < target_phase:
            logging.info('handling {} phase...'.format(self.phase.name))
            await getattr(self, 'handle_{}_phase'.format(self.phase.name))()

    async def handle_key_distribution_phase(self):
        """ Collects signed secret shares and verification points from all
        participants, then advances to key verification. """
        signed_secret_shares = await networking.broadcast_jsonrpc_call_on_all_channels(
            'get_signed_secret_shares', self.decryption_condition)

        for participant in self.participants:
            address = participant.eth_address

            if address not in signed_secret_shares:
                logging.warning('missing shares from address {:040x}'.format(address))
                continue

            try:
                self.process_secret_shares(address, *signed_secret_shares[address])
            except Exception as e:
                logging.warning(
                    'exception occurred while processing secret shares from {:040x}: {}'
                    .format(address, e)
                )

        logging.info('set all secret shares')
        signed_verification_points = await networking.broadcast_jsonrpc_call_on_all_channels(
            'get_signed_verification_points', self.decryption_condition)

        for participant in self.participants:
            address = participant.eth_address

            if address not in signed_verification_points:
                logging.warning('missing verification points from address {:040x}'.format(address))
                continue

            try:
                self.process_verification_points(address, *signed_verification_points[address])
            except Exception as e:
                logging.warning(
                    'exception occurred while processing verification points from {:040x}: {}'
                    .format(address, e)
                )

        self.process_advance_to_phase(ECDKGPhase.key_verification)

    async def handle_key_verification_phase(self):
        """ Verification already happens as data arrives; just advance. """
        self.process_advance_to_phase(ECDKGPhase.key_check)

    async def handle_key_check_phase(self):
        """ Collects complaints from all participants, then advances to
        key generation. """
        complaints = await networking.broadcast_jsonrpc_call_on_all_channels(
            'get_complaints', self.decryption_condition)

        for participant in self.participants:
            complainer_address = participant.eth_address

            if complainer_address not in complaints:
                logging.warning('missing complaints from address {:040x}'.format(complainer_address))
                continue

            # TODO: Add complaints and collect responses to complaints

        self.process_advance_to_phase(ECDKGPhase.key_generation)

    async def handle_key_generation_phase(self):
        """ Collects encryption key vectors from all participants, then
        advances to key publication. """
        signed_encryption_key_vectors = await networking.broadcast_jsonrpc_call_on_all_channels(
            'get_signed_encryption_key_vector', self.decryption_condition)

        for participant in self.participants:
            address = participant.eth_address

            if address not in signed_encryption_key_vectors:
                # TODO: this is supposed to be broadcast... maybe try getting it from other nodes instead?
                logging.warning('missing encryption key part from address {:040x}'.format(address))
                continue

            try:
                self.process_encryption_key_vector(address, *signed_encryption_key_vectors[address])
            except Exception as e:
                logging.warning(
                    'exception occurred while processing encryption key part from {:040x}: {}'
                    .format(address, e)
                )

        self.process_advance_to_phase(ECDKGPhase.key_publication)

    async def handle_key_publication_phase(self):
        """ Waits for the decryption condition, collects decryption key
        parts, and completes the protocol. """
        await util.decryption_condition_satisfied(self.decryption_condition)

        signed_decryption_key_parts = await networking.broadcast_jsonrpc_call_on_all_channels(
            'get_signed_decryption_key_part', self.decryption_condition)

        for p in self.participants:
            address = p.eth_address

            if address not in signed_decryption_key_parts:
                # TODO: switch to interpolation of secret shares if waiting doesn't work
                logging.warning('missing decryption key part from address {:040x}'.format(address))
                continue

            try:
                self.process_decryption_key_part(address, *signed_decryption_key_parts[address])
            except Exception as e:
                logging.warning(
                    'exception occurred while processing decryption key part from {:040x}: {}'
                    .format(address, e)
                )

        self.process_advance_to_phase(ECDKGPhase.complete)

    def get_participant_by_address(self, address: int) -> 'ECDKGParticipant':
        """ Returns the participant record for ADDRESS or raises
        ValueError when none exists. """
        participant = (
            db.Session
            .query(ECDKGParticipant)
            .filter(ECDKGParticipant.ecdkg_id == self.id,
                    ECDKGParticipant.eth_address == address)
            .scalar()
        )

        if participant is None:
            raise ValueError('could not find participant with address {:040x}'.format(address))

        return participant

    def get_or_create_participant_by_address(self, address: int) -> 'ECDKGParticipant':
        """ Returns the participant record for ADDRESS, creating and
        committing one if it does not exist yet. """
        try:
            return self.get_participant_by_address(address)
        except ValueError:
            participant = ECDKGParticipant(ecdkg_id=self.id, eth_address=address)
            db.Session.add(participant)
            db.Session.commit()
            return participant

    def get_signed_secret_shares(self, address: int) -> ((int, int), 'rsv triplet'):
        """ Evaluates both secret polynomials at ADDRESS and returns the
        shares together with this node's signature over them. """
        private_key = self.node.private_key

        secret_shares = (eval_polynomial(self.secret_poly1, address),
                         eval_polynomial(self.secret_poly2, address))

        msg_bytes = (
            b'SECRETSHARES' +
            self.decryption_condition.encode() +
            util.address_to_bytes(address) +
            util.private_value_to_bytes(secret_shares[0]) +
            util.private_value_to_bytes(secret_shares[1])
        )

        signature = util.sign_with_key(msg_bytes, private_key)

        return (secret_shares, signature)

    def get_signed_verification_points(self) -> (tuple, 'rsv triplet'):
        """ Returns this node's verification points with a signature. """
        private_key = self.node.private_key

        msg_bytes = (
            b'VERIFICATIONPOINTS' +
            self.decryption_condition.encode() +
            util.curve_point_tuple_to_bytes(self.verification_points)
        )

        signature = util.sign_with_key(msg_bytes, private_key)

        return (self.verification_points, signature)

    def get_signed_encryption_key_vector(self) -> ((int, int), 'rsv triplet'):
        """ Returns this node's encryption key vector with a signature. """
        private_key = self.node.private_key

        msg_bytes = (
            b'ENCRYPTIONKEYPART' +
            self.decryption_condition.encode() +
            util.curve_point_tuple_to_bytes(self.encryption_key_vector)
        )

        signature = util.sign_with_key(msg_bytes, private_key)

        return (self.encryption_key_vector, signature)

    def get_signed_decryption_key_part(self) -> (int, 'rsv triplet'):
        """ Returns this node's decryption key part (the constant term of
        secret_poly1) with a signature. """
        private_key = self.node.private_key

        msg_bytes = (
            b'DECRYPTIONKEYPART' +
            self.decryption_condition.encode() +
            util.private_value_to_bytes(self.secret_poly1[0])
        )

        signature = util.sign_with_key(msg_bytes, private_key)

        return (self.secret_poly1[0], signature)

    def get_complaints_by(self, address: int) -> dict:
        """ Returns all complaints filed by ADDRESS. """
        return (
            db.Session
            .query(ECDKGComplaint)
            .filter(  # ECDKGComplaint.participant.ecdkg_id == self.id,
                    ECDKGComplaint.complainer_address == address)
            .all()
        )

    def to_state_message(self) -> dict:
        """ Serializes the protocol state (including per-participant
        state) into a JSON-friendly dict of hex-encoded values. """
        own_address = self.node.address

        msg = {'address': '{:040x}'.format(own_address)}

        for attr in ('decryption_condition', 'phase', 'threshold'):
            val = getattr(self, attr)
            if val is not None:
                msg[attr] = val

        msg['participants'] = {'{:040x}'.format(p.eth_address): p.to_state_message() for p in self.participants}

        for attr in ('encryption_key',):
            val = getattr(self, attr)
            if val is not None:
                msg[attr] = '{0[0]:064x}{0[1]:064x}'.format(val)

        for attr in ('verification_points', 'encryption_key_vector'):
            val = getattr(self, attr)
            if val is not None:
                msg[attr] = tuple('{0[0]:064x}{0[1]:064x}'.format(pt) for pt in val)

        return msg
コード例 #16
0
from sqlalchemy import Table, Column
from sqlalchemy import types
from sqlalchemy import ForeignKey, UniqueConstraint, Index

from ckan.model import Package
from ckanext.publicamundi.model import Base

from ckanext.publicamundi.lib import languages

# All ISO 639-1 two-letter language codes; these become the allowed values of
# the 'language_code' database enum used by translation tables.
language_codes = languages.get_all('iso-639-1').keys()
Language = types.Enum(*language_codes, name='language_code')

# Workflow states a translation row may be in.
translation_states = ('active', 'draft', 'deleted')
TranslationState = types.Enum(*translation_states, name='translation_state')


class PackageTranslation(Base):
    __table__ = Table(
        'package_translation',
        Base.metadata,
        Column('tid', types.Integer(), primary_key=True, autoincrement=True),
        Column('package_id',
               types.UnicodeText(),
               ForeignKey(Package.id, ondelete='cascade'),
               nullable=False),
        Column('source_language', Language),
        Column('language', Language, nullable=False),
        Column('key', types.UnicodeText(), nullable=False),
        Column('value', types.UnicodeText()),
        Column('state', TranslationState, default='active'),
        Index('ix_package_translation_package_key', 'package_id', 'key'),
コード例 #17
0
ファイル: test_converter.py プロジェクト: timmygee/graphene
def test_should_enum_convert_string():
    """An unnamed SQLAlchemy Enum column should convert to graphene.String."""
    assert_column_conversion(types.Enum(), graphene.String)
コード例 #18
0
    def _create_layer(self,
                      exclude_properties=False,
                      metadatas=None,
                      geom_type=False):
        """ This function is central for this test class. It creates
        a layer with two features, and associates a restriction area
        to it.

        :param exclude_properties: when True, mark the "name" property as
            excluded on the layer.
        :param metadatas: optional metadata objects to attach to the layer.
        :param geom_type: when True, constrain the geometry column to POINT;
            otherwise use a generic geometry.
        :returns: the integer id used for both the layer and the table names.
        """
        import transaction
        from sqlalchemy import Column, Table, types, ForeignKey
        from sqlalchemy.ext.declarative import declarative_base
        from geoalchemy2 import Geometry
        from c2cgeoportal_commons.models import DBSession
        from c2cgeoportal_commons.models.main import RestrictionArea, LayerWMS

        # Each call bumps a class-level counter so every test gets uniquely
        # named tables and layer ids.
        self.__class__._table_index += 1
        id = self.__class__._table_index

        engine = DBSession.c2c_rw_bind
        connection = engine.connect()

        if not self.metadata:
            self.metadata = declarative_base(bind=engine).metadata

        tablename = "geo_table_{0:d}".format(id)
        schemaname = "geodata"

        # Parent table referenced by the main table's child_id FK.
        table1 = Table("{0!s}_child".format(tablename),
                       self.metadata,
                       Column("id", types.Integer, primary_key=True),
                       Column("name", types.Unicode),
                       schema=schemaname)

        self._tables.append(table1)

        # Main table exercising a broad mix of column types.
        table2 = Table(
            tablename,
            self.metadata,
            Column("id", types.Integer, primary_key=True),
            Column(
                "child_id", types.Integer,
                ForeignKey("{0!s}.{1!s}_child.id".format(
                    schemaname, tablename))),
            Column("name", types.Unicode, nullable=False),
            Column("deleted", types.Boolean),
            Column("last_update_user", types.Unicode),
            Column("last_update_date", types.DateTime),
            Column("date", types.Date),
            Column("start_time", types.Time),
            # Column("interval", Interval()),
            Column("short_name1", types.String, nullable=True),
            Column("short_name2", types.String(50), nullable=True),
            Column("short_number", types.Integer, nullable=True),
            Column("double_number", types.Float(precision=4)),
            Column("large_binary", types.LargeBinary(length=60),
                   nullable=True),
            Column("value",
                   types.Enum("one", "two", "three", name="an_enum_value")),
            Column("numeric",
                   types.Numeric(precision=5, scale=2),
                   nullable=True),
            Column("numeric2", types.Numeric(), nullable=True),
            schema=schemaname)
        if geom_type:
            table2.append_column(Column("geom", Geometry("POINT", srid=21781)))
        else:
            table2.append_column(Column("geom", Geometry(srid=21781)))

        self._tables.append(table2)

        # Drop leftovers from previous runs, then create child before parent
        # (table2 references table1 via its foreign key).
        table2.drop(checkfirst=True)
        table1.drop(checkfirst=True)
        table1.create()
        table2.create()

        # Insert two child features; the primary-key lookup result is
        # intentionally discarded — only the inserts themselves matter here.
        ins = table1.insert().values(name="c1é")
        connection.execute(ins).inserted_primary_key[0]
        ins = table1.insert().values(name="c2é")
        connection.execute(ins).inserted_primary_key[0]

        layer = LayerWMS(name="test_WMS_1", public=True)
        layer.layer = "test_wms"
        layer.id = id
        layer.geo_table = "{0!s}.{1!s}".format(schemaname, tablename)
        layer.interfaces = [self.main]
        layer.ogc_server = self.ogc_server

        if exclude_properties:
            layer.exclude_properties = "name"

        if metadatas:
            layer.metadatas = metadatas

        DBSession.add(self.layer_group_1)
        self.layer_group_1.children = self.layer_group_1.children + [layer]

        DBSession.add(self.layer_group_1)

        # Grant read/write access on the layer to the test role through a
        # restriction area.
        ra = RestrictionArea()
        ra.name = "__test_ra"
        ra.layers = [layer]
        ra.roles = [self.role]
        ra.readwrite = True
        DBSession.add(ra)

        transaction.commit()

        self.layer_ids.append(id)
        return id
コード例 #19
0
ファイル: models.py プロジェクト: Preston-Landers/apex
class AuthUser(Base):
    """ Table name: auth_users

::

    id = Column(types.Integer(), primary_key=True)
    login = Column(Unicode(80), default=u'', index=True)
    _password = Column('password', Unicode(80), default=u'')
    email = Column(Unicode(80), default=u'', index=True)
    active = Column(types.Enum(u'Y', u'N', u'D', name=u'active'), default=u'Y')
    """
    __tablename__ = 'auth_users'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(types.Integer(), primary_key=True)
    auth_id = Column(types.Integer, ForeignKey(AuthID.id), index=True)
    provider = Column(Unicode(80), default=u'local', index=True)
    login = Column(Unicode(80), default=u'', index=True)
    salt = Column(Unicode(40))
    _password = Column('password', Unicode(80), default=u'')
    email = Column(Unicode(80), default=u'', index=True)
    created = Column(types.DateTime(), default=func.now())
    active = Column(types.Enum(u'Y', u'N', u'D', name=u'active'), default=u'Y')

    # need unique index on auth_id, provider, login
    # create unique index ilp on auth_users (auth_id,login,provider);
    # how do we handle same auth on multiple ids?

    def _set_password(self, password, rounds=13):
        # Handle arbitrarily long passwords by pre-hashing
        # (bcrypt only considers the first 72 bytes of its input).
        self._password = bcrypt.hashpw(
            self._pre_hash_password(password),
            bcrypt.gensalt(rounds=rounds)).decode('utf-8')

    def _get_password(self):
        return self._password

    @staticmethod
    def _pre_hash_password(password):
        # SHA-512 digest of the UTF-8 password; fed to bcrypt as a
        # fixed-length input.
        return hashlib.sha512(password.encode('utf-8')).digest()

    password = synonym('_password',
                       descriptor=property(_get_password, _set_password))

    def get_salt(self, length, rounds=13):
        # salt = bcrypt.gensalt(rounds=rounds)
        # salt = salt.decode('utf-8')[:length]
        # return salt
        # Ignored - salt is generated once in _set_password and stored inside the hash.
        return ''

    @classmethod
    def get_by_id(cls, id):
        """ 
        Returns AuthUser object or None by id

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_id(1)
        """
        return DBSession.query(cls).filter(cls.id == id).first()

    @classmethod
    def get_by_login(cls, login):
        """ 
        Returns AuthUser object or None by login

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_login('login')
        """
        return DBSession.query(cls).filter(cls.login == login).first()

    @classmethod
    def get_by_email(cls, email):
        """ 
        Returns AuthUser object or None by email

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_email('*****@*****.**')
        """
        return DBSession.query(cls).filter(cls.email == email).first()

    @classmethod
    def check_password(cls, **kwargs):
        """Verify a password for a user identified by 'id' or 'login' kwargs.

        Returns True on a bcrypt match; otherwise tries the optional
        'apex.fallback_auth' backend before returning False.
        """
        user = None
        if 'id' in kwargs:
            user = cls.get_by_id(kwargs['id'])
        if 'login' in kwargs:
            user = cls.get_by_login(kwargs['login'])

        if not user:
            return False
        try:
            if bcrypt.checkpw(cls._pre_hash_password(kwargs['password']),
                              user.password.encode('utf-8')):
                return True
        except TypeError:
            # e.g. stored password is None/not a valid hash; fall through.
            pass

        request = get_current_request()
        fallback_auth = request.registry.settings.get('apex.fallback_auth')
        if fallback_auth:
            resolver = DottedNameResolver(fallback_auth.split('.', 1)[0])
            fallback = resolver.resolve(fallback_auth)
            return fallback().check(DBSession, request, user,
                                    kwargs['password'])

        return False
コード例 #20
0
def test_should_not_enum_convert_enum_without_name():
    """Building the type for a nameless Enum column must raise TypeError."""
    nameless_enum_field = get_field(types.Enum("one", "two"))
    expected_message = r"No type name specified for Enum\('one', 'two'\)"
    with pytest.raises(TypeError, match=expected_message):
        nameless_enum_field.type()
コード例 #21
0
ファイル: models.py プロジェクト: Preston-Landers/apex
class AuthID(Base):
    """ Table name: auth_id

::

    id = Column(types.Integer(), primary_key=True)
    display_name = Column(Unicode(80), default=u'')
    active = Column(types.Enum(u'Y',u'N',u'D', name=u'active'), default=u'Y')
    created = Column(types.DateTime(), default=func.now())

    """

    __tablename__ = 'auth_id'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(types.Integer(), primary_key=True)
    display_name = Column(Unicode(80), default=u'')
    active = Column(types.Enum(u'Y', u'N', u'D', name=u'active'), default=u'Y')
    created = Column(types.DateTime(), default=func.now())

    groups = relationship('AuthGroup',
                          secondary=auth_group_table,
                          backref='auth_users')

    users = relationship('AuthUser')
    """
    Fix this to use association_proxy
    groups = association_proxy('auth_group_table', 'authgroup')
    """

    # Most recent login record only (descending id, single row).
    last_login = relationship('AuthUserLog',
                              order_by='AuthUserLog.id.desc()',
                              uselist=False)
    login_log = relationship('AuthUserLog', order_by='AuthUserLog.id')

    def in_group(self, group):
        """
        Returns True or False if the user is or isn't in the group.
        """
        return group in [g.name for g in self.groups]

    @classmethod
    def get_by_id(cls, id):
        """ 
        Returns AuthID object or None by id

        .. code-block:: python

           from apex.models import AuthID

           user = AuthID.get_by_id(1)
        """
        return DBSession.query(cls).filter(cls.id == id).first()

    def get_profile(self, request=None):
        """
        Returns AuthUser.profile object, creates record if it doesn't exist.

        .. code-block:: python

           from apex.models import AuthUser

           user = AuthUser.get_by_id(1)
           profile = user.get_profile(request)

        in **development.ini**

        .. code-block:: python

           apex.auth_profile = 
        """
        if not request:
            request = get_current_request()

        # The profile class is configured by dotted name; returns None when
        # no 'apex.auth_profile' setting is present.
        auth_profile = request.registry.settings.get('apex.auth_profile')
        if auth_profile:
            resolver = DottedNameResolver(auth_profile.split('.')[0])
            profile_cls = resolver.resolve(auth_profile)
            return get_or_create(DBSession, profile_cls, auth_id=self.id)

    @property
    def group_list(self):
        """Comma-separated string of this id's group names ('' when none)."""
        group_list = []
        if self.groups:
            for group in self.groups:
                group_list.append(group.name)
        return ','.join(map(str, group_list))
コード例 #22
0
class Family(Model, PriorityMixin):
    """A case/family of samples ordered by a customer for analysis."""

    __table_args__ = (UniqueConstraint("customer_id", "name", name="_customer_name_uc"),)

    action = Column(types.Enum(*CASE_ACTIONS))
    analyses = orm.relationship(Analysis, backref="family", order_by="-Analysis.completed_at")
    # Stored as comma-separated text; exposed as lists via the properties below.
    _cohorts = Column(types.Text)
    comment = Column(types.Text)
    created_at = Column(types.DateTime, default=dt.datetime.now)
    customer_id = Column(ForeignKey("customer.id", ondelete="CASCADE"), nullable=False)
    customer = orm.relationship(Customer, foreign_keys=[customer_id])
    data_analysis = Column(types.Enum(*list(Pipeline)))
    data_delivery = Column(types.Enum(*list(DataDelivery)))
    id = Column(types.Integer, primary_key=True)
    internal_id = Column(types.String(32), unique=True, nullable=False)
    name = Column(types.String(128), nullable=False)
    ordered_at = Column(types.DateTime, default=dt.datetime.now)
    _panels = Column(types.Text)

    priority = Column(types.Enum(Priority), default=Priority.standard, nullable=False)
    synopsis = Column(types.Text)

    @property
    def cohorts(self) -> List[str]:
        """Return a list of cohorts."""
        return self._cohorts.split(",") if self._cohorts else []

    @cohorts.setter
    def cohorts(self, cohort_list: List[str]):
        self._cohorts = ",".join(cohort_list) if cohort_list else None

    @property
    def panels(self) -> List[str]:
        """Return a list of panels."""
        return self._panels.split(",") if self._panels else []

    @panels.setter
    def panels(self, panel_list: List[str]):
        self._panels = ",".join(panel_list) if panel_list else None

    @property
    def latest_analyzed(self) -> Optional[dt.datetime]:
        """Completion time of the most recent analysis, if any."""
        return self.analyses[0].completed_at if self.analyses else None

    @property
    def latest_sequenced(self) -> Optional[dt.datetime]:
        """Latest sequencing date across linked samples (order date for
        external samples); None when nothing has been sequenced."""
        sequenced_dates = []
        for link in self.links:
            if link.sample.application_version.application.is_external:
                sequenced_dates.append(link.sample.ordered_at)
            elif link.sample.sequenced_at:
                sequenced_dates.append(link.sample.sequenced_at)
        return max(sequenced_dates) if sequenced_dates else None

    @property
    def all_samples_pass_qc(self) -> bool:
        """True when every linked sample is external or passes sequencing QC.

        Vacuously True when the family has no links.
        """
        pass_qc = []
        for link in self.links:
            if link.sample.application_version.application.is_external or link.sample.sequencing_qc:
                pass_qc.append(True)
            else:
                pass_qc.append(False)
        return all(pass_qc)

    def __str__(self) -> str:
        return f"{self.internal_id} ({self.name})"

    def to_dict(self, links: bool = False, analyses: bool = False) -> dict:
        """Represent as dictionary"""
        data = super(Family, self).to_dict()
        data["panels"] = self.panels
        data["priority"] = self.priority_human
        data["customer"] = self.customer.to_dict()
        if links:
            data["links"] = [link_obj.to_dict(samples=True) for link_obj in self.links]
        if analyses:
            data["analyses"] = [
                analysis_obj.to_dict(family=False) for analysis_obj in self.analyses
            ]
        return data
コード例 #23
0
ファイル: database.py プロジェクト: ankitdobhal/maestral
class SyncEvent(Base):  # type: ignore
    """Represents a file or folder change in the sync queue

    This class is used to represent both local and remote file system changes and track
    their sync progress. Some instance attributes will depend on the state of the sync
    session, e.g., :attr:`local_path` will depend on the current path of the local
    Dropbox folder. They may therefore become invalid between sync sessions.

    The class methods :meth:`from_dbx_metadata` and :meth:`from_file_system_event`
    should be used to properly construct a :class:`SyncEvent` from a
    :class:`dropbox.files.Metadata` instance or a
    :class:`watchdog.events.FileSystemEvent` instance, respectively.
    """

    __tablename__ = "history"

    id = Column(sqltypes.Integer, primary_key=True)
    """A unique identifier of the SyncEvent."""

    direction = Column(sqltypes.Enum(SyncDirection), nullable=False)
    """The :class:`SyncDirection`."""

    item_type = Column(sqltypes.Enum(ItemType), nullable=False)
    """
    The :class:`ItemType`. May be undetermined for remote deletions.
    """

    sync_time = Column(sqltypes.Float, nullable=False)
    """The time the SyncEvent was registered."""

    dbx_id = Column(sqltypes.String)
    """
    A unique dropbox ID for the file or folder. Will only be set for download events
    which are not deletions.
    """

    dbx_path = Column(StringPath, nullable=False)
    """
    Dropbox path of the item to sync. If the sync represents a move operation, this will
    be the destination path. Follows the casing from server.
    """

    local_path = Column(StringPath, nullable=False)
    """
    Local path of the item to sync. If the sync represents a move operation, this will
    be the destination path. This will be correctly cased.
    """

    dbx_path_from = Column(StringPath)
    """
    Dropbox path that this item was moved from. Will only be set if :attr:`change_type`
    is :attr:`ChangeType.Moved`. Follows the casing from server.
    """

    local_path_from = Column(StringPath)
    """
    Local path that this item was moved from. Will only be set if :attr:`change_type`
    is :attr:`ChangeType.Moved`. This will be correctly cased.
    """

    rev = Column(sqltypes.String)
    """
    The file revision. Will only be set for remote changes. Will be ``'folder'`` for
    folders and ``None`` for deletions.
    """

    content_hash = Column(sqltypes.String)
    """
    A hash representing the file content. Will be ``'folder'`` for folders and ``None``
    for deletions. Set for both local and remote changes.
    """

    change_type = Column(sqltypes.Enum(ChangeType), nullable=False)
    """
    The :class:`ChangeType`. Remote SyncEvents currently do not generate moved events
    but are reported as deleted and added at the new location.
    """

    change_time = Column(sqltypes.Float)
    """
    Local ctime or remote ``client_modified`` time for files. ``None`` for folders or
    for remote deletions. Note that ``client_modified`` may not be reliable as it is set
    by other clients and not verified.
    """

    change_dbid = Column(sqltypes.String)
    """
    The Dropbox ID of the account which performed the changes. This may not be set for
    added folders or deletions on the server.
    """

    change_user_name = Column(sqltypes.String)
    """
    The user name corresponding to :attr:`change_dbid`, if the account still exists.
    This field may not be set for performance reasons.
    """

    status = Column(sqltypes.Enum(SyncStatus), nullable=False)
    """The :class:`SyncStatus`."""

    size = Column(sqltypes.Integer, nullable=False)
    """Size of the item in bytes. Always zero for folders."""

    completed = Column(sqltypes.Integer, default=0)
    """
    File size in bytes which has already been uploaded or downloaded. Always zero for
    folders.
    """
    @hybrid_property
    def change_time_or_sync_time(self) -> float:
        """
        Change time when available, otherwise sync time. This can be used for sorting or
        user information purposes.
        """
        return self.change_time or self.sync_time

    @change_time_or_sync_time.expression  # type: ignore
    def change_time_or_sync_time(cls) -> Case:
        # SQL-side counterpart of the hybrid property above.
        # NOTE(review): the list form of case() is the pre-1.4 SQLAlchemy
        # calling convention — confirm against the pinned SQLAlchemy version
        # before modernizing.
        return case(
            [(cls.change_time != None, cls.change_time)],  # noqa: E711
            else_=cls.sync_time,
        )

    @property
    def is_file(self) -> bool:
        """Returns True for file changes"""
        return self.item_type == ItemType.File

    @property
    def is_directory(self) -> bool:
        """Returns True for folder changes"""
        return self.item_type == ItemType.Folder

    @property
    def is_added(self) -> bool:
        """Returns True for added items"""
        return self.change_type == ChangeType.Added

    @property
    def is_moved(self) -> bool:
        """Returns True for moved items"""
        return self.change_type == ChangeType.Moved

    @property
    def is_changed(self) -> bool:
        """Returns True for changed file contents"""
        return self.change_type == ChangeType.Modified

    @property
    def is_deleted(self) -> bool:
        """Returns True for deleted items"""
        return self.change_type == ChangeType.Removed

    @property
    def is_upload(self) -> bool:
        """Returns True for changes to upload"""
        return self.direction == SyncDirection.Up

    @property
    def is_download(self) -> bool:
        """Returns True for changes to download"""
        return self.direction == SyncDirection.Down

    def __repr__(self):
        return (
            f"<{self.__class__.__name__}(direction={self.direction.name}, "
            f"change_type={self.change_type.name}, item_type={self.item_type}, "
            f"dbx_path='{self.dbx_path}')>")

    @classmethod
    def from_dbx_metadata(cls, md: Metadata,
                          sync_engine: "SyncEngine") -> "SyncEvent":
        """
        Initializes a SyncEvent from the given Dropbox metadata.

        :param md: Dropbox Metadata.
        :param sync_engine: SyncEngine instance.
        :returns: An instance of this class with attributes populated from the given
            Dropbox Metadata.
        :raises RuntimeError: if ``md`` is not a Deleted-, Folder- or
            FileMetadata instance.
        """
        if isinstance(md, DeletedMetadata):
            # there is currently no API call to determine who deleted a file or folder
            change_type = ChangeType.Removed
            change_time = None
            size = 0
            rev = None
            hash_str = None
            dbx_id = None
            change_dbid = None

            # Infer the deleted item's type from our last-known local rev.
            local_rev = sync_engine.get_local_rev(md.path_lower)
            if local_rev == "folder":
                item_type = ItemType.Folder
            elif local_rev is not None:
                item_type = ItemType.File
            else:
                item_type = ItemType.Unknown

        elif isinstance(md, FolderMetadata):
            # there is currently no API call to determine who added a folder
            change_type = ChangeType.Added
            item_type = ItemType.Folder
            size = 0
            rev = "folder"
            hash_str = "folder"
            dbx_id = md.id
            change_time = None
            change_dbid = None

        elif isinstance(md, FileMetadata):
            item_type = ItemType.File
            rev = md.rev
            hash_str = md.content_hash
            dbx_id = md.id
            size = md.size
            change_time = md.client_modified.replace(
                tzinfo=timezone.utc).timestamp()
            # A known local rev means we already have the file -> Modified.
            if sync_engine.get_local_rev(md.path_lower):
                change_type = ChangeType.Modified
            else:
                change_type = ChangeType.Added
            if md.sharing_info:
                change_dbid = md.sharing_info.modified_by
            else:
                # file is not a shared folder, therefore
                # the current user must have added or modified it
                change_dbid = sync_engine.client.account_id
        else:
            raise RuntimeError(f"Cannot convert {md} to SyncEvent")

        dbx_path_cased = sync_engine.correct_case(md.path_display)

        return cls(
            direction=SyncDirection.Down,
            item_type=item_type,
            sync_time=time.time(),
            dbx_path=dbx_path_cased,
            dbx_id=dbx_id,
            local_path=sync_engine.to_local_path_from_cased(dbx_path_cased),
            rev=rev,
            content_hash=hash_str,
            change_type=change_type,
            change_time=change_time,
            change_dbid=change_dbid,
            status=SyncStatus.Queued,
            size=size,
            completed=0,
        )

    @classmethod
    def from_file_system_event(cls, event: FileSystemEvent,
                               sync_engine: "SyncEngine") -> "SyncEvent":
        """
        Initializes a SyncEvent from the given local file system event.

        :param event: Local file system event.
        :param sync_engine: SyncEngine instance.
        :returns: An instance of this class with attributes populated from the given
            SyncEvent.
        :raises RuntimeError: if the event type is not created, deleted,
            moved or modified.
        """

        change_dbid = sync_engine.client.account_id
        # Moved events carry dest_path; all others only src_path.
        to_path = getattr(event, "dest_path", event.src_path)
        from_path = None

        if event.event_type == EVENT_TYPE_CREATED:
            change_type = ChangeType.Added
        elif event.event_type == EVENT_TYPE_DELETED:
            change_type = ChangeType.Removed
        elif event.event_type == EVENT_TYPE_MOVED:
            change_type = ChangeType.Moved
            from_path = event.src_path
        elif event.event_type == EVENT_TYPE_MODIFIED:
            change_type = ChangeType.Modified
        else:
            raise RuntimeError(f"Cannot convert {event} to SyncEvent")

        change_time: Optional[float]
        stat: Optional[os.stat_result]

        # The item may already be gone (e.g. for deletions) — tolerate that.
        try:
            stat = os.stat(to_path)
        except OSError:
            stat = None

        if event.is_directory:
            item_type = ItemType.Folder
            size = 0
            # st_birthtime is only available on some platforms (e.g. macOS).
            try:
                change_time = stat.st_birthtime  # type: ignore
            except AttributeError:
                change_time = None
        else:
            item_type = ItemType.File
            change_time = stat.st_ctime if stat else None
            size = stat.st_size if stat else 0

        # Note: We get the content hash here instead of later, even though the
        # calculation may be slow and :meth:`from_file_system_event` may be called
        # serially and not from a thread pool. This is because hashing is CPU bound
        # and parallelization would cause large multi-core CPU usage (or result in
        # throttling of our thread-pool).

        return cls(
            direction=SyncDirection.Up,
            item_type=item_type,
            sync_time=time.time(),
            dbx_path=sync_engine.to_dbx_path(to_path),
            local_path=to_path,
            dbx_path_from=sync_engine.to_dbx_path(from_path)
            if from_path else None,
            local_path_from=from_path,
            content_hash=sync_engine.get_local_hash(to_path),
            change_type=change_type,
            change_time=change_time,
            change_dbid=change_dbid,
            status=SyncStatus.Queued,
            size=size,
            completed=0,
        )
def upgrade():
    """Migrate the analysis.status column from the old to the new enum set."""
    old_enum = types.Enum(*OLD_OPTIONS)
    new_enum = types.Enum(*NEW_OPTIONS)
    op.alter_column('analysis', 'status',
                    existing_type=old_enum,
                    type_=new_enum)
コード例 #25
0
ファイル: fields.py プロジェクト: annndrey/npui
	def update_impl(self):
		if self.enum:
			self.values = list(self.enum.values())
		self.impl = types.Enum(*self.values, name=self.name)