Example #1
class Node(Base):
    __tablename__ = 'nodes'
    __table_args__ = (
        UniqueConstraint('cluster_id', 'hostname',
                         name='_hostname_cluster_uc'),
    )
    id = Column(Integer, primary_key=True)
    uuid = Column(String(36), nullable=False,
                  default=lambda: str(uuid.uuid4()), unique=True)
    # the cluster this node belongs to
    cluster_id = Column(Integer, ForeignKey('clusters.id', ondelete='CASCADE'))
    group_id = Column(
        Integer,
        ForeignKey('nodegroups.id', ondelete='SET NULL'),
        nullable=True
    )
    name = Column(Unicode(100))
    # node status
    status = Column(
        Enum(*consts.NODE_STATUSES, name='node_status'),
        nullable=False,
        default=consts.NODE_STATUSES.discover
    )
    meta = Column(MutableDict.as_mutable(JSON), default={})
    mac = Column(psql.MACADDR, nullable=False, unique=True)
    ip = Column(psql.INET)
    hostname = Column(String(255), nullable=False,
                      default="", server_default="")
    manufacturer = Column(Unicode(50))
    platform_name = Column(String(150))
    kernel_params = Column(Text)
    progress = Column(Integer, default=0)
    os_platform = Column(String(150))
    pending_addition = Column(Boolean, default=False)
    pending_deletion = Column(Boolean, default=False)
    changes = relationship("ClusterChanges", backref="node")
    error_type = Column(String(100))
    error_msg = Column(Text)
    timestamp = Column(DateTime, nullable=False)
    online = Column(Boolean, default=True)
    labels = Column(
        MutableDict.as_mutable(JSON), nullable=False, server_default='{}')
    roles = Column(psql.ARRAY(String(consts.ROLE_NAME_MAX_SIZE)),
                   default=[], nullable=False, server_default='{}')
    pending_roles = Column(psql.ARRAY(String(consts.ROLE_NAME_MAX_SIZE)),
                           default=[], nullable=False, server_default='{}')
    primary_tags = Column(
        MutableList.as_mutable(psql.ARRAY(String(consts.ROLE_NAME_MAX_SIZE))),
        default=[], nullable=False, server_default='{}')

    nic_interfaces = relationship("NodeNICInterface", backref="node",
                                  cascade="all, delete-orphan",
                                  order_by="NodeNICInterface.name")
    bond_interfaces = relationship("NodeBondInterface", backref="node",
                                   cascade="all, delete-orphan",
                                   order_by="NodeBondInterface.name")
    # checksum of the raw node agent request data - used for caching purposes
    agent_checksum = Column(String(40), nullable=True)

    ip_addrs = relationship("IPAddr", viewonly=True)
    replaced_deployment_info = Column(MutableList.as_mutable(JSON), default=[])
    replaced_provisioning_info = Column(
        MutableDict.as_mutable(JSON), default={})
    network_template = Column(MutableDict.as_mutable(JSON), default=None,
                              server_default=None, nullable=True)
    extensions = Column(psql.ARRAY(String(consts.EXTENSION_NAME_MAX_SIZE)),
                        default=[], nullable=False, server_default='{}')
    vms_conf = Column(MutableList.as_mutable(JSON),
                      default=[], server_default='[]', nullable=False)
    attributes = Column(
        MutableDict.as_mutable(JSON),
        default={}, server_default='{}', nullable=False)

    @property
    def interfaces(self):
        return self.nic_interfaces + self.bond_interfaces

    @property
    def uid(self):
        return str(self.id)

    @property
    def offline(self):
        return not self.online

    @property
    def network_data(self):
        # TODO(enchantner): move to object
        from nailgun.extensions.network_manager.manager import NetworkManager
        return NetworkManager.get_node_networks(self)

    @property
    def needs_reprovision(self):
        return self.status == 'error' and self.error_type == 'provision' and \
            not self.pending_deletion

    @property
    def needs_redeploy(self):
        return (
            self.status in [
                consts.NODE_STATUSES.error,
                consts.NODE_STATUSES.provisioned,
                consts.NODE_STATUSES.stopped
            ] or len(self.pending_roles)) and not self.pending_deletion

    @property
    def needs_redeletion(self):
        return self.status == 'error' and self.error_type == 'deletion'

    @property
    def human_readable_name(self):
        return self.name or self.mac

    @property
    def full_name(self):
        return u'%s (id=%s, mac=%s)' % (self.name, self.id, self.mac)

    @property
    def all_roles(self):
        """Returns all roles, self.roles and self.pending_roles."""
        return set(self.pending_roles + self.roles)

    def _check_interface_has_required_params(self, iface):
        return bool(iface.get('name') and iface.get('mac'))

    def _clean_iface(self, iface):
        # sanitize speed fields: reset to None if the value is not a non-negative integer
        for param in ["max_speed", "current_speed"]:
            val = iface.get(param)
            if not (isinstance(val, int) and val >= 0):
                val = None
            iface[param] = val
        return iface

    def update_meta(self, data):
        # helper for basic validation of meta before an update
        result = []
        if "interfaces" in data:
            for iface in data["interfaces"]:
                if not self._check_interface_has_required_params(iface):
                    logger.warning(
                        "Invalid interface data: {0}. "
                        "Interfaces are not updated.".format(iface)
                    )
                    data["interfaces"] = self.meta.get("interfaces")
                    self.meta = data
                    return
                result.append(self._clean_iface(iface))

        data["interfaces"] = result
        self.meta = data

    def create_meta(self, data):
        # helper for basic validation of meta before creation
        result = []
        if "interfaces" in data:
            for iface in data["interfaces"]:
                if not self._check_interface_has_required_params(iface):
                    logger.warning(
                        "Invalid interface data: {0}. "
                        "Skipping interface.".format(iface)
                    )
                    continue
                result.append(self._clean_iface(iface))

        data["interfaces"] = result
        self.meta = data
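
A minimal usage sketch of the composite constraint above, assuming a configured session, an existing cluster with id 1, and MAC addresses chosen for illustration; a second node with the same (cluster_id, hostname) pair is rejected by _hostname_cluster_uc:

from datetime import datetime
from sqlalchemy.exc import IntegrityError

session.add(Node(cluster_id=1, hostname='node-1.example.org',
                 mac='aa:bb:cc:dd:ee:01', timestamp=datetime.utcnow()))
session.commit()

session.add(Node(cluster_id=1, hostname='node-1.example.org',
                 mac='aa:bb:cc:dd:ee:02', timestamp=datetime.utcnow()))
try:
    session.commit()
except IntegrityError:
    # duplicate (cluster_id, hostname) pair violates _hostname_cluster_uc
    session.rollback()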
Example #2
    Column('extent', Geometry, nullable=True),
    Column('name', String, nullable=False),
    Column('srid', Integer, nullable=False),
    Column('table_name', String, nullable=False),
    Column('file_name', String, nullable=False),
    Column('variable_id', Integer, ForeignKey('variable.id'), nullable=False),
    Column('provider_id', Integer, ForeignKey('provider.id'), nullable=False),
    Column('dataformat_id',
           Integer,
           ForeignKey('dataformat.id'),
           nullable=False),
    UniqueConstraint('start_time',
                     'end_time',
                     'level',
                     'variable_id',
                     'provider_id',
                     'extent',
                     'dataformat_id',
                     name='datagranule_unique_idx'))

mapper(DataGranule,
       datagranule,
       properties={
           'variable': relationship(Variable),
           'provider': relationship(Provider),
           'dataformat': relationship(DataFormat)
       })

rastertile = Table(
    'rastertile', metadata, Column('id', Integer, primary_key=True),
class DagRun(Base, LoggingMixin):
    """
    DagRun describes an instance of a Dag. It can be created
    by the scheduler (for regular runs) or by an external trigger
    """
    __tablename__ = "dag_run"

    ID_PREFIX = 'scheduled__'
    ID_FORMAT_PREFIX = ID_PREFIX + '{0}'

    id = Column(Integer, primary_key=True)
    dag_id = Column(String(ID_LEN))
    execution_date = Column(UtcDateTime, default=timezone.utcnow)
    start_date = Column(UtcDateTime, default=timezone.utcnow)
    end_date = Column(UtcDateTime)
    _state = Column('state', String(50), default=State.RUNNING)
    run_id = Column(String(ID_LEN))
    external_trigger = Column(Boolean, default=True)
    conf = Column(PickleType)

    dag = None

    __table_args__ = (
        Index('dag_id_state', dag_id, _state),
        UniqueConstraint('dag_id', 'execution_date'),
        UniqueConstraint('dag_id', 'run_id'),
    )

    def __repr__(self):
        return (
            '<DagRun {dag_id} @ {execution_date}: {run_id}, '
            'externally triggered: {external_trigger}>'
        ).format(
            dag_id=self.dag_id,
            execution_date=self.execution_date,
            run_id=self.run_id,
            external_trigger=self.external_trigger)

    def get_state(self):
        return self._state

    def set_state(self, state):
        if self._state != state:
            self._state = state
            self.end_date = timezone.utcnow() if self._state in State.finished() else None

    @declared_attr
    def state(self):
        return synonym('_state',
                       descriptor=property(self.get_state, self.set_state))

    @classmethod
    def id_for_date(cls, date, prefix=ID_FORMAT_PREFIX):
        return prefix.format(date.isoformat()[:19])

    @provide_session
    def refresh_from_db(self, session=None):
        """
        Reloads the current dagrun from the database

        :param session: database session
        """
        DR = DagRun

        exec_date = func.cast(self.execution_date, DateTime)

        dr = session.query(DR).filter(
            DR.dag_id == self.dag_id,
            func.cast(DR.execution_date, DateTime) == exec_date,
            DR.run_id == self.run_id
        ).one()

        self.id = dr.id
        self.state = dr.state

    @staticmethod
    @provide_session
    def find(dag_id=None, run_id=None, execution_date=None,
             state=None, external_trigger=None, no_backfills=False,
             session=None):
        """
        Returns a set of dag runs for the given search criteria.

        :param dag_id: the dag_id to find dag runs for
        :type dag_id: int, list
        :param run_id: defines the run id for this dag run
        :type run_id: str
        :param execution_date: the execution date
        :type execution_date: datetime.datetime
        :param state: the state of the dag run
        :type state: str
        :param external_trigger: whether this dag run is externally triggered
        :type external_trigger: bool
        :param no_backfills: return no backfills (True), return all (False).
            Defaults to False
        :type no_backfills: bool
        :param session: database session
        :type session: sqlalchemy.orm.session.Session
        """
        DR = DagRun

        qry = session.query(DR)
        if dag_id:
            qry = qry.filter(DR.dag_id == dag_id)
        if run_id:
            qry = qry.filter(DR.run_id == run_id)
        if execution_date:
            if isinstance(execution_date, list):
                qry = qry.filter(DR.execution_date.in_(execution_date))
            else:
                qry = qry.filter(DR.execution_date == execution_date)
        if state:
            qry = qry.filter(DR.state == state)
        if external_trigger is not None:
            qry = qry.filter(DR.external_trigger == external_trigger)
        if no_backfills:
            # in order to prevent a circular dependency
            from airflow.jobs import BackfillJob
            qry = qry.filter(DR.run_id.notlike(BackfillJob.ID_PREFIX + '%'))

        dr = qry.order_by(DR.execution_date).all()

        return dr

    @provide_session
    def get_task_instances(self, state=None, session=None):
        """
        Returns the task instances for this dag run
        """
        from airflow.models.taskinstance import TaskInstance  # Avoid circular import
        tis = session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.dag_id,
            TaskInstance.execution_date == self.execution_date,
        )
        if state:
            if isinstance(state, six.string_types):
                tis = tis.filter(TaskInstance.state == state)
            else:
                # this is required to deal with NULL values
                if None in state:
                    tis = tis.filter(
                        or_(TaskInstance.state.in_(state),
                            TaskInstance.state.is_(None))
                    )
                else:
                    tis = tis.filter(TaskInstance.state.in_(state))

        if self.dag and self.dag.partial:
            tis = tis.filter(TaskInstance.task_id.in_(self.dag.task_ids))

        return tis.all()

    @provide_session
    def get_task_instance(self, task_id, session=None):
        """
        Returns the task instance specified by task_id for this dag run

        :param task_id: the task id
        """

        from airflow.models.taskinstance import TaskInstance  # Avoid circular import
        TI = TaskInstance
        ti = session.query(TI).filter(
            TI.dag_id == self.dag_id,
            TI.execution_date == self.execution_date,
            TI.task_id == task_id
        ).first()

        return ti

    def get_dag(self):
        """
        Returns the Dag associated with this DagRun.

        :return: DAG
        """
        if not self.dag:
            raise AirflowException("The DAG (.dag) for {} needs to be set"
                                   .format(self))

        return self.dag

    @provide_session
    def get_previous_dagrun(self, state=None, session=None):
        # type: (Optional[str], Optional[Session]) -> Optional['DagRun']
        """The previous DagRun, if there is one"""

        session = cast(Session, session)  # mypy

        filters = [
            DagRun.dag_id == self.dag_id,
            DagRun.execution_date < self.execution_date,
        ]
        if state is not None:
            filters.append(DagRun.state == state)
        return session.query(DagRun).filter(
            *filters
        ).order_by(
            DagRun.execution_date.desc()
        ).first()

    @provide_session
    def get_previous_scheduled_dagrun(self, session=None):
        """The previous, SCHEDULED DagRun, if there is one"""
        dag = self.get_dag()

        return session.query(DagRun).filter(
            DagRun.dag_id == self.dag_id,
            DagRun.execution_date == dag.previous_schedule(self.execution_date)
        ).first()

    @provide_session
    def update_state(self, session=None):
        """
        Determines the overall state of the DagRun based on the state
        of its TaskInstances.

        :return: State
        """

        dag = self.get_dag()

        tis = self.get_task_instances(session=session)
        self.log.debug("Updating state for %s considering %s task(s)", self, len(tis))

        for ti in list(tis):
            # skip in db?
            if ti.state == State.REMOVED:
                tis.remove(ti)
            else:
                ti.task = dag.get_task(ti.task_id)

        # pre-calculate
        # db is faster
        start_dttm = timezone.utcnow()
        unfinished_tasks = self.get_task_instances(
            state=State.unfinished(),
            session=session
        )
        none_depends_on_past = all(not t.task.depends_on_past for t in unfinished_tasks)
        none_task_concurrency = all(t.task.task_concurrency is None
                                    for t in unfinished_tasks)
        # small speed up
        if unfinished_tasks and none_depends_on_past and none_task_concurrency:
            # todo: this can actually get pretty slow: one task costs between 0.01 and 0.15s
            no_dependencies_met = True
            for ut in unfinished_tasks:
                # We need to flag upstream and check for changes because upstream
                # failures/re-schedules can result in deadlock false positives
                old_state = ut.state
                deps_met = ut.are_dependencies_met(
                    dep_context=DepContext(
                        flag_upstream_failed=True,
                        ignore_in_retry_period=True,
                        ignore_in_reschedule_period=True),
                    session=session)
                if deps_met or old_state != ut.current_state(session=session):
                    no_dependencies_met = False
                    break

        duration = (timezone.utcnow() - start_dttm).total_seconds() * 1000
        Stats.timing("dagrun.dependency-check.{}".format(self.dag_id), duration)

        leaf_task_ids = {t.task_id for t in dag.leaves}
        leaf_tis = [ti for ti in tis if ti.task_id in leaf_task_ids]

        # if all tasks finished and at least one leaf failed, the run failed
        if not unfinished_tasks and any(
            leaf_ti.state in {State.FAILED, State.UPSTREAM_FAILED} for leaf_ti in leaf_tis
        ):
            self.log.info('Marking run %s failed', self)
            self.set_state(State.FAILED)
            dag.handle_callback(self, success=False, reason='task_failure',
                                session=session)

        # if all leaves succeeded and no unfinished tasks remain, the run succeeded
        elif not unfinished_tasks and all(
            leaf_ti.state in {State.SUCCESS, State.SKIPPED} for leaf_ti in leaf_tis
        ):
            self.log.info('Marking run %s successful', self)
            self.set_state(State.SUCCESS)
            dag.handle_callback(self, success=True, reason='success', session=session)

        # if *all tasks* are deadlocked, the run failed
        elif (unfinished_tasks and none_depends_on_past and
              none_task_concurrency and no_dependencies_met):
            self.log.info('Deadlock; marking run %s failed', self)
            self.set_state(State.FAILED)
            dag.handle_callback(self, success=False, reason='all_tasks_deadlocked',
                                session=session)

        # finally, if tasks are still unfinished, the dag is still running
        else:
            self.set_state(State.RUNNING)

        self._emit_duration_stats_for_finished_state()

        # todo: determine whether we want to use with_for_update to make sure the run is locked
        session.merge(self)
        session.commit()

        return self.state

    def _emit_duration_stats_for_finished_state(self):
        if self.state == State.RUNNING:
            return

        duration = (self.end_date - self.start_date)
        if self.state is State.SUCCESS:
            Stats.timing('dagrun.duration.success.{}'.format(self.dag_id), duration)
        elif self.state == State.FAILED:
            Stats.timing('dagrun.duration.failed.{}'.format(self.dag_id), duration)

    @provide_session
    def verify_integrity(self, session=None):
        """
        Verifies the DagRun by checking for removed tasks or tasks that are not in the
        database yet. It will set state to removed or add the task if required.
        """
        from airflow.models.taskinstance import TaskInstance  # Avoid circular import

        dag = self.get_dag()
        tis = self.get_task_instances(session=session)

        # check for removed or restored tasks
        task_ids = set()
        for ti in tis:
            task_instance_mutation_hook(ti)
            task_ids.add(ti.task_id)
            task = None
            try:
                task = dag.get_task(ti.task_id)
            except AirflowException:
                if ti.state == State.REMOVED:
                    pass  # ti has already been removed, just ignore it
                elif self.state is not State.RUNNING and not dag.partial:
                    self.log.warning("Failed to get task '{}' for dag '{}'. "
                                     "Marking it as removed.".format(ti, dag))
                    Stats.incr(
                        "task_removed_from_dag.{}".format(dag.dag_id), 1, 1)
                    ti.state = State.REMOVED

            is_task_in_dag = task is not None
            should_restore_task = is_task_in_dag and ti.state == State.REMOVED
            if should_restore_task:
                self.log.info("Restoring task '{}' which was previously "
                              "removed from DAG '{}'".format(ti, dag))
                Stats.incr("task_restored_to_dag.{}".format(dag.dag_id), 1, 1)
                ti.state = State.NONE
            session.merge(ti)

        # check for missing tasks
        for task in six.itervalues(dag.task_dict):
            if task.start_date > self.execution_date and not self.is_backfill:
                continue

            if task.task_id not in task_ids:
                Stats.incr(
                    "task_instance_created-{}".format(task.__class__.__name__),
                    1, 1)
                ti = TaskInstance(task, self.execution_date)
                task_instance_mutation_hook(ti)
                session.add(ti)

        try:
            session.commit()
        except IntegrityError as err:
            self.log.info(str(err))
            self.log.info(
                'Hit IntegrityError while creating the TIs for %s - %s',
                dag.dag_id, self.execution_date
            )
            self.log.info('Doing session rollback.')
            session.rollback()

    @staticmethod
    def get_run(session, dag_id, execution_date):
        """
        :param dag_id: DAG ID
        :type dag_id: unicode
        :param execution_date: execution date
        :type execution_date: datetime
        :return: DagRun corresponding to the given dag_id and execution date
            if one exists. None otherwise.
        :rtype: airflow.models.DagRun
        """
        qry = session.query(DagRun).filter(
            DagRun.dag_id == dag_id,
            DagRun.external_trigger == False, # noqa
            DagRun.execution_date == execution_date,
        )
        return qry.first()

    @property
    def is_backfill(self):
        from airflow.jobs import BackfillJob
        return (
            self.run_id is not None and
            self.run_id.startswith(BackfillJob.ID_PREFIX)
        )

    @classmethod
    @provide_session
    def get_latest_runs(cls, session):
        """Returns the latest DagRun for each DAG. """
        subquery = (
            session
            .query(
                cls.dag_id,
                func.max(cls.execution_date).label('execution_date'))
            .group_by(cls.dag_id)
            .subquery()
        )
        dagruns = (
            session
            .query(cls)
            .join(subquery,
                  and_(cls.dag_id == subquery.c.dag_id,
                       cls.execution_date == subquery.c.execution_date))
            .all()
        )
        return dagruns
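
A minimal sketch of what the two unique constraints above guarantee, assuming an open session, an existing dag with dag_id 'example_dag', and Airflow's timezone.datetime() helper; two runs may not share a (dag_id, execution_date) or a (dag_id, run_id) pair:

from sqlalchemy.exc import IntegrityError

session.add(DagRun(dag_id='example_dag', run_id='manual__2019-01-01',
                   execution_date=timezone.datetime(2019, 1, 1)))
session.commit()

session.add(DagRun(dag_id='example_dag', run_id='another_run',
                   execution_date=timezone.datetime(2019, 1, 1)))
try:
    session.commit()  # violates UniqueConstraint('dag_id', 'execution_date')
except IntegrityError:
    session.rollback()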
Example #5
class ConfigurationSetting(Base, HasFullTableCache):
    """An extra piece of site configuration.
    A ConfigurationSetting may be associated with an
    ExternalIntegration, a Library, both, or neither.
    * The secret used by the circulation manager to sign OAuth bearer
      tokens is not associated with an ExternalIntegration or with a
      Library.
    * The link to a library's privacy policy is associated with the
      Library, but not with any particular ExternalIntegration.
    * The "website ID" for an Overdrive collection is associated with
      an ExternalIntegration (the Overdrive integration), but not with
      any particular Library (since multiple libraries might share an
      Overdrive collection).
    * The "identifier prefix" used to determine which library a patron
      is a patron of, is associated with both a Library and an
      ExternalIntegration.
    """
    __tablename__ = 'configurationsettings'
    id = Column(Integer, primary_key=True)
    external_integration_id = Column(Integer,
                                     ForeignKey('externalintegrations.id'),
                                     index=True)
    library_id = Column(Integer, ForeignKey('libraries.id'), index=True)
    key = Column(Unicode, index=True)
    _value = Column(Unicode, name="value")

    __table_args__ = (UniqueConstraint('external_integration_id', 'library_id',
                                       'key'), )

    _cache = HasFullTableCache.RESET
    _id_cache = HasFullTableCache.RESET

    def __repr__(self):
        return u'<ConfigurationSetting: key=%s, ID=%d>' % (self.key, self.id)

    @classmethod
    def sitewide_secret(cls, _db, key):
        """Find or create a sitewide shared secret.
        The value of this setting doesn't matter, only that it's
        unique across the site and that it's always available.
        """
        secret = ConfigurationSetting.sitewide(_db, key)
        if not secret.value:
            secret.value = os.urandom(24).encode('hex')
            # Commit to get this in the database ASAP.
            _db.commit()
        return secret.value

    @classmethod
    def explain(cls, _db, include_secrets=False):
        """Explain all site-wide ConfigurationSettings."""
        lines = []
        site_wide_settings = []

        for setting in _db.query(ConfigurationSetting).filter(
                ConfigurationSetting.library == None).filter(
                    ConfigurationSetting.external_integration == None):
            if not include_secrets and setting.key.endswith("_secret"):
                continue
            site_wide_settings.append(setting)
        if site_wide_settings:
            lines.append("Site-wide configuration settings:")
            lines.append("---------------------------------")
        for setting in sorted(site_wide_settings, key=lambda s: s.key):
            if setting.value is None:
                continue
            lines.append("%s='%s'" % (setting.key, setting.value))
        return lines

    @classmethod
    def sitewide(cls, _db, key):
        """Find or create a sitewide ConfigurationSetting."""
        return cls.for_library_and_externalintegration(_db, key, None, None)

    @classmethod
    def for_library(cls, key, library):
        """Find or create a ConfigurationSetting for the given Library."""
        _db = Session.object_session(library)
        return cls.for_library_and_externalintegration(_db, key, library, None)

    @classmethod
    def for_externalintegration(cls, key, externalintegration):
        """Find or create a ConfigurationSetting for the given
        ExternalIntegration.
        """
        _db = Session.object_session(externalintegration)
        return cls.for_library_and_externalintegration(_db, key, None,
                                                       externalintegration)

    @classmethod
    def _cache_key(cls, library, external_integration, key):
        if library:
            library_id = library.id
        else:
            library_id = None
        if external_integration:
            external_integration_id = external_integration.id
        else:
            external_integration_id = None
        return (library_id, external_integration_id, key)

    def cache_key(self):
        return self._cache_key(self.library, self.external_integration,
                               self.key)

    @classmethod
    def for_library_and_externalintegration(cls, _db, key, library,
                                            external_integration):
        """Find or create a ConfigurationSetting associated with a Library
        and an ExternalIntegration.
        """
        def create():
            """Function called when a ConfigurationSetting is not found in cache
            and must be created.
            """
            return get_one_or_create(_db,
                                     ConfigurationSetting,
                                     library=library,
                                     external_integration=external_integration,
                                     key=key)

        # ConfigurationSettings are stored in cache based on their library,
        # external integration, and the name of the setting.
        cache_key = cls._cache_key(library, external_integration, key)
        setting, ignore = cls.by_cache_key(_db, cache_key, create)
        return setting

    @hybrid_property
    def value(self):
        """What's the current value of this configuration setting?
        If not present, the value may be inherited from some other
        ConfigurationSetting.
        """
        if self._value:
            # An explicitly set value always takes precedence.
            return self._value
        elif self.library and self.external_integration:
            # This is a library-specific specialization of an
            # ExternalIntegration. Treat the value set on the
            # ExternalIntegration as a default.
            return self.for_externalintegration(
                self.key, self.external_integration).value
        elif self.library:
            # This is a library-specific setting. Treat the site-wide
            # value as a default.
            _db = Session.object_session(self)
            return self.sitewide(_db, self.key).value
        return self._value

    @value.setter
    def value(self, new_value):
        if new_value is not None:
            new_value = unicode(new_value)
        self._value = new_value

    @classmethod
    def _is_secret(cls, key):
        """Should the value of the given key be treated as secret?
        This will have to do, in the absence of programmatic ways of
        saying that a specific setting should be treated as secret.
        """
        return any(key == x or key.startswith('%s_' % x)
                   or key.endswith('_%s' % x) or ("_%s_" % x) in key
                   for x in ('secret', 'password'))

    @property
    def is_secret(self):
        """Should the value of this key be treated as secret?"""
        return self._is_secret(self.key)

    def value_or_default(self, default):
        """Return the value of this setting. If the value is None,
        set it to `default` and return that instead.
        """
        if self.value is None:
            self.value = default
        return self.value

    MEANS_YES = set(['true', 't', 'yes', 'y'])

    @property
    def bool_value(self):
        """Turn the value into a boolean if possible.
        :return: A boolean, or None if there is no value.
        """
        if self.value:
            if self.value.lower() in self.MEANS_YES:
                return True
            return False
        return None

    @property
    def int_value(self):
        """Turn the value into an int if possible.
        :return: An integer, or None if there is no value.
        :raise ValueError: If the value cannot be converted to an int.
        """
        if self.value:
            return int(self.value)
        return None

    @property
    def float_value(self):
        """Turn the value into an float if possible.
        :return: A float, or None if there is no value.
        :raise ValueError: If the value cannot be converted to a float.
        """
        if self.value:
            return float(self.value)
        return None

    @property
    def json_value(self):
        """Interpret the value as JSON if possible.
        :return: An object, or None if there is no value.
        :raise ValueError: If the value cannot be parsed as JSON.
        """
        if self.value:
            return json.loads(self.value)
        return None

    # As of this release of the software, this is our best guess as to
    # which data sources should have their audiobooks excluded from
    # lanes.
    EXCLUDED_AUDIO_DATA_SOURCES_DEFAULT = [
        DataSourceConstants.OVERDRIVE, DataSourceConstants.AXIS_360,
        DataSourceConstants.RB_DIGITAL
    ]

    @classmethod
    def excluded_audio_data_sources(cls, _db):
        """List the data sources whose audiobooks should not be published in
        feeds, either because this server can't fulfill them or the
        expected client can't play them.
        Most methods like this go into Configuration, but this one needs
        to reference data model objects for its default value.
        """
        value = cls.sitewide(
            _db, Configuration.EXCLUDED_AUDIO_DATA_SOURCES).json_value
        if value is None:
            value = cls.EXCLUDED_AUDIO_DATA_SOURCES_DEFAULT
        return value
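
A minimal sketch of the lookup helpers and the value fallback described in the docstrings above, assuming _db is an active session, library is an existing Library, and the setting key is chosen for illustration:

# site-wide default, then a library-specific setting that first inherits and later overrides it
ConfigurationSetting.sitewide(_db, 'terms-of-service').value = 'https://example.org/tos'

setting = ConfigurationSetting.for_library('terms-of-service', library)
print(setting.value)   # falls back to the site-wide value
setting.value = 'https://library.example.org/tos'
print(setting.value)   # the explicitly set library value now takes precedence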
class DETSESSO_table:
    # PostgreSQL connection string
    internal_connection = Connection()

    # create engine and metadata

    engine = create_engine(internal_connection.conn_str(),
                           echo=False,
                           convert_unicode=True)
    metadata = MetaData(engine)

    # define tables
    detsesso_table = Table(
        'detsesso_table',
        metadata,
        Column('id_det_sesso', Integer, primary_key=True),
        Column('sito', Text),
        Column('num_individuo', Integer),
        Column('glab_grado_imp', Integer),
        Column('pmast_grado_imp', Integer),
        Column('pnuc_grado_imp', Integer),
        Column('pzig_grado_imp', Integer),
        Column('arcsop_grado_imp', Integer),
        Column('tub_grado_imp', Integer),
        Column('pocc_grado_imp', Integer),
        Column('inclfr_grado_imp', Integer),
        Column('zig_grado_imp', Integer),
        Column('msorb_grado_imp', Integer),
        Column('glab_valori', Integer),
        Column('pmast_valori', Integer),
        Column('pnuc_valori', Integer),
        Column('pzig_valori', Integer),
        Column('arcsop_valori', Integer),
        Column('tub_valori', Integer),
        Column('pocc_valori', Integer),
        Column('inclfr_valori', Integer),
        Column('zig_valori', Integer),
        Column('msorb_valori', Integer),
        Column('palato_grado_imp', Integer),
        Column('mfmand_grado_imp', Integer),
        Column('mento_grado_imp', Integer),
        Column('anmand_grado_imp', Integer),
        Column('minf_grado_imp', Integer),
        Column('brmont_grado_imp', Integer),
        Column('condm_grado_imp', Integer),
        Column('palato_valori', Integer),
        Column('mfmand_valori', Integer),
        Column('mento_valori', Integer),
        Column('anmand_valori', Integer),
        Column('minf_valori', Integer),
        Column('brmont_valori', Integer),
        Column('condm_valori', Integer),
        Column('sex_cr_tot', Float(2, 3)),
        Column('ind_cr_sex', String(100)),
        Column('sup_p_I', String(1)),
        Column('sup_p_II', String(1)),
        Column('sup_p_III', String(1)),
        Column('sup_p_sex', String(1)),
        Column('in_isch_I', String(1)),
        Column('in_isch_II', String(1)),
        Column('in_isch_III', String(1)),
        Column('in_isch_sex', String(1)),
        Column('arco_c_sex', String(1)),
        Column('ramo_ip_I', String(1)),
        Column('ramo_ip_II', String(1)),
        Column('ramo_ip_III', String(1)),
        Column('ramo_ip_sex', String(1)),
        Column('prop_ip_sex', String(1)),
        Column('ind_bac_sex', String(100)),

        # explicit/composite unique constraint.  'name' is optional.
        UniqueConstraint('sito', 'num_individuo', name='ID_det_sesso_unico'))

    metadata.create_all(engine)
class UT_table:
    # PostgreSQL connection string
    internal_connection = Connection()

    # create engine and metadata

    engine = create_engine(internal_connection.conn_str(),
                           echo=False,
                           convert_unicode=True)
    metadata = MetaData(engine)

    # define tables
    ut_table = Table(
        'ut_table',
        metadata,
        Column('id_ut', Integer, primary_key=True),  #0
        Column('progetto', String(100)),  #1
        Column('nr_ut', Integer),  #2
        Column('ut_letterale', String(100)),  #3
        Column('def_ut', String(100)),  #4
        Column('descrizione_ut', Text),  #5
        Column('interpretazione_ut', String(100)),  #6
        Column('nazione', String(100)),  #7
        Column('regione', String(100)),  #8
        Column('provincia', String(100)),  #9
        Column('comune', String(100)),  #10
        Column('frazione', String(100)),  #11
        Column('localita', String(100)),  #12
        Column('indirizzo', String(100)),  #13
        Column('nr_civico', String(100)),  #14
        Column('carta_topo_igm', String(100)),  #15
        Column('carta_ctr', String(100)),  #16
        Column('coord_geografiche', String(100)),  #17
        Column('coord_piane', String(100)),  #18
        Column('quota', Float(3, 2)),  #19
        Column('andamento_terreno_pendenza', String(100)),  #20
        Column('utilizzo_suolo_vegetazione', String(100)),  #21
        Column('descrizione_empirica_suolo', Text),  #22
        Column('descrizione_luogo', Text),  #23
        Column('metodo_rilievo_e_ricognizione', String(100)),  #24
        Column('geometria', String(100)),  #25
        Column('bibliografia', Text),  #26
        Column('data', String(100)),  #27
        Column('ora_meteo', String(100)),  #28
        Column('responsabile', String(100)),  #29
        Column('dimensioni_ut', String(100)),  #30
        Column('rep_per_mq', String(100)),  #31
        Column('rep_datanti', String(100)),  #32
        Column('periodo_I', String(100)),  #33
        Column('datazione_I', String(100)),  #34
        Column('interpretazione_I', String(100)),  #35
        Column('periodo_II', String(100)),  #36
        Column('datazione_II', String(100)),  #37
        Column('interpretazione_II', String(100)),  #38
        Column('documentazione', Text),  #39
        Column('enti_tutela_vincoli', String(100)),  #40
        Column('indagini_preliminari', String(100)),  #41

        # explicit/composite unique constraint.  'name' is optional.
        UniqueConstraint('progetto',
                         'nr_ut',
                         'ut_letterale',
                         name='ID_ut_unico'))

    metadata.create_all(engine)
Example #8

Base = declarative_base(cls=BaseMixin)

group_articles = Table(
    'group_articles',
    Base.metadata,
    Column('article_id',
           Integer,
           ForeignKey('article.id', ondelete='CASCADE'),
           nullable=False),
    Column('group_id',
           Integer,
           ForeignKey('group.id', ondelete='CASCADE'),
           nullable=False),
    UniqueConstraint('article_id', 'group_id'),
)

group_files = Table(
    'group_files',
    Base.metadata,
    Column('group_id',
           Integer,
           ForeignKey('group.id', ondelete='CASCADE'),
           nullable=False),
    Column('file_id',
           Integer,
           ForeignKey('file.id', ondelete='CASCADE'),
           nullable=False),
    UniqueConstraint('file_id', 'group_id'),
)
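
A minimal sketch of inserting into one of the association tables above, assuming an engine plus existing article and group rows; repeating the same (article_id, group_id) pair violates the unnamed UniqueConstraint:

from sqlalchemy.exc import IntegrityError

with engine.begin() as conn:
    conn.execute(group_articles.insert(), {'article_id': 1, 'group_id': 2})

try:
    with engine.begin() as conn:
        conn.execute(group_articles.insert(), {'article_id': 1, 'group_id': 2})
except IntegrityError:
    pass  # the pair is already linked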
Example #9
class Result(Base):
    ''' Data model for a job result. '''

    __tablename__ = 'result'
    __table_args__ = (UniqueConstraint('tracker_id',
                                       'site_url',
                                       name='tracker_id_site_url'), )

    STATUS_TYPES = [(u'f', u'Found'), (u'n', u'Not Found'), (u'e', u'Error')]

    id = Column(Integer, primary_key=True)
    tracker_id = Column(String(255), nullable=False)
    site_name = Column(String(255), nullable=False)
    site_url = Column(String(255), nullable=False)
    site_id = Column(Integer, nullable=False)
    status = Column(ChoiceType(STATUS_TYPES), nullable=False)
    image_file_id = Column(Integer,
                           ForeignKey('file.id', name='fk_image_file'),
                           nullable=True)
    image_file = relationship('File',
                              lazy='joined',
                              backref='result',
                              uselist=False,
                              cascade='all')
    error = Column(String(255), nullable=True)
    html = Column(Text, nullable=True)
    username = Column(String(255), nullable=False)
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
    user_id = Column(Integer,
                     ForeignKey('user.id', name='fk_result_user'),
                     nullable=False)

    def __init__(self,
                 tracker_id,
                 site_name,
                 site_url,
                 site_id,
                 status,
                 username,
                 user_id=None,
                 image_file_id=None,
                 thumb=None,
                 error=None,
                 html=None):
        ''' Constructor. '''

        self.tracker_id = tracker_id
        self.site_id = site_id
        self.site_name = site_name
        self.site_url = site_url
        self.status = status
        self.image_file_id = image_file_id
        self.thumb = thumb
        self.error = error
        self.html = html
        self.username = username
        self.user_id = user_id

    def as_dict(self):
        ''' Return dictionary representation of this result. '''

        if self.image_file is not None:
            image_file_url = self.image_file.url()
            image_file_name = self.image_file.name
        else:
            image_file_url = None
            image_file_name = None

        return {
            'created_at': self.created_at.isoformat(),
            'error': self.error,
            'html': self.html,
            'id': self.id,
            'image_file_id': self.image_file_id,
            'image_file_url': image_file_url,
            'image_file_name': image_file_name,
            'site_id': self.site_id,
            'site_name': self.site_name,
            'site_url': self.site_url,
            'status': self.status.code,
            'tracker_id': self.tracker_id,
            'username': self.username,
            'user_id': self.user_id,
        }
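
A minimal sketch of creating a Result, assuming a session, an existing user with id 1, and the short status code 'f' ("Found") accepted by ChoiceType:

result = Result(tracker_id='tracker-1', site_name='Example Site',
                site_url='https://example.org/someuser', site_id=1,
                status='f', username='someuser', user_id=1)
session.add(result)
session.commit()
# adding another Result with the same tracker_id and site_url would raise
# IntegrityError on commit because of the 'tracker_id_site_url' constraint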
Example #10
from sqlalchemy import Table, Column, MetaData, testing, ForeignKey, UniqueConstraint, \
    CheckConstraint
from sqlalchemy.types import Integer, String, Boolean
from sqlalchemy.testing import fixtures

meta = MetaData()

customer_table = Table('customer', meta,
                       Column('id', Integer, primary_key=True),
                       Column('name', String),
                       Column('email', String),
                       Column('verified', Boolean),
                       UniqueConstraint('email'))

order_table = Table('order', meta,
                    Column('id', Integer, primary_key=True),
                    Column('customer_id', Integer, ForeignKey('customer.id')),
                    Column('info', String),
                    Column('status', String, CheckConstraint("status in ('open', 'closed')")))

# Regression test for https://github.com/cockroachdb/cockroach/issues/26993
index_table = Table('index', meta,
                    Column('index', Integer, primary_key=True))
view_table = Table('view', meta,
                   Column('view', Integer, primary_key=True))


class IntrospectionTest(fixtures.TestBase):
    def teardown_method(self, method):
        meta.drop_all(testing.db)
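
A minimal sketch of reading the unique constraint back through runtime introspection, assuming a reachable database URL chosen for illustration:

from sqlalchemy import create_engine, inspect

engine = create_engine('postgresql://localhost/test')  # assumed DSN
meta.create_all(engine)

inspector = inspect(engine)
for uc in inspector.get_unique_constraints('customer'):
    print(uc['name'], uc['column_names'])   # includes the constraint on ('email',)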
def create_year_table(year):
    """
    :type year: int
    """

    # inspector = Inspector.from_engine(engine)
    # table_names = inspector.get_table_names()
    # from pyticas_tetres.db.model import Base
    Base = declarative_base(bind=conn.engine)

    # faverolles 10/8/2019 NOTE: HERE
    tt_table = type(
        'TravelTime%d' % year, (Base, ), {
            '__tablename__':
            'tt_%d' % year,
            'id':
            Column(Integer, primary_key=True, autoincrement=True),
            'route_id':
            Column(Integer,
                   ForeignKey(TTRoute.id, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            'route':
            relationship(TTRoute, backref=backref('tt_%d' % year)),
            'time':
            Column(DateTime, nullable=False),
            'tt':
            Column(Float, nullable=False),
            'vmt':
            Column(Float, nullable=True),
            'speed':
            Column(Float, nullable=True),
            'vht':
            Column(Float, nullable=True),
            'dvh':
            Column(Float, nullable=True),
            'lvmt':
            Column(Float, nullable=True),
            'sv':
            Column(Float, nullable=True),
            '_tt_weathers':
            relationship('TTWeather%d' % year, lazy='joined'),
            '_tt_incidents':
            relationship('TTIncident%d' % year, lazy='joined'),
            '_tt_workzones':
            relationship('TTWorkzone%d' % year, lazy='joined'),
            '_tt_specialevents':
            relationship('TTSpecialevent%d' % year, lazy='joined'),
            '_tt_snowmanagements':
            relationship('TTSnowmgmt%d' % year, lazy='joined'),
            '__table_args__':
            (UniqueConstraint('route_id', 'time', name='_tt_uc_%d' % year), )
        })
    tt_table.__repr__ = lambda self: \
        '<TravelTime%d id="%d" route_id="%d" time="%s" tt="%.1f" vmt="%.1f" speed="%.1f">' % (
            year, self.id, self.route_id, self.time, self.tt, self.vmt, self.speed)
    tt_table.__str__ = tt_table.__repr__

    noaa_table = type(
        'Noaa%d' % year, (Base, ), {
            '__tablename__':
            'noaa_weather_%d' % year,
            'id':
            Column(Integer, primary_key=True, unique=True, autoincrement=True),
            'usaf':
            Column(VARCHAR(6), nullable=False),
            'wban':
            Column(VARCHAR(5), nullable=False),
            'dtime':
            Column(DateTime, nullable=False),
            'precip':
            Column(Float, nullable=True),
            'precip_type':
            Column(VARCHAR(4), nullable=True),
            'precip_intensity':
            Column(VARCHAR(4), nullable=True),
            'precip_qc':
            Column(VARCHAR(4), nullable=True),
            'visibility':
            Column(Float, nullable=True),
            'visibility_qc':
            Column(VARCHAR(4), nullable=True),
            'obscuration':
            Column(VARCHAR(4), nullable=True),
            'descriptor':
            Column(VARCHAR(4), nullable=True),
            'air_temp':
            Column(Float, nullable=True),
            'air_temp_qc':
            Column(VARCHAR(4), nullable=True),
            'dew_point':
            Column(Float, nullable=True),
            'dew_point_qc':
            Column(VARCHAR(4), nullable=True),
            'relative_humidity':
            Column(Float, nullable=True),
            'wind_dir':
            Column(Integer, nullable=True),
            'wind_dir_qc':
            Column(VARCHAR(4), nullable=True),
            'wind_speed':
            Column(Float, nullable=True),
            'wind_speed_qc':
            Column(VARCHAR(4), nullable=True),
            'wind_gust':
            Column(Float, nullable=True),
            'wind_gust_qc':
            Column(VARCHAR(4), nullable=True),
        })
    noaa_table.__repr__ = lambda self: \
        '<NoaaWeather%d id="%d" usaf="%s" wban="%s" time="%s" precip="%s" precip_type="%s">' % (
            year, self.id, self.usaf, self.wban, self.dtime, self.precip, self.precip_type)
    noaa_table.__str__ = noaa_table.__repr__

    tt_weather_table = type(
        'TTWeather%d' % year, (Base, ), {
            '__tablename__':
            'tt_weather_%d' % year,
            'id':
            Column(Integer, primary_key=True, unique=True, autoincrement=True),
            'tt_id':
            Column(Integer,
                   ForeignKey('tt_%d.id' % year, ondelete='CASCADE'),
                   primary_key=True,
                   index=True),
            '_tt':
            relationship(tt_table, backref=backref('tt_weather_%d' % year)),
            'weather_id':
            Column(Integer,
                   ForeignKey('noaa_weather_%d.id' % year, ondelete='CASCADE'),
                   index=True),
            '_weather':
            relationship(noaa_table, backref=backref('tt_weather_%d' % year)),
            'is_extended':
            False,
        })
    setattr(tt_weather_table, 'oc_field', 'weather_id')
    tt_weather_table.__repr__ = lambda self: \
        '<TTWeather%d id="%d" tt_id="%d" weather_id="%d" precip="%s" precip_type="%s" precip_intensity="%s">' % (
            year, self.id, self.tt_id, self.weather_id,
            self._weather.precip if self._weather else 'N/A',
            self._weather.precip_type if self._weather else 'N/A',
            self._weather.precip_intensity if self._weather else 'N/A',)
    tt_weather_table.__str__ = tt_weather_table.__repr__

    tt_workzone_table = type(
        'TTWorkzone%d' % year,
        (Base, ),
        {
            '__tablename__':
            'tt_workzone_%d' % year,
            'id':
            Column(Integer, primary_key=True, unique=True, autoincrement=True),
            'tt_id':
            Column(Integer,
                   ForeignKey('tt_%d.id' % year, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_tt':
            relationship(tt_table, backref=backref('tt_workzone_%d' % year)),
            'workzone_id':
            Column(Integer,
                   ForeignKey(WorkZone.id, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_workzone':
            relationship(WorkZone),
            'loc_type':
            Column(Integer, nullable=True),
            'distance':
            Column(Float, nullable=True),  # in mile
            'off_distance':
            Column(Float, nullable=True),  # in mile
            'is_extended':
            False,
        })
    setattr(tt_workzone_table, 'oc_field', 'workzone_id')
    tt_workzone_table.__repr__ = lambda self: \
        '<TTWorkZone%d id="%d" tt_id="%d" workzone_id="%d" loc_type="%d" distance="%.2f" off_distance="%.2f">' % (
            year, self.id, self.tt_id, self.workzone_id, self.loc_type, self.distance, self.off_distance)
    tt_workzone_table.__str__ = tt_workzone_table.__repr__

    tt_specialevent_table = type(
        'TTSpecialevent%d' % year,
        (Base, ),
        {
            '__tablename__':
            'tt_specialevent_%d' % year,
            'id':
            Column(Integer, primary_key=True, unique=True, autoincrement=True),
            'tt_id':
            Column(Integer,
                   ForeignKey('tt_%d.id' % year, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_tt':
            relationship(tt_table,
                         backref=backref('tt_specialevent_%d' % year)),
            'specialevent_id':
            Column(Integer,
                   ForeignKey(Specialevent.id, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_specialevent':
            relationship(Specialevent),
            'distance':
            Column(Float, nullable=False),  # in mile
            'event_type':
            Column(CHAR, nullable=False),
            'is_extended':
            False,
        })
    setattr(tt_specialevent_table, 'oc_field', 'specialevent_id')
    tt_specialevent_table.__repr__ = lambda self: \
        '<TTSpecialEvent%d id="%d" tt_id="%d" specialevent_id="%d" distance="%d" event_type="%s">' % (
            year, self.id, self.tt_id, self.specialevent_id, self.distance,
            self.event_type)
    tt_specialevent_table.__str__ = tt_specialevent_table.__repr__

    tt_snowmgmt_table = type(
        'TTSnowmgmt%d' % year,
        (Base, ),
        {
            '__tablename__':
            'tt_snowmgmt_%d' % year,
            'id':
            Column(Integer, primary_key=True, unique=True, autoincrement=True),
            'tt_id':
            Column(Integer,
                   ForeignKey('tt_%d.id' % year, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_tt':
            relationship(tt_table, backref=backref('tt_snowmgmt_%d' % year)),
            'snowmgmt_id':
            Column(Integer,
                   ForeignKey(SnowManagement.id, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_snowmgmt':
            relationship(SnowManagement),
            'loc_type':
            Column(Integer, nullable=True),
            'distance':
            Column(Float, nullable=True),  # in mile
            'off_distance':
            Column(Float, nullable=True),  # in mile
            'road_status':
            Column(Integer, nullable=True),
            'recovery_level':
            Column(Integer, nullable=True),
            'is_extended':
            False,
        })
    setattr(tt_snowmgmt_table, 'oc_field', 'snowmgmt_id')
    tt_snowmgmt_table.__repr__ = lambda self: \
        '<TTSnowManagement%d id="%d" tt_id="%d" snowmgmt_id="%d"' \
        ' loc_type="%d" distance="%.2f" off_distance="%.2f" road_status="%d">' % (
            year, self.id, self.tt_id, self.snowmgmt_id,
            self.loc_type, self.distance, self.off_distance, self.road_status)
    tt_snowmgmt_table.__str__ = tt_snowmgmt_table.__repr__

    tt_incident_table = type(
        'TTIncident%d' % year, (Base, ), {
            '__tablename__':
            'tt_incident_%d' % year,
            'id':
            Column(Integer, primary_key=True, unique=True, autoincrement=True),
            'tt_id':
            Column(Integer,
                   ForeignKey('tt_%d.id' % year, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_tt':
            relationship(tt_table, backref=backref('tt_incident_%d' % year)),
            'incident_id':
            Column(Integer,
                   ForeignKey(Incident.id, ondelete='CASCADE'),
                   nullable=False,
                   index=True),
            '_incident':
            relationship(Incident),
            'distance':
            Column(Float, nullable=True),
            'off_distance':
            Column(Float, nullable=True),
            'is_extended':
            False,
        })
    setattr(tt_incident_table, 'oc_field', 'incident_id')
    tt_incident_table.__repr__ = lambda self: \
        '<TTIncidentInfo%d id="%d" tt_id="%d" incident_id="%d" distance="%.2f" off_distance="%.2f">' % (
            year, self.id, self.tt_id, self.incident_id, self.distance, self.off_distance)
    tt_incident_table.__str__ = tt_incident_table.__repr__

    TTTables[year] = tt_table
    TTWeatherTables[year] = tt_weather_table
    TTWorkzoneTables[year] = tt_workzone_table
    TTIncidentTables[year] = tt_incident_table
    TTSnowmgmtTables[year] = tt_snowmgmt_table
    TTSpecialeventTables[year] = tt_specialevent_table
    # WeatherTables[year] = weather_table
    NOAATables[year] = noaa_table

    for _ in range(10):
        try:
            Base.metadata.create_all()
            break
        except Exception as ex:
            # print('=-> exception occurred when creating yearly table: ', year)
            time.sleep(1)
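
A minimal sketch of using the generated per-year classes, assuming the module-level engine and a session are configured and route_id 1 references an existing TTRoute:

import datetime

create_year_table(2020)
TravelTime2020 = TTTables[2020]

session.add(TravelTime2020(route_id=1, tt=12.5,
                           time=datetime.datetime(2020, 1, 1, 8, 0)))
session.commit()
# a second row with the same (route_id, time) would violate _tt_uc_2020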
Example #12
class Config:
    sql_alchemy = [
        UniqueConstraint("collection", "name"),
    ]
Example #13
class Config:
    sql_alchemy = [
        UniqueConstraint("owner", "name"),
    ]
Example #14
    class User(Base):
        __tablename__ = 'users'
        __table_args__ = (UniqueConstraint('name', name='_users_uc'), )

        id = Column(Integer, primary_key=True)
        name = Column(String)
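
A minimal sketch showing where the named constraint ends up in the emitted DDL, assuming an in-memory SQLite engine and whatever Base the class above is declared on:

from sqlalchemy import create_engine

engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)
# emits: CREATE TABLE users (... CONSTRAINT _users_uc UNIQUE (name))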
Example #15
class DruidDatasource(Model, BaseDatasource):

    """ORM object referencing Druid datasources (tables)"""

    __tablename__ = 'datasources'

    type = 'druid'
    query_langtage = 'json'
    cluster_class = DruidCluster
    metric_class = DruidMetric
    column_class = DruidColumn

    baselink = 'druiddatasourcemodelview'

    # Columns
    datasource_name = Column(String(255))
    is_hidden = Column(Boolean, default=False)
    fetch_values_from = Column(String(100))
    cluster_name = Column(
        String(250), ForeignKey('clusters.cluster_name'))
    cluster = relationship(
        'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
    user_id = Column(Integer, ForeignKey('ab_user.id'))
    owner = relationship(
        sm.user_model,
        backref=backref('datasources', cascade='all, delete-orphan'),
        foreign_keys=[user_id])
    UniqueConstraint('cluster_name', 'datasource_name')

    export_fields = (
        'datasource_name', 'is_hidden', 'description', 'default_endpoint',
        'cluster_name', 'offset', 'cache_timeout', 'params',
    )

    @property
    def database(self):
        return self.cluster

    @property
    def connection(self):
        return str(self.database)

    @property
    def num_cols(self):
        return [c.column_name for c in self.columns if c.is_num]

    @property
    def name(self):
        return self.datasource_name

    @property
    def schema(self):
        ds_name = self.datasource_name or ''
        name_pieces = ds_name.split('.')
        if len(name_pieces) > 1:
            return name_pieces[0]
        else:
            return None

    @property
    def schema_perm(self):
        """Returns schema permission if present, cluster one otherwise."""
        return utils.get_schema_perm(self.cluster, self.schema)

    def get_perm(self):
        return (
            '[{obj.cluster_name}].[{obj.datasource_name}]'
            '(id:{obj.id})').format(obj=self)

    @property
    def link(self):
        name = escape(self.datasource_name)
        return Markup('<a href="{self.url}">{name}</a>').format(**locals())

    @property
    def full_name(self):
        return utils.get_datasource_full_name(
            self.cluster_name, self.datasource_name)

    @property
    def time_column_grains(self):
        return {
            'time_columns': [
                'all', '5 seconds', '30 seconds', '1 minute',
                '5 minutes', '1 hour', '6 hour', '1 day', '7 days',
                'week', 'week_starting_sunday', 'week_ending_saturday',
                'month',
            ],
            'time_grains': ['now'],
        }

    def __repr__(self):
        return self.datasource_name

    @renders('datasource_name')
    def datasource_link(self):
        url = '/superset/explore/{obj.type}/{obj.id}/'.format(obj=self)
        name = escape(self.datasource_name)
        return Markup('<a href="{url}">{name}</a>'.format(**locals()))

    def get_metric_obj(self, metric_name):
        return [
            m.json_obj for m in self.metrics
            if m.metric_name == metric_name
        ][0]

    @classmethod
    def import_obj(cls, i_datasource, import_time=None):
        """Imports the datasource from the object to the database.

         Metrics and columns and datasource will be overridden if exists.
         This function can be used to import/export dashboards between multiple
         superset instances. Audit metadata isn't copies over.
        """
        def lookup_datasource(d):
            return db.session.query(DruidDatasource).filter(
                DruidDatasource.datasource_name == d.datasource_name,
                DruidCluster.cluster_name == d.cluster_name,
            ).first()

        def lookup_cluster(d):
            return db.session.query(DruidCluster).filter_by(
                cluster_name=d.cluster_name).one()
        return import_util.import_datasource(
            db.session, i_datasource, lookup_cluster, lookup_datasource,
            import_time)

    @staticmethod
    def version_higher(v1, v2):
        """is v1 higher than v2

        >>> DruidDatasource.version_higher('0.8.2', '0.9.1')
        False
        >>> DruidDatasource.version_higher('0.8.2', '0.6.1')
        True
        >>> DruidDatasource.version_higher('0.8.2', '0.8.2')
        False
        >>> DruidDatasource.version_higher('0.8.2', '0.9.BETA')
        False
        >>> DruidDatasource.version_higher('0.8.2', '0.9')
        False
        """
        def int_or_0(v):
            try:
                v = int(v)
            except (TypeError, ValueError):
                v = 0
            return v
        v1nums = [int_or_0(n) for n in v1.split('.')]
        v2nums = [int_or_0(n) for n in v2.split('.')]
        v1nums = (v1nums + [0, 0, 0])[:3]
        v2nums = (v2nums + [0, 0, 0])[:3]
        return v1nums[0] > v2nums[0] or \
            (v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or \
            (v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and v1nums[2] > v2nums[2])

    def latest_metadata(self):
        """Returns segment metadata from the latest segment"""
        logging.info('Syncing datasource [{}]'.format(self.datasource_name))
        client = self.cluster.get_pydruid_client()
        try:
            results = client.time_boundary(datasource=self.datasource_name)
        except IOError:
            results = None
        if results:
            max_time = results[0]['result']['maxTime']
            max_time = dparse(max_time)
        else:
            max_time = datetime.now()
        # Query segmentMetadata for 7 days back. However, for Druid versions
        # older than 0.8.2 the right bound has to be set to more than 1 day
        # ago to exclude realtime segments, which triggered a bug (fixed in
        # Druid 0.8.2).
        # https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
        lbound = (max_time - timedelta(days=7)).isoformat()
        if not self.version_higher(self.cluster.druid_version, '0.8.2'):
            rbound = (max_time - timedelta(1)).isoformat()
        else:
            rbound = max_time.isoformat()
        segment_metadata = None
        try:
            segment_metadata = client.segment_metadata(
                datasource=self.datasource_name,
                intervals=lbound + '/' + rbound,
                merge=self.merge_flag,
                analysisTypes=[])
        except Exception as e:
            logging.warning('Failed first attempt to get latest segment')
            logging.exception(e)
        if not segment_metadata:
            # if no segments in the past 7 days, look at all segments
            lbound = datetime(1901, 1, 1).isoformat()[:10]
            if not self.version_higher(self.cluster.druid_version, '0.8.2'):
                rbound = datetime.now().isoformat()
            else:
                rbound = datetime(2050, 1, 1).isoformat()[:10]
            try:
                segment_metadata = client.segment_metadata(
                    datasource=self.datasource_name,
                    intervals=lbound + '/' + rbound,
                    merge=self.merge_flag,
                    analysisTypes=[])
            except Exception as e:
                logging.warning('Failed 2nd attempt to get latest segment')
                logging.exception(e)
        if segment_metadata:
            return segment_metadata[-1]['columns']

    def generate_metrics(self):
        self.generate_metrics_for(self.columns)

    def generate_metrics_for(self, columns):
        metrics = {}
        for col in columns:
            metrics.update(col.get_metrics())
        dbmetrics = (
            db.session.query(DruidMetric)
            .filter(DruidMetric.datasource_id == self.id)
            .filter(or_(DruidMetric.metric_name == m for m in metrics))
        )
        dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
        for metric in metrics.values():
            metric.datasource_id = self.id
            if not dbmetrics.get(metric.metric_name, None):
                with db.session.no_autoflush:
                    db.session.add(metric)

    @classmethod
    def sync_to_db_from_config(
            cls,
            druid_config,
            user,
            cluster,
            refresh=True):
        """Merges the ds config from druid_config into one stored in the db."""
        session = db.session
        datasource = (
            session.query(cls)
            .filter_by(datasource_name=druid_config['name'])
            .first()
        )
        # Create a new datasource.
        if not datasource:
            datasource = cls(
                datasource_name=druid_config['name'],
                cluster=cluster,
                owner=user,
                changed_by_fk=user.id,
                created_by_fk=user.id,
            )
            session.add(datasource)
        elif not refresh:
            return

        dimensions = druid_config['dimensions']
        col_objs = (
            session.query(DruidColumn)
            .filter(DruidColumn.datasource_id == datasource.id)
            .filter(or_(DruidColumn.column_name == dim for dim in dimensions))
        )
        col_objs = {col.column_name: col for col in col_objs}
        for dim in dimensions:
            col_obj = col_objs.get(dim, None)
            if not col_obj:
                col_obj = DruidColumn(
                    datasource_id=datasource.id,
                    column_name=dim,
                    groupby=True,
                    filterable=True,
                    # TODO: fetch type from Hive.
                    type='STRING',
                    datasource=datasource,
                )
                session.add(col_obj)
        # Import Druid metrics
        metric_objs = (
            session.query(DruidMetric)
            .filter(DruidMetric.datasource_id == datasource.id)
            .filter(or_(DruidMetric.metric_name == spec['name']
                    for spec in druid_config['metrics_spec']))
        )
        metric_objs = {metric.metric_name: metric for metric in metric_objs}
        for metric_spec in druid_config['metrics_spec']:
            metric_name = metric_spec['name']
            metric_type = metric_spec['type']
            metric_json = json.dumps(metric_spec)

            if metric_type == 'count':
                metric_type = 'longSum'
                metric_json = json.dumps({
                    'type': 'longSum',
                    'name': metric_name,
                    'fieldName': metric_name,
                })

            metric_obj = metric_objs.get(metric_name, None)
            if not metric_obj:
                metric_obj = DruidMetric(
                    metric_name=metric_name,
                    metric_type=metric_type,
                    verbose_name='%s(%s)' % (metric_type, metric_name),
                    datasource=datasource,
                    json=metric_json,
                    description=(
                        'Imported from the airolap config dir for %s' %
                        druid_config['name']),
                )
                session.add(metric_obj)
        session.commit()

    @staticmethod
    def time_offset(granularity):
        if granularity == 'week_ending_saturday':
            return 6 * 24 * 3600 * 1000  # 6 days
        return 0

    # uses https://en.wikipedia.org/wiki/ISO_8601
    # http://druid.io/docs/0.8.0/querying/granularities.html
    # TODO: pass origin from the UI
    @staticmethod
    def granularity(period_name, timezone=None, origin=None):
        if not period_name or period_name == 'all':
            return 'all'
        iso_8601_dict = {
            '5 seconds': 'PT5S',
            '30 seconds': 'PT30S',
            '1 minute': 'PT1M',
            '5 minutes': 'PT5M',
            '1 hour': 'PT1H',
            '6 hour': 'PT6H',
            'one day': 'P1D',
            '1 day': 'P1D',
            '7 days': 'P7D',
            'week': 'P1W',
            'week_starting_sunday': 'P1W',
            'week_ending_saturday': 'P1W',
            'month': 'P1M',
        }

        granularity = {'type': 'period'}
        if timezone:
            granularity['timeZone'] = timezone

        if origin:
            dttm = utils.parse_human_datetime(origin)
            granularity['origin'] = dttm.isoformat()

        if period_name in iso_8601_dict:
            granularity['period'] = iso_8601_dict[period_name]
            if period_name in ('week_ending_saturday', 'week_starting_sunday'):
                # use Sunday as start of the week
                granularity['origin'] = '2016-01-03T00:00:00'
        elif not isinstance(period_name, string_types):
            granularity['type'] = 'duration'
            granularity['duration'] = period_name
        elif period_name.startswith('P'):
            # identify if the string is the iso_8601 period
            granularity['period'] = period_name
        else:
            granularity['type'] = 'duration'
            granularity['duration'] = utils.parse_human_timedelta(
                period_name).total_seconds() * 1000
        return granularity

    @staticmethod
    def _metrics_and_post_aggs(metrics, metrics_dict):
        all_metrics = []
        post_aggs = {}

        def recursive_get_fields(_conf):
            _type = _conf.get('type')
            _field = _conf.get('field')
            _fields = _conf.get('fields')

            field_names = []
            if _type in ['fieldAccess', 'hyperUniqueCardinality',
                         'quantile', 'quantiles']:
                field_names.append(_conf.get('fieldName', ''))

            if _field:
                field_names += recursive_get_fields(_field)

            if _fields:
                for _f in _fields:
                    field_names += recursive_get_fields(_f)

            return list(set(field_names))

        for metric_name in metrics:
            metric = metrics_dict[metric_name]
            if metric.metric_type != 'postagg':
                all_metrics.append(metric_name)
            else:
                mconf = metric.json_obj
                all_metrics += recursive_get_fields(mconf)
                all_metrics += mconf.get('fieldNames', [])
                if mconf.get('type') == 'javascript':
                    post_aggs[metric_name] = JavascriptPostAggregator(
                        name=mconf.get('name', ''),
                        field_names=mconf.get('fieldNames', []),
                        function=mconf.get('function', ''))
                elif mconf.get('type') == 'quantile':
                    post_aggs[metric_name] = Quantile(
                        mconf.get('name', ''),
                        mconf.get('probability', ''),
                    )
                elif mconf.get('type') == 'quantiles':
                    post_aggs[metric_name] = Quantiles(
                        mconf.get('name', ''),
                        mconf.get('probabilities', ''),
                    )
                elif mconf.get('type') == 'fieldAccess':
                    post_aggs[metric_name] = Field(mconf.get('name'))
                elif mconf.get('type') == 'constant':
                    post_aggs[metric_name] = Const(
                        mconf.get('value'),
                        output_name=mconf.get('name', ''),
                    )
                elif mconf.get('type') == 'hyperUniqueCardinality':
                    post_aggs[metric_name] = HyperUniqueCardinality(
                        mconf.get('name'),
                    )
                elif mconf.get('type') == 'arithmetic':
                    post_aggs[metric_name] = Postaggregator(
                        mconf.get('fn', '/'),
                        mconf.get('fields', []),
                        mconf.get('name', ''))
                else:
                    post_aggs[metric_name] = CustomPostAggregator(
                        mconf.get('name', ''),
                        mconf)
        return all_metrics, post_aggs

    def values_for_column(self,
                          column_name,
                          limit=10000):
        """Retrieve some values for the given column"""
        # TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
        if self.fetch_values_from:
            from_dttm = utils.parse_human_datetime(self.fetch_values_from)
        else:
            from_dttm = datetime(1970, 1, 1)

        qry = dict(
            datasource=self.datasource_name,
            granularity='all',
            intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
            aggregations=dict(count=count('count')),
            dimension=column_name,
            metric='count',
            threshold=limit,
        )

        client = self.cluster.get_pydruid_client()
        client.topn(**qry)
        df = client.export_pandas()
        return [row[column_name] for row in df.to_records(index=False)]

    def get_query_str(self, query_obj, phase=1, client=None):
        return self.run_query(client=client, phase=phase, **query_obj)

    def _add_filter_from_pre_query_data(self, df, dimensions, dim_filter):
        ret = dim_filter
        if df is not None and not df.empty:
            new_filters = []
            for unused, row in df.iterrows():
                fields = []
                for dim in dimensions:
                    f = Dimension(dim) == row[dim]
                    fields.append(f)
                if len(fields) > 1:
                    term = Filter(type='and', fields=fields)
                    new_filters.append(term)
                elif fields:
                    new_filters.append(fields[0])
            if new_filters:
                ff = Filter(type='or', fields=new_filters)
                if not dim_filter:
                    ret = ff
                else:
                    ret = Filter(type='and', fields=[ff, dim_filter])
        return ret

    def run_query(  # noqa / druid
            self,
            groupby, metrics,
            granularity,
            from_dttm, to_dttm,
            filter=None,  # noqa
            is_timeseries=True,
            timeseries_limit=None,
            timeseries_limit_metric=None,
            row_limit=None,
            inner_from_dttm=None, inner_to_dttm=None,
            orderby=None,
            extras=None,  # noqa
            select=None,  # noqa
            columns=None, phase=2, client=None, form_data=None,
            order_desc=True):
        """Runs a query against Druid and returns a dataframe.
        """
        # TODO refactor into using a TBD Query object
        client = client or self.cluster.get_pydruid_client()

        if not is_timeseries:
            granularity = 'all'
        inner_from_dttm = inner_from_dttm or from_dttm
        inner_to_dttm = inner_to_dttm or to_dttm

        # add tzinfo to native datetime with config
        from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
        to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
        timezone = from_dttm.tzname()

        query_str = ''
        metrics_dict = {m.metric_name: m for m in self.metrics}

        columns_dict = {c.column_name: c for c in self.columns}

        all_metrics, post_aggs = self._metrics_and_post_aggs(
            metrics,
            metrics_dict)

        aggregations = OrderedDict()
        for m in self.metrics:
            if m.metric_name in all_metrics:
                aggregations[m.metric_name] = m.json_obj

        rejected_metrics = [
            m.metric_name for m in self.metrics
            if m.is_restricted and
            m.metric_name in aggregations.keys() and
            not sm.has_access('metric_access', m.perm)
        ]

        if rejected_metrics:
            raise MetricPermException(
                'Access to the metrics denied: ' + ', '.join(rejected_metrics),
            )

        # the dimensions list with dimensionSpecs expanded
        dimensions = []
        groupby = [gb for gb in groupby if gb in columns_dict]
        for column_name in groupby:
            col = columns_dict.get(column_name)
            dim_spec = col.dimension_spec
            if dim_spec:
                dimensions.append(dim_spec)
            else:
                dimensions.append(column_name)
        extras = extras or {}
        qry = dict(
            datasource=self.datasource_name,
            dimensions=dimensions,
            aggregations=aggregations,
            granularity=DruidDatasource.granularity(
                granularity,
                timezone=timezone,
                origin=extras.get('druid_time_origin'),
            ),
            post_aggregations=post_aggs,
            intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
        )

        filters = DruidDatasource.get_filters(filter, self.num_cols)
        if filters:
            qry['filter'] = filters

        having_filters = self.get_having_filters(extras.get('having_druid'))
        if having_filters:
            qry['having'] = having_filters
        order_direction = 'descending' if order_desc else 'ascending'
        if len(groupby) == 0 and not having_filters:
            del qry['dimensions']
            client.timeseries(**qry)
        elif (
            not having_filters and
            len(groupby) == 1 and
            order_desc and
            not isinstance(list(qry.get('dimensions'))[0], dict)
        ):
            dim = list(qry.get('dimensions'))[0]
            if timeseries_limit_metric:
                order_by = timeseries_limit_metric
            else:
                order_by = list(qry['aggregations'].keys())[0]
            # Limit on the number of timeseries, doing a two-phases query
            pre_qry = deepcopy(qry)
            pre_qry['granularity'] = 'all'
            pre_qry['threshold'] = min(row_limit,
                                       timeseries_limit or row_limit)
            pre_qry['metric'] = order_by
            pre_qry['dimension'] = dim
            del pre_qry['dimensions']
            client.topn(**pre_qry)
            query_str += '// Two phase query\n// Phase 1\n'
            query_str += json.dumps(
                client.query_builder.last_query.query_dict, indent=2)
            query_str += '\n'
            if phase == 1:
                return query_str
            query_str += (
                "// Phase 2 (built based on phase one's results)\n")
            df = client.export_pandas()
            qry['filter'] = self._add_filter_from_pre_query_data(
                df,
                qry['dimensions'], filters)
            qry['threshold'] = timeseries_limit or 1000
            if row_limit and granularity == 'all':
                qry['threshold'] = row_limit
            qry['dimension'] = dim
            del qry['dimensions']
            qry['metric'] = list(qry['aggregations'].keys())[0]
            client.topn(**qry)
        elif len(groupby) > 0:
            # If grouping on multiple fields or using a having filter
            # we have to force a groupby query
            if timeseries_limit and is_timeseries:
                order_by = metrics[0] if metrics else self.metrics[0]
                if timeseries_limit_metric:
                    order_by = timeseries_limit_metric
                # Limit on the number of timeseries, doing a two-phases query
                pre_qry = deepcopy(qry)
                pre_qry['granularity'] = 'all'
                pre_qry['limit_spec'] = {
                    'type': 'default',
                    'limit': min(timeseries_limit, row_limit),
                    'intervals': (
                        inner_from_dttm.isoformat() + '/' +
                        inner_to_dttm.isoformat()),
                    'columns': [{
                        'dimension': order_by,
                        'direction': order_direction,
                    }],
                }
                client.groupby(**pre_qry)
                query_str += '// Two phase query\n// Phase 1\n'
                query_str += json.dumps(
                    client.query_builder.last_query.query_dict, indent=2)
                query_str += '\n'
                if phase == 1:
                    return query_str
                query_str += (
                    "// Phase 2 (built based on phase one's results)\n")
                df = client.export_pandas()
                qry['filter'] = self._add_filter_from_pre_query_data(
                    df,
                    qry['dimensions'],
                    filters,
                )
                qry['limit_spec'] = None
            if row_limit:
                qry['limit_spec'] = {
                    'type': 'default',
                    'limit': row_limit,
                    'columns': [{
                        'dimension': (
                            metrics[0] if metrics else self.metrics[0]),
                        'direction': order_direction,
                    }],
                }
            client.groupby(**qry)
        query_str += json.dumps(
            client.query_builder.last_query.query_dict, indent=2)
        return query_str

    def query(self, query_obj):
        qry_start_dttm = datetime.now()
        client = self.cluster.get_pydruid_client()
        query_str = self.get_query_str(
            client=client, query_obj=query_obj, phase=2)
        df = client.export_pandas()

        if df is None or df.size == 0:
            raise Exception(_('No data was returned.'))
        df.columns = [
            DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]

        is_timeseries = query_obj['is_timeseries'] \
            if 'is_timeseries' in query_obj else True
        if (
                not is_timeseries and
                DTTM_ALIAS in df.columns):
            del df[DTTM_ALIAS]

        # Reordering columns
        cols = []
        if DTTM_ALIAS in df.columns:
            cols += [DTTM_ALIAS]
        cols += [col for col in query_obj['groupby'] if col in df.columns]
        cols += [col for col in query_obj['metrics'] if col in df.columns]
        df = df[cols]

        time_offset = DruidDatasource.time_offset(query_obj['granularity'])

        def increment_timestamp(ts):
            dt = utils.parse_human_datetime(ts).replace(
                tzinfo=DRUID_TZ)
            return dt + timedelta(milliseconds=time_offset)
        if DTTM_ALIAS in df.columns and time_offset:
            df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)

        return QueryResult(
            df=df,
            query=query_str,
            duration=datetime.now() - qry_start_dttm)

    @staticmethod
    def get_filters(raw_filters, num_cols):  # noqa
        filters = None
        for flt in raw_filters:
            if not all(f in flt for f in ['col', 'op', 'val']):
                continue

            col = flt['col']
            op = flt['op']
            eq = flt['val']
            cond = None
            if op in ('in', 'not in'):
                eq = [
                    types.replace('"', '').strip()
                    if isinstance(types, string_types)
                    else types
                    for types in eq]
            elif not isinstance(flt['val'], string_types):
                eq = eq[0] if eq and len(eq) > 0 else ''

            is_numeric_col = col in num_cols
            if is_numeric_col:
                if op in ('in', 'not in'):
                    eq = [utils.string_to_num(v) for v in eq]
                else:
                    eq = utils.string_to_num(eq)

            if op == '==':
                cond = Dimension(col) == eq
            elif op == '!=':
                cond = Dimension(col) != eq
            elif op in ('in', 'not in'):
                fields = []

                # ignore the filter if it has no value
                if not len(eq):
                    continue
                elif len(eq) == 1:
                    cond = Dimension(col) == eq[0]
                else:
                    for s in eq:
                        fields.append(Dimension(col) == s)
                    cond = Filter(type='or', fields=fields)

                if op == 'not in':
                    cond = ~cond

            elif op == 'regex':
                cond = Filter(type='regex', pattern=eq, dimension=col)
            elif op == '>=':
                cond = Bound(col, eq, None, alphaNumeric=is_numeric_col)
            elif op == '<=':
                cond = Bound(col, None, eq, alphaNumeric=is_numeric_col)
            elif op == '>':
                cond = Bound(
                    col, eq, None,
                    lowerStrict=True, alphaNumeric=is_numeric_col,
                )
            elif op == '<':
                cond = Bound(
                    col, None, eq,
                    upperStrict=True, alphaNumeric=is_numeric_col,
                )

            if filters:
                filters = Filter(type='and', fields=[
                    cond,
                    filters,
                ])
            else:
                filters = cond

        return filters

    def _get_having_obj(self, col, op, eq):
        cond = None
        if op == '==':
            if col in self.column_names:
                cond = DimSelector(dimension=col, value=eq)
            else:
                cond = Aggregation(col) == eq
        elif op == '>':
            cond = Aggregation(col) > eq
        elif op == '<':
            cond = Aggregation(col) < eq

        return cond

    def get_having_filters(self, raw_filters):
        filters = None
        reversed_op_map = {
            '!=': '==',
            '>=': '<',
            '<=': '>',
        }

        for flt in raw_filters:
            if not all(f in flt for f in ['col', 'op', 'val']):
                continue
            col = flt['col']
            op = flt['op']
            eq = flt['val']
            cond = None
            if op in ['==', '>', '<']:
                cond = self._get_having_obj(col, op, eq)
            elif op in reversed_op_map:
                cond = ~self._get_having_obj(col, reversed_op_map[op], eq)

            if filters:
                filters = filters & cond
            else:
                filters = cond
        return filters

    @classmethod
    def query_datasources_by_name(
            cls, session, database, datasource_name, schema=None):
        return (
            session.query(cls)
            .filter_by(cluster_name=database.id)
            .filter_by(datasource_name=datasource_name)
            .all()
        )
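
Note that the bare UniqueConstraint('cluster_name', 'datasource_name') in the
class body of Example #15 is never attached to the table: in declarative
SQLAlchemy a constraint built from column name strings only takes effect when
it is placed in __table_args__ (or passed to the Table itself). A minimal
sketch of the effective form:

class DruidDatasource(Model, BaseDatasource):
    __tablename__ = 'datasources'
    __table_args__ = (
        UniqueConstraint('cluster_name', 'datasource_name'),
    )
    # ... columns as above ...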
Example #16
0
File: models.py Project: pcodes/mhn
class Rule(db.Model, APIModel):

    # Defines some properties on the fields:
    # required: Is required for creating object via
    #           a POST request.
    # editable: Can be edited via a PUT request.
    # Defaults to False.
    all_fields = {
        'message': {
            'required': True,
            'editable': True
        },
        'references': {
            'required': True,
            'editable': False
        },
        'classtype': {
            'required': True,
            'editable': True
        },
        'sid': {
            'required': True,
            'editable': False
        },
        'rev': {
            'required': True,
            'editable': True
        },
        'date': {
            'required': False,
            'editable': False
        },
        'rule_format': {
            'required': True,
            'editable': False
        },
        'is_active': {
            'required': False,
            'editable': True
        },
        'notes': {
            'required': False,
            'editable': True
        }
    }

    __tablename__ = 'rules'

    id = db.Column(db.Integer, primary_key=True)
    message = db.Column(db.String(140))
    references = db.relationship('Reference', backref='rule', lazy='dynamic')
    classtype = db.Column(db.String(50))
    sid = db.Column(db.Integer)
    rev = db.Column(db.Integer)
    date = db.Column(db.DateTime(), default=datetime.utcnow)
    rule_format = db.Column(db.String(500))
    is_active = db.Column(db.Boolean)
    notes = db.Column(db.String(140))
    __table_args__ = (UniqueConstraint(sid, rev), )

    def __init__(self,
                 msg=None,
                 classtype=None,
                 sid=None,
                 rev=None,
                 date=None,
                 rule_format=None,
                 **args):
        self.message = msg
        self.classtype = classtype
        self.sid = sid
        self.rev = rev
        self.rule_format = rule_format
        self.is_active = True

    def insert_refs(self, refs):
        for r in refs:
            ref = Reference()
            ref.rule = self
            ref.text = r.decode('utf-8')
            db.session.add(ref)
        db.session.commit()

    def to_dict(self):
        return dict(sid=self.sid,
                    rev=self.rev,
                    msg=self.message,
                    classtype=self.classtype,
                    is_active=self.is_active)

    def __repr__(self):
        return '<Rule>{}'.format(self.to_dict())

    def render(self):
        """
        Takes Rule model and renders itself to plain text.
        """
        msg = 'msg:"{}"'.format(self.message)
        classtype = 'classtype:{}'.format(self.classtype)
        sid = 'sid:{}'.format(self.sid)
        rev = 'rev:{}'.format(self.rev)
        reference = ''
        for r in self.references:
            reference += 'reference:{}; '.format(r.text)
        # Remove trailing '; ' from references.
        reference = reference[:-2]
        return self.rule_format.format(msg=msg,
                                       sid=sid,
                                       rev=rev,
                                       classtype=classtype,
                                       reference=reference)

    @classmethod
    def renderall(cls):
        """
        Renders latest revision of active rules.
        This method must be called within a Flask app
        context.
        """
        rules = cls.query.filter_by(is_active=True).\
                    group_by(cls.sid).\
                    having(func.max(cls.rev))
        return '\n\n'.join([ru.render() for ru in rules])

    @classmethod
    def bulk_import(cls, rulelist):
        """
        Imports rules into the database.
        This method must be called within a Flask app
        context.
        """
        cnt = 0
        for ru in rulelist:
            # Checking for rules with this sid.
            if cls.query.\
                   filter_by(sid=ru['sid']).\
                   filter(cls.rev >= ru['rev']).count() == 0:
                # All rules with this sid have a lower rev number than
                # the incoming one, or this is a new sid altogether.
                rule = cls(**ru)
                rule.insert_refs(ru['references'])
                db.session.add(rule)
                # Disabling older rules.
                cls.query.\
                    filter_by(sid=ru['sid']).\
                    filter(cls.rev < ru['rev']).\
                    update({'is_active': False}, False)
            cnt += 1
            if cnt % 500 == 0:
                print 'Imported {} rules so far...'.format(cnt)
        print 'Finished Importing {} rules.  Committing data'.format(cnt)
        db.session.commit()
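
Because (sid, rev) is declared unique above, bulk_import only inserts a rule
when no row with the same sid has an equal or higher rev, then deactivates the
older revisions. A hypothetical call could look like this (field values are
illustrative only, and it must run inside a Flask app context):

rules = [{
    'msg': 'Example policy rule',
    'classtype': 'policy-violation',
    'sid': 2100498,
    'rev': 7,
    'rule_format': 'alert ip any any -> any any ({msg}; {reference}; '
                   '{classtype}; {sid}; {rev};)',
    'references': [b'url,example.com/advisory'],
}]
Rule.bulk_import(rules)
print(Rule.renderall())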
Example #17
0
    __mapper_args__ = {
        "polymorphic_on":
        case(
            [
                (isFromAllocineProvider, "allocine_venue_provider"),
            ],
            else_="venue_provider",
        ),
        "polymorphic_identity":
        "venue_provider",
    }

    __table_args__ = (UniqueConstraint(
        "venueId",
        "providerId",
        "venueIdAtOfferProvider",
        name="unique_venue_provider",
    ), )

    @staticmethod
    def restize_integrity_error(internal_error):
        if "unique_venue_provider" in str(internal_error.orig):
            return ["global", "Votre lieu est déjà lié à cette source"]
        return PcObject.restize_integrity_error(internal_error)

    @property
    def nOffers(self):
        # pylint: disable=comparison-with-callable
        return Offer.query.filter(Offer.venueId == self.venueId).filter(
            Offer.lastProviderId == self.providerId).count()
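
restize_integrity_error above turns a violation of the unique_venue_provider
constraint into a user-facing message. A hedged usage sketch (the class name
VenueProvider, the venue_provider instance and the db.session handling are
assumptions, not part of the original snippet):

from sqlalchemy.exc import IntegrityError

try:
    db.session.add(venue_provider)
    db.session.commit()
except IntegrityError as exc:
    db.session.rollback()
    field, message = VenueProvider.restize_integrity_error(exc)
    print(field, message)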
Example #18
0
    class User(Base, BaseModel):
        __tablename__ = "users"
        __table_args__ = (UniqueConstraint("name", name="_users_uc"), )

        id = Column(Integer, primary_key=True)
        name = Column(String)
Example #19
0
class DETETA_table:
    # connection string for postgres
    internal_connection = Connection()

    # create engine and metadata

    engine = create_engine(internal_connection.conn_str(),
                           echo=False,
                           convert_unicode=True)
    metadata = MetaData(engine)

    # define tables
    deteta_table = Table(
        'deteta_table',
        metadata,
        Column('id_det_eta', Integer, primary_key=True),
        Column('sito', Text),
        Column('nr_individuo', Integer),
        Column('sinf_min', Integer),
        Column('sinf_max', Integer),
        Column('sinf_min_2', Integer),
        Column('sinf_max_2', Integer),
        Column('SSPIA', Integer),
        Column('SSPIB', Integer),
        Column('SSPIC', Integer),
        Column('SSPID', Integer),
        Column('sup_aur_min', Integer),
        Column('sup_aur_max', Integer),
        Column('sup_aur_min_2', Integer),
        Column('sup_aur_max_2', Integer),
        Column('ms_sup_min', Integer),
        Column('ms_sup_max', Integer),
        Column('ms_inf_min', Integer),
        Column('ms_inf_max', Integer),
        Column('usura_min', Integer),
        Column('usura_max', Integer),
        Column('Id_endo', Integer),
        Column('Is_endo', Integer),
        Column('IId_endo', Integer),
        Column('IIs_endo', Integer),
        Column('IIId_endo', Integer),
        Column('IIIs_endo', Integer),
        Column('IV_endo', Integer),
        Column('V_endo', Integer),
        Column('VI_endo', Integer),
        Column('VII_endo', Integer),
        Column('VIIId_endo', Integer),
        Column('VIIIs_endo', Integer),
        Column('IXd_endo', Integer),
        Column('IXs_endo', Integer),
        Column('Xd_endo', Integer),
        Column('Xs_endo', Integer),
        Column('endo_min', Integer),
        Column('endo_max', Integer),
        Column('volta_1', Integer),
        Column('volta_2', Integer),
        Column('volta_3', Integer),
        Column('volta_4', Integer),
        Column('volta_5', Integer),
        Column('volta_6', Integer),
        Column('volta_7', Integer),
        Column('lat_6', Integer),
        Column('lat_7', Integer),
        Column('lat_8', Integer),
        Column('lat_9', Integer),
        Column('lat_10', Integer),
        Column('volta_min', Integer),
        Column('volta_max', Integer),
        Column('ant_lat_min', Integer),
        Column('ant_lat_max', Integer),
        Column('ecto_min', Integer),
        Column('ecto_max', Integer),
        # explicit/composite unique constraint.  'name' is optional.
        UniqueConstraint('sito', 'nr_individuo', name='ID_det_eta_unico'))

    metadata.create_all(engine)
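
Since the constraint is declared directly on the Core Table, it can be
exercised through the engine without any ORM mapping. A minimal sketch
(values are illustrative only):

from sqlalchemy.exc import IntegrityError

ins = DETETA_table.deteta_table.insert()
with DETETA_table.engine.begin() as conn:
    conn.execute(ins, {'sito': 'Site A', 'nr_individuo': 1})
try:
    with DETETA_table.engine.begin() as conn:
        conn.execute(ins, {'sito': 'Site A', 'nr_individuo': 1})
except IntegrityError:
    pass  # duplicate (sito, nr_individuo) rejected by ID_det_eta_unico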
Example #20
0
           nullable=False),
    Column('status',
           Enum(name='resource_statuses', *RESOURCE_STATUSES),
           nullable=False,
           server_default='PENDING',
           default='PENDING'),
    Column('parent_zone_id', UUID, default=None, nullable=True),
    Column('action',
           Enum(name='actions', *ACTIONS),
           default='CREATE',
           server_default='CREATE',
           nullable=False),
    Column('pool_id', UUID, default=None, nullable=True),
    Column('reverse_name', String(255), nullable=False),
    Column('delayed_notify', Boolean, default=False),
    UniqueConstraint('name', 'deleted', 'pool_id', name='unique_zone_name'),
    ForeignKeyConstraint(['parent_zone_id'], ['zones.id'],
                         ondelete='SET NULL'),
    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

zone_attributes = Table('zone_attributes',
                        metadata,
                        Column('id',
                               UUID,
                               default=utils.generate_uuid,
                               primary_key=True),
                        Column('version', Integer, default=1, nullable=False),
                        Column('created_at',
                               DateTime,
Example #21
0
class Location(BaseModel):
    """
    Stores a location.
    """
    __tablename__ = 'location'
    __table_args__ = (
        UniqueConstraint('lat',
                         'lon',
                         name='one_location_at_each_lat_lon_pair'),
        CheckConstraint('lat Between -90 and 90', name='lat_bounds'),
        CheckConstraint('lon Between -180 and 180', name='lon_bounds'),
    )

    # id
    id = db.Column(db.Integer, primary_key=True)
    source_id = db.Column(db.Integer)
    version = db.Column(db.Integer, default=0)

    # relationships
    records = relationship("Record", cascade="all, delete-orphan")
    points = relationship("Point", cascade="all, delete-orphan")
    images = relationship("Image", cascade="all, delete-orphan")

    # who
    user_id = db.Column(db.Integer, ForeignKey('users.id'))
    source = db.Column(db.String)

    # where
    lat = db.Column(db.Float, nullable=False, index=True)
    lon = db.Column(db.Float, nullable=False, index=True)

    original_lat = db.Column(db.Float, nullable=False)
    original_lon = db.Column(db.Float, nullable=False)

    # offset
    # bearing from lat lon to center of field from lat lon
    bearing = db.Column(db.Float, default=-1)
    distance = db.Column(
        db.Integer)  # distance along bearing to center of field from lat lon
    accuracy = db.Column(db.Integer)

    country = db.Column(db.String)
    continent = db.Column(db.String)
    field = db.Column(db.String)

    # when
    date_created = db.Column(db.DateTime, default=db.func.now())
    date_edited = db.Column(db.DateTime,
                            default=db.func.now(),
                            onupdate=db.func.now())

    # use
    use_validation = db.Column(db.Boolean, default=False, index=True)
    use_validation_locked = db.Column(db.Boolean, default=False, index=True)
    use_private = db.Column(db.Boolean, default=False, index=True)
    use_deleted = db.Column(db.Boolean, default=False, index=True)
    use_invalid = db.Column(db.Boolean, default=False, index=True)
    use_invalid_reason = db.Column(db.String)

    def __init__(self, *args, **kwargs):
        # convert to float if str
        self.lat = float(kwargs['lat'])
        self.lon = float(kwargs['lon'])

        self.original_lat = self.lat
        self.original_lon = self.lon

        assert abs(self.lat) < 90, 'lat exceeds bounds'
        assert abs(self.lon) < 180, 'lon exceeds bounds'

        super(Location, self).__init__(*args, **kwargs)

        if self.bearing is not None and self.bearing != -1 and self.distance is not None and self.distance > 0:
            self.offset(self.bearing, self.distance)

        if 'use_validation' not in kwargs and 'use_validation_locked' not in kwargs:
            self.use_validation = random.choice([True, False, False])
            if self.use_validation:
                self.use_validation_locked = random.choice([True, False])

        self.check_neighbor_use()
        # self.check_neighbor_field()

    def check_neighbor_use(self, threshold=1000):
        """
        Requires nearby samples to be used either for validation or training and not both.

        First finds all nearby samples within a specific radius.

        Next it determines the use of its neighbors. If there is no single use but a mix of uses,
        do not accept the sample and review at a later time. If there is a single use, reassign
        the current sample to the same use.

        Finally clean up the validation_locked setting if necessary.

        If a single use cannot be determined, the sample is marked invalid to
        trigger a later review (a standalone sketch of this rule follows this
        example).

        :param threshold: Integer in meters
        :return: None
        """
        assert threshold > 0
        assert isinstance(self.lat, (int, long, float)), 'lat not number'
        assert isinstance(self.lon, (int, long, float)), 'lon not number'

        # get nearby locations to this location
        nearby_locations = Location.within(self.lat, self.lon, threshold)

        # if there are nearby locations
        if len(nearby_locations) > 0:
            use_validation = 0
            use_training = 0

            # get neighbors use
            for location in nearby_locations:

                # don't worry about these locations
                if location.use_invalid or location.use_deleted:
                    continue

                if location.use_validation:
                    use_validation += 1
                else:
                    use_training += 1

            # check if there is a mix of uses nearby
            if abs(use_training - use_validation) == len(nearby_locations):
                # apply the use to this sample
                self.use_validation = use_validation > use_training

                # after change clear validation_locked if use is for training
                if not self.use_validation:
                    self.use_validation_locked = False

            # else if not all one use mark it as invalid
            # todo what is the best way to handle this? send message for review
            else:
                self.use_invalid = True
                self.use_invalid_reason = '[Neighbor sample use is mix of training and validation]'

    def check_neighbor_field(self, meters=100):
        # get nearby locations to this location
        nearby_locations = Location.within(self.lat, self.lon, meters)

        # if there are nearby locations
        if len(nearby_locations) > 0:
            self.use_invalid = True
            if self.use_invalid_reason is None:
                self.use_invalid_reason = '[Same field as another location]'
            else:
                self.use_invalid_reason += '[Same field as another location]'

    @classmethod
    def within(cls, lat, lon, meters):
        """
        Finds all samples within a radius of x meters from lat lon pair.
        :param lat:
        :param lon:
        :param meters:
        :return:
        """
        assert isinstance(meters, (int, long, float)), 'meters not a number'
        assert isinstance(lat, (int, long, float)), 'lat not number'
        assert isinstance(lon, (int, long, float)), 'lon not number'
        assert meters > 0
        assert abs(lat) < 90, 'lat exceeds bounds'
        assert abs(lon) < 180, 'lon exceeds bounds'

        # TODO parameterize
        sql = "select * from (SELECT * FROM location WHERE abs(lon - %f) < %f and abs(lat - %f) < %f) as pt where st_distance_sphere(st_makepoint(pt.lon, pt.lat), st_makepoint(%f,%f)) " % (
            float(lon), 0.1, float(lat), 0.1, float(lon), float(lat))
        sql += "< %d " % meters

        return db.session.query(Location).from_statement(text(sql)).all()

    def offset(self, bearing, meters):
        """
        Offsets the location to center of area.
        :param bearing: direction float
        :param meters: distance int
        :return: None
        """
        km = float(meters) / 1000.0

        self.lat, self.lon = get_destination(self.lat, self.lon, bearing, km)
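
The neighbor-use rule that check_neighbor_use describes can be summarised as a
small standalone function. This is an illustrative sketch of the same decision
logic without the database dependencies, not part of the project's API:

def resolve_use(neighbor_uses):
    """neighbor_uses: list of 'validation' / 'training' strings for nearby,
    non-deleted, non-invalid samples. Returns the single neighboring use to
    adopt, or None when the neighborhood is empty or mixed (a mixed
    neighborhood means the sample should be flagged invalid for review)."""
    uses = set(neighbor_uses)
    if len(uses) == 1:
        return uses.pop()      # all neighbors agree: adopt their use
    return None                # empty or mixed neighborhood

assert resolve_use(['training', 'training']) == 'training'
assert resolve_use(['training', 'validation']) is None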
Example #22
0
User = Table(
    'users',
    metadata,
    Column('uuid', UUID(), primary_key=True),
    Column('username', String(255), nullable=False),
    Column('name', String(255), nullable=False),
    Column('company', String(255), nullable=True),
    Column('password', String(128), nullable=False),
    Column('created_at', DateTime, nullable=False, server_default=func.now()),
    Column('updated_at',
           DateTime,
           nullable=False,
           server_default=func.now(),
           onupdate=func.now()),
    Column('deleted_at', DateTime, nullable=True),
    UniqueConstraint('username', name='uq_user_username'),
)

CreditCard = Table(
    'credit_card',
    metadata,
    Column('brand', String(20),
           nullable=False),  # The brand of the card, like 'visa'
    Column('card_id', String(100),
           nullable=False),  # The ID of the card from Stripe
    Column('country', String(1024),
           nullable=False),  # The Country of the card, like 'US'
    Column('cvc_check', String(100),
           nullable=False),  # The CVC value from Stripe, like 'unchecked'
    Column('expiration_month', Integer(),
           nullable=False),  # The month the card expires
    Column('expiration_year', Integer(),
Example #23
0
def candidate_subclass(class_name,
                       args,
                       table_name=None,
                       cardinality=None,
                       values=None):
    """
    Creates and returns a Candidate subclass with the provided argument names,
    each of which refers to a Context type. Creates the table in the DB if it
    does not exist yet.

    Import using:

    .. code-block:: python

        from Asterisk.models import candidate_subclass

    :param class_name: The name of the class, should be "camel case" e.g.
        NewCandidate
    :param args: A list of names of constituent arguments, which refer to the
        Contexts--representing mentions--that comprise the candidate
    :param table_name: The name of the corresponding table in DB; if not
        provided, is converted from camel case by default, e.g. new_candidate
    :param cardinality: The cardinality of the variable corresponding to the
        Candidate. By default is 2 i.e. is a binary value, e.g. is or is not
        a true mention.
    """
    if table_name is None:
        table_name = camel_to_under(class_name)

    # If cardinality and values are None, default to binary classification
    if cardinality is None and values is None:
        values = [True, False]
        cardinality = 2

    # Else use values if present, and validate proper input
    elif values is not None:
        if cardinality is not None and len(values) != cardinality:
            raise ValueError("Number of values must match cardinality.")
        if None in values:
            raise ValueError("`None` is a protected value.")
        # Note that bools are instances of ints in Python...
        if any(
            [isinstance(v, int) and not isinstance(v, bool) for v in values]):
            raise ValueError(
                "Default usage of values is consecutive integers. Leave values unset if attempting to define values as integers."
            )
        cardinality = len(values)

    # If cardinality is specified but not values, fill in with ints
    elif cardinality is not None:
        values = list(range(cardinality))

    class_spec = (args, table_name, cardinality, values)
    if class_name in candidate_subclasses:
        if class_spec == candidate_subclasses[class_name][1]:
            return candidate_subclasses[class_name][0]
        else:
            raise ValueError('Candidate subclass ' + class_name +
                             ' already exists in memory with incompatible ' +
                             'specification: ' +
                             str(candidate_subclasses[class_name][1]))
    else:
        # Set the class attributes == the columns in the database
        class_attribs = {

            # Declares name for storage table
            '__tablename__':
            table_name,

            # Connects candidate_subclass records to generic Candidate records
            'id':
            Column(Integer,
                   ForeignKey('candidate.id', ondelete='CASCADE'),
                   primary_key=True),

            # Store values & cardinality information in the class only
            'values':
            values,
            'cardinality':
            cardinality,

            # Polymorphism information for SQLAlchemy
            '__mapper_args__': {
                'polymorphic_identity': table_name
            },

            # Helper attribute listing the argument names
            '__argnames__':
            args,
        }

        # Create named arguments, i.e. the entity mentions comprising the relation
        # mention
        # For each entity mention: id, cid ("canonical id"), and pointer to Context
        unique_args = []
        for arg in args:

            # Primary arguments are constituent Contexts, and their ids
            class_attribs[arg + '_id'] = Column(Integer,
                                                ForeignKey('context.id',
                                                           ondelete='CASCADE'),
                                                index=True)
            class_attribs[arg] = relationship(
                'Context',
                backref=backref(table_name + '_' + arg + 's',
                                cascade_backrefs=False,
                                cascade='all, delete-orphan'),
                cascade_backrefs=False,
                foreign_keys=class_attribs[arg + '_id'])
            unique_args.append(class_attribs[arg + '_id'])

            # Canonical ids, to be set post-entity normalization stage
            class_attribs[arg + '_cid'] = Column(String)

        # Add unique constraints to the arguments
        class_attribs['__table_args__'] = (UniqueConstraint(*unique_args), )

        # Create class
        C = type(class_name, (Candidate, ), class_attribs)

        # Create table in DB
        if not Asterisk_engine.dialect.has_table(Asterisk_engine, table_name):
            C.__table__.create(bind=Asterisk_engine)

        candidate_subclasses[class_name] = C, class_spec

        return C
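
A short, hypothetical usage sketch of candidate_subclass (class and argument
names are illustrative): a relation type is declared once and the same class
object is returned for any later call with an identical specification.

from Asterisk.models import candidate_subclass

# Binary candidate: default cardinality 2, values [True, False]
Spouse = candidate_subclass('Spouse', ['person1', 'person2'])

# Categorical candidate with explicit values
Sentiment = candidate_subclass('Sentiment', ['span'],
                               values=['positive', 'negative', 'neutral'])

# An identical re-declaration returns the cached class
assert candidate_subclass('Spouse', ['person1', 'person2']) is Spouse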
Example #24
0
class LocalProductCompany3(Base1, ProductCompanyFrom):
    __tablename__ = "mf_product_company2"
    __table_args__ = (UniqueConstraint('product', 'company', 'gw_link'), )
Example #25
0
from sqlalchemy import Column, String, UniqueConstraint, Enum
from sqlalchemy.orm import relationship

from fixtures.academic_degrees import EducationLevel
from provider.models.base import Base, make_join_table

provider_degree_table = make_join_table("provider", "degree")


class Degree(Base):
    acronym = Column(String(16), unique=True, nullable=False)
    name = Column(String(), nullable=False)
    level = Column(Enum(EducationLevel), nullable=False)

    providers = relationship("Provider",
                             secondary=provider_degree_table,
                             back_populates="degrees")


UniqueConstraint(Degree.acronym, Degree.name)
UniqueConstraint(Degree.acronym, Degree.level)
Example #26
0
class ServerProductCompany(Base, ProductCompanyTo):
    __tablename__ = "hub_products_test"
    __table_args__ = (UniqueConstraint('name', 'company_name', 'website'), )
Example #27
0
class User(ModelBase, BaseUser, UserMixin):
    wifi_passwd_hash = Column(String)

    # one to one from User to Account
    account_id = Column(Integer,
                        ForeignKey("account.id"),
                        nullable=False,
                        index=True)
    account = relationship("Account", backref=backref("user", uselist=False))

    unix_account_id = Column(Integer,
                             ForeignKey('unix_account.id'),
                             nullable=True,
                             unique=True)
    unix_account = relationship('UnixAccount')  # backref not really needed.

    address_id = Column(Integer,
                        ForeignKey(Address.id),
                        index=True,
                        nullable=False)
    address = relationship(Address, backref=backref("inhabitants"))

    room = relationship("Room",
                        backref=backref("users", viewonly=True),
                        sync_backref=False)

    email_forwarded = Column(Boolean, server_default='True', nullable=False)

    password_reset_token = Column(String, nullable=True)

    def __init__(self, **kwargs):
        password = kwargs.pop('password', None)
        wifi_password = kwargs.pop('wifi_password', None)
        super(User, self).__init__(**kwargs)
        if password is not None:
            self.password = password
        if wifi_password is not None:
            self.wifi_password = wifi_password

    @hybrid_property
    def has_custom_address(self):
        """Whether the user's address differs from their room's address.

        If no room is assigned, returns ``False``.
        """
        return self.address != self.room.address if self.room else False

    # noinspection PyMethodParameters
    @has_custom_address.expression
    def has_custom_address(cls):
        return and_(
            cls.room_id.isnot(None),
            exists(
                select([
                    null()
                ]).select_from(Room).where(Room.id == cls.room_id).where(
                    Room.address_id != cls.address_id)))

    @validates('login')
    def validate_login(self, _, value):
        assert not has_identity(
            self
        ), "user already in the database - cannot change login anymore!"

        return super(User, self).validate_login(_, value)

    property_groups = relationship("PropertyGroup",
                                   secondary=lambda: Membership.__table__,
                                   viewonly=True)

    @hybrid_method
    def traffic_for_days(self, days):
        from pycroft.model.traffic import TrafficVolume

        return sum(
            v.amount
            for v in TrafficVolume.q.filter_by(user_id=self.id).filter(
                TrafficVolume.timestamp >= (session.utcnow() -
                                            timedelta(days - 1)).date()))

    @traffic_for_days.expression
    def traffic_for_days(self, days):
        from pycroft.model.traffic import TrafficVolume

        return select([func.sum(TrafficVolume.amount).label('amount')]) \
            .where(TrafficVolume.timestamp >=
                   (session.utcnow() - timedelta(days - 1)).date()) \
            .where(TrafficVolume.user_id == self.id)

    #: This is a relationship to the `current_property` view filtering out
    #: the entries with `denied=True`.
    current_properties = relationship(
        'CurrentProperty',
        primaryjoin='and_(User.id == foreign(CurrentProperty.user_id),'
        '~CurrentProperty.denied)',
        viewonly=True)
    #: This is a relationship to the `current_property` view ignoring the
    #: `denied` attribute.
    current_properties_maybe_denied = relationship(
        'CurrentProperty',
        primaryjoin='User.id == foreign(CurrentProperty.user_id)',
        viewonly=True)

    @property
    def current_properties_set(self) -> Set[str]:
        """A type-agnostic property giving the granted properties as a set of string.

        Utilized in the web component's access control mechanism.
        """
        return {p.property_name for p in self.current_properties}

    @property
    def wifi_password(self):
        """Store a hash of a given plaintext passwd for the user.

        """

        if self.wifi_passwd_hash is not None and self.wifi_passwd_hash.startswith(
                clear_password_prefix):
            return self.wifi_passwd_hash.replace(clear_password_prefix, '', 1)

        raise ValueError("Cleartext password not available.")

    @hybrid_property
    def has_wifi_access(self):
        return self.wifi_passwd_hash is not None

    @wifi_password.setter
    def wifi_password(self, value):
        self.wifi_passwd_hash = cleartext_password(value)

    @staticmethod
    def verify_and_get(login, plaintext_password):
        try:
            user = User.q.filter(User.login == func.lower(login)).one()
        except NoResultFound:
            return None
        else:
            return user if user.check_password(plaintext_password) else None

    @hybrid_method
    def active_memberships(self,
                           when: Optional[Interval] = None
                           ) -> List[Membership]:
        if when is None:
            now = session.utcnow()
            when = single(now)
        return [
            m for m in self.memberships
            if when.overlaps(closed(m.begins_at, m.ends_at))
        ]

    @active_memberships.expression
    def active_memberships(cls, when=None):
        return select([Membership]).select_from(join(cls, Membership)).where(
            Membership.active(when))

    @hybrid_method
    def active_property_groups(self,
                               when: Optional[Interval] = None
                               ) -> List[PropertyGroup]:
        return object_session(self).query(PropertyGroup).join(
            Membership).filter(Membership.active(when),
                               Membership.user_id == self.id).all()

    @active_property_groups.expression
    def active_property_groups(cls, when=None):
        return select([PropertyGroup]).select_from(
            join(PropertyGroup,
                 Membership).join(cls)).where(Membership.active(when))

    @hybrid_method
    def member_of(self,
                  group: PropertyGroup,
                  when: Optional[Interval] = None) -> bool:
        return group in self.active_property_groups(when)

    @member_of.expression
    def member_of(cls, group, when=None):
        return exists(
            select([null()]).select_from(
                PropertyGroup.__table__.join(
                    Membership.__table__,
                    PropertyGroup.id == Membership.group_id)).where(
                        and_(Membership.user_id == cls.id,
                             PropertyGroup.id == group.id,
                             Membership.active(when))))

    @hybrid_method
    def has_property(self,
                     property_name: str,
                     when: Optional[Interval] = None) -> bool:
        if when is None:
            now = session.utcnow()
            when = single(now)

        prop_granted_flags = [
            group.property_grants[property_name]
            for group in self.active_property_groups(when)
            if property_name in group.property_grants
        ]

        # In case of prop_granted_flags = []: Return False
        # Else: Return True if all elements of prop_granted_flags are True
        return all(prop_granted_flags) and any(prop_granted_flags)

    @has_property.expression
    def has_property(cls, prop, when=None):
        # TODO Use joins
        property_granted_select = select(
            [null()],
            from_obj=[
                Property.__table__, PropertyGroup.__table__,
                Membership.__table__
            ]).where(
                and_(Property.name == prop,
                     Property.property_group_id == PropertyGroup.id,
                     PropertyGroup.id == Membership.group_id,
                     Membership.user_id == cls.id, Membership.active(when)))
        #.cte("property_granted_select")
        return and_(
            not_(
                exists(
                    property_granted_select.where(
                        Property.granted == false()))),
            exists(property_granted_select.where(Property.granted == true()))
        ).self_group().label("has_property_" + prop)

    @property
    def permission_level(self) -> int:
        return max((membership.group.permission_level
                    for membership in self.active_memberships()),
                   default=0)

    @property
    def email_internal(self):
        return "{}@agdsn.me".format(self.login)

    __table_args__ = (UniqueConstraint('swdd_person_id'), )
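
The hybrid attributes above work both in Python and in SQL. A minimal usage sketch, assuming a configured SQLAlchemy session named session and a property called 'network_access' (both assumptions, not from the source):

# Instance-level evaluation: runs the Python body of has_property().
user = session.query(User).first()
if user is not None and user.has_property('network_access'):
    print(user.login, "currently holds the property")

# Class-level evaluation: the .expression variant renders an SQL predicate,
# so the filtering happens inside the database.
privileged_users = (
    session.query(User)
    .filter(User.has_property('network_access'))
    .all()
)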
Example #28
0
class LocalProject1(Base1, ProjectFrom):
    __tablename__ = "cp_crawler_project"
    __table_args__ = (UniqueConstraint('source', 'company', 'product'), )
Example #29
0
class User(Base, BaseMixin, UserMixin):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    email = Column(String, unique=False)
    tel = Column(String, unique=False)
    password_hash = Column(String, nullable=False)
    activate_code = Column(String)
    otp = Column(String)
    otp_timestamp = Column(DateTime)
    last_active_time = Column(DateTime, default=datetime.datetime.utcnow)

    club_id = Column(Integer,
                     ForeignKey('club.id', ondelete='CASCADE'),
                     nullable=False)
    __table_args__ = (UniqueConstraint('email', 'club_id'), )
    roles = relationship("Role",
                         secondary=association_user_role,
                         back_populates="users")
    orders = relationship("Order",
                          backref="user",
                          cascade="all, delete",
                          order_by="desc(Order.time)")

    def has_otp(self):
        return self.otp and self.otp_timestamp

    def is_otp_expire(self):
        now = datetime.datetime.utcnow()
        total_secs = (now - self.otp_timestamp).total_seconds()
        logger.debug("seconds passed: %s", total_secs)
        return total_secs > 600

    def generate_otp(self):
        self.otp = random_digits(6)
        self.otp_timestamp = datetime.datetime.utcnow()

    def use_one_time_password(self, otp):
        if not self.otp_timestamp:
            raise ValueError(_("Invalid one time password"))
        now = datetime.datetime.utcnow()
        if (now - self.otp_timestamp).total_seconds() > 600:
            raise PasswordExpire()

        if otp != self.otp:
            raise PasswordInvalid()

        # the code matched; invalidate it so it cannot be reused
        self.otp = None
        self.otp_timestamp = None

    def total_order_value(self):
        total = 0
        for o in self.orders:
            total += o.total_price()
        return total

    def verify_passwd(self, passwd):
        if self.password_hash == passwd:
            return True

        # try otp
        if self.has_otp():
            try:
                self.use_one_time_password(passwd)
                return True
            except (ValueError, PasswordExpire, PasswordInvalid):
                pass
        return False

    def is_active(self):
        return not self.activate_code

    def activate(self, code):
        if code == self.activate_code:
            self.activate_code = ""

    def update_active_time(self):
        self.last_active_time = datetime.datetime.utcnow()  # keep consistent with the column's utcnow default
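
A short sketch of the one-time-password round trip these methods imply; the delivery helper (send_sms), the submitted_code variable, and the session are assumptions for illustration, not part of the source:

user.generate_otp()                     # stores a fresh 6-digit code and its timestamp
send_sms(user.tel, user.otp)            # hypothetical delivery helper, not defined above
session.commit()                        # assumes a configured SQLAlchemy session

# Later, when the user submits the code:
if user.verify_passwd(submitted_code):  # falls back to the one-time password path
    user.update_active_time()
    session.commit()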
Example #30
0
    name = Column(AqStr(64), nullable=False)

    vendor_id = Column(Integer, ForeignKey('vendor.id',
                                           name='model_vendor_fk'),
                       nullable=False)
    machine_type = Column(AqStr(16), nullable=False)

    creation_date = deferred(Column(DateTime, default=datetime.now, nullable=False))
    comments = deferred(Column(String(255)))

    vendor = relation(Vendor)

model = Model.__table__
model.primary_key.name = 'model_pk'

model.append_constraint(UniqueConstraint('name', 'vendor_id',
                                         name='model_name_vendor_uk'))

table = model

def populate(sess, *args, **kw):
    mlist = sess.query(Model).all()

    if not mlist:

        f = [['ibm', 'hs20-884345u', 'blade'],
            ['ibm', 'ls20-8850pap', 'blade'],
            ['ibm', 'hs21-8853l5u', 'blade'],
            ['ibm', 'bce', 'chassis'],
            ['ibm', 'bch', 'chassis'],
            ['ibm', 'dx320-6388ac1', 'rackmount'], #one of the 4 in 1 types
            ['ibm', 'dx320-6388dau', 'rackmount'],