def define_tables(cls, metadata):
        import json

        class JSONEncodedDict(TypeDecorator):
            impl = VARCHAR(50)

            def process_bind_param(self, value, dialect):
                if value is not None:
                    value = json.dumps(value)

                return value

            def process_result_value(self, value, dialect):
                if value is not None:
                    value = json.loads(value)
                return value

        MutableDict = cls._type_fixture()
        MutableDict.associate_with(JSONEncodedDict)

        Table('foo', metadata,
            Column('id', Integer, primary_key=True,
                            test_needs_autoincrement=True),
            Column('data', JSONEncodedDict),
            Column('unrelated_data', String(50))
        )
    def setup_mappers(cls):
        foo = cls.tables.foo
        subfoo = cls.tables.subfoo

        mapper(Foo, foo)
        mapper(SubFoo, subfoo, inherits=Foo)
        MutableDict.associate_with_attribute(Foo.data)
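
A minimal sketch of what the association above buys (illustrative assumptions: SQLAlchemy 1.4+, an in-memory SQLite engine, and a persisted Foo row with id=1):

from sqlalchemy import create_engine
from sqlalchemy.orm import Session

engine = create_engine("sqlite://")
with Session(engine) as session:
    foo = session.get(Foo, 1)        # assumes a Foo row with id=1 exists
    foo.data["key"] = "new value"    # in-place mutation of the dict
    assert foo in session.dirty      # MutableDict flagged the parent dirty
    session.commit()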
Example #3
def upgrade():
    meta_catalogstar = sa.Column('meta', MutableDict.as_mutable(JSON),
                                 default={})
    op.add_column('catalog_star', meta_catalogstar)

    meta_obs = sa.Column('meta', MutableDict.as_mutable(JSON),
                         default={})
    op.add_column('observation', meta_obs)
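
For completeness, a matching downgrade would drop the columns again; a minimal sketch using the same Alembic op API as above:

def downgrade():
    # reverse of upgrade(): remove both 'meta' columns
    op.drop_column('observation', 'meta')
    op.drop_column('catalog_star', 'meta')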
Example #4
def associate_with(sqltype):
    # TODO(leizhang) When we removed sqlalchemy 0.7 dependence
    # we can import MutableDict directly and remove ./mutable.py
    try:
        from sqlalchemy.ext.mutable import MutableDict as sa_MutableDict
        sa_MutableDict.associate_with(Json)
    except ImportError:
        from heat.db.sqlalchemy.mutable import MutableDict
        MutableDict.associate_with(Json)
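
The ./mutable.py fallback referenced in the comment is, in shape, the classic recipe from the SQLAlchemy documentation: subclass Mutable and dict and call changed() on every mutation. A sketch of that recipe (not necessarily Heat's exact file):

from sqlalchemy.ext.mutable import Mutable

class MutableDict(Mutable, dict):
    @classmethod
    def coerce(cls, key, value):
        # convert plain dicts to MutableDict when assigned to an attribute
        if not isinstance(value, MutableDict):
            if isinstance(value, dict):
                return MutableDict(value)
            return Mutable.coerce(key, value)
        return value

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)
        self.changed()    # notify the ORM that the value mutated

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self.changed()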
Example #5
    def document(self):
        d = MutableDict()
        d.update(self._document)
        d.update({
            'kind': self.kind,
            'id': self.id,
            'source': self.source,
            'timestamp': self.timestamp,
        })
        return d
Example #6
    def define_tables(cls, metadata):
        MutableDict = cls._type_fixture()
        MutableDict.associate_with(PickleType)

        Table('foo', metadata,
            Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
            Column('skip', PickleType),
            Column('data', PickleType),
            Column('unrelated_data', String(50))
        )
Example #7
    def define_tables(cls, metadata):
        MutableDict = cls._type_fixture()
        MutableDict.associate_with(PickleType)

        Table(
            "foo",
            metadata,
            Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
            Column("skip", PickleType),
            Column("data", PickleType),
            Column("unrelated_data", String(50)),
        )
Example #8
def downgrade():
    op.drop_column('catalog', 'metajson')
    meta = sa.Column('meta', MutableDict.as_mutable(HSTORE),
                     nullable=False,
                     default={},
                     index=True)
    op.add_column('catalog', meta)
Example #9
    def define_tables(cls, metadata):
        import json

        class JSONEncodedDict(TypeDecorator):
            impl = VARCHAR(50)

            def process_bind_param(self, value, dialect):
                if value is not None:
                    value = json.dumps(value)

                return value

            def process_result_value(self, value, dialect):
                if value is not None:
                    value = json.loads(value)
                return value

        MutableDict = cls._type_fixture()

        Table(
            "foo",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("data", MutableDict.as_mutable(JSONEncodedDict)),
            Column("non_mutable_data", JSONEncodedDict),
            Column("unrelated_data", String(50)),
        )
Example #10
    def define_tables(cls, metadata):
        MutableDict = cls._type_fixture()

        mutable_pickle = MutableDict.as_mutable(PickleType)
        Table(
            'foo', metadata,
            Column(
                'id', Integer, primary_key=True,
                test_needs_autoincrement=True),
            Column('data', mutable_pickle, default={}),
        )
Example #11
    def __init__(self, name=u"", type=u"node", id=None, schema=None, attrs=None, system_attrs=None, orderpos=None):
        self.name = name
        if not isinstance(type, unicode):
            warn("type arg of Node should be unicode (hint: don't create nodes with Node(type='{}')!)".format(type), DeprecationWarning)

        if "/" in type:
            warn("use separate type and schema parameters instead of 'type/schema'", DeprecationWarning)
            type, schema = type.split("/")

        self.type = type
        self.attrs = MutableDict()
        self.system_attrs = MutableDict()
        if id:
            self.id = id
        if schema:
            self.schema = schema
        if attrs:
            self.attrs.update(attrs)
        if system_attrs:
            self.system_attrs.update(system_attrs)
        if orderpos:
            self.orderpos = orderpos
Example #12
def get_data_element_postgres_extensions(task, indexes):
    if indexes:
        q = task.__class__.data[indexes]
    else:
        q = task.__class__.data

    s = object_session(task)
    tup = s.query(q).filter_by(id=task.id).one()
    return tup[0]


class json_array_length(GenericFunction):
    type = Integer


def get_data_size_postgres_extensions(task, indexes):
    if indexes:
        q = task.__class__.data[indexes]
    else:
        q = task.__class__.data

    s = object_session(task)
    tup = s.query(json_array_length(q)).filter_by(id=task.id).one()
    return tup[0]


MutableJSONDict = MutableDict.as_mutable(psqlJSON)
JSON = psqlJSON
get_data_element = get_data_element_postgres_extensions
get_data_size = get_data_size_postgres_extensions
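
A sketch of a model using the MutableJSONDict alias defined above (Base and Task are assumptions, not part of the original module):

from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Task(Base):
    __tablename__ = 'task'
    id = Column(Integer, primary_key=True)
    # postgres JSON column with top-level change tracking via MutableDict
    data = Column(MutableJSONDict)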
Example #13
class User(db.Model, DomainObject, UserMixin):
    '''A registered user of the PYBOSSA system'''

    __tablename__ = 'user'

    id = Column(Integer, primary_key=True)
    #: UTC timestamp of the user when it's created.
    created = Column(Text, default=make_timestamp)
    email_addr = Column(Unicode(length=254), unique=True, nullable=False)
    #: Name of the user (this is used as the nickname).
    name = Column(Unicode(length=254), unique=True, nullable=False)
    #: Fullname of the user.
    fullname = Column(Unicode(length=500), nullable=False)
    #: Language used by the user in the PYBOSSA server.
    locale = Column(Unicode(length=254), default=u'en', nullable=False)
    api_key = Column(String(length=36), default=make_uuid, unique=True)
    passwd_hash = Column(Unicode(length=254), unique=True)
    ldap = Column(Unicode, unique=True)
    admin = Column(Boolean, default=False)
    pro = Column(Boolean, default=False)
    privacy_mode = Column(Boolean, default=True, nullable=False)
    category = Column(Integer)
    flags = Column(Integer)
    twitter_user_id = Column(BigInteger, unique=True)
    facebook_user_id = Column(BigInteger, unique=True)
    google_user_id = Column(String, unique=True)
    ckan_api = Column(String, unique=True)
    newsletter_prompted = Column(Boolean, default=False)
    valid_email = Column(Boolean, default=False)
    confirmation_email_sent = Column(Boolean, default=False)
    subscribed = Column(Boolean, default=False)
    consent = Column(Boolean, default=False)
    info = Column(MutableDict.as_mutable(JSONB), default=dict())
    user_pref = Column(JSONB)

    ## Relationships
    task_runs = relationship(TaskRun, backref='user')
    projects = relationship(Project, backref='owner')
    blogposts = relationship(Blogpost, backref='owner')

    def get_id(self):
        '''id for login system. equates to name'''
        return self.name

    def set_password(self, password):
        self.passwd_hash = signer.generate_password_hash(password)

    def check_password(self, password):
        # OAuth users do not have a password
        if self.passwd_hash:
            return signer.check_password_hash(self.passwd_hash, password)
        return False

    @classmethod
    def public_attributes(self):
        """Return a list of public attributes."""
        return [
            'created', 'name', 'fullname', 'info', 'n_answers',
            'registered_ago', 'rank', 'score', 'locale'
        ]

    @classmethod
    def public_info_keys(self):
        """Return a list of public info keys."""
        default = ['avatar', 'container', 'extra', 'avatar_url']
        extra = current_app.config.get('USER_INFO_PUBLIC_FIELDS')
        if extra:
            return list(set(default).union(set(extra)))
        else:
            return default
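
One caveat in the model above: default=dict() evaluates once at class-definition time, so every insert shares the same default dict instance. The usual SQLAlchemy idiom is to pass the callable itself:

# each INSERT calls dict() and gets a fresh, private default
info = Column(MutableDict.as_mutable(JSONB), default=dict)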
Example #14
class AnsibleScript(Script):

    __tablename__ = 'AnsibleScript'

    id = Column(Integer, ForeignKey('Script.id'), primary_key=True)
    playbook_path = Column(String)
    options = Column(MutableDict.as_mutable(PickleType), default={})

    __mapper_args__ = {
        'polymorphic_identity': 'AnsibleScript',
    }

    def __init__(self, playbook_path, **data):
        name = data['name'][0]
        super(AnsibleScript, self).__init__(name)
        self.playbook_path = playbook_path
        self.options = {}
        for key, value in data.items():
            if key in ansible_options:
                self.options[key] = value[0] if value else None

    def job(self, task):
        loader = DataLoader()
        hosts = [node.ip_address for node in task.nodes]
        temporary_file = NamedTemporaryFile(delete=False)
        temporary_file.write('\n'.join(hosts))
        temporary_file.close()

        # sources is a list of paths to inventory files
        inventory = InventoryManager(loader=loader,
                                     sources=temporary_file.name)
        variable_manager = VariableManager(loader=loader, inventory=inventory)

        options_dict = {
            'listtags': False,
            'listtasks': False,
            'listhosts': False,
            'syntax': False,
            'connection': 'ssh',
            'module_path': None,
            'forks': 100,
            'remote_user': None,
            'private_key_file': None,
            'ssh_common_args': None,
            'ssh_extra_args': None,
            'sftp_extra_args': None,
            'scp_extra_args': None,
            'become': False,
            'become_method': None,
            'become_user': None,
            'verbosity': None,
            'check': False,
            'diff': False
        }

        Options = namedtuple('Options', list(options_dict))
        passwords = {}
        playbook_executor = PlaybookExecutor(playbooks=[self.playbook_path],
                                             inventory=inventory,
                                             variable_manager=variable_manager,
                                             loader=loader,
                                             options=Options(**options_dict),
                                             passwords=passwords)

        results = playbook_executor.run()
        return results
Example #15
class RemoteAccount(db.Model):
    """Storage for remote linked accounts."""

    __tablename__ = 'oauthclient_remoteaccount'

    __table_args__ = (db.UniqueConstraint('user_id', 'client_id'), )

    #
    # Fields
    #
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    """Primary key."""

    user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
    """Local user linked with a remote app via the access token."""

    client_id = db.Column(db.String(255), nullable=False)
    """Client ID of remote application (defined in OAUTHCLIENT_REMOTE_APPS)."""

    extra_data = db.Column(MutableDict.as_mutable(JSONType), nullable=False)
    """Extra data associated with this linked account."""

    #
    # Relationships properties
    #
    user = db.relationship(User, backref='remote_accounts')
    """SQLAlchemy relationship to user."""
    @classmethod
    def get(cls, user_id, client_id):
        """Get RemoteAccount object for user.

        :param user_id: User id
        :param client_id: Client id.
        :returns: A :class:`invenio_oauthclient.models.RemoteAccount` instance.
        """
        return cls.query.filter_by(
            user_id=user_id,
            client_id=client_id,
        ).first()

    @classmethod
    def create(cls, user_id, client_id, extra_data):
        """Create new remote account for user.

        :param user_id: User id.
        :param client_id: Client id.
        :param extra_data: JSON-serializable dictionary of any extra data that
            needs to be save together with this link.
        :returns: A :class:`invenio_oauthclient.models.RemoteAccount` instance.
        """
        with db.session.begin_nested():
            account = cls(user_id=user_id,
                          client_id=client_id,
                          extra_data=extra_data or dict())
            db.session.add(account)
        return account

    def delete(self):
        """Delete remote account together with all stored tokens."""
        with db.session.begin_nested():
            db.session.delete(self)

    def __repr__(self):
        """String representation for model."""
        return 'Remote Account <id={0.id}, user_id={0.user.id}>'.format(self)
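
A hedged usage sketch of the helpers above (the user_id, client_id, and extra_data values are illustrative):

# fetch or create a linked account, then mutate extra_data in place;
# MutableDict ensures the mutation is persisted on commit
account = RemoteAccount.get(user_id=1, client_id='my-app')
if account is None:
    account = RemoteAccount.create(user_id=1, client_id='my-app',
                                   extra_data={'scope': 'read'})
account.extra_data['refreshed'] = True    # tracked by MutableDict
db.session.commit()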
Example #16
class Node(DeclarativeBase, NodeMixin):

    """Base class for Nodes which holds all SQLAlchemy fields definitions
    """
    __metaclass__ = BaseNodeMeta
    __tablename__ = "node"
    __versioned__ = {
        "base_classes": (NodeVersionMixin, MtVersionBase, DeclarativeBase),
        "exclude": ["subnode", "system_attrs"]
    }

    id = C(Integer, node_id_seq, server_default=node_id_seq.next_value(), primary_key=True)
    type = C(Unicode, index=True)
    schema = C(Unicode, index=True)
    name = C(Unicode, index=True)
    orderpos = C(Integer, default=1, index=True)
    fulltext = deferred(C(Unicode))
    # indicate that this node is a subnode of a content type node
    # subnode exists just for performance reasons and is updated by the database
    # unversioned
    subnode = C(Boolean, server_default="false")

    attrs = deferred(C(MutableDict.as_mutable(JSONB)))
    # Migration from old mediatum: all attributes starting with "system." go here.
    # We should get rid of most (all?) such attributes in the future.
    # unversioned
    system_attrs = deferred(C(MutableDict.as_mutable(JSONB)))

    @hybrid_property
    def a_expr(self):
        """ see: Attributes"""
        raise Exception("node.a_expr")
        if "_attributes_accessor" not in self.__dict__:
            setattr(self, "_attributes_accessor", Attributes(self, "attrs"))
        return self._attributes_accessor

    @a_expr.expression
    def a(self):
        """ see: AttributesExpression"""
        if "_attributes_accessor" not in self.__dict__:
            setattr(self, "_attributes_accessor", AttributesExpressionAdapter(self, "attrs"))
        return self._attributes_accessor

    @a.setter
    def a_set(self, value):
        raise NotImplementedError("immutable!")


    @hybrid_property
    def sys(self):
        """ see: Attributes"""
        if "_system_attributes_accessor" not in self.__dict__:
            setattr(self, "_system_attributes_accessor", Attributes(self, "system_attrs"))
        return self._system_attributes_accessor

    @sys.expression
    def sys_expr(self):
        """ see: AttributesExpression"""
        if "_system_attributes_accessor" not in self.__dict__:
            setattr(self, "_system_attributes_accessor", AttributesExpressionAdapter(self, "system_attrs"))
        return self._system_attributes_accessor

    @a.setter
    def sys_set(self, value):
        raise NotImplementedError("immutable!")


    def __init__(self, name=u"", type=u"node", id=None, schema=None, attrs=None, system_attrs=None, orderpos=None):
        self.name = name
        if not isinstance(type, unicode):
            warn("type arg of Node should be unicode (hint: don't create nodes with Node(type='{}')!)".format(type), DeprecationWarning)

        if "/" in type:
            warn("use separate type and schema parameters instead of 'type/schema'", DeprecationWarning)
            type, schema = type.split("/")

        self.type = type
        self.attrs = MutableDict()
        self.system_attrs = MutableDict()
        if id:
            self.id = id
        if schema:
            self.schema = schema
        if attrs:
            self.attrs.update(attrs)
        if system_attrs:
            self.system_attrs.update(system_attrs)
        if orderpos:
            self.orderpos = orderpos

    @property
    def slow_content_children_for_all_subcontainers(self):
        """
        !!! very slow, use content_children_for_all_subcontainers instead!!!
        Collects all Content nodes in all subcontainers of this node.
        This excludes content nodes that are children of other content nodes.
        """
        warn("very slow, use content_children_for_all_subcontainers instead", DeprecationWarning)
        from contenttypes.data import Content
        from core import db
        sq = _subquery_subtree_container(self)
        query = db.query(Content).\
            join(t_noderelation, Node.id == t_noderelation.c.cid).\
            filter(t_noderelation.c.nid.in_(sq) | (t_noderelation.c.nid == self.id)).\
            filter(t_noderelation.c.distance == 1)

        return query

    @property
    def content_children_for_all_subcontainers(self):
        """Collects all Content nodes in all subcontainers of this node.
        This excludes content nodes that are children of other content nodes.
        """
        from contenttypes.data import Content
        from core import db
        sq = _subquery_subtree_distinct(self)
        return object_session(self).query(Content).filter(Node.id.in_(sq)).filter_by(subnode=False)

    @property
    def content_children_for_all_subcontainers_with_duplicates(self):
        """Collects all Content nodes in all subcontainers of this node.
        This excludes content nodes that are children of other content nodes.
        This method can be much faster than content_children_for_all_subcontainers, but may return fewer nodes than expected (when using limit).
        Don't use distinct() on this method, use content_children_for_all_subcontainers instead if you need it!
        """
        from contenttypes.data import Content
        nr = t_noderelation
        # TODO: check if it's better to use the _subquery_subtree() here
        return object_session(self).query(Content).filter_by(subnode=False).join(nr, Content.id == nr.c.cid).filter(nr.c.nid==self.id)

    @property
    def content_children_count_for_all_subcontainers(self):
        if USE_CACHED_CHILDCOUNT:
            return exec_sqlfunc(object_session(self), mediatumfunc.count_content_children_for_all_subcontainers(self.id))
        else:
            return self.content_children_for_all_subcontainers.count()


    def all_children_by_query(self, query):
        sq = _subquery_subtree_distinct(self)
        query = query.filter(Node.id.in_(sq))
        return query

    @staticmethod
    def req_has_access_to_node_id(node_id, accesstype, req=None, date=func.current_date()):
        # XXX: the database-independent code could move to core.node
        from core.transition import request
        from core.users import user_from_session

        if req is None:
            req = request

        user = user_from_session(req.session)

        # XXX: like in mysql version, what's the real solution?
        try:
            ip = IPv4Address(req.remote_addr)
        except AddressValueError:
            logg.warn("illegal IP address %s, refusing IP-based access", req.remote_addr)
            ip = None

        return Node.has_access_to_node_id(node_id, accesstype, user, ip, date)

    @staticmethod
    def has_access_to_node_id(node_id, accesstype, user=None, ip=None, date=None):
        # XXX: the database-independent code could move to core.node
        from core import db
        from core.users import get_guest_user

        if user is None:
            user = get_guest_user()

        if user.is_admin:
            return True

        if ip is None:
            ip = IPv4Address("0.0.0.0")

        if date is None:
            date = func.current_date()

        accessfunc = access_funcs[accesstype]
        group_ids = user.group_ids
        access = accessfunc(node_id, group_ids, ip, date)
        return db.session.execute(select([access])).scalar()

    def _parse_searchquery(self, searchquery):
        """
        * `searchquery` is a string type: Parses `searchquery` and transforms it into the search tree.
        * `searchquery` is already in search tree form: the work is already done; return it unchanged.
        """
        from core.search import parse_searchquery
        if isinstance(searchquery, SearchTreeElement):
            searchtree = searchquery
        else:
            searchtree = parse_searchquery(searchquery)
        return searchtree

    def _search_query_object(self):
        """Builds the query object that is used as basis for content node searches below this node"""
        from contenttypes import Content, Collections
        q = object_session(self).query
        sq = _subquery_subtree(self)
        if self == q(Collections).one():
            # no need to filter, the whole tree can be searched
            base_query = q(Content)
        else:
            base_query = q(Content).filter(Node.id.in_(sq))
        return base_query

    def search(self, searchquery, languages=None):
        """Creates a search query.
        :param searchquery: query in search language or parsed query (search tree) as `SearchTreeElement`:
        :param languages: sequence of language config strings matching Fts.config
        :returns: Node Query
        """
        from core.database.postgres.search import apply_searchtree_to_query
        searchtree = self._parse_searchquery(searchquery)
        query = self._search_query_object()
        return apply_searchtree_to_query(query, searchtree, languages)

    def search_multilang(self, searchquery, languages=None):
        """Creates search queries for a sequence of languages.
        :param searchquery: query in search language or parsed query (search tree) as `SearchTreeElement`:
        :param languages: language config strings matching Fts.config
        :returns: list of Node Query
        """
        from core.database.postgres.search import apply_searchtree_to_query
        searchtree = self._parse_searchquery(searchquery)
        query = self._search_query_object()
        return [apply_searchtree_to_query(query, searchtree, l) for l in languages]

    @property
    def tagged_versions(self):
        Transaction = versioning_manager.transaction_cls
        TransactionMeta = versioning_manager.transaction_meta_cls
        version_cls = version_class(self.__class__)
        return (self.versions.join(Transaction, version_cls.transaction_id == Transaction.id).join(Transaction.meta_relation).
                filter(TransactionMeta.key == u"tag"))

    def get_tagged_version(self, tag):
        return self.tagged_versions.filter_by(value=tag).scalar()

    def get_published_version(self):
        Transaction = versioning_manager.transaction_cls
        TransactionMeta = versioning_manager.transaction_meta_cls
        version_cls = version_class(self.__class__)
        published_versions = self.versions.join(Transaction, version_cls.transaction_id == Transaction.id).\
                join(Transaction.meta_relation).filter(TransactionMeta.key == u"publish")
        return published_versions.scalar()

    def new_tagged_version(self, tag=None, comment=None, publish=None, user=None):
        """Returns a context manager that manages the creation of a new tagged node version.

        :param tag: a unicode tag assigned to the transaction belonging to the new version.
            If none is given, assume that we want to add a new numbered version.
            The tag will be the incremented version number of the last numbered version.
            If no numbered version is present, assign 1 to the last version and 2 to the new version.

        :param comment: optional comment for the transaction
        :param user: user that will be associated with the transaction.
        """
        node = self

        class VersionContextManager(object):

            def __enter__(self):
                self.session = s = object_session(node)
                if s.new or s.dirty:
                    raise Exception("Refusing to create a new tagged node version. Session must be clean!")

                uow = versioning_manager.unit_of_work(s)
                tx = uow.create_transaction(s)

                if user is not None:
                    tx.user = user

                if tag:
                    if node.get_tagged_version(tag):
                        raise ValueError("tag already exists")
                    tx.meta[u"tag"] = tag
                elif publish:
                    if node.get_published_version():
                        raise ValueError("publish version already exists")
                    tx.meta[u"publish"] = publish
                else:
                    NodeVersion = version_class(node.__class__)
                    # in case you were wondering: order_by(None) resets the default order_by
                    last_tagged_version = node.tagged_versions.order_by(None).order_by(NodeVersion.transaction_id.desc()).first()
                    if last_tagged_version is not None:
                        next_version = int(last_tagged_version.tag) + 1
                    else:
                        node.versions[-1].tag = u"1"
                        next_version = 2

                    tx.meta[u"tag"] = unicode(next_version)

                if comment:
                    tx.meta[u"comment"] = comment

                return tx

            def __exit__(self, exc_type, exc_value, traceback):
                if exc_type:
                    self.session.rollback()
                else:
                    self.session.commit()

        return VersionContextManager()

    def is_descendant_of(self, node):
        return exec_sqlfunc(object_session(self), mediatumfunc.is_descendant_of(self.id, node.id))

    def _get_nearest_ancestor_by_type(self, ancestor_type):
        """Returns a nearest ancestor of `ancestor_type`.
        If none is found, return `Collections` as default.
        It's undefined which one will be returned if more than one nearest ancestor is found.
        """
        nr = t_noderelation
        q = object_session(self).query

        maybe_ancestor = (q(ancestor_type)
                .join(nr, Node.id == nr.c.nid)
                .filter_by(cid=self.id)
                .order_by(nr.c.distance).limit(1).first())

        if maybe_ancestor is None:
            from contenttypes import Collections
            return q(Collections).one()

        return maybe_ancestor

    def get_container(self):
        from contenttypes import Container
        return self._get_nearest_ancestor_by_type(Container)

    def get_collection(self):
        from contenttypes import Collection
        return self._get_nearest_ancestor_by_type(Collection)

    @property
    def has_files(self):
        return len(self.file_objects) > 0

    __mapper_args__ = {
        'polymorphic_identity': 'node',
        'polymorphic_on': type
    }

    def to_yaml(self):
        """overwrite default DeclarativeBase.to_yaml method because we need to convert MutableDicts first
        """
        node_dict = self.to_dict()
        node_dict["attrs"] = dict(node_dict["attrs"])
        node_dict["system_attrs"] = dict(node_dict["system_attrs"])
        return pyaml.dump(node_dict)
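
A sketch of how the VersionContextManager returned by new_tagged_version is used (node and user are assumptions; the session must be clean on entry, and the transaction commits on clean exit or rolls back on exception):

with node.new_tagged_version(comment=u"fixed metadata", user=user):
    node.attrs[u"title"] = u"corrected title"    # tracked by MutableDict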
Example #17
def get_data_element_postgres_extensions(task, indexes):
    if indexes:
        q = task.__class__.data[indexes]
    else:
        q = task.__class__.data

    s = object_session(task)
    tup = s.query(q).filter_by(id=task.id).one()
    return tup[0]


class json_array_length(GenericFunction):
    type = Integer


def get_data_size_postgres_extensions(task, indexes):
    if indexes:
        q = task.__class__.data[indexes]
    else:
        q = task.__class__.data

    s = object_session(task)
    tup = s.query(json_array_length(q)).filter_by(id=task.id).one()
    return tup[0]

MutableJSONDict = MutableDict.as_mutable(psqlJSON)
JSON = psqlJSON
get_data_element = get_data_element_postgres_extensions
get_data_size = get_data_size_postgres_extensions
Example #18
class Member(Base):
    __tablename__ = 'members'
    guild_id = Column(String, ForeignKey('guilds.id'), primary_key=True)
    id = Column(String, ForeignKey('users.id'), primary_key=True)
    json = Column(MutableDict.as_mutable(JSON), default={}, nullable=False)
Example #19
        if value is not None:
            value = json.dumps(value)

        return value

    def process_literal_param(self, value, dialect):
        return value

    def process_result_value(self, value, dialect):
        try:
            return json.loads(value)
        except (ValueError, TypeError):
            return None


Json = MutableDict.as_mutable(JSONEncodedDict)


class User(Base):
    __tablename__ = 'user'
    column_id = Column(Integer, primary_key=True, autoincrement=True)
    user_name = Column(String(32), nullable=False, unique=True)
    email = Column(String(64), nullable=False, unique=True)
    category = Column(String(128))
    endpoint = Column(String(255))
    endpoint_data = Column(Text())


class Monitor(Base):
    __tablename__ = 'monitor'
    column_id = Column(Integer, primary_key=True, autoincrement=True)
Example #20
class Project(db.Model, DomainObject):
    '''A microtasking Project to which Tasks are associated.
    '''

    __tablename__ = 'project'

    #: ID of the project
    id = Column(Integer, primary_key=True)
    #: UTC timestamp when the project is created
    created = Column(Text, default=make_timestamp)
    #: UTC timestamp when the project is updated (or any of its relationships)
    updated = Column(Text, default=make_timestamp, onupdate=make_timestamp)
    #: Project name
    name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project slug for the URL
    short_name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project description
    description = Column(Unicode(length=255), nullable=False)
    #: Project long description
    long_description = Column(UnicodeText)
    #: Project webhook
    webhook = Column(Text)
    #: If the project allows anonymous contributions
    allow_anonymous_contributors = Column(Boolean, default=True)
    #: If the project is published
    published = Column(Boolean, nullable=False, default=False)
    # If the project is hidden
    hidden = Column(Boolean, default=False)
    # If the project is featured
    featured = Column(Boolean, nullable=False, default=False)
    # Secret key for project
    secret_key = Column(Text, default=make_uuid)
    # Zip download
    zip_download = Column(Boolean, default=True)
    # If the project owner has been emailed
    contacted = Column(Boolean, nullable=False, default=False)
    #: Project owner_id
    owner_id = Column(Integer, ForeignKey('user.id'), nullable=False)
    #: Project Category
    category_id = Column(Integer, ForeignKey('category.id'), nullable=False)
    #: Project info field formatted as JSONB
    info = Column(MutableDict.as_mutable(JSONB), default=dict())
    #: If emails are sent to users about new tasks
    email_notif = Column(Boolean, default=False)

    tasks = relationship(Task, cascade='all, delete, delete-orphan', backref='project')
    task_runs = relationship(TaskRun, backref='project',
                             cascade='all, delete-orphan',
                             order_by='TaskRun.finish_time.desc()')
    category = relationship(Category)
    blogposts = relationship(Blogpost, cascade='all, delete-orphan', backref='project')
    owners_ids = Column(MutableList.as_mutable(ARRAY(Integer)), default=list())

    def needs_password(self):
        return self.get_passwd_hash() is not None

    def get_passwd_hash(self):
        return self.info.get('passwd_hash')

    def set_password(self, password):
        if len(password) > 1:
            self.info['passwd_hash'] = signer.generate_password_hash(password)
            return True
        self.info['passwd_hash'] = None
        return False

    def check_password(self, password):
        if self.needs_password():
            return signer.check_password_hash(self.get_passwd_hash(), password)
        return False

    def has_autoimporter(self):
        return self.get_autoimporter() is not None

    def get_autoimporter(self):
        return self.info.get('autoimporter')

    def set_autoimporter(self, new=None):
        self.info['autoimporter'] = new

    def delete_autoimporter(self):
        del self.info['autoimporter']

    def has_presenter(self):
        if current_app.config.get('DISABLE_TASK_PRESENTER') is True:
            return True
        else:
            return self.info.get('task_presenter') not in ('', None)

    def get_default_n_answers(self):
        return self.info.get('default_n_answers', 1)

    def set_default_n_answers(self, default_n_answers):
        self.info['default_n_answers'] = default_n_answers

    @classmethod
    def public_attributes(self):
        """Return a list of public attributes."""
        return ['id', 'description', 'info', 'n_tasks', 'n_volunteers', 'name',
                'overall_progress', 'short_name', 'created', 'category_id',
                'long_description', 'last_activity', 'last_activity_raw',
                'n_task_runs', 'n_results', 'owner', 'updated', 'featured',
                'owner_id', 'n_completed_tasks', 'n_blogposts', 'owners_ids',
                'published']

    @classmethod
    def public_info_keys(self):
        """Return a list of public info keys."""
        default = ['container', 'thumbnail', 'thumbnail_url',
                   'tutorial', 'sched']
        extra = current_app.config.get('PROJECT_INFO_PUBLIC_FIELDS')
        if extra:
            return list(set(default).union(set(extra)))
        else:
            return default

    def get_presenter_field_set(self):
        fields = set()
        task_presenter = self.info.get('task_presenter')

        if not task_presenter:
            return fields

        search_backward_stop = 0
        for match in re.finditer(r'\.info\.([a-zA-Z0-9_]+)', task_presenter):
            linebreak_index = task_presenter.rfind(
                '\n', search_backward_stop, match.start())
            if linebreak_index > -1:
                search_start = linebreak_index
            else:
                search_start = search_backward_stop
            if task_presenter.rfind('//', search_start, match.start()) > -1:
                continue

            comment_start = task_presenter.rfind(
                '/*', search_backward_stop, match.start())
            if comment_start > -1:
                search_backward_stop = comment_start
                comment_end = task_presenter.rfind(
                    '*/', search_backward_stop, match.start())
                if comment_end < 0:
                    continue
            field = match.group(1)
            if not field.endswith('__upload_url'):
                fields.add(field)
            search_backward_stop = match.end()

        return fields

    def set_project_users(self, users):
        from pybossa.cache.users import get_users_access_levels
        from pybossa.data_access import can_assign_user

        valid_users = set([])
        proj_levels = self.info.get('data_access', [])
        if not proj_levels:
            return

        users = get_users_access_levels(users)
        for user in users:
            user_levels = user.get('data_access', [])
            if can_assign_user(proj_levels, user_levels):
                valid_users.add(user['id'])
        self.info['project_users'] = list(valid_users)

    def get_project_users(self):
        return self.info.get('project_users', [])

    def get_quiz(self):
        quiz = self.info.get(
            'quiz',
            {
                'enabled': False,
                'passing': 0,
                'questions': 0,
                'short_circuit': True,
                'completion_mode': 'short_circuit'
            }
        )

        return quiz

    def set_quiz(self, quiz):
        self.info['quiz'] = quiz

    def get_gold_task_probability(self):
        return self.info.get('sched_gold_task_probability', .1)

    def set_gold_task_probability(self, value):
        self.info['sched_gold_task_probability'] = float(value)
Example #21
import sqlalchemy_utils
import sqlalchemy_utils.types.json

from sqlalchemy_json import NestedMutable as _NestedMutable


class MutableJson(sqlalchemy.types.JSON):
    """JSON type for SQLAlchemy with change tracking at top level."""


class NestedMutableJson(sqlalchemy.types.JSON):
    """JSON type for SQLAlchemy with nested change tracking."""


_MutableDict.associate_with(MutableJson)
_NestedMutable.associate_with(NestedMutableJson)

# Monkey patch sqlalchemy_utils serializer.  Note that sqlalchemy_json inherits
# from the sqlalchemy_utils data type - so setting the serializer there takes
# care of both instances.
#
# TODO:
# 1. This patching should maybe be done differently -- maybe override the
#    JsonType-method for serializing to the db?
# 2. We might want to implement our own Json serializer to deal with custom
#    data types. The flask one simply extends the stdlib json module with
#    support for serializing datetime.datatime and uuid.UUID
# 3. Note that JsonType only uses the json-implementation for serializing to
#    json-strings for all db-engines *but* postgres -- if using postgres, json
#    serialization is delegated to the engine, so we'll have to make sure to
Example #22
class Annotation(Base):

    """Model class representing a single annotation."""

    __tablename__ = 'annotation'
    __table_args__ = (
        # Tags are stored in an array-type column, and indexed using a
        # generalised inverted index. For more information on the use of GIN
        # indices for array columns, see:
        #
        #   http://www.databasesoup.com/2015/01/tag-all-things.html
        #   http://www.postgresql.org/docs/9.5/static/gin-intro.html
        #
        sa.Index('ix__annotation_tags', 'tags', postgresql_using='gin'),
        sa.Index('ix__annotation_updated', 'updated'),
    )

    #: Annotation ID: these are stored as UUIDs in the database, and mapped
    #: transparently to a URL-safe Base64-encoded string.
    id = sa.Column(types.URLSafeUUID,
                   server_default=sa.func.uuid_generate_v1mc(),
                   primary_key=True)

    #: The timestamp when the annotation was created.
    created = sa.Column(sa.DateTime,
                        default=datetime.datetime.utcnow,
                        server_default=sa.func.now(),
                        nullable=False)

    #: The timestamp when the user edited the annotation last.
    updated = sa.Column(sa.DateTime,
                        server_default=sa.func.now(),
                        default=datetime.datetime.utcnow,
                        nullable=False)

    #: The full userid (e.g. 'acct:[email protected]') of the owner of this
    #: annotation.
    userid = sa.Column(sa.UnicodeText,
                       nullable=False,
                       index=True)
    #: The string id of the group in which this annotation is published.
    #: Defaults to the global public group, "__world__".
    groupid = sa.Column(sa.UnicodeText,
                        default='__world__',
                        server_default='__world__',
                        nullable=False,
                        index=True)

    #: The textual body of the annotation.
    _text = sa.Column('text', sa.UnicodeText)
    #: The Markdown-rendered and HTML-sanitized textual body of the annotation.
    _text_rendered = sa.Column('text_rendered', sa.UnicodeText)
    #: The tags associated with the annotation.
    tags = sa.Column(
        types.MutableList.as_mutable(
            pg.ARRAY(sa.UnicodeText, zero_indexes=True)))

    #: A boolean indicating whether this annotation is shared with members of
    #: the group it is published in. "Private"/"Only me" annotations have
    #: shared=False.
    shared = sa.Column(sa.Boolean,
                       nullable=False,
                       default=False,
                       server_default=sa.sql.expression.false())

    #: The URI of the annotated page, as provided by the client.
    _target_uri = sa.Column('target_uri', sa.UnicodeText)
    #: The URI of the annotated page in normalized form.
    _target_uri_normalized = sa.Column('target_uri_normalized', sa.UnicodeText)
    #: The serialized selectors for the annotation on the annotated page.
    target_selectors = sa.Column(types.AnnotationSelectorJSONB,
                                 default=list,
                                 server_default=sa.func.jsonb('[]'))

    #: An array of annotation IDs which are ancestors of this annotation.
    references = sa.Column(pg.ARRAY(types.URLSafeUUID),
                           default=list,
                           server_default=sa.text('ARRAY[]::uuid[]'))

    #: Any additional serialisable data provided by the client.
    extra = sa.Column(MutableDict.as_mutable(pg.JSONB),
                      default=dict,
                      server_default=sa.func.jsonb('{}'),
                      nullable=False)

    document_id = sa.Column(sa.Integer,
                            sa.ForeignKey('document.id'),
                            nullable=False)

    document = sa.orm.relationship('Document', backref='annotations')

    @hybrid_property
    def target_uri(self):
        return self._target_uri

    @target_uri.setter
    def target_uri(self, value):
        self._target_uri = value
        self._target_uri_normalized = uri.normalize(value)

    @hybrid_property
    def target_uri_normalized(self):
        return self._target_uri_normalized

    @hybrid_property
    def text(self):
        return self._text

    @text.setter
    def text(self, value):
        self._text = value
        self._text_rendered = markdown.render(value)

    @hybrid_property
    def text_rendered(self):
        return self._text_rendered

    @property
    def parent_id(self):
        """
        Return the ID of the annotation that this annotation is a reply to.

        Return None if this annotation is not a reply.

        """
        if self.references:
            return self.references[-1]

    @property
    def thread_root_id(self):
        """
        Return the ID of the root annotation of this annotation's thread.

        Return the ID of the root annotation of the thread to which this
        annotation belongs. May be this annotation's own ID if it is the root
        annotation of its thread.

        """
        if self.references:
            return self.references[0]
        else:
            return self.id

    def __acl__(self):
        """Return a Pyramid ACL for this annotation."""
        acl = []
        if self.shared:
            group = 'group:{}'.format(self.groupid)
            if self.groupid == '__world__':
                group = security.Everyone

            acl.append((security.Allow, group, 'read'))
        else:
            acl.append((security.Allow, self.userid, 'read'))

        for action in ['admin', 'update', 'delete']:
            acl.append((security.Allow, self.userid, action))

        # If we haven't explicitly authorized it, it's not allowed.
        acl.append(security.DENY_ALL)

        return acl

    def __repr__(self):
        return '<Annotation %s>' % self.id
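
Note that extra passes default=dict (the callable), so each row gets its own fresh dict; in-place mutation is then tracked, e.g. (annotation and session are assumptions):

annotation.extra['via'] = 'import-script'    # flags the row as dirty
session.commit()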
Example #23
    _null = None
    _type = object

    def process_bind_param(self, value, dialect):
        return json.dumps(value)

    def process_literal_param(self, value, dialect):
        return value

    def process_result_value(self, value, dialect):
        try:
            value = json.loads(value)
        except (ValueError, TypeError):
            value = self._null
        return value


class List(Json):
    _null = []
    _type = list


class Dict(Json):
    _null = {}
    _type = dict


MutableDict.associate_with(Dict)

Base = declarative_base()
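
A sketch of a model column using the Dict type above; because of the associate_with call, plain dicts assigned to such a column are coerced to MutableDict automatically (the Settings model is an assumption):

from sqlalchemy import Column, Integer

class Settings(Base):
    __tablename__ = 'settings'
    id = Column(Integer, primary_key=True)
    payload = Column(Dict)    # stored as JSON text, mutations tracked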
Example #24
    impl = HSTORE

    def process_bind_param(self, value, dialect):
        if not isinstance(value, dict):
            return value
        ret = {k: json.dumps(v) for k, v in value.items()}
        return ret

    def process_result_value(self, value, dialect):
        if not value:
            return MutableDict()
        ret = MutableDict({k: json.loads(v) for k, v in value.items()})
        return ret


MutableDict.associate_with(JSONValuesColumn)


class IntegerEnumColumn(types.TypeDecorator):
    impl = types.INTEGER

    def __init__(self, enum_values):
        super(IntegerEnumColumn, self).__init__()
        self.enum_values = enum_values
        self.reverse_enum_values = reverse_dict(enum_values)

    def process_bind_param(self, value, dialect):
        return self.reverse_enum_values.get(value, value)

    def process_result_value(self, value, dialect):
        return self.enum_values.get(value, value)
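
A hedged usage sketch of IntegerEnumColumn (the status mapping is an assumption): small integers are stored in the database while Python code sees the mapped values.

from sqlalchemy import Column

# 0/1/2 in the database, readable strings in Python code
status = Column(IntegerEnumColumn({0: 'pending', 1: 'active', 2: 'done'}))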
Example #25
#XXX NOTICE XXX DO NOT NAME THINGS types.py it breaks EVERYTHING
from sqlalchemy.types import PickleType
from sqlalchemy.dialects import postgres,postgresql
from sqlalchemy.ext.mutable import MutableDict

def array_base(column_type):
    array = PickleType()
    array.with_variant(postgres.ARRAY(column_type), 'postgres')
    array.with_variant(postgres.ARRAY(column_type), 'postgresql')
    return array

Array=array_base

_DictType = MutableDict.as_mutable(PickleType)
_DictType.with_variant(MutableDict.as_mutable(postgres.HSTORE), 'postgres')
#_DictType.with_variant(MutableDict.as_mutable(postgresql.HSTORE), 'postgresql')
#_DictType.with_variant(MutableDict.as_mutable(postgresql.HSTORE), 'psycopg2')
#_DictType.with_variant(MutableDict.as_mutable(postgresql.HSTORE), 'postgresql+psycopg2')
DictType=_DictType #FIXME not working as hstore :/


__all__=[
    'Array',
    'DictType',
]

#ArrayFloat = PickleType()
#ArrayFloat.with_variant(postgresql.ARRAY(Float), 'postgresql')

#ArrayString = PickleType()
#ArrayString.with_variant(postgresql.ARRAY(String), 'postgresql')
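
The #FIXME above is likely explained by the API: TypeEngine.with_variant() returns a new type object rather than mutating the receiver, so the variant calls in this module are discarded. A sketch that captures the return value (the same applies to the HSTORE variant):

def array_base(column_type):
    # with_variant() returns a new type; it must be captured, otherwise
    # the plain PickleType is returned unchanged
    return PickleType().with_variant(
        postgresql.ARRAY(column_type), 'postgresql')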
Example #26
            return cls(value)
        return super(cls).coerce(key, value)


class NestedMutable(Mutable):
    """SQLAlchemy `mutable` extension with nested change tracking."""
    @classmethod
    def coerce(cls, key, value):
        """Convert plain dictionary to NestedMutable."""
        if value is None:
            return value
        if isinstance(value, cls):
            return value
        if isinstance(value, dict):
            return NestedMutableDict.coerce(key, value)
        if isinstance(value, list):
            return NestedMutableList.coerce(key, value)
        return super(NestedMutable, cls).coerce(key, value)


class MutableJson(JSONType):
    """JSON type for SQLAlchemy with change tracking at top level."""


class NestedMutableJson(JSONType):
    """JSON type for SQLAlchemy with nested change tracking."""


MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
Example #27
            return dialect.type_descriptor(mysql.LONGTEXT())
        else:
            return self.impl

    def process_bind_param(self, value, dialect):
        return dumps(value)

    def process_result_value(self, value, dialect):
        return loads(value)


# TODO(leizhang) When we removed sqlalchemy 0.7 dependence
# we can import MutableDict directly and remove ./mutable.py
try:
    from sqlalchemy.ext.mutable import MutableDict as sa_MutableDict
    sa_MutableDict.associate_with(Json)
except ImportError:
    from dragon.db.sqlalchemy.mutable import MutableDict
    MutableDict.associate_with(Json)


class DragonBase(object):
    """Base class for Heat Models."""
    __table_args__ = {'mysql_engine': 'InnoDB'}
    __table_initialized__ = False
    created_at = sqlalchemy.Column(sqlalchemy.DateTime,
                                   default=timeutils.utcnow)
    updated_at = sqlalchemy.Column(sqlalchemy.DateTime,
                                   onupdate=timeutils.utcnow)

    def save(self, session=None):
Example #28
@author: peterb
'''
from blueshed.model_helpers.sqla_views import view
from blueshed.model_helpers.sql_extensions import JSONEncodedDict
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.sql.expression import select, join
from sqlalchemy.sql.functions import func

from examples.simple.model import *
from blueshed.model_helpers.access_model import Person, Permission,\
    person_permissions_permission


Person._token = Column(String(80))
Person._preferences = Column(MutableDict.as_mutable(JSONEncodedDict(255)))
Person.firstname = Column(String(80))
Person.lastname = Column(String(80))
Person.photo = Column(String(128))

'''
    An example View
'''
q = select([Person.id.label('id'), 
            Person.email.label('email'),
            func.count(Permission.id).label('permission_count')]).\
            select_from(join(Person,
                             person_permissions_permission,
                             Person.id==person_permissions_permission.c.permissions_id).\
                        join(Permission,
                             Permission.id==person_permissions_permission.c.permission_id)).\
Example #29
class GroupInfo(BASE):  # pylint:disable=too-few-public-methods
    """
    Some info about an LMS group that was created in h.

    This info is stored purely for metrics/analytics purposes and shouldn't be
    used for application logic. The app should treat the h API as the
    "single-source of truth" about what h groups exist and what their IDs and
    other properties are.
    """

    __tablename__ = "group_info"

    id = sa.Column(sa.Integer(), autoincrement=True, primary_key=True)

    #: The authority_provided_id of the group in h.
    #:
    #: This corresponds to the ID part of the groupid that's used in h's groups
    #: API. For example if the groupid is "group:SOME_ID@lms.hypothes.is"
    #: then the authority_provided_id is the "SOME_ID" part without the leading
    #: "group:" or the trailing "@lms.hypothes.is".
    #:
    #: This also corresponds to the group.authority_provided_id column in h's
    #: DB.
    authority_provided_id = sa.Column(sa.UnicodeText(),
                                      nullable=False,
                                      unique=True)

    #: The LTI consumer_key (oauth_consumer_key) of the application instance
    #: that this access token belongs to.
    consumer_key = sa.Column(
        sa.String(),
        sa.ForeignKey("application_instances.consumer_key",
                      ondelete="cascade"),
        nullable=False,
    )

    #: The ApplicationInstance that this group belongs to.
    application_instance = sa.orm.relationship("ApplicationInstance",
                                               back_populates="group_infos")

    #: The value of the context_id param this group was last launched with.
    context_id = sa.Column(sa.UnicodeText())

    #: The value of the context_title param this group was last launched with.
    context_title = sa.Column(sa.UnicodeText())

    #: The value of the context_label param this group was last launched with.
    context_label = sa.Column(sa.UnicodeText())

    #: The value of the tool_consumer_info_product_family_code param this group was last launched with.
    tool_consumer_info_product_family_code = sa.Column(sa.UnicodeText())

    #: The value of the tool_consumer_info_version param this group was last launched with.
    tool_consumer_info_version = sa.Column(sa.UnicodeText())

    #: The value of the tool_consumer_instance_name param this group was last launched with.
    tool_consumer_instance_name = sa.Column(sa.UnicodeText())

    #: The value of the tool_consumer_instance_description param this group was last launched with.
    tool_consumer_instance_description = sa.Column(sa.UnicodeText())

    #: The value of the tool_consumer_instance_url param this group was last launched with.
    tool_consumer_instance_url = sa.Column(sa.UnicodeText())

    #: The value of the tool_consumer_instance_contact_email param this group was last launched with.
    tool_consumer_instance_contact_email = sa.Column(sa.UnicodeText())

    #: The value of the tool_consumer_instance_guid param this group was last launched with.
    tool_consumer_instance_guid = sa.Column(sa.UnicodeText())

    #: The value of the custom_canvas_api_domain param this group was last launched with.
    custom_canvas_api_domain = sa.Column(sa.UnicodeText())

    #: The value of the custom_canvas_course_id param this group was last launched with.
    custom_canvas_course_id = sa.Column(sa.UnicodeText())

    #: A dict of info about this group.
    info = sa.Column(MutableDict.as_mutable(JSONB))

    def __init__(self, *args, **kwargs):
        kwargs.setdefault("info", {})
        kwargs["info"].setdefault("instructors", [])
        kwargs["info"].setdefault("type", None)
        super().__init__(*args, **kwargs)

    @property
    def instructors(self):
        return self.info["instructors"]

    @instructors.setter
    def instructors(self, new_instructors):
        self.info["instructors"] = new_instructors

    @property
    def type(self):
        return self.info["type"]

    @type.setter
    def type(self, new_type):
        self.info["type"] = new_type

    def upsert_instructor(self, new_instructor):
        updated_instructors = []
        found = False

        for existing_instructor in self.instructors:
            if existing_instructor["username"] == new_instructor["username"]:
                updated_instructors.append(new_instructor)
                found = True
            else:
                updated_instructors.append(existing_instructor)

        if not found:
            updated_instructors.append(new_instructor)

        if updated_instructors != self.instructors:
            self.instructors = updated_instructors
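
upsert_instructor deliberately builds a new list and reassigns it: MutableDict tracks assignment to top-level keys of info, not mutations inside nested values. A sketch of the difference (group_info is an assumption):

# NOT tracked: nested list mutated in place
group_info.instructors.append(new_instructor)
# tracked: the property setter reassigns info["instructors"]
group_info.instructors = group_info.instructors + [new_instructor]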
Example #30
            return json.loads(value)


class AstonFrameBinary(TypeDecorator):
    impl = LargeBinary

    def process_bind_param(self, value, dialect):
        if value is not None:
            return value.compress()

    def process_result_value(self, value, dialect):
        if value is not None:
            return decompress(value)


MutableDict.associate_with(JSONDict)

Base = declarative_base()


def initialize_sql(engine):
    DBSession = scoped_session(sessionmaker(expire_on_commit=False))
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
    return DBSession


def quick_sqlite(filename):
    from sqlalchemy import create_engine
Example #31
class Survey(Base):
    __tablename__ = 'surveys'

    name = db.Column(db.String(255), nullable=False, unique=True)
    pretty_name = db.Column(db.String(80))
    language = db.Column(db.String(2))
    about_text = db.Column(db.Text)
    terms_of_service = db.Column(db.Text)
    contact_email = db.Column(db.String(255))
    avatar_uri = db.Column(db.String(255))
    max_survey_days = db.Column(db.Integer, default=14)
    max_prompts = db.Column(db.Integer, default=20)
    trip_break_interval = db.Column(db.Integer, nullable=False, default=360)
    trip_subway_buffer = db.Column(db.Integer, nullable=False, default=300)
    last_export = db.Column(MutableDict.as_mutable(JSONB))
    record_acceleration = db.Column(db.Boolean, default=True)
    record_mode = db.Column(db.Boolean, default=True)

    web_users = db.relationship('WebUser',
                                backref='survey',
                                cascade='all, delete-orphan',
                                lazy='dynamic')

    survey_questions = db.relationship('SurveyQuestion',
                                       backref='survey',
                                       cascade='all, delete-orphan',
                                       lazy='dynamic')

    prompt_questions = db.relationship('PromptQuestion',
                                       backref='survey',
                                       cascade='all, delete-orphan',
                                       lazy='dynamic')

    mobile_users = db.relationship('MobileUser',
                                   backref='survey',
                                   cascade='all, delete-orphan',
                                   lazy='dynamic')

    survey_responses = db.relationship('SurveyResponse',
                                       backref='survey',
                                       cascade='all, delete-orphan',
                                       lazy='dynamic')

    prompt_responses = db.relationship('PromptResponse',
                                       backref='survey',
                                       cascade='all, delete-orphan',
                                       lazy='dynamic')

    cancelled_prompts = db.relationship('CancelledPromptResponse',
                                        backref='survey',
                                        cascade='all, delete-orphan',
                                        lazy='dynamic')

    mobile_coordinates = db.relationship('MobileCoordinate',
                                         backref='survey',
                                         cascade='all, delete-orphan',
                                         lazy='dynamic')

    subway_stops = db.relationship('SubwayStop',
                                   backref='survey',
                                   cascade='all, delete-orphan',
                                   lazy='dynamic')

    researcher_invite_token = db.relationship('ResearcherInviteToken',
                                              backref='survey',
                                              cascade='all, delete-orphan',
                                              lazy='dynamic')

    def __repr__(self):
        return '<Survey %d>' % self.id
Example #32
class PredictionTile(db.Model):
    """ Store individual tile predictions """
    __tablename__ = 'prediction_tiles'

    id = db.Column(db.Integer, primary_key=True)

    prediction_id = db.Column(db.BigInteger,
                              db.ForeignKey('predictions.id',
                                            name='fk_predictions'),
                              nullable=False)

    quadkey = db.Column(db.String, nullable=False)
    quadkey_geom = db.Column(Geometry('POLYGON', srid=4326), nullable=False)
    centroid = db.Column(Geometry('POINT', srid=4326))
    predictions = db.Column(postgresql.JSONB, nullable=False)
    validity = db.Column(MutableDict.as_mutable(postgresql.JSONB),
                         nullable=True)

    prediction_tiles_quadkey_idx = db.Index(
        'prediction_tiles_quadkey_idx',
        'prediction_tiles.quadkey',
        postgresql_ops={'quadkey': 'text_pattern_ops'})

    @staticmethod
    def get(predictiontile_id: int):
        # Direct primary-key lookup; the intermediate query originally built
        # here was never executed.
        return PredictionTile.query.get(predictiontile_id)

    def update(self, validity):
        self.validity = validity

        db.session.commit()

    @staticmethod
    def inferences(prediction_id: int):
        results = db.session.execute(
            text('''
             SELECT
                DISTINCT jsonb_object_keys(predictions)
            FROM
                prediction_tiles
            WHERE
                prediction_id = :pred
        '''), {
                'pred': prediction_id
            }).fetchall()

        inferences = []
        for res in results:
            inferences.append(res[0])

        return inferences

    @staticmethod
    def count(prediction_id: int):
        return db.session.query(
            func.count(PredictionTile.quadkey).label("count")).filter(
                PredictionTile.prediction_id == prediction_id).one()

    @staticmethod
    def bbox(prediction_id: int):
        result = db.session.execute(
            text('''
            SELECT
                ST_Extent(quadkey_geom)
            FROM
                prediction_tiles
            WHERE
                prediction_id = :pred
        '''), {
                'pred': prediction_id
            }).fetchone()

        bbox = []
        for corners in result[0].replace('BOX(', '').replace(')',
                                                             '').split(' '):
            for corner in corners.split(','):
                bbox.append(float(corner))

        return bbox

    @staticmethod
    def mvt(prediction_id: int, z: int, x: int, y: int):
        grid = mercantile.xy_bounds(x, y, z)

        result = db.session.execute(
            text('''
            SELECT
                ST_AsMVT(q, 'data', 4096, 'geom', 'id') AS mvt
            FROM (
                SELECT
                    p.id AS id,
                    quadkey AS quadkey,
                    predictions || COALESCE(v.validity, '{}'::JSONB) AS props,
                    ST_AsMVTGeom(quadkey_geom, ST_Transform(ST_MakeEnvelope(:minx, :miny, :maxx, :maxy, 3857), 4326), 4096, 256, false) AS geom
                FROM
                    prediction_tiles AS p
                    LEFT JOIN (
                        SELECT
                            id,
                            JSONB_Object_Agg('v_'||key, value) AS validity
                        FROM
                            prediction_tiles,
                            jsonb_each(validity)
                        GROUP BY
                            id
                    ) AS v ON p.id = v.id
                WHERE
                    p.prediction_id = :pred
                    AND ST_Intersects(p.quadkey_geom, ST_Transform(ST_MakeEnvelope(:minx, :miny, :maxx, :maxy, 3857), 4326))
            ) q
        '''), {
                'pred': prediction_id,
                'minx': grid[0],
                'miny': grid[1],
                'maxx': grid[2],
                'maxy': grid[3]
            }).fetchone()

        return bytes(result.values()[0])

    @staticmethod
    def get_tiles_by_quadkey(prediction_id: int, quadkeys: tuple, zoom: int):
        return db.session.query(
            func.substr(PredictionTile.quadkey, 1, zoom).label('quadkey'),
            func.avg(
                cast(
                    cast(PredictionTile.predictions['ml_prediction'],
                         sqlalchemy.String),
                    sqlalchemy.Float)).label('ml_prediction'),
            func.avg(
                cast(
                    cast(PredictionTile.predictions['osm_building_area'],
                         sqlalchemy.String),
                    sqlalchemy.Float)).label('osm_building_area')).filter(
                        PredictionTile.prediction_id == prediction_id).filter(
                            func.substr(PredictionTile.quadkey, 1,
                                        zoom).in_(quadkeys)).group_by(
                                            func.substr(
                                                PredictionTile.quadkey, 1,
                                                zoom)).all()

    @staticmethod
    def get_aggregate_for_polygon(prediction_id: int, polygon: str):
        return db.session.query(
            func.avg(
                cast(
                    cast(PredictionTile.predictions['ml_prediction'],
                         sqlalchemy.String),
                    sqlalchemy.Float)).label('ml_prediction'),
            func.avg(
                cast(
                    cast(PredictionTile.predictions['osm_building_area'],
                         sqlalchemy.String),
                    sqlalchemy.Float)).label('osm_building_area')).filter(
                        PredictionTile.prediction_id == prediction_id).filter(
                            ST_Within(PredictionTile.centroid,
                                      ST_GeomFromText(polygon)) ==
                            'True').one()
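# Usage sketch (not from the source): mvt() delegates all tile assembly to
# PostGIS via ST_AsMVT and returns raw Mapbox Vector Tile bytes, so serving
# it is a thin HTTP wrapper. The Flask `app` and URL layout are assumptions.
from flask import Response

@app.route('/v1/prediction/<int:prediction_id>/tiles/<int:z>/<int:x>/<int:y>.mvt')
def serve_prediction_tile(prediction_id, z, x, y):
    data = PredictionTile.mvt(prediction_id, z, x, y)
    return Response(data, mimetype='application/vnd.mapbox-vector-tile')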
Example #33
class Project(db.Model, DomainObject):
    '''A microtasking Project to which Tasks are associated.
    '''

    __tablename__ = 'project'

    #: ID of the project
    id = Column(Integer, primary_key=True)
    #: UTC timestamp when the project is created
    created = Column(Text, default=make_timestamp)
    #: UTC timestamp when the project is updated (or any of its relationships)
    updated = Column(Text, default=make_timestamp, onupdate=make_timestamp)
    #: Project name
    name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project slug for the URL
    short_name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project description
    description = Column(Unicode(length=255), nullable=False)
    #: Project long description
    long_description = Column(UnicodeText)
    #: Project webhook
    webhook = Column(Text)
    #: If the project allows anonymous contributions
    allow_anonymous_contributors = Column(Boolean, default=True)
    long_tasks = Column(Integer, default=0)
    #: If the project is hidden
    hidden = Column(Integer, default=0)
    # If the project is featured
    featured = Column(Boolean, nullable=False, default=False)
    # If the project owner has been emailed
    contacted = Column(Boolean, nullable=False, default=False)
    #: Project owner_id
    owner_id = Column(Integer, ForeignKey('user.id'), nullable=False)
    time_estimate = Column(Integer, default=0)
    time_limit = Column(Integer, default=0)
    calibration_frac = Column(Float, default=0)
    bolt_course_id = Column(Integer, default=0)
    #: Project Category
    category_id = Column(Integer, ForeignKey('category.id'), nullable=False)
    #: Project info field formatted as JSON
    info = Column(MutableDict.as_mutable(JSON))

    tasks = relationship(Task,
                         cascade='all, delete, delete-orphan',
                         backref='project')
    task_runs = relationship(TaskRun,
                             backref='project',
                             cascade='all, delete-orphan',
                             order_by='TaskRun.finish_time.desc()')
    category = relationship(Category)
    blogposts = relationship(Blogpost,
                             cascade='all, delete-orphan',
                             backref='project')

    def needs_password(self):
        return self.get_passwd_hash() is not None

    def get_passwd_hash(self):
        return self.info.get('passwd_hash')

    def get_passwd(self):
        if self.needs_password():
            return signer.loads(self.get_passwd_hash())
        return None

    def set_password(self, password):
        if len(password) > 1:
            self.info['passwd_hash'] = signer.dumps(password)
            return True
        self.info['passwd_hash'] = None
        return False

    def check_password(self, password):
        if self.needs_password():
            return self.get_passwd() == password
        return False

    def has_autoimporter(self):
        return self.get_autoimporter() is not None

    def get_autoimporter(self):
        return self.info.get('autoimporter')

    def set_autoimporter(self, new=None):
        self.info['autoimporter'] = new

    def delete_autoimporter(self):
        del self.info['autoimporter']
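# Usage sketch (not from the source): set_password() stores a *signed*,
# recoverable value in info (via the module's `signer`, an itsdangerous-style
# serializer) rather than a one-way hash, which is why get_passwd() can
# return the original string.
project = Project(name='Demo', short_name='demo', description='demo',
                  owner_id=1, category_id=1, info={})
project.set_password('s3cret')           # info['passwd_hash'] = signer.dumps(...)
assert project.needs_password()
assert project.check_password('s3cret')  # signer.loads(...) round trip
assert not project.check_password('wrong')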
Example #34
class Installation(db.Model, ModelIter):
    __tablename__ = 'installations'
    allowed_widget = True

    fillable = [
        'installed_capacity',  # kWp
        'egauge_url',
        'egauge_serial',
        'egauge_mac',
        'start_date',
        'specific_yield',  # kWh/kWp/Year
        'project_id',
        'sale_type_id',
        'price_per_kwp',
        'responsible_party',
        'setup_summary'
    ]

    id = db.Column(db.Integer, primary_key=True)
    # project can have multiple installations because customers may ask to add more equipment after project is done
    project = relationship(CustomerProject,
                           uselist=False,
                           backref='installations',
                           cascade='all, delete')
    responsible_party = db.Column(db.String(32))
    installed_capacity = db.Column(db.Numeric(8, 3), nullable=False)
    sale_type = relationship(SaleType, uselist=False, lazy='joined')
    price_per_kwp = db.Column(db.Numeric(10, 2), nullable=False)
    setup_summary = db.Column(
        MutableDict.as_mutable(db.JSON),
        comment=
        'A JSON schema that allows free form data, i.e. historical consumption data',
        server_default=('{'
                        '"historical_consumption": [],'
                        '"historical_power": [],'
                        '"expected_generation": []'
                        '}'))
    panels = relationship(
        InstallationPanelModel,
        backref='installations',
        primaryjoin=id == InstallationPanelModel.installation_id,
        lazy='joined')
    inverters = relationship(
        InstallationInverterModel,
        backref='installations',
        primaryjoin=id == InstallationInverterModel.installation_id,
        lazy='joined')
    egauge_url = db.Column(db.String(255, collation=configs.DB_COLLATION))
    egauge_serial = db.Column(db.String(255, collation=configs.DB_COLLATION))
    egauge_mac = db.Column(MacAddress)
    start_date = db.Column(db.DateTime())
    specific_yield = db.Column(db.SmallInteger)

    @property
    def installation_size(self):
        # Inclusive upper bounds so fractional capacities (e.g. 50.5 kWp)
        # cannot fall through the 50/51 and 200/201 gaps.
        if self.installed_capacity <= 50:
            return 'Pequeño'
        elif self.installed_capacity <= 200:
            return 'Mediano'
        elif self.installed_capacity <= 500:
            return 'Comercial Pequeño'
        elif self.installed_capacity <= 1000:
            return 'Comercial Mediano'
        elif self.installed_capacity <= 1500:
            return 'Comercial Grande'
        else:
            return 'Utilidad'

    @property
    def total_investment(self):
        return self.installed_capacity * self.price_per_kwp

    @property
    def annual_production(self):
        return self.installed_capacity * self.specific_yield

    sale_type_id = deferred(
        db.Column(db.Integer,
                  db.ForeignKey('sale_types.id'),
                  index=True,
                  nullable=False))
    project_id = deferred(
        db.Column(db.Integer,
                  db.ForeignKey('customer_projects.id'),
                  index=True,
                  nullable=False))
Example #35
class Task(CustomBase):

    __tablename__ = 'Task'

    id = Column(Integer, primary_key=True)
    type = Column(String)
    recurrent = Column(Boolean, default=False)
    name = Column(String(120), unique=True)
    status = Column(String)
    creation_time = Column(Integer)
    logs = Column(MutableDict.as_mutable(PickleType), default={})
    nodes = Column(MutableList.as_mutable(PickleType), default=[])

    # scheduling parameters
    frequency = Column(String(120))
    scheduled_date = Column(String)

    # script parameters
    creator = Column(String)

    def __init__(self, user, **data):
        self.name = data['name']
        self.frequency = data['frequency']
        self.recurrent = bool(data['frequency'])
        self.creation_time = str(datetime.now())
        self.creator = user.username
        self.status = 'active'
        # if the scheduled date is left empty, we turn the empty string into
        # None as this is what AP Scheduler is expecting
        if data['scheduled_date']:
            self.scheduled_date = self.datetime_conversion(
                data['scheduled_date'])
        else:
            self.scheduled_date = None
        self.is_active = True
        if data['frequency']:
            self.recurrent_scheduling()
        else:
            self.one_time_scheduling()

    def datetime_conversion(self, scheduled_date):
        dt = datetime.strptime(scheduled_date, '%d/%m/%Y %H:%M:%S')
        return datetime.strftime(dt, '%Y-%m-%d %H:%M:%S')

    def pause_task(self):
        scheduler.pause_job(self.creation_time)
        self.status = 'suspended'
        db.session.commit()

    def resume_task(self):
        scheduler.resume_job(self.creation_time)
        self.status = "active"
        db.session.commit()

    def recurrent_scheduling(self):
        if not self.scheduled_date:
            self.scheduled_date = datetime.now() + timedelta(seconds=15)
        # run the job on a regular basis with an interval trigger
        scheduler.add_job(id=self.creation_time,
                          func=self.job,
                          args=self.args,
                          trigger='interval',
                          start_date=self.scheduled_date,
                          seconds=int(self.frequency),
                          replace_existing=True)

    def one_time_scheduling(self):
        if not self.scheduled_date:
            # when the job is scheduled to run immediately, it may happen that
            # the job is run even before the task is created, in which case it
            # fails because it cannot be retrieved from the Task table of the
            # database: we therefore introduce a delay of a few seconds.
            # Other situation: the server is too slow and the job cannot be
            # run at all, e.g. 'job was missed by 0:00:09.465684'
            self.scheduled_date = datetime.now() + timedelta(seconds=5)
        # execute the job immediately with a date-type job
        # when date is used as a trigger and run_date is left undetermined,
        # the job is executed immediately.
        scheduler.add_job(id=self.creation_time,
                          run_date=self.scheduled_date,
                          func=self.job,
                          args=self.args,
                          trigger='date')
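# Usage sketch (not from the source): the constructor does the scheduling
# itself, so creating a Task is enough to register the APScheduler job.
# `user` only needs a `username` attribute; the data keys mirror what
# __init__ reads.
task = Task(user,
            name='poll-devices',
            frequency='60',       # non-empty: recurrent job every 60 seconds
            scheduled_date='')    # empty: start roughly 15 seconds from now
db.session.add(task)
db.session.commit()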
Example #36
    def __init__(self):
        self.key = uuid4().hex
        self.startmoment = datetime.now()
        self.values = MutableDict()
        self.values['check'] = uuid4().hex
Example #37
class Badge(db.Model):
    id = db.Column(db.Text, primary_key=True)
    name = db.Column(db.Text)
    orcid_id = db.Column(db.Text, db.ForeignKey('person.orcid_id'))
    created = db.Column(db.DateTime)
    value = db.Column(db.Float)
    percentile = db.Column(db.Float)
    support = db.Column(db.Text)
    products = db.Column(MutableDict.as_mutable(JSONB))

    def __init__(self, assigned=True, **kwargs):
        self.id = shortuuid.uuid()[0:10]
        self.created = datetime.datetime.utcnow().isoformat()
        self.assigned = assigned
        self.products = {}
        super(Badge, self).__init__(**kwargs)

    @property
    def dois(self):
        if self.products:
            return self.products.keys()
        return []

    @property
    def num_products(self):
        if self.products:
            return len(self.products)
        else:
            return 0

    def add_product(self, my_product):
        self.products[my_product.doi] = True

    def add_products(self, products_list):
        for my_product in products_list:
            self.add_product(my_product)

    def remove_all_products(self):
        self.products = {}

    @property
    def my_badge_type(self):
        assigner = get_badge_assigner(self.name)
        if assigner:
            my_assigner = assigner()
        else:
            my_assigner = dummy_badge_assigner()
        return my_assigner

    @property
    def sort_score(self):

        if self.percentile:
            sort_score = self.percentile * self.my_badge_type.importance
        else:
            sort_score = 0.5 * self.my_badge_type.importance

        if self.my_badge_type.group == "fun":
            sort_score -= 0.25
        return sort_score

    @property
    def description(self):
        description_template = self.my_badge_type.description
        description_string = description_template.format(
            value=conversational_number(self.value),
            one_hundred_minus_value=conversational_number(100 - self.value))
        return description_string

    @property
    def display_in_the_top_percentile(self):
        if not self.percentile:
            return None
        ret = int(100 - self.percentile * 100)
        if ret == 100:
            ret = 99
        if ret < 1:
            ret = 1
        return ret

    @property
    def display_percentile(self):
        if not self.percentile:
            return None
        ret = int(self.percentile * 100)
        if ret == 100:
            ret = 99
        if ret < 1:
            ret = 1
        return ret

    # what the UI is currently expecting
    @property
    def display_percentile_fraction(self):
        if not self.percentile:
            return None

        if self.percentile > 0.99:
            return 0.99
        return self.percentile

    @property
    def context(self):
        context_template = self.my_badge_type.context
        if context_template is None:
            context_template = u"  This puts you in the top {in_the_top_percentile}% of researchers."

        inverse_percentiles = ["reading_level"]
        if self.name in inverse_percentiles:
            if u"{percentile}" in context_template:
                if self.display_percentile > 50:
                    return None
            if u"{in_the_top_percentile}" in context_template:
                if self.display_in_the_top_percentile < 50:
                    return None

        else:
            if u"{percentile}" in context_template:
                if self.display_percentile < 50:
                    return None
            if u"{in_the_top_percentile}" in context_template:
                if self.display_in_the_top_percentile > 50:
                    return None

        context_string = context_template.format(
            value=conversational_number(self.value),
            one_hundred_minus_value=conversational_number(100 - self.value),
            in_the_top_percentile=self.display_in_the_top_percentile,
            percentile=self.display_percentile)

        return context_string

    @property
    def group(self):
        return self.my_badge_type.group

    @property
    def support_items(self):
        try:
            parts = self.support.split(": ")
        except AttributeError:
            return None

        try:
            support_phrase = parts[1]
        except IndexError:
            return None

        items = support_phrase.split(",")
        trimmed = [x.strip() for x in items]
        deduped = list(set(trimmed))
        deduped.sort()
        return deduped

    @property
    def support_intro(self):
        try:
            parts = self.support.split(": ")
        except AttributeError:
            return None

        return parts[0]

    def set_percentile(self, refset_list):
        if refset_list:
            self.percentile = calculate_percentile(refset_list, self.value)
            # print u"set percentile for {} {} to {}".format(self.name, self.value, self.percentile)
        else:
            print "not setting percentile, no refest.  maybe local?"

    def __repr__(self):
        return u'<Badge {id} {name} ({value})>'.format(id=self.id,
                                                       name=self.name,
                                                       value=self.value)

    def to_dict(self):
        if self.products:
            product_list = self.products.keys()

        resp = {
            "id": self.id,
            "orcid_id": self.orcid_id,
            "name": self.name,
            "created": date_as_iso_utc(self.created),
            "show_in_ui": self.my_badge_type.show_in_ui,
            "support_items": self.support_items,
            "support_intro": self.support_intro,
            "support_finale": self.my_badge_type.support_finale,
            "value": self.value,
            "importance": self.my_badge_type.importance,
            "percentile": self.display_percentile_fraction,
            "sort_score": self.sort_score,
            "description": self.description,
            "extra_description": self.my_badge_type.extra_description,
            "context": self.context,
            "group": self.my_badge_type.group,
            "display_name": self.my_badge_type.display_name
        }
        return resp
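# Usage sketch (not from the source): `products` is a MutableDict over JSONB,
# so the key inserts done by add_product() are tracked without reassigning
# the column. `product` stands in for any object with a `doi` attribute.
badge = Badge(name='reading_level', value=42.0)
badge.add_product(product)        # sets products[product.doi] = True
assert badge.num_products == 1
assert product.doi in badge.dois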
Example #38
class NapalmGettersService(Service):

    __tablename__ = 'NapalmGettersService'

    id = Column(Integer, ForeignKey('Service.id'), primary_key=True)
    has_targets = True
    validation_method = Column(String, default='text')
    validation_method_values = (('text', 'Validation by text match'),
                                ('dict_equal',
                                 'Validation by dictionary equality'),
                                ('dict_included',
                                 'Validation by dictionary inclusion'))
    content_match = Column(String)
    content_match_textarea = True
    content_match_regex = Column(Boolean)
    dict_match = Column(MutableDict.as_mutable(PickleType), default={})
    negative_logic = Column(Boolean)
    delete_spaces_before_matching = Column(Boolean)
    driver = Column(String)
    driver_values = NAPALM_DRIVERS
    use_device_driver = Column(Boolean, default=True)
    getters = Column(MutableList.as_mutable(PickleType), default=[])
    getters_values = (
        ('get_arp_table', 'ARP table'),
        ('get_interfaces_counters', 'Interfaces counters'),
        ('get_facts', 'Facts'),
        ('get_environment', 'Environment'),
        ('get_config', 'Configuration'),
        ('get_interfaces', 'Interfaces'),
        ('get_interfaces_ip', 'Interface IP'),
        ('get_lldp_neighbors', 'LLDP neighbors'),
        ('get_lldp_neighbors_detail', 'LLDP neighbors detail'),
        ('get_mac_address_table', 'MAC address'),
        ('get_ntp_servers', 'NTP servers'),
        ('get_ntp_stats', 'NTP statistics'),
        ('get_optics', 'Transceivers'),
        ('get_snmp_information', 'SNMP'),
        ('get_users', 'Users'),
        ('get_network_instances', 'Network instances (VRF)'),
        ('get_ntp_peers', 'NTP peers'),
        ('get_bgp_config', 'BGP configuration'),
        ('get_bgp_neighbors', 'BGP neighbors'),
        ('get_ipv6_neighbors_table', 'IPv6'),
        ('is_alive', 'Is alive'))
    optional_args = Column(MutableDict.as_mutable(PickleType), default={})

    __mapper_args__ = {
        'polymorphic_identity': 'NapalmGettersService',
    }

    def job(self, device, _):
        napalm_driver, result = self.napalm_connection(device), {}
        napalm_driver.open()
        for getter in self.getters:
            try:
                result[getter] = getattr(napalm_driver, getter)()
            except Exception as e:
                result[getter] = f'{getter} failed because of {e}'
        if self.validation_method == 'text':
            success = self.match_content(
                str(result), self.sub(self.content_match, locals()))
        else:
            success = self.match_dictionnary(result)
        napalm_driver.close()
        return {
            'negative_logic': self.negative_logic,
            'result': result,
            'success': success
        }
Example #39
class Job(Base):

    __tablename__ = 'Job'

    id = Column(Integer, primary_key=True)
    hidden = Column(Boolean, default=False)
    name = Column(String, unique=True)
    description = Column(String)
    number_of_retries = Column(Integer, default=0)
    time_between_retries = Column(Integer, default=10)
    positions = Column(MutableDict.as_mutable(PickleType), default={})
    logs = Column(MutableDict.as_mutable(PickleType), default={})
    state = Column(String, default='Idle')
    status = Column(MutableDict.as_mutable(PickleType), default={})
    tasks = relationship('Task', back_populates='job', cascade='all,delete')
    type = Column(String)
    vendor = Column(String)
    operating_system = Column(String)
    waiting_time = Column(Integer, default=0)
    workflows = relationship('Workflow',
                             secondary=job_workflow_table,
                             back_populates='jobs')
    devices = relationship('Device',
                           secondary=job_device_table,
                           back_populates='jobs')
    pools = relationship('Pool',
                         secondary=job_pool_table,
                         back_populates='jobs')
    log_rules = relationship('LogRule',
                             secondary=job_log_rule_table,
                             back_populates='jobs')
    send_notification = Column(Boolean, default=False)
    send_notification_method = Column(String)
    mail_recipient = Column(String)

    __mapper_args__ = {'polymorphic_identity': 'Job', 'polymorphic_on': type}

    def compute_targets(self):
        targets = set(self.devices)
        for pool in self.pools:
            targets |= set(pool.devices)
        return targets

    def job_sources(self, workflow, type='all'):
        return [
            x.source for x in self.sources
            if (type == 'all' or x.type == type) and x.workflow == workflow
        ]

    def job_successors(self, workflow, type='all'):
        return [
            x.destination for x in self.destinations
            if (type == 'all' or x.type == type) and x.workflow == workflow
        ]

    def try_run(self, payload=None, remaining_targets=None):
        failed_attempts = {}
        for i in range(self.number_of_retries + 1):
            results, remaining_targets = self.run(payload, remaining_targets)
            if results['success']:
                break
            if i != self.number_of_retries:
                failed_attempts[f'Attempts {i + 1}'] = results
                sleep(self.time_between_retries)
        results['failed_attempts'] = failed_attempts
        self.logs[str(datetime.now())] = results
        return results

    def get_results(self, payload, device=None):
        try:
            return self.job(device, payload) if device else self.job(payload)
        except Exception as e:
            return {'success': False, 'result': str(e)}

    def run(self, payload=None, targets=None):
        if not targets:
            targets = self.compute_targets()
        if targets:
            results = {'result': {'devices': {}}}
            if self.multiprocessing:
                pool = ThreadPool(processes=min(len(targets), 1))
                pool.map(self.device_run,
                         [(device, results, payload) for device in targets])
                pool.close()
                pool.join()
            else:
                results['result']['devices'] = {
                    device.name: self.get_results(payload, device)
                    for device in targets
                }
            remaining_targets = {
                device
                for device in targets
                if not results['result']['devices'][device.name]['success']
            }
            results['success'] = not bool(remaining_targets)
            return results, remaining_targets
        else:
            return self.get_results(payload), None

    def device_run(self, args):
        device, results, payload = args
        device_result = self.get_results(payload, device)
        results['result']['devices'][device.name] = device_result
Example #40
class Annotation(Base):
    """Model class representing a single annotation."""

    __tablename__ = "annotation"
    __table_args__ = (
        # Tags are stored in an array-type column, and indexed using a
        # generalised inverted index. For more information on the use of GIN
        # indices for array columns, see:
        #
        #   http://www.databasesoup.com/2015/01/tag-all-things.html
        #   http://www.postgresql.org/docs/9.5/static/gin-intro.html
        #
        sa.Index("ix__annotation_tags", "tags", postgresql_using="gin"),
        sa.Index("ix__annotation_updated", "updated"),
        # This is a functional index on the *first* of the annotation's
        # references, pointing to the top-level annotation it refers to. We're
        # using 1 here because Postgres uses 1-based array indexing.
        sa.Index("ix__annotation_thread_root", sa.text('("references"[1])')),
    )

    #: Annotation ID: these are stored as UUIDs in the database, and mapped
    #: transparently to a URL-safe Base64-encoded string.
    id = sa.Column(types.URLSafeUUID,
                   server_default=sa.func.uuid_generate_v1mc(),
                   primary_key=True)

    #: The timestamp when the annotation was created.
    created = sa.Column(
        sa.DateTime,
        default=datetime.datetime.utcnow,
        server_default=sa.func.now(),
        nullable=False,
    )

    #: The timestamp when the user edited the annotation last.
    updated = sa.Column(
        sa.DateTime,
        server_default=sa.func.now(),
        default=datetime.datetime.utcnow,
        nullable=False,
    )

    #: The full userid (e.g. 'acct:user@example.com') of the owner of this
    #: annotation.
    userid = sa.Column(sa.UnicodeText, nullable=False, index=True)
    #: The string id of the group in which this annotation is published.
    #: Defaults to the global public group, "__world__".
    groupid = sa.Column(
        sa.UnicodeText,
        default="__world__",
        server_default="__world__",
        nullable=False,
        index=True,
    )

    #: The textual body of the annotation.
    _text = sa.Column("text", sa.UnicodeText)
    #: The Markdown-rendered and HTML-sanitized textual body of the annotation.
    _text_rendered = sa.Column("text_rendered", sa.UnicodeText)

    #: The tags associated with the annotation.
    tags = sa.Column(
        MutableList.as_mutable(pg.ARRAY(sa.UnicodeText, zero_indexes=True)))

    #: A boolean indicating whether this annotation is shared with members of
    #: the group it is published in. "Private"/"Only me" annotations have
    #: shared=False.
    shared = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.expression.false(),
    )

    #: The URI of the annotated page, as provided by the client.
    _target_uri = sa.Column("target_uri", sa.UnicodeText)
    #: The URI of the annotated page in normalized form.
    _target_uri_normalized = sa.Column("target_uri_normalized", sa.UnicodeText)
    #: The serialized selectors for the annotation on the annotated page.
    target_selectors = sa.Column(types.AnnotationSelectorJSONB,
                                 default=list,
                                 server_default=sa.func.jsonb("[]"))

    #: An array of annotation IDs which are ancestors of this annotation.
    references = sa.Column(
        pg.ARRAY(types.URLSafeUUID, zero_indexes=True),
        default=list,
        server_default=sa.text("ARRAY[]::uuid[]"),
    )

    #: Any additional serialisable data provided by the client.
    extra = sa.Column(
        MutableDict.as_mutable(pg.JSONB),
        default=dict,
        server_default=sa.func.jsonb("{}"),
        nullable=False,
    )

    #: Has the annotation been deleted?
    deleted = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.expression.false(),
    )

    document_id = sa.Column(sa.Integer,
                            sa.ForeignKey("document.id"),
                            nullable=False)

    document = sa.orm.relationship("Document", backref="annotations")

    thread = sa.orm.relationship(
        "Annotation",
        primaryjoin=(sa.orm.foreign(id) == sa.orm.remote(references[0])),
        viewonly=True,
        uselist=True,
    )

    @hybrid_property
    def target_uri(self):
        return self._target_uri

    @target_uri.setter
    def target_uri(self, value):
        self._target_uri = value
        self._target_uri_normalized = uri.normalize(value)

    @hybrid_property
    def target_uri_normalized(self):
        return self._target_uri_normalized

    @hybrid_property
    def text(self):
        return self._text

    @text.setter
    def text(self, value):
        self._text = value
        # N.B. We MUST take care here of appropriately escaping the user
        # input. Code elsewhere will assume that the content of the
        # `text_rendered` field is safe for printing without further escaping.
        #
        # `markdown.render` does the hard work for now.
        self._text_rendered = markdown.render(value)

    @hybrid_property
    def text_rendered(self):
        return self._text_rendered

    @property
    def thread_ids(self):
        return [thread_annotation.id for thread_annotation in self.thread]

    @property
    def is_reply(self):
        return bool(self.references)

    @property
    def parent_id(self):
        """
        Return the ID of the annotation that this annotation is a reply to.

        Return None if this annotation is not a reply.

        """
        if self.references:
            return self.references[-1]

    @property
    def thread_root_id(self):
        """
        Return the ID of the root annotation of this annotation's thread.

        Return the ID of the root annotation of the thread to which this
        annotation belongs. May be this annotation's own ID if it is the root
        annotation of its thread.

        """
        if self.references:
            return self.references[0]
        return self.id

    @property
    def authority(self):
        """
        Return the authority of the user and group this annotation belongs to.

        For example, returns "hypothes.is" for Hypothesis first-party
        annotations, or "elifesciences.org" for eLife third-party annotations.

        If this annotation doesn't have a userid (which is possible for
        annotations that haven't been saved to the DB yet) then return None.

        :raises ValueError: if the annotation's userid is invalid

        """
        if self.userid is None:
            return None
        return split_user(self.userid)["domain"]

    def __repr__(self):
        return "<Annotation %s>" % self.id
Example #41
class PredictionTile(db.Model):
    """ Store individual tile predictions """

    __tablename__ = "prediction_tiles"

    id = db.Column(db.Integer, primary_key=True)

    prediction_id = db.Column(
        db.BigInteger,
        db.ForeignKey("predictions.id", name="fk_predictions"),
        nullable=False,
    )

    quadkey = db.Column(db.String, nullable=True)
    geom = db.Column(Geometry("POLYGON", srid=4326), nullable=False)
    predictions = db.Column(postgresql.JSONB, nullable=False)
    validity = db.Column(MutableDict.as_mutable(postgresql.JSONB),
                         nullable=True)

    prediction_tiles_quadkey_idx = db.Index(
        "prediction_tiles_quadkey_idx",
        "prediction_tiles.quadkey",
        postgresql_ops={"quadkey": "text_pattern_ops"},
    )

    @staticmethod
    def get(predictiontile_id: int):
        # Direct primary-key lookup; the intermediate query originally built
        # here was never executed.
        return PredictionTile.query.get(predictiontile_id)

    def update(self, validity):
        self.validity = validity

        db.session.commit()

    @staticmethod
    def inferences(prediction_id: int):
        results = db.session.execute(
            text("""
             SELECT
                DISTINCT jsonb_object_keys(predictions)
            FROM
                prediction_tiles
            WHERE
                prediction_id = :pred
        """),
            {
                "pred": prediction_id
            },
        ).fetchall()

        inferences = []
        for res in results:
            inferences.append(res[0])

        return inferences

    @staticmethod
    def count(prediction_id: int):
        return (db.session.query(
            func.count(PredictionTile.geom).label("count")).filter(
                PredictionTile.prediction_id == prediction_id).one())

    @staticmethod
    def bbox(prediction_id: int):
        result = db.session.execute(
            text("""
            SELECT
                ST_Extent(geom)
            FROM
                prediction_tiles
            WHERE
                prediction_id = :pred
        """),
            {
                "pred": prediction_id
            },
        ).fetchone()

        bbox = []
        for corners in result[0].replace("BOX(", "").replace(")",
                                                             "").split(" "):
            for corner in corners.split(","):
                bbox.append(float(corner))

        return bbox

    @staticmethod
    def mvt(prediction_id: int, z: int, x: int, y: int):
        grid = mercantile.xy_bounds(x, y, z)

        result = db.session.execute(
            text("""
            SELECT
                ST_AsMVT(q, 'data', 4096, 'geom', 'id') AS mvt
            FROM (
                SELECT
                    p.id AS id,
                    quadkey AS quadkey,
                    predictions || COALESCE(v.validity, '{}'::JSONB) AS props,
                    ST_AsMVTGeom(geom, ST_Transform(ST_MakeEnvelope(:minx, :miny, :maxx, :maxy, 3857), 4326), 4096, 256, false) AS geom
                FROM
                    prediction_tiles AS p
                    LEFT JOIN (
                        SELECT
                            id,
                            JSONB_Object_Agg('v_'||key, value) AS validity
                        FROM
                            prediction_tiles,
                            jsonb_each(validity)
                        GROUP BY
                            id
                    ) AS v ON p.id = v.id
                WHERE
                    p.prediction_id = :pred
                    AND ST_Intersects(p.geom, ST_Transform(ST_MakeEnvelope(:minx, :miny, :maxx, :maxy, 3857), 4326))
            ) q
        """),
            {
                "pred": prediction_id,
                "minx": grid[0],
                "miny": grid[1],
                "maxx": grid[2],
                "maxy": grid[3],
            },
        ).fetchone()

        return bytes(result.values()[0])

    @staticmethod
    def get_tiles_by_quadkey(prediction_id: int, quadkeys: tuple, zoom: int):
        return (db.session.query(
            func.substr(PredictionTile.quadkey, 1, zoom).label("quadkey"),
            func.avg(
                cast(
                    cast(
                        PredictionTile.predictions["ml_prediction"],
                        sqlalchemy.String,
                    ),
                    sqlalchemy.Float,
                )).label("ml_prediction"),
            func.avg(
                cast(
                    cast(
                        PredictionTile.predictions["osm_building_area"],
                        sqlalchemy.String,
                    ),
                    sqlalchemy.Float,
                )).label("osm_building_area"),
        ).filter(PredictionTile.prediction_id == prediction_id).filter(
            func.substr(PredictionTile.quadkey, 1,
                        zoom).in_(quadkeys)).group_by(
                            func.substr(PredictionTile.quadkey, 1,
                                        zoom)).all())
Example #42
class SocialAuthAccount(AbstractAgentAccount, AppSocialAuthMixin, UserMixin):
    """An account with an external :py:class:`.auth.IdentityProvider`"""
    __tablename__ = "social_auth_account"
    __mapper_args__ = {
        'polymorphic_identity': 'social_auth_account',
    }
    __table_args__ = (UniqueConstraint('provider_id', 'provider_domain',
                                       'uid'), )
    UID_LENGTH = config.get('UID_LENGTH', 255)

    id = Column(Integer,
                ForeignKey('abstract_agent_account.id',
                           ondelete='CASCADE',
                           onupdate='CASCADE'),
                primary_key=True)
    provider_id = Column(Integer,
                         ForeignKey('identity_provider.id',
                                    ondelete='CASCADE',
                                    onupdate='CASCADE'),
                         nullable=False,
                         info={'rdf': QuadMapPatternS(None, SIOC.member_of)})
    identity_provider = relationship(IdentityProvider)
    username = Column(Unicode(200))
    #    info={'rdf': QuadMapPatternS(None, SIOC.name)})
    provider_domain = Column(String(255))
    uid = Column(String(UID_LENGTH), nullable=False)
    #    info={'rdf': QuadMapPatternS(None, SIOC.id)})
    extra_data = Column(MutableDict.as_mutable(JSONType))
    picture_url = Column(URLString)
    user = relationship(AgentProfile, backref='identity_accounts')
    last_checked = Column(DateTime)

    def successful_login(self):
        self.last_checked = datetime.utcnow()

    def login_expiry(self):
        if self.last_checked is None:
            return datetime.utcnow() - timedelta(seconds=1)
        expiry = self.login_duration()
        if not expiry:
            return None
        return self.last_checked + timedelta(expiry)

    @property
    def provider(self):
        return self.identity_provider.provider_type

    @property
    def provider_with_idp(self):
        provider = self.provider
        if provider == 'saml':
            # PSA prefixes SAML uids with the idp_name
            idp_name = self.uid.split(':')[0]
            # Also available as self.extra_data['idp_name']
            return ':'.join((provider, idp_name))
        return provider

    @provider.setter
    def provider(self, value):
        self.identity_provider = IdentityProvider.get_by_type(value)

    def __init__(self, **kwargs):
        super(SocialAuthAccount, self).__init__(**kwargs)
        self.interpret_profile(self.extra_data)

    # reimplementation of UserSocialAuth
    @classmethod
    def username_max_length(cls):
        return User.__table__.columns.get('username').type.length

    @classmethod
    def user_model(cls):
        return User

    # reimplementation of SQLAlchemyUserMixin

    @classmethod
    def changed(cls, user):
        cls._save_instance(user)

    def set_extra_data(self, extra_data=None):
        if super(SocialAuthAccount, self).set_extra_data(extra_data):
            self.interpret_profile(self.extra_data)

    @classmethod
    def allowed_to_disconnect(cls, user, backend_name, association_id=None):
        if association_id is not None:
            qs = cls._query().filter(cls.id != association_id)
        else:
            qs = cls._query().join(cls.identity_provider).filter(
                IdentityProvider.provider_type != backend_name)
        qs = qs.filter(cls.user == user)

        if hasattr(user, 'has_usable_password'):  # TODO
            valid_password = user.has_usable_password()
        else:
            valid_password = True
        return valid_password or qs.count() > 0

    @classmethod
    def disconnect(cls, entry):
        cls._session().delete(entry)
        cls._flush()

    @classmethod
    def user_query(cls):
        return cls._session().query(cls.user_model())

    @classmethod
    def user_exists(cls, *args, **kwargs):
        """
        Return True/False if a User instance exists with the given arguments.
        Arguments are directly passed to filter() manager method.
        """
        query = cls.user_query()
        username = kwargs.pop('username', None)
        if username:
            query = query.filter(User.username == username)
        return query.filter_by(*args, **kwargs).count() > 0

    @classmethod
    def get_username(cls, user):
        """Return the username for given user"""
        # assume user is a User, not an AgentProfile
        return user.username

    @classmethod
    def create_user(cls,
                    email=None,
                    username=None,
                    fullname=None,
                    *args,
                    **kwargs):
        if fullname:
            kwargs['name'] = fullname
        user = cls._new_instance(cls.user_model(), *args, **kwargs)
        return user

    @classmethod
    def get_user(cls, pk):
        return cls.user_query().get(pk)

    @classmethod
    def get_users_by_email(cls, email):
        # Find users with similar email.
        # Only use if social provider is trusted to have verified email.
        users = cls.default_db().query(User).join(User.accounts).filter(
            AbstractAgentAccount.email_ci == email, ).all()
        # choose best known profile for base_account
        # prefer profiles with verified users, then users, then oldest profiles
        users.sort(
            key=lambda p:
            (isinstance(p, User) and p.verified, isinstance(p, User), -p.id),
            reverse=True)
        return users

    @classmethod
    def get_social_auth(cls, provider, uid):
        if not isinstance(uid, six.string_types):
            uid = str(uid)
        return cls._query().join(cls.identity_provider).filter(
            IdentityProvider.provider_type == provider,
            cls.uid == uid).first()

    @classmethod
    def get_social_auth_for_user(cls, user, provider=None, id=None):
        qs = cls._query().filter_by(profile_id=user.id)
        if provider:
            qs = qs.join(cls.identity_provider).filter(
                IdentityProvider.provider_type == provider)
        if id:
            qs = qs.filter(cls.id == id)
        return qs

    @classmethod
    def create_social_auth(cls, user, uid, provider):
        if not isinstance(uid, six.string_types):
            uid = str(uid)
        id_provider = IdentityProvider.get_by_type(provider)
        return cls._new_instance(cls,
                                 profile=user,
                                 uid=uid,
                                 identity_provider=id_provider,
                                 verified=id_provider.trust_emails)

    # override social_core.storage.UserMixin.get_backend_instance
    def get_backend_instance(self, strategy):
        try:
            backend_class = self.get_backend(strategy)
        except MissingBackend:
            return None
        else:
            if issubclass(backend_class, GenericAuth):
                return backend_class(strategy=strategy, name=self.provider)
            else:
                return backend_class(strategy=strategy)

    # Lifted from IdentityProviderAccount

    def signature(self):
        return ('idprovider_agent_account', self.provider_id, self.username,
                self.uid)

    def interpret_profile(self, profile=None):
        profile = profile or self.extra_data
        if profile:
            self.populate_picture(profile)
            if not self.email:
                # May be missed by social auth. compensate.
                emails = profile.get('emails', [])
                if emails:
                    self.email = emails[0].get('value', '')

    def interpret_social_auth_details(self, details):
        self.email = details.get("email", self.email)
        self.username = details.get('username', self.username)
        # TODO: Maybe see if username usable for user?
        fullname = details.get("fullname")
        if not fullname:
            first_name = details.get('first_name', None)
            last_name = details.get('last_name', None)
            if first_name and last_name:
                fullname = ' '.join((first_name, last_name))
        if fullname and not self.user.name:
            self.user.name = fullname

    def display_name(self):
        # TODO: format according to provider, ie @ for twitter.
        if self.username:
            name = self.username
        else:
            name = self.uid
        return ":".join((self.identity_provider.provider_type, name))

    def get_provider_name(self):
        return self.identity_provider.name

    def get_provider_type(self):
        return self.identity_provider.provider_type

    def real_name(self):
        if not self.full_name:
            info = self.extra_data
            name = info.get('name', None) or {}
            if isinstance(name, str):
                self.full_name = name
            elif name.get('formatted', None):
                self.full_name = name['formatted']
            elif 'givenName' in name and 'familyName' in name:
                self.full_name = ' '.join(
                    (name['givenName'], name['familyName']))
        return self.full_name

    def populate_picture(self, profile):
        if 'photos' in profile:  # google, facebook
            photos = [x.get('value', None) for x in profile['photos']]
            photos = [x for x in photos if x]
            if photos:
                self.picture_url = photos[0]
        elif 'image' in profile:  # google
            photo = profile['image'].get('url', None)
            if photo:
                self.picture_url = photo
        elif profile.get('user', {}).get('mugshot_url_template',
                                         None):  # yammer
            self.picture_url = profile['user']['mugshot_url_template']
        elif profile.get('user', {}).get('mugshot_url', None):  # yammer
            self.picture_url = profile['user']['mugshot_url']
        elif profile.get('mugshot_url', None):  # yammer
            self.picture_url = profile['mugshot_url']
        elif self.identity_provider.provider_type.startswith('facebook'):
            account = profile.get('id', None)
            if account is None:
                accounts = [x.get('uid') for x in profile.get('accounts', ())]
                accounts = [x for x in accounts if x]
                if not accounts:
                    return
                account = accounts[0]
            self.picture_url = 'http://graph.facebook.com/%s/picture' % (
                account)

    facebook_sizes = (('square', 50), ('small', 50), ('normal', 100),
                      ('large', 200))
    twitter_sizes = (('_mini', 25), ('_normal', 48), ('_bigger', 73),
                     ('', 1000))

    def avatar_url(self, size=32):
        picture_url = self.picture_url
        if not picture_url:
            return
        if config.get("accept_secure_connection"):
            # Make the connection https, known services can handle both.
            # Ideally we should check which ones work.
            picture_url = "https://" + picture_url.split("://", 1)[-1]
        if "{width}" in unquote(picture_url):  # yammer
            picture_url = unquote(picture_url).format(width=size, height=size)
            return picture_url
        if self.identity_provider.provider_type.startswith('google'):
            modified = re.sub(r"((\?|&)(size|sz))=(\d+)", r"\1=%d" % (size, ),
                              picture_url)
            if modified == picture_url:
                separator = "&" if "?" in picture_url else "?"
                modified = picture_url + separator + 'sz=' + str(size)
            return modified
        elif self.identity_provider.provider_type.startswith('facebook'):
            for (size_name, name_size) in self.facebook_sizes:
                if size <= name_size:
                    break
            return '%s?type=%s' % (picture_url, size_name)
        elif self.identity_provider.provider_type == 'twitter':
            for (size_name, name_size) in self.twitter_sizes:
                if size <= name_size:
                    break
            return size_name.join(picture_url.split('_normal'))

    # @classmethod
    # def special_quad_patterns(cls, alias_maker, discussion_id):
    #     return [QuadMapPatternS(AgentProfile.iri_class().apply(
    #             SocialAuthAccount.profile_id),
    #         FOAF.img, SocialAuthAccount.picture_url,
    #         name=QUADNAMES.foaf_image,
    #         conditions=(SocialAuthAccount.picture_url != None),
    #         sections=(PRIVATE_USER_SECTION,))]

    def unique_query(self):
        query, _ = super(SocialAuthAccount, self).unique_query()
        return query.filter_by(type=self.type,
                               provider_id=self.provider_id,
                               username=self.username), True

    @classmethod
    def find_accounts(cls, provider, social_account):
        # Probably deprecated
        if 'email' in social_account:
            return provider.db.query(cls).filter_by(
                provider=provider,
                domain=social_account['domain'],
                email_ci=social_account['email']).all()
        elif 'username' in social_account:
            return provider.db.query(cls).filter_by(
                provider=provider,
                domain=social_account['domain'],
                uid=social_account['username']).all()
        else:
            log.error("account needs username or email: %s", social_account)
            raise RuntimeError("account needs username or email")

    def login_duration(self):
        data = self.extra_data
        intrinsic = None
        if 'expires' in data:
            intrinsic = data['expires']
        elif 'expires_in' in data:
            intrinsic = data['expires_in']
        provider = self.provider_with_idp
        provider = '_'.join(provider.split(':'))
        config_t = config.get('login_expiry_' + provider, None)
        if config_t is None and '_' in provider:
            config_t = config.get('login_expiry_' + provider.split('_')[0],
                                  None)
        if config_t is None:
            config_t = config.get('login_expiry_default', None)
        if intrinsic is not None:
            # convert from seconds to days
            intrinsic = float(intrinsic) / 86400
            if config_t is not None:
                # take minimum of intrinsic or config.
                intrinsic = min(float(config_t), intrinsic)
        return float(intrinsic or config_t or 0)

    # temporary shims
    @property
    def profile_info_json(self):
        return self.extra_data

    @profile_info_json.setter
    def profile_info_json(self, val):
        self.extra_data = val
        self.interpret_profile(val)

def upgrade():
    metajson = sa.Column('metajson', MutableDict.as_mutable(JSON),
                         default={})
    op.add_column('catalog', metajson)
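
# A minimal, hedged sketch (illustrative Catalog model, in-memory SQLite,
# SQLAlchemy 1.4+) of why the migration above wraps JSON in
# MutableDict.as_mutable: without the wrapper, in-place dict edits do not
# mark the row dirty, and the second commit below would be a no-op.
from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Catalog(Base):
    __tablename__ = 'catalog_demo'
    id = Column(Integer, primary_key=True)
    metajson = Column(MutableDict.as_mutable(JSON), default=dict)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Catalog(id=1, metajson={}))
    session.commit()
    row = session.get(Catalog, 1)
    row.metajson['source'] = 'demo'  # in-place mutation, no reassignment
    session.commit()                 # tracked by MutableDict, UPDATE emitted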
Example #44
class MSScan(Base):
    __tablename__ = "MSScan"

    id = Column(Integer, primary_key=True, autoincrement=True)
    index = Column(Integer, index=True)
    ms_level = Column(Integer)
    scan_time = Column(Numeric(10, 5, asdecimal=False), index=True)
    title = Column(String(512))
    scan_id = Column(String(512), index=True)
    sample_run_id = Column(Integer, ForeignKey(
        SampleRun.id, ondelete='CASCADE'), index=True)

    peak_set = relationship("FittedPeak", backref="scan", lazy="dynamic")
    deconvoluted_peak_set = relationship(
        "DeconvolutedPeak", backref='scan', lazy='dynamic')

    info = deferred(Column(MutableDict.as_mutable(PickleType)))

    def __repr__(self):
        f = "{}({}, {}, {}, {}".format(
            self.__class__.__name__, self.scan_id, self.ms_level, self.scan_time,
            self.deconvoluted_peak_set.count())
        if self.ms_level > 1:
            f = "%s %s" % (f, self.precursor_information)
        f += ")"
        return f

    def convert(self, fitted=True, deconvoluted=True):
        precursor_information = self.precursor_information.convert(
        ) if self.precursor_information is not None else None

        session = object_session(self)
        conn = session.connection()

        if fitted:
            q = conn.execute(select([FittedPeak.__table__]).where(
                FittedPeak.__table__.c.scan_id == self.id)).fetchall()

            peak_set_items = list(
                map(make_memory_fitted_peak, q))

            peak_set = PeakSet(peak_set_items)
            peak_set._index()
            peak_index = PeakIndex(np.array([], dtype=np.float64), np.array(
                [], dtype=np.float64), peak_set)
        else:
            peak_index = PeakIndex(np.array([], dtype=np.float64), np.array(
                [], dtype=np.float64), PeakSet([]))

        if deconvoluted:
            q = conn.execute(select([DeconvolutedPeak.__table__]).where(
                DeconvolutedPeak.__table__.c.scan_id == self.id)).fetchall()

            deconvoluted_peak_set_items = list(
                map(make_memory_deconvoluted_peak, q))

            deconvoluted_peak_set = DeconvolutedPeakSet(
                deconvoluted_peak_set_items)
            deconvoluted_peak_set._reindex()
        else:
            deconvoluted_peak_set = DeconvolutedPeakSet([])

        info = self.info or {}

        scan = ProcessedScan(
            self.scan_id, self.title, precursor_information, int(self.ms_level),
            float(self.scan_time), self.index, peak_index, deconvoluted_peak_set,
            activation=info.get('activation'))
        return scan

    @classmethod
    def _serialize_scan(cls, scan, sample_run_id=None):
        db_scan = cls(
            index=scan.index, ms_level=scan.ms_level,
            scan_time=float(scan.scan_time), title=scan.title,
            scan_id=scan.id, sample_run_id=sample_run_id,
            info={'activation': scan.activation})
        return db_scan

    @classmethod
    def serialize(cls, scan, sample_run_id=None):
        db_scan = cls._serialize_scan(scan, sample_run_id)
        db_scan.peak_set = list(map(FittedPeak.serialize, scan.peak_set))
        db_scan.deconvoluted_peak_set = list(
            map(DeconvolutedPeak.serialize, scan.deconvoluted_peak_set))
        return db_scan

    @classmethod
    def serialize_bulk(cls, scan, sample_run_id, session, fitted=True, deconvoluted=True):
        db_scan = cls._serialize_scan(scan, sample_run_id)

        session.add(db_scan)
        session.flush()

        if fitted:
            FittedPeak._serialize_bulk_list(scan.peak_set, db_scan.id, session)
        if deconvoluted:
            DeconvolutedPeak._serialize_bulk_list(
                scan.deconvoluted_peak_set, db_scan.id, session)
        return db_scan
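
# A hedged sketch (illustrative Demo model, in-memory SQLite, SQLAlchemy
# 1.4+) of the pattern MSScan.info uses above: a deferred, pickled dict
# whose in-place edits are still tracked thanks to MutableDict.
from sqlalchemy import Column, Integer, PickleType, create_engine
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import Session, declarative_base, deferred

Base = declarative_base()

class Demo(Base):
    __tablename__ = 'demo'
    id = Column(Integer, primary_key=True)
    info = deferred(Column(MutableDict.as_mutable(PickleType)))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Demo(id=1, info={}))
    session.commit()
    scan = session.get(Demo, 1)
    scan.info['activation'] = 'HCD'  # loads the deferred column, then mutates
    session.commit()
    session.expire_all()
    assert session.get(Demo, 1).info == {'activation': 'HCD'}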
Example #45
class Array(TypeDecorator):
    # Stores Python lists as their str() repr; decoded with ast.literal_eval.
    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        if value is not None:
            value = str(value)

        return value

    def process_result_value(self, value, dialect):
        if value is not None:
            value = ast.literal_eval(value)
        return value

ARRAY_TYPE = Array()
JSON_TYPE = MutableDict.as_mutable(JSONEncodedDict)

BASE = declarative_base()

class Product(BASE):
    """docstring for Product."""

    __tablename__ = "products"

    id = Column('id', Integer, primary_key=True, autoincrement=False)
    style_no = Column('style_no', String)
    title = Column('title', String)
    keywords = Column('keywords', ARRAY_TYPE)
    owner = Column('owner', String)
    modify_time = Column('modify_time', Date)
    update = Column('update', Date, default=date.today)
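
# A hedged round-trip sketch for the Array decorator above (in-memory
# SQLite, SQLAlchemy 1.4+): keywords are stored as str(list) and decoded
# back into a real list by ast.literal_eval on the way out.
from datetime import date
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

engine = create_engine('sqlite://')
BASE.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Product(id=1, style_no='A-100', title='Demo',
                        keywords=['shoe', 'red'], owner='me',
                        modify_time=date.today()))
    session.commit()
    session.expire_all()  # force a reload through process_result_value
    assert session.get(Product, 1).keywords == ['shoe', 'red']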
Example #46
class NapalmGettersService(Service):

    __tablename__ = "NapalmGettersService"

    id = Column(Integer, ForeignKey("Service.id"), primary_key=True)
    has_targets = True
    validation_method = Column(String, default="text")
    validation_method_values = (
        ("text", "Validation by text match"),
        ("dict_equal", "Validation by dictionary equality"),
        ("dict_included", "Validation by dictionary inclusion"),
    )
    content_match = Column(String)
    content_match_textarea = True
    content_match_regex = Column(Boolean)
    dict_match = Column(MutableDict.as_mutable(PickleType), default={})
    negative_logic = Column(Boolean)
    delete_spaces_before_matching = Column(Boolean)
    driver = Column(String)
    driver_values = NAPALM_DRIVERS
    use_device_driver = Column(Boolean, default=True)
    getters = Column(MutableList.as_mutable(PickleType), default=[])
    getters_values = (
        ("get_arp_table", "ARP table"),
        ("get_interfaces_counters", "Interfaces counters"),
        ("get_facts", "Facts"),
        ("get_environment", "Environment"),
        ("get_config", "Configuration"),
        ("get_interfaces", "Interfaces"),
        ("get_interfaces_ip", "Interface IP"),
        ("get_lldp_neighbors", "LLDP neighbors"),
        ("get_lldp_neighbors_detail", "LLDP neighbors detail"),
        ("get_mac_address_table", "MAC address"),
        ("get_ntp_servers", "NTP servers"),
        ("get_ntp_stats", "NTP statistics"),
        ("get_optics", "Transceivers"),
        ("get_snmp_information", "SNMP"),
        ("get_users", "Users"),
        ("get_network_instances", "Network instances (VRF)"),
        ("get_ntp_peers", "NTP peers"),
        ("get_bgp_config", "BGP configuration"),
        ("get_bgp_neighbors", "BGP neighbors"),
        ("get_ipv6_neighbors_table", "IPv6"),
        ("is_alive", "Is alive"),
    )
    optional_args = Column(MutableDict.as_mutable(PickleType), default={})

    __mapper_args__ = {"polymorphic_identity": "NapalmGettersService"}

    def job(self, payload: dict, device: Device) -> dict:
        napalm_driver, result = self.napalm_connection(device), {}
        napalm_driver.open()
        self.logs.append(
            f"Fetching NAPALM getters ({', '.join(self.getters)}) on {device.name}"
        )
        for getter in self.getters:
            try:
                result[getter] = getattr(napalm_driver, getter)()
            except Exception as e:
                result[getter] = f"{getter} failed because of {e}"
        match = self.sub(self.content_match, locals())
        napalm_driver.close()
        return {
            "match": match if self.validation_method == "text" else self.dict_match,
            "negative_logic": self.negative_logic,
            "result": result,
            "success": self.match_content(result, match),
        }
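
# A small self-contained sketch of the getattr-dispatch loop used in job()
# above, with a stub in place of a live NAPALM connection (StubDriver is
# illustrative only).
class StubDriver:
    def get_facts(self):
        return {'hostname': 'r1'}

    def get_users(self):
        raise RuntimeError('unsupported on this platform')

driver, result = StubDriver(), {}
for getter in ('get_facts', 'get_users'):
    try:
        result[getter] = getattr(driver, getter)()
    except Exception as e:
        result[getter] = f'{getter} failed because of {e}'

assert result['get_facts'] == {'hostname': 'r1'}
assert result['get_users'].startswith('get_users failed')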
Example #47
class Task(CustomBase):

    __tablename__ = 'Task'

    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)
    creation_time = Column(String)
    status = Column(String)
    type = Column(String)
    user_id = Column(Integer, ForeignKey('User.id'))
    user = relationship('User', back_populates='tasks')
    logs = Column(MutableDict.as_mutable(PickleType), default={})
    frequency = Column(String(120))
    start_date = Column(String)
    end_date = Column(String)
    positions = Column(MutableDict.as_mutable(PickleType), default={})
    waiting_time = Column(Integer, default=0)
    transfer_payload = Column(Boolean, default=False)
    workflows = relationship('Workflow',
                             secondary=task_workflow_table,
                             back_populates='tasks')
    log_rules = relationship('LogRule',
                             secondary=task_log_rule_table,
                             back_populates='tasks')

    __mapper_args__ = {'polymorphic_identity': 'Task', 'polymorphic_on': type}

    def __init__(self, **kwargs):
        self.update(**kwargs)
        self.status = 'active'
        self.creation_time = str(datetime.now())
        self.is_active = True
        if 'do_not_run' not in kwargs:
            self.schedule(run_now='run_immediately' in kwargs)

    def aps_conversion(self, date):
        dt = datetime.strptime(date, '%d/%m/%Y %H:%M:%S')
        return datetime.strftime(dt, '%Y-%m-%d %H:%M:%S')

    def aps_date(self, datetype):
        date = getattr(self, datetype)
        return self.aps_conversion(date) if date else None

    def pause_task(self):
        scheduler.pause_job(self.creation_time)
        self.status = 'suspended'
        db.session.commit()

    def resume_task(self):
        scheduler.resume_job(self.creation_time)
        self.status = 'active'
        db.session.commit()

    def delete_task(self):
        try:
            scheduler.delete_job(self.creation_time)
        except JobLookupError:
            pass
        db.session.commit()

    def task_sources(self, workflow, type):
        return [
            x.source for x in self.sources
            if x.type == type and x.workflow == workflow
        ]

    def task_neighbors(self, workflow, type):
        return [
            x.destination for x in self.destinations
            if x.type == type and x.workflow == workflow
        ]

    def get_payloads(self, workflow, runtime):
        if not workflow:
            return {}
        payloads = {}
        for edge_type in (True, False):
            for task in self.task_sources(workflow, edge_type):
                if not task.transfer_payload:
                    continue
                if runtime in task.logs and 'success' in task.logs[runtime]:
                    success = task.logs[runtime]['success']
                    if edge_type == success:
                        pl = task.logs[runtime]['payload']['outgoing_payload']
                        payloads[task.name] = pl
        return payloads

    def schedule(self, run_now=True):
        now = datetime.now() + timedelta(seconds=15)
        runtime = now if run_now else self.aps_date('start_date')
        if self.frequency:
            scheduler.add_job(id=self.creation_time,
                              func=job,
                              args=[self.name, str(runtime)],
                              trigger='interval',
                              start_date=runtime,
                              end_date=self.aps_date('end_date'),
                              seconds=int(self.frequency),
                              replace_existing=True)
        else:
            scheduler.add_job(id=str(runtime),
                              run_date=runtime,
                              func=job,
                              args=[self.name, str(runtime)],
                              trigger='date')
        return str(runtime)

    @property
    def properties(self):
        return {p: getattr(self, p) for p in cls_to_properties['Task']}
Example #48
class Task(CustomBase):

    __tablename__ = 'Task'

    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)
    creation_time = Column(String)
    status = Column(String)
    type = Column(String)
    user_id = Column(Integer, ForeignKey('User.id'))
    user = relationship('User', back_populates='tasks')
    logs = Column(MutableDict.as_mutable(PickleType), default={})
    frequency = Column(String(120))
    start_date = Column(String)
    end_date = Column(String)
    x = Column(Integer, default=0)
    y = Column(Integer, default=0)
    waiting_time = Column(Integer, default=0)
    workflows = relationship('Workflow',
                             secondary=task_workflow_table,
                             back_populates='tasks')
    log_rules = relationship('LogRule',
                             secondary=task_log_rule_table,
                             back_populates='tasks')

    __mapper_args__ = {'polymorphic_identity': 'Task', 'polymorphic_on': type}

    def __init__(self, **data):
        self.status = 'active'
        self.waiting_time = data['waiting_time']
        self.name = data['name']
        self.user = data['user']
        self.creation_time = str(datetime.now())
        self.frequency = data['frequency']
        # if the start date is left empty, we turn the empty string into
        # None, since that is what APScheduler expects
        for date in ('start_date', 'end_date'):
            js_date = data[date]
            value = self.datetime_conversion(js_date) if js_date else None
            setattr(self, date, value)
        self.is_active = True
        if 'do_not_run' not in data:
            self.schedule(run_now='run_immediately' in data)

    def datetime_conversion(self, date):
        dt = datetime.strptime(date, '%d/%m/%Y %H:%M:%S')
        return datetime.strftime(dt, '%Y-%m-%d %H:%M:%S')

    def pause_task(self):
        scheduler.pause_job(self.creation_time)
        self.status = 'suspended'
        db.session.commit()

    def resume_task(self):
        scheduler.resume_job(self.creation_time)
        self.status = 'active'
        db.session.commit()

    def delete_task(self):
        try:
            scheduler.delete_job(self.creation_time)
        except JobLookupError:
            pass
        db.session.commit()

    def task_neighbors(self, workflow, type):
        return [
            x.destination for x in self.destinations
            if x.type == type and x.workflow == workflow
        ]

    def schedule(self, run_now=True):
        now = datetime.now() + timedelta(seconds=15)
        runtime = now if run_now else self.start_date
        if self.frequency:
            scheduler.add_job(id=self.creation_time,
                              func=job,
                              args=[self.name, str(runtime)],
                              trigger='interval',
                              start_date=runtime,
                              end_date=self.end_date,
                              seconds=int(self.frequency),
                              replace_existing=True)
        else:
            scheduler.add_job(id=str(runtime),
                              run_date=runtime,
                              func=job,
                              args=[self.name, str(runtime)],
                              trigger='date')
        return str(runtime)

    @property
    def properties(self):
        return {p: getattr(self, p) for p in cls_to_properties['Task']}
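
# A quick, hedged check of the datetime_conversion round trip above: task
# dates arrive as 'dd/mm/YYYY HH:MM:SS' strings and are rewritten into the
# 'YYYY-mm-dd HH:MM:SS' form that APScheduler accepts.
from datetime import datetime

dt = datetime.strptime('25/12/2018 08:30:00', '%d/%m/%Y %H:%M:%S')
assert datetime.strftime(dt, '%Y-%m-%d %H:%M:%S') == '2018-12-25 08:30:00'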
Example #49
class Project(db.Model, DomainObject):
    '''A microtasking Project to which Tasks are associated.
    '''

    __tablename__ = 'project'

    #: ID of the project
    id = Column(Integer, primary_key=True)
    #: UTC timestamp when the project is created
    created = Column(Text, default=make_timestamp)
    #: UTC timestamp when the project is updated (or any of its relationships)
    updated = Column(Text, default=make_timestamp, onupdate=make_timestamp)
    #: Project name
    name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project slug for the URL
    short_name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project description
    description = Column(Unicode(length=255), nullable=False)
    #: Project long description
    long_description = Column(UnicodeText)
    #: Project webhook
    webhook = Column(Text)
    #: If the project allows anonymous contributions
    allow_anonymous_contributors = Column(Boolean, default=True)
    #: If the project is published
    published = Column(Boolean, nullable=False, default=False)
    # If the project is featured
    featured = Column(Boolean, nullable=False, default=False)
    # Secret key for project
    secret_key = Column(Text, default=make_uuid)
    # If the project owner has been emailed
    contacted = Column(Boolean, nullable=False, default=False)
    #: Project owner_id
    owner_id = Column(Integer, ForeignKey('user.id'), nullable=False)
    #: Project Category
    category_id = Column(Integer, ForeignKey('category.id'), nullable=False)
    #: Project info field formatted as JSON
    info = Column(MutableDict.as_mutable(JSON), default=dict())

    tasks = relationship(Task, cascade='all, delete, delete-orphan', backref='project')
    task_runs = relationship(TaskRun, backref='project',
                             cascade='all, delete-orphan',
                             order_by='TaskRun.finish_time.desc()')
    category = relationship(Category)
    blogposts = relationship(Blogpost, cascade='all, delete-orphan', backref='project')

    def needs_password(self):
        return self.get_passwd_hash() is not None

    def get_passwd_hash(self):
        return self.info.get('passwd_hash')

    def get_passwd(self):
        if self.needs_password():
            return signer.loads(self.get_passwd_hash())
        return None

    def set_password(self, password):
        if len(password) > 1:
            self.info['passwd_hash'] = signer.dumps(password)
            return True
        self.info['passwd_hash'] = None
        return False

    def check_password(self, password):
        if self.needs_password():
            return self.get_passwd() == password
        return False

    def has_autoimporter(self):
        return self.get_autoimporter() is not None

    def get_autoimporter(self):
        return self.info.get('autoimporter')

    def set_autoimporter(self, new=None):
        self.info['autoimporter'] = new

    def delete_autoimporter(self):
        del self.info['autoimporter']

    def has_presenter(self):
        return self.info.get('task_presenter') not in ('', None)

    @classmethod
    def public_attributes(self):
        """Return a list of public attributes."""
        return ['id', 'description', 'info', 'n_tasks', 'n_volunteers',
                'name', 'overall_progress', 'short_name', 'created',
                'last_activity', 'last_activity_raw', 'owner', 'updated',
                'featured', 'owner_id']

    @classmethod
    def public_info_keys(self):
        """Return a list of public info keys."""
        return ['container', 'thumbnail', 'thumbnail_url',
                'task_presenter', 'tutorial', 'sched']
Example #50
class Task(AbstractBase):

    __tablename__ = type = "Task"
    id = Column(Integer, primary_key=True)
    aps_job_id = Column(String(SMALL_STRING_LENGTH))
    name = Column(String(SMALL_STRING_LENGTH), unique=True)
    description = Column(String(SMALL_STRING_LENGTH))
    creation_time = Column(String(SMALL_STRING_LENGTH))
    scheduling_mode = Column(String(SMALL_STRING_LENGTH), default="standard")
    periodic = Column(Boolean)
    frequency = Column(Integer)
    frequency_unit = Column(String(SMALL_STRING_LENGTH), default="seconds")
    start_date = Column(String(SMALL_STRING_LENGTH))
    end_date = Column(String(SMALL_STRING_LENGTH))
    crontab_expression = Column(String(SMALL_STRING_LENGTH))
    is_active = Column(Boolean, default=False)
    payload = Column(MutableDict.as_mutable(PickleType), default={})
    devices = relationship("Device",
                           secondary=task_device_table,
                           back_populates="tasks")
    pools = relationship("Pool",
                         secondary=task_pool_table,
                         back_populates="tasks")
    job_id = Column(Integer, ForeignKey("Job.id"))
    job = relationship("Job", back_populates="tasks")
    job_name = association_proxy("job", "name")

    def __init__(self, **kwargs: Any) -> None:
        super().update(**kwargs)
        self.creation_time = controller.get_time()
        self.aps_job_id = kwargs.get("aps_job_id", self.creation_time)
        if self.is_active:
            self.schedule()

    def update(self, **kwargs: Any) -> None:
        super().update(**kwargs)
        if self.is_active:
            self.schedule()

    def generate_row(self, table: str) -> List[str]:
        status = "Pause" if self.is_active else "Resume"
        return [
            f"""<button id="pause-resume-{self.id}" type="button"
            class="btn btn-success btn-xs" onclick=
            "{status.lower()}Task('{self.id}')">{status}</button>""",
            f"""<button type="button" class="btn btn-primary btn-xs"
            onclick="showTypePanel('task', '{self.id}')">Edit</button>""",
            f"""<button type="button" class="btn btn-primary btn-xs"
            onclick="showTypePanel('task', '{self.id}', true)">
            Duplicate</button>""",
            f"""<button type="button" class="btn btn-danger btn-xs"
            onclick="showDeletionPanel('task', '{self.id}', '{self.name}')">
            Delete</button>""",
        ]

    @hybrid_property
    def status(self) -> str:
        return "Active" if self.is_active else "Inactive"

    @status.expression  # type: ignore
    def status(cls) -> str:  # noqa: N805
        return case([(cls.is_active, "Active")], else_="Inactive")

    @property
    def next_run_time(self) -> Optional[str]:
        job = controller.scheduler.get_job(self.aps_job_id)
        if job and job.next_run_time:
            return job.next_run_time.strftime("%Y-%m-%d %H:%M:%S")
        return None

    @property
    def time_before_next_run(self) -> Optional[str]:
        job = controller.scheduler.get_job(self.aps_job_id)
        if job and job.next_run_time:
            delta = job.next_run_time.replace(tzinfo=None) - datetime.now()
            hours, remainder = divmod(delta.seconds, 3600)
            minutes, seconds = divmod(remainder, 60)
            days = f"{delta.days} days, " if delta.days else ""
            return f"{days}{hours}h:{minutes}m:{seconds}s"
        return None

    def aps_conversion(self, date: str) -> str:
        dt: datetime = datetime.strptime(date, "%d/%m/%Y %H:%M:%S")
        return datetime.strftime(dt, "%Y-%m-%d %H:%M:%S")

    def aps_date(self, datetype: str) -> Optional[str]:
        date = getattr(self, datetype)
        return self.aps_conversion(date) if date else None

    def pause(self) -> None:
        controller.scheduler.pause_job(self.aps_job_id)
        self.is_active = False
        Session.commit()

    def resume(self) -> None:
        self.schedule()
        controller.scheduler.resume_job(self.aps_job_id)
        self.is_active = True
        Session.commit()

    def delete_task(self) -> None:
        if controller.scheduler.get_job(self.aps_job_id):
            controller.scheduler.remove_job(self.aps_job_id)
        Session.commit()

    def compute_targets(self) -> Set[int]:
        targets = {device.id for device in self.devices}
        for pool in self.pools:
            targets |= {device.id for device in pool.devices}
        return targets

    def kwargs(self) -> Tuple[dict, dict]:
        default = {
            "id": self.aps_job_id,
            "func": threaded_job,
            "replace_existing": True,
            "args": [
                self.job.id,
                self.aps_job_id,
                self.compute_targets(),
                self.payload,
            ],
        }
        if self.scheduling_mode == "cron":
            self.periodic = True
            trigger = {
                "trigger": CronTrigger.from_crontab(self.crontab_expression)
            }
        elif self.frequency:
            self.periodic = True
            frequency_in_seconds = int(self.frequency) * {
                "seconds": 1,
                "minutes": 60,
                "hours": 3600,
                "days": 86400,
            }[self.frequency_unit]
            trigger = {
                "trigger": "interval",
                "start_date": self.aps_date("start_date"),
                "end_date": self.aps_date("end_date"),
                "seconds": frequency_in_seconds,
            }
        else:
            self.periodic = False
            trigger = {
                "trigger": "date",
                "run_date": self.aps_date("start_date")
            }
        return default, trigger

    def schedule(self) -> None:
        default, trigger = self.kwargs()
        if not controller.scheduler.get_job(self.aps_job_id):
            controller.scheduler.add_job(**{**default, **trigger})
        else:
            controller.scheduler.reschedule_job(default.pop("id"), **trigger)
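
# A hedged sketch of the cron branch in kwargs() above: APScheduler 3.1+
# builds a CronTrigger directly from a five-field crontab expression.
from apscheduler.triggers.cron import CronTrigger

trigger = CronTrigger.from_crontab('*/15 * * * *')  # fire every 15 minutes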
Example #51
    def hstore_table_for(self, name):
        return sa.Table(name, self.Base.metadata,
                        sa.Column('id', sa.Integer, primary_key=True),
                        sa.Column('data', MutableDict.as_mutable(pg.HSTORE)))
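
# A hedged Core round-trip sketch for the HSTORE table factory above
# (SQLAlchemy 1.4+). HSTORE is PostgreSQL-only, so the DSN is a placeholder
# and the snippet needs a real database with the hstore extension to run.
# Note that MutableDict change tracking only kicks in once the column is
# mapped with the ORM, as in the factory above.
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as pg

engine = sa.create_engine('postgresql+psycopg2://user:pass@localhost/demo')
metadata = sa.MetaData()
table = sa.Table('hstore_demo', metadata,
                 sa.Column('id', sa.Integer, primary_key=True),
                 sa.Column('data', pg.HSTORE))
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(table.insert().values(id=1, data={'k': 'v'}))
    stored = conn.execute(sa.select(table.c.data)).scalar_one()
    assert stored == {'k': 'v'}  # HSTORE round-trips text-to-text mappings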
Example #52

class Json(types.TypeDecorator):
    impl = types.Text

    def process_bind_param(self, value, dialect):
        return dumps(value)

    def process_result_value(self, value, dialect):
        return loads(value)

# TODO(leizhang) When we removed sqlalchemy 0.7 dependence
# we can import MutableDict directly and remove ./mutable.py
try:
    from sqlalchemy.ext.mutable import MutableDict as sa_MutableDict
    sa_MutableDict.associate_with(Json)
except ImportError:
    from heat.db.sqlalchemy.mutable import MutableDict
    MutableDict.associate_with(Json)


class HeatBase(object):
    """Base class for Heat Models."""
    __table_args__ = {'mysql_engine': 'InnoDB'}
    __table_initialized__ = False
    created_at = sqlalchemy.Column(sqlalchemy.DateTime,
                                   default=timeutils.utcnow)
    updated_at = sqlalchemy.Column(sqlalchemy.DateTime,
                                   onupdate=timeutils.utcnow)

    def save(self, session=None):
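
# A hedged sketch (illustrative Stack model, reusing the Json decorator
# above) of what associate_with buys over as_mutable: it registers
# MutableDict for the Json type itself, so every Json column is
# change-tracked with no per-column wrapping.
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Stack(Base):
    __tablename__ = 'stack_demo'
    id = Column(Integer, primary_key=True)
    # No MutableDict.as_mutable(...) here: associate_with(Json) above
    # already attached change tracking to the Json type.
    parameters = Column(Json)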
Example #53
class Job(Base):

    __tablename__ = "Job"
    type = Column(String(255))
    __mapper_args__ = {"polymorphic_identity": "Job", "polymorphic_on": type}
    id = Column(Integer, primary_key=True)
    hidden = Column(Boolean, default=False)
    name = Column(String(255), unique=True)
    description = Column(String(255))
    multiprocessing = Column(Boolean, default=False)
    max_processes = Column(Integer, default=5)
    number_of_retries = Column(Integer, default=0)
    time_between_retries = Column(Integer, default=10)
    positions = Column(MutableDict.as_mutable(PickleType), default={})
    results = Column(MutableDict.as_mutable(PickleType), default={})
    is_running = Column(Boolean, default=False)
    number_of_targets = Column(Integer, default=0)
    completed = Column(Integer, default=0)
    failed = Column(Integer, default=0)
    state = Column(MutableDict.as_mutable(PickleType), default={})
    credentials = Column(String(255), default="device")
    tasks = relationship("Task", back_populates="job", cascade="all,delete")
    vendor = Column(String(255))
    operating_system = Column(String(255))
    waiting_time = Column(Integer, default=0)
    creator_id = Column(Integer, ForeignKey("User.id"))
    creator = relationship("User", back_populates="jobs")
    creator_name = association_proxy("creator", "name")
    push_to_git = Column(Boolean, default=False)
    workflows = relationship("Workflow",
                             secondary=job_workflow_table,
                             back_populates="jobs")
    devices = relationship("Device",
                           secondary=job_device_table,
                           back_populates="jobs")
    pools = relationship("Pool",
                         secondary=job_pool_table,
                         back_populates="jobs")
    log_rules = relationship("LogRule",
                             secondary=job_log_rule_table,
                             back_populates="jobs")
    send_notification = Column(Boolean, default=False)
    send_notification_method = Column(String(255),
                                      default="mail_feedback_notification")
    display_only_failed_nodes = Column(Boolean, default=True)
    mail_recipient = Column(String(255), default="")
    logs = Column(MutableList.as_mutable(PickleType), default=[])

    @hybrid_property
    def status(self) -> str:
        return "Running" if self.is_running else "Idle"

    @status.expression  # type: ignore
    def status(cls) -> str:  # noqa: N805
        return case([(cls.is_running, "Running")], else_="Idle")

    @property
    def progress(self) -> str:
        if self.is_running:
            return f"{self.completed}/{self.number_of_targets} ({self.failed} failed)"
        else:
            return "N/A"

    def compute_targets(self) -> Set[Device]:
        targets = set(self.devices)
        for pool in self.pools:
            targets |= set(pool.devices)
        self.number_of_targets = len(targets)
        db.session.commit()
        return targets

    def job_sources(self,
                    workflow: "Workflow",
                    subtype: str = "all") -> List["Job"]:
        return [
            x.source for x in self.sources
            if (subtype == "all" or x.subtype == subtype)
            and x.workflow == workflow
        ]

    def job_successors(self,
                       workflow: "Workflow",
                       subtype: str = "all") -> List["Job"]:
        return [
            x.destination for x in self.destinations
            if (subtype == "all" or x.subtype == subtype)
            and x.workflow == workflow
        ]

    def build_notification(self, results: dict, now: str) -> str:
        summary = [
            f"Job: {self.name} ({self.type})",
            f"Runtime: {now}",
            f'Status: {"PASS" if results["results"]["success"] else "FAILED"}',
        ]
        if "devices" in results[
                "results"] and not results["results"]["success"]:
            failed = "\n".join(device for device, device_results in
                               results["results"]["devices"].items()
                               if not device_results["success"])
            summary.append(f"FAILED\n{failed}")
            if not self.display_only_failed_nodes:
                passed = "\n".join(device for device, device_results in
                                   results["results"]["devices"].items()
                                   if device_results["success"])
                summary.append(f"\n\nPASS:\n{passed}")
        server_url = environ.get("ENMS_SERVER_ADDR", "http://SERVER_IP")
        results_url = f"{server_url}/automation/results/{self.id}/{now}"
        summary.append(f"Results: {results_url}")
        return "\n\n".join(summary)

    def notify(self, results: dict, time: str) -> None:
        fetch("Job", name=self.send_notification_method).try_run({
            "job": self.serialized,
            "results": self.results,
            "runtime": time,
            "result": results["results"]["success"],
            "content": self.build_notification(results, time),
        })

    def try_run(
        self,
        payload: Optional[dict] = None,
        targets: Optional[Set[Device]] = None,
        workflow: Optional["Workflow"] = None,
    ) -> Tuple[dict, str]:
        self.is_running, self.state, self.logs = True, {}, []
        db.session.commit()
        results: dict = {"results": {}}
        if not payload:
            payload = {}
        job_from_workflow_targets = bool(workflow and targets)
        if not targets and getattr(self, "use_workflow_targets", True):
            targets = self.compute_targets()
        has_targets = bool(targets)
        if has_targets and not job_from_workflow_targets:
            results["results"]["devices"] = {}
        now = str(datetime.now()).replace(" ", "-")
        logs = workflow.logs if workflow else self.logs
        logs.append(f"{self.type} {self.name}: Starting.")
        for i in range(self.number_of_retries + 1):
            self.completed = self.failed = 0
            db.session.commit()
            logs.append(f"Running {self.type} {self.name} (attempt n°{i + 1})")
            attempt = self.run(payload, job_from_workflow_targets, targets,
                               workflow)
            if has_targets and not job_from_workflow_targets:
                assert targets is not None
                for device in set(targets):
                    if not attempt["devices"][device.name]["success"]:
                        continue
                    results["results"]["devices"][
                        device.name] = attempt["devices"][device.name]
                    targets.remove(device)
                if not targets:
                    results["results"]["success"] = True
                    break
                else:
                    if self.number_of_retries:
                        results[f"Attempts {i + 1}"] = attempt
                    if i != self.number_of_retries:
                        sleep(self.time_between_retries)
                    else:
                        results["results"]["success"] = False
                        for device in targets:
                            results["results"]["devices"][
                                device.name] = attempt["devices"][device.name]
            else:
                if self.number_of_retries:
                    results[f"Attempts {i + 1}"] = attempt
                if attempt["success"] or i == self.number_of_retries:
                    results["results"] = attempt
                    break
                else:
                    sleep(self.time_between_retries)
        logs.append(f"{self.type} {self.name}: Finished.")
        self.results[now] = {**results, "logs": logs}
        self.is_running, self.state = False, {}
        self.completed = self.failed = 0
        db.session.commit()
        if not workflow and self.send_notification:
            self.notify(results, now)
        return results, now

    def get_results(
        self,
        payload: dict,
        device: Optional[Device] = None,
        workflow: Optional["Workflow"] = None,
        threaded: bool = False,
    ) -> dict:
        logs = workflow.logs if workflow else self.logs
        try:
            if device:
                logs.append(f"Running {self.type} on {device.name}.")
                results = self.job(payload, device)
                success = "SUCCESS" if results["success"] else "FAILURE"
                logs.append(
                    f"Finished running service on {device.name}. ({success})")
            else:
                results = self.job(payload)
        except Exception:
            if device:
                logs.append(
                    f"Finished running service on {device.name}. (FAILURE)")
            results = {
                "success": False,
                "result": chr(10).join(format_exc().splitlines()),
            }
        self.completed += 1
        self.failed += 1 - results["success"]
        if not threaded:
            db.session.commit()
        return results

    def device_run(
            self, args: Tuple[Device, dict, dict,
                              Optional["Workflow"]]) -> None:
        with controller.app.app_context():
            with session_scope() as session:
                device, results, payload, workflow = args
                device_result = self.get_results(payload, device, workflow,
                                                 True)
                session.merge(workflow or self)
                results["devices"][device.name] = device_result

    def run(
        self,
        payload: dict,
        job_from_workflow_targets: bool,
        targets: Optional[Set[Device]] = None,
        workflow: Optional["Workflow"] = None,
    ) -> dict:
        if job_from_workflow_targets:
            assert targets is not None
            device, = targets
            return self.get_results(payload, device, workflow)
        elif targets:
            results: dict = {"devices": {}}
            if self.multiprocessing:
                processes = min(len(targets), self.max_processes)
                pool = ThreadPool(processes=processes)
                pool.map(
                    self.device_run,
                    [(device, results, payload, workflow)
                     for device in targets],
                )
                pool.close()
                pool.join()
            else:
                results["devices"] = {
                    device.name: self.get_results(payload, device, workflow)
                    for device in targets
                }
            return results
        else:
            return self.get_results(payload)