Example #1
class Account(Base):
    name = db.Column(db.String)
    email = db.Column(db.String)
    github_user = db.Column(db.String, unique=True)
    last_push = db.Column(db.DateTime)
    last_email = db.Column(db.DateTime)
    watchlist = db.Column(ARRAY(db.String))
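The snippets on this page all declare PostgreSQL ARRAY columns. As a hedged illustration (not part of the original example, and assuming a configured Session for the model above), such a column can be filtered with the array operators:

# Hypothetical repository names, for illustration only.
# On PostgreSQL, ARRAY columns support containment (@>) and overlap (&&) operators.
watchers = session.query(Account).filter(
    Account.watchlist.contains(['octocat/hello-world'])).all()

either = session.query(Account).filter(
    Account.watchlist.overlap(['org/repo-a', 'org/repo-b'])).all()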
Example #2
class TestSet(BASE):

    __tablename__ = 'test_sets'

    id = sa.Column(sa.String(128), primary_key=True)
    description = sa.Column(sa.String(256))
    test_path = sa.Column(sa.String(256))
    driver = sa.Column(sa.String(128))
    additional_arguments = sa.Column(fields.ListField())
    cleanup_path = sa.Column(sa.String(128))
    meta = sa.Column(fields.JsonField())
    deployment_tags = sa.Column(ARRAY(sa.String(64)))

    tests = relationship('Test',
                         backref='test_set',
                         order_by='Test.name',
                         cascade='delete')

    @property
    def frontend(self):
        return {'id': self.id, 'name': self.description}

    @classmethod
    def get_test_set(cls, session, test_set):
        return session.query(cls)\
            .filter_by(id=test_set)\
            .first()
Example #3
class TemplateBase(Model):
    '''
    Basic metadata about a template

    Fields:
    - created_at: when the template was created
    - updated_at: when the template metadata was last updated
    - title: the displayed title or name of the template
    - description: a freetext description of the template
    '''

    __tablename__ = 'template_base'
    id = Column(db.Integer, primary_key=True)
    created_at = Column(db.DateTime)
    updated_at = Column(db.DateTime)
    title = Column(db.String(255))
    description = Column(db.Text)
    template_text = db.relationship('TemplateSection',
                                    cascade='all,delete',
                                    lazy='dynamic')
    template_placeholders = db.relationship('TemplatePlaceholders',
                                            cascade='all,delete',
                                            lazy='dynamic')
    published = Column(db.Boolean, default=False)
    section_order = Column(ARRAY(db.Integer))

    # created_by = ReferenceCol('users')

    def __init__(self, created_at, updated_at, title, description):
        self.created_at = created_at
        self.updated_at = updated_at
        self.title = title
        self.description = description
Example #4
class VoidedEdge(VoidedBase):

    __tablename__ = '_voided_edges'

    key = Column(BigInteger, primary_key=True, nullable=False)

    src_id = Column(
        Text,
        primary_key=True,
        nullable=False,
    )

    dst_id = Column(
        Text,
        primary_key=True,
        nullable=False,
    )

    created = Column(
        DateTime(timezone=True),
        nullable=False,
        server_default=text('now()'),
    )

    voided = Column(
        DateTime(timezone=True),
        nullable=False,
        server_default=text('now()'),
    )

    acl = Column(
        ARRAY(Text),
        default=list(),
    )

    system_annotations = Column(
        JSONB,
        default={},
    )

    properties = Column(
        JSONB,
        default={},
    )

    label = Column(
        Text,
        primary_key=True,
        nullable=False,
    )

    def __init__(self, edge):
        self.created = edge.created
        self.src_id = edge.src_id
        self.dst_id = edge.dst_id
        self.acl = edge.acl
        self.label = edge.label
        self.system_annotations = edge.system_annotations
        self.properties = edge.properties
Example #5
class User(db.Model, UserMixin, APIMixin):
    __tablename__ = 'users'
    __autoroutes__ = ['index', 'meta']

    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.Unicode, nullable=False, default='', index=True)
    pw_hash = db.Column(db.Unicode, nullable=False, default='', info={'public': False})
    first_name = db.Column(db.Unicode(50), nullable=False, default='')
    last_name = db.Column(db.Unicode(50), nullable=False, default='')
    profile_picture = db.Column(db.Unicode, nullable=False, default='', info={'public': False})
    description = db.Column(db.UnicodeText, nullable=False, default='')
    roles = db.Column(ARRAY(db.Unicode), default=[])

    posts = db.relationship('Post', backref=db.backref('author', lazy='joined', info={'public': True}), order_by='Post.id')

    def is_admin(self):
        return 'admin' in self.roles

    def full_name(self):
        return self.first_name + ' ' + self.last_name

    def abbr_name(self):
        return self.first_name[0] + '. ' + self.last_name

    def set_password(self, password):
        self.pw_hash = generate_password_hash(password)

    def check_password(self, password):
        return check_password_hash(self.pw_hash, password)

    # custom routes can use the route decorator
    @classmethod
    @route('/me')
    def me(cls):
        return jsonify(current_user.as_dict())

    @classmethod
    @route('/login', methods=['POST'])
    def login(cls):
        email = request.json.get('email')
        user = cls.query.filter(db.func.lower(User.email) == db.func.lower(email)).first()
        if user is None:
            abort(404, 'No user with this email address')
        if not user.check_password(request.json.get('password')):
            abort(403, 'This password doesn\'t work')
        login_user(user)
        return jsonify(user.as_dict())

    # the extra_field decorator is preferred, but more_json can also be used to add data to the response
    def more_json(self):
        return {
            'profile_picture_url': media_url(self.profile_picture) if self.profile_picture else None,
            'is_admin': self.is_admin(),
            'full_name': self.full_name(),
            'abbr_name': self.abbr_name()
        }
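A hedged sketch of the SQL-side counterpart to is_admin() above (assumes the same Flask-SQLAlchemy setup; not part of the original snippet):

# 'admin' = ANY (users.roles) -- find admins in the database rather than in Python.
admins = User.query.filter(User.roles.any('admin')).all()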
Example #6
class Flow(Model):
    __tablename__ = 'flow'

    id = Column(db.Integer, primary_key=True, index=True)
    flow_name = Column(db.Text, unique=True)
    contract = db.relationship('ContractBase', backref='flow', lazy='subquery')
    stage_order = Column(ARRAY(db.Integer))

    def __unicode__(self):
        return self.flow_name
Example #7
class SQLDag(DeclarativeBase):
    __tablename__ = 'sql_dag'

    id = Column(String(ID_LEN), primary_key=True)
    description = Column(Text)
    created = Column(DateTime, default=func.now())
    user_id = Column(Integer(), ForeignKey(models.User.id), nullable=False)
    enabled = Column(Boolean, default=True, nullable=False)
    start_date = Column(Date, nullable=False)
    end_date = Column(Date)
    schedule_interval = Column(String, nullable=False)
    retries = Column(Integer, default=2)
    retry_delay = Column(Integer, default=60 * 10)
    pool_id = Column(Integer(), ForeignKey(models.Pool.id))
    sla = Column(Integer, default=3600 * 5)
    # SQLAlchemy does not support foreign keys on ARRAY elements; a ForeignKey
    # passed as ARRAY's second argument would only be swallowed by `as_tuple`,
    # so the columns are declared as plain string arrays (cf. SQLSensor.parent_labels).
    drill_down_sql_sensors = Column(ARRAY(String()))
    specific_sql_sensors = Column(ARRAY(String()))
    extra = Column(String(5000))

    child_tasks = relationship(
        "SQLDagTask",
        back_populates="parent_sql_dag",
    )
    owner = relationship(
        models.User,
        foreign_keys=[user_id],
    )
    pool = relationship(
        models.Pool,
        foreign_keys=[pool_id],
    )

    def __repr__(self):
        return self.id
Example #8
class ClusterState(BASE):
    '''
    Represents clusters currently present in the system. Holds info about
    the deployment type, which is used in the redeployment process.

    Is linked with the TestSetToCluster entity, which implements the
    many-to-many relationship with TestSet.
    '''

    __tablename__ = 'cluster_state'

    id = sa.Column(sa.Integer, primary_key=True, autoincrement=False)
    deployment_tags = sa.Column(ARRAY(sa.String(64)))
Example #9
class Category(Model):
    '''Category model for opportunities and Vendor signups

    Categories are based on the codes created by the `National Institute
    of Government Purchasing (NIGP) <http://www.nigp.org/eweb/StartPage.aspx>`_.
    The names of the categories have been re-written a bit to make them more
    human-readable and in some cases a bit more modern.

    Attributes:
        id: Primary key unique ID
        nigp_codes: Array of integers referring to NIGP codes.
        category: parent top-level category
        subcategory: NIGP designated subcategory name
        category_friendly_name: Rewritten, more human-readable subcategory name
        examples: Pipe-delimited examples of items that fall in each subcategory
        examples_tsv: TSVECTOR of the examples for that subcategory

    See Also:
        The :ref:`nigp-importer` contains more information about how NIGP codes
        are imported into the system.
    '''
    __tablename__ = 'category'

    id = Column(db.Integer, primary_key=True, index=True)
    nigp_codes = Column(ARRAY(db.Integer()))
    category = Column(db.String(255))
    subcategory = Column(db.String(255))
    category_friendly_name = Column(db.Text)
    examples = Column(db.Text)
    examples_tsv = Column(TSVECTOR)

    def __unicode__(self):
        return '{sub} (in {main})'.format(sub=self.category_friendly_name,
                                          main=self.category)

    @classmethod
    def parent_category_query_factory(cls):
        '''Query factory to return a query of all of the distinct top-level categories
        '''
        return db.session.query(db.distinct(
            cls.category).label('category')).order_by('category')

    @classmethod
    def query_factory(cls):
        '''Query factory that returns all category/subcategory pairs
        '''
        return cls.query
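A hedged usage sketch for the Category model above (the NIGP code value is made up for illustration; assumes the same Flask-SQLAlchemy setup the query factories rely on):

# Subcategories tagged with a given NIGP code; `contains` renders the
# PostgreSQL array containment operator (@>).
matches = Category.query.filter(Category.nigp_codes.contains([906])).all()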
Example #10
class OldNode(Base):
    __tablename__ = 'nodes'
    __table_args__ = (UniqueConstraint('node_id', name='_node_id_uc'), )
    key = Column(Integer, primary_key=True)
    node_id = Column(Text, nullable=False)
    label = Column(Text, nullable=False)
    created = Column(DateTime(timezone=True), nullable=False)
    acl = Column(ARRAY(Text))
    system_annotations = Column(JSONB, default={})
    properties = Column(JSONB, default={})
    edges_out = relationship("OldEdge", foreign_keys=[OldEdge.src_id])
    edges_in = relationship("OldEdge", foreign_keys=[OldEdge.dst_id])

    def get_edges(self):
        for edge_in in self.edges_in:
            yield edge_in
        for edge_out in self.edges_out:
            yield edge_out
Example #11
class ClusterTestingPattern(BASE):
    '''
    Stores a cluster's testing pattern for test sets and tests.
    '''

    __tablename__ = 'cluster_testing_pattern'

    cluster_id = sa.Column(sa.Integer,
                           sa.ForeignKey('cluster_state.id'),
                           primary_key=True)

    test_set_id = sa.Column(sa.String(128),
                            sa.ForeignKey('test_sets.id'),
                            primary_key=True)

    tests = sa.Column(ARRAY(sa.String(512)))

    test_set = relationship('TestSet')
Example #12
class Gallery(db.Model, APIMixin):
    __tablename__ = 'galleries'
    __autoroutes__ = ['index', 'get', 'post', 'put', 'delete', 'meta']

    id = db.Column(db.Integer, primary_key=True)
    project_id = db.Column(db.Integer, db.ForeignKey(Project.id, ondelete='CASCADE'), index=True, info={'set_by': 'json'})
    title = db.Column(db.Unicode, nullable=False, default='', info={'set_by': 'json'})
    image_dir = db.Column(db.Unicode, nullable=False, default='', info={'set_by': 'json'})
    images = db.Column(ARRAY(db.Unicode), nullable=False, default=[], info={'set_by': 'server'})

    @setter('images')
    def set_images(self, name, value):
        self.images = image_url_array(self.image_dir)

    @staticmethod
    @authorizes('post', 'put', 'delete')
    def authorize_changes(resource):
        return current_user.is_admin()
Example #13
class SQLSensor(DeclarativeBase):
    __tablename__ = 'sql_sensor'

    id = Column(Integer, primary_key=True)
    label = Column(String, unique=True, nullable=False)
    description = Column(Text)
    created = Column(DateTime, default=func.now())
    user_id = Column(Integer(), ForeignKey(models.User.id), nullable=False)
    enabled = Column(Boolean, default=True, nullable=False)
    ttl = Column(Integer, default=3600 * 24 * 7, nullable=False)
    timeout = Column(Integer, default=3600 * 3, nullable=False)
    # parent_labels = Column(ARRAY(String()), ForeignKey('sql_sensor.label'))
    parent_labels = Column(ARRAY(String()))
    cross_check = Column(Boolean, default=False, nullable=False)
    conn_id = Column(Integer(),
                     ForeignKey(models.Connection.id),
                     nullable=False)
    pool_id = Column(Integer(), ForeignKey(models.Pool.id))
    main_table = Column(String)
    poke_interval = Column(Integer, default=60, nullable=False)
    main_argument = Column(String, nullable=False, default='')
    sql = Column(Text, nullable=False)
    _extra = Column('extra', String(5000))

    owner = relationship(models.User, foreign_keys=[user_id])
    connection = relationship(models.Connection, foreign_keys=[conn_id])
    pool = relationship(models.Pool, foreign_keys=[pool_id])
    positive_int = CheckConstraint(and_(
        ttl > 0,
        timeout > 0,
    ))

    # todo: implement check or relationship for parent_labels
    # todo: implement getter and setter for _extra

    def __repr__(self):
        return self.label
Example #14
class Opportunity(Model):
    '''Base Opportunity Model -- the central point for Beacon

    The Beacon model is centered around three dates:
    :py:attr:`~purchasing.models.front.Opportunity.planned_publish`,
    :py:attr:`~purchasing.models.front.Opportunity.planned_submission_start`,
    and :py:attr:`~purchasing.models.front.Opportunity.planned_submission_end`.
    The publish date is when approved opportunities appear on Beacon; it is also
    when vendors are notified via email.

    Attributes:
        id: Primary key unique ID
        title: Title of the Opportunity
        description: Short (maximum 500-word) description of the opportunity
        planned_publish: Date when the opportunity should show up on Beacon
        planned_submission_start: Date when vendors can begin submitting
            responses to the opportunity
        planned_submission_end: Deadline for submitted responses to the
            Opportunity
        vendor_documents_needed: Array of integers that relate to
            :py:class:`~purchasing.models.front.RequiredBidDocument` ids
        is_public: True if opportunity is approved (publicly visible), False otherwise
        is_archived: True if opportunity is archived (not visible), False otherwise
        published_at: Date when an alert email was sent out to relevant vendors
        publish_notification_sent: True if the notification was sent, False otherwise
        department_id: ID of primary :py:class:`~purchasing.models.users.Department`
            for this opportunity
        department: Sqlalchemy relationship to primary
            :py:class:`~purchasing.models.users.Department`
            for this opportunity
        contact_id: ID of the :py:class:`~purchasing.models.users.User` for this opportunity
        contact: Sqlalchemy relationship to :py:class:`~purchasing.models.users.User`
            for this opportunity
        categories: Many-to-many relationship of the
            :py:class:`~purchasing.models.front.Category` objects
            for this opportunity
        opportunity_type_id: ID of the :py:class:`~beacon.models.front.OpportunityType`
        opportunity_type: Sqlalchemy relationship to the :py:class:`~beacon.models.front.OpportunityType`

    See Also:
        For more on the Conductor <--> Beacon relationship, look at the
        :py:func:`~purchasing.conductor.handle_form()` Conductor utility method and the
        :py:class:`~purchasing.conductor.forms.PostOpportunityForm` Conductor Form
    '''
    __tablename__ = 'opportunity'

    id = Column(db.Integer, primary_key=True)
    title = Column(db.String(255))
    description = Column(db.Text)
    planned_publish = Column(db.DateTime, nullable=False)
    planned_submission_start = Column(db.DateTime, nullable=False)
    planned_submission_end = Column(db.DateTime, nullable=False)
    vendor_documents_needed = Column(ARRAY(db.Integer()))
    is_public = Column(db.Boolean(), default=False)
    is_archived = Column(db.Boolean(), default=False, nullable=False)

    published_at = Column(db.DateTime, nullable=True)
    publish_notification_sent = Column(db.Boolean,
                                       default=False,
                                       nullable=False)

    department_id = ReferenceCol('department',
                                 ondelete='SET NULL',
                                 nullable=True)
    department = db.relationship('Department',
                                 backref=backref('opportunities',
                                                 lazy='dynamic'))

    contact_id = ReferenceCol('users', ondelete='SET NULL')
    contact = db.relationship('User',
                              backref=backref('opportunities', lazy='dynamic'),
                              foreign_keys='Opportunity.contact_id')

    categories = db.relationship(
        'Category',
        secondary=category_opportunity_association_table,
        backref='opportunities',
        collection_class=set)

    opportunity_type_id = ReferenceCol('opportunity_type',
                                       ondelete='SET NULL',
                                       nullable=True)
    opportunity_type = db.relationship(
        'OpportunityType',
        backref=backref('opportunities', lazy='dynamic'),
    )

    @classmethod
    def create(cls, data, user, documents, publish=False):
        '''Create a new opportunity

        Arguments:
            data: dictionary of fields needed to populate new
                opportunity object
            user: :py:class:`~purchasing.models.users.User` object
                creating the new opportunity
            documents: The documents FieldList from the
                :py:class:`~purchasing.forms.front.OpportunityForm`

        Keyword Arguments:
            publish: Boolean as to whether to publish this document. If
                True, it will set ``is_public`` to True.

        See Also:
            The :py:class:`~purchasing.forms.front.OpportunityForm`
            and :py:class:`~purchasing.forms.front.OpportunityDocumentForm`
            have more information about the documents.

        '''
        opportunity = Opportunity(**data)

        current_app.logger.info(
            '''BEACON NEW - New Opportunity Created: ID: {} | Department: {} | Title: {} | Publish Date: {} | Submission Start Date: {} | Submission End Date: {}
            '''.format(
                opportunity.id,
                opportunity.department.name if opportunity.department else '',
                opportunity.title.encode('ascii', 'ignore'),
                str(opportunity.planned_publish),
                str(opportunity.planned_submission_start),
                str(opportunity.planned_submission_end)))

        if not (user.is_conductor() or publish):
            # only send 'your post has been sent/a new post needs review'
            # emails when 1. the submitter isn't from OMB and 2. they are
            # saving a draft as opposed to publishing the opportunity
            opportunity.notify_approvals(user)

        opportunity._handle_uploads(documents)
        opportunity._publish(publish)

        return opportunity

    def raw_update(self, **kwargs):
        '''Performs a basic update based on the passed kwargs.

        Arguments:
            **kwargs: Keyword arguments of fields to be updated in
                the existing Opportunity model
        '''
        super(Opportunity, self).update(**kwargs)

    def update(self, data, user, documents, publish=False):
        '''Performs an update, uploads new documents, and publishes

        Arguments:
            data: dictionary of fields needed to populate new
                opportunity object
            user: :py:class:`~purchasing.models.users.User` object
                updating the opportunity
            documents: The documents FieldList from the
                :py:class:`~purchasing.forms.front.OpportunityForm`

        Keyword Arguments:
            publish: Boolean as to whether to publish this document. If
                True, it will set ``is_public`` to True.
        '''
        data.pop('publish_notification_sent', None)
        for attr, value in data.iteritems():
            setattr(self, attr, value)

        current_app.logger.info(
            '''BEACON Update - Opportunity Updated: ID: {} | Title: {} | Publish Date: {} | Submission Start Date: {} | Submission End Date: {}
            '''.format(self.id, self.title.encode('ascii', 'ignore'),
                       str(self.planned_publish),
                       str(self.planned_submission_start),
                       str(self.planned_submission_end)))

        self._handle_uploads(documents)
        self._publish(publish)

    @property
    def is_published(self):
        '''Determine if an opportunity can be displayed

        Returns:
            True if the planned publish date is before or on today,
            and the opportunity is approved, False otherwise
        '''
        return self.coerce_to_date(
            self.planned_publish) <= localize_today() and self.is_public

    @property
    def is_upcoming(self):
        '''Determine if an opportunity is upcoming

        Returns:
            True if the planned publish date is before or on today, is approved,
            is not accepting submissions, and is not closed; False otherwise
        '''
        return self.coerce_to_date(self.planned_publish) <= localize_today() and \
            not self.is_submission_start and not self.is_submission_end and self.is_public

    @property
    def is_submission_start(self):
        '''Determine if the opportunity is accepting submissions

        Returns:
            True if the submission start date and planned publish date are
            before or on today, is approved, and the opportunity is not closed;
            False otherwise
        '''
        return self.coerce_to_date(self.planned_submission_start) <= localize_today() and \
            self.coerce_to_date(self.planned_publish) <= localize_today() and \
            not self.is_submission_end and self.is_public

    @property
    def is_submission_end(self):
        '''Determine if an opportunity is closed to new submissions

        Returns:
            True if the submission end date is on or before today,
            and it is approved
        '''
        return pytz.UTC.localize(self.planned_submission_end).astimezone(
            current_app.config['DISPLAY_TIMEZONE']
        ) <= localize_now() and \
            self.is_public

    @property
    def has_docs(self):
        '''True if the opportunity has at least one document, False otherwise
        '''
        return self.opportunity_documents.count() > 0

    def estimate_submission_start(self):
        '''Returns the month/year based on submission start date
        '''
        return self.planned_submission_start.strftime('%B %d, %Y')

    def estimate_submission_end(self):
        '''Returns the localized date and time based on submission end date
        '''
        return pytz.UTC.localize(self.planned_submission_end).astimezone(
            current_app.config['DISPLAY_TIMEZONE']).strftime(
                '%B %d, %Y at %I:%M%p %Z')

    def can_view(self, user):
        '''Check if a user can see opportunity detail

        Arguments:
            user: A :py:class:`~purchasing.models.users.User` object

        Returns:
            Boolean indicating if the user can view this opportunity
        '''
        return False if user.is_anonymous() and not self.is_published else True

    def can_edit(self, user):
        '''Check if a user can edit the contract

        Arguments:
            user: A :py:class:`~purchasing.models.users.User` object

        Returns:
            Boolean indicating if the user can edit this opportunity.
            Conductors, the opportunity creator, and the primary opportunity
            contact can all edit the opportunity before it is published. After
            it is published, only conductors can edit it.
        '''
        if self.is_public and user.role.name in ('conductor', 'admin',
                                                 'superadmin'):
            return True
        elif not self.is_public and \
            (user.role.name in ('conductor', 'admin', 'superadmin') or
                user.id in (self.created_by_id, self.contact_id)):
            return True
        return False

    def coerce_to_date(self, field):
        '''Coerces the input field to a datetime.date object

        Arguments:
            field: A datetime.datetime or datetime.date object

        Returns:
            A datetime.date object
        '''
        if isinstance(field, datetime.datetime):
            return field.date()
        if isinstance(field, datetime.date):
            return field
        return field

    def get_vendor_emails(self):
        '''Return list of all signed up vendors
        '''
        return [i.email for i in self.vendors]

    def has_vendor_documents(self):
        '''Returns a Boolean for whether there are required bid documents

        See Also:
            :py:class:`~purchasing.models.front.RequiredBidDocument`
        '''
        return self.vendor_documents_needed and len(
            self.vendor_documents_needed) > 0

    def get_vendor_documents(self):
        '''Returns a list of documents that the vendor will need to provide

        See Also:
            :py:class:`~purchasing.models.front.RequiredBidDocument`
        '''
        if self.has_vendor_documents():
            return RequiredBidDocument.query.filter(
                RequiredBidDocument.id.in_(
                    self.vendor_documents_needed)).all()
        return []

    def get_events(self):
        '''Returns the opportunity dates as an ordered list for rendering
        '''
        return [{
            'event': 'bid_submission_start',
            'classes': 'event event-submission_start',
            'date': self.estimate_submission_start(),
            'description': 'Opportunity opens for submissions.'
        }, {
            'event': 'bid_submission_end',
            'classes': 'event event-submission_end',
            'date': self.estimate_submission_end(),
            'description': 'Deadline to submit proposals.'
        }]

    def _handle_uploads(self, documents):
        opp_documents = self.opportunity_documents.all()

        for document in documents.entries:
            if document.title.data == '':
                continue

            _id = self.id if self.id else random_id(6)

            _file = document.document.data
            if _file.filename in [i.name for i in opp_documents]:
                continue

            filename, filepath = document.upload_document(_id)
            if filepath:
                self.opportunity_documents.append(
                    OpportunityDocument(name=document.title.data,
                                        href=filepath))

    def _publish(self, publish):
        if not self.is_public:
            if publish:
                self.is_public = True

    def notify_approvals(self, user):
        '''Send the approval notifications to everyone with approval rights

        Arguments:
            user: A :py:class:`~purchasing.models.users.User` object
        '''
        Notification(to_email=[user.email],
                     subject='Your post has been sent to OMB for approval',
                     html_template='beacon/emails/staff_postsubmitted.html',
                     txt_template='beacon/emails/staff_postsubmitted.txt',
                     opportunity=self).send(multi=True)

        Notification(to_email=db.session.query(User.email).join(
            Role, User.role_id == Role.id).filter(
                Role.name.in_(['conductor', 'admin', 'superadmin'])).all(),
                     subject='A new Beacon post needs review',
                     html_template='beacon/emails/admin_postforapproval.html',
                     txt_template='beacon/emails/admin_postforapproval.txt',
                     opportunity=self).send(multi=True)

    def get_category_ids(self):
        '''Returns the IDs from the Opportunity's related categories
        '''
        return [i.id for i in self.categories]

    def send_publish_email(self):
        '''Sends the "new opportunity available" email to subscribed vendors

        If a new Opportunity is created and it has a publish date before or
        on today's date, it will trigger an immediate publish email send. This
        operates in a very similar way to the nightly
        :py:class:`~purchasing.jobs.beacon_nightly.BeaconNewOppotunityOpenJob`.
        It will build a list of all vendors signed up to the Opportunity
        or to any of the categories that describe the Opportunity.
        '''
        if self.is_published and not self.publish_notification_sent:
            vendors = Vendor.query.filter(
                Vendor.categories.any(Category.id.in_(
                    self.get_category_ids()))).all()

            Notification(
                to_email=[i.email for i in vendors],
                subject='A new City of Pittsburgh opportunity from Beacon!',
                html_template='beacon/emails/newopp.html',
                txt_template='beacon/emails/newopp.txt',
                opportunity=self).send(multi=True)

            self.publish_notification_sent = True
            self.published_at = datetime.datetime.utcnow()

            current_app.logger.info(
                '''BEACON PUBLISHED:  ID: {} | Title: {} | Publish Date: {} | Submission Start Date: {} | Submission End Date: {}
                '''.format(self.id, self.title.encode('ascii', 'ignore'),
                           str(self.planned_publish),
                           str(self.planned_submission_start),
                           str(self.planned_submission_end)))
            return True
        return False
Example #15
class Test(BASE):

    __tablename__ = 'tests'

    STATES = ('wait_running', 'running', 'failure', 'success', 'error',
              'stopped', 'disabled')

    id = sa.Column(sa.Integer(), primary_key=True)
    name = sa.Column(sa.String(512))
    title = sa.Column(sa.String(512))
    description = sa.Column(sa.Text())
    duration = sa.Column(sa.String(512))
    message = sa.Column(sa.Text())
    traceback = sa.Column(sa.Text())
    status = sa.Column(sa.Enum(*STATES, name='test_states'))
    step = sa.Column(sa.Integer())
    time_taken = sa.Column(sa.Float())
    meta = sa.Column(fields.JsonField())
    deployment_tags = sa.Column(ARRAY(sa.String(64)))

    test_run_id = sa.Column(sa.Integer(),
                            sa.ForeignKey('test_runs.id', ondelete='CASCADE'))

    test_set_id = sa.Column(sa.String(length=128),
                            sa.ForeignKey('test_sets.id', ondelete='CASCADE'))

    @property
    def frontend(self):
        return {
            'id': self.name,
            'testset': self.test_set_id,
            'name': self.title,
            'description': self.description,
            'duration': self.duration,
            'message': self.message,
            'step': self.step,
            'status': self.status,
            'taken': self.time_taken
        }

    @classmethod
    def add_result(cls, session, test_run_id, test_name, data):
        session.query(cls).\
            filter_by(name=test_name, test_run_id=test_run_id).\
            update(data, synchronize_session=False)

    @classmethod
    def update_running_tests(cls, session, test_run_id, status='stopped'):
        session.query(cls). \
            filter(cls.test_run_id == test_run_id,
                   cls.status.in_(('running', 'wait_running'))). \
            update({'status': status}, synchronize_session=False)

    @classmethod
    def update_test_run_tests(cls,
                              session,
                              test_run_id,
                              tests_names,
                              status='wait_running'):
        session.query(cls). \
            filter(cls.name.in_(tests_names),
                   cls.test_run_id == test_run_id). \
            update({'status': status}, synchronize_session=False)

    def copy_test(self, test_run, predefined_tests):
        '''
        Copies this test for a newly created test_run.
        '''
        new_test = self.__class__()
        mapper = object_mapper(self)
        primary_keys = set([col.key for col in mapper.primary_key])
        for column in mapper.iterate_properties:
            if column.key not in primary_keys:
                setattr(new_test, column.key, getattr(self, column.key))
        new_test.test_run_id = test_run.id
        if predefined_tests and new_test.name not in predefined_tests:
            new_test.status = 'disabled'
        else:
            new_test.status = 'wait_running'
        return new_test
Example #16
class CommonBase(object):

    _session_hooks_before_insert = []
    _session_hooks_before_update = []
    _session_hooks_before_delete = []

    # ======== Columns ========
    created = Column(
        DateTime(timezone=True),
        nullable=False,
        server_default=text('now()'),
    )

    acl = Column(
        ARRAY(Text),
        default=list(),
    )

    _sysan = Column(
        # WARNING: Do not update this column directly. See
        # `.system_annotations`
        JSONB,
        default={},
    )

    _props = Column(
        # WARNING: Do not update this column directly.
        # See `.properties` or `.props`
        JSONB,
        default={},
    )

    @classmethod
    def get_label(cls):
        return getattr(cls, '__label__', cls.__name__.lower())

    # ======== Table Attributes ========
    @declared_attr
    def __mapper_args__(cls):
        name = cls.__name__
        if name in abstract_classes:
            pjoin = polymorphic_union(
                {
                    scls.__tablename__: scls.__table__
                    for scls in cls.get_subclasses()
                }, 'type')
            return {
                'polymorphic_identity': name,
                'with_polymorphic': ('*', pjoin),
            }
        else:
            return {
                'polymorphic_identity': name,
                'concrete': True,
            }

    def __init__(self, *args, **kwargs):
        # NotImplemented is not an exception; NotImplementedError is the intended one
        raise NotImplementedError()

    # ======== Properties ========
    @hybrid_property
    def properties(self):
        return PropertiesDict(self)

    @properties.setter
    def properties(self, properties):
        """To set each property, _set_property is called, which calls
        __setitem__ which calls setattr(). The final call to setattr
        will pass through any validation defined in a subclass
        property setter.

        """
        for key, val in sanitize(properties).items():
            setattr(self, key, val)

    @hybrid_property
    def props(self):
        """Alias of properties

        """
        return self.properties

    @props.setter
    def props(self, properties):
        """Alias of properties

        """
        self.properties = properties

    @hybrid_property
    def sysan(self):
        """Alias of properties

        """
        return self.system_annotations

    @sysan.setter
    def sysan(self, sysan):
        """Alias of properties

        """
        self.system_annotations = sysan

    def _set_property(self, key, val):
        """Property dict is cloned (to make sure that SQLAlchemy flushes it)
        before setting the key value pair.

        """
        if not self.has_property(key):
            raise KeyError('{} has no property {}'.format(type(self), key))
        self._props = {k: v for k, v in self._props.iteritems()}
        self._props[key] = val

    def _get_property(self, key):
        """If the property is defined in the model but not present on the
        instance, return None, else return the value associated with key.

        """
        if not self.has_property(key):
            raise KeyError('{} has no property {}'.format(type(self), key))
        if key not in self._props:
            return None
        return self._props[key]

    def property_template(self, properties={}):
        """Returns a dictionary of {key: None} templating all of the
        properties defined on the model.

        """
        temp = {k: None for k in self.get_property_list()}
        temp.update(properties)
        return temp

    def __getitem__(self, key):
        """Returns value corresponding to key in _props

        """
        return getattr(self, key)

    def __setitem__(self, key, val):
        """Sets value corresponding to key in _props.  This calls the model's
        hybrid_property setter method in the instance's model class.

        """
        setattr(self, key, val)

    @classmethod
    def get_property_list(cls):
        """Returns a list of hybrid_properties defined on the subclass model

        """
        return [
            attr for attr in dir(cls) if attr in cls.__dict__
            and isinstance(cls.__dict__[attr], hybrid_property)
            and getattr(getattr(cls, attr), '_is_pg_property', True)
        ]

    @classmethod
    def has_property(cls, key):
        """Returns boolean if key is a property defined on the subclass model

        """
        return key in cls.get_property_list()

    # ======== Label ========
    @hybrid_property
    def label(self):
        """Custom label on the model

        .. note: This is not the polymorphic identity, see `_type`
        """
        return self.get_label()

    @label.setter
    def label(self, label):
        """Custom setter as an application level ban from changing labels.

        """
        if not isinstance(self.label, Column)\
           and self.get_label() is not None\
           and self.get_label() != label:
            raise AttributeError('Cannot change label from {} to {}'.format(
                self.get_label(), label))

    # ======== System Annotations ========
    @hybrid_property
    def system_annotations(self):
        """Returns a system annotation proxy pointing to _sysan.  Any updates
        to this dict will be proxied to the model's _sysan JSONB
        column.

        """
        return SystemAnnotationDict(self)

    @system_annotations.setter
    def system_annotations(self, sysan):
        """Directly set the model's _sysan column with dict sysan.

        """
        self._sysan = sanitize(sysan)

    def get_name(self):
        """Convenience wrapper for getting class name
        """
        return type(self).__name__

    def get_session(self):
        """Returns the session an object is bound to if bound to a session

        """
        return object_session(self)

    def merge(self, acl=None, system_annotations={}, properties={}):
        """Merge the model's system_annotations and properties.

        .. note: acl will be overwritten, merging acls is not supported
        """
        self.system_annotations.update(system_annotations)
        for key, value in properties.items():
            setattr(self, key, value)
        if acl is not None:
            self.acl = acl

    def _merge_onto_existing(self, old_props, old_sysan):
        # properties
        temp = {}
        temp.update(old_props)
        temp.update(self._props)
        self._props = temp

        # system annotations
        temp = {}
        temp.update(old_sysan)
        temp.update(self._sysan)
        self._sysan = temp

    def _get_clean_session(self, session=None):
        """Create a new session from an objects session using the same
        connection to allow for clean queries against the database

        """
        if not session:
            session = self.get_session()
        Clean = sessionmaker()
        Clean.configure(bind=session.bind)
        return Clean()

    def _validate(self, session=None):
        """Final validation currently only includes checking nonnull
        properties

        """
        for key in getattr(self, '__nonnull_properties__', []):
            assert self.properties[key] is not None, (
                "Null value in key '{}' violates non-null constraint for {}."
            ).format(key, self)

    @classmethod
    def get_pg_properties(cls):
        return cls.__pg_properties__
Example #17
class VersionedNode(Base):

    __tablename__ = 'versioned_nodes'
    __table_args__ = (
        Index('submitted_node_id_idx', 'node_id'),
        Index('submitted_node_gdc_versions_idx', 'node_id'),
    )

    def __repr__(self):
        return ("<VersionedNode(key={}, label='{}', node_id='{}')>".format(
            self.key, self.label, self.node_id))

    key = Column(BigInteger, primary_key=True, nullable=False)

    label = Column(
        Text,
        nullable=False,
    )

    node_id = Column(
        Text,
        nullable=False,
    )

    project_id = Column(
        Text,
        nullable=False,
    )

    gdc_versions = Column(ARRAY(Text))

    created = Column(
        DateTime(timezone=True),
        nullable=False,
    )

    versioned = Column(
        DateTime(timezone=True),
        nullable=False,
        server_default=text('now()'),
    )

    acl = Column(
        ARRAY(Text),
        default=list(),
    )

    system_annotations = Column(
        JSONB,
        default={},
    )

    properties = Column(
        JSONB,
        default={},
    )

    neighbors = Column(ARRAY(Text))

    @staticmethod
    def clone(node):
        return VersionedNode(
            label=copy(node.label),
            node_id=copy(node.node_id),
            project_id=copy(node._props.get('project_id')),
            created=copy(node.created),
            acl=copy(node.acl),
            system_annotations=copy(node.system_annotations),
            properties=copy(node.properties),
            neighbors=copy([edge.dst_id for edge in node.edges_out] +
                           [edge.src_id for edge in node.edges_in]))
Example #18
def chat_list(request):

    current_page = int(request.GET.get("page", 1))

    if request.matched_route.name.startswith("chat_list_unanswered"):
        current_status = "unanswered"
    elif request.matched_route.name.startswith("chat_list_ongoing"):
        current_status = "ongoing"
    elif request.matched_route.name.startswith("chat_list_ended"):
        current_status = "ended"
    else:
        current_status = None

    if request.matched_route.name.startswith("chat_list_label"):
        current_label = request.matchdict["label"].lower().strip().replace(
            " ", "_")
        if current_label != request.matchdict["label"]:
            raise HTTPFound(
                request.route_path("chat_list_label", label=current_label))
    else:
        current_label = None

    chats = Session.query(ChatUser, Chat, Message).join(Chat).outerjoin(
        Message,
        Message.id == Session.query(func.min(Message.id), ).filter(
            Message.chat_id == Chat.id, ).correlate(Chat),
    ).filter(ChatUser.user_id == request.user.id, )

    chat_count = Session.query(func.count('*')).select_from(ChatUser).filter(
        ChatUser.user_id == request.user.id, )

    if current_status == "unanswered":
        # Note: `is not None` on a Column is evaluated by Python, not SQL;
        # isnot(None) emits the intended NULL check.
        chats = chats.filter(
            and_(
                Chat.last_user_id.isnot(None),
                Chat.last_user_id != request.user.id,
            ))
        chat_count = chat_count.join(Chat).filter(
            and_(
                Chat.last_user_id.isnot(None),
                Chat.last_user_id != request.user.id,
            ))
    elif current_status is not None:
        chats = chats.filter(Chat.status == current_status)
        chat_count = chat_count.join(Chat).filter(
            Chat.status == current_status)

    if current_label is not None:
        label_array = cast([current_label], ARRAY(Unicode(500)))
        chats = chats.filter(ChatUser.labels.contains(label_array))
        chat_count = chat_count.filter(ChatUser.labels.contains(label_array))

    chats = chats.order_by(Chat.updated.desc()).limit(25).offset(
        (current_page - 1) * 25).all()

    # 404 on empty pages, unless it's the first page.
    if current_page != 1 and len(chats) == 0:
        raise HTTPNotFound

    chat_count = chat_count.scalar()

    if request.matchdict.get("fmt") == "json":
        return render_to_response("json", {
            "chats": [{
                "chat_user": chat_user,
                "chat": chat,
                "prompt": prompt,
            } for chat_user, chat, prompt in chats],
            "chat_count":
            chat_count,
        },
                                  request=request)

    paginator = paginate.Page(
        [],
        page=current_page,
        items_per_page=25,
        item_count=chat_count,
        url=paginate.PageURL(
            request.route_path(request.matched_route.name,
                               label=current_label), {"page": current_page}),
    )

    labels = (Session.query(
        func.unnest(ChatUser.labels),
        func.count("*")).filter(ChatUser.user_id == request.user.id).group_by(
            func.unnest(ChatUser.labels)).order_by(
                func.count("*").desc(),
                func.unnest(ChatUser.labels).asc()).all())

    template = "layout2/chat_list.mako" if request.user.layout_version == 2 else "chat_list.mako"
    return render_to_response(template, {
        "chats": chats,
        "paginator": paginator,
        "labels": labels,
        "current_status": current_status,
        "current_label": current_label,
        "symbols": symbols,
    },
                              request=request)
Example #19
class Flow(Model):
    '''Model for flows

    A Flow is the series of :py:class:`~purchasing.data.stages.Stage` objects
    that a contract will go through as part of Conductor. It is meant to be
    as configurable and flexible as possible. Because of the nature of Flows,
    it is best to not allow them to be edited or deleted once they are in use.
    Instead, there is an ``is_archived`` flag. This is because of the difficulty
    of knowing how to handle contracts that are currently in the middle of a flow
    if that flow is edited. Instead, it is better to create a new flow.

    Attributes:
        id: Primary key unique ID
        flow_name: Name of this flow
        contract: Many-to-one relationship with
            :py:class:`~purchasing.data.contracts.ContractBase` (many
            contracts can share a flow)
        stage_order: Array of stage_id integers
        is_archived: Boolean of whether the flow is archived or active
    '''
    __tablename__ = 'flow'

    id = Column(db.Integer, primary_key=True, index=True)
    flow_name = Column(db.Text, unique=True)
    contract = db.relationship('ContractBase', backref='flow', lazy='subquery')
    stage_order = Column(ARRAY(db.Integer))
    is_archived = Column(db.Boolean, default=False, nullable=False)

    def __unicode__(self):
        return self.flow_name

    @classmethod
    def all_flow_query_factory(cls):
        '''Query factory that returns query of all flows
        '''
        return cls.query

    @classmethod
    def nonarchived_query_factory(cls):
        '''Query factory that returns query of all non-archived flows
        '''
        return cls.query.filter(cls.is_archived == False)

    def get_ordered_stages(self):
        '''Turns the flow's stage_order attribute into Stage objects

        Returns:
            Ordered list of :py:class:`~purchasing.data.stages.Stage` objects
            in the flow's ``stage_order``
        '''
        return [Stage.query.get(i) for i in self.stage_order]

    def create_contract_stages(self, contract):
        '''Creates new rows in contract_stage table.

        Extracts the rows out of the given flow, and creates new rows
        in the contract_stage table for each of them.

        If the stages already exist, that means that the contract
        is switching back into a flow that it had already been in.
        To handle this, the "revert" flag is set to true, which
        should signal to a downstream process to roll the stages
        back to the first one in the current flow.

        Arguments:
            contract: A :py:class:`~purchasing.data.contracts.ContractBase` object

        Returns:
            A three-tuple of (the flow's stage order, a list of the flow's
            :py:class:`~purchasing.data.contract_stages.ContractStage` objects,
            whether we are "reverting")

        '''
        revert = False
        contract_stages = []
        for stage_id in self.stage_order:
            try:
                contract_stages.append(ContractStage.create(
                    contract_id=contract.id,
                    flow_id=self.id,
                    stage_id=stage_id,
                ))
            except (IntegrityError, FlushError):
                revert = True
                db.session.rollback()
                stage = ContractStage.query.filter(
                    ContractStage.contract_id == contract.id,
                    ContractStage.flow_id == self.id,
                    ContractStage.stage_id == stage_id
                ).first()
                if stage:
                    contract_stages.append(stage)
                else:
                    # re-raise the original error; IntegrityError cannot be
                    # constructed without arguments
                    raise

            except Exception:
                raise

        contract.flow_id = self.id
        db.session.commit()

        return self.stage_order, contract_stages, revert

    def _build_row(self, row, exited, data_dict):
        try:
            data_dict[row.contract_id]['stages'].append({
                'name': row.stage_name, 'id': row.stage_id,
                'entered': localize_datetime(row.entered).isoformat(),
                'exited': localize_datetime(exited).isoformat(),
                'seconds': max([(exited - row.entered).total_seconds(), 0]),
            })
        except KeyError:
            data_dict[row.contract_id] = {
                'description': row.description,
                'email': row.email,
                'department': row.department,
                'contract_id': row.contract_id,
                'stages': [{
                    'name': row.stage_name, 'id': row.stage_id,
                    'entered': localize_datetime(row.entered).isoformat(),
                    'exited': localize_datetime(exited).isoformat(),
                    'seconds': max([(exited - row.entered).total_seconds(), 0]),
                }]
            }

        return data_dict

    def build_metrics_data(self):
        '''Build the raw data sets to be transformed client-side for metrics charts

        Example:
            .. code-block:: python

                results = {
                    'current': { 'contract id': {
                        'description': 'a contract description',
                        'email': 'the contract is assigned to this email',
                        'department': 'the primary department for the contract',
                        'contract_id': 'the contract id',
                        'stages': [{
                            'name': 'the stage name', 'id': 'the stage id',
                            'entered': 'when the stage was entered',
                            'exited': 'when the stage was exited',
                            'seconds': 'number of seconds the contract spent in this stage',
                        }, ...]
                    }, ... },
                    'complete': { 'contract id': {

                    }, ... }
                }

        Returns:
            A results dictionary described in the example above.
        '''
        raw_data = self.get_metrics_csv_data()
        results = {'current': {}, 'complete': {}}

        for ix, row in enumerate(raw_data):
            exited = row.exited if row.exited else datetime.datetime.utcnow()
            if row.exited is None:
                results['current'] = self._build_row(row, exited, results['current'])
            else:
                results['complete'] = self._build_row(row, exited, results['complete'])

        return results

    def reshape_metrics_granular(self, enter_and_exit=False):
        '''Transform long data from database into wide data for consumption

        Take in a result set (list of tuples), return a dictionary of results.
        The key for the dictionary is the contract id, and the values are a list
        of (fieldname, value). Metadata (common to all rows) is listed first, and
        timing information from each stage is listed afterwards. Sorting is assumed
        to be done at the database layer.

        Arguments:
            enter_and_exit: A boolean option of whether to add both the
                enter and exit times to the results list

        Returns:
            * Results - a dictionary of lists which can be used to generate
              a .csv or .tsv file to be downloaded by the client
            * Headers - A list of strings which can be used to create the
              headers for the downloadable file
        '''
        raw_data = self.get_metrics_csv_data()
        results = defaultdict(list)
        headers = []

        for ix, row in enumerate(raw_data):
            if ix == 0:
                headers.extend(['item_number', 'description', 'assigned_to', 'department'])

            # if this is a new contract row, append metadata
            if len(results[row.contract_id]) == 0:
                results[row.contract_id].extend([
                    row.contract_id,
                    row.description,
                    row.email,
                    row.department,
                ])

            # append the stage date data
            if enter_and_exit and row.exited:
                results[row.contract_id].extend([
                    localize_datetime(row.exited),
                    localize_datetime(row.entered)
                ])
                if row.stage_name + '_exit' not in headers:
                    headers.append(row.stage_name.replace(' ', '_') + '_exit')
                    headers.append(row.stage_name.replace(' ', '_') + '_enter')
            else:
                results[row.contract_id].extend([
                    localize_datetime(row.exited)
                ])

                if row.stage_name not in headers:
                    headers.append(row.stage_name)

        return results, headers

    def get_metrics_csv_data(self):
        '''Raw SQL query that returns the raw data to be reshaped for download or charting
        '''
        return db.session.execute('''
            select
                x.contract_id, x.description, x.department,
                x.email, x.stage_name, x.rn, x.stage_id,
                min(x.entered) as entered,
                max(x.exited) as exited

            from (

                select
                    c.id as contract_id, c.description, d.name as department,
                    u.email, s.name as stage_name, s.id as stage_id, cs.exited, cs.entered,
                    row_number() over (partition by c.id order by cs.entered asc, cs.id asc) as rn

                from contract_stage cs
                join stage s on cs.stage_id = s.id

                join contract c on cs.contract_id = c.id

                join users u on c.assigned_to = u.id
                left join department d on c.department_id = d.id

                where cs.entered is not null
                and cs.flow_id = :flow_id

            ) x
            group by 1,2,3,4,5,6,7
            order by contract_id, rn asc
        ''', {
            'flow_id': self.id
        }).fetchall()
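One caveat that applies to the ARRAY columns used throughout these examples: SQLAlchemy does not track in-place mutation of a plain ARRAY value, so something like flow.stage_order.append(4) would not be flushed. A minimal sketch of the usual workarounds (an assumption-level illustration, not taken from the code above):

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.ext.mutable import MutableList

# Option 1: reassign the whole list so the attribute change is detected.
# flow.stage_order = flow.stage_order + [new_stage_id]

# Option 2: declare the column with a mutable wrapper so in-place changes
# are tracked by the session (this would replace a declaration like
# `stage_order = Column(ARRAY(db.Integer))` in a model such as Flow).
stage_order = sa.Column(MutableList.as_mutable(ARRAY(sa.Integer)))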