コード例 #1
0
ファイル: session.py プロジェクト: GEverding/inbox
class InboxSession(object):
    """ Inbox custom ORM (with SQLAlchemy compatible API).

    Parameters
    ----------
    engine : <sqlalchemy.engine.Engine>
        A configured database engine to use for this session
    versioned : bool
        Do you want to enable the transaction log?
    ignore_soft_deletes : bool
        Whether or not to ignore soft-deleted objects in query results.
    namespace_id : int
        Namespace to limit query results with.
    """
    def __init__(self, engine, versioned=True, ignore_soft_deletes=True,
                 namespace_id=None):
        # TODO: support limiting on namespaces
        assert engine, "Must set the database engine"

        args = dict(bind=engine, autoflush=True, autocommit=False)
        self.ignore_soft_deletes = ignore_soft_deletes
        if ignore_soft_deletes:
            # InboxQuery is the custom Query class that filters out
            # soft-deleted rows from results.
            args['query_cls'] = InboxQuery
        self._session = Session(**args)

        if versioned:
            from inbox.models.transaction import create_revisions

            @event.listens_for(self._session, 'after_flush')
            def after_flush(session, flush_context):
                """
                Hook to log revision snapshots. Must be post-flush in order to
                grab object IDs on new objects.
                """
                create_revisions(session)

    def query(self, *args, **kwargs):
        """Proxy to the underlying session's query(), attaching the
        soft-delete filtering option when enabled."""
        q = self._session.query(*args, **kwargs)
        if self.ignore_soft_deletes:
            return q.options(IgnoreSoftDeletesOption())
        else:
            return q

    def add(self, instance):
        """Add ``instance`` to the session, refusing soft-deleted objects
        while soft deletes are being ignored."""
        if not self.ignore_soft_deletes or not instance.is_deleted:
            self._session.add(instance)
        else:
            raise Exception("Why are you adding a deleted object?")

    def add_all(self, instances):
        """Add all ``instances``, refusing the whole batch if any object
        is soft-deleted while soft deletes are being ignored."""
        # any() short-circuits on the first deleted object instead of
        # materializing a full list of flags as the old
        # ``True not in [...]`` check did.
        if not self.ignore_soft_deletes or \
                not any(i.is_deleted for i in instances):
            self._session.add_all(instances)
        else:
            raise Exception("Why are you adding a deleted object?")

    def delete(self, instance):
        """Soft-delete ``instance`` when ignoring soft deletes, otherwise
        perform a real SQL DELETE."""
        if self.ignore_soft_deletes:
            instance.mark_deleted()
            # just to make sure
            self._session.add(instance)
        else:
            self._session.delete(instance)

    def begin(self):
        self._session.begin()

    def commit(self):
        self._session.commit()

    def rollback(self):
        self._session.rollback()

    def flush(self):
        self._session.flush()

    def close(self):
        self._session.close()

    def expunge(self, obj):
        self._session.expunge(obj)

    def merge(self, obj):
        return self._session.merge(obj)

    @property
    def no_autoflush(self):
        return self._session.no_autoflush
コード例 #2
0
ファイル: per_session.py プロジェクト: gajop/ailadder
    # NOTE(review): added the missing imports for create_engine/Column/
    # Integer/String, which the original example used without importing.
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy.ext.declarative import declarative_base

    # Session factory wired to the custom caching Query class.
    Session = sessionmaker(query_cls=CachingQuery)

    # declarative_base() takes the engine via its ``bind`` keyword;
    # the old ``engine=`` spelling is not accepted by modern SQLAlchemy.
    Base = declarative_base(bind=create_engine('sqlite://', echo=True))

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(100))

        def __repr__(self):
            return "User(name=%r)" % self.name

    Base.metadata.create_all()

    sess = Session()

    sess.add_all(
        [User(name='u1'), User(name='u2'), User(name='u3')]
    )
    sess.commit()

    # cache two user objects
    sess.query(User).with_cache_key('u2andu3').filter(User.name.in_(['u2', 'u3'])).all()

    # pull straight from cache (print is a function in Python 3, not a statement)
    print(sess.query(User).with_cache_key('u2andu3').all())
    
コード例 #3
0
ファイル: db_helper.py プロジェクト: mrzhangboss/fsqlfly
 def bulk_insert(cls, data: list, *args, session: Session, **kwargs):
     """Stage every object in *data* on the given session and report how
     many were added."""
     inserted_count = len(data)
     session.add_all(data)
     return DBRes(data=inserted_count)
コード例 #4
0
def upgrade():
    """Seed the roles table with the initial set of roles.

    Runs inside an Alembic migration: binds an ORM Session to the
    migration's connection, builds six Role rows (ccpo, owner, admin,
    developer, billing_auditor, security_auditor) with their permission
    lists, and commits them in one transaction.
    """
    # Reuse the connection Alembic is already migrating with.
    session = Session(bind=op.get_bind())
    # NOTE(review): VIEW_ORIGINAL_JEDI_REQEUST is spelled this way in the
    # Permissions enum defined elsewhere — do not "fix" it here without
    # renaming the enum member too.
    roles = [
        Role(
            name='ccpo',
            description='',
            permissions=[
                Permissions.VIEW_ORIGINAL_JEDI_REQEUST,
                Permissions.REVIEW_AND_APPROVE_JEDI_WORKSPACE_REQUEST,
                Permissions.MODIFY_ATAT_ROLE_PERMISSIONS,
                Permissions.CREATE_CSP_ROLE, Permissions.DELETE_CSP_ROLE,
                Permissions.DEACTIVE_CSP_ROLE,
                Permissions.MODIFY_CSP_ROLE_PERMISSIONS,
                Permissions.VIEW_USAGE_REPORT, Permissions.VIEW_USAGE_DOLLARS,
                Permissions.ADD_AND_ASSIGN_CSP_ROLES,
                Permissions.REMOVE_CSP_ROLES, Permissions.REQUEST_NEW_CSP_ROLE,
                Permissions.ASSIGN_AND_UNASSIGN_ATAT_ROLE,
                Permissions.VIEW_ASSIGNED_ATAT_ROLE_CONFIGURATIONS,
                Permissions.VIEW_ASSIGNED_CSP_ROLE_CONFIGURATIONS,
                Permissions.DEACTIVATE_WORKSPACE,
                Permissions.VIEW_ATAT_PERMISSIONS,
                Permissions.TRANSFER_OWNERSHIP_OF_WORKSPACE,
                Permissions.ADD_APPLICATION_IN_WORKSPACE,
                Permissions.DELETE_APPLICATION_IN_WORKSPACE,
                Permissions.DEACTIVATE_APPLICATION_IN_WORKSPACE,
                Permissions.VIEW_APPLICATION_IN_WORKSPACE,
                Permissions.RENAME_APPLICATION_IN_WORKSPACE,
                Permissions.ADD_ENVIRONMENT_IN_APPLICATION,
                Permissions.DELETE_ENVIRONMENT_IN_APPLICATION,
                Permissions.DEACTIVATE_ENVIRONMENT_IN_APPLICATION,
                Permissions.VIEW_ENVIRONMENT_IN_APPLICATION,
                Permissions.RENAME_ENVIRONMENT_IN_APPLICATION,
                Permissions.ADD_TAG_TO_WORKSPACE,
                Permissions.REMOVE_TAG_FROM_WORKSPACE
            ]),
        Role(name='owner',
             description='',
             permissions=[
                 Permissions.REQUEST_JEDI_WORKSPACE,
                 Permissions.VIEW_ORIGINAL_JEDI_REQEUST,
                 Permissions.VIEW_USAGE_REPORT,
                 Permissions.VIEW_USAGE_DOLLARS,
                 Permissions.ADD_AND_ASSIGN_CSP_ROLES,
                 Permissions.REMOVE_CSP_ROLES,
                 Permissions.REQUEST_NEW_CSP_ROLE,
                 Permissions.ASSIGN_AND_UNASSIGN_ATAT_ROLE,
                 Permissions.VIEW_ASSIGNED_ATAT_ROLE_CONFIGURATIONS,
                 Permissions.VIEW_ASSIGNED_CSP_ROLE_CONFIGURATIONS,
                 Permissions.DEACTIVATE_WORKSPACE,
                 Permissions.VIEW_ATAT_PERMISSIONS,
                 Permissions.ADD_APPLICATION_IN_WORKSPACE,
                 Permissions.DELETE_APPLICATION_IN_WORKSPACE,
                 Permissions.DEACTIVATE_APPLICATION_IN_WORKSPACE,
                 Permissions.VIEW_APPLICATION_IN_WORKSPACE,
                 Permissions.RENAME_APPLICATION_IN_WORKSPACE,
                 Permissions.ADD_ENVIRONMENT_IN_APPLICATION,
                 Permissions.DELETE_ENVIRONMENT_IN_APPLICATION,
                 Permissions.DEACTIVATE_ENVIRONMENT_IN_APPLICATION,
                 Permissions.VIEW_ENVIRONMENT_IN_APPLICATION,
                 Permissions.RENAME_ENVIRONMENT_IN_APPLICATION,
             ]),
        Role(name='admin',
             description='',
             permissions=[
                 Permissions.VIEW_USAGE_REPORT,
                 Permissions.ADD_AND_ASSIGN_CSP_ROLES,
                 Permissions.REMOVE_CSP_ROLES,
                 Permissions.REQUEST_NEW_CSP_ROLE,
                 Permissions.ASSIGN_AND_UNASSIGN_ATAT_ROLE,
                 Permissions.VIEW_ASSIGNED_ATAT_ROLE_CONFIGURATIONS,
                 Permissions.VIEW_ASSIGNED_CSP_ROLE_CONFIGURATIONS,
                 Permissions.ADD_APPLICATION_IN_WORKSPACE,
                 Permissions.DELETE_APPLICATION_IN_WORKSPACE,
                 Permissions.DEACTIVATE_APPLICATION_IN_WORKSPACE,
                 Permissions.VIEW_APPLICATION_IN_WORKSPACE,
                 Permissions.RENAME_APPLICATION_IN_WORKSPACE,
                 Permissions.ADD_ENVIRONMENT_IN_APPLICATION,
                 Permissions.DELETE_ENVIRONMENT_IN_APPLICATION,
                 Permissions.DEACTIVATE_ENVIRONMENT_IN_APPLICATION,
                 Permissions.VIEW_ENVIRONMENT_IN_APPLICATION,
                 Permissions.RENAME_ENVIRONMENT_IN_APPLICATION,
             ]),
        Role(name='developer',
             description='',
             permissions=[
                 Permissions.VIEW_USAGE_REPORT, Permissions.VIEW_USAGE_DOLLARS,
                 Permissions.VIEW_APPLICATION_IN_WORKSPACE,
                 Permissions.VIEW_ENVIRONMENT_IN_APPLICATION
             ]),
        Role(name='billing_auditor',
             description='',
             permissions=[
                 Permissions.VIEW_USAGE_REPORT,
                 Permissions.VIEW_USAGE_DOLLARS,
                 Permissions.VIEW_APPLICATION_IN_WORKSPACE,
                 Permissions.VIEW_ENVIRONMENT_IN_APPLICATION,
             ]),
        Role(name='security_auditor',
             description='',
             permissions=[
                 Permissions.VIEW_ASSIGNED_ATAT_ROLE_CONFIGURATIONS,
                 Permissions.VIEW_ASSIGNED_CSP_ROLE_CONFIGURATIONS,
                 Permissions.VIEW_ATAT_PERMISSIONS,
                 Permissions.VIEW_APPLICATION_IN_WORKSPACE,
                 Permissions.VIEW_ENVIRONMENT_IN_APPLICATION,
             ]),
    ]

    # Persist all six roles in a single commit.
    session.add_all(roles)
    session.commit()
コード例 #5
0
ファイル: processor.py プロジェクト: grepthat/airflow
    def manage_slas(self, dag: DAG, session: Session = None) -> None:
        """
        Finding all tasks that have SLAs defined, and sending alert emails
        where needed. New SLA misses are also recorded in the database.

        We are assuming that the scheduler runs often, so we only check for
        tasks that should have succeeded in the past hour.
        """
        # NOTE(review): the ``session=None`` default suggests a decorator
        # (e.g. @provide_session) injects the session — confirm upstream.
        self.log.info("Running SLA Checks for %s", dag.dag_id)
        # Nothing to do unless at least one task defines an SLA.
        if not any(isinstance(ti.sla, timedelta) for ti in dag.tasks):
            self.log.info(
                "Skipping SLA check for %s because no tasks in DAG have SLAs",
                dag)
            return

        # Subquery: latest SUCCESS/SKIPPED execution date per task of this
        # DAG (MySQL index hint forces the primary key).
        qry = (session.query(
            TI.task_id,
            func.max(DR.execution_date).label('max_ti')).join(
                TI.dag_run).with_hint(
                    TI, 'USE INDEX (PRIMARY)', dialect_name='mysql').filter(
                        TI.dag_id == dag.dag_id).filter(
                            or_(TI.state == State.SUCCESS,
                                TI.state == State.SKIPPED)).filter(
                                    TI.task_id.in_(dag.task_ids)).group_by(
                                        TI.task_id).subquery('sq'))

        # Task instances corresponding to each task's latest finished run.
        max_tis: Iterator[TI] = (session.query(TI).join(TI.dag_run).filter(
            TI.dag_id == dag.dag_id,
            TI.task_id == qry.c.task_id,
            DR.execution_date == qry.c.max_ti,
        ))

        ts = timezone.utcnow()
        for ti in max_tis:
            task = dag.get_task(ti.task_id)
            if not task.sla:
                continue

            if not isinstance(task.sla, timedelta):
                raise TypeError(
                    f"SLA is expected to be timedelta object, got "
                    f"{type(task.sla)} in {task.dag_id}:{task.task_id}")

            sla_misses = []
            # Walk scheduled runs forward from the last finished one,
            # recording every run whose SLA deadline has already passed.
            next_info = dag.next_dagrun_info(dag.get_run_data_interval(
                ti.dag_run),
                                             restricted=False)
            if next_info is None:
                self.log.info(
                    "Skipping SLA check for %s because task does not have scheduled date",
                    ti)
            else:
                while next_info.logical_date < ts:
                    next_info = dag.next_dagrun_info(next_info.data_interval,
                                                     restricted=False)
                    if next_info is None:
                        break
                    # Deadline = logical date + SLA; past-deadline runs
                    # become SlaMiss rows.
                    if next_info.logical_date + task.sla < ts:
                        sla_miss = SlaMiss(
                            task_id=ti.task_id,
                            dag_id=ti.dag_id,
                            execution_date=next_info.logical_date,
                            timestamp=ts,
                        )
                        sla_misses.append(sla_miss)
            if sla_misses:
                session.add_all(sla_misses)
        session.commit()

        # All misses for this DAG that have not been notified yet
        # (``== False`` is required SQLAlchemy comparison syntax — noqa).
        slas: List[SlaMiss] = (
            session.query(SlaMiss).filter(SlaMiss.notification_sent == False,
                                          SlaMiss.dag_id == dag.dag_id)  # noqa
            .all())
        if slas:
            sla_dates: List[datetime.datetime] = [
                sla.execution_date for sla in slas
            ]
            # Task instances on the missed dates that still haven't
            # succeeded; these may be blocking the SLA'd tasks.
            fetched_tis: List[TI] = (session.query(TI).filter(
                TI.state != State.SUCCESS, TI.execution_date.in_(sla_dates),
                TI.dag_id == dag.dag_id).all())
            blocking_tis: List[TI] = []
            for ti in fetched_tis:
                if ti.task_id in dag.task_ids:
                    ti.task = dag.get_task(ti.task_id)
                    blocking_tis.append(ti)
                else:
                    # Task instance no longer exists in the DAG: purge it.
                    session.delete(ti)
                    session.commit()

            task_list = "\n".join(sla.task_id + ' on ' +
                                  sla.execution_date.isoformat()
                                  for sla in slas)
            blocking_task_list = "\n".join(ti.task_id + ' on ' +
                                           ti.execution_date.isoformat()
                                           for ti in blocking_tis)
            # Track whether email or any alert notification sent
            # We consider email or the alert callback as notifications
            email_sent = False
            notification_sent = False
            if dag.sla_miss_callback:
                # Execute the alert callback
                self.log.info('Calling SLA miss callback')
                try:
                    dag.sla_miss_callback(dag, task_list, blocking_task_list,
                                          slas, blocking_tis)
                    notification_sent = True
                except Exception:
                    # Callback failures are logged but never abort the check.
                    self.log.exception(
                        "Could not call sla_miss_callback for DAG %s",
                        dag.dag_id)
            email_content = f"""\
            Here's a list of tasks that missed their SLAs:
            <pre><code>{task_list}\n<code></pre>
            Blocking tasks:
            <pre><code>{blocking_task_list}<code></pre>
            Airflow Webserver URL: {conf.get(section='webserver', key='base_url')}
            """

            tasks_missed_sla = []
            for sla in slas:
                try:
                    task = dag.get_task(sla.task_id)
                except TaskNotFound:
                    # task already deleted from DAG, skip it
                    self.log.warning(
                        "Task %s doesn't exist in DAG anymore, skipping SLA miss notification.",
                        sla.task_id)
                    continue
                tasks_missed_sla.append(task)

            # Collect the union of recipient addresses from all missed tasks.
            emails: Set[str] = set()
            for task in tasks_missed_sla:
                if task.email:
                    if isinstance(task.email, str):
                        emails |= set(get_email_address_list(task.email))
                    elif isinstance(task.email, (list, tuple)):
                        emails |= set(task.email)
            if emails:
                try:
                    send_email(emails,
                               f"[airflow] SLA miss on DAG={dag.dag_id}",
                               email_content)
                    email_sent = True
                    notification_sent = True
                except Exception:
                    Stats.incr('sla_email_notification_failure')
                    self.log.exception(
                        "Could not send SLA Miss email notification for DAG %s",
                        dag.dag_id)
            # If we sent any notification, update the sla_miss table
            if notification_sent:
                for sla in slas:
                    sla.email_sent = email_sent
                    sla.notification_sent = True
                    session.merge(sla)
            session.commit()