Beispiel #1
0
    def test_cube_operators(self):
        """Verify CUBE, ROLLUP and GROUPING SETS compile to the expected SQL."""

        t = table(
            't',
            column('value'),
            column('x'),
            column('y'),
            column('z'),
            column('q'),
        )

        stmt = select([func.sum(t.c.value)])

        # Single-argument grouping constructs.
        self.assert_compile(
            stmt.group_by(func.cube(t.c.x, t.c.y)),
            "SELECT sum(t.value) AS sum_1 FROM t GROUP BY CUBE(t.x, t.y)")

        self.assert_compile(
            stmt.group_by(func.rollup(t.c.x, t.c.y)),
            "SELECT sum(t.value) AS sum_1 FROM t GROUP BY ROLLUP(t.x, t.y)")

        self.assert_compile(
            stmt.group_by(func.grouping_sets(t.c.x, t.c.y)),
            "SELECT sum(t.value) AS sum_1 FROM t "
            "GROUP BY GROUPING SETS(t.x, t.y)")

        # GROUPING SETS over tuples renders each tuple parenthesized.
        self.assert_compile(
            stmt.group_by(
                func.grouping_sets(
                    sql.tuple_(t.c.x, t.c.y),
                    sql.tuple_(t.c.z, t.c.q))),
            "SELECT sum(t.value) AS sum_1 FROM t GROUP BY "
            "GROUPING SETS((t.x, t.y), (t.z, t.q))")
def process_audits(connection, user_id, caches, audits):
    """Snapshot the given audits and relate each audit to its snapshots.

    Args:
        connection: database connection used for all queries and inserts.
        user_id: id recorded as modified_by_id on the new relationships.
        caches: shared lookup caches passed through to create_snapshots.
        audits: audits whose objects should be snapshotted.
    """
    snapshot_quads = create_snapshots(connection, user_id, caches, audits)
    if not snapshot_quads:
        # Nothing was snapshotted, so there is nothing to link.
        return

    snapshots = connection.execute(
        select([snapshots_table]).where(
            tuple_(
                Snapshot.parent_type,
                Snapshot.parent_id,
                Snapshot.child_type,
                Snapshot.child_id,
            ).in_(snapshot_quads))).fetchall()
    # (parent_type, parent_id, child_type, child_id) -> (snapshot id, context id)
    snapshot_cache = {
        (obj_.parent_type, obj_.parent_id, obj_.child_type, obj_.child_id):
            (obj_.id, obj_.context_id)
        for obj_ in snapshots
    }
    # One relationship per quad: the audit (quad[2]/quad[3]) -> its Snapshot.
    relationships_payload = [{
        "source_type": quad[2],
        "source_id": quad[3],
        "destination_type": "Snapshot",
        "destination_id": snapshot_cache[quad][0],
        "modified_by_id": user_id,
        "context_id": snapshot_cache[quad][1],
    } for quad in snapshot_quads]

    insert_payloads(connection, relationships=relationships_payload)
def process_audits(connection, user_id, caches, audits):
  """Process audits"""
  snapshot_quads = create_snapshots(connection, user_id, caches, audits)

  relationships_payload = []
  if snapshot_quads:
    # Fetch the stored snapshot rows matching the quads we just created.
    quad_filter = tuple_(
        Snapshot.parent_type,
        Snapshot.parent_id,
        Snapshot.child_type,
        Snapshot.child_id,
    ).in_(snapshot_quads)
    snapshots = connection.execute(
        select([snapshots_table]).where(quad_filter)
    ).fetchall()
    # Map each quad back to the (id, context_id) of its stored snapshot.
    snapshot_cache = {}
    for row in snapshots:
      key = (row.parent_type, row.parent_id, row.child_type, row.child_id)
      snapshot_cache[key] = (row.id, row.context_id)
    for quad in snapshot_quads:
      snapshot_id, context_id = snapshot_cache[quad]
      relationships_payload.append({
          "source_type": quad[2],
          "source_id": quad[3],
          "destination_type": "Snapshot",
          "destination_id": snapshot_id,
          "modified_by_id": user_id,
          "context_id": context_id,
      })

    insert_payloads(connection, relationships=relationships_payload)
Beispiel #4
0
def load_photos(user: User, limit: Optional[str] = None, offset: Optional[str] = None):
    """Build a (paged) photo query for *user* and return its gino proxy.

    Args:
        user: the requesting user; used in the join and exclusion filter.
        limit: optional page size passed through to limit_query.
        offset: optional page offset passed through to limit_query.
    """
    alias_ass = Assessment.alias('ass')
    # Scalar subquery: the user_id of the assessment row joined via the alias.
    subq = db.select([Assessment.user_id]).where(
        Assessment.id == alias_ass.id
    ).as_scalar()
    # Outer-join photos to this user's assessments; `.is_(None)` replaces the
    # non-idiomatic `== None` (both compile to IS NULL) to keep unassessed
    # photos in the result.
    join_condition = or_(
        and_(alias_ass.user_id == user.id, alias_ass.photo_id == Photo.id),
        alias_ass.photo_id.is_(None),
    )
    query = db.select([Photo]).select_from(
        Photo.outerjoin(alias_ass, join_condition)
    ).where(not_(tuple_(user.id).in_(subq)))
    photos = limit_query(query, limit, offset)

    return photos.gino
Beispiel #5
0
    def test_cube_operators(self):
        """CUBE / ROLLUP / GROUPING SETS render as the expected GROUP BY SQL."""

        t = table("t", column("value"), column("x"),
                  column("y"), column("z"), column("q"))

        stmt = select(func.sum(t.c.value))

        self.assert_compile(
            stmt.group_by(func.cube(t.c.x, t.c.y)),
            "SELECT sum(t.value) AS sum_1 FROM t GROUP BY CUBE(t.x, t.y)")

        self.assert_compile(
            stmt.group_by(func.rollup(t.c.x, t.c.y)),
            "SELECT sum(t.value) AS sum_1 FROM t GROUP BY ROLLUP(t.x, t.y)")

        self.assert_compile(
            stmt.group_by(func.grouping_sets(t.c.x, t.c.y)),
            "SELECT sum(t.value) AS sum_1 FROM t "
            "GROUP BY GROUPING SETS(t.x, t.y)")

        # Tuples inside GROUPING SETS get their own parentheses.
        grouping = func.grouping_sets(
            sql.tuple_(t.c.x, t.c.y),
            sql.tuple_(t.c.z, t.c.q))
        self.assert_compile(
            stmt.group_by(grouping),
            "SELECT sum(t.value) AS sum_1 FROM t GROUP BY "
            "GROUPING SETS((t.x, t.y), (t.z, t.q))")
Beispiel #6
0
def get_revisions(connection, objects):
  """Get latest revisions of provided objects."""
  # Composite (resource_type, resource_id) key used to filter revisions.
  resource_key = tuple_(
      revisions_table.c.resource_type,
      revisions_table.c.resource_id,
  )
  query = select([
      func.max(revisions_table.c.id),
      revisions_table.c.resource_type,
      revisions_table.c.resource_id,
  ]).where(
      resource_key.in_(objects)
  ).group_by(
      revisions_table.c.resource_type,
      revisions_table.c.resource_id,
  )
  latest = {}
  for id_, rtype, rid in connection.execute(query).fetchall():
    latest[Stub(rtype, rid)] = id_
  return latest
Beispiel #7
0
def get_revisions(connection, objects):
  """Get latest revisions of provided objects."""
  # Select the newest revision id per (resource_type, resource_id) pair.
  selected_columns = [
      func.max(revisions_table.c.id),
      revisions_table.c.resource_type,
      revisions_table.c.resource_id,
  ]
  key_columns = tuple_(
      revisions_table.c.resource_type,
      revisions_table.c.resource_id,
  )
  revisions = (
      select(selected_columns)
      .where(key_columns.in_(objects))
      .group_by(revisions_table.c.resource_type,
                revisions_table.c.resource_id)
  )
  rows = connection.execute(revisions).fetchall()
  return {Stub(rtype, rid): id_ for id_, rtype, rid in rows}
Beispiel #8
0
def _build_where(table, where_rpn):
    """Build a select WHERE clause from a Reverse Polish Notation list.

    where_rpn (list): [column, value, operator, ...]
        e.g. WHERE id <= 5 AND user LIKE "%admin%"
            => ["id", 5, "<=", "user", "%admin%", "like", "and"]
    """
    # Operator tokens mapped to the SQLAlchemy expression they produce.
    operators = {
        '<': lambda lhs, rhs: lhs < rhs,
        '<=': lambda lhs, rhs: lhs <= rhs,
        '>': lambda lhs, rhs: lhs > rhs,
        '>=': lambda lhs, rhs: lhs >= rhs,
        '=': lambda lhs, rhs: lhs == rhs,
        '!=': lambda lhs, rhs: lhs != rhs,
        'and': lambda lhs, rhs: and_(lhs, rhs),
        'or': lambda lhs, rhs: or_(lhs, rhs),
        'like': lambda lhs, rhs: lhs.like(rhs),
        'in': lambda lhs, rhs: tuple_(lhs).in_([(elem, ) for elem in rhs]),
    }
    # Column-name tokens resolve to the column objects themselves.
    operators.update(table.columns)
    # Evaluate the RPN list; the single remaining element is the clause.
    return RPN(where_rpn, operators)[0]
Beispiel #9
0
def overlaps(a_pair, b_pair):
    """Build a SQL expression applying OVERLAPS to the two pairs."""
    lhs = tuple_(*a_pair)
    rhs = tuple_(*b_pair)
    return lhs.op('OVERLAPS')(rhs)
def link_snapshots_to_objects(connection, user_id, caches, object_settings,
                              snapshot_quads):
    """Create relationships between snapshots and objects.

    Args:
        connection: database connection used for all queries and inserts.
        user_id: id recorded as modified_by_id on the new relationships.
        caches: shared caches; only "audit_contexts" is read here.
        object_settings: dict providing the object "type", its
            "object_relationships" mapping and a "select_all" statement.
        snapshot_quads: (parent_type, parent_id, child_type, child_id)
            tuples identifying the snapshots to link against.
    """
    # pylint: disable=too-many-locals
    relationships_payload = []

    audit_contexts = caches["audit_contexts"]

    object_klass = object_settings["type"]
    object_relationships = object_settings["object_relationships"]
    object_select = object_settings["select_all"]

    all_objects = connection.execute(object_select).fetchall()

    if snapshot_quads:
        snapshots = connection.execute(
            select([snapshots_table]).where(
                tuple_(
                    Snapshot.parent_type,
                    Snapshot.parent_id,
                    Snapshot.child_type,
                    Snapshot.child_id,
                ).in_(snapshot_quads))).fetchall()
        # (parent_type, parent_id, child_type, child_id) -> snapshot id
        snapshot_cache = {
            (obj_.parent_type, obj_.parent_id, obj_.child_type,
             obj_.child_id): obj_.id
            for obj_ in snapshots
        }

        for object_ in all_objects:
            key = Stub(object_klass, object_.id)
            objects = object_relationships[key]
            audit = [x for x in objects if x.type == "Audit"]
            others = [x for x in objects if x.type in Types.all]

            # Only objects related to exactly one audit can be linked.
            if len(audit) != 1:
                continue

            # NOTE: len(audit) == 1 is guaranteed here, so the original
            # redundant `if audit:` guard has been removed.
            audit = audit[0]
            audit_context_id = audit_contexts[audit.id]

            for obj_ in set(others):
                quad = ("Audit", audit.id, obj_.type, obj_.id)
                if quad not in snapshot_cache:
                    logger.warning(
                        "Couldn't map %s-%s to Snapshot of object %s-%s because it "
                        "doesn't exist due to missing revision.",
                        object_klass, object_.id, obj_.type, obj_.id)
                    continue
                snapshot_id = snapshot_cache[quad]
                # Link both the object and the related object to the snapshot.
                relationships_payload += [{
                    "source_type": object_klass,
                    "source_id": object_.id,
                    "destination_type": "Snapshot",
                    "destination_id": snapshot_id,
                    "modified_by_id": user_id,
                    "context_id": audit_context_id,
                }, {
                    "source_type": obj_.type,
                    "source_id": obj_.id,
                    "destination_type": "Snapshot",
                    "destination_id": snapshot_id,
                    "modified_by_id": user_id,
                    "context_id": audit_context_id,
                }]

    insert_payloads(connection, relationships=relationships_payload)
Beispiel #11
0
def trigger_alerts():
    """Send every due alert email and return a JSON status dict.

    Only whitelisted remote addresses may trigger a run.  An optional
    ``frequency`` query parameter restricts the run to alerts of that
    frequency.
    """
    # NOTE(review): local import — presumably to avoid a circular import
    # with api.api at module load time; confirm.
    from api.api import build_filtered_lead_selection

    if request.remote_addr not in current_app.config['ALERT_TRIGGER_WHITELIST']:
        return abort_json(401, 'Unauthorized')

    freq = request.args.get('frequency', None)

    if freq is not None and freq not in FREQS:
        return abort_json(400, 'Invalid frequency specified.')

    with engine().connect() as con:
        # select all alerts where:
        # 1. the recipient email is confirmed
        # 2. the alert hasn't been sent in the current time period
        confirmed = select(
            [confirmed_emails.c.user_id, confirmed_emails.c.email]).cte()

        # Composite membership test: (user_id, recipient) must appear in
        # the confirmed-emails CTE.
        where = tuple_(alerts_.c.user_id, alerts_.c.recipient).in_(confirmed)
        if freq is not None:
            where = and_(where, alerts_.c.frequency == FREQS[freq])

        # Outer-join to sent_alerts so alerts never sent before still
        # appear, with last_sent = NULL.
        query = select([alerts_, func.max(sent_alerts.c.send_date).label('last_sent')])\
            .select_from(alerts_.outerjoin(sent_alerts, sent_alerts.c.alert_id == alerts_.c.id))\
            .where(where)\
            .group_by(alerts_.c.id)

        results = con.execute(query)

        # these results satisfy #1, but not #2 yet
        # however, we need to go row by row anyway because MySQL cannot match
        # on column values (only plaintext)
        for result in results:
            row = dict(result)
            if row['last_sent'] is not None and row['last_sent'] >= min_date_threshold(row['frequency']):
                # has been sent more recently than we allow
                print(
                    f"Last trigger for {row['id']} is too recent ({row['last_sent']}, {min_date_threshold(row['frequency'])})")
                continue
            # One transaction per alert: the send record, its contents and
            # the email are committed (or rolled back) together.
            with con.begin():
                query = build_filtered_lead_selection(
                    filter_=row['filter'],
                    from_=None,
                    to=None,
                    sources={
                        key: row[f"{key}_source"]
                        for key in ['federal', 'regional', 'local']
                    },
                    page=None,
                    fields=[leads.c.id, annotated_leads.c.name],
                    where=[
                        # Only leads published since the last allowed send.
                        annotated_leads.c.published_dt >= min_date_threshold(
                            row['frequency'], fudge=timedelta(0))
                    ]
                )

                lead_results = con.execute(query)
                lead_results = list(dict(row) for row in lead_results)

                if len(lead_results) == 0:
                    print(f"Skipping alert {row['id']}. No new results.")
                    continue

                # record alert sending
                sent_alert = {
                    k: v
                    for k, v in row.items()
                    if k not in ['id', 'last_sent']
                }

                sent_alert['alert_id'] = row['id']
                sent_alert['send_date'] = datetime.now()
                sent_alert['db_link'] = build_db_url(sent_alert)

                query = sent_alerts.insert().values(  # pylint: disable=no-value-for-parameter
                    **sent_alert)

                res = con.execute(query)

                send_id = res.inserted_primary_key[0]

                sent_alert['send_id'] = send_id

                # Render the email from the leads found above.
                templates = render_alert(sent_alert, [{
                    'name': lead['name'],
                    'link': f'{BASE_URL}/lead/{lead["id"]}'
                } for lead in islice(lead_results, None)])

                # Record which leads were included in this send.
                sent_contents = [
                    {'send_id': send_id,
                     'lead_id': lead['id']}
                    for lead in lead_results
                ]

                con.execute(sent_alert_contents.insert(  # pylint: disable=no-value-for-parameter
                ), *sent_contents)

                send_alert(sent_alert, *templates)

    return {'status': 'ok'}
def link_snapshots_to_objects(connection, user_id,
                              caches, object_settings, snapshot_quads):
  """Create relationships between snapshots and objects.

  Args:
      connection: database connection used for all queries and inserts.
      user_id: id recorded as modified_by_id on the new relationships.
      caches: shared caches; only "audit_contexts" is read here.
      object_settings: dict providing the object "type", its
          "object_relationships" mapping and a "select_all" statement.
      snapshot_quads: (parent_type, parent_id, child_type, child_id)
          tuples identifying the snapshots to link against.
  """
  # pylint: disable=too-many-locals
  relationships_payload = []

  audit_contexts = caches["audit_contexts"]

  object_klass = object_settings["type"]
  object_relationships = object_settings["object_relationships"]
  object_select = object_settings["select_all"]

  all_objects = connection.execute(object_select).fetchall()

  if snapshot_quads:
    snapshots = connection.execute(select([snapshots_table]).where(
        tuple_(
            Snapshot.parent_type,
            Snapshot.parent_id,
            Snapshot.child_type,
            Snapshot.child_id,
        ).in_(snapshot_quads)
    )).fetchall()
    # (parent_type, parent_id, child_type, child_id) -> snapshot id
    snapshot_cache = {
        (obj_.parent_type, obj_.parent_id,
         obj_.child_type, obj_.child_id): obj_.id
        for obj_ in snapshots
    }

    for object_ in all_objects:
      key = Stub(object_klass, object_.id)
      objects = object_relationships[key]
      audit = [x for x in objects if x.type == "Audit"]
      others = [x for x in objects if x.type in Types.all]

      # Only objects related to exactly one audit can be linked.
      if len(audit) != 1:
        continue

      # NOTE: len(audit) == 1 holds here, so the original redundant
      # `if audit:` guard has been removed.
      audit = audit[0]
      audit_context_id = audit_contexts[audit.id]

      for obj_ in set(others):
        quad = ("Audit", audit.id, obj_.type, obj_.id)
        if quad not in snapshot_cache:
          logger.warning(
              "Couldn't map %s-%s to Snapshot of object %s-%s because it "
              "doesn't exist due to missing revision.",
              object_klass, object_.id, obj_.type, obj_.id
          )
          continue
        snapshot_id = snapshot_cache[quad]
        # Link both the object and the related object to the snapshot.
        relationships_payload += [{
            "source_type": object_klass,
            "source_id": object_.id,
            "destination_type": "Snapshot",
            "destination_id": snapshot_id,
            "modified_by_id": user_id,
            "context_id": audit_context_id,
        }, {
            "source_type": obj_.type,
            "source_id": obj_.id,
            "destination_type": "Snapshot",
            "destination_id": snapshot_id,
            "modified_by_id": user_id,
            "context_id": audit_context_id,
        }]

  insert_payloads(connection, relationships=relationships_payload)