Example #1
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('first', sa.String(length=32), nullable=False),
    sa.Column('last', sa.String(length=32), nullable=False),
    sa.Column('email', sa.String(length=128), nullable=False),
    sa.Column('salt', sa.String(), nullable=False),
    sa.Column('password', sa.String(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('NOW()'), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email')
    )
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
    op.create_table('posts',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('NOW()'), nullable=False),
    sa.Column('body', sa.String(length=256), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_posts_id'), 'posts', ['id'], unique=False)
    op.create_table('replies',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('NOW()'), nullable=False),
    sa.Column('body', sa.String(length=256), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('post_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_replies_id'), 'replies', ['id'], unique=False)
Example #2
def team_leaderboard_classic():
    # Get teams sorted by points
    q = text("""
             select T.id as team_id, T.name as team_name, sum(DS.points) as total_score,
             sum(DS.distance) as total_distance
             from daily_scores DS
             join teams T on T.id = DS.team_id
             where not T.leaderboard_exclude
             group by T.id, T.name
             order by total_score desc
             ;
             """)

    team_rows = db.session.execute(q).fetchall() # @UndefinedVariable

    q = text("""
             select A.id as athlete_id, A.team_id, A.display_name as athlete_name,
             sum(DS.points) as total_score, sum(DS.distance) as total_distance,
             count(DS.points) as days_ridden
             from daily_scores DS
             join athletes A on A.id = DS.athlete_id
             group by A.id, A.display_name
             order by total_score desc
             ;
             """)

    team_members = {}
    for indiv_row in db.session.execute(q).fetchall(): # @UndefinedVariable
        team_members.setdefault(indiv_row['team_id'], []).append(indiv_row)

    for team_id in team_members:
        team_members[team_id] = sorted(team_members[team_id], key=lambda m: m['total_score'], reverse=True)

    return render_template('leaderboard/team_text.html', team_rows=team_rows, team_members=team_members)
def upgradeTransferTemplates():
    update_sql = ("update sample_transfer_template"
                  " set name = :name,"
                  " is_one_to_one_transfer = :ioto,"
                  " source_plate_well_count = :spwc,"
                  " destination_plate_well_count = :dpwc"
                  " where id = :id")

    insert_sql = ("insert into sample_transfer_template"
                  " (id, name, is_one_to_one_transfer, source_plate_well_count,"
                  " destination_plate_well_count)"
                  " select :id, :name, :ioto, :spwc, :dpwc"
                  " where not exists"
                  " (select * from sample_transfer_template where id = :id)")

    sql_param_names = ("id", "name", "ioto", "spwc", "dpwc")

    desired_values = [
        [35, "PCA Pre-Planning", "F", None, None]
        ,[36, "PCR Primer Hitpicking", "F", None, None]
        ,[37, "PCA/PCR Master Mix Addition", "F", None, None]

    ]

    for row_values in desired_values:
        args = dict(zip(sql_param_names, row_values))

        # try update first, then insert.  The WHERE clauses in the SQL
        # for insert and update should allow this without causing failures.
        op.execute(sa.text(update_sql).bindparams(**args))
        op.execute(sa.text(insert_sql).bindparams(**args))
Example #4
def add_task():
    data = request.json['content']
    # if not data:
    #     return redirect('/')

    dt, content = utils.extract_datetime_from_text(data)

    user_id = request.json['user_id']
    prev_task = Task.query.get(request.json['prev_task'])
    my_right = prev_task.rgt
    task = Task(
        content=content,
        user_id=user_id,
        due_date=dt,
        my_right=my_right,
        parent_id=prev_task.parent_id
    )

    # Use bound parameters instead of string concatenation to avoid SQL injection.
    # Technically this should be wrapped in a transaction (a sketch follows this example).
    cmd = text("UPDATE tasks SET rgt = rgt + 2 WHERE user_id = :user_id AND rgt > :rgt")
    db.engine.execute(cmd, user_id=user_id, rgt=my_right)
    cmd2 = text("UPDATE tasks SET lft = lft + 2 WHERE user_id = :user_id AND lft > :lft")
    db.engine.execute(cmd2, user_id=user_id, lft=my_right)
    db.session.add(task)
    db.session.commit()
    return json.dumps(utils.task_to_dictionary(task))
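
The comment above notes that the two nested-set shifts should really run atomically. A minimal sketch of that, assuming the same Flask-SQLAlchemy db object and SQLAlchemy 1.x-style keyword parameters; shift_nested_set is a hypothetical helper name:

from sqlalchemy import text

def shift_nested_set(user_id, my_right):
    # engine.begin() opens a transaction that commits on success and rolls
    # back if either UPDATE raises, keeping the lft/rgt bookkeeping consistent.
    with db.engine.begin() as conn:
        conn.execute(
            text("UPDATE tasks SET rgt = rgt + 2 WHERE user_id = :user_id AND rgt > :rgt"),
            user_id=user_id, rgt=my_right)
        conn.execute(
            text("UPDATE tasks SET lft = lft + 2 WHERE user_id = :user_id AND lft > :lft"),
            user_id=user_id, lft=my_right)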
def upgradeTransferTypes():
    update_sql = ("update sample_transfer_type"
                  " set name = :name,"
                  " transfer_template_id = :stti,"
                  " source_plate_count = :spc,"
                  " destination_plate_count = :dpc"
                  " where id = :id")

    insert_sql = ("insert into sample_transfer_type"
                  " (id, name, transfer_template_id,"
                  " source_plate_count, destination_plate_count)"
                  " select :id, :name, :stti, :spc, :dpc"
                  " where not exists"
                  " (select * from sample_transfer_type where id = :id)")

    sql_param_names = ("id", "name", "stti", "spc", "dpc")

    desired_values = [
        [53, "PCA Pre-Planning", 35, 1, 4, None]
        ,[54, "Primer Hitpicking - Create Source", 2, 1, 0, None]
        ,[55, "PCA master mix addition", 2, 1, 0, None]
        ,[56, "PCA Thermocycling", 2, 1, 0, None]
        ,[57, "PCR Primer Hitpicking", 36, 1, 4, None]
        ,[58, "PCA/PCR Master Mix Addition", 37, 1, 4, None]
        ,[59, "PCA/PCR Thermocycling", 2, 1, 0, None]
    ]

    for row_values in desired_values:
        args = dict(zip(sql_param_names, row_values))

        # try update first, then insert.  The WHERE clauses in the SQL
        # for insert and update should allow this without causing failures.
        op.execute(sa.text(update_sql).bindparams(**args))
        op.execute(sa.text(insert_sql).bindparams(**args))
Example #6
def generateToken(username, password):
    # Check that the username and password are correct; if not, return 'wrong'
    if auth(username, password) is None:
        return 'wrong'
    # Username and password are right, so check if the user has an associated token
    utente_id = session.query(Utente).filter(Utente.username==username).first()
    utente_id = utente_id.id
    q = text('SELECT token FROM tokens WHERE utente=:utente_id')
    q = q.bindparams(utente_id=utente_id)
    token = db.execute(q).fetchone()
    print token
    # If there is already a token for that user, return it
    if token is not None:
        return token
    # If there is no token for the user, generate a new unique one
    while True:
        token = uuid.uuid4().hex
        print token
        res = session.query(Token).filter(Token.token == token).first()
        if res is not None:
            print "token ", token, " already exists in the table"
        else:
            print "token does not exist, returning it to the caller"
            break
    q = text('INSERT INTO tokens VALUES (:utente_id, :token)')
    q = q.bindparams(utente_id=utente_id, token=token)
    db.execute(q)
    d = {'token': token}
    return d
Example #7
def dat_crime(fpath=None):
    # Step Zero: Create dat_crime table
    raw_crime(fpath=fpath)
    dedupe_crime()
    src_crime()
    src_crime_table = Table('src_chicago_crimes_all', Base.metadata, 
        autoload=True, autoload_with=engine, extend_existing=True)
    dat_crime_table = crime_table('dat_chicago_crimes_all', Base.metadata)
    dat_crime_table.append_column(Column('chicago_crimes_all_row_id', Integer, primary_key=True))
    dat_crime_table.append_column(Column('start_date', TIMESTAMP, server_default=text('CURRENT_TIMESTAMP')))
    dat_crime_table.append_column(Column('end_date', TIMESTAMP, server_default=text('NULL')))
    dat_crime_table.append_column(Column('current_flag', Boolean, server_default=text('TRUE')))
    dat_crime_table.append_constraint(UniqueConstraint('id', 'start_date'))
    dat_crime_table.create(bind=engine, checkfirst=True)
    new_cols = ['start_date', 'end_date', 'current_flag', 'chicago_crimes_all_row_id']
    dat_ins = dat_crime_table.insert()\
        .from_select(
            [c for c in dat_crime_table.columns.keys() if c not in new_cols],
            select([c for c in src_crime_table.columns])
        )
    conn = engine.contextual_connect()
    res = conn.execute(dat_ins)
    cols = crime_master_cols(dat_crime_table)
    master_ins = MasterTable.insert()\
        .from_select(
            [c for c in MasterTable.columns.keys() if c != 'master_row_id'],
            select(cols)\
                .select_from(dat_crime_table)
        )
    conn = engine.contextual_connect()
    res = conn.execute(master_ins)
    return 'DAT crime created'
Example #8
def get_heatmap():
    lat = 0
    lng = 0
    if request.args.get('lat') is not None:
        lat = float(request.args.get('lat'))
    if request.args.get('lng') is not None:
        lng = float(request.args.get('lng'))

    sql_query = '''SELECT id
                FROM areas
                WHERE mbrcontains(areas.coordinates, GeomFromText('POINT(%(lat)f %(lng)f)')) = 1''' % {
                    'lat': lng,
                    'lng': lat
                }
    area_id = db.engine.execute(text(sql_query)).fetchall()[0][0]

    print(area_id, file=sys.stderr)

    sql_query = '''SELECT ST_X(point_to), ST_Y(point_to), time_transit
                FROM transit
                WHERE area_id = %(area_id)i AND time_transit != -1''' % {
                    'area_id': area_id
                }
    points = []
    for point in db.engine.execute(text(sql_query)).fetchall():
        points.append({'lat': point[0], 'lng': point[1], 'weight': point[2]})

    return Response(response=json.dumps(points, indent=3), status=200, mimetype="application/json")
Example #9
    def update_last_seen(steam_ids):
        """Update last_seen column on a player

        Args:
            steam_ids: Integer or List of steam id
        Returns:
            True if any updates
        """

        do_commit = False

        if isinstance(steam_ids, int):
            p = Db.find_player(steam_id=steam_ids)
            if p:
                p.last_seen = text("NOW()")
                DbCore.session.add(p)
                do_commit = True
        else:
            for steam_id in steam_ids:
                p = Db.find_player(steam_id=steam_id)
                if p:
                    p.last_seen = text("NOW()")
                    DbCore.session.add(p)
                    do_commit = True

        if do_commit:
            DbCore.commit()
            return True
        return False
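
A minimal usage sketch for the method above, assuming it is exposed on the Db class whose helpers it calls; the steam ids are made up:

# Single id: touches that player's last_seen if the player exists.
Db.update_last_seen(76561198000000001)

# List of ids: touches every player found, committing once at the end;
# returns True only if at least one player was updated.
if Db.update_last_seen([76561198000000001, 76561198000000002]):
    print("last_seen refreshed")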
def update_source_pks(conn, before_after):
    select_fks = sa.text('SELECT c.conname AS const, '
            's.relname AS stab, sa.attname AS scol, '
            't.relname AS ttab, ta.attname AS tcol '
        'FROM pg_constraint AS c '
        'JOIN pg_class AS s ON c.conrelid = s.oid '
        'JOIN pg_attribute AS sa ON sa.attrelid = s.oid AND sa.attnum = ANY(c.conkey) '
        'JOIN pg_class AS t ON c.confrelid = t.oid '
        'JOIN pg_attribute AS ta ON ta.attrelid = t.oid AND ta.attnum = ANY(c.confkey) '
        'WHERE c.contype = :type AND (t.relname, ta.attname) IN :tabcols '
        'ORDER BY ttab, tcol, stab, scol',
        ).bindparams(type='f', tabcols=(('source', 'pk'), ('ref', 'pk')))

    def drop_add_fk(names):
        yield 'ALTER TABLE %(stab)s DROP CONSTRAINT %(const)s' % names
        yield ('ALTER TABLE %(stab)s ADD CONSTRAINT %(const)s '
            'FOREIGN KEY (%(scol)s) REFERENCES %(ttab)s (%(tcol)s)' % names)
    
    update_source = sa.text('UPDATE source SET pk = :after WHERE pk = :before')
    update_other = 'UPDATE %(stab)s SET %(scol)s = :after WHERE %(scol)s = :before'

    fks = conn.execute(select_fks).fetchall()
    drop, add = zip(*map(drop_add_fk, fks))
    
    conn.execute(';\n'.join(drop))
    conn.execute(update_source, before_after)
    for names in fks:
        conn.execute(sa.text(update_other % names), before_after)
    conn.execute(';\n'.join(add))
def upgrade():
    print("{} upgrade: starting transaction".format(__file__))
    conn = op.get_bind()
    transaction = conn.begin()

    try:
        print("{} upgrade: creating scheama".format(__file__))
        conn.execute(text("""CREATE SCHEMA alembic_example;"""))
        print("{} upgrade: creating table".format(__file__))
        conn.execute(text("""CREATE TABLE alembic_example.users (
            id          bigserial,
            created_at  timestamp with time zone default now() NOT NULL,
            updated_at  timestamp with time zone default now() NOT NULL,
            first_name  varchar(255) NOT NULL,
            last_name   varchar(255) NOT NULL,
            email       varchar(255) NOT NULL
        );"""))
        print("{} upgrade: creating index".format(__file__))
        conn.execute(text("""
                          CREATE UNIQUE INDEX idx_users_email
                              ON alembic_example.users (email)
                          """))
        print("{} upgrade: committing".format(__file__))
        transaction.commit()
        print("{} upgrade: done!".format(__file__))
    except:
        print("{} upgrade: error, rolling back!".format(__file__))
        transaction.rollback()
        raise
def upgrade():
    conn = op.get_bind()

    exists_genus = sa.text('SELECT EXISTS (SELECT 1 FROM genus WHERE id = :id)',
        conn)

    select_family = sa.text('SELECT pk FROM family WHERE id = :id', conn)

    insert_genus = sa.text('INSERT INTO genus '
        '(created, updated, active, id, name, family_pk, icon) VALUES '
        '(now(), now(), true, :id, :name, :family_pk, :icon)', conn)

    update_lang = sa.text('UPDATE language AS l SET updated = now() '
        'WHERE id = :id AND EXISTS (SELECT 1 FROM walslanguage WHERE pk = l.pk '
        'AND genus_pk != (SELECT pk FROM genus WHERE id = :genus_id))', conn)

    update_wals = sa.text('UPDATE walslanguage SET genus_pk = '
        '(SELECT pk FROM genus WHERE id = :genus_id) '
        'WHERE pk = (SELECT pk FROM language WHERE id = :id) '
        'AND genus_pk != (SELECT pk FROM genus WHERE id = :genus_id)', conn)

    if not exists_genus.scalar(id=ID):
        insert_genus.execute(id=ID, name=GENUS, icon=ICON,
            family_pk=select_family.scalar(id=FAMILY_ID))

    for id in LANGUAGES:
        update_lang.execute(id=id, genus_id=ID)
        update_wals.execute(id=id, genus_id=ID)
def upgrade(verbose=True):
    conn = op.get_bind()

    select_ba = sa.text('SELECT s.pk AS before, s.id::int AS after '
        'FROM source AS s WHERE s.pk != s.id::int '
        'AND NOT EXISTS (SELECT 1 FROM source WHERE pk = s.id::int) '
        'ORDER BY s.id::int', conn)

    set_sequence = sa.text("""SELECT setval('source_pk_seq', max(x))
        FROM unnest(array[(SELECT coalesce(max(pk), 0) FROM source), :minimum])
        AS x""", conn)

    before_after = select_ba.execute().fetchall()
    if verbose:
        from itertools import groupby
        def groupkey(key):
            i, (b, a) = key
            return i - b
        def consecutive(ba):
            for k, g in groupby(enumerate(ba), groupkey):
                group = [ba for i, ba in g]
                yield group[0], group[-1]
        print(list(consecutive(before_after)))
            
    update_source_pks(conn, before_after)

    set_sequence.scalar(minimum=510000)
def _update_region_batch(bind, shard_id, geocoder, batch=10000):
    rows = bind.execute(sa.text(stmt_select_region.format(
        id=shard_id, batch=batch))).fetchall()

    areas = set()
    cells = {}
    deleted = 0

    i = 0
    for row in rows:
        code = geocoder.region_for_cell(row.lat, row.lon, row.mcc)
        if code not in cells:
            cells[code] = []
        cells[code].append(row[0])
        if not code:
            # cellid is an 11-byte value whose last 4 bytes are the cid;
            # the column stores it hex encoded, so 22 characters minus the
            # 8-character cid leaves the 14-character area id
            areas.add(row[0][:14])
            deleted += 1
        i += 1

    for code, cellids in cells.items():
        ids = 'UNHEX("' + '"), UNHEX("'.join(cellids) + '")'
        if not code:
            op.execute(sa.text(stmt_delete_outside.format(
                id=shard_id, ids=ids)))
        else:
            op.execute(sa.text(stmt_update_region.format(
                id=shard_id, code=code, ids=ids)))

    return (i, areas, deleted)
def upgrade():
    op.create_table('locale',
    sa.Column('id', sa.Integer(), nullable=False, server_default=sa.text("nextval('core_seq_general')")),
    sa.Column('created', sa.DateTime(), nullable=False, server_default=func.now()),
    sa.Column('version', sa.DateTime(), nullable=False, server_default=func.now()),
    sa.Column('enabled', sa.Boolean(), nullable=False),
    sa.Column('type', sa.String(), nullable=False),
    sa.Column('label', sa.String(), nullable=False),
    sa.Column('value', sa.String(), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('type', 'value')
    )

    connection = op.get_bind()
    t = sa.text('''
        INSERT INTO locale(enabled, type, label, value) VALUES
            (true, 'lang', 'English', 'en'),
            (true, 'lang', 'Français', 'fr'),
            (true, 'lang', '中国', 'cn'),
            (true, 'tz', 'Europe/Paris', 'Europe/Paris')
    ''')
    connection.execute(t)

    op.add_column('contact', sa.Column('lang_id', sa.Integer(), sa.ForeignKey('locale.id'), nullable=True))
    op.add_column('contact', sa.Column('tz_id', sa.Integer(), sa.ForeignKey('locale.id'), nullable=True))
Example #16
def archive(archive):
    if request.method == 'POST':
        sql = '''
update archive
set identity=:identity, name=:name, gender=:gender, birthday=:birthday,
    retire_date=:retire_date, female_cadre=:female_cadre,
    special_personnel=:special_personnel
where archive=:archive
        '''
        param = {
            'identity': request.form['identity'],
            'name': request.form['name'],
            'gender': request.form['gender'],
            'birthday': request.form['birthday'],
            'retire_date': request.form['retire_date'],
            'female_cadre': request.form['female_cadre'],
            'special_personnel': request.form['special_personnel'],
            'archive': archive
        }
        gl.db_engine.execute(text(sql), param)
        return redirect('/archive/%s' % archive)

    sql = '''
select *
from archive
where archive=:archive
    '''
    param = {'archive': archive}
    res = gl.db_engine.execute(text(sql), param)
    archive = res.fetchone()
    return render_template('archive/archive.html', User=session['user'],
        archive=archive)
Example #17
    def drop_table(self, db_alias, table_name):
        """
        Drop table from provided database
        """
        tables = self.load_tables(db_alias, table_name)
        if table_name in tables:
            tab_obj = tables[table_name]
            try:
                tab_obj.drop()
            except :
                query = ""
                if self.db_type[db_alias] == 'oracle':
                    query = text("DROP TABLE " + tab_obj.name \
                        + " CASCADE constraints")
                elif self.db_type[db_alias] == 'postgresql':
                    query = text("DROP TABLE " + tab_obj.name \
                        + " CASCADE ")
                else:
                    traceback.print_exc()
                self.con.execute(query)
            try:
                tables.pop(table_name)
            except :
                pass
            try:
#                print self.db_tables
                self.db_tables[db_alias].pop(table_name)
            except :
                pass
            try:
                self.table_names[db_alias].remove(table_name)
            except :
                traceback.print_exc()
Example #18
    def testHandleStartedDisallowedBuilder(self):
        self.buildbot_db.execute(sa.text("""
INSERT INTO buildrequests
    (id, buildsetid, buildername, submitted_at, claimed_by_name, claimed_by_incarnation)
    VALUES (4, 0, "bad", 50, "a", "b");"""))
        self.buildbot_db.execute(sa.text("""
INSERT INTO builds
    (id, number, brid, start_time)
    VALUES (0, 2, 4, 60);"""))

        data = {
            "payload": {
                "build": {
                    "number": 2,
                    "builderName": "bad",
                }
            },
            "_meta": {
                "master_name": "a",
                "master_incarnation": "b",
            },
        }
        self.bblistener.handleStarted(data, Mock())

        self.assertEqual(self.bblistener.tc_queue.claimTask.call_count, 0)
Example #19
    def _handleFinishedTest(self, results):
        self.buildbot_db.execute(sa.text("""
INSERT INTO buildrequests
    (id, buildsetid, buildername, submitted_at, complete, complete_at, results)
    VALUES (1, 0, "good", 5, 1, 30, 0);"""))
        self.buildbot_db.execute(sa.text("""
INSERT INTO builds
    (id, number, brid, start_time, finish_time)
    VALUES (0, 0, 1, 15, 30);"""))
        self.tasks.insert().execute(
            buildrequestId=1,
            taskId=makeTaskId(),
            runId=0,
            createdDate=5,
            processedDate=10,
            takenUntil=100,
        )

        data = {"payload": {"build": {
            "builderName": "good",
            "properties": (
                ("request_ids", (1,), "postrun.py"),
            ),
            "results": results,
        }}}
        self.bblistener.tc_queue.createArtifact.return_value = {
            "storageType": "s3",
            "putUrl": "http://foo.com",
        }
        self.bblistener.handleFinished(data, Mock())
Example #20
    def test_database_error(self, db_errors=0):
        cells = [
            CellShardFactory.build(radio=Radio.gsm),
            CellShardFactory.build(radio=Radio.wcdma),
            CellShardFactory.build(radio=Radio.lte),
        ]
        wifis = WifiShardFactory.build_batch(2)

        for model in (CellArea, CellOCID, CellAreaOCID):
            self.session.execute(text('drop table %s;' % model.__tablename__))
        for name in set([cell.__tablename__ for cell in cells]):
            self.session.execute(text('drop table %s;' % name))
        for name in set([wifi.__tablename__ for wifi in wifis]):
            self.session.execute(text('drop table %s;' % name))

        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(body=query, ip=self.test_ip)
        self.check_response(res, 'ok')
        self.check_stats(counter=[
            ('request', [self.metric_path, 'method:post', 'status:200']),
        ], timer=[
            ('request', [self.metric_path, 'method:post']),
        ])
        if self.apikey_metrics:
            self.check_stats(counter=[
                (self.metric_type + '.result',
                    ['key:test', 'region:GB', 'fallback_allowed:false',
                     'accuracy:high', 'status:miss']),
            ])

        self.check_raven([('ProgrammingError', db_errors)])
def q1_solution_in_sql():
    """
    Q:  What percent of students attend school on their birthday?

    - in sqlite, the date formatting function is ``STRFTIME(format, datetime)``
    - in postgresql, the equivalent is ``to_char(datetime, format)``
    """
    sqlite_sql = sqlalchemy.text("""
    SELECT 
        COUNT(DISTINCT(student.student_id)) as n_student_attend_school_on_birthday
    FROM attendance
    JOIN student
    ON attendance.student_id = student.student_id
    WHERE 
        STRFTIME('%m-%d', attendance.date) = STRFTIME('%m-%d', student.dob)
        AND attendance.attendance = 1
    """)
    pprint(sqlite_sql, engine)

    postgres_sql = sqlalchemy.text("""
    SELECT 
        COUNT(DISTINCT(student.student_id)) as n_student_attend_school_on_birthday
    FROM attendance
    JOIN student
    ON attendance.student_id = student.student_id
    WHERE 
        to_char(attendance.date, 'MM-DD') = to_char(student.dob, 'MM-DD')
        AND attendance.attendance = True
    """)
Example #22
    def test_database_error(self, db_errors=0):
        for tablename in ('cell', 'cell_area',
                          'ocid_cell', 'ocid_cell_area'):
            self.session.execute(text('drop table %s;' % tablename))
        for i in range(16):
            self.session.execute(text(
                'drop table wifi_shard_%s;' % hex(i)[2:]))

        cells = CellFactory.build_batch(2)
        wifis = WifiShardFactory.build_batch(2)

        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(body=query, ip=self.test_ip)
        self.check_response(res, 'ok')
        self.check_stats(counter=[
            ('request', [self.metric_path, 'method:post', 'status:200']),
        ], timer=[
            ('request', [self.metric_path, 'method:post']),
        ])
        if self.apikey_metrics:
            self.check_stats(counter=[
                (self.metric_type + '.result',
                    ['key:test', 'country:GB',
                     'accuracy:high', 'status:miss']),
            ])

        self.check_raven([('ProgrammingError', db_errors)])
def listAllProjects():
	req = request.get_json()
	reslist = []
	query = db.engine.execute(text("select ProjectDB.projectID,ProjectDB.title,ProjectDB.description,\
		ProjectDB.department, ProjectDB.time_stamp, UserDB.email, UserDB.first_name,UserDB.last_name,\
		UserDB.profile_picture, ( select count(InterestDB.email) from InterestDB where \
		InterestDB.projectID = ProjectDB.projectID) count, MAX(IF(InterestDB.email = :email, \
		TRUE, FALSE)) as liked from UserDB, ProjectDB LEFT JOIN InterestDB ON \
		(ProjectDB.projectID = InterestDB.projectID) where ProjectDB.email = UserDB.email GROUP BY ProjectDB.projectID;"), email=req["email"]))
	
	for i in query:
		if(i.liked == 0):
			reslist.append(dict(projectID=i.projectID,title=i.title,description=i.description,department=i.department,email=i.email, time_stamp=i.time_stamp,\
				first_name=i.first_name, last_name=i.last_name, profile_picture=i.profile_picture, count=i.count,liked=False))
		else:
			reslist.append(dict(projectID=i.projectID,title=i.title,description=i.description,department=i.department,email=i.email, time_stamp=i.time_stamp,\
				first_name=i.first_name, last_name=i.last_name, profile_picture=i.profile_picture, count=i.count,liked=True))

	allDepartmentProjectCountGraph = []
	allDepartmentProjectCountGraphCounts = db.engine.execute(text("SELECT department, \
		COUNT(DISTINCT projectID) projectCount FROM ProjectDB GROUP BY department"))
	for row in allDepartmentProjectCountGraphCounts:
		allDepartmentProjectCountGraph.append(dict(department=row.department,likes=row.projectCount))

	return jsonify(list=reslist,allDepartmentProjectCountGraph=allDepartmentProjectCountGraph), 200
def upgrade():
    conn = op.get_bind()

    strip_prefix = sa.text('UPDATE identifier AS i SET updated = now(), '
        'name = substring(name from :extract) '
        'WHERE type = :type AND name LIKE :match '
        'AND NOT EXISTS (SELECT 1 FROM identifier '
            'WHERE active AND type = i.type '
            'AND description = i.description AND lang = i.lang '
            'AND name = substring(i.name from :extract))', conn)

    del_prefixed = sa.text('DELETE FROM identifier AS i '
        'WHERE type = :type AND name LIKE :match '
        'AND EXISTS (SELECT 1 FROM identifier '
            'WHERE type = i.type '
            'AND description = i.description AND lang = i.lang '
            'AND name = substring(i.name from :extract))', conn)

    del_prefixed_langident = sa.text('DELETE FROM languageidentifier AS li '
        'WHERE EXISTS (SELECT 1 FROM identifier AS i '
            'WHERE type = :type AND name LIKE :match '
            'AND pk = li.identifier_pk  '
            'AND EXISTS (SELECT 1 FROM identifier '
                'WHERE type = i.type '
                'AND description = i.description AND lang = i.lang '
                'AND name = substring(i.name from :extract)))', conn)
    
    for match, extract in MATCH_EXTRACT:
        for query in (strip_prefix, del_prefixed_langident, del_prefixed):
            query.execute(type='name', match=match, extract=extract)
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('record_schedule',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('project_name', sa.String(length=64, convert_unicode=True), server_default=u'', nullable=False),
    sa.Column('name', sa.String(length=255, convert_unicode=True), server_default=u'', nullable=False),
    sa.Column('desc', sa.String(length=255, convert_unicode=True), server_default=u'', nullable=False),
    sa.Column('long_desc', sa.String(length=1024, convert_unicode=True), server_default=u'', nullable=False),
    sa.ForeignKeyConstraint(['project_name'], [u'project.name'], onupdate=u'CASCADE', ondelete=u'CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('record_schedule_entry',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('schedule_id', sa.BigInteger(), server_default=sa.text(u'0'), nullable=False),
    sa.Column('date', sa.Date(), nullable=True),
    sa.Column('monthday', sa.SmallInteger(), nullable=True),
    sa.Column('weekday', sa.SmallInteger(), nullable=True),
    sa.Column('start', sa.Time(), nullable=True),
    sa.Column('end', sa.Time(), nullable=True),
    sa.Column('prerecord', sa.Boolean(), server_default=sa.text(u'0'), nullable=False),
    sa.ForeignKeyConstraint(['schedule_id'], [u'record_schedule.id'], onupdate=u'CASCADE', ondelete=u'CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('camera_record_config',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('camera_uuid', sa.CHAR(length=36, convert_unicode=True), server_default=u'', nullable=False),
    sa.Column('schedule_id', sa.BigInteger(), nullable=True),
    sa.Column('stream_quality', sa.CHAR(length=8, convert_unicode=True), server_default=u'', nullable=False),
    sa.Column('mannual_enabled', sa.Boolean(), server_default=sa.text(u'0'), nullable=False),
    sa.Column('stop_time', sa.TIMESTAMP(), server_default=sa.text(u'0'), nullable=False),
    sa.ForeignKeyConstraint(['camera_uuid'], [u'camera.uuid'], onupdate=u'CASCADE', ondelete=u'CASCADE'),
    sa.ForeignKeyConstraint(['schedule_id'], [u'record_schedule.id'], onupdate=u'CASCADE', ondelete=u'SET NULL'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('camera_uuid')
    )
def upgrade():
    update_name = sa.text('UPDATE language SET updated = now(), '
        'name = :after WHERE id = :id AND name = :before')

    params = {'type': 'name', 'description': 'other', 'lang': 'en'}

    insert_ident = sa.text('INSERT INTO identifier '
        '(created, updated, active, version, type, description, lang, name) '
        'SELECT now(), now(), true, 1, :type, :description, :lang, :name '
        'WHERE NOT EXISTS (SELECT 1 FROM identifier WHERE type = :type '
        'AND description = :description AND lang = :lang AND name = :name)'
        ).bindparams(**params)

    insert_lang_ident = sa.text('INSERT INTO languageidentifier '
        '(created, updated, active, version, language_pk, identifier_pk) '
        'SELECT now(), now(), true, 1, '
        '(SELECT pk FROM language WHERE id = :id), '
        '(SELECT pk FROM identifier WHERE type = :type '
        'AND description = :description AND lang = :lang AND name = :name) '
        'WHERE NOT EXISTS (SELECT 1 FROM languageidentifier '
        'WHERE language_pk = (SELECT pk FROM language WHERE id = :id) '
        'AND identifier_pk = (SELECT pk FROM identifier WHERE type = :type '
        'AND description = :description AND lang = :lang AND name = :name))'
        ).bindparams(**params)

    op.execute(update_name.bindparams(id=ID, before=BEFORE, after=AFTER))
    op.execute(insert_ident.bindparams(name=BEFORE))
    op.execute(insert_lang_ident.bindparams(id=ID, name=BEFORE))
def find_pending(db):
    engine = sa.create_engine(db)
    inspector = Inspector(engine)
    # Newer buildbot has a "buildrequest_claims" table
    if "buildrequest_claims" in inspector.get_table_names():
        query = sa.text("""
        SELECT buildername, count(*) FROM
               buildrequests WHERE
               complete=0 AND
               submitted_at > :yesterday AND
               submitted_at < :toonew AND
               (select count(*) from buildrequest_claims where brid=id) = 0

               GROUP BY buildername""")
    # Older buildbot doesn't
    else:
        query = sa.text("""
        SELECT buildername, count(*) FROM
               buildrequests WHERE
               complete=0 AND
               claimed_at=0 AND
               submitted_at > :yesterday AND
               submitted_at < :toonew

               GROUP BY buildername""")

    result = engine.execute(
        query,
        yesterday=time.time() - 86400,
        toonew=time.time() - 60
    )
    retval = result.fetchall()
    return retval
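
A minimal usage sketch, with a made-up database URL; each returned row is a (buildername, pending_count) pair:

# Hypothetical connection string pointing at a buildbot scheduler database.
for buildername, pending in find_pending("sqlite:///state.sqlite"):
    print("%s: %d pending" % (buildername, pending))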
Example #28
def get_and_update_messages_to_send(user_message_set):
    '''
    Update the last time a message id was sent for a user.

    Expects a set of (login_id, message_id) tuples.
    '''
    # collect user messages in a dictionary for easier processing
    user_dict = defaultdict(set)
    for login_id, message_id in user_message_set:
        user_dict[login_id].add(message_id)
    with session_scope() as session:
        joined_login_ids = ','.join([str(login_id) for login_id in user_dict.keys()])
        array_login_ids = "'{{{}}}'".format(joined_login_ids)
        query = "select * from get_non_elligible_user_message_pairs({})".format(array_login_ids)
        non_eligible_messages = session.query(model.UserMessageLastTimeSent).\
            from_statement(text(query)).all()
        for non_eligible_message in non_eligible_messages:
            user_dict[non_eligible_message.login_id].discard(non_eligible_message.message_id)

        # update user_message_last_time_sent
        return_tuple = []
        for user, messages in user_dict.iteritems():
            for message in messages:
                return_tuple.append({str(user): str(message)})
                session.execute(text(
                    'SELECT upsert_user_message_last_time_sent((:login_id)::bigint, :message_id)'),
                                {
                                    'login_id': user,
                                    'message_id': message
                                })
        session.commit()
    return return_tuple
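
A minimal usage sketch for the function above, with made-up login and message ids:

# User 101 is due messages 1 and 2, user 202 is due message 1.
pairs = {(101, 1), (101, 2), (202, 1)}

# Pairs the database reports as non-eligible are dropped; the remainder are
# recorded through the upsert function and returned as one-entry dicts with
# the login id and message id as strings.
to_send = get_and_update_messages_to_send(pairs)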
Example #29
    def testHandleStartedMissingTaskId(self):
        self.buildbot_db.execute(sa.text("""
INSERT INTO buildrequests
    (id, buildsetid, buildername, submitted_at, claimed_by_name, claimed_by_incarnation)
    VALUES (4, 0, "good", 50, "a", "b");"""))
        self.buildbot_db.execute(sa.text("""
INSERT INTO builds
    (id, number, brid, start_time)
    VALUES (0, 2, 4, 60);"""))
        self.tasks.insert().execute(
            buildrequestId=4,
            taskId=makeTaskId(),
            runId=0,
            createdDate=50,
            processedDate=60,
            takenUntil=None
        )
        data = {
            "payload": {
                "build": {
                    "number": 2,
                    "builderName": "good",
                },
                "request_ids": [4, 5],
            },
            "_meta": {
                "master_name": "a",
                "master_incarnation": "b",
            },
        }
        self.bblistener.tc_queue.claimTask.return_value = {"takenUntil": 100}
        self.bblistener.handleStarted(data, Mock())

        self.assertEqual(self.bblistener.tc_queue.claimTask.call_count, 0)
Example #30
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('yt_match',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('espn_id', sa.Integer(), nullable=True),
    sa.Column('league_id', sa.Integer(), nullable=True),
    sa.Column('finish', sa.SmallInteger(), nullable=True),
    sa.Column('home_score', sa.SmallInteger(), nullable=True),
    sa.Column('away_score', sa.SmallInteger(), nullable=True),
    sa.Column('home_id', sa.Integer(), nullable=True),
    sa.Column('away_id', sa.Integer(), nullable=True),
    sa.Column('date', sa.Date(), nullable=True),
    sa.ForeignKeyConstraint(['away_id'], ['yt_team.id'], ),
    sa.ForeignKeyConstraint(['home_id'], ['yt_team.id'], ),
    sa.ForeignKeyConstraint(['league_id'], ['yt_league.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.alter_column(u'league_team', 'league_id',
               existing_type=mysql.INTEGER(display_width=11),
               nullable=True,
               existing_server_default=sa.text(u"'0'"))
    op.alter_column(u'league_team', 'team_id',
               existing_type=mysql.INTEGER(display_width=11),
               nullable=True,
               existing_server_default=sa.text(u"'0'"))
Example #31
    def do_new_build_for_builder(self, message_data, message):
        who = message_data['who']
        branch = message_data['body']['branch']
        revision = message_data['body']['revision']
        priority = message_data['body']['priority']
        builder_name = message_data['body']['builder_name']
        files = message_data['body']['files']
        log.info("New build for %s by %s of %s %s", builder_name, who, branch, revision)

        # Create a sourcestamp
        real_branch = branch.split("-selfserve")[0]
        q = text("""INSERT INTO sourcestamps
                (`branch`, `revision`, `patchid`, `repository`, `project`)
                VALUES
                (:real_branch, :revision, NULL, '', '')
                """)
        log.debug(q)
        r = self.db.execute(q, real_branch=real_branch, revision=revision)
        ssid = r.lastrowid  # SourcestampID
        log.debug("Created sourcestamp %s", ssid)

        # Create change object
        when = time.time()
        q = text("""INSERT INTO changes
                (`author`, `comments`, `is_dir`, `branch`,
                `revision`, `revlink`, `when_timestamp`, `category`,
                `repository`, `project`)
                VALUES
                (:who, '', 0, :branch, :revision, NULL, :when, NULL, '', '')
                """)
        log.debug(q)
        r = self.db.execute(q, who=who, branch=branch, revision=revision, when=when)
        cid = r.lastrowid
        log.debug("Created change %s", cid)

        # Create change-files
        for f in files:
            q = text("""INSERT INTO change_files
                    (`changeid`, `filename`)
                    VALUES
                    (:cid, :f)
                    """)
            log.debug(q)
            r = self.db.execute(q, cid=cid, f=f)
        log.debug("Created change_file for change object %s", cid)

        # Create sourcestamp_changes
        q = text("""INSERT INTO sourcestamp_changes
                (`sourcestampid`, `changeid`)
                VALUES
                (:ssid, :cid)
                """)
        log.debug(q)
        r = self.db.execute(q, ssid=ssid, cid=cid)
        log.debug("Created sourcestamp_changes for sourcestamp %s, change object %s", ssid, cid)

        # Create a new buildset
        now = time.time()
        buildsetid = create_buildset(
            self.db,
            idstring=None,
            reason='Self-serve: Requested by %s' % who,
            ssid=ssid,
            submitted_at=now,
        )

        # Create buildset properties (buildid, builduid)
        q = text("""INSERT INTO buildset_properties
                (`buildsetid`, `property_name`, `property_value`)
                VALUES
                (:buildsetid, :key, :value)
                """)
        props = {
            'buildid': json.dumps((genBuildID(now), "self-serve")),
            'builduid': json.dumps((genBuildUID(), "self-serve")),
        }
        # Pair each key with a JSON-encoded tuple of its value and the source "self-serve"
        props.update(((k, json.dumps((v, "self-serve"))) for (k, v) in message_data['body']['properties'].iteritems()))
        log.debug(q)
        for key, value in props.items():
            r = self.db.execute(q, buildsetid=buildsetid, key=key, value=value)
            log.debug("Created buildset_property %s=%s", key, value)

        # Create buildrequests
        q = text("""INSERT INTO buildrequests
                (`buildsetid`, `buildername`, `submitted_at`, `priority`, `claimed_at`, `claimed_by_name`, `claimed_by_incarnation`, `complete`, `results`, `complete_at`)
                VALUES
                (:buildsetid, :builder_name, :submitted_at, :priority, 0, NULL, NULL, 0, NULL, NULL)""")
        log.debug(q)
        r = self.db.execute(
            q,
            buildsetid=buildsetid,
            builder_name=builder_name,
            submitted_at=now,
            priority=priority)
        log.debug("Created buildrequest %s: %i", builder_name, r.lastrowid)
        return {"errors": False, "msg": "Ok"}
Example #32
    def get_weibo_comment_not_crawled(cls):
        return db_session.query(WeiboData.weibo_id).filter(
            text('comment_crawled=0')).all()
Example #33
File: Auto.py Project: wifimedia/rd
class AutoDevice(Base):
    __tablename__ = 'auto_devices'

    mac = Column(String(17), primary_key=True)
    username = Column(String(255), nullable=False, server_default=text("''"))
Example #34
def sync_trigger(conn,
                 table_name,
                 tsvector_column,
                 indexed_columns,
                 metadata=None,
                 options=None):
    """
    Synchronizes search trigger and trigger function for given table and given
    search index column. Internally this function executes the following SQL
    queries:

    * Drops search trigger for given table (if it exists)
    * Drops search function for given table (if it exists)
    * Creates search function for given table
    * Creates search trigger for given table
    * Updates all rows for given search vector by running a column=column
      update query for given table.


    Example::

        from sqlalchemy_searchable import sync_trigger


        sync_trigger(
            conn,
            'article',
            'search_vector',
            ['name', 'content']
        )


    This function is especially useful when working with alembic migrations.
    In the following example we add a content column to article table and then
    sync the trigger to contain this new column. ::


        from alembic import op
        from sqlalchemy_searchable import sync_trigger


        def upgrade():
            conn = op.get_bind()
            op.add_column('article', sa.Column('content', sa.Text))

            sync_trigger(conn, 'article', 'search_vector', ['name', 'content'])

        # ... same for downgrade


    If you are using vectorizers you need to initialize them in your migration
    file and pass them to this function. ::


        import sqlalchemy as sa
        from alembic import op
        from sqlalchemy.dialects.postgresql import HSTORE
        from sqlalchemy_searchable import sync_trigger, vectorizer


        def upgrade():
            vectorizer.clear()

            conn = op.get_bind()
            op.add_column('article', sa.Column('name_translations', HSTORE))

            metadata = sa.MetaData(bind=conn)
            articles = sa.Table('article', metadata, autoload=True)

            @vectorizer(articles.c.name_translations)
            def hstore_vectorizer(column):
                return sa.cast(sa.func.avals(column), sa.Text)

            op.add_column('article', sa.Column('content', sa.Text))
            sync_trigger(
                conn,
                'article',
                'search_vector',
                ['name_translations', 'content'],
                metadata=metadata
            )

        # ... same for downgrade

    :param conn: SQLAlchemy Connection object
    :param table_name: name of the table to apply search trigger syncing
    :param tsvector_column:
        TSVector typed column which is used as the search index column
    :param indexed_columns:
        Full text indexed column names as a list
    :param metadata:
        Optional SQLAlchemy metadata object that is being used for autoloaded
        Table. If None is given then a new MetaData object is initialized within
        this function.
    :param options: Dictionary of configuration options
    """
    if metadata is None:
        metadata = sa.MetaData()
    table = sa.Table(table_name, metadata, autoload=True, autoload_with=conn)
    params = dict(tsvector_column=getattr(table.c, tsvector_column),
                  indexed_columns=indexed_columns,
                  options=options,
                  conn=conn)
    classes = [
        DropSearchTriggerSQL,
        DropSearchFunctionSQL,
        CreateSearchFunctionSQL,
        CreateSearchTriggerSQL,
    ]
    for class_ in classes:
        sql = class_(**params)
        conn.execute(str(sql), **sql.params)
    # Run a column = column update on one indexed column so the freshly created
    # trigger recomputes the search vector for every existing row.
    update_sql = table.update().values(
        {indexed_columns[0]: sa.text(indexed_columns[0])})
    conn.execute(update_sql)
Example #35
    def execute(self, command, **kwargs):
        with sqlalchemy.create_engine(
                self.connection_url).begin() as connection:
            return connection.execute(sqlalchemy.text(command), **kwargs)
def upgrade():
    conn = op.get_bind()
    conn.execute(sa.text('PRAGMA secure_delete = ON'))
    conn.execute(sa.text('PRAGMA auto_vacuum = FULL'))
Example #37
    def get_sqla_query(  # sqla
        self,
        metrics: List[Metric],
        granularity: str,
        from_dttm: Optional[datetime],
        to_dttm: Optional[datetime],
        columns: Optional[List[str]] = None,
        groupby: Optional[List[str]] = None,
        filter: Optional[List[Dict[str, Any]]] = None,
        is_timeseries: bool = True,
        timeseries_limit: int = 15,
        timeseries_limit_metric: Optional[Metric] = None,
        row_limit: Optional[int] = None,
        row_offset: Optional[int] = None,
        inner_from_dttm: Optional[datetime] = None,
        inner_to_dttm: Optional[datetime] = None,
        orderby: Optional[List[Tuple[ColumnElement, bool]]] = None,
        extras: Optional[Dict[str, Any]] = None,
        order_desc: bool = True,
    ) -> SqlaQuery:
        """Querying any sqla table from this common interface"""
        template_kwargs = {
            "from_dttm": from_dttm,
            "groupby": groupby,
            "metrics": metrics,
            "row_limit": row_limit,
            "row_offset": row_offset,
            "to_dttm": to_dttm,
            "filter": filter,
            "columns": {col.column_name: col for col in self.columns},
        }
        is_sip_38 = is_feature_enabled("SIP_38_VIZ_REARCHITECTURE")
        template_kwargs.update(self.template_params_dict)
        extra_cache_keys: List[Any] = []
        template_kwargs["extra_cache_keys"] = extra_cache_keys
        template_processor = self.get_template_processor(**template_kwargs)
        db_engine_spec = self.database.db_engine_spec
        prequeries: List[str] = []

        orderby = orderby or []

        # For backward compatibility
        if granularity not in self.dttm_cols:
            granularity = self.main_dttm_col

        # Database spec supports join-free timeslot grouping
        time_groupby_inline = db_engine_spec.time_groupby_inline

        cols: Dict[str, Column] = {col.column_name: col for col in self.columns}
        metrics_dict: Dict[str, SqlMetric] = {m.metric_name: m for m in self.metrics}

        if not granularity and is_timeseries:
            raise Exception(
                _(
                    "Datetime column not provided as part table configuration "
                    "and is required by this type of chart"
                )
            )
        if (
            not metrics
            and not columns
            and (is_sip_38 or (not is_sip_38 and not groupby))
        ):
            raise Exception(_("Empty query?"))
        metrics_exprs: List[ColumnElement] = []
        for m in metrics:
            if utils.is_adhoc_metric(m):
                assert isinstance(m, dict)
                metrics_exprs.append(self.adhoc_metric_to_sqla(m, cols))
            elif isinstance(m, str) and m in metrics_dict:
                metrics_exprs.append(metrics_dict[m].get_sqla_col())
            else:
                raise Exception(_("Metric '%(metric)s' does not exist", metric=m))
        if metrics_exprs:
            main_metric_expr = metrics_exprs[0]
        else:
            main_metric_expr, label = literal_column("COUNT(*)"), "ccount"
            main_metric_expr = self.make_sqla_column_compatible(main_metric_expr, label)

        select_exprs: List[Column] = []
        groupby_exprs_sans_timestamp = OrderedDict()

        if (is_sip_38 and metrics and columns) or (not is_sip_38 and groupby):
            # dedup columns while preserving order
            columns_ = columns if is_sip_38 else groupby
            assert columns_
            groupby = list(dict.fromkeys(columns_))

            select_exprs = []
            for s in groupby:
                if s in cols:
                    outer = cols[s].get_sqla_col()
                else:
                    outer = literal_column(f"({s})")
                    outer = self.make_sqla_column_compatible(outer, s)

                groupby_exprs_sans_timestamp[outer.name] = outer
                select_exprs.append(outer)
        elif columns:
            for s in columns:
                select_exprs.append(
                    cols[s].get_sqla_col()
                    if s in cols
                    else self.make_sqla_column_compatible(literal_column(s))
                )
            metrics_exprs = []

        assert extras is not None
        time_range_endpoints = extras.get("time_range_endpoints")
        groupby_exprs_with_timestamp = OrderedDict(groupby_exprs_sans_timestamp.items())
        if granularity:
            dttm_col = cols[granularity]
            time_grain = extras.get("time_grain_sqla")
            time_filters = []

            if is_timeseries:
                timestamp = dttm_col.get_timestamp_expression(time_grain)
                select_exprs += [timestamp]
                groupby_exprs_with_timestamp[timestamp.name] = timestamp

            # Use main dttm column to support index with secondary dttm columns.
            if (
                db_engine_spec.time_secondary_columns
                and self.main_dttm_col in self.dttm_cols
                and self.main_dttm_col != dttm_col.column_name
            ):
                time_filters.append(
                    cols[self.main_dttm_col].get_time_filter(
                        from_dttm, to_dttm, time_range_endpoints
                    )
                )
            time_filters.append(
                dttm_col.get_time_filter(from_dttm, to_dttm, time_range_endpoints)
            )

        select_exprs += metrics_exprs

        labels_expected = [c._df_label_expected for c in select_exprs]

        select_exprs = db_engine_spec.make_select_compatible(
            groupby_exprs_with_timestamp.values(), select_exprs
        )
        qry = sa.select(select_exprs)

        tbl = self.get_from_clause(template_processor)

        if (is_sip_38 and metrics) or (not is_sip_38 and not columns):
            qry = qry.group_by(*groupby_exprs_with_timestamp.values())

        where_clause_and = []
        having_clause_and = []

        for flt in filter:  # type: ignore
            if not all([flt.get(s) for s in ["col", "op"]]):
                continue
            col = flt["col"]
            op = flt["op"].upper()
            col_obj = cols.get(col)
            if col_obj:
                is_list_target = op in (
                    utils.FilterOperator.IN.value,
                    utils.FilterOperator.NOT_IN.value,
                )
                eq = self.filter_values_handler(
                    values=flt.get("val"),
                    target_column_is_numeric=col_obj.is_numeric,
                    is_list_target=is_list_target,
                )
                if op in (
                    utils.FilterOperator.IN.value,
                    utils.FilterOperator.NOT_IN.value,
                ):
                    cond = col_obj.get_sqla_col().in_(eq)
                    if isinstance(eq, str) and NULL_STRING in eq:
                        cond = or_(cond, col_obj.get_sqla_col() == None)  # == None renders as IS NULL
                    if op == utils.FilterOperator.NOT_IN.value:
                        cond = ~cond
                    where_clause_and.append(cond)
                else:
                    if col_obj.is_numeric:
                        eq = utils.cast_to_num(flt["val"])
                    if op == utils.FilterOperator.EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() == eq)
                    elif op == utils.FilterOperator.NOT_EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() != eq)
                    elif op == utils.FilterOperator.GREATER_THAN.value:
                        where_clause_and.append(col_obj.get_sqla_col() > eq)
                    elif op == utils.FilterOperator.LESS_THAN.value:
                        where_clause_and.append(col_obj.get_sqla_col() < eq)
                    elif op == utils.FilterOperator.GREATER_THAN_OR_EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() >= eq)
                    elif op == utils.FilterOperator.LESS_THAN_OR_EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() <= eq)
                    elif op == utils.FilterOperator.LIKE.value:
                        where_clause_and.append(col_obj.get_sqla_col().like(eq))
                    elif op == utils.FilterOperator.IS_NULL.value:
                        where_clause_and.append(col_obj.get_sqla_col() == None)
                    elif op == utils.FilterOperator.IS_NOT_NULL.value:
                        where_clause_and.append(col_obj.get_sqla_col() != None)
                    else:
                        raise Exception(
                            _("Invalid filter operation type: %(op)s", op=op)
                        )
        if config["ENABLE_ROW_LEVEL_SECURITY"]:
            where_clause_and += self._get_sqla_row_level_filters(template_processor)
        if extras:
            where = extras.get("where")
            if where:
                where = template_processor.process_template(where)
                where_clause_and += [sa.text("({})".format(where))]
            having = extras.get("having")
            if having:
                having = template_processor.process_template(having)
                having_clause_and += [sa.text("({})".format(having))]
        if granularity:
            qry = qry.where(and_(*(time_filters + where_clause_and)))
        else:
            qry = qry.where(and_(*where_clause_and))
        qry = qry.having(and_(*having_clause_and))

        if not orderby and ((is_sip_38 and metrics) or (not is_sip_38 and not columns)):
            orderby = [(main_metric_expr, not order_desc)]

        # To ensure correct handling of the ORDER BY labeling we need to reference the
        # metric instance if defined in the SELECT clause.
        metrics_exprs_by_label = {m._label: m for m in metrics_exprs}

        for col, ascending in orderby:
            direction = asc if ascending else desc
            if utils.is_adhoc_metric(col):
                col = self.adhoc_metric_to_sqla(col, cols)
            elif col in cols:
                col = cols[col].get_sqla_col()

            if isinstance(col, Label) and col._label in metrics_exprs_by_label:
                col = metrics_exprs_by_label[col._label]

            qry = qry.order_by(direction(col))

        if row_limit:
            qry = qry.limit(row_limit)
        if row_offset:
            qry = qry.offset(row_offset)

        if (
            is_timeseries
            and timeseries_limit
            and not time_groupby_inline
            and ((is_sip_38 and columns) or (not is_sip_38 and groupby))
        ):
            if self.database.db_engine_spec.allows_joins:
                # some SQL dialects require ORDER BY expressions to also
                # appear in the SELECT clause -- others, e.g. Vertica,
                # require a unique inner alias
                inner_main_metric_expr = self.make_sqla_column_compatible(
                    main_metric_expr, "mme_inner__"
                )
                inner_groupby_exprs = []
                inner_select_exprs = []
                for gby_name, gby_obj in groupby_exprs_sans_timestamp.items():
                    inner = self.make_sqla_column_compatible(gby_obj, gby_name + "__")
                    inner_groupby_exprs.append(inner)
                    inner_select_exprs.append(inner)

                inner_select_exprs += [inner_main_metric_expr]
                subq = select(inner_select_exprs).select_from(tbl)
                inner_time_filter = dttm_col.get_time_filter(
                    inner_from_dttm or from_dttm,
                    inner_to_dttm or to_dttm,
                    time_range_endpoints,
                )
                subq = subq.where(and_(*(where_clause_and + [inner_time_filter])))
                subq = subq.group_by(*inner_groupby_exprs)

                ob = inner_main_metric_expr
                if timeseries_limit_metric:
                    ob = self._get_timeseries_orderby(
                        timeseries_limit_metric, metrics_dict, cols
                    )
                direction = desc if order_desc else asc
                subq = subq.order_by(direction(ob))
                subq = subq.limit(timeseries_limit)

                on_clause = []
                for gby_name, gby_obj in groupby_exprs_sans_timestamp.items():
                    # in this case the column name, not the alias, needs to be
                    # conditionally mutated, as it refers to the column alias in
                    # the inner query
                    col_name = db_engine_spec.make_label_compatible(gby_name + "__")
                    on_clause.append(gby_obj == column(col_name))

                tbl = tbl.join(subq.alias(), and_(*on_clause))
            else:
                if timeseries_limit_metric:
                    orderby = [
                        (
                            self._get_timeseries_orderby(
                                timeseries_limit_metric, metrics_dict, cols
                            ),
                            False,
                        )
                    ]

                # run prequery to get top groups
                prequery_obj = {
                    "is_timeseries": False,
                    "row_limit": timeseries_limit,
                    "metrics": metrics,
                    "granularity": granularity,
                    "from_dttm": inner_from_dttm or from_dttm,
                    "to_dttm": inner_to_dttm or to_dttm,
                    "filter": filter,
                    "orderby": orderby,
                    "extras": extras,
                    "columns": columns,
                    "order_desc": True,
                }
                if not is_sip_38:
                    prequery_obj["groupby"] = groupby

                result = self.query(prequery_obj)
                prequeries.append(result.query)
                dimensions = [
                    c
                    for c in result.df.columns
                    if c not in metrics and c in groupby_exprs_sans_timestamp
                ]
                top_groups = self._get_top_groups(
                    result.df, dimensions, groupby_exprs_sans_timestamp
                )
                qry = qry.where(top_groups)
        return SqlaQuery(
            extra_cache_keys=extra_cache_keys,
            labels_expected=labels_expected,
            sqla_query=qry.select_from(tbl),
            prequeries=prequeries,
        )
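The extras handling above wraps free-form, templated WHERE/HAVING fragments in sa.text() and ANDs them onto the structured filters. A minimal standalone sketch of that pattern, using an assumed logs table and plain SQLAlchemy Core rather than Superset's column objects:

from sqlalchemy import Column, Integer, MetaData, String, Table, and_, select, text

metadata = MetaData()
logs = Table(
    "logs", metadata,
    Column("id", Integer, primary_key=True),
    Column("status", String(16)),
    Column("duration_ms", Integer),
)

# Structured filters built from column objects...
structured = [logs.c.status == "ok", logs.c.duration_ms < 500]
# ...plus a free-form fragment, e.g. the output of template processing.
free_form_where = "duration_ms % 2 = 0"

qry = select([logs]).where(and_(*(structured + [text("({})".format(free_form_where))])))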
Example #38
0
    def do_rebuild_request(self, message_data, message):
        who = message_data['who']
        brid = message_data['body']['brid']
        count = message_data['body'].get('count', 1)
        priority = message_data['body'].get('priority', 0)
        log.info("rebuilding request by %s of %s count %s pri %s", who, brid, count, priority)

        # Get the build request and build set
        request = self.db.execute(text(
            """SELECT * FROM
                    buildrequests, buildsets WHERE
                    buildrequests.id=:brid AND
                    buildrequests.buildsetid = buildsets.id"""),
            brid=brid).fetchone()

        if not request:
            log.info("No request with id %s, giving up" % brid)
            return {"errors": True, "msg": "No request with that id"}

        now = time.time()
        for i in range(count):
            # Create a new buildset
            buildsetid = create_buildset(
                self.db,
                idstring=request.external_idstring,
                reason='Self-serve: Rebuilt by %s' % who,
                ssid=request.sourcestampid,
                submitted_at=now,
            )

            # Copy buildset properties
            q = text("""INSERT INTO buildset_properties
                    (`buildsetid`, `property_name`, `property_value`)
                    SELECT :buildsetid, `property_name`, `property_value` FROM
                        buildset_properties WHERE
                        buildsetid = :oldbsid""")
            log.debug(q)
            r = self.db.execute(
                q,
                buildsetid=buildsetid,
                oldbsid=request.buildsetid)
            log.debug("Created %i properties" % r.rowcount)

            # Create a new build request
            q = text("""INSERT INTO buildrequests
                    (`buildsetid`, `buildername`, `submitted_at`, `priority`,
                     `claimed_at`, `claimed_by_name`, `claimed_by_incarnation`,
                     `complete`, `results`, `complete_at`)
                    VALUES
                     (:buildsetid, :buildername, :submitted_at, :priority,
                      0, NULL, NULL,
                      0, NULL, NULL)""")
            log.debug(q)

            r = self.db.execute(
                q,
                buildsetid=buildsetid,
                buildername=request.buildername,
                submitted_at=now,
                priority=priority,
            )

            new_brid = r.lastrowid
            log.debug("Created buildrequest %s", new_brid)
        return {"errors": False, "msg": "Ok"}
Example #39
0
 def test_nine(self):
     self._test(text("t.dt + t.tm"))
Example #40
0
    def _create_build_for_revision(self, who, branch, revision, priority, builder_expression, builder_exclusions=None):
        if builder_exclusions is None:
            builder_exclusions = ['%l10n nightly']
        now = time.time()
        repo_path = self._get_repo_path(branch)

        # Find builders that have been active in the past 2 weeks
        q = """SELECT DISTINCT buildername FROM buildrequests WHERE
                buildername LIKE :buildername AND
            """
        for i, bx in enumerate(builder_exclusions):
            q = q + "buildername NOT LIKE :buildername_exclusion_%i AND " % i
        q = q + """
          submitted_at > :submitted_at"""
        qparams = {
            'buildername': builder_expression,
            'submitted_at': time.time() - 14 * 24 * 3600,
        }
        for i, bx in enumerate(builder_exclusions):
            qparams['buildername_exclusion_%i' % i] = builder_exclusions[i]
        result = self.db.execute(text(q), qparams)

        buildernames = [r[0] for r in result]
        log.debug("buildernames are %s", buildernames)

        # Create a sourcestamp
        q = text("""INSERT INTO sourcestamps
                (`branch`, `revision`, `patchid`, `repository`, `project`)
                VALUES
                (:branch, :revision, NULL, '', '')
                """)
        log.debug(q)
        r = self.db.execute(q, branch=repo_path, revision=revision)
        ssid = r.lastrowid
        log.debug("Created sourcestamp %s", ssid)

        # Create a new buildset
        buildsetid = create_buildset(
            self.db,
            idstring=None,
            reason='Self-serve: Requested by %s' % who,
            ssid=ssid,
            submitted_at=now,
        )

        # Create buildset properties (buildid, builduid)
        q = text("""INSERT INTO buildset_properties
                (`buildsetid`, `property_name`, `property_value`)
                VALUES
                (:buildsetid, :key, :value)
                """)
        props = {
            'buildid': json.dumps((genBuildID(now), "self-serve")),
            'builduid': json.dumps((genBuildUID(), "self-serve")),
        }
        log.debug(q)
        for key, value in props.items():
            r = self.db.execute(q, buildsetid=buildsetid, key=key, value=value)
            log.debug("Created buildset_property %s=%s", key, value)

        # Create buildrequests
        q = text("""INSERT INTO buildrequests
                (`buildsetid`, `buildername`, `submitted_at`, `priority`, `claimed_at`, `claimed_by_name`, `claimed_by_incarnation`, `complete`, `results`, `complete_at`)
                VALUES
                (:buildsetid, :buildername, :submitted_at, :priority, 0, NULL, NULL, 0, NULL, NULL)""")
        log.debug(q)
        for buildername in buildernames:
            r = self.db.execute(
                q,
                buildsetid=buildsetid,
                buildername=buildername,
                submitted_at=now,
                priority=priority)
            log.debug("Created buildrequest %s: %i", buildername, r.lastrowid)
        return {"errors": False, "msg": "Ok"}
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('comment', sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('text', sa.String(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'course', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('presentation_video', sa.String(), nullable=True),
        sa.Column('avg_rating', sa.Integer(), nullable=True),
        sa.Column('num_ratings', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table('event', sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(), nullable=True),
                    sa.Column('content', sa.String(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('event_pics', sa.Column('id', sa.Integer(),
                                            nullable=False),
                    sa.Column('name', sa.String(), nullable=True),
                    sa.Column('picture', sa.String(), nullable=True),
                    sa.Column('details', sa.String(), nullable=True),
                    sa.Column('date', sa.DateTime(), nullable=True),
                    sa.Column('type', sa.String(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('interest', sa.Column('id', sa.Integer(), nullable=False),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('role', sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=50), nullable=True),
                    sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name'))
    op.create_table(
        'user', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('email', sa.String(length=132), nullable=True),
        sa.Column('password', sa.String(length=512), nullable=True),
        sa.Column('profile_pic', sa.String(), nullable=True),
        sa.Column('role', sa.Integer(), nullable=True),
        sa.Column('age', sa.Integer(), nullable=True),
        sa.Column('gender', sa.String(), nullable=True),
        sa.Column('membership_status', sa.String(), nullable=True),
        sa.Column('date_joined',
                  sa.DateTime(timezone=True),
                  server_default=sa.text('(CURRENT_TIMESTAMP)'),
                  nullable=True),
        sa.ForeignKeyConstraint(
            ['role'],
            ['role.id'],
        ), sa.PrimaryKeyConstraint('id'))
    op.create_table('class', sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(), nullable=True),
                    sa.Column('course_id', sa.Integer(), nullable=True),
                    sa.Column('instructor_id', sa.Integer(), nullable=True),
                    sa.Column('start', sa.DateTime(), nullable=True),
                    sa.Column('end', sa.DateTime(), nullable=True),
                    sa.Column('gender', sa.String(), nullable=True),
                    sa.Column('type', sa.String(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['course_id'],
                        ['course.id'],
                    ), sa.ForeignKeyConstraint(
                        ['instructor_id'],
                        ['user.id'],
                    ), sa.PrimaryKeyConstraint('id'))
    op.create_table('user_comments',
                    sa.Column('user_id', sa.Integer(), nullable=True),
                    sa.Column('comment_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['comment_id'],
                        ['comment.id'],
                    ), sa.ForeignKeyConstraint(
                        ['user_id'],
                        ['user.id'],
                    ))
    op.create_table(
        'user_interests', sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('interest_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['interest_id'],
            ['interest.id'],
        ), sa.ForeignKeyConstraint(
            ['user_id'],
            ['user.id'],
        ))
    op.create_table('users_to_classes',
                    sa.Column('user_id', sa.Integer(), nullable=True),
                    sa.Column('class_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['class_id'],
                        ['class.id'],
                    ), sa.ForeignKeyConstraint(
                        ['user_id'],
                        ['user.id'],
                    ))
    op.create_table('video', sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('video_filename', sa.String(), nullable=True),
                    sa.Column('sequ_nb', sa.Integer(), nullable=True),
                    sa.Column('class_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['class_id'],
                        ['class.id'],
                    ), sa.PrimaryKeyConstraint('id'))
Example #42
0
def get_school_name_by_sid(sid):
    res = db.session.execute(
        text('SELECT name FROM school WHERE sid=:sid LIMIT 1'), {'sid': sid})
    res = res.fetchone()[0]
    return res
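fetchone()[0] raises a TypeError when no row matches the sid. A hedged variant of the same lookup that returns None instead, using Session.execute(...).scalar():

def get_school_name_by_sid_or_none(sid):
    # scalar() returns the first column of the first row, or None if there is no row.
    return db.session.execute(
        text('SELECT name FROM school WHERE sid=:sid LIMIT 1'), {'sid': sid}
    ).scalar()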
Example #43
0
def down(db):
    db.execute(text("""DELETE FROM second WHERE age = :age;"""),
               age=42)
Example #44
0
def get_customer_by_id(customer_id):
    stmt = "select id, name, age, gender, email from customer where id=:id"
    result = engine.execute(text(stmt), id=customer_id).fetchall()
    result_dict = [dict(row) for row in result]
    return jsonify(result_dict[0])
Example #45
0
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'hooks', sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('id',
                  postgresql.UUID(as_uuid=True),
                  server_default=sa.text('gen_random_uuid()'),
                  nullable=False),
        sa.Column('headers',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=False),
        sa.Column('payload',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=False),
        sa.Column('processed', sa.Boolean(), nullable=False),
        sa.Column('version_id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_hooks')))
    op.create_table(
        'hooks_version',
        sa.Column('created', sa.DateTime(), autoincrement=False,
                  nullable=True),
        sa.Column('updated', sa.DateTime(), autoincrement=False,
                  nullable=True),
        sa.Column('id',
                  postgresql.UUID(as_uuid=True),
                  server_default=sa.text('gen_random_uuid()'),
                  autoincrement=False,
                  nullable=False),
        sa.Column('headers',
                  postgresql.JSONB(astext_type=sa.Text()),
                  autoincrement=False,
                  nullable=True),
        sa.Column('payload',
                  postgresql.JSONB(astext_type=sa.Text()),
                  autoincrement=False,
                  nullable=True),
        sa.Column('processed',
                  sa.Boolean(),
                  autoincrement=False,
                  nullable=True),
        sa.Column('version_id',
                  sa.Integer(),
                  autoincrement=False,
                  nullable=True),
        sa.Column('transaction_id',
                  sa.BigInteger(),
                  autoincrement=False,
                  nullable=False),
        sa.Column('end_transaction_id', sa.BigInteger(), nullable=True),
        sa.Column('operation_type', sa.SmallInteger(), nullable=False),
        sa.PrimaryKeyConstraint('id',
                                'transaction_id',
                                name=op.f('pk_hooks_version')))
    op.create_index(op.f('ix_hooks_version_end_transaction_id'),
                    'hooks_version', ['end_transaction_id'],
                    unique=False)
    op.create_index(op.f('ix_hooks_version_operation_type'),
                    'hooks_version', ['operation_type'],
                    unique=False)
    op.create_index(op.f('ix_hooks_version_transaction_id'),
                    'hooks_version', ['transaction_id'],
                    unique=False)
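gen_random_uuid() is built into PostgreSQL 13+; on older servers it comes from the pgcrypto extension, so a migration like this one is often preceded by creating that extension. A hedged sketch, assuming the target database may be older than 13:

def upgrade():
    # Provide gen_random_uuid() on PostgreSQL < 13 (no-op if the extension already exists).
    op.execute('CREATE EXTENSION IF NOT EXISTS "pgcrypto"')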
Example #46
0
def insert_unfollow(user_unfollow):
    return current_app.database.execute(text("""
        DELETE FROM users_follow_list
        WHERE user_id = :id
        AND follow_user_id = :unfollow
    """), user_unfollow).rowcount
Example #47
0
def get_place():
    if len(request.args.get('latitude')) == 0 or len(request.args.get('longitude')) == 0:
        return 'Please Specify Arguments Correctly'
    else:
        lat1 = float(request.args.get('latitude'))
        lng1 = float(request.args.get('longitude'))
    c = 0
    sql = text('SELECT name, type, parent, coordinates FROM geojson')
    result = db.engine.execute(sql)
    name = []
    typ = []
    parents = []
    coordinates = []
    found = ""
    for row in result:
        name.append(row[0])
        typ.append(row[1])
        parents.append(row[2])
        coordinates.append(row[3])

    def quadrant(w, lng1, lat1):
        # Keep only the vertices in the quadrant (relative to the median
        # longitude/latitude) that contains the requested point.
        lng = w[:, 0]
        lat = w[:, 1]
        lenlat = np.median(lat, axis=0)
        lenlng = np.median(lng, axis=0)
        for i in range(1):
            if lng1 <= lenlng and lat1 >= lenlat:      # 1st quadrant
                lng = lng[lng <= lenlng]
                lat = lat[lat >= lenlat]
            elif lng1 >= lenlng and lat1 >= lenlat:    # 2nd quadrant
                lng = lng[lng >= lenlng]
                lat = lat[lat >= lenlat]
            elif lng1 <= lenlng and lat1 <= lenlat:    # 3rd quadrant
                lng = lng[lng <= lenlng]
                lat = lat[lat <= lenlat]
            elif lng1 >= lenlng and lat1 <= lenlat:    # 4th quadrant
                lng = lng[lng >= lenlng]
                lat = lat[lat <= lenlat]
        return lng, lat

    for i in range(len(name)):
        q = coordinates[i]
        w = np.matrix(q[0])
        lng, lat = quadrant(w, lng1, lat1)
        if c > 1:
            lng, lat = quadrant(w, lng1, lat1)
            c = c - 1
        xmax = lng.max()
        xmin = lng.min()
        ymax = lat.max()
        ymin = lat.min()
        if lng1 <= xmax and lng1 >= xmin and lat1 <= ymax and lat1 >= ymin:
            found = name[i]
            c = c + 1

    if len(found) > 0:
        return found
    else:
        return 'Not Found'
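The quadrant/min-max logic above boils down to a bounding-box containment test over the polygon's vertices. A simpler hedged sketch of that test, assuming coordinates arrive as an Nx2 array of (lng, lat) pairs:

import numpy as np

def point_in_vertex_bbox(coords, lng1, lat1):
    # True when (lng1, lat1) lies within the min/max extent of the vertices.
    arr = np.asarray(coords, dtype=float)
    lng, lat = arr[:, 0], arr[:, 1]
    return bool(lng.min() <= lng1 <= lng.max() and lat.min() <= lat1 <= lat.max())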
Example #48
0
def up(db):
    db.execute(text("""INSERT INTO second (age) VALUES (:age);"""),
               age=42)
Example #49
0
class Job(BaseModel):
    __tablename__ = "jobs"
    __table_args__ = (Index("ix_jobs_project_uuid_pipeline_uuid",
                            "project_uuid", "pipeline_uuid"), )

    name = db.Column(
        db.String(255),
        unique=False,
        nullable=False,
        # For migrating users.
        server_default=text("'job'"),
    )

    pipeline_name = db.Column(
        db.String(255),
        unique=False,
        nullable=False,
        # For migrating users.
        server_default=text("''"),
    )

    uuid = db.Column(db.String(36), primary_key=True)
    project_uuid = db.Column(
        db.String(36),
        db.ForeignKey("projects.uuid", ondelete="CASCADE"),
        index=True,
        nullable=False,
    )
    pipeline_uuid = db.Column(db.String(36), index=True, nullable=False)

    # Jobs that are to be scheduled once (right now) or once in the
    # future will have no schedule (null).
    schedule = db.Column(db.String(100), nullable=True)

    # A list of dictionaries. The length of the list is the number of
    # non-interactive runs that will be run, one for each parameters
    # dictionary. A parameter dictionary maps step uuids to a dictionary
    # containing the parameters of that step for that particular run.
    # [{ <step_uuid>: {"a": 1}, ...}, ...]
    parameters = db.Column(
        JSONB,
        nullable=False,
        # This way migrated entries that did not have this column will
        # still be valid. Note that the entries will be stored as a list
        # of dicts.
        server_default="[]",
    )

    # Note that this column also contains the parameters that were
    # stored within the pipeline definition file. These are not the job
    # parameters, but the original ones.
    pipeline_definition = db.Column(
        JSONB,
        nullable=False,
        # This way migrated entries that did not have this column will
        # still be valid.
        server_default="{}",
    )

    pipeline_run_spec = db.Column(
        JSONB,
        nullable=False,
        # This way migrated entries that did not have this column will
        # still be valid.
        server_default="{}",
    )

    # So that we can efficiently look for jobs to run.
    next_scheduled_time = db.Column(TIMESTAMP(timezone=True), index=True)

    # So that we can show the user the last time it was scheduled/run.
    last_scheduled_time = db.Column(TIMESTAMP(timezone=True), index=True)

    # So that we can "stamp" every non interactive run with the
    # execution number it belongs to, e.g. the first time a job runs it
    # will be batch 1, then 2, etc.
    total_scheduled_executions = db.Column(
        db.Integer,
        unique=False,
        server_default=text("0"),
    )

    pipeline_runs = db.relationship(
        "NonInteractivePipelineRun",
        lazy="select",
        # let the db take care of cascading deletions
        # https://docs.sqlalchemy.org/en/13/orm/relationship_api.html#sqlalchemy.orm.relationship.params.passive_deletes
        # A value of True indicates that unloaded child items should not
        # be loaded during a delete operation on the parent. Normally,
        # when a parent item is deleted, all child items are loaded so
        # that they can either be marked as deleted, or have their
        # foreign key to the parent set to NULL. Marking this flag as
        # True usually implies an ON DELETE <CASCADE|SET NULL> rule is
        # in place which will handle updating/deleting child rows on the
        # database side.
        passive_deletes=True,
        # https://docs.sqlalchemy.org/en/14/orm/cascades.html#using-foreign-key-on-delete-cascade-with-orm-relationships
        # In order to use ON DELETE foreign key cascades in conjunction
        # with relationship(), it’s important to note first and foremost
        # that the relationship.cascade setting must still be configured
        # to match the desired “delete” or “set null” behavior
        # Essentially, the specified behaviour in the FK column
        # and the one specified in the relationship must match.
        cascade="all, delete",
        # When querying a job and its runs the runs will be sorted by
        # job schedule number and the index of the pipeline in that job.
        order_by=(
            "[desc(NonInteractivePipelineRun.job_run_index), "
            "desc(NonInteractivePipelineRun.job_run_pipeline_run_index)]"),
    )

    # The status of a job can be DRAFT, PENDING, STARTED, SUCCESS,
    # ABORTED. Jobs start as DRAFT, which indicates that the job has
    # been created but has not yet been started by the user. Once a
    # job is started by the user, what happens depends on the type of
    # job. One time jobs become PENDING, and become STARTED once they
    # are run by the scheduler and their pipeline runs are added to the
    # queue. Once they are completed, their status will be SUCCESS, if
    # they are aborted, their status will be set to ABORTED. Recurring
    # jobs, characterized by having a schedule, become STARTED, and can
    # only move to the ABORTED state in case they get cancelled, which
    # implies that the job will not be scheduled anymore.
    status = db.Column(
        db.String(15),
        unique=False,
        nullable=False,
        # Pre-existing Jobs of migrating users will be set to SUCCESS.
        server_default=text("'SUCCESS'"),
    )

    strategy_json = db.Column(
        JSONB,
        nullable=False,
        server_default="{}",
    )

    env_variables = deferred(
        db.Column(
            JSONB,
            nullable=False,
            server_default="{}",
        ))

    created_time = db.Column(
        db.DateTime,
        unique=False,
        nullable=False,
        index=True,
        # For migrating users.
        server_default=text("timezone('utc', now())"),
    )

    def __repr__(self):
        return f"<Job: {self.uuid}>"
Example #50
0
import csv
import os

from sqlalchemy import create_engine, text

db = create_engine(os.getenv("DATABASE_URL"))

## DROP THE TABLE IF IT EXISTS
# query = """DROP TABLE IF EXISTS books"""
# db.execute(query)

### CREATE TABLE
query = """CREATE TABLE IF NOT EXISTS books (
    id SERIAL PRIMARY KEY,
    isbn VARCHAR UNIQUE NOT NULL,
    title VARCHAR NOT NULL,
    author VARCHAR NOT NULL,
    year INTEGER
    )"""
db.execute(query)

## OPEN CSV AND INSERT TO DB
with open("books.csv", 'r') as csvfile:
    books = csv.DictReader(csvfile)
    for book in books:
        query = text(
            "INSERT INTO books (isbn, title, author, year) VALUES (:isbn, :title, :author, :year)"
        )
        db.execute(
            query, {
                "isbn": book['isbn'],
                "title": book['title'],
                "author": book['author'],
                "year": book['year']
            })
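Because the INSERT uses named bind parameters, the same statement can also be executed once with a list of parameter dictionaries, which takes SQLAlchemy's executemany path instead of one round trip per row. A hedged variant of the loop above (assuming the CSV headers match the parameter names):

with open("books.csv", "r") as csvfile:
    rows = list(csv.DictReader(csvfile))

query = text(
    "INSERT INTO books (isbn, title, author, year) VALUES (:isbn, :title, :author, :year)"
)
# A list of dicts makes the driver execute the statement once per dict (executemany).
db.execute(query, rows)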
Example #51
0
    def test_basic(self, metadata, connection):

        s_table = Table(
            "sometable",
            metadata,
            Column("id_a", Unicode(255), primary_key=True),
            Column("id_b", Unicode(255), primary_key=True, unique=True),
            Column("group", Unicode(255), primary_key=True),
            Column("col", Unicode(255)),
            UniqueConstraint("col", "group"),
        )

        # "group" is a keyword, so lower case
        normalind = Index("tableind", s_table.c.id_b, s_table.c.group)
        Index(
            "compress1", s_table.c.id_a, s_table.c.id_b, oracle_compress=True
        )
        Index(
            "compress2",
            s_table.c.id_a,
            s_table.c.id_b,
            s_table.c.col,
            oracle_compress=1,
        )

        metadata.create_all(connection)

        mirror = MetaData()
        mirror.reflect(connection)

        metadata.drop_all(connection)
        mirror.create_all(connection)

        inspect = MetaData()
        inspect.reflect(connection)

        def obj_definition(obj):
            return (
                obj.__class__,
                tuple([c.name for c in obj.columns]),
                getattr(obj, "unique", None),
            )

        # find what the primary k constraint name should be
        primaryconsname = connection.scalar(
            text(
                """SELECT constraint_name
               FROM all_constraints
               WHERE table_name = :table_name
               AND owner = :owner
               AND constraint_type = 'P' """
            ),
            dict(
                table_name=s_table.name.upper(),
                owner=testing.db.dialect.default_schema_name.upper(),
            ),
        )

        reflectedtable = inspect.tables[s_table.name]

        # make a dictionary of the reflected objects:

        reflected = dict(
            [
                (obj_definition(i), i)
                for i in reflectedtable.indexes | reflectedtable.constraints
            ]
        )

        # assert we got primary key constraint and its name, Error
        # if not in dict

        assert (
            reflected[
                (PrimaryKeyConstraint, ("id_a", "id_b", "group"), None)
            ].name.upper()
            == primaryconsname.upper()
        )

        # Error if not in dict

        eq_(reflected[(Index, ("id_b", "group"), False)].name, normalind.name)
        assert (Index, ("id_b",), True) in reflected
        assert (Index, ("col", "group"), True) in reflected

        idx = reflected[(Index, ("id_a", "id_b"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 2

        idx = reflected[(Index, ("id_a", "id_b", "col"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 1

        eq_(len(reflectedtable.constraints), 1)
        eq_(len(reflectedtable.indexes), 5)
Example #52
0
class NonInteractivePipelineRun(PipelineRun):
    # https://docs.sqlalchemy.org/en/14/orm/inheritance.html
    # sqlalchemy has 3 kinds of inheritance: joined table, single table,
    # concrete.
    #
    # Concrete is, essentially, not recommended unless you have a
    # reason to use it. It will also lead to FK issues if the base
    # table is abstract.
    #
    # "ORM-enabled UPDATEs and DELETEs do not handle joined table
    # inheritance automatically." This means, for example, that
    # updating a NonInteractivePipelineRun would not allow updating the
    # columns that belong to the InteractiveRun, and that the
    # update_status_db function from the utils module would not work
    # when updating the status of a non-interactive run.
    # https://docs.sqlalchemy.org/en/14/orm/session_basics.html#update-and-delete-with-arbitrary-where-clause
    #
    # Single table inheritance is the inheritance of choice, mostly
    # because of the drawbacks of joined table inheritance. Setting the
    # tablename to None will result in using single table inheritance,
    # setting it to a string will result in using joined table
    # inheritance.
    # Note that single table inheritance will NOT create a new table for
    # each "child" of the inheritance.
    __tablename__ = None

    # TODO: verify why the job_uuid should be part of the
    # primary key
    job_uuid = db.Column(db.String(36),
                         db.ForeignKey("jobs.uuid", ondelete="CASCADE"),
                         index=True)

    # To what batch of non interactive runs of a job it belongs. The
    # first time a job runs will produce batch 1, then batch 2, etc.
    job_run_index = db.Column(
        db.Integer,
        nullable=False,
        server_default=text("0"),
    )

    # This run_id is used to identify the pipeline run within the
    # job and maintain a consistent ordering.
    job_run_pipeline_run_index = db.Column(db.Integer)

    # The pipeline run number across all job runs of a job.
    pipeline_run_index = db.Column(db.Integer)

    # Parameters with which it was run, so that the history is kept.
    parameters = db.Column(
        JSONB,
        nullable=False,
        # This way migrated entries that did not have this column will
        # still be valid.
        server_default="{}",
    )

    env_variables = deferred(
        db.Column(
            JSONB,
            nullable=False,
            server_default="{}",
        ))

    # related to inheriting from PipelineRun
    __mapper_args__ = {
        "polymorphic_identity": "NonInteractivePipelineRun",
    }
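For reference, the single-table-inheritance setup these comments describe looks roughly like the hedged sketch below (assumed models, not the application's real PipelineRun): the parent declares a discriminator column via polymorphic_on, and the child defines no table of its own.

class PipelineRunBase(db.Model):
    __tablename__ = "pipeline_runs"
    uuid = db.Column(db.String(36), primary_key=True)
    type = db.Column(db.String(50))
    __mapper_args__ = {
        "polymorphic_identity": "base",
        "polymorphic_on": type,
    }

class ScheduledPipelineRun(PipelineRunBase):
    # No __tablename__: rows are stored in the parent's table (single table inheritance).
    job_uuid = db.Column(db.String(36), nullable=True)
    __mapper_args__ = {"polymorphic_identity": "scheduled"}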
Example #53
0
def _should_create_function(ddl, target, connection, **kw):
    sql = "SELECT COUNT(*) FROM information_schema.routines WHERE routine_name = 'indico_unaccent'"
    count = connection.execute(text(sql)).scalar()
    return not count
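A predicate like this is typically attached to conditional DDL: DDLElement.execute_if(callable_=...) calls it with (ddl, target, connection, **kw) and only emits the DDL when it returns true. A hedged sketch of the wiring, where both `metadata` and the CREATE FUNCTION body are placeholders rather than Indico's real definitions:

from sqlalchemy import DDL, event

create_unaccent = DDL(
    # Placeholder body; the real indico_unaccent definition lives in the application.
    "CREATE FUNCTION indico_unaccent(text) RETURNS text AS $$ SELECT $1 $$ LANGUAGE SQL"
)
event.listen(
    metadata, "after_create",
    create_unaccent.execute_if(callable_=_should_create_function),
)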
Example #54
0
def confirmaramistad():
    # Confirm a friendship between the selected pet and the requested pet.
    id_pet1 = login_session['id_selectedpet']
    id_pet2 = request.form['idpet']
    # Bound parameters instead of string concatenation, which was open to SQL injection.
    sql = text("update petfriends set friendship=1"
               " where (id_pet1=:p1 AND id_pet2=:p2)"
               " OR (id_pet1=:p2 AND id_pet2=:p1)")
    session.execute(sql, {'p1': id_pet1, 'p2': id_pet2})
    return jsonify({'status': "OK"})
Example #55
0
def configure_orm(disable_connection_pool=False):
    """Configure ORM using SQLAlchemy"""
    from airflow.utils.log.secrets_masker import mask_secret

    log.debug("Setting up DB connection pool (PID %s)", os.getpid())
    global engine
    global Session
    engine_args = prepare_engine_args(disable_connection_pool)

    # Allow the user to specify an encoding for their DB otherwise default
    # to utf-8 so jobs & users with non-latin1 characters can still use us.
    engine_args['encoding'] = conf.get('core',
                                       'SQL_ENGINE_ENCODING',
                                       fallback='utf-8')

    if conf.has_option('core', 'sql_alchemy_connect_args'):
        connect_args = conf.getimport('core', 'sql_alchemy_connect_args')
    else:
        connect_args = {}

    engine = create_engine(SQL_ALCHEMY_CONN,
                           connect_args=connect_args,
                           **engine_args)

    mask_secret(engine.url.password)

    setup_event_handlers(engine)

    Session = scoped_session(
        sessionmaker(
            autocommit=False,
            autoflush=False,
            bind=engine,
            expire_on_commit=False,
        ))
    if engine.dialect.name == 'mssql':
        session = Session()
        try:
            result = session.execute(
                sqlalchemy.text(
                    'SELECT is_read_committed_snapshot_on FROM sys.databases WHERE name=:database_name'
                ),
                params={"database_name": engine.url.database},
            )
            data = result.fetchone()[0]
            if data != 1:
                log.critical(
                    "MSSQL database MUST have READ_COMMITTED_SNAPSHOT enabled."
                )
                log.critical(
                    f"The database {engine.url.database} has it disabled.")
                log.critical(
                    "This will cause random deadlocks, Refusing to start.")
                log.critical(
                    "See https://airflow.apache.org/docs/apache-airflow/stable/howto/"
                    "set-up-database.html#setting-up-a-mssql-database")
                raise Exception(
                    "MSSQL database MUST have READ_COMMITTED_SNAPSHOT enabled."
                )
        finally:
            session.close()
#                        line_items.c.extended_cost])
#         joins = joins.join(line_items).join(cookies)
#     cust_orders = select(columns) \
#         .select_from(joins) \
#         .where(users.c.username == cust_name)
#     if shipped is not None:
#         cust_orders = cust_orders.where(orders.c.shipped == shipped)
#     result = connection.execute(cust_orders).fetchall()
#     return result
#
#
# print(get_orders_by_customer('cakeeater'))
#
# print(get_orders_by_customer('cakeeater', details=True))
#
# print(get_orders_by_customer('cakeeater', shipped=True))  # AttributeError
#
# print(get_orders_by_customer('cakeeater', shipped=False))  # -/-
#
# print(get_orders_by_customer('cakeeater', shipped=False, details=True))  # -/-

# ------------------------------------------------------------------------------

# result = connection.execute("select * from practice.orders").fetchall()
# print(result)

# ------------------------------------------------------------------------------

stmt = select([users]).where(text("username='******'"))
print(connection.execute(stmt).fetchall())
Example #57
0
    def get_spawnpoints(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                        oNeLat=None, oNeLng=None, timestamp=0, geofences=None,
                        exclude_geofences=None):
        query = db.session.query(
            TrsSpawn.latitude, TrsSpawn.longitude,
            TrsSpawn.spawnpoint.label('spawnpoint_id'), TrsSpawn.spawndef,
            TrsSpawn.first_detection, TrsSpawn.last_non_scanned,
            TrsSpawn.last_scanned, TrsSpawn.calc_endminsec.label('end_time')
        )

        if timestamp > 0:
            # If a timestamp is known, only send spawn points scanned since then.
            t = datetime.fromtimestamp(timestamp / 1000)
            query = query.filter(
                (TrsSpawn.last_scanned > t) | (TrsSpawn.last_non_scanned > t)
            )

        if swLat and swLng and neLat and neLng:
            query = query.filter(
                TrsSpawn.latitude >= swLat,
                TrsSpawn.longitude >= swLng,
                TrsSpawn.latitude <= neLat,
                TrsSpawn.longitude <= neLng
            )

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude spawn points within old boundaries.
            query = query.filter(
                ~and_(
                    TrsSpawn.latitude >= oSwLat,
                    TrsSpawn.longitude >= oSwLng,
                    TrsSpawn.latitude <= oNeLat,
                    TrsSpawn.longitude <= oNeLng
                )
            )

        if geofences:
            sql = geofences_to_query(geofences, 'trs_spawn')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'trs_spawn')
            query = query.filter(~text(sql))

        result = query.all()

        spawnpoints = []
        ts = time.time()
        utc_offset = datetime.fromtimestamp(ts) - datetime.utcfromtimestamp(ts)
        for sp in result:
            sp = sp._asdict()
            if sp['last_non_scanned'] is not None:
                sp['last_non_scanned'] = sp['last_non_scanned'] - utc_offset
            if sp['end_time'] is not None:
                if sp['last_scanned'] is not None:
                    sp['last_scanned'] = sp['last_scanned'] - utc_offset
                end_time_split = sp['end_time'].split(':')
                end_time_seconds = int(end_time_split[1])
                end_time_minutes = int(end_time_split[0])
                despawn_time = datetime.today().replace(
                    minute=end_time_minutes, second=end_time_seconds,
                    microsecond=0
                )
                if despawn_time <= datetime.today():
                    despawn_time += timedelta(hours=1)
                sp['despawn_time'] = despawn_time - utc_offset
                if sp['spawndef'] == 15:
                    sp['spawn_time'] = sp['despawn_time'] - timedelta(hours=1)
                else:
                    sp['spawn_time'] = (sp['despawn_time']
                                        - timedelta(minutes=30))
                del sp['end_time']
            spawnpoints.append(sp)

        return spawnpoints
Example #58
0
def get_all():

    sql = text('select distinct(country.continent) from country')
    result = db.engine.execute(sql)
    result = [row[0] for row in result]
    return create_response(result, 200)
Example #59
0
    def get_active(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                   oNeLat=None, oNeLng=None, timestamp=0, eids=None, ids=None,
                   geofences=None, exclude_geofences=None,
                   verified_despawn_time=False):
        columns = [
            Pokemon.encounter_id, Pokemon.pokemon_id, Pokemon.latitude,
            Pokemon.longitude, Pokemon.disappear_time,
            Pokemon.individual_attack, Pokemon.individual_defense,
            Pokemon.individual_stamina, Pokemon.move_1, Pokemon.move_2,
            Pokemon.cp, Pokemon.cp_multiplier, Pokemon.weight, Pokemon.height,
            Pokemon.gender, Pokemon.form, Pokemon.costume,
            Pokemon.catch_prob_1, Pokemon.catch_prob_2, Pokemon.catch_prob_3,
            Pokemon.weather_boosted_condition, Pokemon.last_modified
        ]

        if verified_despawn_time:
            columns.append(
                TrsSpawn.calc_endminsec.label('verified_disappear_time')
            )
            query = (
                db.session.query(*columns)
                .outerjoin(
                    TrsSpawn, Pokemon.spawnpoint_id == TrsSpawn.spawnpoint
                )
            )
        else:
            query = db.session.query(*columns)

        query = query.filter(Pokemon.disappear_time > datetime.utcnow())

        if timestamp > 0:
            # If a timestamp is known, only load Pokémon modified since then.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Pokemon.last_modified > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(
                Pokemon.latitude >= swLat,
                Pokemon.longitude >= swLng,
                Pokemon.latitude <= neLat,
                Pokemon.longitude <= neLng
            )

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude Pokémon within old boundaries.
            query = query.filter(
                ~and_(
                    Pokemon.latitude >= oSwLat,
                    Pokemon.longitude >= oSwLng,
                    Pokemon.latitude <= oNeLat,
                    Pokemon.longitude <= oNeLng
                )
            )

        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))

        if eids:
            query = query.filter(Pokemon.pokemon_id.notin_(eids))
        elif ids:
            query = query.filter(Pokemon.pokemon_id.in_(ids))

        return [pokemon._asdict() for pokemon in query.all()]
Example #60
0
def drop_table(table_model=None, table_name=None):
    if table_name:
        query = 'DROP TABLE IF EXISTS {}'.format(table_name)
        db.session.execute(text(query))
    elif table_model and table_exists(table_model):
        table_model.__table__.drop(db.engine)