def _update_schemas_1_to_2(self, cur):
        """
        Given a Cursor for a Postgres Connection, upgrade table schemas at version 1 to version 2.

        :param cur: Cursor
        :return: None
        """
        cur.execute(
            sql.SQL('''
                    SELECT c.relname, obj_description(c.oid, 'pg_class')
                    FROM pg_namespace AS n
                      INNER JOIN pg_class AS c ON n.oid = c.relnamespace
                    WHERE n.nspname = {};
                    ''').format(sql.Literal(self.postgres_schema)))

        for mapped_name, raw_json in cur.fetchall():
            metadata = None
            if raw_json:
                try:
                    metadata = json.loads(raw_json)
                except ValueError:
                    # comment is not valid JSON metadata; skip it
                    pass

            if metadata and metadata.get(
                    'schema_version',
                    0) == 1 and metadata.get('table_mappings'):
                self.LOGGER.info(
                    'Migrating root_table `{}` children from schema_version 1 to 2'
                    .format(mapped_name))

                table_path = tuple()

                for mapping in metadata.get('table_mappings'):
                    table_name = mapping['to']
                    table_path = mapping['from']
                    table_metadata = self._get_table_metadata(cur, table_name)

                    self.LOGGER.info(
                        'Migrating `{}` (`{}`) from schema_version 1 to 2'.
                        format(table_path, table_name))

                    version_2_metadata = _update_schema_1_to_2(
                        table_metadata, table_path)
                    self._set_table_metadata(cur, table_name,
                                             version_2_metadata)

                root_version_2_metadata = _update_schema_1_to_2(
                    metadata, table_path[0:1])
                self._set_table_metadata(cur, mapped_name,
                                         root_version_2_metadata)
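This migration relies on per-table metadata stored as JSON in each table's comment, which is what `obj_description(c.oid, 'pg_class')` returns above. The target's own `_get_table_metadata`/`_set_table_metadata` helpers are not shown; a minimal sketch of reading and writing such comment metadata could look like this (schema and table names are illustrative):

import json
from psycopg2 import sql

def read_comment_metadata(cur, schema, table):
    # obj_description returns the COMMENT attached to the table, or NULL if there is none
    cur.execute(
        sql.SQL("SELECT obj_description(to_regclass({qualified}), 'pg_class')").format(
            qualified=sql.Literal('{}.{}'.format(schema, table))))
    raw = cur.fetchone()[0]
    return json.loads(raw) if raw else None

def write_comment_metadata(cur, schema, table, metadata):
    # store the metadata dict back as a JSON string in the table comment
    cur.execute(
        sql.SQL("COMMENT ON TABLE {schema}.{table} IS {comment}").format(
            schema=sql.Identifier(schema),
            table=sql.Identifier(table),
            comment=sql.Literal(json.dumps(metadata))))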
Example 2
def to_dollar(cid):
    """Convert the currency with the given id to its dollar ratio.

    For example, if currency A is worth 2 dollars, the conversion ratio is 0.5;
    if currency B is worth 0.5 dollars, the conversion ratio is 2,
    so a cost of x in currency A becomes 0.5x dollars.
    @param cid: id of the corresponding currency
    @return: conversion ratio to dollars
    """
    query = sql.SQL("SELECT * FROM currency WHERE id={cid};").\
        format(cid=sql.Literal(cid))
    db_log.debug(query)
    ratio = 1.0 / pd.read_sql(query, conn)['currency_value'].iloc[0]
    return ratio
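Note that `.format()` only substitutes `{name}` fields that actually appear in the `sql.SQL()` template string, so the placeholder has to be written into the query text itself. A minimal, self-contained sketch of the pattern (connection parameters and usage are illustrative):

import psycopg2
from psycopg2 import sql

def currency_value(conn, cid):
    # {cid} is replaced by a safely quoted literal when the query is composed
    query = sql.SQL("SELECT currency_value FROM currency WHERE id = {cid}").format(
        cid=sql.Literal(cid))
    with conn.cursor() as cur:
        cur.execute(query)
        row = cur.fetchone()
        return row[0] if row else None

# usage (illustrative connection parameters):
# conn = psycopg2.connect("dbname=test user=postgres")
# print(currency_value(conn, 42))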
Example 3
def get_all_vacancy():
    user = check_auth(request.headers, __name__)
    if user == True:
        user = authorize.get(request.headers.get('UserToken'))

    try:
        database = Database()
    except TypeError:
        return jsonify({"messageError": "Нет подключения к БД"})

    status_id = [2]
    fields = [("v", "id"), ("v", "create_date"), ("sp", "title"),
              ("pa", "title"), ("sv", "title")]
    if type(user) != tuple and user.get_role() == 1:
        status_id = [1, 2, 3]
    result = []
    res = database.select_data(
        sql.SQL("""
        SELECT 
            {fields}    
        FROM vacancy v
            LEFT JOIN specializations sp on sp.id = v.specializations_id
            LEFT JOIN professional_area pa on pa.id = sp.professional_area_id
            LEFT JOIN statuses_vacancy sv on sv.id = v.status_id
        WHERE
            status_id in ({status_id})
    """).format(fields=sql.SQL(",").join(
            sql.Identifier(i[0], i[1]) for i in fields),
                status_id=sql.SQL(",").join(sql.Literal(i)
                                            for i in status_id)))
    if res:
        for id, dt, specialization, professional_area, status in res:
            result.append({
                "id": id,
                "dt": time.mktime(
                    datetime.datetime.strptime(dt.strftime("%d/%m/%Y"),
                                               "%d/%m/%Y").timetuple()),
                "specialization": specialization,
                "professional_area": professional_area
            })
            if type(user) != tuple and user.get_role() == 1:
                result[-1]['status'] = status
    database.close()
    return jsonify(result)
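Both the column list and the IN list above are built with `sql.SQL(",").join(...)`; `sql.Identifier("v", "id")` renders as the dotted, quoted name "v"."id" and `sql.Literal` quotes each value. A minimal sketch of that composition (table and column names are illustrative):

from psycopg2 import sql

fields = [("v", "id"), ("v", "create_date")]
status_ids = [1, 2, 3]

query = sql.SQL(
    "SELECT {fields} FROM vacancy v WHERE v.status_id IN ({status_id})").format(
        # Identifier("v", "id") becomes "v"."id"; join() inserts the commas
        fields=sql.SQL(", ").join(sql.Identifier(t, c) for t, c in fields),
        status_id=sql.SQL(", ").join(sql.Literal(i) for i in status_ids))
# query.as_string(conn) would yield roughly:
# SELECT "v"."id", "v"."create_date" FROM vacancy v WHERE v.status_id IN (1, 2, 3)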
Example 4
    def get_client_id_byemail(self, email):
        """ retrieve the client_id for the given email address (called at the server side)

        @param email: client email address
        @return: client id, or False if no matching client exists
        """
        query = sql.SQL("SELECT (client_id) FROM clients WHERE client_email={email} LIMIT 1 FOR UPDATE SKIP LOCKED;").\
            format(email=sql.Literal(email))
        self.db_log.debug(query)
        self.cur.execute(query)
        ret = self.cur.fetchone()
        if ret is None:
            return False
        return ret[0]
Example 5
def filter_imei_list_sql_by_device_type(conn, exempted_device_types, imei_list_sql):
    """Function to return SQL filtering out exempted device types."""
    # If certain device types are exempted, first select the IMEIs passed in imei_list_sql query.
    # These IMEIs are then joined against GSMA TAC db to get their device type.
    # Finally, any IMEIs that belong to exempted device types are excluded.
    return sql.SQL("""SELECT imei_norm
                        FROM (SELECT imei_norm,
                                     SUBSTRING(imei_norm, 1, 8) AS tac
                                FROM ({0}) imeis) imeis_with_tac
                        JOIN gsma_data
                       USING (tac)
                       WHERE device_type NOT IN {1}
                    """).format(sql.SQL(imei_list_sql),
                                sql.Literal(tuple(exempted_device_types))).as_string(conn)
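The `NOT IN {1}` clause works because `sql.Literal` adapts a Python tuple to a parenthesized, comma-separated list (an empty tuple would not produce valid SQL). A minimal sketch (connection is illustrative):

import psycopg2
from psycopg2 import sql

conn = psycopg2.connect("dbname=test")  # illustrative connection
exempted = ('Smartphone', 'Tablet')

clause = sql.SQL("WHERE device_type NOT IN {types}").format(
    types=sql.Literal(exempted))
# renders roughly as: WHERE device_type NOT IN ('Smartphone', 'Tablet')
print(clause.as_string(conn))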
Example 6
def get_df_column_types(table_name: str) -> List[str]:
    """Get the list of data types in the given table.

    :param table_name: Table name
    :return: List of SQL types
    """
    with connection.connection.cursor() as cursor:
        cursor.execute(sql.SQL(
            'SELECT DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS '
            + 'WHERE TABLE_NAME = {0}').format(sql.Literal(table_name)))

        type_names = cursor.fetchall()

    return [sql_to_ontask_datatype_names[dtype[0]] for dtype in type_names]
Example 7
    def all_stores_count(self):
        county = input("Enter County Name")
        try:
            query = sql.SQL(
                """select 'Total Liquor Stores' as Type,  count(l.serial_number) as Total from liquor_address l \
            where l.premise_zip_code in (Select zip from global_county_zip_code where county ilike {c1}) \
            union \
            select    'Total Retail Stores' as Type,  count(rs.license) as Total from  retail_food_stores rs \
            where rs.zip_code  in (Select zip from global_county_zip_code where county ilike {c2})\
            union \
            select  'Total Farmer Market Stores' as Type,  count(fm.market_name) as Total from farmers_market fm \
            where  fm.zip in (Select zip from global_county_zip_code where county ilike {c3}) \
             """).format(c1=sql.Literal(county),
                         c2=sql.Literal(county),
                         c3=sql.Literal(county))

            cursor = self.conn.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
            # print(cursor.mogrify(query, params))
            cursor.execute(query)
            r = cursor.fetchall()
            # print(r)
            print("Following are the Stores in the County ", county)
            for name, count in r:
                print(name, ':', count)

            return

        except Exception as e:
            self.conn.rollback()
            print("This is the Error")
            print(str(e))
            print(
                "There is some problem with the variable constraints. Please ask the owner of the code for assistance."
            )
            return
Example 8
    def search_retail_store(self):
        try:
            cursor = self.conn.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
            name = input(
                "Enter the keyword you want to search for in retail store (name) "
            )
            sq_foot = input(
                "Enter minimum Square Foot Area for the retail store ")

            query = sql.SQL(
                """ select rf.entity_name,  rf.street_number, rf.street_name, rf.city, rf.state, gl.zip \
                from retail_food_stores rf inner join  global_county_zip_code gl \
                on gl.zip=rf.zip_code\
                where rf.entity_name ilike {name} and square_footage > {sq_foot}"""
            ).format(
                name=sql.Literal("%" + name + "%"),
                sq_foot=sql.Literal(sq_foot),
            )

            cursor.execute(query)
            # print(cursor.mogrify(query))
            result = cursor.fetchall()
            for r in result:
                print(" store name : ", r[0].rstrip(), " Street Number: ",
                      r[1].rstrip(), " Street Name: ", r[2].rstrip(),
                      " City : ", r[3].rstrip(), "  State : ", r[4].rstrip(),
                      " Zip : ", r[5])

        except Exception as e:
            self.conn.rollback()
            print("This is the Error")
            print(str(e))
            print(
                "There is some problem with the variable constraints. Please ask the owner of the code for assistance."
            )
            return
Example 9
def insert_multiple_user_jsonb_data(stats_type, stats_range, from_ts, to_ts,
                                    values):
    query = """
        INSERT INTO statistics.user (user_id, stats_type, stats_range, data, count, from_ts, to_ts, last_updated)
             SELECT "user".id
                  , {stats_type}
                  , {stats_range}
                  , stats::jsonb
                  , count
                  , {from_ts}
                  , {to_ts}
                  , NOW()
               FROM (VALUES %s) AS t(user_id, count, stats)
               -- this JOIN serves no other purpose than to filter out users for whom stats were calculated but
               -- no longer exist in LB. if we don't filter, we'll get a FK conflict when such a case occurs
               JOIN "user" ON "user".id = user_id 
        ON CONFLICT (user_id, stats_type, stats_range)
      DO UPDATE SET data = EXCLUDED.data
                  , count = EXCLUDED.count
                  , from_ts = EXCLUDED.from_ts
                  , to_ts = EXCLUDED.to_ts
                  , last_updated = EXCLUDED.last_updated
    """
    formatted_query = sql.SQL(query).format(
        stats_type=sql.Literal(stats_type),
        stats_range=sql.Literal(stats_range),
        from_ts=sql.Literal(from_ts),
        to_ts=sql.Literal(to_ts))
    connection = db.engine.raw_connection()
    try:
        with connection.cursor() as cursor:
            execute_values(cursor, formatted_query, values)
        connection.commit()
    except psycopg2.errors.OperationalError:
        connection.rollback()
        current_app.logger.error("Error while inserting user stats:",
                                 exc_info=True)
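`execute_values` fills the single `%s` in `FROM (VALUES %s)` with the rows from `values`, batching them into one statement; recent psycopg2 versions also accept a composed query here and render it against the cursor first. A minimal sketch (table and connection are illustrative):

import psycopg2
from psycopg2 import sql
from psycopg2.extras import execute_values

conn = psycopg2.connect("dbname=test")  # illustrative connection
query = sql.SQL(
    "INSERT INTO demo_stats (user_id, count) "
    "SELECT user_id, count FROM (VALUES %s) AS t(user_id, count)")

rows = [(1, 10), (2, 20)]
with conn.cursor() as cur:
    # each tuple in rows becomes one row of the VALUES list
    execute_values(cur, query, rows)
conn.commit()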
Example 10
    def travel_sheds(self,
                     block_ids,
                     out_table,
                     composite=True,
                     overwrite=False,
                     dry=None):
        """
        Creates a new DB table showing the high- and low-stress travel sheds
        for the block(s) identified by block_ids. If more than one block is
        passed to block_ids the table will have multiple travel sheds that need
        to be filtered by a user.

        args
        block_ids -- the ids to use building travel sheds
        out_table -- the table to save travel sheds to
        composite -- whether to save the output as a composite of all blocks or as individual sheds for each block
        overwrite -- whether to overwrite an existing table
        dry -- a path to save SQL statements to instead of executing in DB
        """
        conn = self.get_db_connection()

        schema, out_table = self.parse_table_name(out_table)
        if schema is None:
            schema = self.get_default_schema()

        if overwrite and dry is None:
            self.drop_table(out_table, conn=conn, schema=schema)

        # set global sql vars
        subs = dict(self.sql_subs)
        subs["table"] = sql.Identifier(out_table)
        subs["schema"] = sql.Identifier(schema)
        subs["block_ids"] = sql.Literal(block_ids)
        subs["sidx"] = sql.Identifier("sidx_" + out_table + "_geom")
        subs["idx"] = sql.Identifier(out_table + "_source_blockid")

        if composite:
            self._run_sql_script("travel_shed_composite.sql",
                                 subs, ["sql"],
                                 dry=dry,
                                 conn=conn)
        else:
            self._run_sql_script("travel_shed.sql",
                                 subs, ["sql"],
                                 dry=dry,
                                 conn=conn)

        conn.commit()
        conn.close()
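The substitution map mixes `sql.Identifier` for object names (table, schema, indexes) with `sql.Literal` for values; the named SQL scripts are not shown here. A minimal sketch of applying such a map to a SQL template (template and values are illustrative):

from psycopg2 import sql

subs = {
    "schema": sql.Identifier("public"),            # illustrative values
    "table": sql.Identifier("travel_shed_out"),
    "block_ids": sql.Literal([101, 102]),          # a Python list renders as ARRAY[101, 102]
}
template = sql.SQL(
    "CREATE TABLE {schema}.{table} AS "
    "SELECT * FROM blocks WHERE block_id = ANY({block_ids})")
statement = template.format(**subs)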
Example 11
def get_questions_for_teseting(vacancy_id):
    try:
        database = Database()
    except TypeError:
        return jsonify({"messageError": "Нет подключения к БД"})
    
    result = {}

    questons = database.select_data(sql.SQL("""
        SELECT
            q.id questons_id,
            qt.title question_type_title,
            qt.description question_type_description,
            g.title grade_title,
            s.title skills_title,
            q.title questons_title,
            q.question questons_question,
            aoq.id answer_id,
            aoq.answer answer_text
        FROM questons q
            LEFT JOIN grade g on q.grade_id = g.id
            LEFT JOIN skills s on q.skill_id = s.id
            LEFT JOIN question_types qt on q.question_type_id = qt.id
            LEFT JOIN answers_on_question aoq on q.id = aoq.question_id
        WHERE
            s.id in (SELECT skill_id FROM vacancy v WHERE v.id={vacancy_id});
    """).format(vacancy_id=sql.Literal(vacancy_id)))

    for quest in questons:
        if result.get(quest['questons_id']):
            result[quest['questons_id']]['answers'].append(
                (quest['answer_id'], quest['answer_text']))
        else:
            res = {"answers": []}
            for key in quest.keys():
                if not key.split('_')[0] == 'answer':
                    res[key] = quest[key]

            res["answers"].append((quest['answer_id'], quest['answer_text']))
            result[res['questons_id']] = res
        if len(result) == 4:
            result.pop(res['questons_id'])
            break
    
    result = [result[key] for key in result]

    database.close()
    return jsonify(result)
Example 12
    def get_pk_columns(self, table, schema='public'):
        qualified_table = quote_ident(schema, self.conn) + "." + quote_ident(
            table, self.conn)

        query = sqlbuilder.SQL("""
        SELECT a.attname AS field_name
                        FROM pg_index i
                        JOIN pg_attribute a ON a.attrelid = i.indrelid
                        AND a.attnum = ANY(i.indkey)
                        WHERE
                        i.indrelid = ({schema_table})::regclass
                        AND i.indisprimary
        """).format(schema_table=sqlbuilder.Literal(qualified_table))
        self.cursor.execute(query)
        return [r[0] for r in self.cursor.fetchall()]
Example 13
    def getComments(cls, place_id, chat_id):
        stmt = SQL(
            'SELECT comment FROM "Reviews" WHERE adr = {} and chat_id != {} ORDER BY submit_time DESC LIMIT 3;'
        ).format(sql.Literal(place_id), sql.Literal(chat_id))
        result = run_query(stmt)
        if len(result) == 0:
            return []
        return ([msg[0] for msg in result])


#EXAMPLES

# User.addNewUser(100500, 'Testname', 'TestLastName','chat','unmae')
# print(User.getUser(100500)[0].checkLocation())
# print(User.getUser(100500)[0].updateLocation(100, 500))
# print(User.getUser(100500))
# print(Form.getForm(1))
# print(Form.addForm('TestYForm', [1,2]))
# print(Question.getQuestion(1, 'RU'))
# print(Question.getQuestion(1, 'UA'))
# print(Question.addQuestion('Hellot there?'))
# print(Review.getReview(1))
# print (type(Review.getReview(1)[0].q_json))
# Review.addReview(100500, json.dumps({'1':0}), 1,1,1,'Tereshkovoy10', 'ONPU', 'cmnt', 10)

# Form example
# print(Question.getQuestions([1,2,3], 'UA'))
# form = Form.getForm(1)[0]
# questions = Question.getQuestions(form.questions, 'UA')
# for q in questions:
#     print (q)
# print(Review.getMark('--'))
# print(Review.getComments('--', 100500))
# print(Review.isReviewEstimate('ChIJg8PGGlzUhg4RDMUagWtgV6E', 516233921))
# User.setUserLang(384341805, 'ru')
# print(User.getUserLang(384341805))
Example 14
    def get_table_schema(self, cur, path, name):
        cur.execute(
            sql.SQL(
                'SELECT column_name, data_type, is_nullable FROM information_schema.columns '
            ) + sql.SQL('WHERE table_schema = {} and table_name = {};').format(
                sql.Literal(self.postgres_schema), sql.Literal(name)))

        properties = {}
        for column in cur.fetchall():
            properties[column[0]] = json_schema.from_sql(
                column[1], column[2] == 'YES')

        metadata = self._get_table_metadata(cur, name)

        if metadata is None and not properties:
            return None

        if metadata is None:
            metadata = {'version': None}

        if len(path) > 1:
            table_mappings = self.get_table_schema(cur, path[:1],
                                                   path[0])['table_mappings']
        else:
            table_mappings = []
            for mapping in metadata.get('table_mappings', []):
                if mapping['type'] == 'TABLE':
                    table_mappings.append(mapping)

        metadata['name'] = name
        metadata['path'] = path
        metadata['type'] = 'TABLE_SCHEMA'
        metadata['schema'] = {'properties': properties}
        metadata['table_mappings'] = table_mappings

        return metadata
Example 15
def update_table(table_name, columns, col_values, attr=None, value=None):
    conn = None
    db = None
    try:
        conn = connect()
        db = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        if type(columns) is list and type(col_values) is list:
            if len(columns) != len(col_values):
                raise ValueError(
                    'columns list and value list must be equal in size')
            # compose a single SET clause covering every column/value pair
            set_clause = sql.SQL(", ").join(
                sql.SQL("{col} = {val}").format(col=sql.Identifier(c),
                                                val=sql.Literal(v))
                for c, v in zip(columns, col_values))
            if value and attr:
                query = sql.SQL(
                    "UPDATE  {tbl} SET {sets} WHERE {att} = {val} ").format(
                        sets=set_clause,
                        tbl=sql.Identifier(table_name),
                        att=sql.Identifier(attr),
                        val=sql.Literal(value))
            else:
                query = sql.SQL("UPDATE  {tbl} SET {sets} ").format(
                    sets=set_clause,
                    tbl=sql.Identifier(table_name))
        elif type(columns) is str and type(col_values) in [str, int, bool]:
            if value and attr:
                query = sql.SQL(
                    "UPDATE  {tbl} SET {colval} = {vals} WHERE {att} = {val} "
                ).format(colval=sql.Identifier(columns),
                         vals=sql.Literal(col_values),
                         tbl=sql.Identifier(table_name),
                         att=sql.Identifier(attr),
                         val=sql.Literal(value))
            else:
                query = sql.SQL("UPDATE  {tbl} SET {colval} = {vals} ").format(
                    colval=sql.Identifier(columns),
                    vals=sql.Literal(col_values),
                    tbl=sql.Identifier(table_name))
        else:
            raise TypeError(
                'columns and values must  be  of type list or str and of same type '
            )
        db.execute(query)
        conn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        logger.error(error)
        val = False
    else:
        logger.info(query.as_string(db))
        val = True
    finally:
        if db is not None:
            db.close()
        if conn is not None:
            conn.close()
        return val
Example 16
def get_alarm_texts_from_db(session_id):
    conn = psycopg2.connect(CONNECTION_STRING)
    cur = conn.cursor()
    query = sql.SQL("""
    SELECT alarm_text, alarm_time FROM ringring.alarms
    WHERE session_id = {session_id}
    """).format(session_id=sql.Literal(session_id))
    cur.execute(query)
    data = []
    for result in cur.fetchall():
        data.append(result[0])
    conn.commit()
    conn.close()

    return data
Example 17
def delete_invoice(invoice_number):
    conn = psycopg2.connect(CONNECTION_STRING)
    cur = conn.cursor()
    query = sql.SQL("""
    DELETE FROM invoices.invoices WHERE invoice_number = {number};
    
    """).format(number=sql.Literal(invoice_number))
    try:
        cur.execute(query)
        conn.commit()
        conn.close()
        return True

    except psycopg2.ProgrammingError:
        return False
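Since nothing structural (table or column names) is dynamic in this statement, the same effect can also be had with an ordinary bound parameter instead of `sql.Literal`; a minimal alternative sketch (connection handling left to the caller):

def delete_invoice_param(conn, invoice_number):
    # pass the value as a bound parameter; psycopg2 handles the quoting
    with conn.cursor() as cur:
        cur.execute("DELETE FROM invoices.invoices WHERE invoice_number = %s",
                    (invoice_number,))
    conn.commit()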
Example 18
    def users_to_email(cls, conn, idlist):
        result = []
        ids_for_request = []
        for idl in idlist:
            (ids_for_request if type(idl) is int else result).append(idl)
        if len(ids_for_request):
            query = sql.SQL(
                'SELECT "Email" as eml FROM "Users" WHERE "Id" IN {}').format(
                    sql.Literal(tuple(ids_for_request)))

            def fact(row, res):
                res.append(row.eml)

            cls._sql_exec(conn, query, result, fact, named_result=True)
        return result
Example 19
    def _matching_imeis_sql(self, conn, app_config, virt_imei_range_start, virt_imei_range_end, curr_date=None):
        """Overrides Dimension._matching_imeis_sql.

        Matches duplicate IMEIs where the average daily number of UIDs seen with that IMEI over
        a configurable period exceeds a configurable threshold, provided that IMEI was seen on at
        least a configurable number of days during that period.
        """
        analysis_start_date, analysis_end_date = self._calc_analysis_window(conn, curr_date)
        return sql.SQL(
            """SELECT imei_norm
                 FROM (SELECT imei_norm,
                              bitcount(bit_or(combined_date_bitmask)) AS days_seen,
                              SUM(unique_bitcount(combined_date_bitmask)) AS uids_per_imei
                         FROM (SELECT mno.imei_norm,
                                      srs.uid,
                                      bit_or(get_bitmask_within_window(mno.date_bitmask,
                                                                       mno.first_seen,
                                                                       mno.last_seen,
                                                                       {analysis_start_date},
                                                                       {analysis_start_dom},
                                                                       {analysis_end_date},
                                                                       {analysis_end_dom})
                                            ) AS combined_date_bitmask
                                 FROM monthly_network_triplets_country AS mno
                            INNER JOIN subscribers_registration_list AS srs
                                                  ON srs.imsi = mno.imsi
                                WHERE mno.imei_norm IS NOT NULL
                                  AND mno.last_seen >= {analysis_start_date}
                                  AND mno.first_seen < {analysis_end_date}
                                  AND mno.virt_imei_shard >= {virt_imei_range_start}
                                  AND mno.virt_imei_shard < {virt_imei_range_end}
                                  AND is_valid_imsi(mno.imsi)
                                  AND is_valid_imsi(srs.imsi)
                             GROUP BY mno.imei_norm, srs.uid) all_seen_triplets
                    GROUP BY imei_norm) triplet_monthly_days
            GROUP BY imei_norm
                     HAVING SUM(days_seen) >= {min_seen_days_threshold}
                            AND (SUM(uids_per_imei)/SUM(days_seen)) >= {threshold}
           """).format(analysis_start_date=sql.Literal(analysis_start_date),  # noqa: Q447, Q449
                       analysis_start_dom=sql.Literal(analysis_start_date.day),
                       analysis_end_date=sql.Literal(analysis_end_date),
                       analysis_end_dom=sql.Literal(analysis_end_date.day),
                       virt_imei_range_start=sql.Literal(virt_imei_range_start),
                       virt_imei_range_end=sql.Literal(virt_imei_range_end),
                       min_seen_days_threshold=sql.Literal(self._min_seen_days),
                       threshold=sql.Literal(self._threshold)).as_string(conn)
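Because the caller splices this fragment into a larger query, the composed object is rendered to a plain string with `.as_string(conn)`, which needs a connection or cursor for its quoting context. A minimal sketch (table and bounds are illustrative):

import psycopg2
from psycopg2 import sql

conn = psycopg2.connect("dbname=test")  # illustrative connection
fragment = sql.SQL(
    "SELECT imei_norm FROM network_imeis "
    "WHERE virt_imei_shard >= {lo} AND virt_imei_shard < {hi}").format(
        lo=sql.Literal(0), hi=sql.Literal(10))
query_text = fragment.as_string(conn)  # a plain str, ready to embed elsewhere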
Example 20
    def get_schema(self, cur, table_schema, table_name):
        cur.execute(
            sql.SQL(
                'SELECT column_name, data_type, is_nullable FROM information_schema.columns '
            ) + sql.SQL('WHERE table_schema = {} and table_name = {};').format(
                sql.Literal(table_schema), sql.Literal(table_name)))

        properties = {}
        for column in cur.fetchall():
            properties[column[0]] = json_schema.from_sql(
                column[1], column[2] == 'YES')

        metadata = self.get_table_metadata(cur, table_schema, table_name)

        if metadata is None and not properties:
            return None
        elif metadata is None:
            metadata = {}

        metadata['name'] = table_name
        metadata['type'] = 'TABLE_SCHEMA'
        metadata['schema'] = {'properties': properties}

        return metadata
Example 21
def trial_to_paid(tablename: str, email: str):
    con = psycopg2.connect(dbname='test',
                           user='******',
                           password='******',
                           host='localhost')
    cur = con.cursor()
    state = sql.SQL(
        """UPDATE {tablename} SET status = false WHERE email = {email} AND status IS true"""
    ).format(tablename=sql.Identifier(tablename), email=sql.Literal(email))
    try:
        cur.execute(state)
        con.commit()
        return True
    except psycopg2.Error:
        return False
Example 22
def is_email_exist(tablename: str, email: str):
    con = psycopg2.connect(dbname='test',
                           user='******',
                           password='******',
                           host='localhost')
    cur = con.cursor()
    state = sql.SQL(
        """SELECT email FROM {tablename} WHERE email ={email}""").format(
            tablename=sql.Identifier(tablename), email=sql.Literal(email))
    cur.execute(state)
    result = cur.fetchone()
    return result is not None
Example 23
    def count_from_db(self, days, start_id):

        q = sql.SQL('''
            select count(l.id) as total
            from eventlog_log l
            inner join eventlog_action a
            on a.id = l.action_id and a.name = {}
            where {}
        ''').format(sql.Literal(self.action_name),
                    self.get_filters(days, start_id))

        self.cur.execute(q)
        rows = self.cur.fetchall()

        return rows[0]["total"]
Example 24
def sql_table_exists(table_name):
    """
    Verifies that the user's requested table is a public table in the SQL database. Use a string for input.
    """
    query = sql.SQL("""SELECT EXISTS
        (
        SELECT 1
        FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = {}
        );""").format(sql.Literal(table_name))
    xCURSORx.execute(query)
    extant = xCURSORx.fetchall()[0][0]
    logging.debug("SQL table " + table_name + " exists = " + str(extant))
    return extant
Example 25
def get_tag_ids(cursor, question_id):
    tag_ids = []

    cursor.execute(
        sql.SQL("""SELECT tag_id FROM question_tag
                   WHERE question_id = {question_id};
                       """).format(question_id=sql.Literal(question_id)))
    data = cursor.fetchall()

    for row in data:
        tag_ids.append(row['tag_id'])

    tag_ids = tuple(tag_ids)

    return tag_ids
Example 26
    def get_chats_favourite_users(self, unique_id: str):
        """
        Method returns a list of ids of chats associated with the certain tiktoker.

        :param unique_id: the nickname of a tiktoker
        :return: a list of chat ids
        """
        with self.connection.cursor() as cur:
            query = sql.SQL("SELECT chat_id FROM favourite_users "
                            "WHERE unique_id = {}").format(
                                sql.Literal(unique_id))
            cur.execute(query)
            chat_ids = [chat_id[0] for chat_id in cur.fetchall()]

        return chat_ids
Example 27
    def get_balance_by_cid(self, cid):
        """called at the server side to retrieve the account balance of the given client_id (cid)

        @param cid: client id
        @return dict {'balance': balance, 'base': base}
        """
        #remove LIMIT 1 FOR UPDATE SKIP LOCKED
        query = sql.SQL("SELECT banking.balance, cur.currency_name FROM banking INNER JOIN currency AS cur ON (cur.id=banking.currency_id) WHERE banking.client_id={cid} ;").\
            format(cid=sql.Literal(cid))
        self.db_log.debug(query)
        self.cur.execute(query)
        balance, base = self.cur.fetchone()
        return {'balance': balance, 'base': base}
Example 28
def stock_labels(stk, elim_dct=get_eliminated_words()):
    q = sql.Composed(
        [
            sql.SQL("SELECT name FROM etfs WHERE ticker IN "),
            sql.SQL("(SELECT etf FROM stk_etfs WHERE stk = "),
            sql.Literal(stk),
            sql.SQL(")")
        ]
    )
    res = stxdb.db_read_cmd(q.as_string(stxdb.db_get_cnx()))
    etf_words = ' '.join([x[0] for x in res])
    etf_words_list = etf_words.split()
    labels = [x for x in etf_words_list if x not in elim_dct]
    labels = list(set(labels))
    return labels
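`sql.Composed` simply concatenates the listed pieces; the same query can also be written with a single template and `.format()`, which is often easier to read. A sketch of the equivalent composition (the ticker is illustrative):

from psycopg2 import sql

q = sql.SQL(
    "SELECT name FROM etfs WHERE ticker IN "
    "(SELECT etf FROM stk_etfs WHERE stk = {stk})").format(
        stk=sql.Literal("AAPL"))  # illustrative ticker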
Example 29
def get_tokens_for_user_uuid(user_uuid, revoked=None):
    with get_connection() as connection:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        statement = sql.SQL(
            "SELECT user_uuid, jti, created, name, revoked from api_tokens where user_uuid = {}"
        ).format(sql.Placeholder())
        if revoked is not None:
            statement = sql.SQL(" ").join([
                statement,
                sql.SQL("AND revoked = {}").format(sql.Literal(revoked))
            ])
        cursor.execute(statement, (user_uuid, ))
        data = cursor.fetchall()
        cursor.close()
        return data
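The base statement keeps `sql.Placeholder()`, so `user_uuid` is still bound at execute time, while the optional `revoked` filter is baked in as a literal. A minimal sketch of mixing the two (the table name is illustrative):

from psycopg2 import sql

stmt = sql.SQL(
    "SELECT jti FROM api_tokens WHERE user_uuid = {} AND revoked = {}").format(
        sql.Placeholder(),    # rendered as %s and filled in by execute()
        sql.Literal(False))   # rendered directly as false
# cursor.execute(stmt, (user_uuid,))  -- only the placeholder takes a value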
Example 30
    def get_cursor(self, schema=None):
        """Get a database cursor object to be used for making queries.

        The optional schema indicated will cause a SET SCHEMA command, but
        only if required. If schema is None, it means that any previous SET
        SCHEMA command on the connection won't matter (the queries will use
        explicit schemas)."""
        #FIXME: maybe use server side cursors?
        conn = self.connection
        cursor = conn.cursor(cursor_factory=DictCursor)
        if schema and self._last_schema.get(id(conn)) != schema:
            cursor.execute(
                sql.SQL("SET SCHEMA {}").format(sql.Literal(schema)))
            self._last_schema[id(conn)] = schema
        return cursor
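`SET SCHEMA` takes a string constant rather than an identifier, which is why `sql.Literal` (not `sql.Identifier`) is used here. A minimal sketch of the rendered statement (the schema name is illustrative):

from psycopg2 import sql

stmt = sql.SQL("SET SCHEMA {}").format(sql.Literal("reporting"))
# renders as: SET SCHEMA 'reporting'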