Пример #1
0
def test_handle_redirect_reregister(
    mock_jwt_required, mock_jwt_identity, client: testing.FlaskClient, conn: connection,
):
    """OAuth redirect for an already-registered user should overwrite the
    stored Googlefit token rather than create a new service user.

    NOTE(review): relies on conftest fixtures patching the JWT decorators.
    """
    user = conftest.users[0]
    h_user = register_user(user, conn)  # pre-register so the redirect is a re-registration
    token = fake_token(user)
    # Look up the service-side user id created by the initial registration.
    service_user_id = queries.service_user_id_for_gargling_id(
        conn,
        gargling_id=user.id,
        token_gargling_table=f"{GooglefitService.name}_token_gargling",
    )["service_user_id"]
    mock_jwt_identity.return_value = h_user.gargling_id
    # Stub out the OAuth token exchange; Googlefit reports no service user id.
    with patch("gargbot_3000.health.googlefit.GooglefitService.token") as mock_handler:
        mock_handler.return_value = None, token
        response = client.get("/googlefit/redirect", query_string={"code": "123"})
    assert response.status_code == 200
    # The existing token row must now hold the new token values.
    with conn.cursor() as cursor:
        cursor.execute(
            "SELECT * FROM googlefit_token where id = %(fake_user_id)s",
            {"fake_user_id": service_user_id},
        )
        data = cursor.fetchone()
    assert data["access_token"] == token.token
    assert data["refresh_token"] == token.refresh_token
    assert pendulum.from_timestamp((data["expires_at"])) == token.expiry
    # The gargling <-> service-user mapping must still point at the same user.
    with conn.cursor() as cursor:
        cursor.execute(
            "SELECT gargling_id "
            f"FROM googlefit_token_gargling where service_user_id = %(fake_user_id)s",
            {"fake_user_id": service_user_id},
        )
        data = cursor.fetchone()
    assert data["gargling_id"] == h_user.gargling_id
Пример #2
0
def test_handle_redirect(
    mock_jwt_required,
    mock_jwt_identity,
    client: testing.FlaskClient,
    conn: connection,
):
    """First-time OAuth redirect should persist the Polar token and map the
    Polar service user id to the gargling."""
    user = conftest.users[0]
    token = fake_token(user)
    fake_id = token["x_user_id"]
    mock_jwt_identity.return_value = user.id
    # Stub the OAuth code-for-token exchange.
    with patch("gargbot_3000.health.polar.PolarService.token") as mock_handler:
        mock_handler.return_value = fake_id, token
        response = client.get("/polar/redirect", query_string={"code": "123"})
    assert response.status_code == 200
    # Token row is stored under the Polar-provided user id.
    with conn.cursor() as cursor:
        cursor.execute(
            "SELECT * FROM polar_token where id = %(fake_user_id)s",
            {"fake_user_id": fake_id},
        )
        data = cursor.fetchone()
    assert data["id"] == fake_id
    assert data["access_token"] == token["access_token"]
    # Mapping row links the service user id back to the gargling.
    with conn.cursor() as cursor:
        cursor.execute(
            "SELECT * FROM polar_token_gargling where service_user_id = %(fake_user_id)s",
            {"fake_user_id": fake_id},
        )
        data = cursor.fetchone()
    assert data["gargling_id"] == user.id
Пример #3
0
def test_multiple_users_exclusive(conn: connection,
                                  dbx: conftest.MockDropbox) -> None:
    """Adding the "kun" (only) argument must restrict picture choice to
    pictures containing exactly the requested users."""
    users = ["slack_nick2", "slack_nick3"]
    exclusive_pic = "test_pic4"
    # get seed that returns nonexclusive
    for seed in range(0, 20):
        with conn.cursor() as cursor:
            cursor.execute(f"select setseed(0.{seed})")
        url1, timestamp, description = pictures.get_pic(conn,
                                                        dbx,
                                                        arg_list=users)
        assert_valid_returns(url1, timestamp, description)
        if not url1.endswith(exclusive_pic):
            break
    else:  # no test coverage
        # for/else: only reached when no seed produced a non-exclusive pic.
        raise Exception("could not find good seed")

    # Re-seed identically; with "kun" the same draw must now be exclusive.
    with conn.cursor() as cursor:
        cursor.execute(f"select setseed(0.{seed})")
    url2, timestamp, description = pictures.get_pic(conn,
                                                    dbx,
                                                    arg_list=["kun"] + users)
    assert_valid_returns(url2, timestamp, description)
    assert url2.endswith(exclusive_pic)
    # Repeated exclusive draws must always return pictures with exactly
    # faces 2 and 3 (the requested users).
    for _ in range(10):
        url3, timestamp, description = pictures.get_pic(conn,
                                                        dbx,
                                                        arg_list=["kun"] +
                                                        users)
        assert_valid_returns(url3, timestamp, description)
        pic = next(pic for pic in conftest.pics if url3.endswith(pic.path))
        assert pic.faces == [2, 3], f"Wrong picture {pic}"
Пример #4
0
def transfer_by_name_tpc(from_connection: connection, from_name: str, to_connection: connection, to_name: str,
                         amount: Union[Decimal, int]):
    """Move *amount* from one user's balance to another, across two connections.

    :param from_connection: connection on which the debit is executed.
    :param from_name: name of the user to debit.
    :param to_connection: connection on which the credit is executed.
    :param to_name: name of the user to credit.
    :param amount: amount to transfer.
    :raises MoneyAmountError: when a balance constraint is violated.

    NOTE: despite the ``_tpc`` suffix, no prepare/commit is issued here —
    the caller is expected to drive the two-phase commit on both connections.
    """
    with from_connection.cursor() as from_cursor, to_connection.cursor() as to_cursor:
        try:
            from_cursor.execute("UPDATE USERS SET current_amount = current_amount - %s WHERE name=%s",
                                [amount, from_name])
            to_cursor.execute("UPDATE USERS SET current_amount = current_amount + %s WHERE name=%s", [amount, to_name])
        except IntegrityError as err:
            # Chain the cause so the underlying constraint violation stays
            # visible in tracebacks (the original dropped it).
            raise MoneyAmountError() from err
Пример #5
0
def create_table_from_csv(
        conn: connection,
        table: str,
        csv_file: str,
        delim: str = ';',
        indexes: dict = None
) -> bool:
    """Creates a table in PostgreSQL from a CSV header.
    If the table exists, it does nothing.

    :param conn: Psycopg2 connection to DB.
    :param table: (str) Name of the new table.
    :param csv_file: (str) Path of the CSV file.
    :param delim: (str) Delimiter used in the CSV.
    :param indexes: (dict) Indexes to create in format index_name -> [columns]
    :return: (bool) True if a new table was created.
    """

    # NOTE(review): the existence check matches table_name in every schema;
    # confirm this is intended for databases with multiple schemas.
    query0 = """
        select exists (
            select * from information_schema.tables
            where table_name = %s);
    """
    # Identifiers cannot be bound as %s parameters, hence sql.SQL composition.
    query1 = sql.SQL('create table if not exists {} ();')
    # The %s is later filled with AsIs(col_type), splicing the type name in
    # verbatim; col_type comes from map_numpy_psql, not from user input.
    query2 = sql.SQL('alter table {} add column {} %s;')
    with conn.cursor() as cur:
        cur.execute(query0, (table,))
        exists = cur.fetchone()[0]
    if exists:
        return False

    columns = get_columns(csv_file, delim=delim)
    # because table and column names are variable
    # create first an empty table, then add columns
    # it may raise
    with conn.cursor() as cur:
        cur.execute(query1.format(sql.Identifier(table)))
        for key, val in columns.items():
            col_name = key
            col_type = map_numpy_psql(val)
            cur.execute(query2.format(
                sql.Identifier(table),
                sql.Identifier(col_name)),
                (AsIs(col_type),)
            )
    if isinstance(indexes, dict):
        for key, val in indexes.items():
            create_index(conn, table, key, val)
    conn.commit()
    return True
Пример #6
0
def query_db_without_commit(conn: connection, query: str,
                            query_args: Query_Args):
    """
    Performs an operation on a database without committing the result.

    :param conn: open database connection; the transaction is left pending,
        so the caller decides whether to commit or roll back.
    :param query: SQL statement with placeholders.
    :param query_args: parameters bound to the placeholders.
    """
    # Context manager guarantees the cursor is closed even when execute()
    # raises (the original leaked it).
    with conn.cursor() as cursor:
        cursor.execute(query, query_args)
Пример #7
0
def insert(conn: connection, user: User):
    """Persist *user* in USERS and store the generated primary key on it."""
    statement = (
        "INSERT INTO USERS (name, min_amount, max_amount, current_amount) VALUES (%s, %s, %s, %s) RETURNING id"
    )
    values = [user.name, user.min_, user.max_, user.current]
    with conn.cursor() as cursor:
        cursor.execute(statement, values)
        row = cursor.fetchone()
        user.id_ = row[0]
Пример #8
0
def create_article(
    db_connection: connection,
    article_input: ArticleInput,
    author_id: int,
    created_date: datetime,
) -> Article:
    """Insert a new article and return it with its status name resolved.

    :param db_connection: open database connection.
    :param article_input: title/content payload for the new article.
    :param author_id: id of the authoring user.
    :param created_date: creation timestamp stored on the row.
    :return: the persisted article, including its generated id and status.
    """
    cursor = db_connection.cursor(cursor_factory=DictCursor)

    # RETURNING hands back the stored row without a second round-trip;
    # status_id is not supplied by the INSERT, so it presumably comes from a
    # column default — confirm against the schema.
    cursor.execute(
        """
            INSERT INTO articles(
                title,
                author_id,
                content,
                created_date
            )
            VALUES (
                %s,
                %s,
                %s,
                %s
            )
            RETURNING
                id,
                title,
                author_id,
                content,
                status_id,
                created_date;
        """, (
            article_input.title,
            author_id,
            article_input.content,
            created_date,
        ))

    created_article = cursor.fetchone()
    # Commit before the follow-up read so the insert is durable.
    db_connection.commit()

    # Resolve the numeric status_id to its human-readable name.
    cursor.execute(
        """
            SELECT
                article_statuses.name
            FROM
                article_statuses
            WHERE
                article_statuses.id = %s;
        """, (created_article['status_id'], ))

    article_status = cursor.fetchone()
    cursor.close()

    return Article(
        id=created_article['id'],
        title=created_article['title'],
        content=created_article['content'],
        author_id=created_article['author_id'],
        status=article_status['name'],
        created_date=created_article['created_date'],
    )
Пример #9
0
    def garg(self, db: connection, args: List[str]):
        """Return a random forum quote, optionally filtered to one user.

        :param db: open database connection (rows are accessed by name).
        :param args: optional [slack_nick]; an unknown nick returns an
            error string instead of a quote.
        :return: formatted quote string with attribution and permalink.
        """
        user = args[0] if args else None
        if user and user not in self.slack_nicks_to_db_ids:
            return f"Gargling not found: {user}. Husk å bruke slack nick"

        # user_filter is interpolated into the SQL below; safe because the
        # values are internal integer ids, never raw user input.
        if user:
            user_filter = f"= {self.slack_nicks_to_db_ids[user]}"
        else:
            user_filter = "IN (2, 3, 5, 6, 7, 9, 10, 11)"

        sql = (
            "SELECT db_id, post_text, post_time, post_id, bbcode_uid "
            f"FROM phpbb_posts WHERE db_id {user_filter} ORDER BY RANDOM() LIMIT 1"
        )

        cursor = db.cursor()
        cursor.execute(sql)
        result = cursor.fetchone()
        db_id = result["db_id"]
        post_id = result["post_id"]
        # Resolve the nick when the post was drawn from the full id list.
        user = user if user is not None else self.db_ids_to_slack_nicks[db_id]
        post = self._sanitize(result["post_text"], result["bbcode_uid"])
        quote = (
            f"{post}\n"
            "------\n"
            f"- {user}\n"
            f"http://eirik.stavestrand.no/gargen/viewtopic.php?p={post_id}#p{post_id}\n"
        )
        return quote
def get_assets(connection: extensions.connection) -> Iterator[Asset]:
    """
    Yield every asset that has a non-null Avro schema name.

    :param connection: A database connection.
    :return: An iterator of Asset objects.
    """
    sql = '''
         select
             asset.asset_uid,
             is_sensor_type.avro_schema_name
         from
             asset, is_asset_assignment, is_asset_definition, is_sensor_type
         where
             asset.asset_uid = is_asset_assignment.asset_uid
         and
             is_asset_assignment.asset_definition_uuid = is_asset_definition.asset_definition_uuid
         and
             is_asset_definition.sensor_type_name = is_sensor_type.sensor_type_name
         and 
             is_sensor_type.avro_schema_name is not null
    '''
    with closing(connection.cursor()) as cursor:
        cursor.execute(sql)
        # Unpack each row directly into its two selected columns.
        for asset_uid, schema_name in cursor.fetchall():
            yield Asset(id=asset_uid, type=schema_name)
Пример #11
0
def select_row_count(table_name,conn:DB_CON):
    """Print the row count of *table_name* and close the connection.

    NOTE(review): table_name is concatenated straight into the SQL — this is
    injectable if it can ever come from untrusted input; verify the callers.
    NOTE(review): closes the caller's connection as a side effect, so the
    connection cannot be reused afterwards.
    """
    query="SELECT count(*) FROM "+table_name+" "
    cur=conn.cursor()
    cur.execute(query)
    query_results=cur.fetchall()
    print(query_results)
    conn.close()
Пример #12
0
def download_and_import_google_image_index(conn: connection) -> None:
    """Download the Google image indexes (Landsat and Sentinel-2 L2) and
    import them into a spatially-enabled PostgreSQL database.

    Args:
        conn (connection): Connection to the database receiving the data.
    Returns:
        None
    """

    _tmp_dir = temp.tempdir()
    landsat_file = os.path.join(_tmp_dir, "landsat.csv.gz")
    sentinel_file = os.path.join(_tmp_dir, "sentinel_index_l2.csv.gz")

    __download_file(GOOGLE_INDEX_LANDSAT, landsat_file)
    __download_file(GOOGLE_INDEX_SENTINEL_L2, sentinel_file)

    # Decompress the downloaded archives.
    sentinel_file_extracted = __extract_gzfile(sentinel_file)
    landsat_file_extracted = __extract_gzfile(landsat_file)

    try:
        # Context manager guarantees the cursor is closed (it was leaked).
        with conn.cursor() as cur:
            # The CSV header line is stripped before COPY.
            cur.copy_from(__remove_firstline_in_file(landsat_file_extracted),
                          "landsat_index",
                          sep=",")
            cur.copy_from(__remove_firstline_in_file(sentinel_file_extracted),
                          "sentinel_index",
                          sep=",")
        conn.commit()
    except Exception as error:
        # Was `except BaseException`, which also swallowed KeyboardInterrupt
        # and SystemExit — narrowed to Exception.
        warnings.warn(
            f"Problemas ao inserir os dados no banco: \n {str(error)}")
    # Best-effort cleanup of the temporary download directory.
    shutil.rmtree(_tmp_dir, ignore_errors=True)
def get_visitor_permissions_and_role(
    db_connection: connection,
) -> Optional[Dict[str, Any]]:
    """Return the VISITOR role row with its aggregated permission names,
    or None when no such role exists."""
    role_query = """
        SELECT
            user_roles.role AS role,
            ARRAY_AGG(permissions.name) AS permissions
        FROM
            user_roles
        LEFT JOIN
            user_roles_to_permissions
            ON user_roles.id = user_roles_to_permissions.user_role_id
        INNER JOIN
            permissions
            ON user_roles_to_permissions.permission_id = permissions.id
        WHERE
            user_roles.role = 'VISITOR'
        GROUP BY
            user_roles.role;
    """
    cursor = db_connection.cursor(cursor_factory=DictCursor)
    cursor.execute(role_query)
    row = cursor.fetchone()
    cursor.close()
    return row
Пример #14
0
def get_named_location_schema_name(connection: extensions.connection, named_location_id: int) -> Set[str]:
    """
    Get the schema name(s) for a named location.

    :param connection: The database connection.
    :param named_location_id: The named location ID.
    :return: The schema names found for the location.
    """
    sql = '''
        select 
            is_sensor_type.avro_schema_name
        from 
            is_sensor_type, is_asset_definition, is_asset_assignment, is_asset_location, nam_locn
        where
            is_sensor_type.sensor_type_name = is_asset_definition.sensor_type_name
        and 
            is_asset_definition.asset_definition_uuid = is_asset_assignment.asset_definition_uuid
        and 
            is_asset_assignment.asset_uid = is_asset_location.asset_uid
        and 
            is_asset_location.nam_locn_id = nam_locn.nam_locn_id
        and 
            is_sensor_type.avro_schema_name is not null
        and 
            nam_locn.nam_locn_id = %s
    '''
    with closing(connection.cursor()) as cursor:
        cursor.execute(sql, [named_location_id])
        # Collect the single selected column into a set in one pass.
        return {record[0] for record in cursor.fetchall()}
Пример #15
0
def get_threshold_context(connection: extensions.connection,
                          threshold_uuid: str) -> List[str]:
    """
    Get all context codes for a threshold.

    :param connection: A database connection.
    :param threshold_uuid: The threshold UUID.
    :return: The context codes.
    """
    sql = '''
            select 
                context_code 
            from 
                threshold_context 
            where 
                threshold_uuid = %s
        '''
    with closing(connection.cursor()) as cursor:
        cursor.execute(sql, [threshold_uuid])
        # Flatten the single-column result set into a plain list.
        return [record[0] for record in cursor.fetchall()]
Пример #16
0
def sentences_you_should_know(
    db: connection,
    offset: Tuple[int, int] = (0, 0),
    limit: Optional[int] = None,
) -> Tuple[int, List[Tuple[int, str, List[str]]]]:
    """Return (max sentence level, rows of (lvl, id, jpn, translations)).

    Lazily back-fills sentences.lvl for sentences whose words gained levels
    since the last call, then pages through sentences ordered by (lvl, id)
    starting at *offset*, optionally capped at *limit* rows.
    """
    with db.cursor() as cur:
        cur.execute('select max(lvl) from sentences')
        (max_lvl_at_sentences, ) = cur.fetchone()
        # Let's update lvl in sentences for all the sentences that have words updated in the
        # levels from max_lvl_at_sentences:
        # A sentence's lvl becomes the max word lvl, but only once none of
        # its words has a null lvl (the HAVING clause).
        cur.execute(
            """
            with to_update(id) as (
                select s_id from sentence_words where lvl >= %s
                group by s_id order by s_id)
            update sentences
            set lvl = (
                select max(lvl) from sentence_words where s_id=id
                group by s_id having count(case when lvl is null then 1 end) = 0)
            where lvl is null and id in (select * from to_update)
        """, (0 if max_lvl_at_sentences is None else max_lvl_at_sentences +
              1, ))
        # Only commit when the back-fill actually touched rows.
        if cur.rowcount:
            db.commit()
        # Then we can do a fast query on only sentences:
        # The row-valued comparison (lvl, id) >= (%s, %s) implements
        # keyset pagination from *offset*.
        cur.execute(
            """
            select lvl, id, jpn, translations
            from sentences
            where (lvl, id) >= (%s, %s)
            order by lvl, id
        """ + ('limit %s' if limit is not None else ''),
            offset + ((limit, ) if limit is not None else ()),
        )
        return max_lvl_at_sentences, cur.fetchall()
Пример #17
0
def test_persist_token(conn: connection):
    """register_user must store exactly one token row and one mapping row,
    and both must share the same service user id."""
    user = conftest.users[0]
    register_user(user, conn)
    with conn.cursor() as cur:
        cur.execute("select * from googlefit_token")
        tokens = cur.fetchall()
        cur.execute("select * from googlefit_token_gargling")
        matched = cur.fetchall()
    assert len(tokens) == 1
    token = dict(tokens[0])
    # Pop the generated id so the remaining columns compare wholesale.
    id1 = token.pop("id")
    exp = {
        "access_token": "access_token2",
        "refresh_token": "refresh_token2",
        "expires_at": expiry,  # module-level fixture value
        "enable_steps": False,
        "enable_weight": False,
    }
    assert token == exp
    assert len(matched) == 1
    match = dict(matched[0])
    id2 = match.pop("service_user_id")
    exp = {"gargling_id": 2}
    assert match == exp
    # Token row and mapping row must reference the same service user.
    assert id1 == id2
Пример #18
0
def effectuate_table(
    connector: connection,
    delete_rows: List[int],
    update_rows: Dict[int, str],
    table: str,
):
    """
    Send updated data back to the database.

    Deletes the rows whose ids are listed in *delete_rows*, then rewrites
    virkning.TimePeriod for each id in *update_rows*, all inside one
    explicit transaction (BEGIN ... COMMIT).

    NOTE(review): *table* and the ids are interpolated via f-strings; ids
    are ints so injection is limited, but confirm *table* never derives
    from user input.
    NOTE(review): update_rows values are annotated str but are used as
    range-like objects with a private ``_upper`` attribute — the
    annotation looks wrong.
    """
    cursor = connector.cursor(cursor_factory=RealDictCursor)
    cursor.execute("BEGIN")

    if delete_rows:
        # tuple() of one element renders as "(1,)" — invalid SQL — hence
        # the special case for a single id.
        if len(delete_rows) == 1:
            delete_statement = f"WHERE id = {delete_rows[0]}"
        else:
            delete_statement = f"WHERE id IN {tuple(delete_rows)}"
        cursor.execute(
            f"""
            DELETE FROM {table}
            {delete_statement};
            """,
        )
    for id, virkning in update_rows.items():
        # Map a year-9999 upper bound to the database's "infinity".
        if virkning._upper.year == 9999:
            virkning._upper = "infinity"  # lol
        cursor.execute(
            f"""
            UPDATE {table}
            SET virkning.TimePeriod=%(virkning)s
            WHERE id = %(id)s;
            """,
            {"id": id, "virkning": virkning},
        )
    cursor.execute("COMMIT")
Пример #19
0
def producer_generator(target: Coroutine, *, table_name: str, pg_conn: _connection, storage: BaseStorage):
    """Stream ids of rows updated since the stored state into *target*.

    Reads content.<table_name> ordered by updated_at, sends batches of ids
    to the target coroutine, and advances the persisted state to the last
    row's updated_at after every batch, so a restart resumes where it left off.
    """
    cursor = pg_conn.cursor()
    # fetchmany() below uses arraysize as its default batch size.
    cursor.arraysize = config.LIMIT

    state_manager = State(storage=storage)
    default_date = str(datetime(year=1700, month=1, day=1))
    current_state = state_manager.state.get(table_name, default_date)

    # table_name is interpolated into the SQL; callers must pass trusted
    # table names only.
    sql = f"""
            SELECT id, updated_at
            FROM content.{table_name}
            WHERE updated_at >= %s
            ORDER BY updated_at
            """

    cursor.execute(sql, (current_state,))

    while True:
        batch_result = cursor.fetchmany()
        ids_list = [item['id'] for item in batch_result]

        if not ids_list:
            break

        target.send(ids_list)

        # Persist progress after each delivered batch.
        state_manager.set_state(
            key=table_name, value=str(batch_result[-1]['updated_at']))
Пример #20
0
 def __init__(self, db: connection) -> None:
     """Load years, topics, and users from the database and connect Dropbox."""
     cursor = db.cursor()
     self.years = self.get_years(cursor)
     self.topics = self.get_topics(cursor)
     self.users = self.get_users(cursor)
     # Union of all tokens that are valid lookup arguments.
     self.possible_args = self.topics | self.years | set(self.users)
     self._connect_dbx()
Пример #21
0
def populate_quotes_table(db: connection) -> None:
    """Seed phpbb_posts and msn_messages from the module-level fixtures."""
    quote_sql = """INSERT INTO phpbb_posts (db_id, post_id, post_time, post_text, bbcode_uid)
            VALUES (%(db_id)s,
                   %(post_id)s,
                   %(post_time)s,
                   %(post_text)s,
                   %(bbcode_uid)s);"""
    message_sql = """INSERT INTO msn_messages (session_id, msg_time, msg_color, from_user, msg_text, db_id)
            VALUES (%(session_id)s,
                   %(msg_time)s,
                   %(msg_color)s,
                   %(from_user)s,
                   %(msg_text)s,
                   %(db_id)s);"""
    with db.cursor() as cursor:
        for quote in quotes:
            cursor.execute(
                quote_sql,
                {
                    "db_id": quote.db_id,
                    "post_id": quote.post_id,
                    "post_time": quote.post_time,
                    "post_text": quote.post_text,
                    "bbcode_uid": quote.bbcode_uid,
                },
            )
        for message in messages:
            cursor.execute(
                message_sql,
                {
                    "session_id": message.session_id,
                    "msg_time": message.msg_time,
                    "msg_color": message.msg_color,
                    "from_user": message.from_user,
                    "msg_text": message.msg_text,
                    "db_id": message.db_id,
                },
            )
def get_named_location_context(connection: extensions.connection,
                               named_location_id: int) -> List[str]:
    """
    Get context entries for a named location.

    Entries with a group id render as "<code>-<group>"; ungrouped entries
    are the bare context code.

    :param connection: A database connection.
    :param named_location_id: The named location ID.
    :return: The context entries.
    """
    sql = '''
        select 
            context_code, context_group_id
        from 
            nam_locn_context 
        where 
            nam_locn_id = %s
    '''
    entries: List[str] = []
    with closing(connection.cursor()) as cursor:
        cursor.execute(sql, [named_location_id])
        for code, group_id in cursor.fetchall():
            entry = code if group_id is None else f'{code}-{str(group_id)}'
            entries.append(entry)
    return entries
Пример #23
0
def get_comments_count(db_connection: connection,
                       article_id: Optional[int] = None) -> int:
    """Count comments, optionally restricted to a single article.

    When *article_id* is None the WHERE CASE collapses to TRUE and every
    comment is counted.
    """
    count_query = """
        SELECT
            count(*) AS comments_count
        FROM
            comments
        WHERE
            CASE
                -- Has article_id filter?
                WHEN %s
                    THEN comments.article_id = %s
                    ELSE TRUE
            END;
    """
    cursor = db_connection.cursor(cursor_factory=DictCursor)
    cursor.execute(count_query, (article_id is not None, article_id))
    row = cursor.fetchone()
    cursor.close()
    return row['comments_count']
Пример #24
0
def delete_database(conn: connection, test_db: bool):
    """Delete the database.

    :param conn: database connection used to issue the DROP
        (NOTE(review): must not be connected to the target database —
        confirm with callers).
    :param test_db: when True, drop the test database without confirmation.
    """
    if test_db:
        database = TestingConfig.POSTGRES_DB
    else:
        database = current_app.config["POSTGRES_DB"]

    cursor = conn.cursor()
    # Clear any open transaction so DROP DATABASE can run.
    cursor.execute("ROLLBACK")

    # Don't validate name for `police_data_test`.
    if database != TestingConfig.POSTGRES_DB:
        # Make sure we want to do this.
        click.echo(f"Are you sure you want to delete database {database!r}?")
        click.echo("Type in the database name '" +
                   click.style(database, fg="red") + "' to confirm")
        confirmation = click.prompt("Database name")
        if database != confirmation:
            click.echo("The input does not match. "
                       "The database will not be deleted.")
            return None

    try:
        # Identifier cannot be parameterized; *database* comes from config,
        # not user input (the prompt above only confirms, it is not spliced in).
        cursor.execute(f"DROP DATABASE {database};")
    except psycopg2.errors.lookup("3D000"):  # invalid_catalog_name
        click.echo(f"Database {database!r} does not exist.")
        cursor.execute("ROLLBACK")
    else:
        click.echo(f"Database {database!r} was deleted.")
Пример #25
0
def hyperparameter_optimization(a:Namespace,c:connection,task:str):
    """Set up an Ax search space (lr, weight decay, optimizer, criterion)
    for the given task and draw one initial Sobol point.

    :param a: parsed CLI arguments carrying the bookkeeping table names.
    :param c: database connection used for the bookkeeping tables.
    :param task: colon-separated string; split into
        (_, ss, data_composition_key, model_key).
    :return: True.  NOTE(review): sobol/generator_run results and
        dtype/device are currently unused — confirm this is intentional.
    """
    dtype = torch.float
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # NOTE(review): module-level globals are (re)bound here, presumably so
    # helper functions can reach them; consider passing them explicitly.
    global cur
    cur = c.cursor()
    global conn
    conn = c
    global args
    args = a

    global ss
    global data_composition_key
    global model_key
    _,ss,data_composition_key,model_key=task.split(":")

    make_sure_table_exist(args, conn, cur, args.states_current_task_table_name)
    make_sure_table_exist(args, conn, cur, args.best_validation_results_table_name)
    make_sure_table_exist(args, conn, cur, args.best_test_results_table_name)

    range_lr = ax.RangeParameter(name="lr",lower=1e-7,upper=0.5,parameter_type=ax.ParameterType.FLOAT)
    range_weight_decay = ax.RangeParameter(name="weight_decay",lower=1e-8,upper=0.5,parameter_type=ax.ParameterType.FLOAT)
    choice_optimizer = ax.ChoiceParameter(name="optimizer", values=["Adadelta","Adagrad","Adam","AdamW","Adamax","ASGD","RMSprop","Rprop","SGD"], parameter_type=ax.ParameterType.STRING)
    choice_criterion = ax.ChoiceParameter(name="criterion",values=["BCELoss","MSELoss"],parameter_type=ax.ParameterType.STRING)

    search_space = ax.SearchSpace(parameters=[range_lr, range_weight_decay,choice_optimizer,choice_criterion])

    experiment = ax.Experiment(name="experiment_building_blocks",search_space=search_space)

    sobol = ax.Models.SOBOL(search_space=experiment.search_space)
    generator_run = sobol.gen(1)

    return True
Пример #26
0
def exec_query(query:str,conn:DB_CON):
    """Run *query*, print a status line, close the connection, and return
    all fetched rows.

    NOTE(review): this closes the caller's connection as a side effect,
    so the connection cannot be reused afterwards.
    """
    # Context manager releases the cursor even if execute() raises
    # (the original leaked it on error).
    with conn.cursor() as cur:
        cur.execute(query)
        query_results = cur.fetchall()
    print("query run successful")  # fixed typo: was "successfull"
    conn.close()
    return query_results
def get_active_periods(connection: extensions.connection,
                       named_location_id: int) -> List[ActivePeriod]:
    """
    Get the active time periods for a named location.

    Rows without a start date are skipped.

    :param connection: A database connection.
    :param named_location_id: A named location ID.
    :return: The active periods.
    """
    sql = '''
        select 
            start_date, end_date 
        from 
            active_period 
        where 
            named_location_id = %s
    '''
    with closing(connection.cursor()) as cursor:
        cursor.execute(sql, [named_location_id])
        return [
            ActivePeriod(start_date=start, end_date=end)
            for start, end in cursor.fetchall()
            if start is not None
        ]
Пример #28
0
def pg_get_version(pg_conn: connection) -> str:
    """Get PostgreSQL server version.

    :param pg_conn: An open connection.
    :return: The string reported by the server's ``version()`` function.
    """
    query = "SELECT version() AS pg_version"
    # Context manager closes the cursor (it previously leaked); also matches
    # the sibling implementation that already uses ``with``.
    with pg_conn.cursor() as cur:
        cur.execute(query)
        # Row is indexed by column name — presumably a dict-like cursor
        # factory is configured on the connection; confirm.
        ret: Dict[str, str] = cur.fetchone()
    return ret["pg_version"]
Пример #29
0
def pg_get_version(pg_conn: connection) -> str:
    """Return the PostgreSQL server version string."""
    with pg_conn.cursor() as cur:
        # The SQL text is looked up from the shared query registry.
        cur.execute(queries.get("get_version"))
        result: Dict[str, str] = cur.fetchone()
        return result["pg_version"]
Пример #30
0
def create(conn: connection, author: int, title: str, content: str):
    """Insert a new post row, deriving its slug from the title."""
    insert_sql = """
            INSERT INTO posts (title, slug, author, content)
            VALUES (%s, %s, %s, %s)
        """
    params = (title, generate_slug(title), author, content)
    with conn.cursor() as cursor:
        cursor.execute(insert_sql, params)