Example No. 1
def create_roles_and_users(con: connection, db_user: str,
                           db_password: str) -> None:
    """
    Creates the roles and users.

    Args:
        con (connection): A connection to the postgres database.
        db_user: The username to connect to the database with.
        db_password: The password to connect to the database with.

    Raises:
        DatabaseError: An error occurred.
    """
    con.set_isolation_level(ISOLATION_LEVEL_READ_COMMITTED)
    cur = get_cursor(con)
    sql_file = f"roles{SEP}app_role.sql"
    execute_sql_from_file(cur, sql_file, "create application role")
    sql_file = f"roles{SEP}appdbo_role.sql"
    execute_sql_from_file(cur, sql_file, "create appdbo role")
    sql_file = f"users{SEP}dbo.sql"
    execute_sql_from_file(cur, sql_file, "create dbo user")
    sql_file = f"users{SEP}appuser.sql"
    execute_sql_from_file(cur, sql_file, "create application user")
    sql_stmt = f"ALTER USER {db_user} WITH PASSWORD '{db_password}';"
    execute_sql(cur, sql_stmt, "set pw for application user")
    sql_stmt = f"ALTER USER dbo WITH PASSWORD '{db_password}';"
    execute_sql(cur, sql_stmt, "set pw for dbo user")
    con.commit()
    cur.close()
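Note that `db_password` is interpolated straight into the ALTER USER statement, so a quote character in the password would break (or inject into) the SQL. A minimal sketch of the same statement built with psycopg2's `sql` module instead; the `set_user_password` helper name is made up for illustration:

from psycopg2 import sql

def set_user_password(cur, db_user: str, db_password: str) -> None:
    # sql.Identifier quotes the user name and sql.Literal quotes the
    # password, so neither value can escape the statement.
    stmt = sql.SQL("ALTER USER {} WITH PASSWORD {}").format(
        sql.Identifier(db_user), sql.Literal(db_password))
    cur.execute(stmt)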
Example No. 2
def test_tables(cur: psycopg2Ext.cursor, conn: psycopg2Ext.connection) -> None:
    """
    Description: Test table status to make sure each table exists.

    Arguments:
        cur (psycopg2Ext.cursor): cursor object
        conn (psycopg2Ext.connection): connection object

    Returns:
        None
    """
    print("\n==================== TEST -- table status  ====================")

    for query in create_table_queries:
        tbl_name = query[query.find("EXISTS") +
                         len("EXISTS"):query.find("(")].strip()
        query = f"""select exists(select * from information_schema.tables
            where table_name='{tbl_name}')"""

        try:
            cur.execute(query)
        except psycopg2.Error as e:
            msg = f"ERROR: Could not retrieve table info with query: {query}"
            logger.warning("%s: %s", msg, e)
            return
        conn.commit()

        try:
            tbl_status = cur.fetchone()[0]
        except psycopg2.Error as e:
            msg = f"ERROR: Could not fetch table status for table: {tbl_name}"
            logger.warning("%s: %s", msg, e)
            return

        print(f"Table '{tbl_name}' exists status: {tbl_status}.")
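The table name is sliced out of the project's own `create_table_queries`, so the f-string interpolation above is safe in practice; the same check can still be written with a bound parameter, which avoids quoting concerns entirely. A sketch:

query = """select exists(select * from information_schema.tables
    where table_name = %s)"""
cur.execute(query, (tbl_name,))
tbl_status = cur.fetchone()[0]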
Example No. 3
def create_dist_schema(
    DWH_DB_USER: str,
    schema_name: str,
    cur: psycopg2Ext.cursor,
    conn: psycopg2Ext.connection,
) -> None:
    """
    Description: Create distribution schema and set search path
        to the schema name.

    Arguments:
        DWH_DB_USER (str): db user name to restrict authorization
        schema_name (str): schema
        cur (psycopg2Ext.cursor): cursor object
        conn (psycopg2Ext.connection): connection object

    Returns:
        None
    """

    queries = [
        f"DROP SCHEMA IF EXISTS {schema_name} CASCADE;",
        f"CREATE SCHEMA IF NOT EXISTS {schema_name} AUTHORIZATION {DWH_DB_USER};",
        f"SET search_path TO {schema_name};",
    ]

    for query in queries:
        try:
            cur.execute(query)
        except psycopg2.Error as e:
            msg = f"ERROR: Issue dropping/creating schema with query: {query}"
            logger.warning("%s: %s", msg, e)
            return
        conn.commit()
Example No. 4
def _migrate(migrations: List[MigrationFile],
             conn: connection,
             schema: Optional[str] = None):
    _prepare_meta_table(conn, schema)
    prev_mig = _fetch_previous_migrations(_get_schema_cursor(conn, schema))
    prev_mig_iter = iter(prev_mig)
    current_mig = _next_or_none(prev_mig_iter)
    migrations.sort(key=lambda k: k.stamp)

    for migration in migrations:
        while migration.is_after(current_mig):
            log.info(f'IGNORE  {current_mig.stamp} {current_mig.name}')
            current_mig = _next_or_none(prev_mig_iter)

        stamp_match = False
        try:
            if migration.is_equal(current_mig):
                log.info(f'SKIP    {migration.stamp} {migration.name}')
                stamp_match = True
        except ValueError:
            log.warning(
                f'BADHASH {migration.stamp} {migration.name} Expected {current_mig.hash_} got {migration.hash_}'
            )
            stamp_match = True

        if stamp_match:
            current_mig = _next_or_none(prev_mig_iter)
        else:
            curs = _get_schema_cursor(conn, schema)
            _execute_file(migration, curs)
            curs.close()
            conn.commit()
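`_next_or_none` is not shown in this excerpt; given how it is used, it presumably reduces to the two-argument form of `next`. A minimal sketch of that assumed helper:

def _next_or_none(iterator):
    # next() with a default swallows StopIteration and returns None.
    return next(iterator, None)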
Example No. 5
def create_article(
    db_connection: connection,
    article_input: ArticleInput,
    author_id: int,
    created_date: datetime,
) -> Article:
    cursor = db_connection.cursor(cursor_factory=DictCursor)

    cursor.execute(
        """
            INSERT INTO articles(
                title,
                author_id,
                content,
                created_date
            )
            VALUES (
                %s,
                %s,
                %s,
                %s
            )
            RETURNING
                id,
                title,
                author_id,
                content,
                status_id,
                created_date;
        """, (
            article_input.title,
            author_id,
            article_input.content,
            created_date,
        ))

    created_article = cursor.fetchone()
    db_connection.commit()

    cursor.execute(
        """
            SELECT
                article_statuses.name
            FROM
                article_statuses
            WHERE
                article_statuses.id = %s;
        """, (created_article['status_id'], ))

    article_status = cursor.fetchone()
    cursor.close()

    return Article(
        id=created_article['id'],
        title=created_article['title'],
        content=created_article['content'],
        author_id=created_article['author_id'],
        status=article_status['name'],
        created_date=created_article['created_date'],
    )
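The second round trip for the status name could be folded into the INSERT with a data-modifying CTE. A sketch, assuming `articles.status_id` references `article_statuses.id` and gets its value from a column default:

cursor.execute(
    """
        WITH inserted AS (
            INSERT INTO articles (title, author_id, content, created_date)
            VALUES (%s, %s, %s, %s)
            RETURNING id, title, author_id, content, status_id, created_date
        )
        SELECT inserted.*, article_statuses.name AS status
        FROM inserted
        JOIN article_statuses ON article_statuses.id = inserted.status_id;
    """, (article_input.title, author_id, article_input.content, created_date))
row = cursor.fetchone()  # one row, with the status name already joined in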
Example No. 6
def send_sync_reminders(conn: connection, slack_client, steps_data) -> None:
    reminder_users = queries.get_sync_reminder_users(conn)
    reminder_users_by_id = {user["id"]: user for user in reminder_users}
    for datum in steps_data:
        try:
            user_data = reminder_users_by_id[datum["gargling_id"]]
        except KeyError:
            continue
        msg = (
            f"Du gikk {datum['amount']} skritt i går, by my preliminary calculations. "
            "Husk å synce hvis dette tallet er for lavt. "
            f"Denne reminderen kan skrus av <{config.server_name}/health|her>. Stay "
            "beautiful, doll-face!"
        )
        try:
            resp = slack_client.chat_postMessage(
                text=msg, channel=user_data["slack_id"]
            )
            if isinstance(resp, Future):  # no test coverage
                # satisfy mypy
                raise Exception()
            if resp.data.get("ok") is True:
                queries.update_reminder_ts(
                    conn, ts=resp.data["ts"], id=datum["gargling_id"]
                )
                conn.commit()
        except Exception:  # no test coverage
            log.error(
                f"Error sending sync reminder for slack id: {user_data['slack_id']}",
                exc_info=True,
            )
Example No. 7
def execute_operation(cur: cursor,
                      conn: connection,
                      commands: List[str],
                      verbose: bool = True) -> None:
    """
    Execute database operation.

    Parameters
    ----------
    cur : psycopg2.extensions.cursor
        Database cursor.
    conn : psycopg2.extensions.connection
        Database connection.
    commands : list[str]
        Commands or queries to be executed in the database.
    verbose : bool, optional
        Whether to be verbose.
    """
    for c in commands:
        if verbose:
            print(80 * '-')
            print(f'[{datetime.now()}] Running...')
            print(c)
        cur.execute(c)
        conn.commit()
        if verbose:
            print('Done!')
Example No. 8
def sentences_you_should_know(
    db: connection,
    offset: Tuple[int, int] = (0, 0),
    limit: Optional[int] = None,
) -> Tuple[int, List[Tuple[int, str, List[str]]]]:
    with db.cursor() as cur:
        cur.execute('select max(lvl) from sentences')
        (max_lvl_at_sentences, ) = cur.fetchone()
        # Let's update lvl in sentences for all the sentences that have words updated in the
        # levels from max_lvl_at_sentences:
        cur.execute(
            """
            with to_update(id) as (
                select s_id from sentence_words where lvl >= %s
                group by s_id order by s_id)
            update sentences
            set lvl = (
                select max(lvl) from sentence_words where s_id=id
                group by s_id having count(case when lvl is null then 1 end) = 0)
            where lvl is null and id in (select * from to_update)
        """, (0 if max_lvl_at_sentences is None else max_lvl_at_sentences +
              1, ))
        if cur.rowcount:
            db.commit()
        # Then we can do a fast query on only sentences:
        cur.execute(
            """
            select lvl, id, jpn, translations
            from sentences
            where (lvl, id) >= (%s, %s)
            order by lvl, id
        """ + ('limit %s' if limit is not None else ''),
            offset + ((limit, ) if limit is not None else ()),
        )
        return max_lvl_at_sentences, cur.fetchall()
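The `(lvl, id) >= (%s, %s)` row comparison is keyset pagination: each page resumes where the previous one ended, instead of re-scanning with OFFSET. A usage sketch that walks all sentences in pages of 100:

offset = (0, 0)
while True:
    _, rows = sentences_you_should_know(db, offset=offset, limit=100)
    if not rows:
        break
    for lvl, sentence_id, jpn, translations in rows:
        print(lvl, sentence_id, jpn)
    last = rows[-1]
    offset = (last[0], last[1] + 1)  # resume just past the last row seen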
Example No. 9
def download_and_import_google_image_index(conn: connection) -> None:
    """Download and import the Google image index into a PostgreSQL
    database with the spatial extension (PostGIS).

    Args:
        conn (connection): Connection to the database the data will be inserted into.
    Returns:
        None
    """

    _tmp_dir = temp.tempdir()
    landsat_file = os.path.join(_tmp_dir, "landsat.csv.gz")
    sentinel_file = os.path.join(_tmp_dir, "sentinel_index_l2.csv.gz")

    __download_file(GOOGLE_INDEX_LANDSAT, landsat_file)
    __download_file(GOOGLE_INDEX_SENTINEL_L2, sentinel_file)

    # Extract the downloaded data
    sentinel_file_extracted = __extract_gzfile(sentinel_file)
    landsat_file_extracted = __extract_gzfile(landsat_file)

    try:
        cur = conn.cursor()
        cur.copy_from(__remove_firstline_in_file(landsat_file_extracted),
                      "landsat_index",
                      sep=",")
        cur.copy_from(__remove_firstline_in_file(sentinel_file_extracted),
                      "sentinel_index",
                      sep=",")
        conn.commit()
    except BaseException as error:
        warnings.warn(
            f"Problem inserting the data into the database: \n {str(error)}")
    shutil.rmtree(_tmp_dir, ignore_errors=True)
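`__remove_firstline_in_file` is not shown here; for `copy_from` it only needs to return a file object positioned past the CSV header. A sketch of that assumed helper:

def __remove_firstline_in_file(path: str):
    # Open the extracted CSV and consume the header row, so copy_from
    # streams from the first data line onward.
    handle = open(path, "r")
    next(handle)
    return handle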
Example No. 10
def transfer_by_name(conn: connection, from_name: str, to_name: str,
                     amount: Union[Decimal, int]):
    try:
        transfer_by_name_impl(conn, from_name, to_name, amount)
        conn.commit()
    except MoneyAmountError:
        conn.rollback()
        raise
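`transfer_by_name_impl` is not shown; the wrapper above only makes sense if it raises `MoneyAmountError` mid-transaction, leaving changes for the caller to roll back. A sketch of what it might look like, with the `accounts` table and its columns assumed for illustration:

def transfer_by_name_impl(conn, from_name, to_name, amount):
    with conn.cursor() as cur:
        cur.execute("SELECT balance FROM accounts WHERE name = %s FOR UPDATE",
                    (from_name,))
        (balance,) = cur.fetchone()
        if balance < amount:
            # Raising here leaves a half-done transaction behind;
            # the caller's rollback() undoes it.
            raise MoneyAmountError(f"{from_name} has only {balance}")
        cur.execute("UPDATE accounts SET balance = balance - %s WHERE name = %s",
                    (amount, from_name))
        cur.execute("UPDATE accounts SET balance = balance + %s WHERE name = %s",
                    (amount, to_name))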
Example No. 11
def insert_metrics(conn: connection, metrics: List[dict]):
    if not metrics:
        return

    logger.info('Write messages ...')

    with conn.cursor() as cursor:
        extras.execute_batch(cursor, SQL_INSERT_METRICS, metrics)
        conn.commit()
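`execute_batch` re-runs the statement once per element of `metrics`, batching round trips; since each element is a dict, the statement must use named (pyformat) placeholders. A sketch of what `SQL_INSERT_METRICS` presumably looks like; the column names are assumptions:

SQL_INSERT_METRICS = """
    INSERT INTO metrics (name, value, recorded_at)
    VALUES (%(name)s, %(value)s, %(recorded_at)s)
"""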
Example No. 12
@pytest.fixture  # assumed decorator: the function yields a prepared connection
def conn(postgresql: connection):
    populate_user_table(postgresql)
    populate_pics_table(postgresql)
    populate_quotes_table(postgresql)
    populate_congrats_table(postgresql)
    populate_health_table(postgresql)
    postgresql.cursor_factory = RealDictCursor
    postgresql.commit()
    yield postgresql
Example No. 13
def create_comment(
    db_connection: connection,
    author_id: int,
    new_comment: NewComment,
    created_date: datetime,
) -> Comment:
    cursor = db_connection.cursor(cursor_factory=DictCursor)

    cursor.execute(
        """
            INSERT INTO comments(
                article_id,
                content,
                parent_comment_id,
                author_id,
                created_date
            )
            VALUES (
                %s,
                %s,
                %s,
                %s,
                %s
            )
            RETURNING
                id,
                article_id,
                author_id,
                content,
                parent_comment_id,
                created_date;
        """, (
            new_comment.article_id,
            new_comment.content,
            new_comment.parent_comment_id,
            author_id,
            created_date,
        ))

    created_comment_row = cursor.fetchone()
    db_connection.commit()
    cursor.close()

    return Comment(
        id=created_comment_row['id'],
        article_id=created_comment_row['article_id'],
        author_id=created_comment_row['author_id'],
        content=created_comment_row['content'],
        parent_comment_id=created_comment_row['parent_comment_id'],
        created_date=created_comment_row['created_date'],
    )
Example No. 14
def create_table_from_csv(
        conn: connection,
        table: str,
        csv_file: str,
        delim: str = ';',
        indexes: Optional[dict] = None
) -> bool:
    """Creates a table in PostgreSQL from a CSV header.
    If the table exists, it does nothing.

    :param conn: Psycopg2 connection to DB.
    :param table: (str) Name of the new table.
    :param csv_file: (str) Path of the CSV file.
    :param delim: (str) Delimiter used in the CSV.
    :param indexes: (dict) Indexes to create in format index_name -> [columns]
    :return: (bool) True if a new table was created.
    """

    query0 = """
        select exists (
            select * from information_schema.tables
            where table_name = %s);
    """
    query1 = sql.SQL('create table if not exists {} ();')
    query2 = sql.SQL('alter table {} add column {} %s;')
    with conn.cursor() as cur:
        cur.execute(query0, (table,))
        exists = cur.fetchone()[0]
    if exists:
        return False

    columns = get_columns(csv_file, delim=delim)
    # because table and column names are variable
    # create first an empty table, then add columns
    # it may raise
    with conn.cursor() as cur:
        cur.execute(query1.format(sql.Identifier(table)))
        for key, val in columns.items():
            col_name = key
            col_type = map_numpy_psql(val)
            cur.execute(query2.format(
                sql.Identifier(table),
                sql.Identifier(col_name)),
                (AsIs(col_type),)
            )
    if isinstance(indexes, dict):
        for key, val in indexes.items():
            create_index(conn, table, key, val)
    conn.commit()
    return True
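A usage sketch; the file name, columns, and index are made up for illustration:

created = create_table_from_csv(
    conn,
    table="measurements",
    csv_file="data/measurements.csv",
    delim=";",
    indexes={"ts": ["timestamp"]},  # creates index measurements_ts
)
print("created" if created else "already existed")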
Example No. 15
def archive_article(db_connection: connection, id: int):
    cursor = db_connection.cursor()

    cursor.execute(
        """
            UPDATE
                articles
            SET
                status_id = 3
            WHERE
                id = %s;
        """, (id, ))

    db_connection.commit()
    cursor.close()
Example No. 16
@contextmanager  # required so the generator can be used in a with-statement
def create_transaction(conn: connection, auto_commit: bool = False) -> Iterator[cursor]:
    cursor = conn.cursor()
    try:
        yield cursor
    except Exception:
        logging.debug('Rolling back')
        conn.rollback()
        raise
    else:
        if auto_commit:
            logging.debug('Committing')
            conn.commit()
        else:
            logging.debug('Rolling back')
            conn.rollback()
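With the decorator in place, the function is used as a with-block: the transaction is rolled back on error and, by default, on success too, so writes only persist with `auto_commit=True`. A usage sketch (table and values are illustrative):

with create_transaction(conn, auto_commit=True) as cur:
    cur.execute("UPDATE articles SET status_id = %s WHERE id = %s", (3, 42))
# committed on clean exit; an exception inside the block rolls back instead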
Example No. 17
@pytest.fixture  # assumed decorator: the function yields a prepared connection
def conn(postgresql: connection):
    commands.queries.create_schema(postgresql)
    pictures.queries.create_schema(postgresql)
    quotes.forum_queries.create_schema(postgresql)
    quotes.msn_queries.create_schema(postgresql)
    greetings.queries.create_schema(postgresql)
    health.queries.create_schema(postgresql)
    journey.queries.create_schema(postgresql)
    populate_user_table(postgresql)
    populate_pics_table(postgresql)
    populate_quotes_table(postgresql)
    populate_congrats_table(postgresql)
    postgresql.cursor_factory = LoggingCursor
    postgresql.commit()
    yield postgresql
Example No. 18
def execSql(conn: ext.connection, stmt: str, commit: bool = False,
            params: Optional[tuple] = None) -> ext.cursor:
    if conn is None or conn.closed:
        raise _EXCEPTION_NoConnection

    cursor = conn.cursor()
    try:
        cursor.execute(stmt, params)
        if commit:
            conn.commit()
        return cursor
    except SyntaxError:
        conn.rollback()
        raise
    except Exception as cause:
        conn.rollback()
        raise SqlExcutionError(f"Execution ERR[{cause}]({stmt})", cause)
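The cursor comes back open, so its lifetime belongs to the caller. A usage sketch:

cur = execSql(conn, "SELECT 1")
try:
    print(cur.fetchone())
finally:
    cur.close()  # execSql returns the cursor open; the caller must close it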
Example No. 19
def delete_sync_reminders(conn: connection, slack_client) -> None:
    reminder_users = queries.get_sync_reminder_users(conn)
    log.info(reminder_users)
    for user in reminder_users:
        if user["last_sync_reminder_ts"] is None:  # no test coverage
            continue
        try:
            slack_client.chat_delete(
                channel=user["slack_id"], ts=user["last_sync_reminder_ts"]
            )
        except Exception:  # no test coverage
            log.error(
                f"Error deleting sync reminder for user id: {user['id']}",
                exc_info=True,
            )
        queries.update_reminder_ts(conn, ts=None, id=user["id"])
        conn.commit()
Example No. 20
def drop_tables(cur: psycopg2Ext.cursor, conn: psycopg2Ext.connection) -> None:
    """
    Description: Drop each table using queries in
        `drop_table_queries` list.

    Arguments:
        cur (psycopg2Ext.cursor): cursor object
        conn (psycopg2Ext.connection): connection object

    Returns:
        None
    """
    for query in drop_table_queries:
        try:
            cur.execute(query)
        except psycopg2.Error as e:
            msg = f"ERROR: Could not drop table with query: {query}"
            logger.warning("%s: %s", msg, e)
            return
        conn.commit()
Example No. 21
def insert_salary(db_connection: connection, salary_data: PostSalaryReportData,
                  created_date: datetime) -> None:
    cursor = db_connection.cursor(cursor_factory=DictCursor)

    cursor.execute(
        """
            INSERT INTO salaries
                (position_id, city_id, technology_id, annual_salary, work_experience, created_date)
            VALUES
                (%s, %s, %s, %s, %s, %s);
        """, (
            salary_data.position_id,
            salary_data.city_id,
            salary_data.technology_id,
            salary_data.annual_salary,
            salary_data.work_experience,
            created_date,
        ))

    db_connection.commit()
    cursor.close()
Example No. 22
def migrate(dir: str, conn: connection, schema: Optional[str] = None):
    _prepare_meta_table(conn, schema)
    pm = _fetch_previous_migrations(_get_schema_cursor(conn, schema))
    pmi = iter(pm)
    cm = next(pmi, None)

    for sql_file in _get_sql_files(dir):
        if sql_file.is_equal(cm):
            print('Skipping {}'.format(sql_file.path))
            cm = next(pmi, None)
        else:
            curs = _get_schema_cursor(conn, schema)
            _execute_file(sql_file, curs)
            curs.close()
            conn.commit()
Example No. 23
def create_schema(con: connection) -> None:
    """
    Loads 'schema/app.sql' (built with the platform path separator) and
    applies it to the database over the given connection.

    Args:
        con: A connection to the database.

    Raises:
        DatabaseError: An error occurred.
    """
    platform = os.environ[OS_ENVIRON_PLATFORM_KEY]
    _LOG.info(f"platform: {platform}")
    cur = get_cursor(con)
    if platform == 'AWS':
        sql_stmt = """SET SESSION AUTHORIZATION dbo;"""
        execute_sql(cur, sql_stmt, "auth dbo")

    sql_file = f"schema{SEP}app.sql"
    execute_sql_from_file(cur, sql_file, "create schema")
    cur.close()
    con.commit()
Example No. 24
def load_table_from_csv(
        conn: connection,
        table: str,
        filep: str,
        **kwargs
):
    """Load records from a CSV file onto a table
    in PostgreSQL.

    Assume the CSV and the DB have coherent format. Raises
    exception otherwise.

    :param conn: (connection) Database connection (psycopg2)
    :param table: (str) Table to be populated
    :param filep: (str) CSV file path
    :param delim: (str) Delimiter used in the CSV. Default to ';'
    :param headers: (bool) True (default) if the CSV contains headers.
                    If False, then columns must be provided.
    :param columns: (list) Column names in the CSV. Ignored if headers is True.
    """

    delim = kwargs.get('delim', ';')
    headers = kwargs.get('headers', True)
    columns = kwargs.get('columns', None)

    with open(filep, 'rb') as freader:
        if headers:
            head = next(freader)
            cols = head.decode().strip().split(delim)
        else:
            cols = columns
        with conn.cursor() as cur:
            cur.copy_from(
                freader,
                table=table,
                sep=delim,
                null='',
                columns=cols)
    conn.commit()
Example No. 25
def process_data(cur: cursor, conn: connection, filepath: str,
                 func: Callable) -> None:
    """
    Process each data file found under `filepath` using `func`.
    """

    # get all files matching extension from directory
    all_files = []
    for root, dirs, files in os.walk(filepath):
        files = glob.glob(os.path.join(root, "*.json"))
        for f in files:
            all_files.append(os.path.abspath(f))

    # get total number of files found
    num_files = len(all_files)
    print(f"{num_files} files found in {filepath}")

    # iterate over files and process
    for i, datafile in enumerate(all_files, 1):
        func(cur, datafile)
        conn.commit()
        print(f"{i}/{num_files} files processed.")
Example No. 26
def add_sentence(m: Mecab, db: connection, jpn: str) -> Tuple[int, int]:
    entries = m(jpn)
    with db.cursor() as cur:
        cur.execute('select coalesce(max(lvl)+1, 0) from added_sentences')
        new_level = cur.fetchone()[0]
        cur.execute(
            'with known as (select id from features where feature in (' +
            ','.join(['%s'] * len(entries)) + ') order by id)'
            'update sentence_words '
            '   set lvl = %s '
            ' where f_id in (select * from known) and lvl is null',
            (*[e.normalized() for e in entries], new_level),
        )
        rowcount = cur.rowcount
        if rowcount > 0:
            cur.execute(
                'insert into added_sentences(lvl, jpn) values(%s, %s)',
                (new_level, jpn),
            )
        db.commit()
        print('Updated {} words with new level {}'.format(rowcount, new_level))
        return rowcount, new_level
Example No. 27
def write_db(conn: connection, data: List[EpisodeData], showname: str) -> int:
    c = conn.cursor()
    tuples = [(s.audio_url, s.title, s.description, showname, s.date)
              for s in data]
    log.info("Got %d episodes, let's see how many are already in the database",
             len(tuples))
    numchange = 0
    for t in tuples:
        c.execute("select count(*) from rrepisodes where audio_url = %s",
                  (t[0], ))
        num = c.fetchone()
        if num[0] == 1:
            continue
        log.info("New episode on %s", t[4])
        c.execute(
            """insert into rrepisodes (audio_url,title,description,show,date)
            values (%s,%s,%s,%s,%s) """,
            t,
        )
        numchange += 1
    conn.commit()
    return numchange
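The count-then-insert check races if two writers run concurrently. Assuming a unique constraint on `rrepisodes.audio_url` (not confirmed by this excerpt), the dedup can be pushed into the INSERT itself:

c.execute(
    """insert into rrepisodes (audio_url, title, description, show, date)
       values (%s, %s, %s, %s, %s)
       on conflict (audio_url) do nothing""",
    t,
)
numchange += c.rowcount  # 1 if inserted, 0 if the episode already existed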
Example No. 28
    def steps(self, date: pendulum.Date, conn: connection) -> t.Optional[int]:
        log.info("Getting polar steps")
        trans = self._get_transaction()
        if trans is not None:
            activities = trans.list_activities()["activity-log"]
            log.info(f"number of activities: {len(activities)}")
            steps_by_date: dict[pendulum.Date, list] = defaultdict(list)
            for activity in activities:
                summary = trans.get_activity_summary(activity)
                log.info(summary)
                parsed = pendulum.parse(summary["date"])
                assert isinstance(parsed, pendulum.DateTime)
                taken_at = parsed.date()
                created_at = pendulum.parse(summary["created"])

                n_steps = summary["active-steps"]
                log.info(f"n steps {created_at}: {n_steps}")
                steps_by_date[taken_at].append(
                    {"n_steps": n_steps, "created_at": created_at}
                )
            not_past: list[dict] = []
            for activity_date, activity_list in steps_by_date.items():
                activity_list.sort(key=itemgetter("created_at"))
                last_synced = activity_list[-1]
                if activity_date < date:
                    continue
                last_synced["gargling_id"] = self.gargling_id
                last_synced["taken_at"] = activity_date
                log.info(f"last_synced, {activity_date}: {last_synced}")
                not_past.append(last_synced)
            queries.upsert_steps(conn, not_past)
            conn.commit()
            trans.commit()
        todays_data = queries.cached_step_for_date(conn, date=date, id=self.gargling_id)
        steps = todays_data["n_steps"] if todays_data is not None else 0
        return steps
Example No. 29
def create_index(
        conn: connection,
        table: str,
        index: str,
        columns: list
):
    """Create an index on a PostgreSQL database table."""

    log.debug('Creating index on table %s, columns %s',
              table, columns)
    query = sql.SQL("""
        create index {idx_name}
        on {table} ({cols});
    """)
    idx_name = sql.Identifier(table + '_' + index)
    table = sql.Identifier(table)
    cols = sql.SQL(', ').join(map(sql.Identifier, columns))
    with conn.cursor() as cur:
        cur.execute(query.format(
            idx_name=idx_name,
            table=table,
            cols=cols))
    conn.commit()
    log.debug('Index created')
Example No. 30
def create_tables(connection: PostgresConnection) -> None:
    """Create empty tables in database."""
    with connection.cursor() as cursor:
        for sql in get_sql_script("create_tables"):
            cursor.execute(sql)
    connection.commit()
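`get_sql_script` is not shown; judging from the loop, it returns the individual statements of a named .sql file. A minimal sketch, assuming scripts live in a `sql/` directory and statements are separated by semicolons:

from pathlib import Path
from typing import List

def get_sql_script(name: str) -> List[str]:
    # Split the script on ';' and drop blank fragments; fine for simple
    # DDL files without semicolons inside string literals.
    path = Path("sql") / f"{name}.sql"
    return [stmt for stmt in path.read_text().split(";") if stmt.strip()]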