Example no. 1
def db_downgrade(step):
    """
    Downgrades a DB to a previous version, as specified by `step`
    :param step: number of downgrades to do
    :return: None
    """
    # each migration file is named <timestamp>.sql; keep just the timestamp
    to_use = [_.rsplit('.', 1)[0] for _ in migration_files()]

    # since it's a downgrade, the migrations must be walked in reverse order
    to_use.reverse()

    generate_migration_file()
    dbd_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')

    try:
        count = 0
        for _ in to_use:
            count += 1
            if MySQLScheme.fetch_one(REVISION_EXISTS,
                                     args={'revision': _}):
                MySQLScheme.commit(getattr(dbd_query, f"downgrade_{_}").sql)
                LOGGER.info(f"successful downgrade: {_}")
            if count == step:
                break
    except errors.ProgrammingError:
        LOGGER.info("no more downgrades left")
Example no. 2
def db_initialise():
    """
    Create the migrations folder and DB table if they are non-existent
    :return: None
    """
    generate_migration_file()
    if not MySQLScheme.fetch_one(IS_MIGRATION_TABLE,
                                 args={'schema': SCHEMA}):
        with open(MIGRATION_FILE, 'r') as init_sql:
            data = init_sql.read()

            if f"CREATE TABLE IF NOT EXISTS {MIGRATION_TABLE}" not in data:
                when = str(int(time.time()))
                sql_file = os.path.join(MIGRATION_FOLDER, f"{when}.sql")

                with open(sql_file, 'w') as save_sql:
                    up = MYSQL_MIGRATION_UP.format(f"upgrade-{when}", when,
                                                   MIGRATION_TABLE)
                    down = MYSQL_MIGRATION_DOWN.format(f"downgrade-{when}",
                                                       MIGRATION_TABLE)

                    save_sql.write("\n\n".join([up, down]))
                    LOGGER.info(f"migration file: "
                                f"{os.path.join('migrations', sql_file)}")
            else:
                when = re.findall('[0-9]+', data)[0]

            generate_migration_file()
            dbi_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')
            MySQLScheme.commit(getattr(dbi_query, f"upgrade_{when}").sql)
            LOGGER.info(f"initial successful migration: {when}")
Example no. 3
    def __init__(self):
        self.pgconn = psycopg2.connect(PGCONNSTRING)
        self.queries = None

        for sql in ['sql/album.sql', 'sql/artist.sql', 'sql/genre-topn.sql']:
            queries = anosql.from_path(sql, 'psycopg2')
            if self.queries:
                for qname in queries.available_queries:
                    self.queries.add_query(qname, getattr(queries, qname))
            else:
                self.queries = queries
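
Example no. 3 above and Example no. 7 below share the same merge idiom: `anosql.from_path` returns a fresh queries object per file, so the queries from several files are folded onto a single object via `available_queries` and `add_query`. A minimal standalone sketch of that idiom (the file list and driver are placeholders):

import anosql

def load_queries(sql_files, driver='psycopg2'):
    """Fold the named queries from several .sql files onto one object."""
    merged = None
    for path in sql_files:
        queries = anosql.from_path(path, driver)
        if merged is None:
            merged = queries
        else:
            # copy each named query onto the first object loaded
            for qname in queries.available_queries:
                merged.add_query(qname, getattr(queries, qname))
    return merged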
Example no. 4
def initdb(data):
    click.echo('-> Initializing database...')
    db.create_all()
    admin_user = User(username='******', name='Administrator')
    admin_user.set_password('laevus')
    db.session.add(admin_user)
    db.session.commit()
    sql_queries = anosql.from_path(os.path.join(data, 'schema.sql'), 'psycopg2')
    with sqla_raw_conn() as cnx:
        sql_queries.create_views(cnx)
    click.echo('-> Database initialized.')
Example no. 5
 def post(self):
     observation_dict = self.post_parser.parse_args(strict=True)
     insert_query = anosql.from_path(
         os.path.join(os.path.dirname(picinceptor.__file__), 'database',
                      'insert_observation.sql'),
         'psycopg2').insert_observation
     with psycopg2.connect(host=current_app.config['DB_HOST'],
                           port=current_app.config.get('DB_PORT', 5432),
                           user=current_app.config['DB_USER'],
                           password=current_app.config['DB_PASS'],
                           dbname=current_app.config['DB_NAME']) as cnx:
         insert_query(cnx, **observation_dict)
Example no. 6
def db_upgrade():
    """
    Runs an upgrade on a DB using the generated `MIGRATION_FILE`
    :return: None
    """
    generate_migration_file()
    dbu_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')

    # each migration file is named <timestamp>.sql; keep just the timestamp
    for time_step in [_.rsplit('.', 1)[0] for _ in migration_files()]:
        decide = MySQLScheme.fetch_one(REVISION_EXISTS,
                                       args={'revision': time_step})
        if not decide:
            MySQLScheme.commit(getattr(dbu_query, f"upgrade_{time_step}").sql)
            LOGGER.info(f"successful migration: {time_step}")
        else:
            LOGGER.info(f'migration already exists: {time_step}')
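
Examples 1, 2, and 6 come from the same migration helper: each timestamped migration file is expected to define a pair of named queries, `upgrade-<timestamp>` and `downgrade-<timestamp>` (Example no. 2 renders them from `MYSQL_MIGRATION_UP` and `MYSQL_MIGRATION_DOWN`). anosql converts dashes in query names to underscores, which is why the code can reach them as `getattr(dbu_query, f"upgrade_{time_step}")`. A hypothetical migration file in that convention:

-- name: upgrade-1598123456
CREATE TABLE users (id INT PRIMARY KEY, name VARCHAR(64));

-- name: downgrade-1598123456
DROP TABLE users;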
Example no. 7
 def __init__(self, user: str, password: str, host: str, port: int,
              database: str):
     tables = ["repositories", "contributors", "stats_contributions"]
     self.connection = self._create_connection(user, password, host, port,
                                               database)
     # init queries
     self.queries = None
     for table in tables:
         queries = anosql.from_path(f"{service.ROOT_DIR}/sql/{table}.sql",
                                    "psycopg2")
         if self.queries:
             for qname in queries.available_queries:
                 self.queries.add_query(qname, getattr(queries, qname))
         else:
             self.queries = queries
         # init db
         getattr(self.queries, f"create_{table}")(self.connection)
Example no. 8
    def __enter__(self):
        self.conn = sqlite3.connect(self.filename)

        # Load functions
        functions = inspect.getmembers(self.sqlhelpers, inspect.isfunction)
        for name, function in functions:
            parameters = inspect.signature(function).parameters
            self.conn.create_function(name, len(parameters), function)

        # Load queries
        queries = anosql.from_path("sql", "sqlite3")
        for query in queries.available_queries:
            function = getattr(queries, query)
            setattr(self, query, functools.partial(function, self.conn))
            self.queries[query] = set()
            for match in anosql.patterns.var_pattern.finditer(function.sql):
                var_name = match.group("var_name")
                if var_name:
                    self.queries[query].add(var_name)

        return self
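
Example no. 8's introspection step is worth a note: anosql exposes the regular expression it uses for `:name`-style placeholders as `anosql.patterns.var_pattern`, with a `var_name` capture group, so the parameters a loaded query expects can be recovered from its `.sql` attribute. A reduced sketch of that step (the query text here is made up):

from anosql.patterns import var_pattern

def query_params(sql_text):
    """Return the named parameters referenced in a query's SQL text."""
    names = set()
    for match in var_pattern.finditer(sql_text):
        var_name = match.group('var_name')
        if var_name:  # the pattern also matches quoted strings, which lack this group
            names.add(var_name)
    return names

# query_params('SELECT * FROM obs WHERE id = :obs_id') should yield {'obs_id'}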
Example no. 9
 def get(self):
     query = anosql.from_path(
         os.path.join(os.path.dirname(picinceptor.__file__), 'database',
                      'select_observations.sql'),
         'psycopg2').get_observations
     with psycopg2.connect(host=current_app.config['DB_HOST'],
                           port=current_app.config.get('DB_PORT', 5432),
                           user=current_app.config['DB_USER'],
                           password=current_app.config['DB_PASS'],
                           dbname=current_app.config['DB_NAME']) as cnx:
         rows = query(cnx)
     # build response
     res = {'type': 'FeatureCollection', 'features': []}
     features = res['features']
     for row in rows:
         (obs_id, obs_date, woodpecker_id, breeding_code, habitat,
          dominant_tree, has_dead_trees, has_conifers, observer_first_name,
          observer_surname, observer_school, geojson) = row
         observer_str = '{0} {1}'.format(observer_first_name,
                                         observer_surname).strip()
         observer_str = ('{0} [{1}]'.format(observer_str, observer_school)
                         if observer_school.strip() else
                         observer_str).strip()
         features.append({
             'type': 'Feature',
             'id': obs_id,
             'properties': {
                 'observationDate': obs_date.strftime('%Y-%m-%d'),
                 'woodpeckerId': woodpecker_id,
                 'breedingCode': breeding_code,
                 'habitat': habitat,
                 'dominantTree': dominant_tree,
                 'hasDeadTrees': has_dead_trees,
                 'hasConifers': has_conifers,
                 'observer': observer_str
             },
             'geometry': json.loads(geojson)
         })
     return res
Example no. 10
#!/usr/bin/env python3

from fastapi import FastAPI, File, UploadFile
from starlette.responses import FileResponse, Response

import anosql
import datetime
import json
import shutil
import sqlite3
import tempfile

queries = anosql.from_path('queries.sql', 'sqlite3')


def run_query_on_db(db_path, query_name):
    with sqlite3.connect(db_path) as con:
        return getattr(queries, query_name)(con)[0][0]


def run_query_on_form(file, query_name):
    with tempfile.NamedTemporaryFile() as tf:
        shutil.copyfileobj(file.file, tf)
        return run_query_on_db(tf.name, query_name)


def generate_filename(extension):
    return 'geopap-{}.{}'.format(datetime.date.today(), extension)


app = FastAPI()
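
Every example above leans on anosql's named-query file format: each statement is preceded by a `-- name:` comment, and that name becomes a method on the object `from_path` returns. A self-contained sketch of the round trip, using `anosql.from_str` (the string-based counterpart of `from_path`) and an in-memory SQLite database; the query names and schema are invented, and the `!` suffix marks statements that modify the database:

import sqlite3

import anosql

SQL = """
-- name: create_greetings!
CREATE TABLE greetings (id INTEGER PRIMARY KEY, greeting TEXT);

-- name: add_greeting!
INSERT INTO greetings (greeting) VALUES (:greeting);

-- name: get_greetings
SELECT greeting FROM greetings;
"""

queries = anosql.from_str(SQL, 'sqlite3')

with sqlite3.connect(':memory:') as con:
    queries.create_greetings(con)
    queries.add_greeting(con, greeting='hello')
    print(queries.get_greetings(con))  # expected: [('hello',)]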
Example no. 11
def queries():
    dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            "blogdb", "sql")
    return anosql.from_path(dir_path, "sqlite3")
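
This fixture (like Example no. 19 at the end) hands `from_path` a directory instead of a single file; anosql then loads the queries from every `.sql` file it finds there. A sketch under that assumption, with a made-up layout:

import anosql

# blogdb/sql/ holds e.g. blogs.sql and users.sql, each with '-- name:' queries
queries = anosql.from_path('blogdb/sql', 'sqlite3')
print(queries.available_queries)  # the names collected from all the files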
Example no. 12
 def add_queries_from_path(self, fn: str):
     """Load queries from a file or directory."""
     self._create_fns(sql.from_path(fn, self._db))
Example no. 13
 def table_queries(self):
     return anosql.from_path(f"{SQL_DIR}/create_tables.sql", "psycopg2")
Example no. 14
 def drop_duplicates_queries(self):
     return anosql.from_path(f"{SQL_DIR}/drop_duplicates.sql", "psycopg2")
Example no. 15
def get_query(config_path):
    query = anosql.from_path(config_path, 'psycopg2')
    return query
Example no. 16
 def insert_queries(self):
     return anosql.from_path(f"{SQL_DIR}/insert_values.sql", "psycopg2")
Example no. 17
 def create_index_queries(self):
     return anosql.from_path(f"{SQL_DIR}/create_indexes.sql", "psycopg2")
Example no. 18
 def __init__(self):
     self.connection = self._connect_db()
     self.queries = anosql.from_path(
         f"{api.ROOT_DIR}/sql/stats_contributions.sql", "psycopg2")
Example no. 19
def queries():
    dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "blogdb", "sql")
    return anosql.from_path(dir_path, "psycopg2")