Esempio n. 1
0
    def do_query(conn, mode: str, query: str, data=None, **kwargs):
        """
        Execute *query* after having parameterised it.

        - *conn*: connection object returned by .connect() method.
        - *mode*: r (SELECT), w (INSERT), wr (INSERT RETURNING).
        - *query*: string query to be parameterised and executed.
        - *data*: collection or dictionary containing data for placeholders.
        - *kwargs*: kwargs to be parameterised and used to form query string.

        Returns a psycopg2 iterable cursor object in r or wr mode (the
        caller must close it after reading). Otherwise None.

        Raises ValueError if *conn* is None, DatabaseError (with an
        enhanced message) if execution fails.

        See here for cursor object https://www.psycopg.org/docs/cursor.html
        """
        validate_mode_arg(mode)
        if data is not None:
            validate_data_arg_type(data, mode)

        if conn is None:
            # The original code did `raise (ValueError, msg)`, which raises
            # a tuple -- a TypeError in Python 3. Raise properly instead.
            raise ValueError("The \"conn\" argument seems to be None.")

        with conn:
            curs = conn.cursor()

            parameterized_query = parameterize_query(data=data,
                                                     query=query,
                                                     **kwargs)

            if mode in ["r", "wr"]:
                try:
                    curs.execute(parameterized_query, data)
                except DatabaseError as error:
                    # Chain the original exception for full tracebacks.
                    raise DatabaseError(
                        psycopg2_exception_enhanced(error)) from error

                return curs  # user to curs.close() after reading from it.

            try:
                curs.execute(parameterized_query, data)
            except DatabaseError as error:
                raise DatabaseError(
                    psycopg2_exception_enhanced(error)) from error
            finally:
                # 'with' context manager commits or rolls back; close the
                # cursor explicitly in case something went wrong.
                curs.close()
Esempio n. 2
0
    def insert_one(self, newtuple, retkey=False):
        """Execute a single-tuple INSERT command

        :param newtuple: the tuple to be inserted
        :param retkey: indicates assigned key values should be returned
        :return: a Tuple carrying the assigned key values if *retkey*,
                 otherwise None
        :raises DatabaseError: if exactly one row was not inserted
        """
        attrnames = [name for name, typ in newtuple._heading]
        targets = '(%s)' % ", ".join(attrnames)
        values_list = 'VALUES (%s)' % ", ".join(
            ['%%(%s)s' % name for name in attrnames])
        cmd = "INSERT INTO %s %s %s" % (self.name, targets, values_list)
        if retkey:
            cmd += " RETURNING %s" % ", ".join(self.key)
        curs = self.db.execute(cmd, tuple_values_dict(newtuple))
        try:
            if curs.rowcount != 1:
                self.db.rollback()
                raise DatabaseError(
                    "Failed to add %s %r" % (self.extname, self))
            if retkey:
                attrdict = dict(self.attributes)
                rettuple = Tuple(
                    [Attribute(name, attrdict[name].type)
                     for name in self.key])
                row = curs.fetchone()
                for attr, type_ in rettuple._heading:
                    setattr(rettuple, attr, row[attr])
        finally:
            # Close the cursor even on the rollback/raise path -- the
            # original leaked it when rowcount != 1.
            curs.close()
        if retkey:
            return rettuple
Esempio n. 3
0
    def update_one(self, newtuple, keytuple, currtuple=None):
        """Execute a single-tuple UPDATE command using the primary key

        :param newtuple: Tuple with new values
        :param keytuple: Tuple with key values
        :param currtuple: previous version of newtuple
        :raises DatabaseError: if exactly one row was not updated
        """
        def update_one_cmd():
            # Rebuild the cached statement whenever there are changed
            # values: the SET list depends on exactly which attributes
            # changed on this call.
            if changed_values or not hasattr(self, 'update_one_cmd'):
                setlist = "SET %s" % ", ".join([
                    '%s = %%(%s)s' % (c, c)
                    for c in list(changed_values.keys())
                ])
                self.update_one_cmd = "UPDATE %s %s %s" % (
                    self.name, setlist,
                    self.where_clause(currtuple is not None))
            return self.update_one_cmd

        if currtuple:
            changed_values = tuple_values_dict(currtuple, newtuple)
            if not changed_values:
                # Nothing changed -- nothing to do.
                return
        else:
            changed_values = tuple_values_dict(newtuple)
        values = self.key_values_update(keytuple, currtuple)
        values.update(changed_values)
        curs = self.db.execute(update_one_cmd(), values)
        try:
            if curs.rowcount != 1:
                self.db.rollback()
                raise DatabaseError("Failed to update %s %r" %
                                    (self.extname, self))
        finally:
            # Close the cursor even on the rollback/raise path -- the
            # original leaked it when rowcount != 1.
            curs.close()
Esempio n. 4
0
    def _execute(self, query, params=None):
        """Fake execute: raise DatabaseError if configured to, otherwise
        expose the canned results as cursor state."""
        if self._throw_on_execute:
            raise DatabaseError()

        canned = self._results
        self.description = canned[0]
        self.rowcount = len(canned[1])
        self._has_been_read = True
Esempio n. 5
0
def run_migrations_offline():
    """Run migrations in 'offline' mode."""
    # Offline migrations must never run against the test database.
    if os.environ.get("TESTING"):
        raise DatabaseError("Test migrations offline is not permitted.")

    context.configure(url=str(DATABASE_URL))

    with context.begin_transaction():
        context.run_migrations()
Esempio n. 6
0
def run_migrations_offline() -> None:
    """Configure Alembic and run migrations in 'offline' mode."""
    # Refuse to run when the TESTING environment flag is set.
    testing = os.environ.get("TESTING")
    if testing:
        raise DatabaseError(
            "Running testing migrations offline currently not permitted")

    alembic.context.configure(url=str(DATABASE_URL))
    with alembic.context.begin_transaction():
        alembic.context.run_migrations()
Esempio n. 7
0
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode, refusing test databases."""
    suffix = os.environ.get("DB_SUFFIX", "")
    if "test" in suffix:
        raise DatabaseError(
            "Running testing migrations offline currently not permitted.")

    alembic.context.configure(url=str(DATABASE_URL))
    with alembic.context.begin_transaction():
        alembic.context.run_migrations()
Esempio n. 8
0
def run_migrations_offline() -> None:
    """Run migrations in "offline" mode."""
    # Disallowed in a test environment (truthy TESTING variable).
    if os.environ.get("TESTING"):
        raise DatabaseError("Running testing migrations offline is not permitted.")

    alembic.context.configure(url=os.environ["DATABASE_URL"])

    with alembic.context.begin_transaction():
        alembic.context.run_migrations()
Esempio n. 9
0
def run_migrations_offline():
    """Run migrations in 'offline' mode."""
    # Never allowed against the test database.
    if os.environ.get("TESTING"):
        raise DatabaseError(
            "Running testing migrations offline currently not permitted.")
    context.configure(
        url=str(settings.SQLALCHEMY_DATABASE_URI),
        target_metadata=target_metadata,
        literal_binds=True,
        compare_type=True,
    )
    with context.begin_transaction():
        context.run_migrations()
Esempio n. 10
0
    def connect(self, schema=None):
        """
        Attempt a connection to a PostgreSQL instance using either a
        series of DSN kwargs or a libpq string. If *schema* is passed,
        the search path is set to that schema (creating it if needed) so
        all subsequent queries will be executed there.

        - *schema*: schema to connect to. If not provided, then public.

        Returns a psycopg2 connection object.

        Raises DatabaseError (with an enhanced message) if the connection
        or the schema setup fails. (The previous docstring claimed None
        was returned on failure; the code has always raised instead.)

        See here for conn class https://www.psycopg.org/docs/connection.html
        """

        try:
            if self.libpq_string is not None:
                connection = psycopg2.connect(**parse_dsn(self.libpq_string))
            else:
                connection = psycopg2.connect(**self.kwargs)

        except DatabaseError as error:
            # Chain the original exception for full tracebacks.
            raise DatabaseError(psycopg2_exception_enhanced(error)) from error

        if connection is not None and schema is not None:
            with connection:
                with connection.cursor() as cur:
                    schema = sql.Identifier(schema)
                    try:
                        cur.execute(
                            sql.SQL(
                                "create schema if not exists {schema}; "
                                "set search_path to {schema}, public;").format(
                                    schema=schema))

                    except Exception as error:
                        raise DatabaseError(
                            psycopg2_exception_enhanced(error)) from error

        return connection
Esempio n. 11
0
def run_migrations_offline() -> None:
    '''Run migrations in 'offline' mode (disallowed for test databases).'''
    if 'test' in os.environ.get('DB_SUFFIX', ''):
        raise DatabaseError(
            'Running testing migrations offline currently not permitted')

    alembic.context.configure(url=str(DATABASE_URL))
    with alembic.context.begin_transaction():
        alembic.context.run_migrations()
Esempio n. 12
0
class CommonErrorStrategyTestCase(TestCase):
    """Checks CommonErrorStrategy.handle_exception per exception type."""

    # Maps an exception instance to the (handled, action) pair expected
    # back from handle_exception. NOTE(review): 'vaue' is a historical
    # typo, kept so any external reference to the attribute still works.
    exception_to_vaue_dict = {
        InterfaceError(): (True, TERMINATE),
        DatabaseError(choice(CommonErrorStrategy.BACKOFFABLE_MESSAGE)):
        (True, BACKOFF),
        OperationalError(): (True, TERMINATE),
        Exception(): (False, TERMINATE)
    }

    def setUp(self):
        self.strat = CommonErrorStrategy()

    def test_strategy_returns_correct_values(self):
        # dict.iteritems() is Python 2 only; items() works on 2 and 3.
        for exception, value in self.exception_to_vaue_dict.items():
            return_value = self.strat.handle_exception(exception)
            self.assertEqual(return_value, value)
Esempio n. 13
0
    def delete_one(self, keytuple, currtuple=None):
        """Execute a single-tuple DELETE command using the primary key

        :param keytuple: Tuple with key values
        :param currtuple: tuple from previous get
        :raises DatabaseError: if exactly one row was not deleted
        """
        def delete_one_cmd():
            # Build and cache the DELETE statement on first use.
            # NOTE(review): the cached command bakes in whether currtuple
            # was provided on the first call -- confirm later calls always
            # use the same calling pattern.
            if not hasattr(self, 'delete_one_cmd'):
                self.delete_one_cmd = "DELETE FROM %s %s" % (
                    self.name, self.where_clause(currtuple is not None))
            return self.delete_one_cmd

        values = self.key_values_update(keytuple, currtuple)
        curs = self.db.execute(delete_one_cmd(), values)
        try:
            if curs.rowcount != 1:
                self.db.rollback()
                raise DatabaseError("Failed to delete %s %r" %
                                    (self.extname, self))
        finally:
            # Close the cursor even on the rollback/raise path -- the
            # original leaked it when rowcount != 1.
            curs.close()
def test_connect_err(mocker):
    """do_connect should surface a connection failure as (None, error)."""
    mocker.patch('psycopg2.connect', side_effect=DatabaseError('DSN invalid'))
    conn, err = PostgreSQLAlt.do_connect('some_invalid_dsn')
    assert err is not None
    assert conn is None
Esempio n. 15
0
 def test_connection_fail_select(self):
     """validate_connection should wrap a DatabaseError in DbtConfigError."""
     self.mock_execute.side_effect = DatabaseError()
     with self.assertRaises(DbtConfigError):
         DebugTask.validate_connection(self.target_dict)
     expected = mock.call('/* dbt */\nselect 1 as id', None)
     self.mock_execute.assert_has_calls([expected])
def test_get_config_err(conn):
    """get_config should surface a fetch failure as (None, error)."""
    conn.fetchall_side_effect = DatabaseError('Something went wrong')
    config, err = PostgreSQLAlt.get_config(conn)
    assert err is not None
    assert config is None
Esempio n. 17
0
def insert_scene_metadata(metadata, use_gee, raster=None):
    """
        inserts the most important scene metadata before starting the
        inversion procedure into the OBIA4RTM PostgreSQL database

        Parameters
        ----------
        metadata : Dictionary
            Sentinel-2 scene metadata
        use_gee : Boolean
            true if GEE was used, false if Sen2Core was used
        raster : String
            File-path to the Sentinel-2 imagery in case Sen2core was used

        Returns
        -------
        None

        Raises
        ------
        DatabaseError
            if the INSERT statement fails (the connection is closed
            either way)
        """
    # open database connection
    conn, cursor = connect_db()
    # get sensor and scene_id
    sensor, scene_id = get_sensor_and_sceneid(metadata)
    # get mean angles from scene-metadata
    # tto -> sensor zenith angle
    # psi -> relative azimuth angle between sensor and sun
    tto, psi = get_mean_angles(metadata)
    # sun zenith angle
    tts = get_sun_zenith_angle(metadata)
    # the footprint is already a PostGIS SQL expression and must therefore
    # be inlined in the statement, not passed as a bound parameter
    footprint_statement = get_scene_footprint(metadata, gee=use_gee)
    # full metadata as JSON
    metadata_json = json.dumps(metadata)
    # storage drive and filename of the image raster data
    # this part only applies to Sen2Core preprocessing
    if use_gee:
        storage_drive = 'NA: Google Earth Engine'
        filename = 'NA: Google Earth Engine'
    else:
        storage_drive, filename = os.path.split(raster)
    # get acquisition time and date
    acquisition_time, acquisition_date = get_acqusition_time(metadata)
    # Parameterized INSERT: scalar values are bound via placeholders
    # instead of being interpolated with manual quoting, which broke on
    # values containing quotes (e.g. the JSON blob) and was injectable.
    statement = (
        "INSERT INTO public.scene_metadata (acquisition_time, scene_id, "
        "sun_zenith, obs_zenith, rel_azimuth, sensor, footprint, "
        "full_description, storage_drive, filename) "
        "VALUES (%s,%s,%s,%s,%s,%s,{0},%s,%s,%s) "
        "ON CONFLICT (scene_id) DO NOTHING;".format(footprint_statement)
    )
    params = (acquisition_time, scene_id, tts, tto, psi, sensor,
              metadata_json, storage_drive, filename)
    try:
        cursor.execute(statement, params)
        conn.commit()
    except DatabaseError as error:
        # chain the original error; the old code also had an unreachable
        # sys.exit() after the raise, which is dropped here
        raise DatabaseError('Insert of metadata failed!') from error
    finally:
        # always release the database connection, also on failure
        # (the original leaked it when the INSERT raised)
        close_db_connection(conn, cursor)