def get_course_names(self, prefix=None):
    """Return the distinct course class names from the sqlite courses DB.

    Args:
        prefix: optional class-name prefix; when given, only names
            starting with it are returned.

    Returns:
        The raw row list produced by ``executesql``.
    """
    db = DAL('sqlite://courses.db', folder='dbs')
    db.define_table('courses', Field('class_id', type='integer'),
                    Field('class_name'), Field('date_time'),
                    Field('descriptive_link'), Field('enrolled'),
                    Field('instructor'), Field('link_sources'),
                    Field('location'), Field('status'))
    if prefix is not None:
        # Parameterized query: the original concatenated *prefix* straight
        # into the SQL (injection risk) and omitted the quotes around the
        # LIKE pattern, making the statement invalid for text prefixes.
        return db.executesql(
            'SELECT DISTINCT class_name FROM courses WHERE class_name LIKE ?',
            placeholders=[prefix + '%'])
    return db.executesql('SELECT DISTINCT class_name FROM courses')
Ejemplo n.º 2
0
def indexx():
    """Register a DAL URI for every database on the local SQL Server.

    web2py action: truncates *outfile* (opened for the side effect only,
    as in the original — nothing is written) and returns the DBREG
    registry populated via ``registerx``.

    NOTE(review): credentials are hard-coded in the URI — move them to
    configuration before shipping.
    """
    import os.path

    # os.path.join replaces the original backslash literals, whose '\m'
    # and '\d' are invalid escape sequences.
    models_dir = os.path.join(os.getcwd(), 'applications',
                              request.application, 'models')
    outfile = os.path.join(models_dir, 'database_registry.py.out')

    from pydal import DAL, Field
    db = DAL('mssql4://BuildDbAdmin:Alt0ids76@localhost/master')
    results = db.executesql('select * from sys.databases')
    with open(outfile, 'w') as f:
        for row in results:
            registerx(row.name,
                      'mssql4://BuildDbAdmin:Alt0ids76@localhost/' + row.name)

    return DBREG
Ejemplo n.º 3
0
def index():
    """Register a DAL URI for every database on the local SQL Server and
    return the DBREG registry (web2py action).

    NOTE(review): credentials are hard-coded in the URI — move them to
    configuration before shipping.
    """
    # os.path.join replaces the original backslash literals, whose '\m'
    # and '\d' are invalid escape sequences.  The unused 'existingfile'
    # path was dropped.
    models_dir = os.path.join(os.getcwd(), 'applications',
                              request.application, 'models')
    outfile = os.path.join(models_dir, 'database_registry.py.out')

    db = DAL('mssql4://BuildDbAdmin:Alt0ids76@localhost/master')
    results = db.executesql('select * from sys.databases')
    with open(outfile, 'w') as f:
        for row in results:
            register(row.name,
                     'mssql4://BuildDbAdmin:Alt0ids76@localhost/' + row.name)

    # Debug banners kept from the original.
    print("XXXXXXXXXXXXXXXXXRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRR")
    print(DBREG)
    print("XXXXXXXXXXXXXXXXXRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRR")
    return DBREG
Ejemplo n.º 4
0
 def setUp():
     """Populate the database registry from the local SQL Server.

     Test fixture; original had inconsistent indentation (the register
     call sat at a different depth than its loop) — normalized here.

     NOTE(review): credentials are hard-coded in the URI, and 'request'
     must be in scope (web2py global) — confirm the execution context.
     """
     import os.path
     from pydal import DAL, Field

     # os.path.join replaces the original backslash literals, whose '\m'
     # and '\d' are invalid escape sequences.
     models_dir = os.path.join(os.getcwd(), 'applications',
                               request.application, 'models')
     outfile = os.path.join(models_dir, 'database_registry.py.out')

     db = DAL('mssql4://BuildDbAdmin:Alt0ids76@localhost/master')
     results = db.executesql('select * from sys.databases')
     with open(outfile, 'w') as f:
         for row in results:
             register(row.name,
                      'mssql4://BuildDbAdmin:Alt0ids76@localhost/' + row.name)
     return None
Ejemplo n.º 5
0
def index():
    """Collect stored-procedure names from every non-system database on
    the local SQL Server.

    Returns ``locals()`` (web2py view contract), so local variable names
    are part of the interface and are preserved from the original.
    """
    db = DAL('mssql4://BuildDbAdmin:Alt0ids76@localhost/master')
    results = db.executesql('select name from sys.databases')
    appdbs = []
    sysdbs = ['master', 'tempdb', 'model', 'msdb']
    for i in range(len(results)):
        # Py3 fix: the original .encode('ascii', 'ignore') produced
        # *bytes*, which never matched the str entries in sysdbs and
        # later broke the "::" string concatenation.
        results[i] = results[i].name
        if results[i] not in sysdbs:
            appdbs.append(results[i])

    procName = []
    for i in range(len(appdbs)):
        db = appdbs[i]  # shadowing kept: 'db' ends up in locals()
        cnxn = pyodbc.connect(
            r'Driver={SQL Server};Server=localhost;Database=' + db +
            r';Trusted_Connection=yes;')
        try:
            cursor = cnxn.cursor()
            cursor.execute(
                "SELECT * FROM information_schema.routines WHERE ROUTINE_TYPE = 'PROCEDURE'"
            )
            # Iterating the cursor replaces the original while-1/fetchone.
            for row in cursor:
                procName.append(db + "::" + row.ROUTINE_NAME)
        finally:
            # The original leaked one connection per database.
            cnxn.close()

    return locals()
Ejemplo n.º 6
0
class dataSource:
    """pyDAL data-access object for the bienes / controles / personal /
    usuario tables, supporting several database back ends."""

    db = ""  # class-level placeholder; replaced per instance in __init__

    def __init__(self, host, user, passw, database, port, tipo_bd):
        """Open the connection selected by *tipo_bd* and bind the tables.

        NOTE(review): *port* is accepted but never used in any URI —
        confirm whether non-default ports must be supported.
        """
        if tipo_bd == "sqlite":
            self.db = DAL("sqlite://" + database + ".db")
        elif tipo_bd == "db2":
            # DB2 URIs carry no host component here.
            self.db = DAL("db2://" + user + ":" + passw + "@" + database)
        else:
            # Every remaining back end shares scheme://user:pass@host/db.
            schemes = {
                "mysql": "mysql",
                "postgres": "postgres",
                "sqlserver": "mssql4",
                "firebird": "firebird",
                "oracle": "oracle",
            }
            if tipo_bd in schemes:
                self.db = DAL(schemes[tipo_bd] + "://" + user + ":" + passw +
                              "@" + host + "/" + database)
            # Unknown tipo_bd: self.db keeps the class-level placeholder,
            # and define_table below fails — same behavior as original.
        """URI shapes for back ends not yet wired up:
        Ingres    ingres://user:password@localhost/dbname
        Sybase    sybase://user:password@localhost/dbname
        Informix  informix://user:password@dbname
        Teradata  teradata://DSN=dsn;UID=user;PWD=password;DATABASE=dbname
        Cubrid    cubrid://user:password@localhost/dbname
        SAPDB     sapdb://user:password@localhost/dbname
        IMAP      imap://user:password@server:port
        MongoDB   mongodb://user:password@localhost/dbname
        """

        # Bind pre-existing table 1 (migrations disabled).
        self.db.define_table(
            "bienes",
            Field("id_bienes", type="id"),  # primary key
            Field("identificacion", type="integer"),
            Field("tipo"),
            Field("serie"),
            Field("marca"),
            Field("codigo", type="integer"),
            Field("nombre"),
            Field("ficha_formacion", type="integer"),
            migrate=False)
        # Bind pre-existing table 2.
        self.db.define_table(
            "controles",
            Field("id_control", type="id"),  # primary key
            Field("identificacion", type="integer"),
            Field("fechahe", type="datetime"),
            Field("fechahs", type="datetime"),
            Field("control"),
            migrate=False)
        # Bind pre-existing table 3.
        self.db.define_table(
            "personal",
            Field("id_personal", type="id"),  # primary key
            Field("nombre"),
            Field("identificacion", type="integer"),
            Field("telefono", type="integer"),
            Field("correo"),
            Field("ficha_formacion", type="integer"),
            Field("rol"),
            migrate=False)
        # Bind pre-existing table 4.
        self.db.define_table(
            "usuario",
            Field("id_usuario", type="id"),  # primary key
            Field("user"),
            Field("clave"),
            Field("rol"),
            migrate=False)
        """Available field types:
        string, text, blob, boolean, integer, double, decimal(n, m),
        date, time, datetime, password, upload, reference <table>,
        list:string, list:integer, list:reference <table>, json,
        bigint, big-id, big-reference
        """

    def query(self, sql):
        """Execute *sql* and commit; return True on success, else False."""
        try:
            self.db.executesql(sql)
            self.db.commit()
            return True
        except Exception:
            # Narrowed from a bare except; roll back so the connection
            # is not left inside a failed transaction.
            self.db.rollback()
            return False

    def getData(self, sql):
        """Execute a query, commit, and return the raw result rows."""
        q = self.db.executesql(sql)
        self.db.commit()
        return q
Ejemplo n.º 7
0
class DBClient:
    """pyDAL client for the wells / clients / projects tables, which use
    string uuid primary keys (migrations disabled)."""

    log = customlogger(globalconfig.logging_level)

    def __init__(self, uri_db):
        """Connect to *uri_db* and bind the three table definitions."""
        self.log.info("creating instance of DBClient: {0}".format(uri_db))
        self.db = DAL(uri_db, migrate_enabled=False)
        self.wells = self.db.define_table('wells',
                                          Field('uuid'),
                                          Field('project_uuid'),
                                          Field('well_name'),
                                          Field('uwi'),
                                          Field('created_at'),
                                          Field('modified_at'),
                                          primarykey=['uuid'])
        self.clients = self.db.define_table('clients',
                                            Field('uuid'),
                                            Field('company_name'),
                                            Field('created_at'),
                                            Field('modified_at'),
                                            primarykey=['uuid'])
        self.projects = self.db.define_table('projects',
                                             Field('uuid'),
                                             Field('client_uuid'),
                                             Field('name'),
                                             Field('created_at'),
                                             Field('modified_at'),
                                             Field('basin'),
                                             Field('shapefile'),
                                             primarykey=['uuid'])

    @add_post_func_delay(0.5)
    def insert_well(self, *args):
        """Insert a well row; args = (well_name, uwi)."""
        self.log.info("inserting well into DB..")
        # Single timestamp so created_at == modified_at at insert time
        # (the original called now() twice, yielding unequal values).
        now = datetime.datetime.now()
        self.wells.insert(uuid=uuid.uuid4(),
                          well_name=args[0],
                          uwi=args[1],
                          created_at=now,
                          modified_at=now)
        self.db.commit()

    @add_post_func_delay(0.5)
    def insert_client(self, *args):
        """Insert a client row; args = (company_name,)."""
        self.log.info("inserting client into DB..")
        now = datetime.datetime.now()
        self.clients.insert(uuid=uuid.uuid4(),
                            company_name=args[0],
                            created_at=now,
                            modified_at=now)
        self.db.commit()

    # TODO: reinstate insert_project once the front-end schema is settled
    # (the project-type column no longer exists on the DB table).

    @add_post_func_delay(0.5)
    def delete_table(self, tablename):
        """Truncate *tablename* with CASCADE and commit.

        NOTE(review): relies on ``self.db.get(tablename)`` — confirm the
        DAL instance exposes dict-style ``get``; ``self.db[tablename]``
        is the documented accessor.
        """
        self.log.info("deleteing table {0}".format(tablename))
        table = self.db.get(tablename)
        table.truncate(mode='CASCADE')
        self.db.commit()

    def execute_sql(self, sql):
        """Run raw SQL and return the driver result."""
        self.log.info("executing sql: '{0}'".format(sql))
        return self.db.executesql(sql)

    def close(self):
        """Close the underlying DAL connection."""
        self.log.info("closing DB instance..")
        self.db.close()
Ejemplo n.º 8
0
from pydal import DAL, Field
from datetime import datetime

# Local sqlite database holding market snapshots, open buys and picks.
db = DAL('sqlite://download.db')

# Rolling per-market price snapshots.
market = db.define_table(
    'market',
    Field('name'),
    Field('ask', type='double'),
    Field('timestamp', type='datetime', default=datetime.now),
)
for _ddl in (
    'CREATE INDEX IF NOT EXISTS tidx ON market (timestamp);',
    'CREATE INDEX IF NOT EXISTS m_n_idx ON market (name);',
):
    db.executesql(_ddl)

# Open positions.
buy = db.define_table(
    'buy',
    Field('market'),
    Field('purchase_price', type='double'),
    Field('selling_price', type='double'),
    Field('amount', type='double'),
)
db.executesql('CREATE INDEX IF NOT EXISTS sidx ON buy (selling_price);')

# Candidate trades with observed price movement.
picks = db.define_table(
    'picks',
    Field('market'),
    Field('old_price', type='double'),
    Field('new_price', type='double'),
    Field('gain', type='double'),
    Field('timestamp', type='datetime', default=datetime.now),
)
Ejemplo n.º 9
0
class DataBase:
    """PostgreSQL access layer built on pyDAL, with Qt thread-pool support
    for opening connections off the GUI thread."""

    def __init__(self,
                 username,
                 password,
                 host='localhost',
                 dbname='postgres',
                 port=5432,
                 pool_size=5):
        """Build the connection URI and open a pooled DAL handle.

        NOTE(review): exporting PGPASSWORD makes the password visible to
        child processes — confirm an external tool actually requires it.
        """
        self.schema = 'soad'
        self.username = username
        self.host = host
        self.port = port
        self.dbname = dbname
        self.folder = 'Resources' + os.sep + 'database'
        os.environ["PGPASSWORD"] = password
        self.password = password
        self.dbinfo = ('postgres://' + username + ':' + password + '@' +
                       host + ':' + str(port) + '/' + self.dbname)
        self.db = DAL(self.dbinfo,
                      folder=self.folder,
                      pool_size=pool_size,
                      migrate=False,
                      attempts=1)
        self.connection = None
        self.threadpool = QThreadPool()

    def busca_registro(self,
                       nome_tabela,
                       coluna,
                       valor='',
                       operador='=',
                       filtro=None):
        """Query rows through the fnc_buscar_registro stored function.

        NOTE(review): all arguments are spliced into the SQL unescaped —
        SQL-injection risk if any value originates from user input.
        """
        if filtro == '' or filtro is None:
            filtro = '1=1'  # neutral predicate

        sql = "select * from " + self.schema + ".fnc_buscar_registro(" \
              + "p_tabela=>" + "'" + nome_tabela + "'" \
              + ", p_coluna=>" + "'" + coluna + "'" \
              + ", p_valor=>" + "'" + valor + "'" \
              + ", p_operador=>" + "'" + operador + "'" \
              + ", p_filtro=>" + "'" + filtro + "'" \
              + ");"

        return self.execute_sql(sql)

    def get_registro(self, fnc, campo, valor):
        """Call stored function *fnc* with a single named parameter.

        NOTE(review): same unescaped-SQL caveat as busca_registro.
        """
        sql = "select * from " + self.schema + "." + fnc + "(" \
              + "p_" + campo + "=>" + "'" + str(valor) + "'" \
              + ");"

        return self.execute_sql(sql)

    def call_procedure(self, schema='soad', params=None):
        """Invoke fnc_chamada_de_metodo with a JSON parameter payload.

        Empty-string parameters are stripped before serialization.
        """
        if not params:
            return

        # Collect keys first, then delete — avoids mutating the dict
        # while iterating over it.
        vazio = [nome for nome, valor in params["params"].items()
                 if valor == '']
        logging.info('[DataBase] Parâmetros vazios: ' + str(vazio))
        for nome in vazio:
            del params["params"][nome]

        params = json.dumps(params, ensure_ascii=False)
        sql = "select * from " + schema + ".fnc_chamada_de_metodo(" \
              + "p_json_params=>" + "'" + params + "'" \
              + ");"

        return self.execute_sql(sql)

    def execute_sql(self, sql, as_dict=True):
        """Execute *sql* and commit.

        Returns (success, rows_or_errors, sql_text).  The original's
        second, bare ``except:`` was removed: after ``except Exception``
        it could only swallow SystemExit / KeyboardInterrupt, which must
        propagate.
        """
        retorno = list()
        try:
            retorno = self.db.executesql(query=sql, as_dict=as_dict)
            self.db.commit()
            logging.debug('[DataBase] status=' + str(True))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] retorno=' + str(retorno))
            prc = True, retorno, str(self.db._lastsql)

        except Exception as e:
            self.db.rollback()
            logging.debug('[DataBase] status=' + str(False))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] exception=' + str(e))
            retorno.append(e)
            prc = False, retorno, str(sql)

        return prc

    def __conectar_banco__(self, progress_callback):
        """Open the DAL connection; clears PGPASSWORD on failure."""
        try:
            self.connection = self.db()
        except Exception as e:
            logging.debug('[DataBase] ' + str(e))
            os.environ["PGPASSWORD"] = ''
        return self

    def definir_schema(self, schema):
        """Switch the active search_path to *schema*."""
        self.schema = schema
        self.execute_sql("SET search_path TO " + self.schema, as_dict=False)

    def fechar_conexao(self):
        """Close the underlying DAL connection."""
        self.db.close()

    def progress_fn(self, n):
        # Progress callback usable with the worker signals.
        print("%d%% done" % n)

    def retorno_conexao(self, s):
        # Worker result callback: store the opened connection.
        self.connection = s

    def thread_complete(self):
        logging.debug('[DataBase] Thread Completed')

    def abrir_conexao(self):
        """Open the DB connection on a background Qt worker thread."""
        worker = Worker(self.db.__call__)
        worker.signals.result.connect(self.retorno_conexao)
        worker.signals.finished.connect(self.thread_complete)
        self.threadpool.start(worker)
Ejemplo n.º 10
0
from pydal import DAL, Field

# ODBC connection string using Windows trusted authentication against
# the local SQL Server's master database.
_MSSQL_URI = 'mssql://Driver={SQL Server};Server=localhost;Database=master;Trusted_Connection=yes;'

db = DAL(_MSSQL_URI)
results = db.executesql("select name from sys.databases")
print(results)
    except DecompressionBombError as e:
        print(f"DecompressionBomb, Skipping image {image} (#{n})")
    else:
        image_ids.append(int(image_id_re.match(image)[1]))
        img_array = keras.preprocessing.image.img_to_array(img)
        img_array = tf.expand_dims(img_array, 0)  # Create batch axis
        images.append(img_array)

scores = handle_predictions_batch(n, images, model, scores)

db = DAL('sqlite://flower_storage.db', folder='./data')
define_tables(db)

# Reset: mark every image dead before re-flagging the live ones.
db(db.images.id > 0).update(is_live=False)

db.commit()

# Flag the non-flower images (score > 0.5).  Guard against an empty
# index list: the original rendered "IN()", an SQL syntax error.
is_non_flower_indexes = scores[scores > 0.5].index
ids = ','.join(list(is_non_flower_indexes.astype('str')))
if ids:
    db.executesql(
        "UPDATE images SET is_live='T', is_non_flower='T' WHERE id IN({})".format(
            ids))
db.commit()

# Flag the flower images (score <= 0.5), same guard.
is_flower_indexes = scores[scores <= 0.5].index
ids = ','.join(list(is_flower_indexes.astype('str')))
if ids:
    db.executesql(
        "UPDATE images SET is_live='T', is_non_flower='F' WHERE id IN({})".format(
            ids))
db.commit()

# %%
','.join(['1', '2', '3'])  # notebook-cell leftover; no effect
Ejemplo n.º 12
0
import os.path

# Build the output path with os.path.join instead of the original
# backslash literals (which contained invalid '\m' / '\d' escape
# sequences); the unused '.bak' path was dropped.
models_dir = os.path.join(os.getcwd(), 'applications',
                          request.application, 'models')
outfile = os.path.join(models_dir, 'database_registry.py.out')

from pydal import DAL, Field

# Register a DAL URI for every database on the local SQL Server.
# NOTE(review): credentials are hard-coded — move them to configuration.
db = DAL('mssql4://BuildDbAdmin:Alt0ids76@localhost/master')
results = db.executesql('select * from sys.databases')
with open(outfile, 'w') as f:
    for row in results:
        register(row.name,
                 'mssql4://BuildDbAdmin:Alt0ids76@localhost/' + row.name)
Ejemplo n.º 13
0
class KeyValueStore(object):
    """sqlite-backed key/value store (pickled blob values) with optional
    version synchronization against a remote HTTP endpoint."""

    def __init__(self):
        self.db = DAL('sqlite://keyvalstore.db', folder=get_config_dir())
        self.db.define_table(
            'store', Field('key', type='string', required=True, unique=True),
            Field('value', type='blob'),
            Field('version', type='integer', default=0))

    def __del__(self):
        self.db.close()

    def set(self, key, value):
        """Insert or update *value* (pickled) under *key*.

        Raw SQL is used because pydal does not appear to support blob
        values directly.
        """
        res = self.db.executesql('SELECT value from store where key=?',
                                 placeholders=[key])
        if len(res) > 0:
            self.db.executesql('UPDATE store SET value=? where key=?',
                               placeholders=[pickle.dumps(value), key])
        else:
            self.db.executesql(
                'INSERT INTO "store"("key","value") VALUES (?, ?)',
                placeholders=[key, pickle.dumps(value)])
        self.db.commit()

    def get(self, key, default=""):
        """Return the unpickled value for *key*, or *default* if absent."""
        res = self.db.executesql('SELECT value from store where key=?',
                                 placeholders=[key])
        return default if len(res) == 0 else pickle.loads(res[0][0])

    def set_versioned(self, key, value, version):
        """Push *value* to the remote store, then mirror it locally."""
        response = requests.post(
            'https://dibser.vserverli.de/php/keyvaluestore.php?key={}&version={}'
            .format(key, version),
            data=pickle.dumps(value),
            headers={'Content-Type': 'application/octet-stream'})
        response.raise_for_status()
        res = self.db.executesql('SELECT value from store where key=?',
                                 placeholders=[key])
        if len(res) > 0:
            self.db.executesql(
                'UPDATE store SET value=?, version=? where key=?',
                placeholders=[pickle.dumps(value), version, key])
        else:
            self.db.executesql(
                'INSERT INTO "store"("key","value", "version") VALUES (?, ?, ?)',
                placeholders=[key, pickle.dumps(value), version])
        self.db.commit()

    def get_versioned(self, key, default=""):
        """Return (value, version), preferring the remote copy if newer.

        Bug fix: when *key* had no local row, the original referenced
        ``local_version`` before assignment (NameError).  It now defaults
        to 0 so the remote lookup still runs and any stored remote
        version counts as newer.
        """
        res = self.db.executesql('SELECT value,version from store where key=?',
                                 placeholders=[key])
        value = None
        newest_version = local_version = 0
        if len(res) > 0:
            newest_version = local_version = res[0][1]
            if not local_version:
                local_version = 1
            value = res[0][0]

        # Remote replies 200 with a newer version, 404 if the key is
        # unknown, 304 if the local version is current.
        response = requests.get(
            'https://dibser.vserverli.de/php/keyvaluestore.php?key={}&version={}'
            .format(key, local_version))
        if response.status_code == 200:
            newest_version = int(response.headers['X-Keyvalstore-Version'])
            value = response.content
            print(
                "Remote version is newer for key={}. Local version={} remote version={}"
                .format(key, local_version, newest_version))

        return (default, -1) if value is None else (pickle.loads(value),
                                                    newest_version)
Ejemplo n.º 14
0
class DataBase:
    """PostgreSQL access layer built on pyDAL, configured from .env
    environment variables."""

    def __init__(self, dbname='postgres', schema='Acervo', pool_size=5):
        """Load credentials via dotenv and open a pooled DAL handle."""
        load_dotenv()

        self.dbname = dbname
        self.schema = schema
        self.username = os.getenv("DBUSERNAME")
        self.password = os.getenv("DBPASS")
        self.host = os.getenv("DBHOST")
        self.port = os.getenv("DBPORT")
        self.folder = 'Resources' + os.sep + 'database'

        # str() guards against None from missing env vars: the URI is
        # still malformed in that case, but construction will not raise.
        self.dbinfo = \
            'postgres://' + str(self.username) + ':' + str(self.password) + '@' \
            + str(self.host) + ':' + str(self.port) + '/' + str(self.dbname)

        self.db = DAL(
            self.dbinfo,
            folder=self.folder,
            pool_size=pool_size,
            migrate=False,
            attempts=1
        )
        self.connection = None

    def execute_sql(self, sql, as_dict=True):
        """Execute *sql* and commit.

        Returns (success, rows_or_errors, sql_text).  The original's
        trailing bare ``except:`` was removed: after ``except Exception``
        it could only catch SystemExit / KeyboardInterrupt, which should
        propagate.
        """
        retorno = list()
        try:
            retorno = self.db.executesql(query=sql, as_dict=as_dict)
            self.db.commit()
            logging.debug('[DataBase] status=' + str(True))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] retorno=' + str(retorno))
            prc = True, retorno, str(self.db._lastsql)

        except Exception as e:
            self.db.rollback()
            logging.debug('[DataBase] status=' + str(False))
            logging.debug('[DataBase] sql=' + str(sql))
            logging.debug('[DataBase] exception=' + str(e))
            retorno.append(e)
            prc = False, retorno, str(sql)

        return prc

    def __conectar_banco__(self):
        """Open the DAL connection; log and continue on failure."""
        try:
            self.connection = self.db()
        except Exception as e:
            logging.debug('[DataBase] ' + str(e))
        return self

    def definir_schema(self, schema):
        """Switch the active search_path to *schema*."""
        self.schema = schema
        self.execute_sql("SET search_path TO " + self.schema, as_dict=False)

    def fechar_conexao(self):
        """Close the underlying DAL connection."""
        self.db.close()