def test_connect_badargs(monkeypatch, pgconn, args, kwargs, exctype):
    """Bad arguments to psycopg.connect() must raise *exctype*.

    The low-level connect is stubbed so no real connection is attempted.
    """
    def fake_connect(conninfo):
        return pgconn

    # Fix: the stub was defined but never installed, so a real connection
    # could be attempted; also removed a stray ``yield`` that turned the
    # test into a generator (a collection error in modern pytest).
    monkeypatch.setattr(psycopg.connection, "connect", fake_connect)
    with pytest.raises(exctype):
        psycopg.connect(*args, **kwargs)
def check_row_factory_connection() -> None:
    """Type-check connect(..., row_factory=<MyRowFactory>)
    or Connection.row_factory cases.
    """
    # NOTE: the bare expressions below (``r1 != 0``, ``r2 and r2.name``,
    # ``r3 and len(r3)``) have no runtime effect; they exist purely so a
    # static type checker verifies the row types inferred for each
    # row_factory choice.
    conn1: Connection[int]
    cur1: Cursor[int]
    r1: Optional[int]
    conn1 = connect(row_factory=int_row_factory)
    cur1 = conn1.execute("select 1")
    r1 = cur1.fetchone()
    r1 != 0
    with conn1.cursor() as cur1:
        cur1.execute("select 2")

    conn2: Connection[Person]
    cur2: Cursor[Person]
    r2: Optional[Person]
    conn2 = connect(row_factory=Person.row_factory)
    cur2 = conn2.execute("select * from persons")
    r2 = cur2.fetchone()
    r2 and r2.name
    with conn2.cursor() as cur2:
        cur2.execute("select 2")

    # Default row factory: plain tuples.
    cur3: Cursor[Tuple[Any, ...]]
    r3: Optional[Tuple[Any, ...]]
    conn3 = connect()
    cur3 = conn3.execute("select 3")
    with conn3.cursor() as cur3:
        cur3.execute("select 42")
        r3 = cur3.fetchone()
        r3 and len(r3)
def test_connect_badargs(monkeypatch, pgconn, args, kwargs):
    """connect() must reject bad args with TypeError or ProgrammingError."""
    def fake_connect(conninfo):
        return pgconn

    # Fix: removed a stray ``yield`` that turned this test into a
    # generator (modern pytest reports generator tests as errors).
    monkeypatch.setattr(psycopg.connection, "connect", fake_connect)
    with pytest.raises((TypeError, psycopg.ProgrammingError)):
        psycopg.connect(*args, **kwargs)
def test_dsn_env(self, dsn, monkeypatch):
    """conn.info.dsn must reflect PGAPPNAME from the environment."""
    params = conninfo_to_dict(dsn)
    params.pop("application_name", None)

    # Without PGAPPNAME no application_name appears in the dsn.
    monkeypatch.delenv("PGAPPNAME", raising=False)
    with psycopg.connect(**params) as conn:
        assert "application_name=" not in conn.info.dsn

    # With PGAPPNAME set it shows up, quoted.
    monkeypatch.setenv("PGAPPNAME", "hello test")
    with psycopg.connect(**params) as conn:
        assert "application_name='hello test'" in conn.info.dsn
def test_get_params_env(self, dsn, monkeypatch):
    """get_parameters() must reflect PGAPPNAME from the environment."""
    params = conninfo_to_dict(dsn)
    params.pop("application_name", None)

    # Unset: the key must be absent from the reported parameters.
    monkeypatch.delenv("PGAPPNAME", raising=False)
    with psycopg.connect(**params) as conn:
        assert "application_name" not in conn.info.get_parameters()

    # Set: the value must come straight from the environment.
    monkeypatch.setenv("PGAPPNAME", "hello test")
    with psycopg.connect(**params) as conn:
        assert conn.info.get_parameters()["application_name"] == "hello test"
def test_connect_args(monkeypatch, pgconn, args, kwargs, want):
    """connect(*args, **kwargs) must produce the expected conninfo string."""
    the_conninfo = None

    def fake_connect(conninfo):
        # Capture the conninfo the wrapper builds, skip the real connection.
        nonlocal the_conninfo
        the_conninfo = conninfo
        return pgconn

    # Fix: removed a stray ``yield`` that turned this test into a
    # generator (modern pytest reports generator tests as errors).
    monkeypatch.setattr(psycopg.connection, "connect", fake_connect)
    psycopg.connect(*args, **kwargs)
    assert conninfo_to_dict(the_conninfo) == conninfo_to_dict(want)
def abre_coneccion(self): try: f = open("coneccion.rc", 'r') l = f.readline()[:-1] self.cnx = connect(l) f.close() print "Conección por archivo" except: DB = config.DB user = config.user password = config.password host = config.host self.cnx = connect(DB + " " + user + " " + password + " " + host) print "Conección base"
def abre_coneccion(self): try: f = open("coneccion.rc", 'r') l = f.readline()[:-1] self.cnx = connect(l) f.close() print "Conección por archivo" except: DB = config.DB user = config.user password = config.password host = config.host self.cnx = connect(DB + " " + user + " " + password + " " + host ) print "Conección base"
def main() -> None:
    """Populate a throwaway ``testdec`` table with random decimals.

    Module-level globals choose the column count (``ncols``), row count
    (``nrows``), wire format (``format``) and which code path to exercise
    (``test``: "copy" or "insert").
    """
    cnn = psycopg.connect()

    # Build "create table testdec (t0 numeric(10,2), t1 ...)" safely.
    col_defs = [
        sql.SQL("{} numeric(10,2)").format(sql.Identifier(f"t{i}"))
        for i in range(ncols)
    ]
    create_stmt = sql.SQL("create table testdec ({})").format(
        sql.SQL(", ").join(col_defs)
    )
    cnn.execute(create_stmt)

    cur = cnn.cursor()
    if test == "copy":
        copy_stmt = f"copy testdec from stdin (format {format.name})"
        with cur.copy(copy_stmt) as copy:
            for _ in range(nrows):
                row = [Decimal(randrange(10000000000)) / 100 for i in range(ncols)]
                copy.write_row(row)
    elif test == "insert":
        # Text vs binary placeholder, selected by the format's value.
        ph = ["%t", "%b"][format]
        stmt = "insert into testdec values (%s)" % ", ".join([ph] * ncols)
        rows_gen = (
            [Decimal(randrange(10000000000)) / 100 for i in range(ncols)]
            for j in range(nrows)
        )
        cur.executemany(stmt, rows_gen)
    else:
        raise Exception(f"bad test: {test}")
def __init__(self, hostname, dbname, schema, go_association, go_graph, size, type, informative):
    # When a schema is given, data comes from the database; otherwise the
    # two tab-delimited files (go_association, go_graph) are read instead.
    self.schema = schema
    if self.schema:
        #input from database
        self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
        self.curs = self.conn.cursor()
        self.curs.execute("set search_path to %s"%self.schema)
    else:
        #input from files
        self.goa_f = csv.reader(open(go_association, 'r'), delimiter='\t')
        self.gog_f = csv.reader(open(go_graph, 'r'), delimiter='\t')
    # CLI arguments arrive as strings; normalize to int.
    self.size = int(size)
    self.type = int(type)
    self.informative = int(informative)
    # ``type`` selects the output routine: 0=full, 1=stat, 2=between.
    output_format_dict = {0:self.output_full, 1:self.output_stat, 2:self.output_between}
    if self.type in output_format_dict:
        self._output = output_format_dict[self.type]
    else:
        sys.stderr.write('Type %d invalid\n'%self.type)
        sys.exit(2)
    #biological_process is the root
    self.root = 'GO:0008150'
    self.go_graph = kjGraph()
    self.go_id2gene_id_dict = {}  #value utilizes kjSet data structure
    self.go_id2go_name = {}  #mapping between a go_id and its name
    self.go_id_descendent2gene_id_dict = {}
    self.informative_node_dict = {}
    self.log_file = open('/tmp/go_informative_node.log', 'w')
def run(paccs, outf):
    """Look up domain rows for every protein accession and write them out.

    paccs -- sequence of protein names (matched against dom_prot.pname)
    outf  -- writable file object, passed through to output()
    """
    conn = psycopg.connect("dbname=SETdb")
    curs = conn.cursor()
    # Fix: dropped the no-op self-assignments (paccs=paccs, outf=outf),
    # the dead commented-out CLI-parsing code, and the index loop.
    for pacc in paccs:
        curs.execute(
            "select * from dom_prot where pname=%s order by start", (pacc,))
        rows = curs.fetchall()
        if rows:
            output(outf, rows)
def __init__(self, hostname, dbname, schema, source_table, target_table, offset, limit,
             output, bonferroni=0, report=0, log=0, wu=0, needcommit=0):
    # Connect and point search_path at the requested schema.
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.source_table = source_table
    self.target_table = target_table
    # offset/limit restrict which rows of the source table are processed.
    self.offset = offset
    self.limit = limit
    self.output = output
    if self.output:
        #filename exists
        self.outf = open(self.output, 'w')
    # Flag options arrive as strings from the CLI; normalize to int.
    self.bonferroni = int(bonferroni)
    self.report = int(report)
    self.log = int(log)
    self.wu = int(wu)
    self.needcommit = int(needcommit)
    # Caches shared across all clusters processed by this instance.
    self.global_go_id_to_no_dict = {}
    self.global_go_no_to_size_dict = {}
    self.global_gene_to_go_dict = {}
    self.no_of_records = 0
    if self.log:
        self.logfile = open('/tmp/cluster_stat.log','w')
    self.cluster_memory = {}
def __init__(self, hostname, dbname, schema, table, mcl_table, tag, p_value_list, unknown_list, connectivity_list,
             recurrence_list, size_list, leave_one_out, wu, judger_type, depth_list, dir_files=None, dominant=0, needcommit=0,
             needcommit_of_gene_stat=0, gene_table='p_gene'):
    # DB connection scoped to the given schema.
    self.hostname = hostname
    self.dbname = dbname
    self.schema = schema
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.table = table
    self.mcl_table = mcl_table
    self.tag = tag
    # Parameter grids swept over by the stat runs.
    self.p_value_list = p_value_list
    self.unknown_list = unknown_list
    self.connectivity_list = connectivity_list
    self.recurrence_list = recurrence_list
    self.cluster_size_list = size_list
    # CLI flags arrive as strings; normalize to int.
    self.leave_one_out = int(leave_one_out)
    self.wu = int(wu)
    self.judger_type = int(judger_type)
    self.depth_list = depth_list
    self.dir_files = dir_files
    self.dominant = int(dominant)
    self.needcommit = int(needcommit)
    #a dictionary mapping the user choices to stat classes
    #self.stat_class_dict = {1: gene_stat,
    #	0: gene_stat_on_mcl_result}
    self.stat_data = []
    #default parameters to invoke gene_stat
    self.report = 0
    self.log = 0
    self.needcommit_of_gene_stat = int(needcommit_of_gene_stat)
    self.gene_table = gene_table
def __init__(self, hostname, dbname, schema, type, output, orgn='sc'):
    # Connect and select the working schema.
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    # GO ontology branch selected by the integer ``type`` argument.
    self.type_dict = {0:'molecular_function', 1:'biological_process', 2:'cellular_component'}
    self.type = self.type_dict[int(type)]
    self.ofname = output
    # Accepts either a two-letter code or the full species name as key.
    self.org_short2long = {'at':'Arabidopsis thaliana', 'ce':'Caenorhabditis elegans',
        'dm':'Drosophila melanogaster', 'hs':'H**o sapiens', 'mm':'Mus musculus',
        'sc':'Saccharomyces cerevisiae', 'Arabidopsis thaliana':'Arabidopsis thaliana',
        'Caenorhabditis elegans':'Caenorhabditis elegans',
        'Drosophila melanogaster':'Drosophila melanogaster', 'H**o sapiens':'H**o sapiens',
        'Mus musculus':'Mus musculus',
        'Gorilla gorilla Pan paniscus H**o sapiens':'H**o sapiens',
        'Saccharomyces cerevisiae':'Saccharomyces cerevisiae'}
    self.organism = self.org_short2long[orgn]
    self.log_file = open('/tmp/GO_graphxml.log', 'w')
    # term-id / accession lookup caches and the graph under construction.
    self.termid_dict = {}
    self.acc2id_dict = {}
    self.go_graph = Graph.Graph()
def _cursor(self, settings):
    """Return a unicode-wrapping cursor, opening the connection lazily.

    On first use this builds a libpq conninfo string from the DATABASE_*
    settings, connects with isolation level 1 and initializes the session
    time zone and client encoding.
    """
    set_tz = False
    if self.connection is None:
        set_tz = True  # only set the session time zone on a fresh connection
        if settings.DATABASE_NAME == "":
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You need to specify DATABASE_NAME in your Django settings file.")
        conn_string = "dbname=%s" % settings.DATABASE_NAME
        if settings.DATABASE_USER:
            conn_string = "user=%s %s" % (settings.DATABASE_USER, conn_string)
        if settings.DATABASE_PASSWORD:
            # NOTE(review): the format string has no %s placeholder (it looks
            # scrubbed); as written the %-formatting would raise TypeError --
            # confirm against the original source.
            conn_string += " password='******'" % settings.DATABASE_PASSWORD
        if settings.DATABASE_HOST:
            conn_string += " host=%s" % settings.DATABASE_HOST
        if settings.DATABASE_PORT:
            conn_string += " port=%s" % settings.DATABASE_PORT
        self.connection = Database.connect(conn_string, **self.options)
        self.connection.set_isolation_level(1) # make transactions transparent to all cursors
    cursor = self.connection.cursor()
    if set_tz:
        cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
    cursor.execute("SET client_encoding to 'UNICODE'")
    cursor = UnicodeCursorWrapper(cursor, "utf-8")
    return cursor
def start_up(self, **kwargs):
    """Connect to Postgres using the postgres_* keyword arguments and
    create the required tables if they do not exist yet."""
    # Unpack the connection parameters in one shot.
    self.host, self.port, self.user, self.password, self.database = (
        kwargs["postgres_host"],
        kwargs["postgres_port"],
        kwargs["postgres_user"],
        kwargs["postgres_password"],
        kwargs["postgres_database"],
    )

    try:
        self.conn = psycopg.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.password,
            dbname=self.database,
        )
        # Binary cursor for all subsequent queries.
        self.cursor = self.conn.cursor(binary=True)
    except Exception as e:
        raise Exception("Could not establish a connection to Postgres.", e)

    try:
        self._create_tables(**kwargs)
    except Exception as e:
        raise Exception("Could not create tables in Postgres.", e)
def __init__(self, config: Config = None):
    """Set up per-site caches, the YouTube API client and the DB connection.

    config -- optional Config; loaded from the default location when omitted.
    """
    if not config:
        config = Config.load()
    self.config = config

    # Fix: the site list is loop-invariant -- build it once instead of
    # re-evaluating list(Site.__members__.values()) for every cache dict.
    sites = list(Site.__members__.values())

    self.already_checked: dict[Site, set[BaseVideo]] = {site: set() for site in sites}
    # Dict of dicts in the form {site: {video_id: db_id}}
    self.all_vids: dict[Site, dict[str, int]] = {site: {} for site in sites}
    self.all_vid_ids: dict[Site, set[str]] = {site: set() for site in sites}
    self.channel_cache: dict[Site, set[BaseChannel]] = {site: set() for site in sites}
    self.db_channel_cache: dict[Site, set[str]] = {site: set() for site in sites}

    self._yt_api = YTApi(self.config.yt_token)
    self.all_tags: dict[str, int] = {}
    self.threads: list[threading.Thread] = []

    self._conn = psycopg.connect(self.config.db_conn_string, row_factory=dict_row)
    self.db = DbUtils(self._conn)
def sql_open_connection(self):
    """Open a psycopg connection from the 'database' section of the config.

    Translates driver connection failures into hyperdb.DatabaseError so
    callers only deal with the hyperdb exception hierarchy.
    """
    db = connection_dict(self.config, 'database')
    logging.getLogger('hyperdb').info('open database %r'%db['database'])
    try:
        conn = psycopg.connect(**db)
    # Python 2 except/raise syntax.
    except psycopg.OperationalError, message:
        raise hyperdb.DatabaseError, message
def cursor(self):
    """Return a unicode (and, under DEBUG, debug) wrapped cursor.

    Connects lazily on first use and probes the server version once,
    caching it in the module-level ``postgres_version``.
    """
    from django.conf import settings
    set_tz = False
    if self.connection is None:
        set_tz = True  # only set the session time zone on a fresh connection
        if settings.DATABASE_NAME == '':
            from django.core.exceptions import ImproperlyConfigured
            # Python 2 raise syntax.
            raise ImproperlyConfigured, "You need to specify DATABASE_NAME in your Django settings file."
        conn_string = "dbname=%s" % settings.DATABASE_NAME
        if settings.DATABASE_USER:
            conn_string = "user=%s %s" % (settings.DATABASE_USER, conn_string)
        if settings.DATABASE_PASSWORD:
            # NOTE(review): no %s placeholder here (looks scrubbed); the
            # %-formatting below would raise -- confirm original source.
            conn_string += " password='******'" % settings.DATABASE_PASSWORD
        if settings.DATABASE_HOST:
            conn_string += " host=%s" % settings.DATABASE_HOST
        if settings.DATABASE_PORT:
            conn_string += " port=%s" % settings.DATABASE_PORT
        self.connection = Database.connect(conn_string, **self.options)
        self.connection.set_isolation_level(
            1)  # make transactions transparent to all cursors
    cursor = self.connection.cursor()
    if set_tz:
        cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
    cursor = UnicodeCursorWrapper(cursor, settings.DEFAULT_CHARSET)
    global postgres_version
    if not postgres_version:
        # e.g. "PostgreSQL 8.3.1 on ..." -> [8, 3, 1], cached module-wide.
        cursor.execute("SELECT version()")
        postgres_version = [
            int(val) for val in cursor.fetchone()[0].split()[1].split('.')
        ]
    if settings.DEBUG:
        return util.CursorDebugWrapper(cursor, self)
    return cursor
def __init__(self, dir, hostname, dbname, orgn, log, needcommit=0):
    self.dir = dir
    # Connection fixed to the 'graph' schema.
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to graph")
    # Accepts either a two-letter code or the full species name as key.
    self.org_short2long = {'at':'Arabidopsis thaliana', 'ce':'Caenorhabditis elegans',
        'dm':'Drosophila melanogaster', 'hs':'H**o sapiens', 'mm':'Mus musculus',
        'sc':'Saccharomyces cerevisiae', 'Arabidopsis thaliana':'Arabidopsis thaliana',
        'Caenorhabditis elegans':'Caenorhabditis elegans',
        'Drosophila melanogaster':'Drosophila melanogaster', 'H**o sapiens':'H**o sapiens',
        'Mus musculus':'Mus musculus',
        'Gorilla gorilla Pan paniscus H**o sapiens':'H**o sapiens',
        'Saccharomyces cerevisiae':'Saccharomyces cerevisiae'}
    self.organism = self.org_short2long[orgn]
    self.log = int(log)
    if self.log:
        self.logfile = open('/tmp/gene_id_to_no.log', 'w')
    self.needcommit = int(needcommit)
    #records down the maximum gene_no ever assigned.
    self.max_gene_no = 0
    #mapping between gene_id and gene_no
    self.vertex_dict = {}
    self.vertex_dict_extension = {}
    #a unique collection of all genes in the datasets of the directory
    self.gene_set = Set()
def search(self):
    """Return yesterday's CDR json rows for every domain in v_domains.

    Returns a list of single-column rows from v_xml_cdr.
    """
    conn = psycopg.connect(dbname=self.__dbname, user=self.__user,
                           host=self.__host, password=self.__password,
                           port=self.__port)
    try:
        cur = conn.cursor()
        try:
            cur.execute("SELECT domain_name FROM v_domains")
            # Each fetched row is a 1-tuple; flatten into a plain list.
            list_of_domains = []
            for domain_row in cur.fetchall():
                list_of_domains.extend(domain_row)

            # Fix: parameterized query -- the previous version interpolated
            # the domain name with str.format(), an SQL-injection hole.
            query = (
                "SELECT json FROM v_xml_cdr WHERE domain_name = %s"
                " AND start_stamp BETWEEN CURRENT_DATE - INTERVAL '1 DAY'"
                " AND CURRENT_DATE - INTERVAL '1 DAY' + '23:59:59'"
            )
            data = []
            for domain in list_of_domains:
                cur.execute(query, (domain,))
                data.extend(cur.fetchall())
        finally:
            cur.close()  # fix: close even when a query raises
    finally:
        conn.close()
    return data
def import_csv(writer_security, map_sec, map_headers):
    """Write one access-rights CSV row per (model, group) pair.

    writer_security -- csv.DictWriter-like object receiving the rows
    map_sec         -- ordered mapping whose keys name the output columns,
                       in the same order as the SQL result columns
    map_headers     -- unused; kept for interface compatibility
    """
    s = "host=localhost dbname=mra user='******' port=5432 password='******'"
    handle = psycopg.connect(s)
    cr = handle.cursor()
    cr.execute(
        "select ir_model.name as model_id,'1' as perm_read, ir_model.name as name,'1' as perm_create,'1' as perm_unlink,'1' as perm_write,res_groups.name as group_id from ir_model,res_groups where model like '%cci%' or model like 'account%' or model like 'sale%' or model like 'audittrail%' or model like 'res.partner%' or model like 'meeting%' or model like 'crm%' or model like 'event%' or model like 'res.company' or model like 'res.con%' or model like 'purchase%' or model like 'credit%' or model like 'translation' or model like 'letter%' or model like 'membership' or model like 'product%' or model like 'project%' or model like 'payment%' or model like 'crossovered%' or model like 'hr%' order by model")
    # Header row first.
    writer_security.writerow({
        'model_id': 'model_id',
        'group_id': 'group_id',
        'perm_read': 'perm_read',
        'perm_unlink': 'perm_unlink',
        'perm_write': 'perm_write',
        'perm_create': 'perm_create',
        'name': 'name',
    })
    # Fix: pair the map_sec keys positionally with the result columns via
    # zip instead of the manual ``j`` counter; dead commented-out code and
    # the unused ``d`` list were removed.
    for row in cr.fetchall():
        writer_security.writerow(dict(zip(map_sec, row)))
def __init__(self, hostname, dbname, schema, orgn, needcommit=0):
    """
    08-30-05
        add rn to org_short2long
        remove self.organism
    """
    # Connect and select the working schema.
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.needcommit = int(needcommit)
    # Accepts either a two-letter code or the full species name as key.
    self.org_short2long = {'at':'Arabidopsis thaliana', 'ce':'Caenorhabditis elegans',
        'dm':'Drosophila melanogaster', 'hs':'H**o sapiens', 'mm':'Mus musculus',
        'sc':'Saccharomyces cerevisiae', 'rn':'Rattus norvegicus',
        'Rattus norvegicus':'Rattus norvegicus',
        'Arabidopsis thaliana':'Arabidopsis thaliana',
        'Caenorhabditis elegans':'Caenorhabditis elegans',
        'Drosophila melanogaster':'Drosophila melanogaster', 'H**o sapiens':'H**o sapiens',
        'Mus musculus':'Mus musculus',
        'Gorilla gorilla Pan paniscus H**o sapiens':'H**o sapiens',
        'Saccharomyces cerevisiae':'Saccharomyces cerevisiae'}
    #mapping between gene_no and GO list
    self.geneno_go_dict = {}
    #mapping between gene_no and gene_id
    self.geneno_to_geneid = {}
def cursor(self):
    """Return a unicode (and, under DEBUG, debug) wrapped cursor.

    Connects lazily on first use and probes the server version once,
    caching it in the module-level ``postgres_version``.
    """
    from django.conf import settings
    set_tz = False
    if self.connection is None:
        set_tz = True  # only set the session time zone on a fresh connection
        if settings.DATABASE_NAME == '':
            from django.core.exceptions import ImproperlyConfigured
            # Python 2 raise syntax.
            raise ImproperlyConfigured, "You need to specify DATABASE_NAME in your Django settings file."
        conn_string = "dbname=%s" % settings.DATABASE_NAME
        if settings.DATABASE_USER:
            conn_string = "user=%s %s" % (settings.DATABASE_USER, conn_string)
        if settings.DATABASE_PASSWORD:
            # NOTE(review): no %s placeholder here (looks scrubbed); the
            # %-formatting below would raise -- confirm original source.
            conn_string += " password='******'" % settings.DATABASE_PASSWORD
        if settings.DATABASE_HOST:
            conn_string += " host=%s" % settings.DATABASE_HOST
        if settings.DATABASE_PORT:
            conn_string += " port=%s" % settings.DATABASE_PORT
        self.connection = Database.connect(conn_string, **self.options)
        self.connection.set_isolation_level(1) # make transactions transparent to all cursors
    cursor = self.connection.cursor()
    if set_tz:
        cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
    cursor = UnicodeCursorWrapper(cursor, settings.DEFAULT_CHARSET)
    global postgres_version
    if not postgres_version:
        # e.g. "PostgreSQL 8.3.1 on ..." -> [8, 3, 1], cached module-wide.
        cursor.execute("SELECT version()")
        postgres_version = [int(val) for val in cursor.fetchone()[0].split()[1].split('.')]
    if settings.DEBUG:
        return util.CursorDebugWrapper(cursor, self)
    return cursor
def test_weakref(dsn):
    """A closed, dereferenced connection must be garbage-collectable."""
    conn = psycopg.connect(dsn)
    ref = weakref.ref(conn)
    conn.close()
    del conn
    gc_collect()
    # No strong reference remains, so the weakref must be dead.
    assert ref() is None
def _cursor(self, settings):
    """Return a unicode-wrapping cursor, opening the connection lazily.

    On first use this builds a libpq conninfo string from the DATABASE_*
    settings, connects with isolation level 1 and initializes the session
    time zone and client encoding.
    """
    set_tz = False
    if self.connection is None:
        set_tz = True  # only set the session time zone on a fresh connection
        if settings.DATABASE_NAME == '':
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured(
                "You need to specify DATABASE_NAME in your Django settings file."
            )
        conn_string = "dbname=%s" % settings.DATABASE_NAME
        if settings.DATABASE_USER:
            conn_string = "user=%s %s" % (settings.DATABASE_USER, conn_string)
        if settings.DATABASE_PASSWORD:
            # NOTE(review): no %s placeholder here (looks scrubbed); the
            # %-formatting below would raise -- confirm original source.
            conn_string += " password='******'" % settings.DATABASE_PASSWORD
        if settings.DATABASE_HOST:
            conn_string += " host=%s" % settings.DATABASE_HOST
        if settings.DATABASE_PORT:
            conn_string += " port=%s" % settings.DATABASE_PORT
        self.connection = Database.connect(conn_string, **self.options)
        self.connection.set_isolation_level(
            1)  # make transactions transparent to all cursors
    cursor = self.connection.cursor()
    if set_tz:
        cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
    cursor.execute("SET client_encoding to 'UNICODE'")
    cursor = UnicodeCursorWrapper(cursor, 'utf-8')
    return cursor
def test_putconn_no_pool(dsn):
    """putconn() must reject a connection not checked out of the pool."""
    with pool.ConnectionPool(dsn, min_size=1) as p:
        foreign = psycopg.connect(dsn)
        try:
            with pytest.raises(ValueError):
                p.putconn(foreign)
        finally:
            # Fix: close the foreign connection even if the assertion
            # fails, so a failing test does not leak a connection.
            foreign.close()
def load(self):
    '''Load state from database'''
    import psycopg
    if options["globals", "verbose"]:
        print >> sys.stderr, 'Loading state from',self.db_name,'database'
    self.db = psycopg.connect(self.db_name)
    c = self.cursor()
    try:
        c.execute("select count(*) from bayes")
    except psycopg.ProgrammingError:
        # Table missing: brand-new database, create the schema.
        self.db.rollback()
        self.create_bayes()
    if self._has_key(self.statekey):
        # Restore the persisted spam/ham counters.
        row = self._get_row(self.statekey)
        self.nspam = row["nspam"]
        self.nham = row["nham"]
        if options["globals", "verbose"]:
            print >> sys.stderr, ('%s is an existing database,'
                                  ' with %d spam and %d ham') \
                % (self.db_name, self.nspam, self.nham)
    else:
        # new database
        if options["globals", "verbose"]:
            print >> sys.stderr, self.db_name,'is a new database'
        self.nspam = 0
        self.nham = 0
def _cursor(self):
    """Open (if needed) the connection and return a unicode-wrapped cursor.

    First use builds a conninfo string from settings_dict, connects with
    isolation level 1, fires connection_created, applies the session time
    zone and detects the server version (savepoints require >= 8.0).
    """
    new_connection = False
    set_tz = False
    settings_dict = self.settings_dict
    if self.connection is None:
        new_connection = True
        set_tz = settings_dict.get('TIME_ZONE')
        if settings_dict['NAME'] == '':
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You need to specify NAME in your Django settings file.")
        conn_string = "dbname=%s" % settings_dict['NAME']
        if settings_dict['USER']:
            conn_string = "user=%s %s" % (settings_dict['USER'], conn_string)
        if settings_dict['PASSWORD']:
            # NOTE(review): no %s placeholder here (looks scrubbed); the
            # %-formatting below would raise -- confirm original source.
            conn_string += " password='******'" % settings_dict['PASSWORD']
        if settings_dict['HOST']:
            conn_string += " host=%s" % settings_dict['HOST']
        if settings_dict['PORT']:
            conn_string += " port=%s" % settings_dict['PORT']
        self.connection = Database.connect(conn_string, **settings_dict['OPTIONS'])
        self.connection.set_isolation_level(1) # make transactions transparent to all cursors
        connection_created.send(sender=self.__class__)
    cursor = self.connection.cursor()
    if new_connection:
        if set_tz:
            cursor.execute("SET TIME ZONE %s", [settings_dict['TIME_ZONE']])
        if not hasattr(self, '_version'):
            # Cache the server version on the class, once per process.
            self.__class__._version = get_version(cursor)
        if self._version[0:2] < (8, 0):
            # No savepoint support for earlier version of PostgreSQL.
            self.features.uses_savepoints = False
        cursor.execute("SET client_encoding to 'UNICODE'")
    cursor = UnicodeCursorWrapper(cursor, 'utf-8')
    return cursor
def __init__(self, hostname, dbname, schema, source_table, target_table, prune_type, threshhold, report, log=0, needcommit=0):
    # Connect and select the working schema.
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.source_table = source_table
    self.target_table = target_table
    # CLI arguments arrive as strings; normalize.
    self.prune_type = int(prune_type)
    self.threshhold = float(threshhold)
    self.report = int(report)
    self.log = int(log)
    self.needcommit = int(needcommit)
    # prune_type selects the pruning strategy.
    self.prune_func_dict = {0:self.prune_on_edge_set, 1:self.prune_on_recurrence_array, 2:self.prune_on_vertex_set}  #a function mapping structure.
    self.vertex_set_dict = {}  #store a edge_set_dict or recurrence_pattern_dict with the same vertex_set
    self.mcl_id2connectivity_dict ={}  #store the mcl_id:connectivity pair
    self.good_mcl_id_dict = {}  #the mcl_id:recurrence_array pair dictionary
    self.bad_mcl_id_list = []  #store the mcl_ids that are going to be deleted.
    if self.log:
        self.log_file = open("/tmp/cluster_prune.log", 'w')
    # Running counters across passes.
    self.no_of_goods = 0
    self.no_of_bads = 0
    self.run_no = 0
    self.remaining = 1  #1 is useful to let the first run start
    #two tables for bucketing
    self.src_table = 'src_tmp'
    self.tg_table = 'tg_tmp'
def __init__(self, hostname, dbname, schema, type, output, orgn="sc"):
    # Connect and select the working schema.
    self.conn = psycopg.connect("host=%s dbname=%s" % (hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s" % schema)
    # GO ontology branch selected by the integer ``type`` argument.
    self.type_dict = {0: "molecular_function", 1: "biological_process", 2: "cellular_component"}
    self.type = self.type_dict[int(type)]
    self.ofname = output
    # Accepts either a two-letter code or the full species name as key.
    self.org_short2long = {
        "at": "Arabidopsis thaliana",
        "ce": "Caenorhabditis elegans",
        "dm": "Drosophila melanogaster",
        "hs": "H**o sapiens",
        "mm": "Mus musculus",
        "sc": "Saccharomyces cerevisiae",
        "Arabidopsis thaliana": "Arabidopsis thaliana",
        "Caenorhabditis elegans": "Caenorhabditis elegans",
        "Drosophila melanogaster": "Drosophila melanogaster",
        "H**o sapiens": "H**o sapiens",
        "Mus musculus": "Mus musculus",
        "Gorilla gorilla Pan paniscus H**o sapiens": "H**o sapiens",
        "Saccharomyces cerevisiae": "Saccharomyces cerevisiae",
    }
    self.organism = self.org_short2long[orgn]
    self.log_file = open("/tmp/GO_graphxml.log", "w")
    # term-id / accession lookup caches and the graph under construction.
    self.termid_dict = {}
    self.acc2id_dict = {}
    self.go_graph = Graph.Graph()
def import_csv(writer_security, map_sec, map_headers):
    """Write one access-rights CSV row per (model, group) pair.

    writer_security -- csv.DictWriter-like object receiving the rows
    map_sec         -- ordered mapping whose keys name the output columns,
                       in the same order as the SQL result columns
    map_headers     -- unused; kept for interface compatibility
    """
    s = "host=localhost dbname=mra user='******' port=5432 password='******'"
    handle = psycopg.connect(s)
    cr = handle.cursor()
    cr.execute(
        "select ir_model.name as model_id,'1' as perm_read, ir_model.name as name,'1' as perm_create,'1' as perm_unlink,'1' as perm_write,res_groups.name as group_id from ir_model,res_groups where model like '%cci%' or model like 'account%' or model like 'sale%' or model like 'audittrail%' or model like 'res.partner%' or model like 'meeting%' or model like 'crm%' or model like 'event%' or model like 'res.company' or model like 'res.con%' or model like 'purchase%' or model like 'credit%' or model like 'translation' or model like 'letter%' or model like 'membership' or model like 'product%' or model like 'project%' or model like 'payment%' or model like 'crossovered%' or model like 'hr%' order by model")
    # Header row first.
    writer_security.writerow({
        'model_id': 'model_id',
        'group_id': 'group_id',
        'perm_read': 'perm_read',
        'perm_unlink': 'perm_unlink',
        'perm_write': 'perm_write',
        'perm_create': 'perm_create',
        'name': 'name',
    })
    # Fix: pair the map_sec keys positionally with the result columns via
    # zip instead of the manual ``j`` counter; dead commented-out code and
    # the unused ``d`` list were removed.
    for row in cr.fetchall():
        writer_security.writerow(dict(zip(map_sec, row)))
def __init__(self, hostname, dbname, schema, type, orgn):
    # Connect and select the working schema.
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.type = int(type)
    # Accepts either a two-letter code or the full species name as key.
    self.org_short2long = {'at':'Arabidopsis thaliana', 'ce':'Caenorhabditis elegans',
        'dm':'Drosophila melanogaster', 'hs':'H**o sapiens', 'mm':'Mus musculus',
        'sc':'Saccharomyces cerevisiae', 'Arabidopsis thaliana':'Arabidopsis thaliana',
        'Caenorhabditis elegans':'Caenorhabditis elegans',
        'Drosophila melanogaster':'Drosophila melanogaster', 'H**o sapiens':'H**o sapiens',
        'Mus musculus':'Mus musculus',
        'Gorilla gorilla Pan paniscus H**o sapiens':'H**o sapiens',
        'Saccharomyces cerevisiae':'Saccharomyces cerevisiae'}
    if self.type == 2:
        # Only type 2 is organism-specific.
        self.organism = self.org_short2long[orgn]
    self.global_vertex_dict = {}
    self.global_graph_dict = {}
    self.pickle_fname = os.path.join(os.path.expanduser('~'), 'pickle/yeast_global_struc')
    self.vertex_block = ''
    #following four variables keeping track of the change
    self.global_vertex_dict_before = {}
    self.global_vertex_dict_after = {}
    self.no_of_edges_before = 0
    self.no_of_edges_after = 0
def _connect(self):
    """Create and return a new psycopg connection to self.database.

    Raises ImproperlyConfigured when the psycopg3 driver is not available.
    """
    if psycopg is None:
        raise ImproperlyConfigured('psycopg3 is not installed!')
    connection = psycopg.connect(dbname=self.database, **self.connect_params)
    # Apply an explicit isolation level only when one was configured.
    if self._isolation_level is not None:
        connection.isolation_level = self._isolation_level
    return connection
def get_cursor(config):
    """Open a DB connection from the [database] config section.

    Returns the (connection, cursor) pair.
    """
    credentials = {
        'database': config.get('database', 'name'),
        'user': config.get('database', 'user'),
        'password': config.get('database', 'password'),
    }
    dbconn = psycopg.connect(**credentials)
    return dbconn, dbconn.cursor()
def __init__(self, hostname, dbname, fixed, var, tag, xlim, ylim, based_on_clusters, type, l1, ofname):
    # Connection fixed to the 'graph' schema.
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to graph")
    self.tag = tag
    if type!=2:
        # ``fixed`` is "label:v1,v2,..."; not used for type-2 plots.
        (self.fixed_label, self.fixed_value) = fixed.split(':')
        self.fixed_value_list = self.fixed_value.split(',')
    self.var = var
    # Axis ranges arrive as "lo-hi" strings.
    self.x_range = xlim.split('-')
    self.x_range = map(float, self.x_range)
    self.y_range = ylim.split('-')
    self.y_range = map(float, self.y_range)
    self.based_on_clusters = int(based_on_clusters)
    self.type = int(type)
    self.l1 = int(l1)
    self.ofname = ofname
    # Short option letter -> long parameter name.
    self.option_label_dict = {'u': 'unknown_cut_off', 'n':'connectivity_cut_off', 'r':'recurrence_cut_off', 's':'cluster_size_cut_off', 'p':'p_value_cut_off'}
    self.option_num_dict = {}
    self.stat_data = []
    self.no_of_curves = 0
    # a list of varying values, for legend
    self.varying_list = []
def __init__(self, hostname, dbname, orgn, unknown_file):
    """
    08-30-05
        add rn to org_short2long
    """
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    # Accepts either a two-letter code or the full species name as key.
    self.org_short2long = {'at':'Arabidopsis thaliana', 'ce':'Caenorhabditis elegans',
        'dm':'Drosophila melanogaster', 'hs':'H**o sapiens', 'mm':'Mus musculus',
        'sc':'Saccharomyces cerevisiae', 'rn':'Rattus norvegicus',
        'Rattus norvegicus':'Rattus norvegicus',
        'Arabidopsis thaliana':'Arabidopsis thaliana',
        'Caenorhabditis elegans':'Caenorhabditis elegans',
        'Drosophila melanogaster':'Drosophila melanogaster', 'H**o sapiens':'H**o sapiens',
        'Mus musculus':'Mus musculus',
        'Gorilla gorilla Pan paniscus H**o sapiens':'H**o sapiens',
        'Saccharomyces cerevisiae':'Saccharomyces cerevisiae'}
    self.organism = self.org_short2long[orgn]
    self.unknown_file = open(unknown_file, 'w')
    #data structure to store biological_process known genes
    self.known_genes_set = Set()
    #data structure to store biological_process unknown genes
    self.unknown_genes_set = Set()
def db_connect(hostname, dbname, schema=None, password=None, user=None):
    """Open a PostgreSQL connection and return (conn, curs).

    Optionally authenticates with *password*/*user* and, when *schema* is
    given, points search_path at it.  Works with either the psycopg or the
    psycopg2 driver, whichever is importable.

    History: 2008-07-29 copied from annot.bin.codense.common, psycopg
    import fallback added; 2007-03-07 password/user options; 02-28-05
    original; 03-08-05 schema made optional.
    """
    # Assemble the conninfo string from the pieces that were supplied.
    parts = ['host=%s dbname=%s'%(hostname, dbname)]
    if password:
        parts.append('password=%s'%password)
    if user:
        parts.append('user=%s'%user)
    connection_string = ' '.join(parts)
    try:
        import psycopg
    except ImportError:
        try:
            import psycopg2 as psycopg
        except ImportError:
            sys.stderr.write("Neither psycopg nor psycopg2 is installed.\n")
            raise
    conn = psycopg.connect(connection_string)
    curs = conn.cursor()
    if schema:
        curs.execute("set search_path to %s"%schema)
    return (conn, curs)
def load(self):
    """Load state from database"""
    import psycopg

    verbose = options["globals", "verbose"]
    if verbose:
        print("Loading state from", self.db_name, "database", file=sys.stderr)
    self.db = psycopg.connect("dbname=" + self.db_name)
    c = self.cursor()
    try:
        c.execute("select count(*) from bayes")
    except psycopg.ProgrammingError:
        # Table missing: brand-new database, create the schema.
        self.db.rollback()
        self.create_bayes()
    if self._has_key(self.statekey):
        # Restore the persisted spam/ham counters.
        row = self._get_row(self.statekey)
        self.nspam = row["nspam"]
        self.nham = row["nham"]
        if verbose:
            print(("%s is an existing database,"
                   " with %d spam and %d ham")
                  % (self.db_name, self.nspam, self.nham),
                  file=sys.stderr)
    else:
        # Fresh database: start both counters at zero.
        if verbose:
            print(self.db_name, "is a new database", file=sys.stderr)
        self.nspam = 0
        self.nham = 0
def __init__(self, fname, dbname, schema, needcommit=0):
    """Open the tab-delimited input file and a cursor on *schema*."""
    # Database side: connect and select the working schema.
    self.conn = psycopg.connect('dbname=%s'%dbname)
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    # Input side: tab-separated rows from fname.
    self.reader = csv.reader(open(fname, 'r'), delimiter='\t')
    # CLI flag arrives as a string; normalize to int.
    self.needcommit = int(needcommit)
    self.go_id_dict = {}
def _cursor(self, settings):
    """Return a unicode-wrapping cursor, opening the connection lazily.

    On first use this builds a libpq conninfo string from the DATABASE_*
    settings, connects with isolation level 1, applies the session time
    zone and client encoding, and caches the server version on the class
    (savepoints require PostgreSQL >= 8.0).
    """
    set_tz = False
    if self.connection is None:
        set_tz = True  # first connection: run the session-setup statements
        if settings.DATABASE_NAME == '':
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You need to specify DATABASE_NAME in your Django settings file.")
        conn_string = "dbname=%s" % settings.DATABASE_NAME
        if settings.DATABASE_USER:
            conn_string = "user=%s %s" % (settings.DATABASE_USER, conn_string)
        if settings.DATABASE_PASSWORD:
            # NOTE(review): no %s placeholder here (looks scrubbed); the
            # %-formatting below would raise -- confirm original source.
            conn_string += " password='******'" % settings.DATABASE_PASSWORD
        if settings.DATABASE_HOST:
            conn_string += " host=%s" % settings.DATABASE_HOST
        if settings.DATABASE_PORT:
            conn_string += " port=%s" % settings.DATABASE_PORT
        self.connection = Database.connect(conn_string, **self.options)
        self.connection.set_isolation_level(1) # make transactions transparent to all cursors
    cursor = self.connection.cursor()
    if set_tz:
        cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
        if not hasattr(self, '_version'):
            version = get_version(cursor)
            self.__class__._version = version
            if version < (8, 0):
                # No savepoint support for earlier version of PostgreSQL.
                self.features.uses_savepoints = False
        cursor.execute("SET client_encoding to 'UNICODE'")
    cursor = UnicodeCursorWrapper(cursor, 'utf-8')
    return cursor
def get_connection_by_config(self, config_file_path, section_name):
    """Open a PostgreSQL connection from an INI configuration section.

    Reads *config_file_path* with ConfigParser and, if *section_name*
    exists, uses its key/value options as connection parameters.  The
    connection is stored on ``self._conn``.
    """
    if (len(config_file_path) > 0 and len(section_name) > 0):
        # Create an instance of ConfigParser class.
        config_parser = ConfigParser()
        # read the configuration file.
        config_parser.read(config_file_path)
        # if the configuration file contains the provided section name.
        if (config_parser.has_section(section_name)):
            # read the options of the section into a dictionary.
            db_conn_dict = dict(config_parser.items(section_name))
            # BUG FIX: the old code flattened the dict into an unquoted
            # "key=value ..." string, which breaks for values containing
            # spaces or quotes, and it printed the parameters (including
            # the password) to stdout.  Passing the dict as keyword
            # arguments lets psycopg quote each value correctly.
            conn = psycopg.connect(**db_conn_dict)
            self._conn = conn
            print(
                "******* get postgresql database connection with configuration file ********", "\n")
def run(domain='', table='mir'):
    """Fetch sequences for a list of accessions and write FASTA output.

    Input/output file names come from argv[1]/argv[2] or interactive
    prompts (Python 2 ``raw_input``).  When *domain* is given, only the
    named segment of each entry is selected (with its start/tail
    boundaries); otherwise whole sequences are fetched from *table*.
    Results are written via ``fastaOut()``.
    """
    conn=psycopg.connect("dbname=mirdb")
    curs=conn.cursor()
    # Resolve file names: prefer command-line arguments, fall back to prompts.
    if len(sys.argv)>1:
        infname=sys.argv[1]
        if len(sys.argv)>2:
            outfname=sys.argv[2]
        else:
            outfname=raw_input("Please specify the output filename:\n")
    else:
        infname=raw_input("please enter the files containing paccs:\n")
        outfname=raw_input("Please specify the output filename:\n")
    inf=open(infname,'r')
    outf=open(outfname,'w')
    paccs=readpaccs(inf)
    rows=[]
    noofrows=0
    if domain:
        # Segment mode: join mir with segment to get the segment's
        # start/tail positions alongside the sequence.
        for i in range(len(paccs)):
            curs.execute("select m.acc,m.sequence,s.start,s.tail from mir m, segment s where m.acc=s.acc and s.segname=%s and m.acc=%s",(domain,paccs[i],))
            row=curs.fetchall()
            if row:
                rows.append([])
                j=noofrows
                rows[j].append(row[0][0])
                rows[j].append(row[0][1])
                rows[j].append(row[0][2])
                rows[j].append(row[0][3])
                noofrows=noofrows+1
    else:
        # Whole-sequence mode: the accession column name depends on the table.
        for i in range(len(paccs)):
            if table=='mir':
                curs.execute("select acc,sequence from mir where acc=%s",(paccs[i],))
            elif table=='pr_chromdb':
                curs.execute("select pacc,sequence from "+table+" where pacc=%s",(paccs[i],))
            elif table=='temp_pr':
                curs.execute("select pacc,sequence from "+table+" where pacc=%s",(paccs[i],))
            row=curs.fetchall()
            if row:
                rows.append([])
                j=noofrows
                rows[j].append(row[0][0])
                rows[j].append(row[0][1])
                # No segment boundaries here: cover the full sequence (1..len).
                rows[j].append(1)
                rows[j].append(len(row[0][1]))
                noofrows=noofrows+1
    fastaOut(outf,rows)
    outf.close()
    inf.close()
def work():
    """Round-trip faker-generated records through COPY ... TO STDOUT.

    Creates the faker table, inserts its records, then copies them back
    out in the chosen format, consuming the copy stream with the
    strategy selected by *method*.  Relies on ``dsn``, ``fmt``,
    ``faker``, ``set_types`` and ``method`` from the enclosing scope.
    """
    with psycopg.connect(dsn) as conn:
        with conn.cursor(binary=fmt) as cur:
            cur.execute(faker.drop_stmt)
            cur.execute(faker.create_stmt)
            with faker.find_insert_problem(conn):
                cur.executemany(faker.insert_stmt, faker.records)
            # Copy the rows back out, ordered so the output is deterministic.
            stmt = sql.SQL(
                "copy (select {} from {} order by id) to stdout (format {})"
            ).format(
                sql.SQL(", ").join(faker.fields_names),
                faker.table_name,
                sql.SQL(fmt.name),
            )
            with cur.copy(stmt) as copy:
                if set_types:
                    copy.set_types(faker.types_names)
                if method == "read":
                    # Raw byte chunks until an empty read signals the end.
                    while 1:
                        tmp = copy.read()
                        if not tmp:
                            break
                elif method == "iter":
                    # Drain the copy object as an iterator.
                    list(copy)
                elif method == "row":
                    # Parsed rows one at a time; None signals the end.
                    while 1:
                        tmp = copy.read_row()  # type: ignore[assignment]
                        if tmp is None:
                            break
                elif method == "rows":
                    # Drain the parsed-row iterator in one go.
                    list(copy.rows())
def SETdb2sp_check():
    """Compare protein sequences in SETdb against db_sp and report mismatches.

    For every accession listed in pname, fetches the protein sequence
    and the matching db_sp entries.  Differing sequence pairs are
    written to stdout as numbered FASTA records; nomenclature hits for
    the accession are written to stderr.
    """
    conn = psycopg.connect("dbname=SETdb")
    curs = conn.cursor()
    curs.execute("select pacc from pname")
    rows = curs.fetchall()
    rows.sort()
    i=0
    for row in rows:
        for r in row:
            curs.execute("select pacc,sequence from protein where pacc=%s",(r,))
            # NOTE(review): 'row' is rebound here, shadowing the outer
            # loop variable; iteration still works because the outer
            # loop holds its own reference to the fetched list.
            row = curs.fetchall()
            pacc = row[0][0]
            sequence = row[0][1]
            curs.execute("select acc,seq from db_sp where fasta=%s",(r,))
            # 'rows' is likewise rebound; the outer iteration is unaffected.
            rows=curs.fetchall()
            if rows:
                for row in rows:
                    sp_acc = row[0]
                    sp_seq = row[1]
                    if sp_seq != sequence:
                        # Emit both versions as a numbered FASTA pair
                        # (odd index = SETdb, even index = db_sp).
                        sys.stdout.write('>%dSETdb_%s\n%s\n>%ddb_sp_%s\n%s\n' % (2*i+1,pacc,sequence,2*i+2,sp_acc,sp_seq))
                        i=i+1
            curs.execute('select nom from nomenclature where pacc=%s',(pacc,))
            row =curs.fetchall()
            if row:
                sys.stderr.write('%s:%s\n'%(pacc,row))
def __init__(self, dir, hostname, dbname, schema, orgn, union=0, needcommit=0):
    """Connect to the database, select *schema*, and set up gene mappings.

    *orgn* may be a two-letter code or a full organism name; it is
    normalized through ``org_short2long``.  *union* and *needcommit*
    are coerced to int flags.
    """
    self.dir = dir
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.union = int(union)
    self.needcommit = int(needcommit)
    # Maps both short codes and full names to the canonical organism name.
    self.org_short2long = {'at':'Arabidopsis thaliana',
        'ce':'Caenorhabditis elegans',
        'dm':'Drosophila melanogaster',
        'hs':'H**o sapiens',
        'mm':'Mus musculus',
        'sc':'Saccharomyces cerevisiae',
        'Arabidopsis thaliana':'Arabidopsis thaliana',
        'Caenorhabditis elegans':'Caenorhabditis elegans',
        'Drosophila melanogaster':'Drosophila melanogaster',
        'H**o sapiens':'H**o sapiens',
        'Mus musculus':'Mus musculus',
        'Gorilla gorilla Pan paniscus H**o sapiens':'H**o sapiens',
        'Saccharomyces cerevisiae':'Saccharomyces cerevisiae'}
    self.organism = self.org_short2long[orgn]
    #mapping between gene_id and gene_no
    self.gene_id2gene_no = {}
    #mapping between gene_id and its occurence
    self.gene_id2freq = {}
    #unique gene collection, for database submission
    self.gene_set = Set()
def run(self):
    """
    05-14-05
    06-30-05
        add gph_dir
    --dstruc_loadin()
    if self.schema:
        --cor_vector_from_db
    or
        --cor_vector_from_files

    Entry point: connect to the database when a schema is given
    (otherwise a data directory is required), load data structures
    across MPI nodes, synchronize, then compute correlation vectors
    from the database or from files.
    """
    if self.schema:
        self.conn = psycopg.connect('host=%s dbname=%s'%(self.hostname, self.dbname))
        self.curs = self.conn.cursor()
        self.curs.execute("set search_path to %s"%self.schema)
    else:
        # Without a schema the data must come from files, so a
        # directory is mandatory.
        if self.dir == None:
            sys.stderr.write("Either schema or data directory should be specified\n")
            sys.exit(2)
    communicator = MPI.world.duplicate()
    self.dstruc_loadin(communicator)
    self.mpi_synchronize(communicator)
    if self.schema:
        self.cor_vector_from_db(self.output_file)
    else:
        # File mode additionally requires the significance file.
        if self.significance_file==None:
            sys.stderr.write("Error: where's the significance file?\n")
            sys.exit(2)
        self.cor_vector_from_files(communicator, self.dir, self.gph_dir, self.output_file, \
            self.significance_file, self.p_value_cut_off, self.cor_cut_off)
def start(self):
    """Launch the pproxy tunnel process and verify it forwards connections."""
    if self.proc:
        logging.info("proxy already started")
        return
    logging.info("starting proxy")
    executable = which("pproxy")
    if not executable:
        raise ValueError("pproxy program not found")
    args = [
        executable,
        "--reuse",
        "-l",
        f"tunnel://:{self.client_port}",
        "-r",
        f"tunnel://{self.server_host}:{self.server_port}",
    ]
    self.proc = sp.Popen(args, stdout=sp.DEVNULL)
    logging.info("proxy started")
    self._wait_listen()
    # verify that the proxy works
    try:
        with psycopg.connect(self.client_dsn):
            pass
    except Exception as e:
        pytest.fail(f"failed to create a working proxy: {e}")
def db(is_legacy=False):
    """
    Creates a connections to the database.

    Used as a context manager that closes the connection automatically
    on exit.  Legacy connections will roll-back before close as they are
    always read-only.  Nextgen connections will be committed before
    close unless there is an error.
    """
    database_url = build_url(is_legacy=is_legacy)
    logger.debug(f"Starting a connection with {database_url}")
    with connect(database_url, row_factory=dict_row) as connection:
        try:
            logger.debug(f"Creating cursor for {database_url}")
            with connection.cursor() as cursor:
                yield cursor
            logger.debug(f"Finalizing transaction for {database_url}")
            if not is_legacy:
                # BUG FIX: commit before logging success; the old code
                # logged "Committed transaction" and then called
                # commit(), so a failing commit was reported as done.
                connection.commit()
                logger.debug(f"Committed transaction for {database_url}")
            else:
                logger.debug(f"Rolling back read-only transaction for {database_url}")
                connection.rollback()
        except Exception as err:
            # Roll back on any failure and re-raise for the caller.
            logger.error(f"Hit an error in transaction for {database_url}: {err}")
            connection.rollback()
            raise
def __init__(self, fname, hostname, dbname, schema, parser, u_fname, orgn, needcommit=0):
    """Open input files, connect to the database, and set up GO structures.

    *fname* is the GO input file; *u_fname* is a semicolon-delimited
    CSV read alongside it.  *parser* selects an entry from the global
    ``parser_map``.  *orgn* (short code or full name) is normalized via
    ``org_short2long``.
    """
    self.go_inf = open(fname, 'r')
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.parser = parser_map[parser]
    self.reader = csv.reader(open(u_fname, 'r'), delimiter=';')
    self.needcommit = int(needcommit)
    # Maps both short codes and full names to the canonical organism name.
    self.org_short2long = {'at':'Arabidopsis thaliana',
        'ce':'Caenorhabditis elegans',
        'dm':'Drosophila melanogaster',
        'hs':'H**o sapiens',
        'mm':'Mus musculus',
        'sc':'Saccharomyces cerevisiae',
        'Arabidopsis thaliana':'Arabidopsis thaliana',
        'Caenorhabditis elegans':'Caenorhabditis elegans',
        'Drosophila melanogaster':'Drosophila melanogaster',
        'H**o sapiens':'H**o sapiens',
        'Mus musculus':'Mus musculus',
        'Gorilla gorilla Pan paniscus H**o sapiens':'H**o sapiens',
        'Saccharomyces cerevisiae':'Saccharomyces cerevisiae'}
    self.organism = self.org_short2long[orgn]
    #mapping between gene_id and gene_no
    self.vertex_dict = {}
    #stores all the unknown genes in schema.gene
    self.unknown_gene_list = []
    #GO DAG
    self.go_graph = Graph.Graph()
def _cursor(self):
    """Return a ready-to-use cursor, opening the DB connection lazily.

    Builds a libpq conninfo string from ``self.settings_dict``, opens
    the connection at isolation level 1 so transactions are transparent
    to all cursors, emits the connection_created signal, sets the
    session time zone on first connect, disables savepoints for
    PostgreSQL < 8.0, and wraps the cursor for UTF-8.
    """
    set_tz = False
    settings_dict = self.settings_dict
    if self.connection is None:
        set_tz = True
        if settings_dict['NAME'] == '':
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured(
                "You need to specify NAME in your Django settings file.")
        conn_string = "dbname=%s" % settings_dict['NAME']
        if settings_dict['USER']:
            conn_string = "user=%s %s" % (settings_dict['USER'], conn_string)
        if settings_dict['PASSWORD']:
            # BUG FIX: the format string was missing its %s placeholder,
            # so the % operation raised "not all arguments converted".
            conn_string += " password='%s'" % settings_dict['PASSWORD']
        if settings_dict['HOST']:
            conn_string += " host=%s" % settings_dict['HOST']
        if settings_dict['PORT']:
            conn_string += " port=%s" % settings_dict['PORT']
        self.connection = Database.connect(conn_string, **settings_dict['OPTIONS'])
        self.connection.set_isolation_level(
            1)  # make transactions transparent to all cursors
        connection_created.send(sender=self.__class__)
    cursor = self.connection.cursor()
    if set_tz:
        cursor.execute("SET TIME ZONE %s", [settings_dict['TIME_ZONE']])
    if not hasattr(self, '_version'):
        self.__class__._version = get_version(cursor)
    if self._version[0:2] < (8, 0):
        # No savepoint support for earlier version of PostgreSQL.
        self.features.uses_savepoints = False
    cursor.execute("SET client_encoding to 'UNICODE'")
    cursor = UnicodeCursorWrapper(cursor, 'utf-8')
    return cursor
def __init__(self, hostname, dbname, schema, table, branch, depth, all, new_table, report=0, \
    needcommit=0, log=0):
    """Connect to the database and prepare GO-graph distance structures.

    *branch* is an integer index (0/1/2) selecting the GO namespace via
    ``branch_dict``.  Numeric flags (*depth*, *all*, *new_table*,
    *report*, *needcommit*, *log*) are coerced to int.  When logging is
    enabled, a log file is opened under /tmp.
    """
    self.conn = psycopg.connect('host=%s dbname=%s'%(hostname, dbname))
    self.curs = self.conn.cursor()
    self.curs.execute("set search_path to %s"%schema)
    self.table = table
    self.depth = int(depth)
    self.all = int(all)
    self.new_table = int(new_table)
    self.report = int(report)
    self.needcommit = int(needcommit)
    self.log = int(log)
    #mapping for the branches
    self.branch_dict = {0:'molecular_function',
        1:'biological_process',
        2:'cellular_component'}
    self.branch = self.branch_dict[int(branch)]
    if self.log:
        self.log_file = open('/tmp/go_node_distance.log','w')
    #mapping between go term id and its index list
    self.go_id2index = {}
    #mapping between go_id and go_acc
    self.go_id2acc = {}
    #GO DAG (directed)
    self.go_digraph = Graph.Graph()
    #GO undirected Graph, to compute distance between two nodes
    self.go_graph = Graph.Graph()
    #the node_list contains the nodes to compute pairwise distances
    self.node_list = Set()
    #the list containing all indices
    self.index_list = Set()
    #key structure, mapping between a pair of go_id's and its associated distances
    self.go_id2distance = {}
def work():
    """Round-trip faker-generated records through plain SELECT fetches.

    Creates the faker table, inserts its records, then reads them back
    with the fetch strategy selected by *fetch*.  Relies on ``dsn``,
    ``fmt_out``, ``row_factory``, ``faker`` and ``fetch`` from the
    enclosing scope.
    """
    with psycopg.connect(dsn) as conn:
        with conn.cursor(binary=fmt_out, row_factory=row_factory) as cur:
            cur.execute(faker.drop_stmt)
            cur.execute(faker.create_stmt)
            with faker.find_insert_problem(conn):
                cur.executemany(faker.insert_stmt, faker.records)
            cur.execute(faker.select_stmt)
            if fetch == "one":
                # One row at a time; None signals exhaustion.
                while 1:
                    tmp = cur.fetchone()
                    if tmp is None:
                        break
            elif fetch == "many":
                # Batches of 3; an empty batch signals exhaustion.
                while 1:
                    tmp = cur.fetchmany(3)
                    if not tmp:
                        break
            elif fetch == "all":
                cur.fetchall()
            elif fetch == "iter":
                # Drain the cursor as an iterator.
                for rec in cur:
                    pass
def __init__(self, conn_str):
    """Create a pooled, non-serialized connection and hand it to the base.

    conn_str -- a string of connection string.
    """
    pool_options = dict(maxconn=5, minconn=5, serialize=0)
    connection = dbapi.connect(conn_str, **pool_options)
    generic.ConnectionHelper.__init__(self, connection)
def test_load_global_ctx(dsn, global_adapters):
    """Loaders registered on the global adapters map affect new connections."""
    psycopg.adapters.register_loader("text", make_loader("gt"))
    psycopg.adapters.register_loader("text", make_bin_loader("gb"))
    conn = psycopg.connect(dsn)
    for binary, expected in [(False, "hellogt"), (True, "hellogb")]:
        cur = conn.cursor(binary=binary).execute("select 'hello'::text")
        assert cur.fetchone() == (expected,)
def __init__(self, database, host, user, password, maxconn, minconn):
    """Open a pooled connection to *database* and switch on autocommit."""
    parts = ('dbname=%s' % database,
             'host=%s' % host,
             'user=%s' % user,
             'password=%s' % password)
    dsn = ' '.join(parts)
    self._conn = dbapi.connect(dsn, maxconn=maxconn, minconn=minconn, serialize=0)
    self._conn.autocommit(1)
def check_row_factories() -> None:
    """Static-typing exercise: each built-in row factory should drive the
    connection's row type, verified by the annotated assignments below
    (checked by mypy, not executed).
    """
    conn1 = connect(row_factory=rows.tuple_row)
    v1: Tuple[Any, ...] = conn1.execute("").fetchall()[0]
    conn2 = connect(row_factory=rows.dict_row)
    v2: Dict[str, Any] = conn2.execute("").fetchall()[0]
    conn3 = connect(row_factory=rows.class_row(Person))
    v3: Person = conn3.execute("").fetchall()[0]
    conn4 = connect(row_factory=rows.args_row(argsf))
    v4: float = conn4.execute("").fetchall()[0]
    conn5 = connect(row_factory=rows.kwargs_row(kwargsf))
    v5: int = conn5.execute("").fetchall()[0]
    # Reference the values so they don't register as unused.
    v1, v2, v3, v4, v5