def db_comments_417(now, now_tuple, col_name, sec_head, className):
    """Print an HTML section (Python 2 CGI) with forecaster annotations.

    Looks up column *col_name* for hour *now* in the shared 'svr_frcst'
    database (yearly 'annote_YYYY' table) and in the per-class
    'svr_<className>' database ('annote' table); the class-specific text is
    preferred when present.  Output goes straight to stdout.
    """
    # Normalize the epoch value to an integer-seconds string.
    now = str(int(float(now)))
    # Shared DB partitions by year; the class DB uses one flat table.
    table_str = time.strftime("annote_%Y", now_tuple)
    table_str2 = "annote"
    print '<TABLE align="CENTER" bgcolor="black" cellpadding="2" border="0" width="100%">'
    print '<TR WIDTH="100%"><TD>'
    print '<TABLE bgcolor="#EEEEEE" border="0" cellpadding="2" width="100%">'
    print '<TR><TD><font color="blue" size="4" face="ARIAL"><B>'+sec_head+'</B></font></TD></TR>'
    print '<TR><TD bgcolor="white">'
    try:
        import pg
        svrdb = pg.connect('svr_frcst')
        my417db = pg.connect('svr_'+className)
        # NOTE(review): SQL built by concatenation — injection risk if
        # col_name/now ever come from untrusted input; confirm callers.
        select = svrdb.query("SELECT "+col_name+" from "+table_str+" where ztime = '"+str(int(float(now)))+"'").getresult()
        select2 = my417db.query("SELECT "+col_name+" from "+table_str2+" where ztime = '"+str(int(float(now)))+"'").getresult()
    except ValueError:
        print "None available for this hour..."
        select = [(" "),(" ")]
        select2 = ""
    # Class-local comments win when any rows came back.
    if len(select2) > 0:
        select = select2
    if len(select) == 0:
        print "None available for this hour..."
    else:
        print '<font size="6">'+select[0][0][0]+'</font>'+select[0][0][1:] # Get the neat capital letter to start
        print '<BR><BR>'
    print '</TD></TR></TABLE>'
    print '</TD></TR></TABLE>'
def db_connect(pwd): mysql.connector.connect(host='localhost', user='******', password='') # Noncompliant # ^^^^^^^^^^^ mysql.connector.connect(host='localhost', password='', user='******') # Noncompliant mysql.connector.connect('localhost', 'sonarsource', '') # Noncompliant # ^^ mysql.connector.connect('localhost', 'sonarsource', 'hello') # OK mysql.connector.connect('localhost', 'sonarsource', password='******') # OK mysql.connector.connect('localhost', 'sonarsource', password=pwd) # OK mysql.connector.connect('localhost', 'sonarsource', pwd) # OK mysql.connector.connect('localhost', 'sonarsource') # OK mysql.connector.connect('localhost', 'sonarsource', **dict) # OK mysql.connector.connection.MySQLConnection(host='localhost', user='******', password='') # Noncompliant pymysql.connect(host='localhost', user='******', password='') # Noncompliant pymysql.connections.Connection(host='localhost', user='******', password='') # Noncompliant psycopg2.connect(host='localhost', user='******', password='') # Noncompliant pgdb.connect(host='localhost', user='******', password='') # Noncompliant pg.DB(host='localhost', user='******', passwd='') # Noncompliant pg.DB('dbname', 'localhost', 5432, 'opt', 'postgres', '') # Noncompliant pg.connect(host='localhost', user='******', passwd='') # Noncompliant pg.DB(host='localhost', user='******', passwd=pwd) # Compliant pg.DB('dbname', 'localhost', 5432, 'opt', 'postgres', pwd) # Compliant
def Main():
    """CGI entry point (Python 2): dump Campbell station data as plain text.

    Reads station/column/date-range fields from the submitted form, queries
    the hourly or daily campbell database, and prints one whitespace-
    separated table per station.
    """
    form = cgi.FormContent()
    # Required multi-valued fields; bail out with an error page if missing.
    if not form.has_key("stations"):
        style.SendError("No station specified!")
    if not form.has_key("dataCols"):
        style.SendError("No data specified!")
    stations = form["stations"]
    dataCols = form["dataCols"]
    # Table names carry the year suffix (addYear builds '<station>_<year>').
    stationTables = map(addYear, stations)
    year = form["year"][0]
    startMonth = str( form["startMonth"][0] )
    startDay = str( form["startDay"][0] )
    endMonth = str( form["endMonth"][0] )
    endDay = str( form["endDay"][0] )
    timeType = form["timeType"][0]
    startTime = startMonth+"-"+startDay+"-"+year
    endTime = endMonth+"-"+endDay+"-"+year
    # Render the column list as a bare comma-separated identifier string.
    dataCols = tuple(dataCols)
    strDataCols = str(dataCols)[1:-2]
    strDataCols = regsub.gsub("'", " ", strDataCols)
    if timeType == "hourly":
        mydb = pg.connect('campbellHourly', 'localhost', 5432)
        strDataCols = "date_part('year', day) AS year, date_part('month', day) AS month, date_part('day', day) AS day, date_part('hour', day) AS hour, "+strDataCols
    else:
        mydb = pg.connect('campbellDaily', 'localhost', 5432)
        strDataCols = "date_part('year', day) AS year, date_part('month', day) AS month, date_part('day', day) AS day, "+strDataCols
    print 'Content-type: text/plain \n\n'
    print """
# Output for Iowa Campbell Station Data
# Notes on the format of this data and units can be found at
# http://www.pals.iastate.edu/campbell/info.txt
# If you have trouble getting the data you want, please send email to [email protected]
"""
    for stationTable in stationTables:
        # NOTE(review): query assembled by concatenation — assumes form
        # values are trusted; injection risk otherwise.
        queryStr = "SELECT day as sortday, '"+stationTable[:-5]+"' as statid, "+strDataCols+" from "+stationTable+" WHERE day >= '"+startTime+"' and day <= '"+endTime+"' ORDER by sortday ASC"
        # print queryStr
        results = mydb.query(queryStr)
        # First field is only the sort key; keep it out of the heading row.
        headings = results.listfields()[1:]
        strHeadings = str(tuple(headings))[1:-2]
        strHeadings = regsub.gsub("'", " ", strHeadings)
        print regsub.gsub(",", " ", strHeadings)
        results = results.getresult()
        printer(results, timeType)
        print "\n\n\n"
    print "#EOF"
def connectdb(self, given_dbname, given_host=None, given_port=None, given_opt=None):
    """Open a pg connection, retrying briefly while the server is unavailable.

    Transient startup/recovery errors are retried up to 1000 times with a
    0.1s pause; any other error (or the final attempt) is re-raised.  The
    returned connection has notices silenced via null_notice_receiver.
    """
    kwargs = {'host': given_host, 'opt': given_opt, 'dbname': given_dbname}
    if given_port is not None:
        kwargs['port'] = given_port
    con = None
    attempts_left = 1000
    while attempts_left:
        try:
            con = pg.connect(**kwargs)
            break
        except Exception as e:
            msg = str(e)
            transient = ("the database system is starting up" in msg
                         or "the database system is in recovery mode" in msg)
            if transient and attempts_left > 1:
                attempts_left -= 1
                time.sleep(0.1)
            else:
                raise
    con.set_notice_receiver(null_notice_receiver)
    return con
def conecta(self):
    """Connect to the configured database.

    Uses keyword-style credentials when a username is set, otherwise a bare
    positional connect.  Returns a Spanish status string instead of raising.
    """
    anonymous = (self.usuario == '')
    try:
        if anonymous:
            self.db = pg.connect(self.nombre, self.host, self.puerto)
        else:
            self.db = pg.connect(dbname=self.nombre, host=self.host,
                                 port=self.puerto, passwd=self.password,
                                 user=self.usuario)
    except TypeError:
        return 'Error BD argumentos'
    except SyntaxError:
        return 'Error BD argumentos duplicados'
    except:
        return 'Error desconocido conectanco a' + self.nombre
    return 'Conectado'
def printIntro(caseNum, className = "nill"):
    """Print the case preview paragraph (Python 2 CGI).

    Prefers the class-specific intro from 'svr_<className>'; falls back to
    the shared 'svr_frcst' intro, then to a placeholder string.
    """
    import pg
    mydb = pg.connect('svr_frcst')
    try:
        entry1 = mydb.query("SELECT comments from intro WHERE case_num = '"+caseNum+"' ").getresult()[0][0]
    except:
        # No shared intro row (or query failure) -> placeholder.
        entry1 = "Not yet written"
    print '<font color="blue"><H2>Preview of this Case:</H2></font>'
    try:
        ldb = pg.connect('svr_'+className)
        entry2 = ldb.query("SELECT comments from intro WHERE case_num = '"+caseNum+"' ").getresult()[0][0]
        print entry2
    except:
        # Class DB missing or empty -> fall back to the shared intro.
        print entry1
    print '<BR><BR>'
def transform_all():
    """Run calc_coordinates.py for every station that has reference coords.

    Python 2 helper: converts ETRF to ITRF coordinates for a user-supplied
    target date, covering all LBA/HBA antenna-field variants, then reports
    stations with a known rotation but no ETRF data.  Exits the process on
    the first failed subprocess.
    """
    db = pg.connect(user="******", host=dbHost, dbname=dbName)
    print 'Transform all ETRF coordinates to ITRF coordinates for given date'
    target = getInputWithDefault("Enter target_date",default_targetdate)
    all_stations=db.query("select distinct o.stationname from object o inner join field_rotations r on r.id = o.id").getresult();
    ref_stations=db.query("select distinct o.stationname from object o inner join reference_coord r on r.id = o.id").getresult();
    for stationname in ref_stations:
        station = stationname[0]
        if 0 != Popen(['./calc_coordinates.py',station,"LBA",target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py',station,"CLBA",target]).wait():
            sys.exit(1)
        #if station[:1] == 'C': # core station
        if 0 != Popen(['./calc_coordinates.py',station,"HBA0",target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py',station,"CHBA0",target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py',station,"HBA1",target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py',station,"CHBA1",target]).wait():
            sys.exit(1)
        #else: #remote or international station
        if 0 != Popen(['./calc_coordinates.py',station,"HBA",target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py',station,"CHBA",target]).wait():
            sys.exit(1)
    db.close()
    # Stations with a rotation entry but no reference (ETRF) coordinates.
    missing_stations=list(set(all_stations) - set(ref_stations))
    for stationname in missing_stations:
        station = stationname[0]
        print "Station with known HBA rotation but no ETRF: ",station
def create_conn(config):
    """Open a PostgreSQL connection from a config mapping.

    Expects the keys dbname/host/port/username/password.  On any failure
    the problem is logged via anom.wrt_log and None is returned.
    """
    try:
        return pg.connect(dbname=config['dbname'],
                          host=config['host'],
                          port=config['port'],
                          user=config['username'],
                          passwd=config['password'])
    except:
        anom.wrt_log('Connection failed')
        return None
def main():
    """Generate DNS config files (Python 2 script entry point).

    Reads SQL/DNS settings from an INI file, connects to PostgreSQL, then
    emits reverse and forward DNS zone data.  Returns 0 on success, 1 on a
    config error, 2 on a connection error.
    """
    # parse command line begin
    cmd_parser = OptionParser()
    cfg_parser = ConfigParser.ConfigParser()
    cmd_parser.add_option( "-c", "--config", action = "store", type = "string", dest = "config_file", default = "/home/inferno/python/config.ini", help = "set configuration file (default: config.ini)", metavar="CONFIG" )
    (cmd_options, args) = cmd_parser.parse_args()
    # parse config file
    try:
        cfg_parser.read( cmd_options.config_file )
    except:
        print "gen_dns.py: Config file error"
        return 1
    print "gen_dns.py: Reading config: " + str( cmd_options.config_file )
    # parse config file end
    # TODO: locking!
    # get common options from config
    sqlhost = str( cfg_parser.get( "Common", "sqlhost" ) )
    sqldb = str( cfg_parser.get( "Common", "sqldb" ) )
    sqlusr = str( cfg_parser.get( "Common", "sqluser" ) )
    sqlpass = str( cfg_parser.get( "Common", "sqlpass" ) )
    vlan_tbl = str( cfg_parser.get( "Common", "vlan_table" ) )
    host_tbl = str( cfg_parser.get( "Common", "host_table" ) )
    user_tbl = str( cfg_parser.get( "Common", "user_table" ) )
    reco_tbl = str( cfg_parser.get( "Common", "record_table" ) )
    # get dns options from config
    dns_outdir = str( cfg_parser.get( "DNS", "output_dir" ) )
    dns_domain = str( cfg_parser.get( "DNS", "domain" ) )
    # connect
    try:
        con = pg.connect( dbname=sqldb, host=sqlhost, user=sqlusr, passwd=sqlpass )
    except:
        print "gen_dns.py: Connection FAILED"
        return 2
    now = datetime.datetime.now()
    print 'gen_dns.py: DNS Config Generation Started at '+str(now)
    gen_revers_dns( con, '', dns_domain, host_tbl, dns_outdir )
    gen_dns( con, dns_domain, host_tbl, reco_tbl, user_tbl, dns_outdir )
    # close connection
    con.close()
    now = datetime.datetime.now()
    print 'gen_dns.py: DNS Config Generation Finished at '+str(now)
    return 0
def get_collected_cids(self):
    """Return all (cid_product,) rows from the product table.

    SECURITY(review): connection credentials (including a base64-obfuscated
    password) are hard-coded here; they belong in configuration.
    NOTE: base64.decodestring is legacy (removed in Python 3.9) — this code
    targets old Python.
    """
    conn = pg.connect(
        'itemcase', '50.116.1.34', 5432, None, None, 'postgres',
        base64.decodestring('ZmFzdE1vdmluZzJCcmVha0V2ZXJ5dGhpbmc='))
    r = conn.query('SELECT cid_product FROM product').getresult()
    conn.close()
    return r
def DoScan(user_1id, user_2id, second):
    """Compare two users' tables and print overlap statistics.

    Calls itself once with swapped arguments when *second* == "0" so both
    orderings are reported.
    NOTE(review): table names are concatenated straight from the arguments —
    SQL injection risk; the commented line suggests tables were once 'u<id>'.
    """
    if user_1id == user_2id:
        return
    connection = pg.connect(dbname='info_db', host='localhost', user='******', passwd='givemehack')
    # Empty-string defaults keep the loops below harmless when a query fails.
    res1, res2 = "", ""
    try:
        #res1 = connection.query("select * from u" + str(user_1id)).getresult()
        res1 = connection.query("select * from " + str(user_1id)).getresult()
        res2 = connection.query("select * from " + str(user_2id)).getresult()
    except:
        print("one of these peoples is not found in database...")
    coincidence = 0
    coincidence_str = []
    excepts = 0
    i = 0
    # Count rows whose first column matches between the two result sets.
    for i in range(0, len(res1)):
        try:
            for j in range(0, len(res2)):
                if res1[i][0] == res2[j][0]:
                    coincidence += 1
                    coincidence_str.append(str(res1[i][1]))
                    # NOTE(review): 'f' is not defined in this function —
                    # presumably a module-level log file handle; verify.
                    f.write(user_1id + " - " + user_2id + "\n")
        except:
            excepts += 1
    print("coincidence: " + str(coincidence) + " (" + user_1id + " and " + user_2id + ")")
    print("coincidence name: " + str(coincidence_str))
    print("excepts: " + str(excepts))
    if second == "0":
        DoScan(user_2id, user_1id, "1")
def test_pygresql_escape_string(self):
    '''Test pygresql (escape strings)'''
    # Create a throwaway user and database to connect with.
    self.user = testlib.TestUser()
    self.testuser = self.user.login
    self.testdb = "pygresql_db"
    self._create_user(self.testuser, self.user.password)
    self._create_db(self.testdb, self.testuser)
    import pg
    self.pgcnx = pg.connect(dbname=self.testdb, host='127.0.0.1', user=self.testuser, passwd=self.user.password)
    # A single quote must escape to two quotes at module level...
    search = "''"
    warning = 'Could not find "%s"\n' % search
    self.assertTrue(pg.escape_string("'") == search, warning)
    # fix for CVE-2009-2940 added this
    # ...and also on the connection object (added by the CVE fix).
    search = "''"
    warning = 'Could not find "%s"\n' % search
    try:
        self.assertTrue(self.pgcnx.escape_string("'") == search, warning)
    except AttributeError:
        # Missing connection-level escape_string means an unpatched library.
        warning = 'CVE-2009-2940: Could not find required pyobj.escape_string()'
        self.assertTrue(False, warning)
def hasRecentImport():
    """Return (ok, hours_since_last_import, tolerance_hours).

    Checks the 'weather' DB's import_times table; *ok* is True when the
    newest import is younger than the tolerance.
    """
    # how recent is recent? usually they are spaced by 6 hours, so give
    # some buffer too.
    tolerance_hours = 8.0
    dbname = "weather"
    query = "select max(date) from import_times;"
    last_import = hours_since_last_import = None
    ok = False
    now = datetime.utcnow().replace(second=0, microsecond=0)
    # go to the DB and find out when the most recent import was
    c = pg.connect(user="******", dbname=dbname, port=settings.DATABASE_PORT)
    r = c.query(query)
    print r.dictresult()
    last_import_str = r.dictresult()[0]["max"]
    if last_import_str is not None:
        last_import = datetime.strptime(last_import_str, "%Y-%m-%d %H:%M:%S")
    if last_import is not None:
        # was that recent enough?
        # NOTE(review): timedelta.seconds ignores the .days component, so a
        # gap of more than a day wraps around — confirm this is acceptable.
        hours_since_last_import = (now - last_import).seconds / (60.0 * 60.0)
        if hours_since_last_import < tolerance_hours:
            ok = True
    c.close()
    print "hasRecentImport: ", ok, hours_since_last_import, tolerance_hours
    return (ok, hours_since_last_import, tolerance_hours)
def do(argDict, selStr0):
    """Query output_opsim3_61 (Python 2): one row per distinct expdate.

    Builds a WHERE clause from *argDict*, selects the columns in *selStr0*
    for the first obshistid of each expdate, and returns
    (list-of-dict results, database name).
    """
    print 'Dumping argDict:'
    for k in argDict:
        print k, argDict[k]
    verifyArgs(argDict)
    whereStr = constructSQLWhere(argDict)
    print 'WHERE str is: \n ' + whereStr
    # Subselect keeps the earliest obshistid per expdate; joined back below.
    fromStr = '(select min(obshistid) obshistid from output_opsim3_61 ' +\
        '%s group by expdate) a,' % whereStr
    matchStr = 'output_opsim3_61 b where a.obshistid = b.obshistid;'
    dBName0 = 'cosmoDB.11.19.2009'
    res = pg.connect(host='deathray.astro.washington.edu', user='******', dbname=dBName0, passwd='cosmouser')
    # Example of the full hand-written query this mirrors:
    #b.rotSkyPos, b.rotTelPos, b.sunalt, b.sunaz, b.rawseeing, b.seeing,
    #b.filtsky, b.dist2moon, b.moonalt, b.phaseangle, b.miescatter,
    #b.moonillum, b.darkbright, b.perry_skybrightness from (select
    #min(obshistid) obshistid from output_opsim3_61 where fielddec between
    #-0.033 and 0.033 and fieldra between 0.161 and 6.122 and expmjd
    #between 49353. and 49718. group by expdate) a, output_opsim3_61 b
    #where a.obshistid = b.obshistid;
    query = 'SELECT %s FROM %s %s' % (selStr0, fromStr, matchStr)
    print 'Query: %s' % query
    results = res.query(query)
    dictRes = results.dictresult()
    print 'Got %i results' % len(dictRes)
    #dictRes['DBQueried'] = dBName
    #print dictRes
    return dictRes, dBName0
def test_zz_CVE_2010_0442(self):
    '''Test CVE-2010-0442'''
    # Fresh user + database for this check.
    self.user = testlib.TestUser()
    self.testuser = self.user.login
    self.testdb = "%s_db" % (self.testuser)
    self._create_user(self.testuser, self.user.password)
    self._create_db(self.testdb, self.testuser)
    import pg
    self.pgcnx = pg.connect(dbname=self.testdb, host='127.0.0.1', user=self.testuser, passwd=self.user.password)
    rc, result = self.daemon.status()
    try:
        # A negative substring length is the CVE-2010-0442 trigger.
        q = self.pgcnx.query(
            "SELECT substring(B'10101010101010101010101010101010101010101010101',33,-15);"
        )
    except:
        # Natty and older calculate the string length, and return a
        # result. Oneiric and newer actually error out if the length
        # is specified as negative.
        if self.lsb_release['Release'] < 11.10:
            self.assertTrue(
                False, "SELECT returned with error (likely vulnerable)")
        else:
            self.assertTrue(True, "SELECT should have returned an error!")
def __init__(self, dbname, year, filename = None):
    """Bind to *dbname* and set a fixed one-year window starting Jan 1 of *year*."""
    self.dtFormat = "%Y-%m-%d %H:%M:%S"
    self.dbname = dbname  # e.g., "weather"
    window_start = datetime(year, 1, 1, 0, 0, 0)
    self.start = window_start
    # NOTE(review): a flat 365*24-hour span ignores leap years — confirm intended.
    self.end = window_start + timedelta(hours=24 * 365)
    self.cnn = pg.connect(user="******", dbname=dbname,
                          port=settings.DATABASE_PORT)
def test_pgsql_config(defaults):
    """Open and return a connection using the db* keys of *defaults*."""
    conn_kwargs = {
        'dbname': defaults['dbname'],
        'user': defaults['dbuser'],
        'passwd': defaults['dbpasswd'],
        'host': defaults['dbhost'],
        'port': defaults['dbport'],
    }
    return pg.connect(**conn_kwargs)
def OpenPostgres(self):
    """Open self._conn to PostgreSQL using values read from the config file.

    Reads PGHost/PGDBName/PGUser/PGPwd via GetValue(); on failure the
    exception is logged and self._bError is set.  No return value — the
    connection is stored in self._conn.
    """
    result = None  ## To ensure we have a connection
    if (self._conn == 0):
        self._output.OutputInfo('Opening PostgreSQL connection')
        try:
            # Get the config settings
            sHost = GetValue(self._sConfigFile, 'PGHost', self._output)
            sDatabase = GetValue(self._sConfigFile, 'PGDBName', self._output)
            sUser = GetValue(self._sConfigFile, 'PGUser', self._output)
            sPwd = GetValue(self._sConfigFile, 'PGPwd', self._output)
            # Now connect
            self._conn = pg.connect(host=sHost, user=sUser, dbname=sDatabase, passwd=sPwd)
        except KeyboardInterrupt:
            # FIX: was 'Out.OutputError(...)' — an undefined name that would
            # raise NameError here; use the instance's output object, matching
            # the sibling OpenPostgres implementations in this codebase.
            self._output.OutputError('Keyboard interrupt detected', False)
            raise
        except:
            self._output.OutputError('On connecting to Postgres DB', True)
            self._output.OutputException(sys.exc_info(), True)
            self._bError = True
    return
def Scanner(user_1id, user_2id, second):
    """Print the overlap between tables u<user_1id> and u<user_2id>.

    Recurses once with swapped arguments when *second* == "0".
    NOTE(review): credentials are hard-coded and the ids are concatenated
    into SQL — injection risk if they are untrusted.
    """
    connection = pg.connect(dbname='info_db', host='localhost', user='******', passwd='givemehack')
    res1 = connection.query("select * from u" + str(user_1id)).getresult()
    res2 = connection.query("select * from u" + str(user_2id)).getresult()
    coincidence = 0
    coincidence_str = []
    excepts = 0
    i = 0
    # Count rows whose first column matches between the two result sets.
    for i in range(0, len(res1)):
        try:
            for j in range(0, len(res2)):
                if res1[i][0] == res2[j][0]:
                    coincidence += 1
                    coincidence_str.append(str(res1[i][1]))
        except:
            excepts += 1
    print("coincidence: " + str(coincidence))
    print("coincidence name: " + str(coincidence_str))
    print("excepts: " + str(excepts))
    #print("\nRes1: " + str(res1))
    #print("\nRes2: " + str(res2))
    if second == "0":
        Scanner(user_2id, user_1id, "1")
def index(req):
    """mod_python handler: build an HTML list of a student's current subjects.

    NOTE(review): account/semester/section values are concatenated into SQL
    and the DB password is hard-coded — parameterize and externalize.
    """
    #Extracting the POST variable
    form = req.form
    acct = form['userAccountID']
    # Connecting to Database...
    db = pg.connect('myEskwela', 'localhost', 5432, None, None, 'postgres', 'password')
    # Query for the Subject List using Stored Proc
    currentSem = get_current_semester(db)
    q0 = "SELECT get_current_subject('"+acct+"', '"+currentSem+"')"
    query0 = db.query(q0)
    result0 = query0.dictresult()
    html = '<ul id="student-subjects-list-object" data-role="listview" data-filter="true">'
    for sect in result0:
        section = sect['get_current_subject']
        q1 = "SELECT get_subject_code_of_section_code('"+section+"')"
        query1 = db.query(q1)
        result1 = query1.dictresult()
        subjectCode = result1[0]['get_subject_code_of_section_code']
        # Parse it to html
        subjSectionCode = subjectCode + "@" + section
        html = concat(html, generate_li_item(subjSectionCode, db))
    # Close DB
    # Return it...
    return html
def _conn_to_rs(self, opt=options, timeout=set_timeout_stmt, database=None):
    """Open a Redshift/PostgreSQL connection from this object's settings.

    Logs a scrubbed connection string, turns connect timeouts into a clearer
    fatal log, optionally applies *timeout* (a SET statement), and records
    it in self.database_timeouts.  Returns the connection.
    NOTE: opt/timeout defaults bind the module-level values at def time.
    """
    rs_conn_string = "host={host} port={port} dbname={db} user={user} password={password} {opt}".format(
        host=self.get_host(),
        port=self.get_port(),
        db=self.get_db(),
        password=self.get_password(
        ),  # First fetch the password because temporary password updates user!
        user=self.get_user(),
        opt=opt)
    logging.debug(GET_SAFE_LOG_STRING(rs_conn_string))
    try:
        # noinspection PyArgumentList
        rs_conn = connect(rs_conn_string)
    except pg.InternalError as ie:
        if hasattr(ie, 'args') and len(ie.args) > 0 \
                and ('Operation timed out' in ie.args[0] or 'timeout expired' in ie.args[0]):
            msg = 'Connection timeout when connecting to {h}:{p}.\n'
            msg += 'Make sure that firewalls and security groups allow connections.'
            logging.fatal(msg.format(h=self.get_host(), p=self.get_port()))
        else:
            logging.fatal(
                'Internal error encountered when trying to connect: {ie}'.
                format(ie=ie))
        # Re-raise the original exception type with its traceback preserved.
        raise sys.exc_info()[0](sys.exc_info()[1]).with_traceback(
            sys.exc_info()[2])
    # Only issue the SET when the configured timeout differs from the target.
    if self._configured_timeout is not None and not self._configured_timeout == timeout:
        rs_conn.query(timeout)
    # assumes database_timeouts[database] already exists — TODO confirm
    self.database_timeouts[database][opt] = timeout
    return rs_conn
def sql_own():
    """Connect to the 'swati' database; exits the whole process on failure."""
    try:
        # NOTE(review): empty user/passwd — relies on local trust auth.
        conn2 = pgsql.connect(dbname='swati',user='',passwd='')
    except:
        print "couldnot connect to sql server"
        sys.exit()
    return conn2
def OpenPostgres(self):
    """Return self._conn, opening the PostgreSQL connection on first use."""
    # Only connect when no connection has been established yet.
    if (self._conn == 0):
        self._output.OutputInfo('Opening PostgreSQL connection')
        try:
            # Connection parameters come straight from the instance attributes.
            self._conn = pg.connect(host=self.host,
                                    user=self.username,
                                    dbname=self.dbname,
                                    passwd=self.password,
                                    port=self.port)
        except KeyboardInterrupt:
            self._output.OutputError('Keyboard interrupt detected', False)
            raise
        except:
            # Any other failure is logged and flagged; caller gets the old value.
            self._output.OutputError('On connecting to Postgres DB', True)
            self._output.OutputException(sys.exc_info(), True)
            self._bError = True
    return self._conn
def getDBConnection():
    """Re-read db_conf and open a fresh connection to the IBSng database."""
    from core import db_conf
    # Reload so configuration edits are picked up without a restart.
    reload(db_conf)
    import pg
    return pg.connect("IBSng", db_conf.DB_HOST, db_conf.DB_PORT,
                      None, None, db_conf.DB_USERNAME, db_conf.DB_PASSWORD)
def process_query(self, query, s_page, e_page, s_price, e_price):
    """Crawl result pages s_page..e_page (price-filtered) and store new CIDs.

    Newly seen CIDs are appended to self.collected and inserted into the
    product table.
    SECURITY(review): DB credentials are hard-coded and the INSERT is built
    by concatenation — parameterize if *c* can be untrusted.
    """
    self.log(
        "CID collection process started (pages: " + str(e_page - s_page + 1) +
        ", p. interval: " + str(s_price) + "-" + str(e_price) + ").", True)
    self.log("Using bridge #" + str(self.c_index) + ".", False)
    conn = pg.connect(
        'itemcase', '50.116.1.34', 5432, None, None, 'postgres',
        base64.decodestring('ZmFzdE1vdmluZzJCcmVha0V2ZXJ5dGhpbmc='))
    for i in range(s_page, e_page + 1):
        r = self.crawl_page(self.query, i, s_price, e_price)
        for c in r.get_product_cids():
            # Only record CIDs not collected before.
            if not (c, ) in self.collected:
                self.collected.append((c, ))
                self.total_new_collected += 1
                self.log('CID #' + str(c) + " was collected.", False)
                conn.query(
                    'INSERT INTO product (cid_product, dt_collected, id_category) VALUES('
                    + c + ', \'' + str(datetime.date.today()) + '\', 328)')
    conn.close()
    self.log("CID collection process finished.", False)
    self.log(
        str(self.total_new_collected) +
        " new CIDs have been collected so far.\n", False)
def delete_duplicate_mb_albums():
    """Identify and delete duplicate albums

    Only delete those albums labeled by musicbrainz.
    We always keep the biggest (in bytes) complete album
    songs / features are preserved in offline directory.
    If the track times are sufficiently different from the published track
    times, we skip and recommend user delete by hand.  This is to avoid
    deleting a good import while leaving behind an erroneous import.
    See the Track class in model.py
    """
    #cannot figure out how to do this without a select :
    s = select([album.c.mb_id, func.count(album.c.mb_id)]).group_by(
        album.c.mb_id).having(func.count(album.c.mb_id) > 1)
    dupes = session.execute(s).fetchall()
    tt_std = 200. #hand set in matcher. But not so important..
    import pg
    dbmb = pg.connect('musicbrainz_db', user=config.DEF_DBUSER, passwd=config.DEF_DBPASS, host=config.DEF_DBHOST)
    for [mb_id, count] in dupes:
        # Skip obviously bogus/blank musicbrainz ids.
        if len(mb_id.strip()) < 10:
            continue
        dupealbums = Album.query.filter(
            func.length(Album.mb_id) > 10).filter_by(mb_id=mb_id)
        #look up track times. This requires two queries. One to translate the mb_id (their published text key)
        #into an mb_numeric_id (their internal key). Then the query against the mb_numeric_id
        mb_numeric_id = dbmb.query(
            "SELECT R.id FROM album as R, albummeta as AM WHERE R.gid = '%s' AND AM.id = R.id"
            % mb_id).getresult()[0][0]
        q = """SELECT T.length FROM track as T INNER JOIN albumjoin as AJ ON T.id = AJ.track INNER JOIN artist as A ON T.artist = A.id WHERE AJ.album = %i ORDER BY AJ.sequence""" % mb_numeric_id
        mbtrackresult = numpy.array(dbmb.query(q).getresult())
        # Published track lengths are in milliseconds; convert to seconds.
        mbtimes = numpy.array(mbtrackresult[:, ]).flatten() / 1000.
        bytes = list()
        timeterms = list()
        for a in dupealbums:
            ttimes = numpy.array(map(lambda t: t.secs, a.tracks))
            # df=abs(ttimes-mbtimes)
            time_term = numpy.mean(
                numpy.exp(-(mbtimes / 1000.0 - ttimes / 1000.0)**2 / tt_std))
            currbytes = 0
            for t in a.tracks:
                currbytes += t.bytes
            bytes.append(currbytes)
            timeterms.append(time_term)
        # Keep the largest import; only delete the rest when its track times
        # agree well enough with the published ones.
        keepidx = numpy.argmax(numpy.array(bytes))
        if timeterms[keepidx] < .9:
            print 'Not deleting', dupealbums[
                keepidx], 'because the time match is not very good. Do so by hand!'
            print ' Times to delete:', numpy.array(
                map(lambda t: t.secs, dupealbums[keepidx].tracks))
            print ' Times from MBrZ:', mbtimes
        else:
            for (idx, a) in enumerate(dupealbums):
                if idx <> keepidx:  # Python 2 '<>' inequality operator
                    print 'Deleting', a, timeterms[idx]
                    delete_album(a)
    dbmb.close()
def saveAllNames(group_id, offset):
    """Fetch ~1000 VK group members and store their follower counts.

    Inserts (uid, follower_count) rows into the followers table, then
    recurses with offset+998 for the next page.
    NOTE(review): no stopping condition is visible here — the recursion
    presumably ends when an index/HTTP error escapes; confirm intended.
    """
    offset = int(offset)
    address = "https://api.vk.com/method/groups.getMembers?group_id=" + group_id + "&fields=sex&access_token=" + admin_token + "&count=1000"
    data = urlopen(address)
    decoded_response = data.read().decode()
    final_data = json.loads(decoded_response)
    connection = pg.connect(dbname='info_db', host='localhost', user='******', passwd='givemehack')
    i = offset
    # The bare string below is a disabled (partially redacted) earlier
    # version of the loop, kept untouched.
    """while i < (offset + 998): tmp_a = final_data['response']['users'][i]['uid'] tmp_b = getFollowers(final_data['response']['users'][i]['uid']) connection.query("INSERT INTO followers VALUES(" + str(tmp_a) + ", " + str(tmp_b) + ")") print("now user: "******"""
    for i in range(offset, offset + 998):
        tmp_a = final_data['response']['users'][i]['uid']
        tmp_b = getFollowers(final_data['response']['users'][i]['uid'])
        connection.query("INSERT INTO followers VALUES(" + str(tmp_a) + ", " + str(tmp_b) + ")")
        #print("now user: " + str(i))
    connection.close()
    saveAllNames(group_id, offset + 998)
def connect_mbdb():
    """Open a connection to the local musicbrainz database.

    Returns a pgobject on success, or None when the connection cannot be
    established (the failure is reported on stdout).
    """
    try:
        conn = pg.connect('musicbrainz_db', 'localhost', -1, None, None,
                          USER, PASSWD)
    except TypeError as e:
        print('CONNECT_MBDB: type error, should not happen:', e)
        return None
    except SyntaxError as e:
        print('CONNECT_MBDB: syntax error, should not happen:', e)
        return None
    except pg.InternalError as e:
        print('CONNECT_MBDB, internal error:', e)
        return None
    # NOTE: queries using levenshtein() may additionally require the
    # fuzzystrmatch contrib module to be installed in the database.
    return conn
def __init__(self, dbname = ""):
    """Connect to *dbname* and prepare the weather/pyrgeometer data helpers."""
    self.c = pg.connect(user="******", dbname=dbname,
                        port=settings.DATABASE_PORT)
    self.weatherData = WeatherData()
    self.pyrgeometerData = PyrgeometerData()
def db_open():
    """Open a PostgreSQL connection using the sql_* configuration values."""
    def cfg(key):
        # Config values may carry stray whitespace; strip defensively.
        return get_config(key).strip()
    # Fetch in the same order the settings are declared.
    sql_host = cfg('sql_host')
    sql_user = cfg('sql_user')
    sql_passwd = cfg('sql_passwd')
    sql_db = cfg('sql_db')
    return pg.connect(host=sql_host, dbname=sql_db,
                      user=sql_user, passwd=sql_passwd)
def hasRecentImport():
    """Return (ok, hours_since_last_import, tolerance_hours).

    Duplicate of the other hasRecentImport in this file; checks the
    'weather' DB's import_times table against the 8-hour tolerance.
    """
    # how recent is recent? usually they are spaced by 6 hours, so give
    # some buffer too.
    tolerance_hours = 8.0
    dbname = "weather"
    query = "select max(date) from import_times;"
    last_import = hours_since_last_import = None
    ok = False
    now = datetime.utcnow().replace(second = 0 , microsecond = 0)
    # go to the DB and find out when the most recent import was
    c = pg.connect(user = "******", dbname = dbname, port = settings.DATABASE_PORT)
    r = c.query(query)
    print r.dictresult()
    last_import_str = r.dictresult()[0]["max"]
    if last_import_str is not None:
        last_import = datetime.strptime(last_import_str, "%Y-%m-%d %H:%M:%S")
    if last_import is not None:
        # was that recent enough?
        # NOTE(review): timedelta.seconds ignores .days, so gaps of more than
        # a day wrap around — confirm this is acceptable.
        hours_since_last_import = (now - last_import).seconds / (60.0 * 60.0)
        if hours_since_last_import < tolerance_hours:
            ok = True
    c.close()
    print "hasRecentImport: ", ok, hours_since_last_import, tolerance_hours
    return (ok, hours_since_last_import, tolerance_hours)
def main():
    """Connect to the capping DB, LISTEN on all cap channels, then dispatch.

    Python 2 daemon entry point; exits with status 1 on connect/LISTEN
    failure, otherwise hands the connection to check_notify().
    """
    sql_host = get_config('sql_host').strip()
    sql_user = get_config('sql_user').strip()
    sql_passwd = get_config('sql_passwd').strip()
    sql_db = get_config('sql_db').strip()
    print sql_host,sql_user,sql_passwd,sql_db
    try:
        conn = pg.connect(host=sql_host, dbname=sql_db, user=sql_user, passwd=sql_passwd)
    except:
        print 'Connection to DB %s failed.\n' % (sql_db)
        sys.exit(1)
    try:
        # Subscribe to every cap breach/update/status channel.
        conn.query('LISTEN CAP_BREACHED_DISABLE_HOUSEHOLD')
        conn.query('LISTEN CAP_BREACHED_DISABLE_USER')
        conn.query('LISTEN CAP_BREACHED_DISABLE_DEVICE')
        conn.query('LISTEN CAP_UPDATED_ENABLE_HOUSEHOLD')
        conn.query('LISTEN CAP_UPDATED_ENABLE_USER')
        conn.query('LISTEN CAP_UPDATED_ENABLE_DEVICE')
        conn.query('LISTEN CAP_STATUS_ENABLE_HOUSEHOLD')
        conn.query('LISTEN CAP_STATUS_ENABLE_USER')
        conn.query('LISTEN CAP_STATUS_ENABLE_DEVICE')
        # for n% notification.
        conn.query('LISTEN NOTIFY_USER')
    except:
        print 'LISTEN query failed'
        sys.exit(1)
    check_notify(conn)
def ajoute_db():
    """Populate the pitr-test DB: b_qty 'berger' rows, each with m_qty 'mouton' rows.

    Reads the two counts from sys.argv; names come from vote().  Exits via
    usage()/sys.exit when arguments are missing, so the guard below is
    equivalent to the original if/else.
    """
    if len(sys.argv) < 3:
        usage()
        sys.exit(0)
    b_qty = int(sys.argv[1])
    m_qty = int(sys.argv[2])
    connexion = pg.connect("dbname=pitr-test user=pitruser")
    for _berger in range(b_qty):
        # Allocate a berger id from its sequence, then insert the row.
        ident = int(
            connexion.query("SELECT nextval('berger_id_seq')").getresult()[0]
            [0])
        insert_query = "INSERT INTO berger(id,nom,prenom) VALUES (%d,'%s','%s');" % (
            ident, vote(), vote())
        connexion.query(insert_query)
        for _mouton in range(m_qty):
            # Each mouton gets its own id and a link row in troupeau.
            m_ident = int(
                connexion.query("SELECT nextval('mouton_id_seq')").getresult()
                [0][0])
            insert_query = "INSERT INTO mouton(id,surnom) VALUES (%d,'%s');" % (
                m_ident, vote())
            connexion.query(insert_query)
            connexion.query("INSERT INTO troupeau VALUES(%d,%d)" %
                            (ident, m_ident))
    connexion.close()
def connect_mbdb():
    """Connect to the local musicbrainz database.

    Returns the pg connection object, or None if the connection attempt
    fails (the error is printed to stdout).
    """
    args = ('musicbrainz_db', 'localhost', -1, None, None, USER, PASSWD)
    try:
        connect = pg.connect(*args)
    except TypeError as e:
        print('CONNECT_MBDB: type error, should not happen:', e)
        return None
    except SyntaxError as e:
        print('CONNECT_MBDB: syntax error, should not happen:', e)
        return None
    except pg.InternalError as e:
        print('CONNECT_MBDB, internal error:', e)
        return None
    # NOTE: levenshtein() queries may additionally require the fuzzystrmatch
    # contrib module installed in the database.
    return connect
def DoLast(user_id):
    """Save a VK user's group subscriptions into per-user table me<user_id>.

    Creates the table first; if creation fails the whole routine is skipped
    with a message.  Rows that fail to insert (bad keys/types) are skipped.
    NOTE(review): 'adata.subscriptions_amount' is module state, not derived
    from this response — confirm the two stay in sync.
    """
    address = "https://api.vk.com/method/users.getSubscriptions?user_id=" + str(
        user_id) + "&extended=1&count=9999"
    data = urlopen(address)
    decoded_response = data.read().decode()
    final_data = json.loads(decoded_response)
    connection = pg.connect(dbname='we', host='localhost', user='******', passwd='givemehack')
    try:
        connection.query('CREATE TABLE me' + str(user_id) + ' (group_id text,group_name text);')
        i = 0
        print(str(adata.subscriptions_amount))
        while i < int(adata.subscriptions_amount):
            print("now(save): " + str(i))
            try:
                connection.query("INSERT INTO me" + str(user_id) + " VALUES(" + "'" + str(final_data['response'][i]['gid']) + "'" + ", " + "'" + str(final_data['response'][i]['name']) + "'" + ")")
            except (KeyError, TypeError):
                pass
            i = i + 1
    except:
        print("This table was found!")
def listHours(caseNum):
    """Print an HTML <select> of timestamps covering a case (Python 2 CGI).

    Reads the case's start/end times from the severe2 DB (in GMT) and emits
    one <option> per step; 'w'-prefixed cases step 3 hours, others 1 hour.
    """
    import pg, mx.DateTime, time
    advdb = pg.connect('severe2', 'localhost', 5432)
    advdb.query("SET TIME ZONE 'GMT' ")
    cases = advdb.query("select date_part('hour', age(endtime, starttime)), starttime, endtime from cases WHERE casenum = '"+caseNum+"' ").getresult()
    starttime = cases[0][1]
    endtime = cases[0][2]
    # Convert the ISO timestamps to epoch seconds for the stepping loop.
    startDate = mx.DateTime.ISO.ParseDateTime(starttime)
    startSecs = startDate.gmticks()
    endDate = mx.DateTime.ISO.ParseDateTime(endtime)
    endSecs = endDate.gmticks()
    multi = 1
    if caseNum[0] == "w":
        multi = 3
    now = startSecs
    print '<SELECT name="zticks" size="10">'
    while ( now < endSecs ):
        thisTuple = time.gmtime(now)
        print '<option value="'+str(int(float(now)))+'">'+time.strftime("%b %d, %Y %HZ", thisTuple)
        now = now + multi*3600
    print '</SELECT>'
def do(argDict, selStr0):
    """Query output_opsim3_61 (Python 2); duplicate of the other do() here.

    Builds a WHERE clause from *argDict*, selects *selStr0* columns for the
    first obshistid of each expdate, and returns (list-of-dicts, db name).
    """
    print 'Dumping argDict:'
    for k in argDict:
        print k, argDict[k]
    verifyArgs(argDict)
    whereStr = constructSQLWhere(argDict)
    print 'WHERE str is: \n ' + whereStr
    # Subselect keeps the earliest obshistid per expdate; joined back below.
    fromStr = '(select min(obshistid) obshistid from output_opsim3_61 ' +\
        '%s group by expdate) a,' % whereStr
    matchStr = 'output_opsim3_61 b where a.obshistid = b.obshistid;'
    dBName0 = 'cosmoDB.11.19.2009'
    res = pg.connect(host='deathray.astro.washington.edu', user='******', dbname=dBName0, passwd='cosmouser')
    # Example of the full hand-written query this mirrors:
    #b.rotSkyPos, b.rotTelPos, b.sunalt, b.sunaz, b.rawseeing, b.seeing,
    #b.filtsky, b.dist2moon, b.moonalt, b.phaseangle, b.miescatter,
    #b.moonillum, b.darkbright, b.perry_skybrightness from (select
    #min(obshistid) obshistid from output_opsim3_61 where fielddec between
    #-0.033 and 0.033 and fieldra between 0.161 and 6.122 and expmjd
    #between 49353. and 49718. group by expdate) a, output_opsim3_61 b
    #where a.obshistid = b.obshistid;
    query = 'SELECT %s FROM %s %s' % ( selStr0, fromStr, matchStr)
    print 'Query: %s' % query
    results = res.query(query)
    dictRes = results.dictresult()
    print 'Got %i results' % len(dictRes)
    #dictRes['DBQueried'] = dBName
    #print dictRes
    return dictRes, dBName0
def get_connection(host):
    """Open a connection to *host* using module-level port/dbname/dbuser/dbpassword."""
    conninfo = "host=%s port=%s dbname=%s user=%s password=%s" % (
        host, port, dbname, dbuser, dbpassword)
    # The whole conninfo string is handed over via the dbname parameter,
    # which libpq-style connect functions accept.
    connection = pg.connect(dbname=conninfo)
    # A statement timeout could be applied here, e.g.:
    #   connection.query("set statement_timeout = 1200000")
    return connection
def OpenPostgres(self):
    """Lazily open and return the PostgreSQL connection handle."""
    if (self._conn == 0):
        self._output.OutputInfo('Opening PostgreSQL connection')
        try:
            # Gather the parameters, then connect in one shot.
            conn_args = dict(host=self.host, user=self.username,
                             dbname=self.dbname, passwd=self.password,
                             port=self.port)
            self._conn = pg.connect(**conn_args)
        except KeyboardInterrupt:
            self._output.OutputError('Keyboard interrupt detected', False)
            raise
        except:
            # Log and flag every other failure; the stale value is returned.
            self._output.OutputError('On connecting to Postgres DB', True)
            self._output.OutputException(sys.exc_info(), True)
            self._bError = True
    return self._conn
def conn_to_rs(host, port, db, usr, pwd, opt=options, timeout=set_timeout_stmt): rs_conn_string = """host=%s port=%s dbname=%s user=%s password=%s %s""" % (host, port, db, usr, pwd, opt) print "Connecting to %s:%s:%s as %s" % (host, port, db, usr) rs_conn = pg.connect(dbname=rs_conn_string) rs_conn.query(timeout) return rs_conn
def dbConn(opts):
    """Open a PostgreSQL connection from parsed option values.

    *opts* is an option-parser result (or any object) carrying db_database,
    db_username, db_password, db_host and db_port attributes.

    :returns: a pg connection object.
    :raises PyTis.ConfigurationError: when the connection fails; the pg
        error text is cleaned up first (the python pg package can report
        the same ``FATAL:`` line twice, so duplicates are collapsed).
    """
    try:
        return pg.connect(dbname=opts.db_database,
                          user=opts.db_username,
                          passwd=opts.db_password,
                          host=opts.db_host,
                          port=opts.db_port)
    except pg.InternalError as err:
        raw = str(err)
        stripped = [piece.replace('FATAL: ', '') for piece in raw.split("\n")]
        deduped = PyTis.unique(stripped)
        if len(deduped) == len(stripped):
            # No duplicated lines: keep the original text, minus prefixes.
            message = raw.strip("\n").replace('FATAL: ', '')
        else:
            message = "\n".join(deduped).strip("\n").replace('FATAL: ', '')
        raise PyTis.ConfigurationError(str(message))
def Main():
    """CGI entry point: dump weather observations as an HTML <PRE> table.

    Reads start/end date, station, database and data_type from the CGI
    form, queries one per-station-per-year table for each day in the
    range, and prints the accumulated rows.
    """
    form = cgi.FormContent()
    start_year = form["start_year"][0]
    end_year = form["end_year"][0]
    start_month = form["start_month"][0]
    end_month = form["end_month"][0]
    start_day = form["start_day"][0]
    end_day = form["end_day"][0]
    station = form["station"][0]
    db = form["db"][0]
    data_type = form["data_type"][0]
    mydb = pg.connect(db)
    # data_type arrives as "a,b"; after the split data_type[0]/[1] are used
    # below.  NOTE(review): if the form value contains no comma,
    # data_type[1] raises IndexError -- verify against the form generator.
    data_type = re.split(',', data_type, 1)
    start_tuple = (int(start_year), int(start_month), int(start_day), 0, 0, 0, 0, 0, 0)
    end_tuple = (int(end_year), int(end_month), int(end_day), 0, 0, 0, 0, 0, 0)
    start_secs = time.mktime(start_tuple)
    end_secs = time.mktime(end_tuple)
    if start_secs > end_secs:
        style.SendError("Go back and check your dates.")
    days = int((end_secs - start_secs)/86400)
    final = []
    # NOTE: the loop index `day` is deliberately rebound to a day-of-month
    # string inside the body -- safe only because the index is not reused.
    for day in range(0, days+1):
        this_sec = start_secs + (86400 * day)
        local = time.localtime(this_sec)
        year = time.strftime("%Y", local)
        month = time.strftime("%m", local)
        day = time.strftime("%d", local)
        # Tables are partitioned per station and year ("sation" is a typo
        # kept as-is for byte-compatibility).
        sation = station+"_"+year
        # NOTE(review): SQL is assembled by concatenating CGI input --
        # SQL-injection risk; parameterise if the driver allows.
        if db == "campbell_hourly":
            select = mydb.query("SELECT yeer, month, day, tod, "+data_type[0]+" from "+sation+" WHERE (yeer = '"+year+"' AND month = '"+month+"' AND day = '"+day+"')")
        else:
            select = mydb.query("SELECT yeer, month, day, "+data_type[1]+" from "+sation+" WHERE (yeer = '"+year+"' AND month = '"+month+"' AND day = '"+day+"')")
        select = select.getresult()
        final = final + select
    print 'Content-type: text/html \n\n'
    print '<PRE>'
    if db == "campbell_hourly":
        print 'Station Year \tMonth \tDay \tHour\t'+index_dict[data_type[1]]+'\n'
    else:
        print 'Station Year \tMonth \tDay \t'+index_dict[data_type[1]]+'\n'
    select = final
    # select.sort()
    for i in range(len(select)):
        print station[3:8]+"\t",
        for j in range(len(select[i])):
            print select[i][j],"\t",
        print
    print '</PRE>'
def migrate_photo(id): global DB, HOST, DB_USER, DB_PASS, photo_table, photo_seq_table # Connect to MySQL dbmy = MySQLdb.connect(db=DB, host=HOST, user=DB_USER, passwd=DB_PASS) mycur = dbmy.cursor() mycur.execute("select description, img_data, tnimg_data, filename, filesize, filetype from %s where id='%d'" % (photo_table, id)) # Fetch data desc, Fdata, TNdata, Fname, Fsize, Ftype = mycur.fetchone() print "Transfering %s (Name: %s - Size: %s - Type: %s)" % (desc, Fname, Fsize, Ftype) # Close MySQL connexion dbmy.close() # Connect to PostgreSQL dbpg = pg.connect(dbname=DB, host=HOST, user=DB_USER, passwd=DB_PASS) # Copy images to PostgreSQL dbpg.query("begin") Olo = dbpg.locreate(pg.INV_WRITE) TNlo = dbpg.locreate(pg.INV_WRITE) # Insert data req = dbpg.query("insert into %s (description, img_data, tnimg_data, filename, filesize, filetype) values ('%s', '%s', '%s', '%s', '%s', '%s')" % (photo_table, desc, Olo.oid, TNlo.oid, Fname, Fsize, Ftype)) # Write data to Large Object Olo.open(pg.INV_WRITE) Olo.write(Fdata) Olo.close() TNlo.open(pg.INV_WRITE) TNlo.write(TNdata) TNlo.close() # Commit changes dbpg.query("commit") last_id = dbpg.query("select currval('%s')" % photo_seq_table).getresult()[0][0] print "%s transfered with id #%d" % (Fname, last_id) # Close PostgreSQL connexion dbpg.close() return last_id
def odbconnect(options):
    """Open the *origin* database connection (MySQL or PostgreSQL) and
    store it in the module-level ``odb_cn``; returns the connection, or 0
    on failure.

    NOTE(review): this function is DAMAGED -- credential redaction (the
    "******" runs below) replaced real code, swallowing at least a prompt
    string, a server-comparison test, a commented-out raw_input line, a
    ``try:`` and the ``if options.odriver == "mysql":`` test.  The
    surviving tokens are kept verbatim; the indentation here is a
    best-effort reconstruction and the block does NOT parse until it is
    restored from version control.
    """
    global ddb_cn, odb_cn
    if not options.opasswd:
        # Redacted fragment: originally a getpass prompt, followed (after a
        # comparison of origin vs destination servers) by this warning.
        options.opasswd = getpass.getpass("oPassword:"******"WARN: El servidor de destino no es el mismo que el de origen."
        print options.ddriver, options.odriver
        print options.dhost, options.ohost
        print options.dport, options.oport
        print options.duser, options.ouser
        options.samedatabase = False
    # Redacted fragment: was a commented-out raw_input plus (presumably)
    # ``try:`` and ``if options.odriver == "mysql":``.
    # options.opasswd = raw_input("Password: "******"mysql":
            cn = _mysql.connect(
                db=options.odb,
                port=int(options.oport),
                host=options.ohost,
                user=options.ouser,
                passwd=options.opasswd
            )
            cn.set_character_set("UTF8")  # force the session encoding to UTF8
        # If the connection is to Postgres, connect as follows
        else:
            cn = pg.connect(
                dbname=options.odb,
                port=int(options.oport),
                host=options.ohost,
                user=options.ouser,
                passwd=options.opasswd,
            )
            cn.query("SET client_encoding = 'UTF8';")
            cn.query("SET client_min_messages = warning;")
    except:
        print (
            "Error trying to connect to *origin* database '%s' in host %s:%s"
            " using user '%s'"
            % (options.odb, options.ohost, options.oport, options.ouser)
        )
        return 0
    if options.debug:
        print (
            "* Succesfully connected to *origin* database '%s' in host %s:%s"
            " using user '%s'"
            % (options.odb, options.ohost, options.oport, options.ouser)
        )
    odb_cn = cn
    return cn
def dbconn(self):
    """Open and return a fresh PostgreSQL connection.

    Connection parameters come from the instance attributes ``db``,
    ``host``, ``db_user`` and ``db_pass``; the call is logged first.
    """
    self.log.notice("[PgNewsHandler]: dbconn")
    return pg.connect(dbname=self.db,
                      host=self.host,
                      user=self.db_user,
                      passwd=self.db_pass)
def __init__(self, host=None, database=None, username=None, debuglevel=0): self.set_debug(debuglevel) try: self.__database = pg.connect(host=host, dbname=database, user=username) if self.__debuglevel > 0: self.sql_message("Connected to Host [%s] DataBase [%s] Username [%s]" % (host, database, username)) except pg.error, msg: self.fatal_message("Unable to connect to Host [%s] DataBase [%s] Username [%s] ==> [%s]" % (host, database, username, msg))
def open_database(dbname="pykota") :
    """Returns the database object or None if we can't connect to it.

    :param dbname: name of the PostgreSQL database (default "pykota").

    NOTE(review): the user argument was redacted ('******').  Also, on
    success the function falls off the end without returning ``pykotadb``
    -- the success-path return appears to have been lost, so as written
    the function always returns None.  TODO confirm against the original.
    """
    try :
        pykotadb = pg.connect(host="localhost", port=5432, dbname=dbname, user="******")
    except pg.error, msg :
        sys.stderr.write("%s\n" % msg)
        sys.stderr.write("Unable to connect to the local PostgreSQL server.\nPlease modify the open_database() method in %s\nto connect to the correct PostgreSQL server\nand relaunch the script.\n" % sys.argv[0])
        return
def get_entry(caseNum, className):
    """Return the intro comments for a case, preferring the class database.

    Looks the case up in the shared database (module-level ``mydb``) first,
    then tries the class-specific database ``svr_<className>``; the class
    copy wins whenever it is reachable.

    NOTE(review): caseNum/className are concatenated straight into SQL --
    injection risk if they come from user input; parameterise if possible.
    """
    entry1 = mydb.query("SELECT comments from intro WHERE case_num = '"+caseNum+"' ").getresult()
    try:
        ldb = pg.connect('svr_'+className)
        entry2 = ldb.query("SELECT comments from intro WHERE case_num = '"+caseNum+"' ").getresult()
        return entry2
    except:
        # Any failure (missing class db, query error) falls back to the
        # shared-database result.
        return entry1
def relink(self):
    """Re-open the database connection using the stored parameters.

    On success ``self.Link`` holds the new pg connection and, unless
    auto-commit is enabled, a transaction is opened immediately.  On
    failure ``self.Link`` is set to False and ``self.Err`` to the error
    text -- callers must check ``self.Link`` before use.
    """
    try:
        self.Link = pg.connect(dbname=self.database, host = self.dbhost, user = self.dbuser, passwd = self.dbpwd,port=self.dbport)
        if not self.ifAutoCommit:
            self.Link.query("begin")
    except Exception, e:
        self.Err=e.args[0]
        self.Link=False
def configConnect(configFile, operation): global connection print "Establish connection.." configLines = open(configFile,"r").readlines() #store lines into a dict config = {} inOperation = 0 for i in configLines: #allow documentation and empty string, match operation #print operation,i[:-1] if i[0] == ">": if i[1:-1] != operation: if inOperation: break else: continue elif i[1:-1] == operation: inOperation = 1 continue #assign values to dict if inOperation and i[0] != "#" and i != "" and i != "\n": if i[-1] == "\n": config[i[:i.find(" ")]] = i[i.find(" ")+1:-1] else: config[i[:i.find(" ")]] = i[i.find(" ")+1:] if config.has_key("pass"): connection = pg.connect(dbname = config["dbname"], host = config["host"], port = int(config["port"]), user = config["user"], passwd = config["pass"]) else: connection = pg.connect(dbname = config["dbname"], host = config["host"], port = int(config["port"]), user = config["user"]) print " Host = "+ connection.host print " DB = "+ connection.db #print " Port = %i" % connection.port #print " Status = %i\n" % connection.status return config
def psql_all_data():
    """Fetch every flux record plus the distinct model ids from fluxvals.

    :returns: [flux_rows, model_ids] -- two pg query-result objects.

    NOTE(review): host and credentials are hard-coded here -- move them to
    configuration.
    """
    import pg
    conn = pg.connect("odetta", "scidb1.nersc.gov", 5432, None, None, "odetta_admin", "spectronic20")
    flux_rows = conn.query(
        "SELECT m_id,wavelength,luminosity,photon_count from fluxvals")
    model_ids = conn.query("SELECT distinct m_id from fluxvals")
    return [flux_rows, model_ids]
def connect(name):
    """Connect to the iemdatabase with the given name.

    Uses the central "iemdb" host, except for the developer account
    "akrherz" which talks to a local server.

    :param name: database name to connect to.
    :returns: a pg connection object.

    Fix: ``os.environ["USER"]`` raised KeyError when USER is unset (e.g.
    under cron or a webserver); ``os.environ.get`` returns None there and
    falls through to the default host, preserving behavior when USER is set.
    """
    dbhost = "iemdb"
    if os.environ.get("USER") == "akrherz":
        dbhost = "localhost"
    return pg.connect(name, dbhost)
def listGoodCases(selected = 'null'):
    """Print an HTML <SELECT> listing every base case with its start date.

    Case numbers come from the 'basecases' table in severe2_adv; details
    (start/end times) come from the 'cases' table in severe2.

    :param selected: unused -- apparently kept for caller compatibility
        (TODO confirm).
    """
    import pg, mx.DateTime
    advdb = pg.connect('severe2_adv', 'localhost', 5432)
    basedb = pg.connect('severe2', 'localhost', 5432)
    cases1 = advdb.query("SELECT * from basecases").dictresult()
    print '<SELECT name="caseNum" size="10">'
    for i in range(len( cases1 )):
        # NOTE(review): casenum is interpolated into SQL -- fine only if
        # the basecases table is trusted.
        cases = basedb.query("SELECT * from cases WHERE casenum = '"+cases1[i]['casenum']+"' ").dictresult()
        thisCase = cases[0]["casenum"]
        thisStart = cases[0]["starttime"]
        thisEnd = cases[0]["endtime"]   # NOTE: fetched but never used
        startDate = mx.DateTime.ISO.ParseDateTimeGMT(thisStart)
        print '<option value="'+thisCase+'">'+thisCase+' -- '+startDate.strftime("%d %B %Y")
    print '</SELECT>'
def sqlconn(): try: conn = pgsql.connect(dbname=sql_db,host=sql_host,user=sql_user,passwd=sql_passwd) #cursor = conn.cursor() except: print "Could not connect to sql server" sys.exit() return conn
def configConnect(configFile, operation):
    """Open a PostgreSQL connection described by one section of a config file.

    The file contains sections introduced by ">name" lines; within the
    section named *operation*, every non-comment, non-empty line is a
    "key value" pair.  The keys dbname/host/port/user (and optionally
    "pass") are used to connect; the resulting connection is stored in the
    module-level ``connection``.

    :param configFile: path of the configuration file.
    :param operation: name of the section to use.
    :returns: the parsed key/value dict for that section.

    Fix: ``dict.has_key`` does not exist in Python 3, although this
    variant already uses print(); replaced with ``"pass" in config``,
    which behaves identically on Python 2 as well.
    """
    global connection
    print ("Establish connection..")
    configLines = open(configFile,"r").readlines()
    #store lines into a dict
    config = {}
    inOperation = 0
    for i in configLines:
        #allow documentation and empty string, match operation
        #print operation,i[:-1]
        if i[0] == ">":
            if i[1:-1] != operation:
                if inOperation:
                    break   # we are past our section: stop reading
                else:
                    continue
            elif i[1:-1] == operation:
                inOperation = 1
                continue
        #assign values to dict (strip the trailing newline when present)
        if inOperation and i[0] != "#" and i != "" and i != "\n":
            if i[-1] == "\n":
                config[i[:i.find(" ")]] = i[i.find(" ")+1:-1]
            else:
                config[i[:i.find(" ")]] = i[i.find(" ")+1:]
    if "pass" in config:
        connection = pg.connect(dbname = config["dbname"], host = config["host"], port = int(config["port"]), user = config["user"], passwd = config["pass"])
    else:
        connection = pg.connect(dbname = config["dbname"], host = config["host"], port = int(config["port"]), user = config["user"])
    print (" Host = "+ connection.host)
    print (" DB = "+ connection.db)
    #print " Port = %i" % connection.port
    #print " Status = %i\n" % connection.status
    return config
def get_connection(host):
    """Open a connection to the Redshift cluster at *host*.

    port, dbname, dbuser and dbpassword are module-level globals; only the
    host varies per call.  A success message is printed once connected.
    """
    conninfo = "host=%s port=%s dbname=%s user=%s password=%s" % (
        host, port, dbname, dbuser, dbpassword)
    # The whole conninfo travels via the dbname keyword -- presumably libpq
    # treats a dbname containing '=' as a conninfo string; verify.
    connection = pg.connect(dbname=conninfo)
    # connection.query("set statement_timeout = 1200000")
    # "establised" (sic) preserved -- the message text is runtime output.
    print("Connection to Redshift cluster %s establised successfully" % host)
    return connection
def __init__(self, dbname, host, port, user, passwd):
    """Open a PostgreSQL connection and remember its parameters.

    The connection is established first (so a failure leaves the instance
    unpopulated), then every parameter is stored on the instance and
    ``isOpen`` is set to True.
    """
    self._database = pg.connect(dbname=dbname,
                                host=host,
                                port=port,
                                user=user,
                                passwd=passwd)
    # Keep the connection parameters for later inspection/reconnect.
    self.dbname = dbname
    self.host = host
    self.port = port
    self.user = user
    self.passwd = passwd
    self.isOpen = True