def get_new_connection(self, conn_params):
    """Opens a connection to the database."""
    # Parameters derived from settings override whatever the caller passed in.
    conn_params = self.get_connection_params()
    autocommit = conn_params.get('autocommit', True)
    # A pre-built connection string bypasses the assembly below entirely.
    if 'connection_string' in conn_params:
        return Database.connect(conn_params, autocommit=autocommit)
    parts = []
    if 'dsn' in conn_params:
        parts.append('DSN=%s' % conn_params['dsn'])
    else:
        driver = conn_params['driver']
        # An absolute path points at a driver library file; otherwise it is a
        # registered ODBC driver name and must be wrapped in braces.
        template = 'DRIVER=%s' if os.path.isabs(driver) else 'DRIVER={%s}'
        parts.append(template % driver)
        parts.append('SERVER=%s' % conn_params['host'])
        if 'port' in conn_params:
            parts.append('PORT=%s' % conn_params['port'])
    if conn_params['database']:
        parts.append('DATABASE=%s' % conn_params['database'])
    if conn_params['user']:
        parts.append('UID=%s;PWD=%s' % (conn_params['user'], conn_params['password']))
    else:
        # No user configured: fall back to Windows integrated authentication.
        parts.append('Integrated Security=SSPI')
    return Database.connect(';'.join(parts), autocommit=autocommit, unicode_results=True)
def return_count(mType, mat, rad, podl):
    """Return the number of free, usable mirrors or substrates of a given kind.

    mType -- mirror type code: "pz" (piezo), "p" (flat) or "s" (sphere)
    mat   -- substrate material code: "z" or "si"
    rad   -- nominal sphere radius as a string; only used when mType == "s"
    podl  -- 0 to count mirrors (DSN zerki_current), anything else to count
             substrates (DSN podl_local)

    Raises ValueError for unknown mType/mat codes (the original silently hit a
    NameError on unbound locals in that case).
    """
    if mType == "pz":
        mirrorType = "пьезо"
        radius = "Is Null"
    elif mType == "p":
        mirrorType = "плоское"
        radius = "Is Null"
    elif mType == "s":
        mirrorType = "сфера"
        radius = u"= " + rad
    else:
        raise ValueError("unknown mirror type: %r" % (mType,))
    if mat == "z":
        material = "='z'"
    elif mat == "si":
        material = "Is Null"
    else:
        raise ValueError("unknown material: %r" % (mat,))
    # NOTE(review): `rad` is interpolated directly into the SQL text (MS Access
    # ODBC; criteria fragments like "Is Null" cannot be bound parameters).
    # Callers must pass trusted values only.
    if podl == 0:
        db = pyodbc.connect('DSN=zerki_current')
        sqlQuery = u"SELECT Count(Zerki.[нПодложки]) AS count FROM Zerki WHERE (((Zerki.[Вкомп])='свободно') AND ((Zerki.[МесХран]) Is Not Null) AND ((Zerki.[Ячейка]) Is Not Null) AND ((Zerki.[Тзер])='"+mirrorType+"') AND ((Zerki.[Материал подл]) "+material+") AND ((Zerki.[ГОтказ])='годен') AND ((Zerki.[Рсферы]) "+radius+"));"
    else:
        db = pyodbc.connect('DSN=podl_local')
        sqlQuery = u"SELECT count(PODL.[нПодл]) AS count FROM PODL WHERE ((Not (PODL.[Завод])='оптик пента') AND ((PODL.[Воз])='свободно') AND ((PODL.[Тзер])='"+mirrorType+"') AND ((PODL.[№кор]) Is Not Null) AND ((PODL.[Место]) Is Not Null) AND ((PODL.[Материал]) "+material+") AND ((PODL.[РсферыНом])"+radius+"));"
    try:
        cursor = db.cursor()
        cursor.execute(sqlQuery)
        data = cursor.fetchall()
        return data[0][0]
    finally:
        # The original leaked the connection on every call.
        db.close()
def __init__(self, ms_con_str, pgres_con_str, log_file=None, commit_freq=1000):
    """initializer

    arguments:
    ms_con_str -- string to connect to SQL Server database
    pgres_con_str -- string to connect to PostgreSQL database
    log_file -- path of file to log transactions. Use None to turn off logging.
    commit_freq -- number of rows to move before committing and logging each
                   transaction
    """
    self.__cxn_ms = pyodbc.connect(ms_con_str)
    self.__cxn_pgres = pyodbc.connect(pgres_con_str)
    # BUG FIX: default the flag to False so that __log (which presumably
    # consults it -- not visible here, confirm) does not hit an AttributeError
    # when logging is disabled; the original only set it when log_file was given.
    self.__log_is_file = False
    if log_file is not None:
        self.__log_file = open(log_file, 'a')
        self.__log_is_file = True
    self.__commit_freq = commit_freq
    self.__log('Connection initialized:')
    self.__log('SQL Server Connection String: {}'.format(ms_con_str))
    self.__log('PostgreSQL Connection String: {}'.format(pgres_con_str))
    self.__log('')
    self.__log('Commit Frequency: {}'.format(commit_freq))
    self.__log('')
def importSEMSchedules(xlFile, DBFile):
    """Copy SEM schedule rows from an Excel workbook into an Access table.

    xlFile -- path to the source .xls/.xlsx workbook (data on Sheet1)
    DBFile -- path to the target .mdb Access database

    Drops and recreates the [SEM Schedules-Import] table, then inserts one row
    per spreadsheet row.
    """
    conn = pyodbc.connect('Driver={Microsoft Excel Driver (*.xls, *.xlsx, *.xlsm, *.xlsb)}; Dbq=' + xlFile, autocommit=True)
    cursor = conn.cursor()
    xlsStore = []
    for row in cursor.execute('SELECT * FROM [Sheet1$]'):
        print(row.Description)
        # row[2] is the "runs on" column, addressed by index (its header name
        # is not visible here).
        xlsStore.append([row.Schedule, row.Description, row[2], row.FOLLOWS])
    cursor.close()
    conn.close()
    # Access
    # BUG FIX: the original referenced undefined name 'DBfile' (parameter is
    # 'DBFile'), raising NameError on every call.
    conn = pyodbc.connect('DRIVER={Microsoft Access Driver (*.mdb)};DBQ=' + DBFile)
    cursor = conn.cursor()
    try:
        cursor.execute("DROP TABLE [SEM Schedules-Import]")
        conn.commit()
    except pyodbc.Error:
        # Table absent on first run -- expected; narrowed from a bare except.
        print('INFORMATION: All Master-Import table did not exist. As expected, attempt to delete it failed' + '\n')
    cursor.execute("""
        CREATE TABLE [SEM Schedules-Import]
        (id COUNTER CONSTRAINT C1 PRIMARY KEY,
         Schedule MEMO,
         description MEMO,
         RunsOn MEMO,
         FOLLOWS MEMO);
        """)
    conn.commit()
    # Create table from Excel in Access
    for row in xlsStore:
        cursor.execute("""
            INSERT INTO [SEM Schedules-Import]
            (Schedule, Description, RunsOn, FOLLOWS)
            VALUES (?,?,?,?)
            """, [row[0], row[1], row[2], row[3]])
    conn.commit()
    cursor.close()
    conn.close()
def createConn(self, profile):
    """Create and return a DB connection described by `profile`.

    profile -- dict with at least 'servertype' plus backend-specific keys:
               oracle: host, sid, user, password
               mssql/mysql: host, user, password, optional database
               sqlite: file
    Returns None for unrecognized server types. Backend modules are imported
    lazily so unused drivers need not be installed.
    """
    conn = None
    server_type = profile["servertype"]
    if server_type == "oracle":
        import cx_Oracle
        # On GBK systems tell the Oracle client to use the matching NLS
        # encoding, otherwise non-ASCII data is garbled.
        codepage = sys.getdefaultencoding()
        if codepage == "gbk":
            os.environ['NLS_LANG'] = "SIMPLIFIED CHINESE_CHINA.ZHS16GBK"
        dns_tns = cx_Oracle.makedsn(profile["host"], 1521, profile["sid"])
        conn = cx_Oracle.connect(profile["user"], profile["password"], dns_tns)
    elif server_type == "mssql":
        import pyodbc
        # Idiom fix: `is not None` instead of `!= None`.
        if profile.get("database") is not None:
            conn = pyodbc.connect(driver='{SQL Server}', server=profile["host"],
                                  database=profile["database"],
                                  uid=profile["user"], pwd=profile["password"])
        else:
            conn = pyodbc.connect(driver='{SQL Server}', server=profile["host"],
                                  uid=profile["user"], pwd=profile["password"])
    elif server_type == "mysql":
        import MySQLdb
        if profile.get("database") is not None:
            conn = MySQLdb.connect(host=profile["host"], user=profile["user"],
                                   db=profile["database"], passwd=profile["password"],
                                   charset="utf8", use_unicode=True)
        else:
            conn = MySQLdb.connect(host=profile["host"], user=profile["user"],
                                   passwd=profile["password"],
                                   charset="utf8", use_unicode=True)
    elif server_type == "sqlite":
        import sqlite3 as sqlite
        conn = sqlite.connect(profile["file"])
    return conn
def __init__(self, server='rotr5ws1', database='omMeas', timeout=120, user=None, debug=False):
    # Open a DB connection: MySQLdb on Linux (detected via 'ELF' in `arch`),
    # pyodbc/ODBC otherwise (assumed Windows). Also records the paramstyle
    # placeholder to use with executemany for the chosen driver.
    # NOTE(review): Python 2 syntax (`except ..., e`, print statements).
    if re.search('ELF', arch):  # Linux
        self.executemany_symbol = "%s"  # MySQLdb 'format' paramstyle
        if user == None:
            # assume pre-setup connection in ODBC settings
            self.db = MySQLdb.connect(host=server, user='******', passwd='tiger', db=database, connect_timeout=timeout)
        else:
            # Prompt interactively rather than taking the password as an argument.
            password = getpass.getpass()
            self.db = MySQLdb.connect(host=server, user=user, passwd=password, db=database, connect_timeout=timeout)
    else:  # assume Windows
        self.executemany_symbol = "?"  # ODBC 'qmark' paramstyle
        try:
            if user == None:
                # assume pre-setup connection in ODBC settings
                self.db = pyodbc.connect("server="+server+";dsn="+database+";timeout="+str(timeout))
            else:
                password = getpass.getpass()
                self.db = pyodbc.connect("server="+server+";dsn="+database+";timeout="+str(timeout)+";uid="+user+";pwd="+password)
        except pyodbc.Error, e:
            print traceback.print_exc()
            if e.args[0] == 'IM002':  # no driver error
                print \
"""
==========================================================================================
ERROR: ODBC Driver NOT FOUND. Please Install MySQL ODBC Connector to your local Computer.
==========================================================================================
"""
            # Re-raise: connection failure is fatal either way.
            raise e
def open(self) -> bool:
    """Open a pyodbc connection to self.dsn.

    Uses a trusted (integrated) connection when self.integrated == 'Y',
    otherwise authenticates with self.password. Returns True on success;
    on failure logs the error, stores it in self.status and returns False.
    """
    if not self.check_for_dsn(self.dsn):
        return False
    # BUG FIX: the original interpolated undefined name 's' here (NameError).
    self.logger.info("Attempting to connect to the dsn '%s'." % self.dsn)
    if self.integrated == 'Y':
        conn_str = 'DSN=%s;Trusted_Connection=yes' % self.dsn
    else:
        # self.password = getpass.getpass(
        #     'Please enter your password for %s: \n' % self.dsn)
        conn_str = 'DSN=%s;PWD=%s' % (self.dsn, self.password)
    # The two branches differed only in the connection string; the
    # try/except bookkeeping is identical, so it is shared here.
    try:
        self.connection = pyodbc.connect(
            conn_str,
            timeout=5  # allow 5 seconds to connect
        )
        self.logger.info(
            'connection to ' + self.dsn + ' opened at ' + timestr())
        return True
    except Exception as e:
        self.logger.error('Unable to connect to ' + self.dsn + '.')
        self.logger.error(str(e))
        self.status = str(e)
        return False
def main():
    """Load index quotes and fund-manager IDs, then fan out organize_data.

    Reads market/industry index quotes from the JYDB SQL Server and the
    distinct manager IDs from the JRGCB SQL Server, reshapes the index data,
    and runs organize_data for every manager on a 2-process pool.
    """
    # JYDB db
    cnxn_jydb = pyodbc.connect("""
        DRIVER={SQL Server};
        SERVER=172.16.7.229;
        DATABASE=jydb;
        UID=sa;
        PWD=sa123456""")
    # JRGCB db
    cnxn_jrgcb = pyodbc.connect("""
        DRIVER={SQL Server};
        SERVER=172.16.7.166;
        DATABASE=jrgcb;
        UID=sa;
        PWD=sa123456""")
    ########################################################
    # read index data for JYDB
    # Market benchmarks: CSI 300 / CSI 500 / CSI 1000.
    sql_mktbeta = """
        SELECT A.SecuCode, B.TradingDay, B.PrevClosePrice, B.ClosePrice,
               A.ChiName, A.InnerCode
        FROM [JYDB].[dbo].[SecuMain] A, [JYDB].[dbo].[QT_IndexQuote] B
        WHERE A.InnerCode = B.InnerCode
        AND A.SecuCode IN ('000300','000905','000852')
        AND B.ChangePCT is not null
        ORDER BY A.SecuCode, B.TradingDay"""
    data_mktbeta = pd.read_sql(sql_mktbeta, cnxn_jydb, index_col='TradingDay')
    data_mktbeta = indexdata_reshape(data_mktbeta)
    # CITIC industry indices CI005001..CI005029.
    sql_indubeta = """
        SELECT A.SecuCode, B.TradingDay, B.PrevClosePrice, B.ClosePrice,
               A.ChiName, A.InnerCode
        FROM [JYDB].[dbo].[SecuMain] A, [JYDB].[dbo].[QT_IndexQuote] B
        WHERE A.InnerCode = B.InnerCode
        AND A.SecuCode IN ('CI005001','CI005002','CI005003','CI005004','CI005005',
        'CI005006','CI005007','CI005008','CI005009','CI005010',
        'CI005011','CI005012','CI005013','CI005014','CI005015',
        'CI005016','CI005017','CI005018','CI005019','CI005020',
        'CI005021','CI005022','CI005023','CI005024','CI005025',
        'CI005026','CI005027','CI005028','CI005029')
        AND B.ChangePCT is not null
        ORDER BY A.SecuCode, B.TradingDay"""
    data_indubeta = pd.read_sql(
        sql_indubeta, cnxn_jydb, index_col='TradingDay')
    data_indubeta = indexdata_reshape(data_indubeta)
    ########################################################
    ########################################################
    # sql to select distinct fund manager
    sql_allmng = """
        SELECT DISTINCT [ManagerID]
        FROM [jrgcb].[dbo].[FundAndManagerData]
        ORDER BY [ManagerID]
        """
    data_allmng = pd.read_sql(sql_allmng, cnxn_jrgcb)
    # call organize data
    ob_win = 180  # observation window in days
    # BUG FIX: the original iterated 'data_allmng_.ManagerID' -- an undefined
    # name (trailing underscore typo) that raised NameError.
    args = [(_id, data_mktbeta, data_indubeta, ob_win)
            for _id in data_allmng.ManagerID]
    with Pool(2) as pool:
        pool.starmap(organize_data, args)
def test_odbc(database):
    #-----------------------------------------------------------------------------------------------
    # Tests connection to the specified database, the name passed is the ODBC name in Windows.
    # Returns 1 when the DSN can be opened, 0 otherwise.
    #-----------------------------------------------------------------------------------------------
    try:
        conn = pyodbc.connect('DSN=' + database)
    except Exception:
        # Narrowed from a bare except so Ctrl-C / SystemExit still propagate.
        return 0
    # FIX: close the probe connection instead of leaking it.
    conn.close()
    return 1
def DW_connect(linux=False, DSN='DWMarketData'):
    '''Connect to the EA Datawarehouse from window and linux boxes:
    Note: DSN can also be: NZxDaily_LIVE, or HH'''
    # Pick the platform-appropriate connection string, then connect once.
    if linux == True:  # noqa: E712 -- keep the original truth test verbatim
        conn_str = 'DSN=' + DSN + ';UID=linux_user;PWD=linux'
    else:
        conn_str = ('DRIVER={SQL Server Native Client 10.0};SERVER=eadwprod\live;DATABASE='
                    + DSN + ';UID=linux_user;PWD=linux')
    return pyodbc.connect(conn_str)
def getPsychPV(self):
    """Return a list of the PsychPV entries, sorted by date."""
    PsychPVList = []
    if self.__CaseNodeChildID != '':
        # init SQL connections and cursors -- three separate connections so
        # the nested execute() loops below do not clobber each other's cursors
        NodesSQLConn = pyodbc.connect(self.__connection_str)
        NodesSQLCursor = NodesSQLConn.cursor()
        NodeChildsSQLConn = pyodbc.connect(self.__connection_str)
        NodeChildsSQLCursor = NodeChildsSQLConn.cursor()
        PropertiesSQLConn = pyodbc.connect(self.__connection_str)
        PropertiesSQLCursor = PropertiesSQLConn.cursor()
        # fetch nodes (NODETYPEID '3' presumably marks PsychPV nodes -- confirm)
        sqlquery = """
            select * from id_scorer.dbo.NODES
            where NODETYPEID='3' and PARENTID=?
            """
        for node in NodesSQLCursor.execute(sqlquery, self.__CaseNodeChildID):
            newPsychPV = PsychPV()
            sqlquery = """
                select * from id_scorer.dbo.PROPERTIES
                where NodeID=?
                """
            # Map the node's property rows onto the PsychPV fields.
            for property in PropertiesSQLCursor.execute(sqlquery, node.ID):
                if property.PROPERTYNAME == 'Finished':
                    if property.PROPERTYVALUE == 'true':
                        newPsychPV.Finished = True
                if property.PROPERTYNAME == 'Date':
                    # Value looks ISO-like ('YYYY-MM-DDT...'); keep the date part.
                    newPsychPV.Date = datetime.datetime.strptime(
                        property.PROPERTYVALUE.split('T')[0], "%Y-%m-%d").date()
            # Child nodes of type '7' carry the status value.
            sqlquery = """
                select * from id_scorer.dbo.NODES
                where ParentID=? and NODETYPEID='7'
                """
            for ChildNode in NodeChildsSQLCursor.execute(sqlquery, node.ID):
                sqlquery = """
                    select * from id_scorer.dbo.PROPERTIES
                    where NodeID=?
                    """
                for ChildNodeProperty in PropertiesSQLCursor.execute(sqlquery, ChildNode.ID):
                    if ChildNodeProperty.PROPERTYNAME == 'Value':
                        newPsychPV.StatusStr = ChildNodeProperty.PROPERTYVALUE
            PsychPVList.append(newPsychPV)
            del newPsychPV
        # close SQL connections and cursors
        NodesSQLCursor.close()
        NodesSQLConn.close()
        NodeChildsSQLCursor.close()
        NodeChildsSQLConn.close()
        PropertiesSQLCursor.close()
        PropertiesSQLConn.close()
    PsychPVList.sort(key = lambda x: x.Date)
    return PsychPVList
def connect_to_source(self, connection_alias, connection_type, connection_source, username, password, args):
    """ Establishes a connection to a data source and removes the alias.
    :param connection_alias: the plain english name to associate with this connection
    :param connection_type: the type of connection to use
    :param connection_source: the name of an an ODBC DSN or connection string
    :param username: the username to use for a connection (optional)
    :param password: connection_source: the password to use for a connection (optional)
    :returns: formatted string
    """
    # NOTE(review): Python 2 syntax (`except pyodbc.Error, err`).
    logging.debug(" -- Working with connection '%s' of type '%s'" % (connection_alias, connection_type))
    if self.is_registered(connection_alias):
        raise Exception(stack()[0][3], \
            " --- There is already a connection with the alias %s" % connection_alias)
    logging.debug(" --- Working with connection '%s' of type '%s'" % (connection_alias, connection_type))
    if connection_type.lower() == 'odbc':
        try:
            logging.debug(" ---- Attempting to connect to '%s' " % (connection_alias))
            # connect to the ODBC source
            # Credentials are only passed through when both are non-empty.
            if (len(username) > 0 and len(password) > 0):
                new_cnxn = pyodbc.connect(connection_source, uid=username, pwd=password)
            else:
                new_cnxn = pyodbc.connect(connection_source)
            new_cursor = new_cnxn.cursor()
            newSource = ODBCSource(args)
            # store the results and make this the active connection
            self._conn_info[connection_alias] = \
                {'alias': connection_alias, \
                 'object': newSource, \
                 'cursor': new_cursor, \
                 'connection': new_cnxn, \
                 'type': connection_type}
        except pyodbc.Error, err:
            logging.error("The connection is '%s' the available connections are " % connection_alias)
            logging.error(self._conn_info)
            logging.error(err[1])
            raise err
    # NOTE(review): the alias becomes the default even for non-ODBC
    # connection types -- placement after the `if` block inferred from the
    # flattened source; confirm against the original file.
    self._default_conn_alias = connection_alias
    logging.debug(" --- Connected data source '%s' as type '%s'" % (connection_alias, connection_type))
def __get_connection_object__(self):
    """Build and return a pyodbc connection.

    Uses Windows integrated authentication when self.__trusted__ is truthy,
    otherwise authenticates with the stored UID/PWD. Connection errors
    propagate to the caller unchanged.
    """
    # FIX: removed the original `try: ... except: raise` wrapper -- a bare
    # re-raise is a no-op and only obscured the control flow.
    if self.__trusted__:
        return pyodbc.connect(server=self.__server__,
                              driver=self.__provider__,
                              database=self.__database__,
                              trusted_connection=self.__trusted__,
                              port=self.__port__,
                              sslmode=self.__sslmode__)
    return pyodbc.connect(server=self.__server__,
                          driver=self.__provider__,
                          database=self.__database__,
                          uid=self.__user__,
                          pwd=self.__password__,
                          port=self.__port__,
                          sslmode=self.__sslmode__)
def _connect(self):
    """Open self._conn according to the configured connection type."""
    conn_type = self._conf_defs['connection_type']
    if conn_type == DSN:
        # DSN-style: wrap the configured name in a DSN= clause.
        dsn_str = 'DSN=%s' % self._conf_defs['connection_string']
        self.logger.debug('Connect to {0}'.format(dsn_str))
        self._conn = pyodbc.connect(dsn_str, unicode_results=True)
    elif conn_type == CONNECTION_STRING:
        # Raw connection string: pass straight through to pyodbc.
        self._conn = pyodbc.connect(self._conf_defs['connection_string'],
                                    unicode_results=True)
    else:
        #TODO need to be implemented based on connection string
        raise HydroException("Vertica connection string connection is not implemented")
def dbconn(db):
    # Return a cursor on a fresh connection to database `db`, choosing the
    # ODBC driver by platform: native SQL Server driver on Windows ('nt'),
    # FreeTDS on POSIX.
    # NOTE(review): SERVER/UID/PWD are blank in both connection strings --
    # presumably resolved via driver defaults or meant to be filled in;
    # confirm. Whitespace inside the continued string literals is tolerated
    # by ODBC after the ';' separators.
    if os.name == 'nt':
        cnxn = pyodbc.connect(''.join(('DRIVER={SQL Server};\
            SERVER=;DATABASE=', db, ';UID=;\
            PWD=')))
    if os.name == 'posix':
        cnxn = pyodbc.connect(''.join(('DRIVER=freetds;port=1433;\
            SERVER=;DATABASE=', db, ';UID=;\
            PWD=;tds_version=7.3')))
    # On any other platform `cnxn` is unbound and this raises NameError.
    return cnxn.cursor()
def get_new_connection(self, conn_params=None):
    """Open a new Database connection built from this backend's settings."""
    assert conn_params is None, "JAMI: We don't use conn_params that come from outside... should we?"
    connstr = self._get_connection_string()
    autocommit = self.settings_dict['OPTIONS'].get('autocommit', False)
    # unicode_results is only forwarded when enabled (the original passed the
    # string 'True', which pyodbc treats as truthy -- preserved as-is).
    kwargs = {'autocommit': autocommit}
    if self.unicode_results:
        kwargs['unicode_results'] = 'True'
    return Database.connect(connstr, **kwargs)
def connect(self):
    # Open self.db: first try the configured DSN; on failure fall back to a
    # driver/host connection string, or raise if neither is configured.
    # NOTE(review): Python 2 syntax; the nesting of the DRIVER fallback inside
    # the except-block is inferred from the flattened source -- confirm
    # against the original file.
    try:
        if self.DSN:
            self.db = pyodbc.connect("DSN=%s;UID=%s;PWD=%s;charset=%s" % (self.DSN, self.UID, self.PWD, self.charset))
    except Exception, e:
        print e
        if self.DRIVER:
            print "use driver"
            self.db = pyodbc.connect('DRIVER={%s};HOST=%s:%s;UID=%s;PWD=%s;charset=UTF-8' % (self.DRIVER, self.HOST, str(self.PORT), self.UID, self.PWD))
        else:
            raise ValueError("Need DSN or DRIVER&&HOST&&PORT")
def get_cursor():
    # Return a cursor on a new pyodbc connection selected by the module-level
    # `servertype` and `connection` settings; None for unsupported types.
    # NOTE(review): Python 2 print statement.
    if servertype == "MSSQL":
        conn = pyodbc.connect('DRIVER={SQL Server};%s' % connection)
    elif servertype == "MYSQL":
        conn = pyodbc.connect('DRIVER={MySQL};%s' % connection)
    elif servertype == "CUSTOM":
        # `connection` is assumed to be a complete ODBC connection string.
        conn = pyodbc.connect(connection)
    else:
        print "Unsupported database type"
        return None
    return conn.cursor()
def init_db(database_name='', charset='utf8'):
    """Open a database connection and cursor; returns the pair (db, cursor).

    Uses a DSN-based ODBC string on Linux and a driver-based one elsewhere
    (assumed Windows). `charset` is accepted for backward compatibility but
    not used.
    """
    info = get_db_info()
    if sys.platform.startswith('linux'):
        conn_str = 'DSN=%s;UID=%s;PWD=%s;DATABASE=%s;' % (
            info['dsn'], info['user'], info['passwd'], database_name)
    else:
        # assuming windows...
        conn_str = 'DRIVER=%s; SERVER=%s; DATABASE=%s; UID=%s; PWD=%s' % (
            info['DRIVER'], info['host'], database_name, info['user'], info['passwd'])
    db = pyodbc.connect(conn_str)
    return db, db.cursor()
def SetHandle(self, databasename):
    # Point this object at the per-run result database `databasename` and the
    # shared BasicInfo database, then open a pyodbc connection + cursor for
    # each (Microsoft Access ODBC driver).
    self.DatabaseDB = get_path() + '/TestResult/Database/' + str(databasename) + ".mdb"
    self.BasicinfoDB = get_path() + "/Data/BasicInfo.mdb"
    self.DBHandle = 'DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};DBQ=%s;' % self.DatabaseDB
    self.DBBasicHandle = 'DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};DBQ=%s;' % self.BasicinfoDB
    try:
        self.DBBasicConnection = pyodbc.connect(self.DBBasicHandle)
        self.BasicCursor = self.DBBasicConnection.cursor()
        self.TestConnection = pyodbc.connect(self.DBHandle)
        self.Cursor = self.TestConnection.cursor()
    except:
        # On any connection failure, show a message box ("database
        # initialization connection error") instead of raising.
        tkinter.messagebox.showinfo(title='提示', message="数据库初始化连接错误")
def getBIScore(self):
    """Return a list of all treatment-intensity (BI) entries, sorted by date."""
    BIScoreList = []
    if self.__CaseNodeChildID != '':
        # init SQL connections and cursors -- separate connections so the
        # nested execute() loops keep independent cursors
        NodesSQLConn = pyodbc.connect(self.__connection_str)
        NodesSQLCursor = NodesSQLConn.cursor()
        NodeChildsSQLConn = pyodbc.connect(self.__connection_str)
        NodeChildsSQLCursor = NodeChildsSQLConn.cursor()
        PropertiesSQLConn = pyodbc.connect(self.__connection_str)
        PropertiesSQLCursor = PropertiesSQLConn.cursor()
        ## fetch all CareIntensityERW2013 Nodes
        sqlquery = """
            select ID from id_scorer.dbo.NODES
            where NODETYPEID='16' and PARENTID=?
            """
        for node in NodesSQLCursor.execute(sqlquery, self.__CaseNodeChildID):
            sqlquery = """
                select * from id_scorer.dbo.PROPERTIES
                where NODEID=?
                """
            newBIScore = BI_Score()
            # Map the node's property rows onto the BI_Score fields.
            for property in PropertiesSQLCursor.execute(sqlquery, node.ID):
                if property.PROPERTYNAME == "SomaScore":
                    newBIScore.SomaScore = int(property.PROPERTYVALUE)
                if property.PROPERTYNAME == 'PsyScore':
                    newBIScore.PsyScore = int(property.PROPERTYVALUE)
                if property.PROPERTYNAME == 'SocialScore':
                    newBIScore.SocialScore = int(property.PROPERTYVALUE)
                if property.PROPERTYNAME == 'totalScore':
                    newBIScore.TotalScore = int(property.PROPERTYVALUE)
                if property.PROPERTYNAME == 'Finished':
                    if property.PROPERTYVALUE == 'true':
                        newBIScore.Finished = True
                if property.PROPERTYNAME == 'Date':
                    # Value looks ISO-like ('YYYY-MM-DDT...'); keep the date part.
                    newBIScore.Date = datetime.datetime.strptime(
                        property.PROPERTYVALUE.split('T')[0], "%Y-%m-%d").date()
            BIScoreList.append(newBIScore)
            del newBIScore
        # close SQL connections and cursors
        NodesSQLCursor.close()
        NodesSQLConn.close()
        NodeChildsSQLCursor.close()
        NodeChildsSQLConn.close()
        PropertiesSQLCursor.close()
        PropertiesSQLConn.close()
    BIScoreList.sort(key = lambda x: x.Date)
    return BIScoreList
def get_new_connection(self, conn_params):
    """Open a Database connection from the backend's connection string and
    emit Django's connection_created signal."""
    connstr = self._get_connection_string()
    autocommit = self.settings_dict['OPTIONS'].get('autocommit', False)
    # Only forward unicode_results when enabled (the original passes the
    # string 'True', which pyodbc treats as truthy -- preserved verbatim).
    kwargs = {'autocommit': autocommit}
    if self.unicode_results:
        kwargs['unicode_results'] = 'True'
    connection = Database.connect(connstr, **kwargs)
    connection_created.send(sender=self.__class__, connection=self)
    return connection
def _load_data_mv90(self, turbo=False, ndays=None):
    # Pull interval profile rows for this meter from the MV90 SQL Server
    # (via FreeTDS) and store them as ProfileDataPoint records.
    # turbo=True skips the existence check and inserts unconditionally;
    # ndays limits the query to the most recent n days.
    # Returns True when at least one row was processed.
    tz = pytz.timezone("America/Jamaica")
    tzutc = pytz.timezone("UTC")
    # First connection: look up the meter's multiplier (defaults to 1.0).
    con = pyodbc.connect(
        servername="mv90",
        user=os.environ["MV90_USER"],
        password=os.environ["MV90_PASS"],
        driver="FreeTDS",
        database="MR-Auxillary",
    )
    cur = con.cursor()
    multiplier = 1.0
    for row in cur.execute("SELECT MULTIPLIER FROM METER_MULTIPLIER WHERE METER=?", (self.meter_id,)):
        multiplier = float(row[0])
    # Second connection: the actual profile data.
    con = pyodbc.connect(
        servername="mv90",
        user=os.environ["MV90_USER"],
        password=os.environ["MV90_PASS"],
        driver="FreeTDS",
        database="mv90db",
    )
    cur = con.cursor()
    linecount = 0
    if ndays is None:
        cur.execute("SELECT * FROM utsProfile WHERE p_mtrid=?", (self.meter_id,))
    else:
        # p_dtm is stored as an integer timestamp YYYYMMDDhhmm.
        d = datetime.datetime.now() - datetime.timedelta(days=ndays)
        cur.execute(
            "SELECT * FROM utsProfile WHERE p_mtrid=? AND p_dtm > ?",
            (self.meter_id, int(datetime.datetime.strftime(d, "%Y%m%d0000"))),
        )
    for row in cur:
        # row[1] is presumably a status/quality flag; only 1 is accepted -- confirm.
        if row[1] != 1:
            continue
        # Timestamps arrive as UTC strings; convert to local (Jamaica) time.
        ts = datetime.datetime.strptime(row[2], "%Y%m%d%H%M")
        ts = ts.replace(tzinfo=tzutc)
        ts = ts.astimezone(tz)
        if not turbo:
            try:
                # Insert only when no point exists for this meter/timestamp.
                dp = ProfileDataPoint.objects.get(meter=self, ts=ts)
            except:
                # NOTE(review): the magic factor 0.0658750031288964 converts raw
                # counts to kWh -- its derivation is not visible here; confirm.
                ProfileDataPoint.objects.create(
                    meter=self, ts=ts, kwh=float(row[3]) * multiplier * 0.0658750031288964, raw=float(row[3]) * 1000
                )
        else:
            ProfileDataPoint.objects.create(
                meter=self, ts=ts, kwh=float(row[3]) * multiplier * 0.0658750031288964, raw=float(row[3]) * 1000
            )
        linecount = linecount + 1
    return True if linecount > 0 else False
def matchCalls(authType = "windows", server = None, database = None, username = None, password = None, recordingTempDirectory = None, destinationDirectory = None, recordings = None):
    """Gets data from call log stored in SQL Server, matches to recordings, and copies files. Can use Windows auth or SQL Server auth."""
    # NOTE(review): Python 2 print statements throughout; the SQL below is
    # built by string interpolation (injection risk if rec.* is untrusted),
    # and the success path prints the password in clear text.
    if authType == 'windows':
        print "Connecting to %s with Windows auth." % server
        conn = pyodbc.connect(driver="{SQL Server}", server=server, database=database, trusted_connection="yes")  # create our PyODBC connection with Windows auth
    elif authType == 'sql':
        if username and password:
            print "Connecting to %s with username %s and password %s." % (server, username, password)
            conn = pyodbc.connect(driver="{SQL Server}", server=server, database=database, uid=username, password=password)  # create our PyODBC connection with user/pass
        else:
            print "When using SQL Server authentication, you must specify a username and password."
            exit(1057)
    else:
        print "Unknown error occurred when setting database authentication mode."
        exit(13816)
    for rec in recordings:
        callLog = {}
        cursor = conn.cursor()
        print "Selecting records that match '%s' and '%s'" % (rec.sourcenumber, rec.destnumber)
        # now we need to get all of the calls from our given source number to our given destination
        cursor.execute("SELECT * FROM PhoneLog WHERE CallerNumber=\'%s\' and DialedNumber=\'%s\'" % (rec.sourcenumber, rec.destnumber))
        while 1:
            row = cursor.fetchone()
            if not row:
                break
            else:
                td = parser.parse(row.Time)
                td = td.replace(tzinfo=dateutil.tz.gettz('UTC'))  # tag the call log time as UTC (it's already UTC, no need to convert)
                callLog[row.CallID] = td
        try:
            # call recordings aren't matched to call data by any UID
            # we'll find the matching recording (same source/destination numbers) with the nearest timestamp to what we've got in our logs
            matched = min(callLog, key=lambda y: abs((callLog[y]-rec.time).total_seconds()))  # get the least difference between logs and recording
            print "Matched call is call ID %s at %s." % (matched, rec.time)
            copyfile(recordingTempDirectory+"/"+rec.filename, destinationDirectory+"/"+matched+".WAV")  # copy to the destination using
            try:
                remove("%s/%s" % (recordingTempDirectory, rec.filename))  # once it's copied, remove it from our source dir
            except:
                print "Tried to remove %s/%s but failed." % (recordingTempDirectory, rec.filename)
        except:
            # min() raises ValueError on an empty callLog -- no rows matched.
            print "Couldn't find a match, moving on."
            continue
def connect_to_database(self):
    """Makes an ODBC database connection.

    Dispatches on the configured backend: MongoDB, DynamoDB, Oracle,
    SQL Server or MySQL. Sets self.db_connection (and self.db_cursor for the
    SQL backends); Mongo/Dynamo handles are stored instead of cursors.
    """
    #TODO: test "Driver={Microsoft ODBC for Oracle};Server=" + dbInst + ';Uid=' + schema + ';Pwd=' + passwd + ";"
    if self.mongodb_client_info:
        import pymongo
        import bson
        client = pymongo.MongoClient(self.mongodb_client_info)
        self.db_connection = client[self.mongodb_database]
    elif 'dynamodb' in self.job['location']['config']:
        import boto3
        if self.dynamodb_endpoint_url:
            self.dynamodb = boto3.resource('dynamodb', region_name=self.dynamodb_region, endpoint_url=self.dynamodb_endpoint_url)
        else:
            self.dynamodb = boto3.resource('dynamodb', region_name=self.dynamodb_region)
    else:
        self.drvr = self.sql_connection_info['connection']['driver']
        srvr = self.sql_connection_info['connection']['server']
        db = self.sql_connection_info['connection']['database']
        un = self.sql_connection_info['connection']['uid']
        pw = self.sql_connection_info['connection']['pwd']
        if self.drvr == 'Oracle':
            import cx_Oracle
            # Three connection formats tried in order:
            # uid/pwd@server/db, then makedsn(host, port, sid), then
            # uid/pwd@server:port/sid.
            try:
                self.db_connection = cx_Oracle.connect("{0}/{1}@{2}/{3}".format(un, pw, srvr, db))
            except Exception:
                port = self.sql_connection_info['connection']['port']
                sid = self.sql_connection_info['connection']['sid']
                dsn_string = cx_Oracle.makedsn(srvr, port, sid)
                try:
                    self.db_connection = cx_Oracle.connect(user=un, password=pw, dsn=dsn_string)
                except cx_Oracle.DatabaseError:
                    # Try - uid/pwd@server:port/sid
                    port = self.sql_connection_info['connection']['port']
                    sid = self.sql_connection_info['connection']['sid']
                    self.db_connection = cx_Oracle.connect("{0}/{1}@{2}:{3}/{4}".format(un, pw, srvr, port, sid))
            self.db_cursor = self.db_connection.cursor()
        elif self.drvr == 'SQL Server':
            import pyodbc
            self.__sql_server_connection_str = "DRIVER={0};SERVER={1};DATABASE={2};UID={3};PWD={4}".format(self.drvr, srvr, db, un, pw)
            self.db_connection = pyodbc.connect(self.__sql_server_connection_str)
            self.db_cursor = self.db_connection.cursor()
            # Re-store the string with blank credentials so the real UID/PWD
            # do not linger on the instance after connecting.
            self.__sql_server_connection_str = "DRIVER={0};SERVER={1};DATABASE={2};UID={3};PWD={4};OPTION=3".format(self.drvr, srvr, db, '', '')
        elif 'MySQL' in self.drvr:
            import pyodbc
            # Ex. "DRIVER={MySQL ODBC 5.3 ANSI Driver}; SERVER=localhost; DATABASE=test; UID=root;OPTION=3"
            self.__sql_server_connection_str = "DRIVER={0};SERVER={1};DATABASE={2};UID={3};PWD={4};OPTION=3".format(self.drvr, srvr, db, un, pw)
            self.db_connection = pyodbc.connect(self.__sql_server_connection_str)
            self.db_cursor = self.db_connection.cursor()
            # Same credential scrub as the SQL Server branch.
            self.__sql_server_connection_str = "DRIVER={0};SERVER={1};DATABASE={2};UID={3};PWD={4};OPTION=3".format(self.drvr, srvr, db, '', '')
def __init__(self, databasename, init=True):
    # Open (or create from a template .mdb) the per-run test-result database
    # plus the shared BasicInfo database, and allocate the next TestSeriesNo.
    # With init=False only the handles/cursors are (re)attached via SetHandle.
    if init is not True:
        self.SetHandle(databasename)
    else:
        self.frequency = [1, 2, 3, 4, 5, 6, 7.5, 9.5]
        # set the dot location
        self.SourceDatabase = get_path() + '/Data/MyDatabase.mdb'
        self.DatabaseDB = get_path() + '/TestResult/Database/' + str(databasename) + ".mdb"
        self.BasicinfoDB = get_path() + "/Data/BasicInfo.mdb"
        # get the database handle
        self.DBHandle = 'DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};DBQ=%s;' % self.DatabaseDB
        # get the Basic info database handle
        self.DBBasicHandle = 'DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};DBQ=%s;' % self.BasicinfoDB
        try:
            self.DBBasicConnection = pyodbc.connect(self.DBBasicHandle)
            self.BasicCursor = self.DBBasicConnection.cursor()
        except:
            # Message box: "database initialization connection error".
            tkinter.messagebox.showinfo(title='提示', message="数据库初始化连接错误")
        # create the test database
        if file_exist(self.DatabaseDB):
            # connect the database
            try:
                self.TestConnection = pyodbc.connect(self.DBHandle)
                self.Cursor = self.TestConnection.cursor()
            except:
                tkinter.messagebox.showinfo(title='提示', message="数据库初始化连接错误")
            if self.Cursor.tables(table='TestDate').fetchone():
                # Continue the existing series: next number after the max.
                self.TestSeriesNo = self.Fetchone("SELECT MAX(TestSeriesNo) FROM TestDate")[0]
                if self.TestSeriesNo is None:
                    self.TestSeriesNo = 1
                    self.Execute("INSERT INTO TestDate (TestSeriesNo, TestDate) VALUES"
                                 " (%d, NOW())" % self.TestSeriesNo)
                else:
                    self.TestSeriesNo += 1
                    self.Execute("INSERT INTO TestDate (TestSeriesNo, TestDate) VALUES"
                                 " (%d, NOW())" % self.TestSeriesNo)
            else:
                # Database file exists but the TestDate table does not yet.
                self.TestSeriesNo = 1
                self.Execute("Create TABLE TestDate(TestSeriesNo INT NOT NULL,"
                             " TestDate DATE, PRIMARY KEY(TestSeriesNo))")
                self.Execute("INSERT INTO TestDate (TestSeriesNo, TestDate) VALUES (%d, NOW())" % self.TestSeriesNo)
        else:
            # No result database yet: clone the template and start series 1.
            copy_file(self.SourceDatabase, self.DatabaseDB)
            # connect the database
            self.TestConnection = pyodbc.connect(self.DBHandle)
            # create the database cursor
            self.TestSeriesNo = 1
            self.Cursor = self.TestConnection.cursor()
            self.Execute("INSERT INTO TestDate (TestSeriesNo, TestDate) VALUES (%d, NOW())" % self.TestSeriesNo)
def _get_conn():
    '''
    Return a MSSQL connection.
    '''
    # Prefer the config.option execution module when available; otherwise
    # read the raw minion opts directly.
    if 'config.option' in __salt__:
        option = __salt__['config.option']
        dsn = option('returner.odbc.dsn')
        user = option('returner.odbc.user')
        passwd = option('returner.odbc.passwd')
    else:
        cfg = __opts__
        dsn = cfg.get('returner.odbc.dsn', None)
        user = cfg.get('returner.odbc.user', None)
        passwd = cfg.get('returner.odbc.passwd', None)
    return pyodbc.connect('DSN={0};UID={1};PWD={2}'.format(dsn, user, passwd))
def database_connect():
    """Open the module-wide DB connection once.

    Returns True when a new connection was established (and cleanup ran),
    False when DB_CONN was already set.
    """
    global DB_CONN
    if not DB_CONN:
        DB_CONN = pyodbc.connect(config.database)
        database_cleanup()
        return True
    return False
def __init__(self):
    """Load configuration, build the ODBC connection string and connect."""
    self.readConfig()
    # Keep the assembled string on the instance for later reference/debugging.
    params = {
        'dsn': self.dsn,
        'uid': self.user,
        'pwd': self.password,
        'database': self.database,
    }
    self.conn_string = 'DSN={dsn};UID={uid};PWD={pwd};DATABASE={database};'.format(**params)
    self.cnxn = pyodbc.connect(self.conn_string)
def mf_lookup(find_str, item):
    """Look up distinct fund-manager IDs by manager name or fund code.

    find_str -- value to match
    item     -- which column to match against: 'Name' or 'SecuCode'

    Returns a DataFrame of ManagerID values, or None for an unrecognized
    `item` (matching the original's implicit behavior).
    """
    cnxn_jrgcb = pyodbc.connect("""
        DRIVER={SQL Server};
        SERVER=172.16.7.166;
        DATABASE=jrgcb;
        UID=sa;
        PWD=sa123456""")
    # SECURITY FIX: the original concatenated find_str into the SQL text
    # (injection risk). The value is now bound as a parameter; the column
    # name cannot be a parameter, so it is whitelisted instead.
    if item in ('Name', 'SecuCode'):
        sql_mf = ("SELECT DISTINCT ManagerID "
                  "FROM [jrgcb].[dbo].[FundAndManagerData_v2] "
                  "WHERE [%s] = ?" % item)
        return pd.read_sql(sql_mf, cnxn_jrgcb, params=[find_str])
    return None
def connectDB(db_file):
    """Open and return a pyodbc connection to the Access file `db_file`."""
    return pyodbc.connect(
        'DRIVER={Microsoft Access Driver (*.mdb, *.accdb)}; DBQ=%s' % (db_file))
import datetime, glob, os, shutil, openpyxl, pyodbc from openpyxl.utils.dataframe import dataframe_to_rows import pandas as pd from loader import get_dir, sql_execute from sqlalchemy import * from sqlalchemy.orm import sessionmaker from collections import namedtuple from dateutil import relativedelta from collections import namedtuple con = create_engine(os.getenv('sql_engine'), convert_unicode=False) conn = pyodbc.connect(os.getenv('sql_conn')) cursor = conn.cursor() class my_exception(Exception): pass def check_robot(a): date = datetime.datetime.today().strftime("%Y_%m_%d") path = os.getenv('path_robot') + '\\' + date + '\\' + '*' spisok = 'В директории Robot сейчас лежат файлы:' for file in glob.glob(path): spisok += '\n' + file.split('\\')[-1] return spisok def sort_death_mg(a): def search_mo(street, house):
def get_new_connection(self, conn_params):
    """Build an ODBC connection string from Django-style ``conn_params``
    and open a pyodbc connection, retrying transient failures.

    Retries up to OPTIONS['connection_retries'] times, sleeping
    OPTIONS['connection_retry_backoff_time'] seconds between attempts,
    when the error message contains one of self._transient_error_numbers.

    Fixes vs. the original:
    * ``need_to_retry`` is reset on every attempt -- previously a stale
      True from an earlier transient failure suppressed the re-raise of a
      later non-transient error, looping forever.
    * the duplicated ``error_number in e.args[1]`` test is removed and
      ``e.args[1]`` is guarded so a malformed exception cannot mask the
      original error with an IndexError.
    """
    database = conn_params['NAME']
    host = conn_params.get('HOST', 'localhost')
    user = conn_params.get('USER', None)
    password = conn_params.get('PASSWORD', None)
    port = conn_params.get('PORT', None)
    options = conn_params.get('OPTIONS', {})
    driver = options.get('driver', 'ODBC Driver 13 for SQL Server')
    dsn = options.get('dsn', None)

    # Microsoft driver names assumed here are:
    # * SQL Server Native Client 10.0/11.0
    # * ODBC Driver 11/13 for SQL Server
    ms_drivers = re.compile('^ODBC Driver .* for SQL Server$|^SQL Server Native Client')

    # available ODBC connection string keywords:
    # (Microsoft drivers for Windows)
    # https://docs.microsoft.com/en-us/sql/relational-databases/native-client/applications/using-connection-string-keywords-with-sql-server-native-client
    # (Microsoft drivers for Linux/Mac)
    # https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/connection-string-keywords-and-data-source-names-dsns
    # (FreeTDS)
    # http://www.freetds.org/userguide/odbcconnattr.htm
    cstr_parts = {}
    if dsn:
        cstr_parts['DSN'] = dsn
    else:
        # Only append DRIVER if DATABASE_ODBC_DSN hasn't been set
        cstr_parts['DRIVER'] = driver
        if ms_drivers.match(driver):
            # Microsoft drivers take "host,port" in SERVER.
            if port:
                host = ','.join((host, str(port)))
            cstr_parts['SERVER'] = host
        elif options.get('host_is_server', False):
            if port:
                cstr_parts['PORT'] = str(port)
            cstr_parts['SERVER'] = host
        else:
            cstr_parts['SERVERNAME'] = host

    if user:
        cstr_parts['UID'] = user
        cstr_parts['PWD'] = password
    else:
        # No explicit credentials: use integrated authentication.
        if ms_drivers.match(driver):
            cstr_parts['Trusted_Connection'] = 'yes'
        else:
            cstr_parts['Integrated Security'] = 'SSPI'

    cstr_parts['DATABASE'] = database

    if ms_drivers.match(driver) and os.name == 'nt':
        cstr_parts['MARS_Connection'] = 'yes'

    connstr = encode_connection_string(cstr_parts)

    # extra_params are glued on the end of the string without encoding,
    # so it's up to the settings writer to make sure they're appropriate -
    # use encode_connection_string if constructing from external input.
    if options.get('extra_params', None):
        connstr += ';' + options['extra_params']

    unicode_results = options.get('unicode_results', False)
    timeout = options.get('connection_timeout', 0)
    retries = options.get('connection_retries', 5)
    backoff_time = options.get('connection_retry_backoff_time', 5)
    query_timeout = options.get('query_timeout', 0)

    conn = None
    retry_count = 0
    while conn is None:
        try:
            conn = Database.connect(connstr,
                                    unicode_results=unicode_results,
                                    timeout=timeout)
        except Exception as e:
            # Reset per attempt so a previous transient failure cannot
            # suppress the raise below.
            need_to_retry = False
            # pyodbc errors normally carry (sqlstate, message); guard the
            # index in case the exception is shaped differently.
            message = e.args[1] if len(e.args) > 1 else ''
            for error_number in self._transient_error_numbers:
                if error_number in message:
                    if retry_count < retries:
                        time.sleep(backoff_time)
                        need_to_retry = True
                        retry_count += 1
                    break
            if not need_to_retry:
                raise

    conn.timeout = query_timeout
    return conn
def reader():
    """Pull the transaction/returns dataset from SQL Server into a DataFrame.

    The query uses a CTE over an unstructured log table to count returns
    per product type, then bins customers into tiers inside SQL itself to
    keep data transformation within the python script to a minimum.

    Fixes vs. the original:
    * connection string had PORT twice, the second a typo (1443);
    * the CTE join aliased orderrequest as ``or`` (a reserved word);
    * ``p.case when revenue-cost ...`` was invalid T-SQL (stray ``p.``);
    * the ``left join returns`` was missing its ``r`` alias that the
      ON/SELECT clauses reference.
    """
    server = ''
    database = ''
    username = ''
    password = ''
    driver = ''
    query = '''
    with returns as (
        select
            dl.logkey as producttype, --productid
            count(dl.id) as 'numberoftimesreturned'
        from deliverylog dl
        inner join orderrequest req on req.orderid = dl.key  -- alias renamed: "or" is reserved
        where dl.tablename = 'TransactionStatus'
          and dl.columnname = 'Status'
          and dl.changevalue = 'Returned'
        group by dl.logkey
    )
    select
        p.orderid,
        p.producttypename, --what type, fruit, clothing, tagged by seller
        p.producttypeid,
        case when p.revenue - p.cost < 0 then 1 else 0 end as 'losstransaction',
        p.customerid,
        -- NOTE(review): the tier boundaries below are kept exactly as
        -- written, but they look mangled (e.g. 1000/20000, 9000/10000)
        -- and should be confirmed against the intended 10-tier design.
        case when c.annualprofit between 0 and 1000 then 1
             when c.annualprofit between 1000 and 20000 then 2
             when c.annualprofit between 2000 and 30000 then 3
             when c.annualprofit between 3000 and 40000 then 4
             when c.annualprofit between 4000 and 50000 then 5
             when c.annualprofit between 5000 and 60000 then 6
             when c.annualprofit between 6000 and 70000 then 7
             when c.annualprofit between 7000 and 80000 then 8
             when c.annualprofit between 9000 and 10000 then 9
             when c.annualprofit > 1000000 then 10
             else null end as 'customertier', --10 tiers based on market research of customer operating profit
        p.salelocationid,
        p.revenue,
        p.cost,
        p.revenue - p.cost as 'margin',
        (p.revenue - p.cost) / p.revenue as 'marginpercent',
        p.shipstate,
        p.storestate,
        cast(p.transactioncomplete as date) as 'transactiondate',
        p.commodityweight,
        isnull(r.numberoftimesreturned, 0) as timesreturned
    from producttable p
    left join returns r on r.producttype = p.producttypeid
        --left join here because we want product types that had returns or no returns
    inner join customertable c on p.customerid = c.customerid
    where p.revenue - p.cost between -2000 AND 100000
        --cleaning for faulty manually inputted user data'''
    cnxn = pyodbc.connect('DRIVER=' + driver + ';SERVER=' + server +
                          ';PORT=1433;DATABASE=' + database +
                          ';UID=' + username + ';PWD=' + password)
    dFrame = pd.read_sql_query(query, cnxn)
    return dFrame
import requests import pyodbc from bs4 import BeautifulSoup def isblank(a,b): if len(str(a))<1: return(b) else: return(a) headers={ 'Cookie':'device_id=923502187b8a828f8a283f9134b3ea5d; s=gd12owoxky; aliyungf_tc=AQAAAMhBEBAKwwUAGNKutLDP13gosQvd; xq_a_token=82d9cefaa0793743cb186e53294ec0e61ac2abec; xq_a_token.sig=5N1bqGL6dBOdtpsJHPbhJk4l6_g; xq_r_token=11b86433a20d1d1eef63ecc12252297196a20e10; xq_r_token.sig=RPGspgHiNeURBrDthhch0e5_T0g; u=381501259919928; __utmt=1; __utma=1.1358685580.1500807993.1501259925.1501293474.3; __utmb=1.6.10.1501293474; __utmc=1; __utmz=1.1500807993.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); Hm_lvt_1db88642e346389874251b5a1eded6e3=1500807845,1501259921; Hm_lpvt_1db88642e346389874251b5a1eded6e3=1501293544', 'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36', 'X-Requested-With':'XMLHttpRequest' } db=pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;PORT=1433;DATABASE=symbol;UID=sa;PWD=xiaotong123') cur=db.cursor() codelist=[] exchange=[] sqlquery="select distinct symbol,iif(exchange='Shenzhen','SZ','SH') as ex from [indexes]" try: cur.execute(sqlquery) results=cur.fetchall() for row in results: codelist.append(row[0]) exchange.append(row[1]) except: print('CodeList获取失败') j=0 for codelst in codelist: print(j+1,end=':') ex=exchange[j]
# -*- coding: utf-8 -*- import requests import urlparse from bs4 import BeautifulSoup from datetime import datetime import os, sys, re import pyodbc cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;DATABASE=IMDBComplete') cursor = cnxn.cursor() f = open("C:/Users/BigData/Dropbox/BoxOffice/after20000101/after20000101.txt", "r") for line in f.readlines(): try: #print line link = line.strip() #print link pkno = link.split('/')[4] #print pkno rs = requests.session() request_get = requests.get(link) #print request_get response = request_get.text.encode('utf8') soup = BeautifulSoup(response) top = soup.select('.maindetails_center')[0] itemprop = top.select('.itemprop')[0].text.encode('utf-8') title = " ".join(itemprop.split()) title = title.decode('utf-8') #print title request_get = requests.get(link + "business?ref_=tt_dt_bus")
def connection_to_db(username: str = 'test', password: str = '123',
                     server: str = 'localhost', database: str = 'test'):
    """Return an autocommit pyodbc connection to SQL Server.

    Generalized: server and database, previously hard-coded locals, are
    now keyword parameters whose defaults reproduce the old behavior, so
    existing callers are unaffected.
    """
    connection_string = 'DRIVER={0};SERVER={1};DATABASE={2};UID={3};PWD={4}'.format(
        'ODBC Driver 13 for SQL Server', server, database, username, password)
    return pyodbc.connect(connection_string, autocommit=True)
def compute_mass_potential(graip_db, dp_shp, slpd, alpha, si=None, sic=None, dist=None):
    """Sample raster values at each drain point and write them into the
    DrainPoints table of the GRAIP Access database.

    graip_db : path used with utils.MS_ACCESS_CONNECTION to open the DB
    dp_shp   : drain-points shapefile (must have a GRAIPDID field)
    slpd     : slope raster; alpha is the exponent used for ESI
    si/sic/dist : optional rasters written to SI / SIR / DistToStream

    NOTE(review): comment reconstruction of collapsed source -- the exact
    indentation of conn.commit() (per point vs. per run) should be
    confirmed against the original file.
    """
    conn = pyodbc.connect(utils.MS_ACCESS_CONNECTION % graip_db)
    cursor = conn.cursor()
    slpd_gdal = gdal.Open(slpd)
    slpd_gdal_band = slpd_gdal.GetRasterBand(1)
    if si:
        si_gdal = gdal.Open(si)
        si_gdal_band = si_gdal.GetRasterBand(1)
    if sic:
        sic_gdal = gdal.Open(sic)
        sic_gdal_band = sic_gdal.GetRasterBand(1)
    if dist:
        dist_gdal = gdal.Open(dist)
        dist_gdal_band = dist_gdal.GetRasterBand(1)
    # open the drainpoints shapefile
    driver = ogr.GetDriverByName(utils.GDALFileDriver.ShapeFile())
    dataSource = driver.Open(dp_shp, 1)
    layer = dataSource.GetLayer()
    try:
        # for each drain point in shape file
        for dp in layer:
            geom = dp.GetGeometryRef()
            # find grid row and col corresponding to drain point
            row, col = utils.get_coordinate_to_grid_row_col(geom.GetX(0), geom.GetY(0), slpd_gdal)
            geom = None
            # get the id of the drain point from shape file
            graipdid = dp.GetField('GRAIPDID')
            # get current grid cell data (1x1 window at the point's cell)
            current_cell_slope_data = slpd_gdal_band.ReadAsArray(xoff=col, yoff=row, win_xsize=1, win_ysize=1)
            if si:
                current_cell_si_data = si_gdal_band.ReadAsArray(xoff=col, yoff=row, win_xsize=1, win_ysize=1)
            if sic:
                current_cell_sic_data = sic_gdal_band.ReadAsArray(xoff=col, yoff=row, win_xsize=1, win_ysize=1)
            if dist:
                current_cell_dist_data = dist_gdal_band.ReadAsArray(xoff=col, yoff=row, win_xsize=1, win_ysize=1)
            dp_row = cursor.execute("SELECT * FROM DrainPoints WHERE GRAIPDID=%d" % graipdid).fetchone()
            if dp_row:
                # No-data cells are stored as the sentinel NO_DATA_VALUE.
                if current_cell_slope_data[0][0] != slpd_gdal_band.GetNoDataValue():
                    dp_row.Slope = float(current_cell_slope_data[0][0])
                    dp_row.ESI = dp_row.ELength * math.pow(dp_row.Slope, alpha)
                else:
                    dp_row.Slope = utils.NO_DATA_VALUE
                    dp_row.ESI = utils.NO_DATA_VALUE
                update_sql = "UPDATE DrainPoints SET Slope=?, ESI=? WHERE GRAIPDID=?"
                data = (dp_row.Slope, dp_row.ESI, dp_row.GRAIPDID)
                cursor.execute(update_sql, data)
                if si:
                    if current_cell_si_data[0][0] != si_gdal_band.GetNoDataValue():
                        dp_row.SI = float(current_cell_si_data[0][0])
                    else:
                        dp_row.SI = utils.NO_DATA_VALUE
                    update_sql = "UPDATE DrainPoints SET SI=? WHERE GRAIPDID=?"
                    data = (dp_row.SI, dp_row.GRAIPDID)
                    cursor.execute(update_sql, data)
                if sic:
                    if current_cell_sic_data[0][0] != sic_gdal_band.GetNoDataValue():
                        dp_row.SIR = float(current_cell_sic_data[0][0])
                    else:
                        dp_row.SIR = utils.NO_DATA_VALUE
                    update_sql = "UPDATE DrainPoints SET SIR=? WHERE GRAIPDID=?"
                    data = (dp_row.SIR, dp_row.GRAIPDID)
                    cursor.execute(update_sql, data)
                if dist:
                    if current_cell_dist_data[0][0] != dist_gdal_band.GetNoDataValue():
                        dp_row.DistToStream = float(current_cell_dist_data[0][0])
                    else:
                        dp_row.DistToStream = utils.NO_DATA_VALUE
                    update_sql = "UPDATE DrainPoints SET DistToStream=? WHERE GRAIPDID=?"
                    data = (dp_row.DistToStream, dp_row.GRAIPDID)
                    cursor.execute(update_sql, data)
            conn.commit()
    except:
        raise
    finally:
        # cleanup
        if conn:
            conn.close()
        if dataSource:
            dataSource.Destroy()
def database_writer():
    """Write the collected eval fields (module-level block* globals) into a
    copy of the blank NAVFIT98 Access database, then rename it to .accdb.

    NOTE(review): depends on block1_name, block2_rate, ... name_folder
    being set elsewhere in the file -- confirm against the full script.
    """
    #get current working directory and get con
    cwd = os.getcwd()
    #spawn instance of ConfigPraser and read config placed in /config/config.ini
    config = ConfigParser()
    config.read(cwd + '\\config\\config.ini')
    input('PRESS ENTER TO WRITE DATABASE')
    print(block1_name)
    print(block2_rate)
    print(block3_designation)
    print(block4_ssn)
    print(block6_uic)
    print(block7_station)
    # Pick rater/senior-rater/reporting-senior from the config section
    # selected by the last character of the rate string.
    if block2_rate[-1] == '1':
        block_senior_rater = config.get('e6', 'senior_rater')
        block_rater = config.get('e6', 'rater')
        block_reporting_senior = config.get('e6', 'reporting_senior')
        block15_to = config.get('e6', 'to')
    elif block2_rate[-1] == '2':
        block_senior_rater = config.get('e5', 'senior_rater')
        block_rater = config.get('e5', 'rater')
        block_reporting_senior = config.get('e5', 'reporting_senior')
        block15_to = config.get('e5', 'to')
    elif block2_rate[-1] == '3':
        block_senior_rater = config.get('e4', 'senior_rater')
        block_rater = config.get('e4', 'rater')
        block_reporting_senior = config.get('e4', 'reporting_senior')
        block15_to = config.get('e4', 'to')
    else:
        block_senior_rater = config.get('airman', 'senior_rater')
        block_rater = config.get('airman', 'rater')
        block_reporting_senior = config.get('airman', 'reporting_senior')
        block15_to = config.get('e3', 'to')
    print(block_senior_rater)
    print(block_rater)
    print(block_reporting_senior)
    input()
    # I don't like this portion but here is a simple explanation why it had
    # to be done that way. NAVFIT98 is all kinds of messed up. It uses
    # default MDB database (prior Access 2007). with Access 2007, Microsoft
    # introduced a new database engine (the Office Access Connectivity
    # Engine, referred to also as ACE and Microsoft Access Database Engine)
    # to replace the Jet database used for MDB files. The problem is that
    # NAVFIT98 writers did not re-write the code to accomodate the changes.
    # Instead they're hiding mdb database under accbd extension. For that
    # reason blank .mdb database is moved over and worked on, then extension
    # name is changed to accdb. It makes no difference for neither MS Access
    # nor NAVFIT98 but it does for Microsoft Access Database Engine.
    #get current working directory
    cwd = os.getcwd()
    shutil.copy((cwd + '\\lib\\blank.mdb'), name_folder)
    db_location = name_folder + '\\blank.mdb'
    #initiate ODB driver with right parameters
    database_driver = '{Microsoft Access Driver (*.mdb, *.accdb)}'
    conn = pyodbc.connect(driver=database_driver, dbq=db_location, autocommit=True)
    cursor = conn.cursor()
    cursor.execute('select * from Reports')
    sql = 'UPDATE [Reports] SET [Name] = ?, [Rate] = ?, [Desig] = ?, [SSN] = ?, [Active] = ?, [UIC] = ?, [ShipStation] = ?, [PromotionStatus] = ?, [DateReported] = ?, [Periodic] = ?, [FromDate] = ?, [ToDate] = ? WHERE [ReportType] = ?'
    params = (block1_name, block2_rate, block3_designation, block4_ssn, '1', block6_uic, block7_station, 'REGULAR', block9_date_reported, '1', block14_from, block15_to, 'Eval')
    cursor.execute(sql, params)
    conn.commit()
    cursor.close()
    conn.close()
    #this one has to stay for the reason stated above. M$ Access Driver would crash since it's not real MADE but Jet database.
    os.rename(
        name_folder + '\\blank.mdb',
        name_folder + '\\' + block2_rate + ' ' + (name.split())[1] + '.accdb')
    input('DATABASE WRITTEN SUCCESSFULLY')
def ReadAndLoad(self):
    """Parse an HTML-formatted .xls report and delete-then-insert its data
    rows into dbo.xanalysisofbenchmarks_managerandstrategyandportcodelist.

    Python 2 code (print statements, `except Exception, e`). Progress is
    printed; totals are collected in procresults (not returned).
    """
    procresults = {}
    try:
        my_datafilepathname = self.DataFilePathName
        #r"//Ipc-vsql01/data/Batches/prod/WatchFolder/incoming/PagesOutput_GetPadPortBenchAsOf_20161124_ADAKAT.xls"
        # get and format the modified date
        import os.path, time
        print 'got here !', my_datafilepathname
        filedatetime = os.path.getmtime(my_datafilepathname)
        from datetime import datetime
        filedatetime_forsql = datetime.fromtimestamp(
            filedatetime).strftime('%Y-%m-%d %H:%M:%S')
        import bs4, sys
        with open(my_datafilepathname, 'r') as f:
            webpage = f.read().decode('utf-8')
        soup = bs4.BeautifulSoup(webpage, "lxml")
        row_id = ''
        fieldnames = {}
        is_dataline = 0
        total_deleted = 0
        total_inserted = 0
        already_ran_delete_statement = 0
        # Header cells supply the column names, keyed by ordinal position.
        for node in soup.find_all(
                'th', attrs={}
        ):  #'style':'display: table-header-group; mso-number-format:\@;'
            if node.attrs['class'][0] in [
                    'HeaderCellNumeric', 'HeaderCellString'
            ]:
                fieldnames[len(fieldnames)] = node.string
        for nodeA in soup.find_all('tr', attrs={}):
            print '-----------------------'
            is_dataline = 0
            fieldvalues = {}
            for nodeB in nodeA.find_all('td', attrs={}):
                #print 'got here!!'
                #print nodeB.attrs['class'][0]
                if nodeB.attrs['class'][0] in [
                        'DataCellNumeric', 'DataCellString'
                ]:
                    #print 'got here!!!'
                    # A row is a data row only if it carries a RowID cell.
                    if fieldnames[len(fieldvalues)] == 'RowID':
                        is_dataline = 1
                        row_id = nodeB.string
                        print 'RowID:', row_id
                    #print row_id, fieldnames[len(fieldvalues)],'=', nodeB.string
                    fieldvalues[fieldnames[len(
                        fieldvalues)]] = nodeB.string
            if is_dataline == 1:
                #print 'got here !@'
                # Build comma-separated column and quoted-value lists.
                # NOTE(review): values are concatenated straight into the
                # SQL text (injection-prone); parameterized inserts would
                # be safer.
                fieldnames_string = ''
                fieldvalues_string = ''
                for k, v in fieldvalues.items():
                    #print '#',k,v,
                    fieldnames_string = fieldnames_string + k + ','
                    fieldvalues_string = fieldvalues_string + "'" + v + "',"
                fieldnames_string = fieldnames_string[:-1]
                fieldvalues_string = fieldvalues_string[:-1]
                #print 'fieldnames_string....................'
                #print fieldnames_string
                #fieldnames_string = fieldnames_string + 'last_update'
                #fieldvalues_string = fieldvalues_string + "'" + filedatetime_forsql + "'"
                #print fieldnames_string
                #print fieldvalues_string
                #print row_id
                #print fieldvalues[fieldnames[0]],fieldvalues[fieldnames[1]],fieldvalues[fieldnames[2]]
                import pyodbc
                cnxn = pyodbc.connect(
                    r'DRIVER={SQL Server};SERVER=ipc-vsql01;DATABASE=DataAgg;Trusted_Connection=True;'
                )
                cursor = cnxn.cursor()
                # Clear existing rows for this AsOfDate exactly once per file.
                if already_ran_delete_statement == 0:
                    cursor.execute(
                        "delete from dbo.xanalysisofbenchmarks_managerandstrategyandportcodelist where AsOfDate = ?",
                        fieldvalues['AsOfDate'])
                    total_deleted = total_deleted + cursor.rowcount
                    print ' ', cursor.rowcount, 'records deleted'
                    cnxn.commit()
                    already_ran_delete_statement = 1
                insert_sql = "insert into xanalysisofbenchmarks_managerandstrategyandportcodelist(" + fieldnames_string + ") values (" + fieldvalues_string + ")"
                print insert_sql
                cursor.execute(insert_sql)
                procresults['records inserted'] = cursor.rowcount
                total_inserted = total_inserted + cursor.rowcount
                print ' ', cursor.rowcount, 'records inserted'
                cnxn.commit()
        procresults['resultvalue1'] = 'success'
        procresults['total_deleted'] = total_deleted
        procresults['total_inserted'] = total_inserted
    except Exception, e:
        print type(e)
        print 'there was an error on ' + self.DataFilePathName
# NOTE(review): the three statements below are the tail of a worksheet-writing
# function whose `def` precedes this chunk; indentation reconstructed.
        ws.write(r, 8, row['GiftType'])
        r += 1
    return r, width


def sterilizestring(s):
    # Replace characters that are problematic in sheet/file names with '_'.
    for char in "?.!/;:":
        s = s.replace(char, '_')
    return s


#-----------------------------------------------------------------
print("Processing...")
cnxn = pyodbc.connect(
    "Driver={SQL Server Native Client 11.0};"  #requires explicitily stating the sql driver
    "Server=overlook;"
    "Database=re_racc;"
    "Trusted_Connection=yes;")  #use windows integrated security
cursor = cnxn.cursor()
startdate = '2018-07-01'
enddate = '2019-06-30'
# Adjacent string literals concatenate: this is 'exec sp_giftreconreport ?, ?'.
sqlcommand = 'exec sp_giftreconreport ' '?' ', ' '?' ''
sqlparams = (startdate, enddate)
cursor.execute(sqlcommand, sqlparams)
columns = [column[0] for column in cursor.description]  #['Campaign', 'Fund', 'Appeal', 'GiftID', 'RECORDS_ID', 'Type', 'GiftType', 'TotalAmount', 'SplitAmount', 'AnonRecord', 'AnonGift', 'CONSTITUENT_ID', 'FIRST_NAME', 'KEY_NAME', 'Ref', 'Name', 'Reference', 'POST_DATE', 'CHECK_DATE', 'CHECK_NUMBER', 'GiftDate', 'BATCH_NUMBER', 'FundCategory', 'IMPORT_ID']
print(columns)
data = [
]  #grab results, put into a list, put list into numpy array, and then put numpy array into pandas dataframe
for row in cursor:
    data.append(tuple(row))
def connect_to_db():
    """Open a pyodbc connection using the connection string stored in the
    ENV_DATABASE_CONNECTION_STRING environment variable."""
    # Database credentials.
    conn_str = os.environ["ENV_DATABASE_CONNECTION_STRING"]
    return pyodbc.connect(conn_str)
def _does_table_exist(sql_configuration):
    """Return True when the configured schema.table already exists
    (record_count > 0 in the existence-check query's first row)."""
    statement = SqlStatement.get_does_table_exist_statement(
        sql_configuration.schema_name, sql_configuration.table_name)
    connection = pyodbc.connect(sql_configuration.get_pyodbc_connection_string())
    result = pd.read_sql(statement, connection)
    first_row = next(result.iterrows())[1]
    return first_row['record_count'] > 0
""" Table Reference AGAMA """ import pyodbc pdt_conn = pyodbc.connect('Driver={SQL Server};' 'Server=10.199.16.69;' 'Database=PDT;' 'Trusted_Connection=no;' 'UID=sa;' 'PWD=Akreditasi2019!;') siakad_conn = pyodbc.connect('Driver={SQL Server};' 'Server=10.199.16.69;' 'Database=SIAKAD;' 'Trusted_Connection=no;' 'UID=sa;' 'PWD=Akreditasi2019!;') sipmaba_conn = pyodbc.connect('Driver={SQL Server};' 'Server=10.199.16.69;' 'Database=SIPMABA;' 'Trusted_Connection=no;' 'UID=sa;' 'PWD=Akreditasi2019!;') """ Select Data PDT """ print("=== Source Condition ====\n") cursor_1 = pdt_conn.cursor() cursor_1.execute('SELECT * FROM dbo.MD_ref_agama') for row in cursor_1: print (row)
def connect(self):
    """Open and return a new pyodbc connection using the configured
    connection string."""
    connection_string = self._config.connection_string
    return pyodbc.connect(connection_string)
from datetime import datetime
import time
import urllib
import os

#AZURE SQL SERVER CONNECTION
# NOTE(review): Python 2 style -- urllib.quote_plus lives in urllib.parse on
# Python 3. Credentials are hard-coded and should be moved to configuration.
params = urllib.quote_plus(
    "DRIVER={ODBC Driver 17 for SQL Server};SERVER=mbslbiserver.database.windows.net;DATABASE=mbsldwh_dev;UID=Reports;PWD=mbsl1234!"
)
engineAzure = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)

# Pull a sample of Migration_Agg and stage it to a local CSV file.
df_Migration_Agg = pd.read_sql_query("""SELECT top 10 * FROM Migration_Agg """,
                                     engineAzure)
df_Migration_Agg.to_csv("migration_agg.csv")

import pyodbc
cnxn = pyodbc.connect('Driver={ODBC Driver 17 for SQL Server};'
                      'Server=mbslbiserver.database.windows.net;'
                      'Database=mbsldwh_dev;'
                      'UID=Reports;'
                      'PWD=mbsl1234!')
cursor = cnxn.cursor()
# Server-side bulk load of the staged CSV, then remove the temp file.
# NOTE(review): BULK INSERT reads the path on the *server*, so this only
# works when the script runs on (or shares storage with) the DB host.
cursor.execute("""bulk insert migration_agg from 'migration_agg.csv'; """)
os.remove('migration_agg.csv')
#conn = ctds.connect('mbslbiserver.database.windows.net', user='******', password='******', database='mbsldwh_dev')
#conn.bulk_insert('Migration_Agg', (df_Migration_Agg.to_records(index=False).tolist()))
# connect_str: "jdbc:sqlserver://kangxhsqlserversea.database.windows.net:1433;database=kangxhsqldbsea;user=allenk@kangxhsqlserversea;password=L04N8Bmv12zWdMd;encrypt=true;trustServerCertificate=false;hostNameInCertificate=*.database.windows.net;loginTimeout=30;" import pyodbc import string connect_str = "jdbc:sqlserver://kangxhsqlserversea.database.windows.net:1433;database=kangxhsqldbsea;user=allenk@kangxhsqlserversea;password=L04N8Bmv12zWdMd;encrypt=true;trustServerCertificate=false;hostNameInCertificate=*.database.windows.net;loginTimeout=30;" server = connect_str[connect_str.find("sqlserver://") + len("sqlserver://") : connect_str.find(":1433")] database = connect_str[connect_str.find("database=") + len("database=") : connect_str.find(";user="******"user="******"user="******"@", connect_str.find("user="******"user="******"password="******"password="******";encrypt=true")] driver= "{ODBC Driver 17 for SQL Server}" sqlconnect = pyodbc.connect('DRIVER='+driver+';SERVER='+server+';PORT=1433;DATABASE='+database+';UID='+username+';PWD='+ password) cursor = sqlconnect.cursor() cursor.execute("SELECT TOP 20 pc.Name as CategoryName, p.name as ProductName FROM [SalesLT].[ProductCategory] pc JOIN [SalesLT].[Product] p ON pc.productcategoryid = p.productcategoryid") row = cursor.fetchone() while row: print (str(row[0]) + " " + str(row[1])) row = cursor.fetchone()
import pyodbc import pandas as pd import Password import requests import urllib3 # ---------------------- Database Call ---------------------------------- try: conn = pyodbc.connect('Driver={SQL Server Native Client 11.0};' 'Server=174.101.154.93;' 'Database=PasswordManager;' 'uid=bruceba;' 'pwd=password') # Store return value of a database query in a Panda Data Frame value = pd.read_sql_query('select * from tEntries', conn) print(value.count()) # Foreach row value, create a Password Class and store it in a list of Password Classes DatabasePasswords = [ (Password.Password(row.EntryID, row.UserID, row.WebsiteDomainID, row.WebsitePasswordID, row.CategoryID)) for index, row in value.iterrows() ] # Foreach Password Class in list of Passwords, print out the EntryID an int for x in DatabasePasswords: print(int(x.EntryID)) # print out the length of the array not the count in this instance
from datetime import datetime # defines excek styleas input_start_date = "'" + str( input( 'please input the report start date: in the format 2017-11-01:')) + "'" input_end_date = "'" + str( input('please input the report end date: in the format 2017-11-01:')) + "'" style0 = xlwt.easyxf('font: name Times New Roman, color-index red, bold on', num_format_str='#,##0.00') style1 = xlwt.easyxf(num_format_str='D-MMM-YY') style = XFStyle() style.num_format_str = 'YYYY-MM-DD' # connect to DB cnxn = pyodbc.connect("Driver={ODBC Driver 13 for SQL Server};" "server=tvbcsveis05;" "Database=SC_ETL_NEW;" "Trusted_Connection=yes") cursor = cnxn.cursor() # with open('C:/Users/ac21611/Desktop/bbcl_scrips/Weekly/SQL_SCRIPTS/PV.txt, 'r') as myfile: # data=myfile.read().replace('\n', ''); fd = open( 'C:/Users/ac21611/Desktop/bbcl_scrips/Weekly/SQL_SCRIPTS/WEEKLYPV.sql', 'r') sqlFile = fd.read() fd.close() rows = cursor.execute(sqlFile % { 'start_date': input_start_date, 'end_date': input_end_date
# NOTE(review): `f` is opened earlier in the file (not visible in this chunk);
# it holds scraped YouTube comment data as JSON.
data = json.load(f)
# Wrapping each value list in a Series pads ragged columns with NaN.
df = pd.DataFrame({key: pd.Series(value) for key, value in data.items()})
df['video_Id'] = 'bofQbiUPKXA'
# Derive date parts from the raw comment_time column.
df['comment_datetime'] = pd.to_datetime(df['comment_time'])
df['comment_year'] = df['comment_datetime'].dt.year
df['comment_month'] = df['comment_datetime'].dt.month
df['comment_day'] = df['comment_datetime'].dt.day
df['comment_time'] = df['comment_datetime'].dt.time

server = 'DESKTOP-H5MDRS4\SQLEXPRESS'
database = 'youtube_db'

#connect to MS SQL SERVER
cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server}; \
                      SERVER=' + server + '; \
                      DATABASE=' + database + '; \
                      Trusted_Connection=yes;')
cursor = cnxn.cursor()
insert_query = '''
INSERT INTO VideoComments (videoID,commentAuthor,userID,comment,commentTime,commentDateTime,commentYear,commentMonth,commentDay)
VALUES (?,?,?,?,?,?,?,?,?);
'''
# Insert row by row; iterrows() yields (index, Series) pairs.
for column, value in df.iterrows():
    row = list(value)
    cursor.execute(insert_query, row)

#commit query
cnxn.commit()
import pyodbc

# Connection settings for the Northwind sample database
# (credentials were redacted in the source).
server = 'local server'
database = 'Northwind'
username = '******'
password = '******'

# Assemble the ODBC connection string and open the connection.
_conn_str = ('DRIVER={ODBC Driver 13 for SQL Server}'
             ';SERVER=' + server +
             ';DATABASE=' + database +
             ';UID=' + username +
             ';PWD=' + password)
con = pyodbc.connect(_conn_str)
import pyodbc

# Server and database names for the AdventureWorks sample database.
DB = {'servername': 'PRASUN-ITT', 'database': 'AdventureWorks2014'}

# create the connection (Windows integrated security)
conn = pyodbc.connect('DRIVER={SQL Server};SERVER=' + DB['servername'] +
                      ';DATABASE=' + DB['database'] +
                      ';Trusted_Connection=yes')
cursor = conn.cursor()
cursor.execute('SELECT * FROM [AdventureWorks2014].[Production].[Location]')
# Print every row of the Location table.
for record in cursor:
    print(record)
def sql_connet(user, password, host):
    """Connect to SQL Server via ODBC Driver 17 and return the
    (connection, cursor) pair.

    NOTE(review): the name looks like a typo for "sql_connect" but is
    kept unchanged for caller compatibility.
    """
    conn_str = ('DRIVER={ODBC Driver 17 for SQL Server};SERVER=' + host +
                ';UID=' + user + ';PWD=' + password)
    connection = pyodbc.connect(conn_str)
    return connection, connection.cursor()
import logging import traceback import json import random import pyodbc import time logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) fh = logging.FileHandler('dashboard.log') fh.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s - %(name)s -%(levelname)s - %(message)s') fh.setFormatter(formatter) logger.addHandler(fh) conn = pyodbc.connect("Driver=/usr/local/lib/libtdsodbc.so;Server=140.160.141.163;UID=WIN-EFOCD61SVFV\Administrator;PWD=morethan12characters!;PORT=1433;DATABASE=ION_Data2;TDS_VERSION=8.0;") if conn : logger.info('successfully connected to the db') cmd = '''select top 10 DLS.TimestampSourceLT, DL.Value, S.Name from Source S, DataLogStamp DLS, DataLog DL where S.ID = DLS.SourceID and DL.DataLogStampID = DLS.ID and DL.Value is not null and DL.Value > 0 and DLS.TimestampSourceLT < '{NextYear}' and S.Name >= '{earliestName}' and S.Name <= '{latestName}' order by DLS.TimestampSourceLT;''' cmd2 = '''select top 10 T.TimestampSourceLT, DL.Value from
def test_post_encourage_user_relationship():
    """For recent auto-login students pulled from the DB, POST
    mentor->student relationships (types 8 and 9) and assert 201.

    NOTE(review): DB credentials come from ss_helpers; MentorEntityKey is
    set elsewhere in this file. The connection string contains a
    redaction artifact ('user='******';') from the source.
    """
    global StudentEntityKey
    mydb = pyodbc.connect('Driver={SQL Server};'
                          'Server=' + ss_helpers.dbserv + ';'
                          'DB=mco;'
                          'user='******';'
                          'pwd=' + ss_helpers.dbpword + ';')
    mycursor = mydb.cursor()
    ##########Getting a user's AuthenticationID################
    mycursor.execute(
        "SELECT top 10 cp.entitykey, ca.zip, cp.birthdate, ce.IsAutoLogin, ce.userid, em.emailkey\
        FROM MCO.Core.person cp\
        join MCO.Core.entityuser ce on cp.entitykey = ce.entitykey\
        join MCO.Core.EntityAddress cea on cp.entitykey = cea.entitykey\
        join MCO.Core.AddressDomestic ca on cea.AddressKey = ca.addresskey\
        join MCO.Core.EntityEmail ee on ce.entitykey = ee.entitykey\
        join MCO.Core.Email em on ee.emailkey = em.emailkey\
        where ce.IsAutoLogin = 1 and ca.zip is Not Null and cp.birthdate is Not Null and ce.userid is Not Null \
        order by ce.entitykey desc")
    myresult = mycursor.fetchall()
    print(myresult)
    ## need a loop to post all the mentor/student relationships from the students I grabbed from the db above
    # NOTE(review): i is never incremented, so row[i] is always the first
    # column (entitykey) of each row -- apparently intentional.
    i = 0
    for row in myresult:
        StudentEntityKey = (row[i])
        print(StudentEntityKey)
        head = {
            'Content-Type': 'application/json',
            'x-api-key': ss_helpers.environ,
        }
        # Relationship type 8.
        creds = {"relationship_type_key": 8, "entity_key": StudentEntityKey}
        response = requests.post(ss_helpers.envUrl + '/encourage_users/' +
                                 MentorEntityKey + '/relationships',
                                 headers=head,
                                 json=creds)
        print(response)
        print(
            "Create Encourage User Relationship: Expected Response Code is 201, Actual Response Code is",
            response.status_code)
        assert response.status_code == 201
        head = {
            'Content-Type': 'application/json',
            'x-api-key': ss_helpers.environ,
        }
        # Relationship type 9 for the same student.
        creds = {"relationship_type_key": 9, "entity_key": StudentEntityKey}
        response = requests.post(ss_helpers.envUrl + '/encourage_users/' +
                                 MentorEntityKey + '/relationships',
                                 headers=head,
                                 json=creds)
        print(response)
        print(
            "Create Encourage User Relationship: Expected Response Code is 201, Actual Response Code is",
            response.status_code)
        assert response.status_code == 201
        StudentEntityKey = str(StudentEntityKey)
import pyodbc cnxn = pyodbc.connect( "DRIVER={SQL Server};SERVER=redhookdev;DATABASE=news_matching;uid=python_class_user;pwd=[provided in class]" ) import elasticsearch as els from elasticsearch import helpers es = els.Elasticsearch("http://fcsearchdev04.nycnt1a.fdncenter.org:9200", http_auth=("elastic", "ocelot243kiwi")) chunk_size = 1 data = None counter = 0 with open("fromdavid.json", encoding='utf-8') as f: data = f.read() if data is not None: data = json.loads(data) docs = data["fromdavid"] docs_to_index = [] for d in docs: counter += 1 identifier = d["key"] docs_to_index.append({ "_index": "dao", "_source": d, "_type": "text", "id": identifier }) if counter % chunk_size == 0: try:
def _establish_connection(self):
    """Build a SQL Server connection string from the instance's
    server/database/credentials and open a pyodbc connection."""
    conn_str = ('DRIVER={ODBC Driver 17 for SQL Server}'
                ';SERVER=' + self.server +
                ';DATABASE=' + self.database +
                ';UID=' + self.username +
                ';PWD=' + self.password)
    return pyodbc.connect(conn_str)
def Update(conexion):
    # Rewrite the account name for accountid 1007, then show the table.
    print("actulizando")
    cur = conexion.cursor()
    cur.execute(
        "update dimusers set account = 'BTO\\Anibal' where accountid = 1007")
    cur.commit()
    Select(conexion)


def Delete(CONEXION):
    # Remove the test row with accountid 1007, then show the table.
    print("borrando")
    cur = CONEXION.cursor()
    cur.execute("delete from dimusers where accountid = 1007")
    cur.commit()
    Select(CONEXION)


# Connect to the local DataMart__Security database with Windows auth.
# NOTE(review): Select/Select2/Insert are defined elsewhere in this file.
cn = pyodbc.connect("Driver={SQL Server Native Client 11.0};"
                    "Server=.;"
                    "Database=DataMart__Security;"
                    "Trusted_Connection=Yes;")
if cn:
    print("conecto!")
Select2(cn)
#Insert(cn)
#Update(cn)
#Delete(cn)
cn.close()
import pyodbc as sql import os # Create connection SERVERNAME = 'RDW-BCRSQL01' DATA_BASE_INFO = 'Qpulse5' USERNAME = os.environ.get('DB_USER_DEV_QP') PASSWORD = os.environ.get('DB_PW_DEV_QP') print("\r\n") con = sql.connect(driver="{ODBC Driver 17 for SQL Server}", server=SERVERNAME, database=DATA_BASE_INFO, uid=USERNAME, password=PASSWORD, encoding='utf-8') cur = con.cursor() db_cmd = """ DROP TABLE IF EXISTS ##databaseperms CREATE TABLE ##databaseperms ( dbname varchar(50) , dbrolename varchar(50) , dbusernames varchar(max) ) DECLARE @databaseid int, @dbname varchar(50), @useStatement nvarchar(100), @insertStatement nvarchar(max)
import dash import dash_core_components as dcc import dash_html_components as html import pandas as pd import plotly.graph_objs as go import pyodbc from datetime import datetime as dt app = dash.Dash() server = 'imlvs03\sql2005' database = 'DW_Development' user = '******' password='******' conn = pyodbc.connect(uid=user, pwd=password, driver='{SQL Server Native Client 11.0}',server='imlvs03\sql2005',database='DW_Development') sql = """SELECT brand, name, state, fueltype, date, price from fuelstations join fuelprices on fuelstations.id = fuelprices.id order by date asc, brand asc""" df = pd.read_sql(sql, conn) brands = df['brand'].unique() fueltypes = df['fueltype'].unique() states = df['state'].unique() app.layout = html.Div([ html.Div([ dcc.Dropdown( id='state', options=[{'label': i, 'value': i} for i in states],