def main(): table_name = sys.argv[1] corpus = ReviewCorpus.load("model/" + table_name) read_con = MySQLdb.connect( host=DB_SERVER, port=DB_SERVER_PORT, user=DB_USER, passwd=DB_PASSWORD, db=DB_NAME, charset="utf8mb4" ) write_con = MySQLdb.connect( host=DB_SERVER, port=DB_SERVER_PORT, user=DB_USER, passwd=DB_PASSWORD, db=DB_NAME, charset="utf8mb4" ) read_cursor = read_con.cursor() read_cursor.execute(SQL_QUERY % (table_name)) write_cursor = write_con.cursor() for _ in range(read_cursor.rowcount): record = read_cursor.fetchone() hotel_review = format_review(record[1]) tfidf_score = 0 if hotel_review: tfidf_score = corpus.tfidf_score(hotel_review) write_cursor.execute(SQL_UPDATE % (table_name, tfidf_score, record[0])) write_con.commit() print "[%f]%s" % (tfidf_score, record[1]) write_cursor.close() write_con.close() read_cursor.close() read_con.close()
def connect(self, hostType, db):
    """Ensure a live MySQL connection/cursor exists in SQLHub.connection
    for hostType, (re)connecting when missing or when ping() fails.

    Connection parameters come from self.conf[hostType]; the cursor is a
    DictCursor.  `db` optionally selects a default database.
    """
    ##Make sure we really need to connect##
    connect = False
    if hostType in SQLHub.connection and SQLHub.connection[hostType]['con_obj']:
        try:
            ##We have a connection, make sure it's active##
            SQLHub.connection[hostType]['con_obj'].ping()
        except OperationalError:
            ##Connection is corrupt, reconnect##
            connect = True
    else:
        ##No connection for host type, make connection##
        connect = True
    if connect:
        ##No connection exists, connect##
        SQLHub.connection[hostType] = dict( con_obj=None, cursor=None)
        if db:
            SQLHub.connection[hostType]['con_obj'] = MySQLdb.connect(
                host=self.conf[hostType]['host'],
                user=self.conf[hostType]['user'],
                passwd=self.conf[hostType].get('passwd', ''),
                cursorclass=MySQLdb.cursors.DictCursor,
                db=db)
        else:
            SQLHub.connection[hostType]['con_obj'] = MySQLdb.connect(
                host=self.conf[hostType]['host'],
                user=self.conf[hostType]['user'],
                passwd=self.conf[hostType].get('passwd', ''),
                cursorclass = MySQLdb.cursors.DictCursor)
        SQLHub.connection[hostType]['cursor'] = SQLHub.connection[hostType]['con_obj'].cursor()
def open_master_connection():
    """
    Open a connection on the master.

    Returns (conn, username, password, port_number).  When a defaults file
    is configured, credentials are read from its [client] section,
    falling back to the command-line options if the file is unusable.
    """
    if options.defaults_file:
        conn = MySQLdb.connect(read_default_file = options.defaults_file)
        config = ConfigParser.ConfigParser()
        # BUGFIX: the bare except only guarded config.read() (which does not
        # raise on a missing file); the config.get() calls were unprotected
        # and crashed when the [client] section was absent.  Fall back to
        # the command-line options instead.
        username = options.user
        password = options.password
        port_number = options.port
        try:
            config.read(options.defaults_file)
            username = config.get('client','user')
            password = config.get('client','password')
            port_number = int(config.get('client','port'))
        except Exception:
            pass
    else:
        username = options.user
        port_number = options.port
        if options.prompt_password:
            password=getpass.getpass()
        else:
            password=options.password
        conn = MySQLdb.connect(
            host = options.host,
            user = username,
            passwd = password,
            port = options.port,
            unix_socket = options.socket)
    return conn, username, password, port_number
def index(req):
    """Dispatch POSTed commands and return a JSON string.

    'R...' regenerates the standard set and returns distinct user ids;
    'S,a,b' triggers getSubtract(a, b); any other payload is treated as a
    user id whose image hash codes are returned.
    """
    send = []
    post_data = str(req.form.list)[8:-7]
    if post_data[0] == 'R':
        generateStandard()
        db = MySQLdb.connect("localhost", "erik", "erik", db_name)
        cursor = db.cursor()
        cursor.execute("SELECT DISTINCT(user_id) FROM {}".format(table_name))
        send = cursor.fetchall()
        db.close()
    elif post_data[0] == 'S':
        tmp = post_data.split(',')
        getSubtract(tmp[1], tmp[2])
    else:
        user_id = int(post_data)
        db = MySQLdb.connect("localhost", "erik", "erik", db_name)
        cursor = db.cursor()
        # SECURITY FIX: user_id is request-controlled — bind it as a query
        # parameter instead of formatting it into the SQL string.
        cursor.execute("SELECT image_id, browser FROM {} WHERE user_id=%s".format(table_name), (user_id,))
        data = cursor.fetchall()
        db.close()
        generatePictures(data, user_id)
        send = gen_hash_codes(data)
    send_string = json.dumps(send)
    return send_string
def remove_all_data():
    """
    Completely wipes the ingestion, should never be used apart from testing
    :return: Returns True when completed
    """
    # Root connection used to enumerate and drop generated databases.
    root_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1)
    cur = root_db.cursor()
    query = "SHOW DATABASES;"
    cur.execute(query)
    for row in cur.fetchall():
        # Only databases carrying the ingestion prefix (or Person_Course)
        # are dropped; everything else is left untouched.
        if row[0].find(db_prepend) > -1 or row[0] == 'Person_Course':
            #Drop the relevant DBs
            query = "DROP DATABASE "+row[0]
            cur.execute(query)
            root_db.commit()
            log("*** Removing database "+row[0])
    #Empty the ingestor
    pcourse_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1)
    pcur = pcourse_db.cursor()
    query = "TRUNCATE ingestor"
    pcur.execute(query)
    pcourse_db.commit()
    log("*** Resetting ingestor cache")
    #Delete the mongoDB
    cmd = config.MONGO_PATH + "mongo " + config.MONGO_HOST + "/logs --eval \"db.dropDatabase()\""
    os.system(cmd)
    # NOTE(review): the docstring promises True but nothing is returned —
    # confirm callers ignore the result.
def SaveToken_old(token,typeToken,useruid):
    """Store an access token for user useruid, skipping the insert when the
    token already exists.

    typeToken is 0 for a Facebook token, 1 for a Twitter token.
    """
    #check if the token is present in the database
    db=MySQLdb.connect(host=HOST,user=DBUSER,passwd=DBPASS,db=DBNAME, use_unicode=True,charset="utf8")
    cursor=db.cursor(MySQLdb.cursors.DictCursor)
    # SECURITY FIX: bind the token as a parameter instead of concatenating
    # it into the SQL string.
    sql="SELECT count(*) as total FROM accesstoken where accesstoken=%s"
    rowcount=0
    try:
        cursor.execute(sql,(token,))
        results=cursor.fetchall()
        rowcount=results[0]['total']
    except Exception:
        # Best-effort: treat any lookup failure as "token not present".
        rowcount=0
    cursor.close()
    db.close()
    if rowcount>0:
        return
    db=MySQLdb.connect(host=HOST,user=DBUSER,passwd=DBPASS,db=DBNAME, use_unicode=True,charset="utf8")
    cursor=db.cursor(MySQLdb.cursors.DictCursor)
    sql="INSERT INTO accesstoken (accesstoken,type,user_uid) VALUES (%s,%s,%s)"
    try:
        cursor.execute(sql,(token,typeToken,useruid))
        db.commit()
    except Exception:
        db.rollback()
    cursor.close()
    db.close()
def read_volt(config):
    """Read the lowest battery voltage from the DB, reset it to NULL, and
    return a PRTG-style channel description list."""
    data = []
    chandata = []
    #Read from DB
    db = MySQLdb.connect("localhost","monitor","1234","smartRectifier")
    curs = db.cursor()
    cmd = "SELECT srVBat FROM sensorDataCurrent ORDER BY srVBat ASC LIMIT 1"
    curs.execute(cmd)
    value = curs.fetchone()
    db.close()
    data.append(value[0])
    #Update data to NULL
    db = MySQLdb.connect("localhost","monitor","1234","smartRectifier")
    curs = db.cursor()
    # `with db` commits the UPDATE on exit (MySQLdb connection context manager).
    with db:
        cmd = 'UPDATE sensorDataCurrent SET srVBat = NULL WHERE name = "currentData"'
        curs.execute(cmd)
    # NOTE(review): close must happen after the with-block commit — confirm
    # against the original formatting.
    db.close()
    for i in range(len(data)):
        # Limits below are PRTG alarm thresholds for the voltage channel.
        chandata.append({"name": "Battery Voltage",
                         "mode": "float",
                         "unit": "Custom",
                         "customunit": "Volt",
                         "LimitMode": 1,
                         "LimitMaxError": 70.0,
                         "LimitMaxWarning": 50.0,
                         "LimitMinWarning": 40.0,
                         "LimitMinError": 30.0,
                         "value": float(data[i])})
    return chandata
def upload_meta(articleExists, url, title, author, domain, time_pub, text): gTime = strftime("%Y-%m-%d %H:%M:%S", gmtime()) conn = MySQLdb.connect(host="newshub.c5dehgoxr0wn.us-west-2.rds.amazonaws.com", user="******", passwd="columbiacuj", db="Newshub", charset="utf8") cursor = conn.cursor() # since we cannot get the author attribute, so we use the word "author" instead value = ['', title, author, domain, url, time_pub, gTime, text] if(articleExists == True): cursor.execute("insert into articles values (%s, %s, %s, %s, %s, %s, %s, %s)", value); else: cursor.execute("insert into deletions values (%s, %s, %s, %s, %s, %s, %s, %s)", value); conn.commit() cursor.close() conn.close() if(articleExists == True): print "upload an updated article" else: conn = MySQLdb.connect(host="newshub.c5dehgoxr0wn.us-west-2.rds.amazonaws.com", user="******", passwd="columbiacuj", db="Newshub", charset="utf8") cursor = conn.cursor() cursor.execute("delete from articles where URL=%s", url) conn.commit() cursor.close() conn.close() print "move a deleted article"
def setUp(self):
    """Open a MySQL connection for the test fixture, preferring the Travis
    defaults and falling back to a [pandas] group in the user's my.cnf;
    skips the test when no database is reachable."""
    _skip_if_no_MySQLdb()
    import MySQLdb
    try:
        # Try Travis defaults.
        # No real user should allow root access with a blank password.
        self.db = MySQLdb.connect(host='localhost', user='******', passwd='', db='pandas_nosetest')
    except:
        pass
    else:
        return
    try:
        self.db = MySQLdb.connect(read_default_group='pandas')
    except MySQLdb.ProgrammingError as e:
        raise nose.SkipTest(
            "Create a group of connection parameters under the heading "
            "[pandas] in your system's mysql default file, "
            "typically located at ~/.my.cnf or /etc/.my.cnf. ")
    except MySQLdb.Error as e:
        raise nose.SkipTest(
            "Cannot connect to database. "
            "Create a group of connection parameters under the heading "
            "[pandas] in your system's mysql default file, "
            "typically located at ~/.my.cnf or /etc/.my.cnf. ")
def __init__(self, dbname,uname,passwd):
    """Connect to MySQL as root, create the database and user if they do
    not exist, grant privileges, then reconnect as uname on dbname."""
    self.__con= MySQLdb.connect('localhost','root', 'root')
    self.__cursor=self.__con.cursor()
    query="show databases"
    self.__cursor.execute(query)
    rows=self.__cursor.fetchall()
    found=False
    for row in rows:
        if (str(row[0])==dbname):
            found=True
            break
    #if the database does not exist create it
    if(found==False):
        # Check whether the user already exists before creating it.
        query='select user from mysql.user'
        self.__cursor.execute(query)
        rows=self.__cursor.fetchall()
        found=False
        for row in rows:
            if(str(row[0])==uname):
                found=True
                break
        if(found==False):
            # NOTE(review): uname/passwd are interpolated into DDL unescaped —
            # injection risk if these values are ever untrusted.
            query="create user "+uname+"@localhost identified by '"+passwd+"'"
            self.__cursor.execute(query)
        query='CREATE DATABASE '+str(dbname)
        self.__cursor.execute(query)
        query="GRANT ALL PRIVILEGES ON "+dbname+".* To "+uname+"@localhost"
        self.__cursor.execute(query)
    #close the root connection and connect to the database
    self.__con.close()
    self.__con=MySQLdb.connect('localhost',uname,passwd,dbname)
    self.__cursor=self.__con.cursor()
    self.__con.commit()
    return
def redirect_short(short):
    """Resolve a shortened URL.

    A trailing '_' renders the click-stats page for the short id;
    otherwise the click is logged (with geolocation) and the request is
    redirected to the stored long URL, or 404 if unknown.
    """
    val = ["shorts.html/" + short]
    write_log(["V"],request,val)
    if short[len(short)-1] == '_':
        db = MySQLdb.connect(host="localhost", user="******", passwd="ivB2hnBX",db="ashwin_chandak") # database info
        cur = db.cursor()
        # SECURITY FIX: `short` comes from the URL — bind it as a parameter.
        cur.execute("SELECT * FROM clicks WHERE short_id = %s", (short[0:len(short)-1],))
        short_stats = cur.fetchall()
        return flask.render_template('stats.html',short_url = short[0:len(short)-1], number_clicks = len(short_stats),stats = short_stats)
    # retrieve short string from url
    db = MySQLdb.connect(host="localhost", user="******", passwd="ivB2hnBX",db="ashwin_chandak") # database info
    db.autocommit(True)
    cur = db.cursor()
    cur.execute("SELECT * FROM links WHERE short = %s", (short,))
    # redirect if it's exist
    if cur.rowcount > 0:
        results = cur.fetchone()
        long_url= results[2]
        app.logger.debug("redirect to " + long_url)
        #Get IP address and json of geolocationdata, so that latitude and longitude entries can be stored in db
        ip_address = get_ip(request)
        geodata = get_geolocation(ip_address)
        # BUGFIX: lat/long were interpolated unquoted, producing invalid SQL
        # whenever the 'NONE' fallback fired; parameter binding quotes safely.
        cur.execute("INSERT INTO clicks (`short_id`,`ip_address`,`lat`,`long`) VALUES (%s,%s,%s,%s)",
                    (short, ip_address, geodata.get('latitude','NONE'), geodata.get('longitude','NONE')))
        return redirect(long_url)
    # return 404 if not found
    abort(404)
def connect(self, charSet=None, dbase=None):
    """Return a DB connection.

    With no arguments, a pooled connection is returned.  Otherwise a new
    connection is opened to `dbase` (default self.dbName), passing
    `charset` only when explicitly given.  DB errors are routed to
    self.onDBError.
    """
    try:
        # IDIOM: use `is None` for None checks instead of `== None`.
        if charSet is None and dbase is None:
            return self.__connections.getConnection()
        if dbase is None:
            dbase = self.dbName
        # MySQLdb docs don't state the default charset, so only pass the
        # keyword when the caller supplied one (merges the two previous
        # near-duplicate connect calls).
        kwargs = dict(db=dbase, port=self.port)
        if charSet is not None:
            kwargs['charset'] = charSet
        con = mdb.connect(self.host, self.user, self.passwd, **kwargs)
        return con
    except mdb.Error as dbError:
        self.onDBError(dbError)
def main():
    """Load today's MLB schedule and add the games to the DB, choosing a
    local or hosted MySQL connection based on the 'local.txt' flag file."""
    localfile = 'local.txt'
    # 'local.txt' contains the literal text "True" or "False".
    with open(localfile) as f:
        g = f.read()
    if g == 'True':
        local = True
    else:
        local = False
    if local == False:
        # Hosted deployment: credentials come from the security() helper.
        fldr = 'mlb-dfs/'
        serverinfo = security('mysql', fldr)
        con = MySQLdb.connect(host=serverinfo[2], user=serverinfo[0], passwd=serverinfo[1], db='MurrDogg4$dfs-mlb')
    else:
        fldr = ''
        con = MySQLdb.connect('localhost', 'root', '', 'dfs-mlb') #### Localhost connection
    gameday = datetime.date.today()
    datestr = datestring(gameday)[1]
    gameList = getSchedule(datestr)
    addtoDb(con, gameList, datestr)
    return
def connect_to_sql(self, sql_connect, db_name="", force_reconnect=False, create_db=True): """ Connect to SQL database or create the database and connect :param sql_connect: the variable to set :param db_name: the name of the database :param force_reconnect: force the database connection :param create_db: create the database :return the created SQL connection """ print self if sql_connect is None or force_reconnect: try: sql_connect = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db=db_name) return sql_connect except Exception, e: # Create the database if e[0] and create_db and db_name != "": if sql_connect is None: sql_connect = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD) utils.log("Creating database " + db_name) cur = sql_connect.cursor() cur.execute("CREATE DATABASE " + db_name) sql_connect.commit() sql_connect.select_db(db_name) return sql_connect else: utils.log("Could not connect to MySQL: %s" % e) return None
def putFBgenerefs(df,auth=authent_local,blocsz=10000): con = mdb.connect(**auth) # add as gene references table to local db vals= [ tuple([ None if pd.isnull(v) else v for v in rw]) for rw in df.values ] sql="""INSERT INTO generefs(pubid,FBid,symb) VALUES( %s,%s,%s) """ nadds=len(vals)*1.0 nblocs=int(np.ceil(nadds/blocsz)) print('Adding gene references to local db') with con: cur=con.cursor() cur.execute("DROP TABLE IF EXISTS generefs") cur.execute("""CREATE TABLE generefs( generef_id INT AUTO_INCREMENT PRIMARY KEY, pubid INT, FBid VARCHAR(50), symb VARCHAR(100)) """) #chunk up to keep from bombing c=0 for i in range(nblocs): con = mdb.connect(**auth) with con: cur=con.cursor() if (c+blocsz)>nadds: cur.executemany(sql,vals[c:]) else: cur.executemany(sql,vals[c:(c+blocsz)]) c+=blocsz print('Done')
def upload(upload_rows,table):
    """Bulk-upload report rows into the matching backup table.

    Tables containing 'adconnect' go to nami_adconnect_backup, those
    containing 'advertiser' go to nami_admanager_backup; rows are
    REPLACEd in chunks of 1000.  Any failure is printed via traceback.
    """
    try:
        if "adconnect" in table:
            db = mdb.connect(host="localhost", user="******", db='nami_adconnect_backup')
            cur = db.cursor()
            # NOTE(review): `table` is concatenated into the statement — assumed
            # to come from trusted config, not user input; confirm.
            query = "REPLACE INTO `nami_adconnect_backup`.`" + table + """`(`DATE`,`PUB_NAME`,`SOURCE_NAME`,`BUDGET DEPLETED / MISCONFIGURATION`,`DAYPART BLOCK`,`CPIP FILTER`,`GEO FILTER`,`IP BLOCK`,`KEYWORD BLOCK`,`REFERER BLOCK`,`MISSING UA/REF`,`IE USER AGENT FILTER`,`CHROME USER AGENT FILTER`,`FIREFOX USER AGENT FILTER`,`SAFARI USER AGENT FILTER`,`OTHER USER AGENT FILTER`,`MAXQUERY THROTTLE`,`FEED FILTERS/TOLERANCES`,`FEED CALLS`,`TIMEOUTS`,`VALID FEED CALLS`,`CALLS PER QUERY`,`FEED COVERAGE`,`VALID FEED COVERAGE`,`PUBLISHER REQUEST RATIO`,`AD RETURN RATIO`,`AD AVAILABLE RATIO`,`CPC CLIPPED`,`EXCESS ADS`,`PUBLISHER USED LISTINGS`,`PUBLISHER COVERAGE`,`AVG PUBLISHER LISTINGS`,`AVG USED LISTINGS`,`GROSS CLICKS`, `INCOMPLETE URL`,`CLICK TIME FILTER`,`INTERASP FILTER`,`IP MISMATCH`,`UA MISMATCH`,`REFERER MISMATCH`,`CPL FILTER`,`CPQ FILTER`,`CPIP FILTER 2`,`CLICK CAP FILTER`,`COOKIE FILTERED`,`DOMAIN FILTER`,`BACKBUTTON FILTER`,`NO FLASH FILTER`,`OFFSCREEN FILTERED`,`ADVANCED JS FILTERED`,`IFRAME FILTERED`,`TOTAL FILTERED CLICKS`,`ROLLOVER REDIRECT DROPS`,`COOKIE REDIRECT DROPS`,`REFPAGE REDIRECT DROPS`,`JS REDIRECT DROPS`,`TOTAL DROPPED`,`SYSTEM ERRORS`,`TOTAL ERRORS AND DROPS`,`OFFSCREEN`,`ONSCREEN`,`NOSCREEN`,`ROLLOVER CLICKS`,`NON-ROLLOVER CLICKS`,`COOKIE PASS`,`COOKIE FAIL`,`IFRAME PASS`,`IFRAME FAIL`,`ADVANCED JS PASS`,`NATURALPLUS COMPLETE`,`REF ASSIGN JS`, `NATURAL JS`,`COMPLETED REFPAGE`,`FINAL REDIRECT`,`MAX JS NEG PLUS`,`ESTIMATED NET CLICKS`,`GOOD CLICK RATIO`,`ESTIMATED GROSS REVENUE`,`AVG ESTIMATED GROSS CPC`,`ASP ESTIMATED REVENUE`,`ESTIMATED PUBLISHER REVENUE`,`AVG ESTIMATED PUBLISHER CPC`,`ESTIMATED PUBLISHER ECPM`,`VALID NET CLICKS`,`VALID CLICKS PER PUBLISHER USED LISTINGS`,`VALID CLICK RATIO`,`VALID GROSS REVENUE`,`VALID ASP NET REVENUE`,`VALID PUBLISHER NET REVENUE`,`VALID REVENUE RATIO`,`VALID AVG 
GROSS CPC`,`VALID AVG PUBLISHER CPC`,`VALID PUBLISHER ECPM`) VALUES (%s, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
        elif "advertiser" in table:
            db = mdb.connect(host="localhost", user="******", db='nami_admanager_backup', port=2000)
            cur = db.cursor()
            query = "REPLACE INTO `nami_admanager_backup`.`" + table + "`(`Date`,`Advertiser`,`Campaign`,`Adgroup`,`Ad`,`Matches`,`Impressions`,`Clicks`,`Conversions`,`Spend`,`Cost`,`CPC`,`Matches_CTR`,`Impressions_CTR`) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        # Chunk large uploads to keep individual executemany calls bounded.
        if len(upload_rows)>1000:
            chunked = chunk(upload_rows,1000)
            for id,row in enumerate(chunked):
                cur.executemany(query,row)
                db.commit()
                print("%s - SUCCESSFULLY INSERTED %s OUT OF %s ROWS INTO DATABASE"%(datetime.now(),len(row),len(upload_rows)))
        else:
            cur.executemany(query,upload_rows)
            db.commit()
            print("%s - SUCCESSFULLY INSERTED %s ROWS INTO DATABASE"%(datetime.now(),len(upload_rows)))
    except:
        print(traceback.print_exc())
def create_databases(installer_json, config_json):
    """Create one '<prefix>_<platform>_main' database per configured
    platform and run its ../sql/<platform>.sql schema script."""
    db_conf = installer_json['database']
    db = MySQLdb.connect(host="localhost", user=db_conf['user'], passwd=db_conf['pass'])
    cur = db.cursor()
    prefix = installer_json['prefix_name']
    platforms = config_json['platforms']
    sql_path = '../sql/'
    for platform in platforms:
        path = sql_path + platform + '.sql'
        try:
            # `with open(path)` only probes that the schema file exists;
            # a missing file raises IOError and skips this platform.
            with open(path):
                db_name = prefix + '_' + platform + '_main'
                print '* Creating ' + platform + ' Database.'
                #Step 1
                cur.execute('CREATE DATABASE IF NOT EXISTS ' + db_name + ' DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci')
                db_platform = MySQLdb.connect(host="localhost", user=db_conf['user'], passwd=db_conf['pass'], db= db_name)
                cur_platform = db_platform.cursor()
                fo = open(path, "r")
                db_script = fo.read()
                fo.close()
                if(db_script != ''):
                    cur_platform.execute(db_script)
        except IOError:
            print '*** Warning: Problem creating ' + platform + ' Database.'
def putFBrefs(df,auth=authent_local,blocsz=2000): con = mdb.connect(**auth) # add as refs table to local db vals= [ tuple([ None if pd.isnull(v) else v for v in rw]) for rw in df.values ] sql="""INSERT INTO pub(pubid,title,ref,abstract) VALUES( %s,%s,%s,%s) """ nadds=len(vals)*1.0 nblocs=int(np.ceil(nadds/blocsz)) print('Adding to references to local db') with con: cur=con.cursor() cur.execute("DROP TABLE IF EXISTS pub") cur.execute("""CREATE TABLE pub( pubid INT PRIMARY KEY, title TEXT, ref TEXT, abstract TEXT) """) #chunk up to keep from bombing c=0 for i in range(nblocs): con = mdb.connect(**auth) with con: cur=con.cursor() if (c+blocsz)>nadds: cur.executemany(sql,vals[c:]) else: cur.executemany(sql,vals[c:(c+blocsz)]) c+=blocsz print('Done')
def criartabela_troca(self):
    """Create the `trocae` (trade) table in the agenciaBeta1 database,
    handling the already-exists MySQL error."""
    if not os.path.exists('agenciaBeta1.db'):
        print 'Banco de Dados existente troca'
        conexao = MySQLdb.connect(self.host,self.user,self.passwd,self.database )
        print 'Banco de Dados ja instalado troca'
    # change the db on the line below if needed
    conexao = MySQLdb.connect(host="localhost", user="******",passwd="carros", db="agenciaBeta1")
    cursor = conexao.cursor()
    sqlt = """create table trocae( codigotroca int(10) PRIMARY kEY Auto_increment NOT NULL UNIQUE, codigocliente varchar(60) NOT NULL , codigoveiculo varchar(10) NOT NULL , codigovendedor varchar(60) NOT NULL , placa varchar(10) NOT NULL , modelo varchar(60) NOT NULL , marca varchar(60) NOT NULL , ano varchar(60) NOT NULL , proprietario varchar(60) NOT NULL , valor float(10) , data varchar(20))"""
    try:
        # checks whether the table named above has already been created
        print ' uuuu troca'
        cursor.execute(sqlt)
        conexao.commit()
        print 'trocado'
    except MySQLdb.Error, e:
        # NOTE(review): 1150 is checked here but the venda variant checks
        # 1050 (table exists) — confirm which error code is intended.
        if e.args[0] == 1150:
            print 'Tratando o erro'
            self.addTreeViewCliente()
def criartabela_venda(self):
    """Create the `venven` (sales) table in the agenciaBeta1 database,
    handling MySQL error 1050 (table already exists)."""
    if not os.path.exists('agenciaBeta1.db'):
        print 'Banco de Dados existente'
        conexao = MySQLdb.connect(self.host,self.user,self.passwd,self.database )
        print 'Banco de Dados ja instalado'
    conexao = MySQLdb.connect(host="localhost", user="******",passwd="carros", db="agenciaBeta1")
    cursor = conexao.cursor()
    sql = """create table venven( codigovenda int(10) PRIMARY kEY Auto_increment NOT NULL UNIQUE, codigo varchar(60) NOT NULL , codigoveiculo varchar(10) NOT NULL , codigovendedor varchar(60) NOT NULL , formadepagamento varchar(50) not null, financeira varchar(50)not null, data varchar(20))"""
    print'l'
    try:
        print ' testando 1'
        cursor.execute(sql)
        conexao.commit()
        print 'testando 2'
    except MySQLdb.Error, e:
        # 1050 = table already exists
        if e.args[0] == 1050:
            print 'Tratando o erro'
            self.addTreeViewCliente()
def getQiubaiDb(password='******'):
    """Connect to the remote qiubai test database, prompting for the
    password until the connection succeeds.

    Returns (connection, cursor); creates the qiubai table when missing.
    """
    setDelimiter('try database connect')
    userName='******'
    hostName='114.215.108.67'
    dbName='test'
    portName=3306
    try:
        conn=mydb.connect(host=hostName,user=userName,passwd=password,db=dbName,port=portName)
    except mydb.OperationalError:
        # Wrong password: keep prompting until a connection succeeds.
        while True:
            password=raw_input('password for %s:'%userName)
            try:
                conn=mydb.connect(host=hostName,user=userName,passwd=password,db=dbName,port=portName)
                break
            except mydb.OperationalError:
                continue
    curs=conn.cursor()
    searchDbSql='SELECT * FROM qiubai'
    try:
        curs.execute(searchDbSql)
    except:
        # Table missing — create it.  NOTE(review): the bare except also
        # hides unrelated errors; narrow if possible.
        setDelimiter('no DB qiubai found')
        setQiubaiDb(curs)
    setDelimiter('database well connect')
    return (conn,curs)
def main(run):
    """Try the configured credentials (init.var) against MySQL, SSH, FTP
    and POP3 in turn, reporting success or failure per service."""
    try:
        printAlert(0,"Testing Mysql protocol [3306]")
        MySQLdb.connect(init.var['target'],init.var['user'],init.var['pass'],'')
        printAlert(3,"Logged with "+init.var['user']+"/"+init.var['pass']+" in Mysql")
    except:printAlert(1,"Service Off or No Logged.")
    try:
        printAlert(0,"Testing SSH protocol [22]")
        connect = pxssh.pxssh()
        connect.login(init.var['target'],init.var['user'],init.var['pass'])
        printAlert(3,"Logged with "+init.var['user']+"/"+init.var['pass']+" in SSH")
    except:printAlert(1,"Service Off or No Logged.")
    try:
        printAlert(0,"Testing FTP protocol [21]")
        ftp.login(init.var['user'],init.var['pass'])
        printAlert(3,"Logged with "+init.var['user']+"/"+init.var['pass']+" in FTP")
    except:printAlert(1,"Service Off or No Logged.")
    try:
        printAlert(0,"Testing POP3 protocol [110]")
        red=poplib.POP3(init.var['target'], 110)
        red.user(init.var['user']+"@"+init.var['target'])
        red.pass_(init.var['pass'])
        printAlert(3,"Logged with "+init.var['user']+"/"+init.var['pass']+" in POP3")
    except:printAlert(1,"Service Off or No Logged.")
    Space()
def run(request):
    """Toggle the VM image named in the URL path between running/disabled
    via launch/terminate shell scripts, then render click.html."""
    PATH_VAR=request.path
    path=PATH_VAR.split('/')
    path_var=path[1]
    output= None
    status=database_status(path_var)
    if status == "disable":
        p= subprocess.Popen(["/bin/bash","/home/tcs/DEVOPS_MIGRATION/myproject/vm_launch.sh",path_var,' &'], stdout=subprocess.PIPE)
        output, err = p.communicate()
        print output
        cnx = MySQLdb.connect(user='******', passwd="test",db="Devops")
        cur = cnx.cursor()
        # NOTE(review): backtick repr (`str(path_var)`) is Python-2-only and
        # interpolates a URL-derived value unescaped — injection risk.
        cur.execute("update devops_images set status='running' where image_name="+`str(path_var)`)
        cnx.commit()
    elif status == "running":
        p= subprocess.Popen(["/bin/bash","/home/tcs/DEVOPS_MIGRATION/myproject/vm_terminate.sh",path_var], stdout=subprocess.PIPE)
        output, err = p.communicate()
        cnx = MySQLdb.connect(user='******', passwd="test",db="Devops")
        cur = cnx.cursor()
        cur.execute("update devops_images set status='disable' where image_name="+`str(path_var)`)
        cnx.commit()
    t = loader.get_template('click.html')
    files_path=database_path()
    # bashCommand = "sh /home/tcs/test-launch.sh "+path_var
    # os.system(bashCommand)
    # Button label reflects the state *before* the toggle above ran.
    if status=="running":
        c = RequestContext(request,{'path':path_var,'button_name':'Terminate','test':files_path})
        return HttpResponse(t.render(c))
    else:
        #print "OUTPUT HI"
        c = RequestContext(request,{'path':path_var,'button_name':'Deploy','test':files_path})
        return HttpResponse(t.render(c))
def take_snapshot(self):
    """Take a consistent mysqldump snapshot under a global read lock and
    return the master binlog file/position per row of SHOW MASTER STATUS."""
    '''1st session '''
    # Session 1 holds FLUSH TABLES WITH READ LOCK for the dump's duration.
    db1 = MySQLdb.connect(self.conn_location, 'root', self.conn_password)
    exc = db1.cursor()
    exc.execute("FLUSH TABLES WITH READ LOCK;")
    '''2nd session '''
    db2 = MySQLdb.connect(self.conn_location, 'root', self.conn_password)
    exc = db2.cursor()
    exc.execute("SHOW MASTER STATUS;")
    rows = exc.fetchall()
    db2.close()
    i = 0
    ret = {}
    for row in rows:
        i = i+1
        ret['position' + str(i)] = {'binfile': row[0], 'position': row[1], 'mysqldump_path': self.mysqldump_path}
    # dump everything except test?
    # NOTE(review): passing the password on the command line exposes it in
    # the process list — consider a defaults file.
    os.system("mysql --user=root --password="******" --batch --skip-column-names " + \
              "--execute=\"SHOW DATABASES\" | egrep -v \"information_schema|test\" " + \
              "| xargs mysqldump --user=root --password="******" --lock-all-tables --databases > " + self.mysqldump_path)
    exc = db1.cursor()
    exc.execute("UNLOCK TABLES;")
    db1.close()
    return ret
def get(self):
    # Display existing Earthquake entries and a form to add new entries.
    # Production (App Engine) connects over the Cloud SQL unix socket;
    # development uses a direct TCP connection.
    if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME, db='Earthquake', user='******', passwd='root')
    else:
        # db = MySQLdb.connect(host='127.0.0.1', port=3306, db='Earthquake', user='******', charset='utf 8')
        # Alternatively, connect to a Google Cloud SQL instance using:
        db = MySQLdb.connect(host='173.194.225.166', db='Earthquake', port=3306, user='******')
    # Create table and populate
    self.createTable(db)
    self.truncateData(db)
    self.populateData(db)
    # Get weekly earth quake data for predefined values
    magnitudeTwo = self.getData(db, '2', 1)
    magnitudeThree = self.getData(db, '3', 1)
    magnitudeFour = self.getData(db, '4', 1)
    magnitudeFive = self.getData(db, '5', 1)
    magnitudeGreaterFive = self.getData(db, 'gt5', 2)
    # After usage closing the connection to DB
    db.close()
    variables = {'magnitudeTwo': magnitudeTwo, 'magnitudeThree': magnitudeThree, 'magnitudeFour': magnitudeFour, 'magnitudeFive': magnitudeFive, 'magnitudeGreaterFive': magnitudeGreaterFive}
    template = JINJA_ENVIRONMENT.get_template('main.html')
    self.response.write(template.render(variables))
def __init__(self,db_url,username,password,database,table):
    """Create (if needed) the target database and table, leaving self.db
    connected.  Column layout comes from generate_headers(for_db=True)."""
    self.database = database
    self.table = table
    # SSCursor streams results instead of buffering them client-side.
    self.db = MySQLdb.connect(db_url,username,password, cursorclass=cursors.SSCursor)
    cursor = self.db.cursor()
    if database == None or database.strip() == "":
        log("Error: Please provide database in config.json")
        raise KeyError
    create_db_sql = "CREATE DATABASE IF NOT EXISTS " + self.database
    cursor.execute(create_db_sql)
    cursor.execute("use " + database)
    # NOTE(review): this second connect replaces self.db, but `cursor`
    # still belongs to the first connection — confirm this is intended.
    self.db = MySQLdb.connect(db_url,username,password)
    columns = generate_headers(for_db=True)
    sql_str = ""
    for col in columns:
        if col == NUMBER:
            sql_str = sql_str + col + " int(10) PRIMARY KEY AUTO_INCREMENT, "
            continue
        if col == RESERVED_DAYS_12_MONTHS:
            sql_str = sql_str + col + " text, "
            continue
        sql_str = sql_str + col + " varchar(100), "
    sql_str = sql_str.strip(" ")
    sql_str = sql_str.strip(",")
    create_table_sql = "CREATE TABLE IF NOT EXISTS " + self.table + "(" \
        + sql_str + \
        ")"
    cursor.execute(create_table_sql)
    cursor.close()
def post(self):
    """Accept a community invite for the signed-in user, then re-render the
    join page with any remaining pending invites."""
    community = cgi.escape(self.request.get('community'))
    user = users.get_current_user()
    if not user:
        self.redirect('/')
    else:
        email = user.email()
        # App Engine uses the Cloud SQL socket; local dev uses TCP.
        if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
            db = MySQLdb.connect(unix_socket='/cloudsql/hack-the-north-1:its-not-django', db='musicsite', user='******')
        else:
            db = MySQLdb.connect(host='localhost', user='******', passwd="htndjango",db="musicsite")
        cursor = db.cursor()
        # NOTE(review): values are %-interpolated into SQL; `community` is
        # only cgi-escaped, which does not protect against SQL injection.
        result = cursor.execute('UPDATE users SET invite_accepted=1 WHERE email="%s" AND community_name="%s";' % (email,community))
        db.commit()
        result = cursor.execute('SELECT community_name,community_id FROM users WHERE email = "%s" AND invite_accepted = 0 AND invite_hidden=0;' % email)
        count = cursor.rowcount
        if (count == 0):
            template_messages={ "message": "%s has been joined successfully! You have no pending invites!" % community }
        else:
            pendinglist = []
            for row in cursor:
                pendinglist.append(row[0])
            invitelist = ""
            template_messages={ "message": "%s has been joined successfully! You have %s pending invite(s)!" % (community,count), "invites": pendinglist }
        db.close()
        self.render_response('join_community_page.html', **template_messages)
def get(self):
    """Render the join-community page listing the signed-in user's pending
    (not accepted, not hidden) invites."""
    user = users.get_current_user()
    if not user:
        self.redirect('/')
    else:
        email = user.email()
        # App Engine uses the Cloud SQL socket; local dev uses TCP.
        if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
            db = MySQLdb.connect(unix_socket='/cloudsql/hack-the-north-1:its-not-django', db='musicsite', user='******')
        else:
            db = MySQLdb.connect(host='localhost', user='******', passwd="htndjango",db="musicsite")
        cursor = db.cursor()
        result = cursor.execute('SELECT community_name,community_id FROM users WHERE email = "%s" AND invite_accepted = 0 AND invite_hidden=0;' % email)
        count = cursor.rowcount
        if (count == 0):
            template_messages={ "message": "You have no pending invites!" }
        else:
            #known bug where if you have two pending invites of the same name, accepting will cause you to accept both
            pendinglist = []
            for row in cursor:
                pendinglist.append(row[0])
            template_messages={ "message": "You have %s pending invite(s)!" % count, "invites": pendinglist }
        db.close()
        self.render_response('join_community_page.html', **template_messages)
def _open(self, conv='', host='', user='', passwd='', db='', port=0):
    """Open database connection. In case of errors exceptions are thrown.

    When a custom converter mapping `conv` is supplied it is forwarded to
    MySQLdb; either way the connection uses DB_TIMEOUT and utf8.
    """
    if conv != '':
        self._db = MySQLdb.connect(conv=conv, host=host, user=user,
                                   passwd=passwd, db=db, port=port,
                                   connect_timeout=DB_TIMEOUT)
    else:
        self._db = MySQLdb.connect(host, user, passwd, db, port,
                                   connect_timeout=DB_TIMEOUT)
    self._db.set_character_set('utf8')
def get(self):
    # Display existing guestbook entries and a form to add new entries.
    # App Engine connects via the Cloud SQL unix socket; local dev via TCP.
    if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME, db='im', user='******', passwd='Fudge')
    else:
        db = MySQLdb.connect(host='127.0.0.1', port=3306, db='im', user='******', passwd='')
        # Alternatively, connect to a Google Cloud SQL instance using:
        # db = MySQLdb.connect(host='ip-address-of-google-cloud-sql-instance', port=3306, user='******')
    cursor = db.cursor()
    cursor.execute('SELECT guestName, content, entryID FROM entries')
    # Create a list of guestbook entries to render with the HTML.
    guestlist = [];
    for row in cursor.fetchall():
        # Escape user-provided fields before handing them to the template.
        guestlist.append(dict([('name',cgi.escape(row[0])), ('message',cgi.escape(row[1])), ('ID',row[2]) ]))
    variables = {'guestlist': guestlist}
    template = JINJA_ENVIRONMENT.get_template('main.html')
    self.response.write(template.render(variables))
    db.close()
#!/usr/bin/python3
"""
lists all states from the database in ascending order
where name of state matches argument given
using MySQLdb
"""
if __name__ == "__main__":
    from sys import argv
    import MySQLdb
    db = MySQLdb.connect(host="localhost", user=argv[1],
                         passwd=argv[2], db=argv[3])
    cur = db.cursor()
    # SECURITY FIX: the search pattern comes from the command line — bind
    # it as a query parameter instead of formatting it into the SQL string.
    cur.execute("SELECT id, name FROM states WHERE name LIKE BINARY %s "
                "ORDER BY states.id ASC", (argv[4],))
    rows = cur.fetchall()
    for row in rows:
        print(row)
    cur.close()
    db.close()
def get_CTL(self):
    """Load the CTLv7 ID column into self.CTL as a numpy array."""
    connection = mdb.connect(**data)
    frame = pdsql.read_sql("select ID from CTLv7;", connection)
    self.CTL = frame.values
    connection.close()
class TestMySQLDBUnicodeChanges(TestUnicodeChanges):
    """Unicode-changes upgrade test backed by a real MySQL database."""
    def setUp(self):
        # Fresh working directory for the upgrade run.
        self.basedir = "MySQLDBUnicodeChanges"
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)
        os.makedirs(self.basedir)
        # Now try the upgrade process, which will import the old changes.
        self.spec = DBSpec.from_url(
            "mysql://*****:*****@localhost/buildbot_test", self.basedir)
        self.db = DBConnector(self.spec)
        self.db.start()
        # Drop any leftover tables so the upgrade starts from a clean slate.
        result = self.db.runQueryNow("SHOW TABLES")
        for row in result:
            self.db.runQueryNow("DROP TABLE %s" % row[0])
        self.db.runQueryNow("COMMIT")
        try:
            import MySQLdb
            conn = MySQLdb.connect(user="******", db="buildbot_test", passwd="buildbot_test", use_unicode=True, charset='utf8')
        except:
            # Mark the whole test class as skipped when MySQLdb is absent.
            TestMySQLDBUnicodeChanges.skip = "MySQLdb not installed"
def out_of_range():
    """Print an out-of-range warning and re-show the menu."""
    print('*********************************************************')
    print('* out of range! *')
    print('*********************************************************')
    menu()

# Menu-choice dispatch table: option key -> handler function.
options = {
    '0': query_flight,
    '1': query_bus,
    '2': query_hotel,
    '3': query_account,
    '4': query_reservation,
}
Cust_Name = ''
host = ''
username = ''
password = ''
table = ''
# argv_check() is expected to populate the connection globals above.
argv_check()
db = MySQLdb.connect(host, username, password, table)
cursor = db.cursor()
account_check()
while True:
    menu()
# NOTE(review): unreachable after `while True` — the loop body in the
# original formatting may have contained more statements; confirm.
db.close()
        # (fragment: tail of a method whose def is outside this chunk)
        items = re.findall(res,gettitle)
        return items

    # strip <div> wrappers from a scraped fragment
    def todiv(self,data):
        res = re.compile('<div.*?</div>')
        im = re.sub(res, '', data)
        return im

get=getbaidu()
getbaidu=get.getnuma()
# connect to the database
db=MySQLdb.connect("localhost","root","root","python",charset='gbk')
# create the cursor used for the inserts
cursor=db.cursor()
# execute the inserts
for item in getbaidu:
    #print item[0], get.todiv(item[1])
    # NOTE(review): values are %-interpolated and hand-quoted — injection
    # risk for scraped content; parameter binding would be safer.
    sql="INSERT INTO GETBAIDU(url,title)VALUES(%s,%s)" %("'"+get.todiv(item[1])+"'","'"+item[0]+"'")
    try:
        cursor.execute(sql)
        db.commit()
    except:
        db.rollback()
        #print "fail"
def connection():
    """Open and return a fresh MySQLdb connection to the local `eatbot` database."""
    return MySQLdb.connect(host="localhost", user="******", passwd="", db="eatbot")
            # (continuation — the enclosing method header and the opening
            # try statements are above this view)
            soup = BeautifulSoup(r.text, 'html.parser')
            # First and second "homeserver-count" blocks hold the published
            # and bought counters respectively — TODO confirm against page.
            soup1 = soup.find('div', {'class': 'homeserver-count'})
            soup2 = soup1.find_next('div', {'class': 'homeserver-count'})
            pub_num = soup1.p.em.get_text()
            buy_num = soup2.p.em.get_text()
        except Exception, e:  # Python 2 except syntax
            logging.warning('update cjol num error, msg is {}'.format(
                str(e)), exc_info=True)
        # NOTE(review): values are formatted straight into the SQL text —
        # injection-prone if user_name / source are untrusted.
        sql = """update grapuser_info set buy_num={}, pub_num={} where user_name ="{}" and grap_source = "{}"; """.format(buy_num, pub_num, un, mysql_source)
        try:
            db = MySQLdb.connect(**self.sql_config)
            cur = db.cursor()
            aaa = cur.execute(sql)
            db.commit()
            db.close()
        except Exception, e:
            print Exception, str(e)
            logging.warning('update mysql error, msg is {}'.format(str(e)),
                            exc_info=True)
            pass
    except Exception, e:
        # Outermost catch-all: log and fall through to the return.
        print Exception, str(e)
        logging.warning('error, msg is {}'.format(str(e)), exc_info=True)
        pass
    return pub_num, buy_num
def __init__(self, host, user, passwd, db):
    """Open a MySQL connection and keep both it and a cursor on the instance."""
    self._db_connection = MySQLdb.connect(host, user, passwd, db)
    # One shared cursor for this wrapper's lifetime.
    self._db_cur = self._db_connection.cursor()
    # (tail of an enclosing query helper — its header is above this view)
    if phenotype in PHENOTYPES['disease']:
        query = query.format('disease')
    elif phenotype in PHENOTYPES['food_and_nutrition']:
        query = query.format('foodnutrition')
    elif phenotype in PHENOTYPES['personality']:
        query = query.format('personality')
    else:
        # Unknown phenotype category: nothing to query.
        return None
    cursor.execute(query)
    return cursor.fetchone()


if __name__ == '__main__':
    login()
    try:
        # NOTE(review): whether `as cur` yields a cursor or the connection
        # depends on the MySQLdb/mysqlclient version — confirm before use.
        with mdb.connect('localhost', 'root', 'toor', 'userdb') as cur:
            add_user('+17146235999', 'Patrick Woo-Sam', cur)
            set_user_state('+17146235999', 2, cur)
            get_user_state('+17146235999', cur)
            egg_allergy_score = get_score('+17146235999', 'egg-allergy', cur)
            disease_scores = get_scores('+17146235999', PHENOTYPES['disease'], cur)
            for phenotype in phenotypes:
                if num_suggestions(phenotype, cur):
                    print(phenotype)
                    print(get_suggestions(phenotype, cur))
    except Exception as err:
        print(err)
    finally:
        # Selenium (?) driver opened by login() — always shut down.
        driver.close()
def mysql_connection(self, db_data_lst):
    """Connect to MySQL from a [host, user, password, db] list.

    Returns a (connection, cursor) pair.
    """
    host_address, user, pass_word, db_name = db_data_lst
    connection = MySQLdb.connect(host_address, user, pass_word, db_name)
    return connection, connection.cursor()
#======= Estruturas de dados e DB #----- Importar biblioteca do Mysql import MySQLdb #----- Configurar a conexão conexao = MySQLdb.connect(host='mysql.topskills.study', database='topskills01', user='******', passwd='ts2019') #----- Salva o cursor da conexão em uma variável cursor = conexao.cursor() #----- Criação do comando SQL e passado para o cursor comando_sql_select = "SELECT * FROM 01_MDG_PESSOA" cursor.execute(comando_sql_select) #---- Pega todos os resultados da execução do comando SQL e armazena em uma variável resultado = cursor.fetchall() #cria uma lista para armazenar os dicionarios lista_pessoas = [] for p in resultado: #----- Criação do dicionario que representa uma pessoa dicionario_pessoa = { 'Id': 0, 'Nome': '', 'Sobrenome': '', 'Idade': 0, 'Endereco_Id': 0 } #--- pega cada posição da tupla e atribui a uma chave do dicionário dicionario_pessoa['Id'] = p[0] dicionario_pessoa['Nome'] = p[1]
# coding: utf-8
import MySQLdb

__author__ = 'tuomao'
from bs4 import BeautifulSoup
import setting

# Shared module-level connection, configured from setting.DATABASE.
mysql_con = MySQLdb.connect(**setting.DATABASE['mysql'])


def get_file_content(file_path):
    """Return the whole contents of *file_path* as a string."""
    # Context manager closes the handle even on error — the original
    # leaked the open file object (and shadowed the `file` builtin).
    with open(file_path, 'r') as f:
        return f.read()


def get_permissions(content):
    """Extract <permission> entries from an Android manifest string.

    Returns a list of dicts with 'name' (last dotted component of the
    android:name attribute), 'group' and 'protectionLevel'.
    """
    # NOTE: no explicit parser given to BeautifulSoup; it picks a default.
    bs = BeautifulSoup(content)
    permissions = bs.find_all('permission')
    _permissions = []
    for permission in permissions:
        # Debug marker for the GET_TASKS permission specifically.
        if permission.attrs.get('android:name', '') == 'android.permission.GET_TASKS':
            print(1)
        per = {
            'name': permission.attrs.get('android:name', '').split('.')[-1],
            'group': permission.attrs.get('android:permissiongroup', ''),
            'protectionLevel': permission.attrs.get('android:protectionlevel', '')
        }
        _permissions.append(per)
    return _permissions
import MySQLdb
import csv
from datetime import datetime, date

#Connect to a MYSQL database
con = MySQLdb.connect(host='localhost', port=3306, db='Suppliers',
                      user='******', passwd='Password05')
c = con.cursor()  #create a cursor that we can use to execute SQL statements

# Export every supplier record costing more than 300 to CSV.
# `with` closes (and flushes) the file — the original passed an anonymous
# open() handle to csv.writer and never closed it, risking truncated output.
with open('supplier_list.csv', 'w', newline='') as csv_file:
    filewriter = csv.writer(csv_file, delimiter=",")
    header = ['Supplier Name', 'Invoice Number', 'Par Number', 'Cost',
              'Purchase Date']
    filewriter.writerow(header)
    c.execute("SELECT * FROM supplier WHERE Cost >300;")
    for row in c.fetchall():
        filewriter.writerow(row)
from flask import Response
from flask import request
from redis import Redis
from datetime import datetime
import MySQLdb
import sys
import redis
import time
import hashlib
import os
import json

# NOTE(review): `Flask` itself is not imported in this view — confirm a
# `from flask import Flask` exists above this chunk.
app = Flask(__name__)
startTime = datetime.now()

# Redis host can be overridden via the REDIS_HOST env var (default "redis").
R_SERVER = redis.Redis(host=os.environ.get('REDIS_HOST', 'redis'), port=6379)

# Module-level MySQL connection and cursor shared by all requests.
# NOTE(review): one global cursor is not safe under concurrent requests —
# confirm the deployment is single-threaded.
db = MySQLdb.connect("mysql", "root", "password")
cursor = db.cursor()


@app.route('/init')
def init():
    """Drop and recreate USERDB with an empty `users` table."""
    cursor.execute("DROP DATABASE IF EXISTS USERDB")
    cursor.execute("CREATE DATABASE USERDB")
    cursor.execute("USE USERDB")
    sql = """CREATE TABLE users ( ID int, USER char(30) )"""
    cursor.execute(sql)
    db.commit()
    return "DB Init done"
# -*- coding: utf-8 -*- """ Created on Tue Jul 11 12:31:51 2017 @author: Mars """ import MySQLdb # 打开数据库连接 db = MySQLdb.connect("localhost","root","root","sys" ) # 使用cursor()方法获取操作游标 cursor = db.cursor() # 使用execute方法执行SQL语句 cursor.execute("SELECT * from sys.path_table") # 使用 fetchone() 方法获取一条数据库。 data = cursor.fetchone() print data #print ("Database version : %s " % data) # 关闭数据库连接 db.close()
# (continuation — the statements building lastMonthList are above this view)
print lastMonthList  # Python 2 print statement
lastMonthList.sort()

## Other initialisations
continent = DB_configuration.continent
IXP_collector = {}
IXP_CC = {}
Current_db = 'MergedData'
CC_IXP = {}

## connect to the DB
db = MySQLdb.connect(host = DB_configuration.host, user = DB_configuration.user,
                     passwd = DB_configuration.passwd, db = Current_db)
cur = db.cursor()
print 'Connected'

# NOTE(review): continent is concatenated straight into the SQL — fine if
# it comes from trusted config, otherwise injection-prone.
query = "select IXP, RouteCollector, CC from AllRouteCollectors where Continent = '"+continent+"';"
cur.execute(query)
data = cur.fetchall()

# Build IXP -> collectors, CC -> IXP and CC -> [...] maps from the rows.
i = 0
while (i<len(data)):
    row = data[i]
    if row[0] not in IXP_collector.keys():
        IXP_collector[row[0]] = []
    IXP_CC[row[2]] = row[0]
    if row[2] not in CC_IXP.keys():
        CC_IXP[row[2]] = []
    # (loop body continues past this view — the `i` increment is not visible)
import MySQLdb

# NOTE(review): connect()'s 4th positional argument is the *database*
# name; passing 'table_name' here is what produces the "Unknown database"
# OperationalError reproduced in the sample traceback below.
conn = MySQLdb.connect('localhost', 'root', 'root', 'table_name')
cursor = conn.cursor()
cursor.execute("SELECT * FROM table_name")

# get a single row
row = cursor.fetchone()
print(row)

# disconnect from the database
conn.close()

# Traceback (most recent call last):
#   File "mysqlclient_package_sample.py", line 4, in <module>
#     conn = MySQLdb.connect('localhost', 'root', 'root', 'table_name')
#   File "/usr/local/lib/python3.6/site-packages/MySQLdb/__init__.py", line 86, in Connect
#     return Connection(*args, **kwargs)
#   File "/usr/local/lib/python3.6/site-packages/MySQLdb/connections.py", line 204, in __init__
#     super(Connection, self).__init__(*args, **kwargs2)
#   _mysql_exceptions.OperationalError: (1049, "Unknown database 'table_name'")
import MySQLdb
import random
import datetime

db = MySQLdb.connect("localhost", "root", "random1998", "farmacia")
cursor = db.cursor()

date = datetime.datetime(2018, 4, 28)

# Generate 100000 invoices (Fatura), each carrying 1-4 distinct random
# medication lines (Fatura_Med).  Everything is committed once at the end.
for n in range(100000):
    if n % 1000 == 0:
        print(n)  # progress marker every 1000 invoices
    id_fat = n + 16  #i am lazy
    cursor.execute("INSERT INTO Fatura (id, data_f, data_s, pontos_r, pontos_u, desconto, total, id_func, id_c) VALUES ('%d', '%s', '%s', '%d', '%d', '%f', '%f', '%d', '%d');"% (id_fat, date, date, 0, 0, 0.0, 0.0, random.randint(1,4), random.randint(1,7)))
    #db.commit()
    added_meds = []
    for _ in range(random.randint(1,4)):
        # Draw medication ids until we hit one not yet on this invoice.
        med = random.randint(1,6)
        while med in added_meds:
            med = random.randint(1,6)
        added_meds.append(med)
        cursor.execute("INSERT INTO Fatura_Med (id_f, id_m, qt_v, preco_v, preco_l) VALUES ('%d', '%d', '%f', '%f', '%f');" % (id_fat, med, random.randint(1,4), 0, 0))
        #db.commit()

db.commit()
import MySQLdb

from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.tools import argparser

# Pull API credentials (devkey) from a sibling creds file (Python 2 execfile).
execfile("../creds.py")
DEVELOPER_KEY = devkey
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"

youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                developerKey=DEVELOPER_KEY)

# Local database for the collected subscription data.
db = MySQLdb.connect(host="localhost", user="******", passwd="", db="youtube_app")

# Page through the channel's subscriptions, 50 results per request,
# chaining nextPageToken from one response into the next.
results1 = youtube.subscriptions().list(
    part="snippet,contentDetails,statistics",
    maxResults=50,
    channelId="UCUbh6T8Nr6ss7JWsq-3xYQg").execute()
results2 = youtube.subscriptions().list(
    part="snippet",
    maxResults=50,
    channelId="UCUbh6T8Nr6ss7JWsq-3xYQg",
    pageToken=results1["nextPageToken"]).execute()
results3 = youtube.subscriptions().list(
    part="snippet",
    maxResults=50,
    channelId="UCUbh6T8Nr6ss7JWsq-3xYQg",
    # (call continues past this view)
def main(argv):
    """CLI entry point: read OpenMote sensor rows from MySQL, run the
    trained HAR (human-activity-recognition) model on 16-row windows, and
    display each prediction via show().  Python 2 code.
    """
    USERNAME = '******'
    PASSWORD = '******'
    DATABASE = 'testdb'
    TABLE = 'collect_openmote_20160706_prd'
    SVM_STORINGMODEL = 'SVM_HAR_PA1.pkl'
    DCT_STORINGMODEL = 'DCT_HAR_PA1.pkl'
    print 'Prediction SBAN, version ' + VERSION
    try:
        # NOTE(review): the long-option list below looks credential-scrubbed
        # ("******") — confirm against the original source.
        opts, args = getopt.getopt(
            argv, "hu:p:d:t:s",
            ["username="******"password="******"database=", "table=", "store="])
    except getopt.GetoptError:
        print 'predictionMySqlOpenMote.py [-u <username>] [-p <password>] [-d <database>] [-t <table>][-s <store>]'
        sys.exit(2)
    # Apply command-line overrides to the defaults above.
    for opt, arg in opts:
        if opt == '-h':
            print 'predictionMySqlOpenMote.py [-u <username>] [-p <password>] [-d <database>] [-t <table>][-s <store>]'
            sys.exit()
        elif opt in ("-u", "--username"):
            USERNAME = arg
        elif opt in ("-p", "--password"):
            PASSWORD = arg
        elif opt in ("-d", "--database"):
            DATABASE = arg
        elif opt in ("-t", "--table"):
            TABLE = arg
        elif opt in ("-s", "--store"):
            SVM_STORINGMODEL = arg
    print 'USERNAME is "', USERNAME
    print 'PASSWORD is "', PASSWORD
    print 'DATABASE is "', DATABASE
    print 'TABLE is "', TABLE
    print 'TRAINED_MODEL is "', SVM_STORINGMODEL
    # Load the decision-tree model (the SVM variant is disabled).
    #clf = joblib.load(SVM_STORINGMODEL)
    clf = joblib.load(DCT_STORINGMODEL)
    # Open database connection
    db = MySQLdb.connect("localhost", USERNAME, PASSWORD, DATABASE)
    # prepare a cursor object using cursor() method
    cursor = db.cursor()
    st2, t2 = readStartTime(cursor, TABLE, 2)
    print 'systemtime2 = ', st2, ' sensortime2 = ', t2
    stmax = st2
    #Read after 30seconds when all sensors are available
    st12, t12 = readStartTime2(cursor, TABLE, 2, stmax + 20000)
    print 'st2 = ', st12
    st = st12
    rawRH = readSensorData(cursor, TABLE, 2, st)
    print 'rawRH.shape', rawRH.shape
    # Column 25 presumably holds the sensor timestamp — TODO confirm.
    tRH = rawRH[:, 25]
    tRH0 = np.floor(np.mean(tRH))
    print 'starting point = ', tRH0
    # Poll forever, advancing the time cursor tRH0 by 1 each pass.
    while True:
        rawRH = readSensorData2(cursor, TABLE, 2, tRH0)
        minLen = len(rawRH)
        if minLen > 0:
            # round up raw data upto 16 rows
            #print ' round up raw data up to 16 rows'
            if len(rawRH) < 16:
                print 'need to round RH len = ', len(rawRH)
                rawRH = roundUpRawData(rawRH)
            #print 'Converting raw data '
            RH = convertData(rawRH)
            #RH_fe = eFeat(RH)
            RH_fe = extract_feature(RH)
            #RH_fe_s = preprocessing.scale(RH_fe)
            RH_fe_s = RH_fe
            # Model expects a single sample: shape (1, n_features).
            RH_fe_s = RH_fe_s.reshape(1, len(RH_fe_s))
            predicted = clf.predict(RH_fe_s)
            print("Time = ", tRH0, "predicted = ", predicted)
            #writeActivity(TABLE, predicted, 2, t12)
            show(int(predicted) - 1)
            #time.sleep(0.01)
        else:
            print 'no available input sensor data'
            time.sleep(0.1)
        tRH0 += 1
    #root.mainloop()
    # disconnect from server
    # NOTE(review): unreachable — the while True above never exits.
    db.close()
from scrapy.contrib.spiders import XMLFeedSpider
from news.items import newsItem
import MySQLdb

#import database connection
db = MySQLdb.connect("localhost","root","local","MyDB")

#prepare a cursor object using cursor() method
cursor = db.cursor()

# Seed the spider with up to 10 not-yet-deleted source URLs from the DB.
sql = "select url from MyDB.SourceUrls where deleted_ind is null limit 10"
arr = []
try:
    cursor.execute(sql)
    result = cursor.fetchall()
    #print result
    # Each row is a 1-tuple; extend flattens it into a plain URL list.
    for row in result:
        arr.extend(row)
except:
    # NOTE(review): bare except hides the real error; only a message prints.
    print "Error: unable to fetch the data"
db.close()

#use XmlPathSelector (other one is HtmlXPathSelector - for HTML data)
class spider1(XMLFeedSpider):
    # RSS/XML feed spider fed from the URLs fetched above.
    name='newsspider'
    #start_urls = ['http://feeds.mercurynews.com/mngi/rss/CustomR']
    start_urls = arr
    itertag = 'item'
import MySQLdb
import sys

if __name__ == "__main__":
    # Credentials and target database come from the command line.
    user_name, user_password, db_name = sys.argv[1], sys.argv[2], sys.argv[3]

    """ connection to a db """
    db = MySQLdb.connect(
        host="localhost",
        user=user_name,
        passwd=user_password,
        db=db_name,
        port=3306,
    )

    """ Gives us an independent workspace for running queries """
    cur = db.cursor()

    """ execute to run a query """
    cur.execute("SELECT * FROM states ORDER BY id ASC")

    """ fetchall waits for all rows """
    for row in cur.fetchall():
        print(row)
from socket import *
from uuid import getnode as get_mac
import socket
import MySQLdb

db = MySQLdb.connect("localhost", "root", "", "detect")
cursor = db.cursor()

#Device details function
#def device_details():
print('running~')

network = '146.64.204.'


def is_up(addr, port=80, timeout=0.01):
    """Return 1 if a TCP connection to (addr, port) succeeds, else 0.

    Fixes two defects in the original:
    * `socket(AF_INET, SOCK_STREAM)` resolved to the socket *module*
      (the `import socket` above rebinds the name imported by
      `from socket import *`), which is not callable — use socket.socket.
    * connect_ex was called with `(addr)` (just a string); it requires an
      (address, port) tuple.  The port defaults to 80 — adjust if the
      scan targets a different service (TODO confirm intended port).
    The socket is always closed, even when connect_ex raises.
    """
    s = socket.socket(AF_INET, SOCK_STREAM)
    s.settimeout(timeout)
    try:
        if not s.connect_ex((addr, port)):
            return 1
        return 0
    finally:
        s.close()


# Sweep part of the /24 and record each host's name/status.
for ip in xrange(48, 109):
    addr = network + str(ip)
    mac = (get_mac())
    theVal = ''
    theNam = addr
    theStat = (getfqdn(addr))  # reverse-resolve the address
    cursor = db.cursor()
    # (continuation — the loop over config-file lines begins above this view)
    p = i.strip().split('=')
    if len(p) < 2:
        # Skip lines that are not key=value pairs.
        continue
    cf[p[0]] = p[1]

# some package may make build machine down
if 'BLACKLIST_PACKAGES' in cf:
    p_blacklist = cf['BLACKLIST_PACKAGES'].strip().split(" ")
else:
    p_blacklist = ['gcc-4.9', 'gcc-4.7', 'gcc-5', 'globus-']

# Open a connection for whichever backend the config selects.
if cf['DB_TYPE'] == 'MYSQL':
    import MySQLdb
    conn = MySQLdb.connect(host=cf['MYSQL_HOST'], user=cf['MYSQL_USER'],
                           passwd=cf["MYSQL_PASSWORD"], db=db, charset="utf8")
elif cf['DB_TYPE'] == 'POSTGRE':
    import psycopg2
    conn = psycopg2.connect(host=cf['POSTGRE_HOST'], user=cf['POSTGRE_USER'],
                            password=cf["POSTGRE_PASSWORD"], database=db)
else:
    # Unsupported DB type: abort.
    sys.exit(1)

cursor = conn.cursor()
# NOTE(review): the table name is %-interpolated into the SQL — assumed to
# come from trusted config, not user input.
sql = "select pkg,ver from %s" % (table)
cursor.execute(sql)
def basarili(self,mail):
    """Run the "successful e-mail application" UI test for each address in *mail*.

    Each address is submitted through the application form; the test then
    compares the row count of f_basvurular against the pre-test count
    (self.cursor.rowcount) to decide pass/fail, logging the outcome both to
    LOGS/firma-basvuru.txt and to the console (runtime strings are Turkish).
    """
    # Fresh connection just for this check (utf8 for Turkish text).
    db2 = MySQLdb.connect(host= "127.0.0.1", user = "******", passwd = "",
                          db= "deustaj", use_unicode=True, charset="utf8")
    cursor2 = db2.cursor()
    for i in range(len(mail)):
        # Fill in the form and submit it.
        self.b_mail.send_keys(mail[i])
        self.b_button.click()
        self.driver.find_element_by_class_name("btn-success").click()
        self.disp = self.b_mail.is_displayed()
        time.sleep(3)
        cursor2.execute("SELECT * FROM f_basvurular")
        # A new row in f_basvurular means the application was stored.
        if cursor2.rowcount > self.cursor.rowcount:
            d = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            # Append a PASS entry to the log file.
            with open("LOGS/firma-basvuru.txt", "a") as file:
                file.write(" " + "\n")
                file.write(str(d))
                file.write(" " + "\n")
                file.write("Yapılan test: Başarılı e-mail başvuru testi")
                file.write(" " + "\n")
                file.write("Girilen E-Mail : " + mail[i])
                file.write(" " + "\n")
                file.write("Beklenen sonuç: GİRİŞ BAŞARILI!")
                file.write(" " + "\n")
                file.write("Alınan sonuç: GİRİŞ BAŞARILI!")
                file.write(" " + "\n\n")
                file.write("TEST BAŞARILI!")
                file.write(" " + "\n\n")
            cprint(Fore.LIGHTBLUE_EX, "Test Çalıştırılma Tarih/Saati : " + str(d))
            print("")
            cprint(Fore.LIGHTBLUE_EX, "Girilen E-Mail : " + mail[i])
            print("")
            cprint(Fore.YELLOW, "Beklenen Sonuç = GİRİŞ BAŞARILI")
            print("")
            cprint(Fore.GREEN, "Alınan Sonuç = GİRİŞ BAŞARILI!")
            print("")
            cprint(Fore.LIGHTMAGENTA_EX, "TEST BAŞARILI!")
            time.sleep(0.5)
        else:
            # No new row -> the submission failed; log a FAIL entry.
            d = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            with open("LOGS/firma-basvuru.txt", "a") as file:
                file.write(" " + "\n")
                file.write(str(d))
                file.write(" " + "\n")
                file.write("Yapılan test: Başarılı e-mail başvuru testi")
                file.write(" " + "\n")
                file.write("Girilen E-Mail : " + mail[i])
                file.write(" " + "\n")
                file.write("Beklenen sonuç: GİRİŞ BAŞARILI!")
                file.write(" " + "\n")
                file.write("Alınan sonuç: GİRİŞ BAŞARISIZ!")
                file.write(" " + "\n\n")
                file.write("TEST BAŞARISIZ!")
                file.write(" " + "\n\n")
            cprint(Fore.LIGHTBLUE_EX, "Test Çalıştırılma Tarih/Saati : " + str(d))
            print("")
            cprint(Fore.LIGHTBLUE_EX, "Girilen E-Mail: " + mail[i])
            print("")
            cprint(Fore.YELLOW, "Beklenen Sonuç = BAŞARILI")
            print("")
            cprint(Fore.RED, "Alınan Sonuç = Giriş BAŞARISIZ!")
            print("")
            cprint(Fore.LIGHTMAGENTA_EX, "TEST BAŞARISIZ!")
            time.sleep(0.5)
    # Interactive follow-up: 'e' quits, 'b' re-runs the failure scenario.
    cprint(Fore.YELLOW, "Çıkmak için 'e' başarısız test için 'b' tuşlayın")
    a = input()
    if a == 'e':
        driver.close()
    elif a == 'b':
        self.b_mail = self.driver.find_element_by_id(self.degerler["basvuru_mail"])
        self.b_button = self.driver.find_element_by_id(self.degerler["basvuru_btn"])
        firma_basvuru.basarisiz(["berke.com","1234518"])
#! /usr/bin/env python # -*- coding:utf-8 -*- # 导入指定数据 import requests import re,time import os,sys,xlrd import MySQLdb as mdb reload(sys) sys.setdefaultencoding('utf-8') db_conn = mdb.connect('localhost','root','abcxxx123','yjy_xiyizonghe',unix_socket='/tmp/mysql.sock') cursor = db_conn.cursor() db_conn.set_character_set('utf8') # 读取excel data = xlrd.open_workbook("/root/scripts/passport.xls") # 获取sheet名称 table_name = 'yjy_user_1' table = data.sheet_by_name(u''+table_name) #tables = data.sheets() #for table in tables: # print table # 读取行数,列数 nrows = table.nrows print "一共有行数:",nrows ncols = table.ncols print "一共有列数:",ncols time.sleep(3) print "开始导入数据" columns = table.row_values(0) sql = "insert into " + table_name + "(" # 获取字段名称
def connection_Online():
    """Open and return a MySQLdb connection built from the module-level DB_* settings."""
    return MySQLdb.connect(DB_HOST, DB_USER, DB_PASS, DB_NAME)
def get_con(self):
    """Open a utf8 MySQL connection from this object's settings and cache it on self.conn."""
    self.conn = MySQLdb.connect(
        self.host,
        self.user,
        self.password,
        self.db,
        charset="utf8",
    )
from bs4 import BeautifulSoup
import re
import urllib2
import MySQLdb
import string
import requests
# import socks
# import socket
# socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 9150)
# socket.socket = socks.socksocket

# print >> open("clinics_bangalore_link.html",'a'),""
# print >> open("doc_bangalore_link.html",'a'),""

# Scraper-wide database handle.
db = MySQLdb.connect("localhost","root","8520","Ziffi")
cursor = db.cursor()


def fn (url):
    """Fetch *url* and return [parsed BeautifulSoup tree, final URL after redirects]."""
    req = urllib2.Request(url)
    html_page = urllib2.urlopen(req)#.read()
    soup = BeautifulSoup(html_page)
    finalurl = html_page.geturl()
    return [soup,finalurl]


# Matches optional commas followed by a UTF-8-encoded non-breaking space.
TAG_RE = re.compile(r',*\xc2\xa0')


def remove_xc2(text):
    """Strip ',\\xc2\\xa0' (comma + non-breaking space) sequences from *text*."""
    return TAG_RE.sub('', text)


soup2 = BeautifulSoup(open("src_pg.html"))
# The libraries we'll need import sys, session, cgi, MySQLdb # Get a DB connection db = MySQLdb.connect("creosote.eng.unimelb.edu.au", "group04", "04yapmqzoH04", "group04", 3306) cursor = db.cursor() # Manage the session sess = session.Session(expires=20 * 60, cookie_path='/') # --------------------------------------------------------------------------------------------------------------------- # send session cookie print "%s\nContent-Type: text/html\n" % (sess.cookie) # What came on the URL string? params = cgi.FieldStorage() singleForum = 0 singleThread = 0 # Check if user is viewing a specific forum if params.has_key('forum'): # Find all threads in the forum sql = """ select p.title, replace(p.body,'\n','<BR>'), p.date_posted, t.num_views, u.user_name, t.thread_id, t.num_posts from thread t inner join post p on t.thread_id=p.thread_id inner join reg_user u on p.user_id=u.user_id where t.forum_id=%s