def waitUntilMysqlServerIsUp(myHost, myCnf, mySocket, pidFile, timeout):
    """Poll until the backup MySQL server accepts connections and answers a
    query, or give up after ``timeout`` one-second attempts.

    myHost   -- host to connect to
    myCnf    -- my.cnf path passed as read_default_file
    mySocket -- unix socket path; empty string means connect without one
    pidFile  -- server pid file; while absent the server is still starting
    timeout  -- maximum number of polling attempts (seconds)
    """
    log.debug("Entering wait until Mysql Server is up")
    ts1 = time.time()
    for i in range(timeout):
        if i == timeout - 1:
            log.warning("Mysql Backup server might be up but couldnt connect after %ssec" % timeout)
        if not os.path.exists(pidFile):
            time.sleep(1)
            continue
        log.debug("Mysql Backup server has a pid file")
        try:
            if len(mySocket) > 0:
                log.debug("Trying to connect with %s %s %s" % (myHost, myCnf, mySocket))
                # FIX: the original issued this connect twice in a row,
                # leaking the first connection each poll; once is enough.
                conn = _mysql.connect(host=myHost, read_default_file=myCnf, unix_socket=mySocket)
            else:
                log.debug("Trying to connect with %s %s" % (myHost, myCnf))
                conn = _mysql.connect(host=myHost, read_default_file=myCnf)
            log.debug("Trying query...")
            conn.query("SHOW DATABASES;")  # This should raise exception if server is down
            if not len(conn.store_result().fetch_row(maxrows=0, how=1)[0]['Database']) > 0:
                raise dbIsNotUp
            ts2 = time.time()
            log.info("Mysql Backup Server is up, waited %ssec" % (ts2 - ts1))
            break
        except (error):
            # Not up yet -- sleep and poll again.
            time.sleep(1)
def authenticate(self, username, authorized, authstring, scopes=[]):
    """Return an ``app_driver.octoAPy`` handle for *username*.

    authorized == 1: compare *authstring* with the token stored in the DB
    and wrap it (or None on mismatch).  Otherwise request a new GitHub
    authorization token; on failure fall back to the stored token, on
    success persist and wrap the new token.
    """
    def _stored_token():
        # Fetch the first stored token row for this login.
        conn = _mysql.connect(HOST_NAME, MYSQL_USERNAME, MYSQL_PASS, DB_NAME)
        conn.query("SELECT token from " + TABLE_NAME + " where login = '******'")
        return conn.store_result().fetch_row(1)

    if authorized == 1:
        tkn = _stored_token()
        if tkn[0][0] == authstring:
            return app_driver.octoAPy(tkn[0][0])
        return app_driver.octoAPy(None)

    headers = {
        'Authorization': 'Basic ' + base64.urlsafe_b64encode("%s:%s" % (username, authstring)),
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }
    options = {
        'note': self.note,
        'client_id': self.ClientId,
        'client_secret': self.ClientSecret,
        'fingerprint': username,
        'scopes': scopes,
    }
    r = requests.post("https://api.github.com/authorizations", headers=headers, data=json.dumps(options))
    if r.status_code != 201:
        # Token creation failed (e.g. it already exists): reuse the stored one.
        tkn = _stored_token()
        return app_driver.octoAPy(tkn[0][0])
    tkn = json.loads(r.text)['token']
    conn = _mysql.connect(HOST_NAME, MYSQL_USERNAME, MYSQL_PASS, DB_NAME)
    # FIX: escape the interpolated values -- the raw concatenation was an
    # SQL injection hole for attacker-controlled usernames.
    conn.query("INSERT INTO " + TABLE_NAME + " VALUES('" + conn.escape_string(username) + "','" + conn.escape_string(tkn) + "')")
    return app_driver.octoAPy(tkn)
def connect(self, db_host='', db_user='', db_passwd='', db_name=''):
    """Open a MySQL connection.

    When a full set of credentials is supplied they are remembered on the
    instance and used; otherwise the previously stored credentials are
    reused, if complete.
    """
    if db_host and db_user and db_passwd and db_name:
        # Fresh credentials: remember them for later reconnects.
        self.db_host, self.db_user = db_host, db_user
        self.db_passwd, self.db_name = db_passwd, db_name
        self.db = _mysql.connect(db_host, db_user, db_passwd, db_name)
    elif self.db_host and self.db_user and self.db_passwd and self.db_name:
        # No arguments: fall back to stored credentials.
        self.db = _mysql.connect(self.db_host, self.db_user,
                                 self.db_passwd, self.db_name)
def connect(self, store, user='******', passwd='', db=None):
    """Connect to the local MySQL server (optionally selecting *db*) and
    publish the handle in *store* under this plugin's database name.

    Raises DBAuthFail when the credential-only connect is rejected.
    """
    if db:
        self.db = _mysql.connect('localhost', user, passwd, db)
    else:
        try:
            self.db = _mysql.connect('localhost', user, passwd)
        except _mysql_exceptions.OperationalError:
            # Bad credentials: surface as the plugin's auth failure.
            raise DBAuthFail(self.plugin_info.db_name)
    store[self.plugin_info.db_name] = self.db
def change_root_passwd():
    """Set the MySQL root password to 'root'.

    First tries a password-less login (fresh install); if that fails,
    retries with the password this function is about to (re)set.
    """
    db = None
    try:
        # FIX: the original called connect twice back to back, leaking the
        # first handle; a single attempt is sufficient.
        db = _mysql.connect(host="localhost", user="******", db="mysql")
    except _mysql.Error:
        # FIX: narrowed from a bare except that also swallowed
        # KeyboardInterrupt and programming errors.
        db = _mysql.connect(host="localhost", user="******", passwd="root", db="mysql")
    db.query("UPDATE user SET Password = PASSWORD('root') WHERE user = '******'")
    db.query("FLUSH PRIVILEGES")
    db.close()
def worker(index, info_pipe, qps_array, qps_query_table, nworkers, client_arguments):
    """Load-generator child process: receives a workload over *info_pipe*,
    then fires ``worker_qps`` random queries per PERIOD against MySQL until
    interrupted.  (NOTE(review): nesting reconstructed from flattened
    source -- confirm against the original file.)
    """
    try:
        global worker_on
        prefix = str(os.getpid())
        # create connection and run sanity check (show tables)
        conn = _mysql.connect(**client_arguments)
        conn.query('show tables')
        conn.store_result()
        while True:
            # Tell the parent we are ready for the next workload.
            info_pipe.send(True)
            try:
                logger.debug("WAITING FOR INSTRUCTIONS")
                workload = info_pipe.recv()
                query_table = QueryTable(workload, qps_array, qps_query_table)
                # Per-worker share of the total QPS, scaled to one PERIOD.
                worker_qps = int(ceil(((query_table.total_qps + 1) / nworkers) * PERIOD))
                # sleep up front to smoothen out QPS
                time.sleep(index * 1.1 / nworkers)
                while True:
                    start_time = time.time()
                    for i in xrange(worker_qps):
                        query_gen = query_table.get_random_query()
                        try:
                            query = query_gen.query_f(prefix)
                            conn.query(query)
                            conn.store_result()
                        except _mysql.MySQLError as (n, m):
                            logger.debug(n)
                            if n == ER_SERVER_GONE:
                                # the server might have died and restarted in between.
                                # try to reconnect once.
                                conn = _mysql.connect(**client_arguments)
                            elif n in uncaught_errors:
                                raise
                            logger.warning(query)
                            logger.warning("[%d] : %s" % (n, m))
                        # Per-query stat counter for this worker.
                        query_gen.stats[index] += 1
                    # Sleep away the remainder of the PERIOD to hold the QPS.
                    diff = time.time() - start_time
                    sleep_time = PERIOD - diff
                    if sleep_time > 0:
                        logger.debug("sleep %g" % sleep_time)
                        time.sleep(sleep_time)
            except KeyboardInterrupt as e:
                # Parent paused us; drain any pending result and loop back.
                logger.debug("WAITING TO SEND PAUSED")
                conn.store_result()
    except Exception as e:
        logger.error("Exception in child process %d: %s" % (index, str(e)))
        logger.debug(traceback.format_exc())
        del info_pipe
        exit(1)
def connect(self): try: if len(self.sock) > 0: self.conn = _mysql.connect(host=self.host,read_default_file=self.cnf,unix_socket=self.sock) else: self.conn = _mysql.connect(host=self.host,read_default_file=self.cnf,unix_socket=self.sock) return True except: log.critical("Failed to connect to running mysql, error:%s, exiting..." % sys.exc_info()[1]) print "Error: %s\nExiting..." % sys.exc_info()[1] sys.exit(1)
def check_add_sql(self, page):
    """Return the timestamp at which *page* was recorded in mort_recente.

    If the page is not in the table yet, locate the addition via
    self.find_add, insert a row and return its timestamp; otherwise parse
    and return the stored timestamp as a pywikibot.Timestamp.
    """
    database = _mysql.connect(host='tools-db', db='s51245__totoazero',
                              read_default_file="/data/project/totoazero/replica.my.cnf")
    # Quotes in the title are backslash-escaped by hand before interpolation.
    database.query('SELECT added FROM mort_recente WHERE page = "%s"' % page.title(asLink=False).replace('"', '\\"').encode('utf-8'))
    results = database.store_result()
    result = results.fetch_row(maxrows=0)
    if not result:
        (user, oldid, timestamp) = self.find_add(page)
        # Fresh connection for the write; the first one may have timed out
        # during find_add.
        database = _mysql.connect(host='tools-db', db='s51245__totoazero',
                                  read_default_file="/data/project/totoazero/replica.my.cnf")
        database.query('INSERT INTO mort_recente VALUES ("%s", %i, "%s")' % (page.title(asLink=False).replace('"', '\\"').encode('utf-8'), oldid, timestamp.strftime("%Y-%m-%d %H:%M:%S")))
        return timestamp
    else:
        return pywikibot.Timestamp.strptime(result[0][0], "%Y-%m-%d %H:%M:%S")
def wait_predict():
    """Reset the predicted_times table and load recent history plus the
    static ride/hour/bin lookup tables used by the prediction step."""
    # Clear out any previous predictions.
    db = _mysql.connect('localhost', 'Jarred', 'blinky3', 'waittimes_db')
    db.query("DELETE FROM predicted_times")
    db.close()
    # Ride ids of the most popular attractions.
    top_rides = [5, 40, 39, 27, 21]
    # All ride ids the predictor handles.
    selected_rides = [1, 2, 3, 4, 5, 7, 9, 13, 14, 15, 17, 18, 21, 22, 23, 24, 27, 29, 31, 32, 34, 36, 38, 39, 40, 41, 42, 43]
    # Ride id (as string) -> human-readable ride name.
    id_dict2 = {'1': "it's a small world", '13': 'Dumbo', '14': 'Enchanted Tiki Rm', '15': 'Finding Nemo Subs', '17': "Gadget's Go Coaster", '18': 'Haunted Mansion', '2': 'Alice in Wonderland', '21': 'Indiana Jones Adv', '22': 'Jungle Cruise', '23': 'King Arthur Carrousel', '24': 'Mad Tea Party', '27': 'Matterhorn', '29': "Mr Toad's Wild Ride", '3': 'Astro Orbitor', '31': "Peter Pan's Flight", '32': "Pinocchio's Journey", '34': 'Pirates of Caribbean', '36': "Roger Rabbit's Spin", '38': "Snow White's Adv", '39': 'Space Mountain', '4': 'Autopia', '40': 'Splash Mountain', '41': 'Star Tours', '42': 'Storybook Land Boats', '43': 'Winnie the Pooh', '5': 'Big Thunder Mtn', '7': 'Buzz Lightyear', '9': 'Casey Jr Train'}
    # Operating hours, split morning/afternoon-evening.
    hours_lista = [8, 9, 10, 11]
    hours_listb = [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
    # Wait-time bucket boundaries (minutes).
    wait_bin = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 70, 80, 100, 120]
    hour_list = [8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
    cindex = 5
    current_time = 8  # datetime.now().hour
    us_holidays = holidays.UnitedStates()
    datelist = []
    try:
        # Most recent 200 historic dates, newest first.
        db = _mysql.connect('localhost', 'Jarred', 'blinky3', 'waittimes_db')
        db.query("SELECT * FROM historic_dates ORDER BY date DESC")
        results = db.store_result()
        results = results.fetch_row(200)
    except _mysql.Error, e:
        print "Error %d: %s" % (e.args[0], e.args[1])
        sys.exit(1)
def run(self): self.logger = log.get_logger('loader') # Because self._conn was passed in from the worker thread, we need # to call the mysql_thread_init() C function to make sure that # everything is initialized properly. However, _mysql doesn't expose # that function, so we call it implicitly by creating a MySQL # connection with a socket that's guaranteed to be invalid. try: _mysql.connect(unix_socket='.') except _mysql.MySQLError: pass try: self.logger.info('Starting loader') try: with self._conn_lock: self._active_conn_id = self._conn.thread_id() with self._task.protect(): self._task.data['conn_id'] = self._active_conn_id self._task.save() row_count = self._conn.query(self._sql, *self._params) finally: with self._conn_lock: self._active_conn_id = None with self._task.protect(): self._task.data['row_count'] = row_count self._task.save() except connection_wrapper.ConnectionWrapperException as e: self.logger.error('LOAD DATA connection error: %s', str(e)) self._set_error(ConnectionException(str(e))) except pool.MySQLError as e: errno, msg = e.args msg = "LOAD DATA error (%d): %s" % (errno, msg) self.logger.error(msg) self._set_error(WorkerException(msg)) except Exception as e: self._set_error(e) except KeyboardInterrupt: self.logger.info('Received KeyboardInterrupt, exiting...') finally: self._fifo.detach_reader() self.logger.info('Finished LOAD_DATA')
def main(): #author = input('Full name of author: ') author = 'William Stallings' # sqlite - db1 conn = sqlite3.connect('db1.sqlite3') cur = conn.cursor() cur.execute('SELECT * FROM books WHERE author = \'' + author + '\';') result = cur.fetchone() print result # mysql - db2 conn2 = _mysql.connect( host='localhost', user='******', passwd='', db='db2') conn2.query('SELECT * FROM authors') r2 = conn2.store_result() result2 = r2.fetch_row() print result2 #postgresql - db3 conn3 = psycopg2.connect( 'dbname=db3 user=postgres password=postgres') cur3 = conn3.cursor() cur.execute("INSERT INTO library VALUES ('" + str(result[1]) + "', '" + str(result[2]) + "', '" + result2[0][1] + "');") print 'entry inserted'
def actualizarSaldoCuenta(self, empresa, cuenta, debe, montoSistema, montoLocal):
    """Apply a debit/credit movement to an account's system and local
    balances.  The sign depends on whether *debe* is set and on the account
    class encoded in the first digit of its code (1/5/6/8 vs the rest)."""
    db = _mysql.connect('localhost', 'root', 'jssdmr207', empresa)
    db.query("select saldoSistema, saldoLocal, codigo from Cuenta where nombre = '" + cuenta + "'")
    result = db.store_result()
    # FIX: fetch_row() returns a tuple of rows; index into the first row
    # (the original passed the whole row tuple to float()).
    row = result.fetch_row()[0]
    actualSistema = float(row[0])
    actualLocal = float(row[1])
    codigo = row[2]
    # Account class determines how a debit affects the balance.
    if codigo.startswith(('1', '5', '6', '8')):
        signo = 1 if debe else -1
    else:
        signo = -1 if debe else 1
    actualSistema += signo * montoSistema
    # FIX: the original referenced undefined 'montoLoca' (NameError).
    actualLocal += signo * montoLocal
    # FIX: the original UPDATE had a stray comma before WHERE and an
    # unquoted string value.
    db.query("update Cuenta set saldoSistema = %f, saldoLocal = %f where nombre = '%s'" % (actualSistema, actualLocal, cuenta))
    db.close()
def QCalcReal():
    # Compute distance to nearest swire source from radio objects.
    # Connect to the local database with the atlas uid
    db = _mysql.connect(host="localhost", user="******", passwd="atlas")
    # Cross-match: squared angular separation (deg^2) between each radio
    # object and every Spitzer source within a 10-arcsec radius.
    db.query(
        "select t1.cid, t2.index_spitzer, \
        pow((t1.RA_Deg-t2.RA_SPITZER)*cos(t1.Dec_Deg),2)+ \
        pow(t1.Dec_Deg-t2.DEC_SPITZER,2)\
        from ecdfs.coords as t1, swire_cdfs.cdfs as t2 \
        where pow((t1.RA_Deg-t2.RA_SPITZER)*cos(t1.Dec_Deg),2)+ \
        pow(t1.Dec_Deg-t2.DEC_SPITZER,2) <= pow(10/3600,2) \
        order by t1.cid;"
    )
    r = db.use_result()
    # fetch results, returning char we need float !
    rows = r.fetch_row(maxrows=100)
    # Close connection to the database
    db.close()
    # print returned rows
    for row in rows:
        print row
    return
def get_query_v(query): con = None try : con = _mysql.connect('localhost','root','','ip_distribution'); con.query(query); #"select * from pro_pro_dis"); result = con.use_result() #print result v = [] while True: rows = result.fetch_row() if not rows : break #print rows[0] v.append(rows[0]) for i in enumerate(v): a = 2 #print i[1][0] #print i[1][1] #print i[0] #print i[1] #return i[1][0] #print i[2] return v except _mysql.Error, e : print "Error %d: %s" % (e.args[0] , e.args[1]) sys.exit(1)
def configurarMonedas(self, empresa, sistema, codSistema, local, codLocal, compra, venta): hoy = datetime.date.today() fecha = hoy.strftime("%Y-%m-%e") db = _mysql.connect('localhost', 'root', 'jssdmr207', empresa) db.query("call crearMoneda('%s', %i, 'Sistema', '%s')" %(sistema, 0, codSistema)) db.query("call crearMoneda('%s', %i, 'Local', '%s')" %(local, 0, codLocal)) db.query("select idMoneda from Moneda where nombre = '%s'" %(sistema)) result = db.store_result() if result.fetch_row: db.query("select idMoneda from Moneda where nombre = '%s'" %(local)) result = db.store_result() if result.fetch_row: db.query("call crearTipoCambio(1, 1, '%s', '%s')" %(fecha, sistema)) db.query("call crearTipoCambio(%f, %f, '%s', '%s')" %(compra, venta, fecha, local)) print "oui! tipo de cambio actualizado" db.close() return True else: print "no -s" db.close() return False else: print "no -s" db.close() return False
def cierreContableLikeABoss(self, empresa, dividendos, impuesto, ctaPyG, ctaUtilidades, ctaDividendos, ctaDividendosxPagar, ctaIR, ctaIRxPagar):
    """Accounting year-end close: compute operating profit, tax, dividends
    and retained earnings from the account balances, then open the closing
    journal entry.  (NOTE(review): the visible body ends right after the
    entry is created -- the posting of the closing lines may live further
    down in the original file.)"""
    db = _mysql.connect('localhost', 'root', 'jssdmr207', empresa)
    # Total expenses (codes 6%, 8%, and the literal '5').
    db.query("select sum(saldo) from Cuenta where codigo like '6%' or codigo like '8%' or codigo like '5'")
    result = db.store_result()
    # FIX: fetch_row() returns a tuple of rows; index into the first cell
    # (the original passed the row tuple itself to float()).
    montoGastos = float(result.fetch_row()[0][0])
    # Total income (codes 5%, 7%).
    db.query("select sum(saldo) from Cuenta where codigo like '5%' or codigo like '7%'")
    result = db.store_result()
    montoIngresos = float(result.fetch_row()[0][0])
    # Profit calculations.
    # FIX: the original used undefined names 'ingresos' and 'gastos'.
    utilidadOperativa = montoIngresos - montoGastos
    montoImpuesto = utilidadOperativa * impuesto
    utilidadNeta = utilidadOperativa - montoImpuesto
    montoDividendos = utilidadNeta * dividendos
    utilidadRetenida = utilidadNeta - montoDividendos
    db.close()
    # Today's date for the journal entry.
    # FIX: 'daytime' and 'today' were NameErrors for 'datetime' and 'hoy'.
    hoy = datetime.date.today()
    fechas = hoy.strftime("%Y-%m-%e")
    # Register the tax/dividend closing entry.
    asiento = self.crearAsiento(empresa, fechas, fechas)
def getEstadoPeriodo(self, idCuenta, empresa):
    """Look up the state name of the accounting period *idCuenta* in the
    *empresa* database and return the first result row."""
    db = _mysql.connect('localhost', 'root', 'jssdmr207', empresa)
    sql = ("select Estado.nombre from PeriodoContable "
           "inner join Estado on PeriodoContable.idEstado = Estado.idEstado "
           "where idPeriodoContable = " + idCuenta)
    db.query(sql)
    item = db.store_result().fetch_row()
    db.close()
    return item[0]
def crearCuenta(self, codigo, nombre, nombreExtra, titulo, moneda, empresa):
    """Create an account if its code is new and its parent code exists.

    Returns True when the account was created and is now visible,
    False when the code already exists, the parent is missing, or the
    creation did not take effect.
    """
    db = _mysql.connect('localhost', 'root', 'jssdmr207', empresa)
    db.query("select * from Cuenta where codigo = '" + codigo + "'")
    result = db.store_result()
    if result.fetch_row():
        # Code already taken.
        db.close()
        return False
    # Parent code: everything up to (and excluding) the last '-' segment.
    codigoSplit = codigo.split("-")
    codigoAnt = "-".join(codigoSplit[:-1])
    db.query("select * from Cuenta where codigo = '" + codigoAnt + "'")
    result = db.store_result()
    if not result.fetch_row():
        # Parent account missing: refuse to create an orphan.
        db.close()
        return False
    db.query("call crearCuenta('" + codigo + "','" + nombre + "','" + nombreExtra + "','" + titulo + "',0)")
    db.query("select * from Cuenta where codigo = '" + codigo + "'")
    # FIX: re-read the result of the verification SELECT -- the original
    # checked the stale result object from the parent lookup.
    result = db.store_result()
    creado = bool(result.fetch_row())
    db.close()
    return creado
def retrieve_from_db(): db = _mysql.connect(host='192.168.1.120', user='******', passwd='shah', port=3306, db='wiktionary') db.query('SELECT * FROM wiktionary_en') r = db.use_result() d = {} i = 0 print 'loading data' while True: row = r.fetch_row() if not row: break row = row[0] sw = StoredWord(row) lw = sw.word.lower() if lw not in d: d[lw] = Word(sw.language, sw.word) d[lw].addWord(sw) i += 1 if i % 100 == 0: print 'Finished loading %d rows with %d words' % (i, len(d)) print 'done loading data' fd = open('wiktionary2.json', 'w') fd.write('[\n') for k,v in sorted(d.iteritems()): fd.write(v, cls=Encoder) fd.write(',\n') fd.write('{}\n') fd.write(']\n') fd.close() print 'done' return 0
def copy_table(source_db, source_table, target_db, target_table, fields, fieldmap=None):
    '''Copy a table verbatim via SQL, bypassing the ORM.

    Both databases must live on the same HOST, and the target-side USER
    must have access rights on both of them.

    source_db: source entry from settings.DATABASES
    source_table: source table name
    target_db: target entry from settings.DATABASES
    target_table: target table name
    fields: tuple of raw column names that are identical on both sides
    fieldmap: dict mapping source column name => target column name
    '''
    assert source_db['HOST'] == target_db['HOST']
    # FIX: the original one-liner parsed as
    #   select_fields, insert_fields = (zip(...) if fieldmap else []), []
    # because of conditional-expression precedence, so the mapped columns
    # never reached the two lists.  Unpack explicitly instead.
    if fieldmap:
        select_fields, insert_fields = zip(*fieldmap.iteritems())
        select_fields, insert_fields = list(select_fields), list(insert_fields)
    else:
        select_fields, insert_fields = [], []
    select_fields += list(fields)
    insert_fields += list(fields)
    assert len(select_fields) == len(insert_fields)
    import _mysql
    db = _mysql.connect(host=(target_db['HOST'] or 'localhost'), user=target_db['USER'], passwd=target_db['PASSWORD'])
    # Disable FK checks so rows can land in any order, then restore them.
    db.query("""SET FOREIGN_KEY_CHECKS = 0;""")
    db.query("""DELETE FROM %s.%s;""" % (target_db['NAME'], target_table))
    db.query("""INSERT INTO %s.%s (%s) SELECT %s FROM %s.%s;""" % (
        target_db['NAME'], target_table,
        ','.join(insert_fields), ','.join(select_fields),
        source_db['NAME'], source_table))
    db.query("""SET FOREIGN_KEY_CHECKS = 1;""")
    db.close()
def queryUCSC_mySQLConnector(str_hgVersion, str_geneSymbol="", str_rsIDOfSNP="", str_chromOfSNP=""): global db ### db is global to prevent reconnecting. if db is None: print "SQL connection between UCSC database is re-opened. (" + str(str_hgVersion) + ")" conv = { FIELD_TYPE.LONG: int } db = _mysql.connect(host='genome-mysql.cse.ucsc.edu',user='******',passwd='',db=str_hgVersion,conv=conv) if str_geneSymbol!="": db.query("""SELECT * FROM kgXref INNER JOIN knownGene ON kgXref.kgID=knownGene.name WHERE kgXref.geneSymbol = '%s'""" % str_geneSymbol) r = db.use_result().fetch_row(how=1,maxrows=0) try: if len(r)>=1: return r[0]['geneSymbol'], r[0]['chrom'], r[0]['txStart'], r[0]['txEnd'], r[0]['strand'] else: print str(str_geneSymbol) + " miss in UCSC database." return "" except: pass elif (str_rsIDOfSNP!="" and str_chromOfSNP!=""): db.query("SELECT name, func FROM snp130 WHERE name='" + str_rsIDOfSNP + "' AND chrom='" + str_chromOfSNP + "' AND func REGEXP 'nonsense|missense|frameshift' LIMIT 1") r = db.use_result().fetch_row(how=1,maxrows=0) try: if len(r)>=1: return True else: return False except: pass else: print "SQL connection between UCSC database is abandoned" return
def on_test_button_clicked(self, *args):
    """GUI handler: validate the MySQL form fields, attempt a short-timeout
    connection with them, and report success or failure in a dialog."""
    if self.cmb_connection_type.get_active() == 0:
        if not self.validate_mysql():
            return
        import _mysql
        data = {"connect_timeout": 6}
        # Maps form-field names to _mysql.connect keyword names.
        # NOTE(review): the mapped value looks redacted ('******'); the
        # error dialog below reads data["passwd"], so the original mapping
        # was presumably {"password": "passwd"} -- confirm.
        widget_map = {"password": "******"}
        for n in ["host", "user", "password", "port"]:
            if n == "port":
                port = self.glade.get_widget("tb_port").get_text()
                if not port:
                    port = "3306"  # MySQL default port
                data[widget_map.get(n, n)] = int(port)
            else:
                data[widget_map.get(n, n)] = self.glade.get_widget("tb_%s" % n).get_text()
        try:
            handle = _mysql.connect(**data)
        except _mysql.DatabaseError as err:
            dialogs.alert(
                "could not connect to host <b>%s</b> with user <b>%s</b> and password <b>%s</b>:\n<i>%s</i>"
                % (data["host"], data["user"], data["passwd"], sys.exc_value)
            )
            return
        dialogs.alert(
            "successfully connected to host <b>%s</b> with user <b>%s</b>!"
            % (data["host"], data["user"])
        )
        handle.close()
    else:
        dialogs.alert("Nothing to test")
def push_success(success, successtext): currenttime = datetime.datetime.now() # get our db info from our local file dbcreds = get_mysql_credentials() # decode responce host = dbcreds[0].rstrip() dbname = dbcreds[1].rstrip() username = dbcreds[2].rstrip() password = dbcreds[3].rstrip() # connect to our database database = mysql.connect(host=host,user=username,passwd=password,db=dbname) if success: success = "1" else: success = "0" # enter the run into the database query = 'INSERT INTO runs (runsuccess,errtext,rundatetime) VALUES("{0}","{1}","{2}")'.format(success, successtext,currenttime.strftime("%Y-%m-%d %H:%M")) database.query(query) print "Pushing success to database."
def process(): # Request Process req_url = 'http://10.151.34.36/KELAS_C/KLP_01/Message/SMSAPI' req_params = urllib.urlencode(dict(APIId='hanahbanana', APISecretCode='segogoreng')) request = urllib2.urlopen(req_url, req_params) response = request.readline() # JSON Decode json_data = json.loads(response) # Connecting to MYSQL Server mysql_con = _mysql.connect('127.0.0.1', 'root', '', 'gammu') # Iterating data and entry to MYSQL Gammu server for i in json_data: sms_dest = i['Dest'] sms_msg = i['Msg'] print sms_dest + ": " + sms_msg mysql_con.query("INSERT INTO outbox(DestinationNumber, TextDecoded) VALUES ('" + sms_dest + "','" + sms_msg + "')") server_string = "Genuine Windows 8 SL Edition - HP Envy dv4 - Gammu version 1.32" last_update = datetime.datetime.now() req_url = 'http://10.151.34.36/KELAS_C/KLP_01/Report/ReportAPI' req_params = urllib.urlencode(dict(APIId='hahaha', APISecretCode='hihihi', ServerString=server_string, LastUpdate=last_update)) request = urllib2.urlopen(req_url, req_params) response = request.readline() # Close MySQL Connection if mysql_con: mysql_con.close() return
def check_address(address):
    """Return True when *address* already exists in the addresses table."""
    print "[INFO ] Checking if '{0}' is in database ...".format(address)
    # get our db info from our local file
    dbcreds = get_mysql_credentials()
    # Credentials file layout: host, dbname, username, password (one per line).
    host = dbcreds[0].rstrip()
    dbname = dbcreds[1].rstrip()
    username = dbcreds[2].rstrip()
    password = dbcreds[3].rstrip()
    # connect to our database
    database = mysql.connect(host=host, user=username, passwd=password, db=dbname)
    # NOTE(review): address is interpolated unescaped -- SQL injection risk
    # if addresses can contain quotes.
    query = 'SELECT COUNT(addressid) FROM addresses WHERE rawaddress = "{0}"'.format(address)
    database.query(query)
    dbresult = database.store_result()
    # Single-row, single-column COUNT result.
    (addresscount,), = dbresult.fetch_row()
    if int(addresscount) == 0:
        exists = False
    else:
        exists = True
    return exists
def application(environ, start_response):
    """WSGI entry point: render the form response and, when a message was
    submitted, log it to MySQL and forward it as an SMS.
    (NOTE(review): the visible body ends inside the handler -- the
    start_response/return part is not shown here.)"""
    # Returns a dictionary containing lists as values.
    #logging.debug('starting application',extra=s)
    d = parse_qs(environ['QUERY_STRING'])
    ip = environ['REMOTE_ADDR']
    user = environ['USER']
    hostnam = environ['REMOTE_ADDR']
    # Context fields attached to every log record via extra=.
    s = {'clientip': ip, 'user': user, 'hostnam': hostnam}
    logging.debug('variables loded', extra=s)
    try:
        logging.debug('starting the user', extra=s)
        # In this idiom you must issue a list containing a default value.
        msg = d.get('msg', [''])[0]  # Returns the first msg value.
        num = d.get('num', [''])[0]  # Returns a list of num.
        # Always escape user input to avoid script injection
        msg = escape(msg)
        num = escape(num)
        response_body = html % (msg or 'Empty', num or 'No num')
        logging.debug('user served', extra=s)
        if msg:
            myDB = _mysql.connect(host="192.168.0.120", db="test", user="******", passwd="066abde")
            # NOTE(review): escape() above is HTML escaping, not SQL
            # escaping -- this interpolation is still an injection risk.
            myDB.query(" INSERT INTO test(name,ip,tel) VALUES ('%s','%s','%s') ;" % (msg, ip, num))
            myDB.close()
            logging.debug('data sent succefly msg=%s num=%s', msg, num, extra=s)
            app.sendsms(s, num, msg)
        status = '200 OK'
    except Exception, e:
        logging.exception('Error ---------------', extra=s)
def test_wcdb3_login1(self):
    """Smoke test: _mysql.connect returns a _mysql.connection object."""
    conn = _mysql.connect(
        host="z",
        user="******",
        passwd="nVZV4bLhpG",
        db="cs327e_taylor")
    self.assert_(str(type(conn)) == "<type '_mysql.connection'>")
def dataReceived(self, data): date = time.time() values = data.split(",") if len(values) != 4: print "Recieved:" + data return db = None try: db = _mysql.connect(config.hostname, config.username, config.password, config.database) db.query( "INSERT INTO tracker (date, speed, lat, lng, course)" + "VALUES ('" + str(date) + "', '" + str(values[0]) + "', '" + str(values[1]) + "', '" + str(values[2]) + "', '" + str(values[3]) + "')" ) print "---------------------" print "| Data Logged |" print "---------------------" print "Speed: " + values[0] print "Latitude: " + values[1] print "Longitude: " + values[2] print "Course: " + values[3] except _mysql.Error, e: print "Error %d: %s" % (e.args[0], e.args[1])
def connect(self):
    """Open the MySQL connection described by this object's host/user/
    password/port/database attributes, then load the server variables and
    derive the connection charset."""
    c = {
        "host": self.host,
        "user": self.user,
        "passwd": self.password,
        "connect_timeout": int(self.connect_timeout)
    }
    # Optional settings only when configured.
    if self.port:
        c["port"] = int(self.port)
    if self.database:
        c["db"] = self.database
    try:
        self.handle = _mysql.connect(**c)
    except:  # _mysql_exceptions.OperationalError:
        # Record the failure and bail out; caller checks self.connected.
        self.connected = False
        self.msg_log("%s: %s" % (sys.exc_type, sys.exc_value))
        return
    self.connected = True
    self.version = self.handle.get_server_info()
    #if self.is_at_least_version("4.1.0"):
    #    self.query("set names 'utf8'")  # request utf8 encoded names and result!
    self.query("show variables")  # get server variables
    result = self.handle.store_result()
    self.variables = dict(result.fetch_row(0))
    # character_set_server only exists from MySQL 4.1.3 on.
    if self.is_at_least_version("4.1.3"):
        self.charset = self.variables["character_set_server"]
    else:
        self.charset = "latin1"  # use config default_charset as fallback!
    print "using default_charset %r for this database" % (self.charset)
    #print "server variables:"
    #import pprint
    #pprint.pprint(self.variables)
    self.refresh()
    if self.database:
        self._use_db(self.database)
def get_addresses(): print "[INFO ] Getting all addresses from database ..." # get our db info from our local file dbcreds = get_mysql_credentials() # decode responce host = dbcreds[0].rstrip() dbname = dbcreds[1].rstrip() username = dbcreds[2].rstrip() password = dbcreds[3].rstrip() # connect to our database database = mysql.connect(host=host,user=username,passwd=password,db=dbname) query = "SELECT DISTINCT itemid, address FROM incidents GROUP BY itemid" database.query(query) dbresult=database.store_result() addresses = [] for row in dbresult.fetch_row(maxrows=0): addresses.append((row[0],row[1])) return addresses
# --- Application / keypad state flags ---
busy = False
keyA = False
keyB = False
keyC = False
comm = 'COM8'  # serial port the keypad hardware is attached to
pincode = ""
keuze = ""     # selected menu option
klantID = "5"  # current customer id
tagID = ""
count = 0
rows = 0
values = "0123456789ABCD*#"  # characters the keypad can emit

# Initiate GUI, database and serial link.
pygame.init()
db = _mysql.connect(host="localhost", user="******", passwd="", db="kiwibank")
ser = serial.Serial(comm, 9600)

# Screen measurements
display_width = 800
display_height = 600

# Value arrays
inputArray = [" ", " ", " ", " ", " ", " "]
digitArray = []

# Colors (RGB tuples)
black = (0, 0, 0)
white = (255, 255, 255)
red = (255, 0, 0)
red_dark = (200, 0, 0)
import _mysql

# Module-level connection to the shop database.
# NOTE(review): credentials are hard-coded -- move them to configuration.
db = _mysql.connect("192.168.8.88", "root", "root", "sys_shop")
# Example: querying data in MySQL import _mysql db = _mysql.connect(host="localhost", db="pycourse", user="******", passwd="python") db.query("""SELECT name, species FROM pet WHERE age>=1; """) result = db.store_result() print "Rows", result.num_rows() print "Fields", result.num_fields() for i in range(result.num_rows()): print result.fetch_row()[0]
#!/usr/bin/env python from movie import Movie from database import host, user, password, database import _mysql, datetime, time allmovies = [] connector = _mysql.connect(host, user, password, database) for movie_table in ["moviesintheatres", "opening_movies", "upcomingmovies"]: connector.query("SELECT * FROM " + movie_table) allmovies_result = connector.store_result() allmovies_movie = allmovies_result.fetch_row() while allmovies_movie != (): allmovies.append(allmovies_movie[0]) allmovies_movie = allmovies_result.fetch_row() def parental_rating(sentence, movie): tokened = sentence.split(' ') for word in tokened: if word in open("movie/pg.txt").read().split('\n'): movie.set_parental_rating("PG") elif word in open("movie/pg-13.txt").read().split('\n'): movie.set_parental_rating("PG-13") elif word in open("movie/r.txt").read().split('\n'): movie.set_parental_rating("R") def genre(sentence, movie): genre_list = [] genres = open("movie/genre.txt").read().split('\n') for genre in genres:
import _mysql

# Module-level connection to the local pydjango database.
# NOTE(review): credentials are hard-coded -- move them to configuration.
db = _mysql.connect("127.0.0.1", "root", "ujj17121995", "pydjango")
#-*- coding:utf-8 -*-
# Fetch and print the MySQL server version, closing the connection
# whether or not the query succeeds.
import _mysql
import sys

conn = None
try:
    conn = _mysql.connect('localhost', 'testuser', 'test623', 'testdb')
    conn.query("SELECT VERSION()")
    rs = conn.use_result()
    print("MySQL version is:%s" % rs.fetch_row()[0])
except _mysql.Error as e:
    print("Error %d:%s" % (e.args[0], e.args[1]))
    sys.exit(1)
finally:
    if conn:
        conn.close()
import json
import _mysql

# Connection to the scouting 'orb' database used by the (currently
# disabled) loaders below.
db = _mysql.connect(host="localhost", user="******", passwd="", db="orb")

# JSON export files to import, one per data category.
gfilename = raw_input("name of goal file: ")
dfilename = raw_input("name of defense file: ")
sfilename = raw_input("name of scale file: ")
cfilename = raw_input("name of challenge: ")
# NOTE(review): the loaders below are commented out -- kept for reference.
#gfile = open(gfilename,'r')
#jsonI = gfile.read()
#jsonAll = json.loads(jsonI)
#for i in jsonAll:
#    print i, jsonAll[i]
#    if len(jsonAll[i]) == 4:
#        db.query("delete from goal where team="+str(i))
#        db.query("insert into goal values ("+str(i)+","+str(jsonAll[i][0])+","+str(jsonAll[i][1])+","+str(jsonAll[i][2])+","+str(jsonAll[i][3])+")")
#    else:
#        print "WHY:",i
#        print len(jsonAll[i])
#gfile.close()
#dfile = open(dfilename,'r')
#jsonI = dfile.read()
#jsonAll = json.loads(jsonI)
#for i in jsonAll:
#    print i, jsonAll[i]
#    if len(jsonAll[i]) == 9:
#        db.query("delete from defense where team="+str(i))
#        db.query("insert into defense values ("+str(i)+","+str(jsonAll[i][0])+","+str(jsonAll[i][1])+","+str(jsonAll[i][2])+","+str(jsonAll[i][3])+","+str(jsonAll[i][4])+","+str(jsonAll[i][5])+","+str(jsonAll[i][6])+","+str(jsonAll[i][7])+","+str(jsonAll[i][8])+")")
import _mysql conn = _mysql.connect("localhost", "root", "root") conn.query("drop database banking_teja") conn.query("create database banking_teja") conn = _mysql.connect("localhost", "root", "root", "banking_teja") conn.query(''' CREATE TABLE IF NOT EXISTS `account` ( `ac_no` varchar(45) NOT NULL, `ac_name` varchar(45) default NULL, `ac_branch` varchar(45) default NULL, `ac_bank` varchar(45) default NULL, `ac_bal` int(11) default NULL, PRIMARY KEY (`ac_no`) ) ENGINE=InnoDB DEFAULT CHARSET=latin1 CREATE TABLE IF NOT EXISTS `payee` ( `ac_no` varchar(45) NOT NULL, `ac_name` varchar(45) default NULL, `ac_bank` varchar(45) default NULL, PRIMARY KEY (`ac_no`) ) ENGINE=InnoDB DEFAULT CHARSET=latin1 CREATE TABLE IF NOT EXISTS `user_registration` ( `AC_NO` varchar(45) NOT NULL, `AC_NAME` varchar(45) default NULL, `AC_BRANCH` varchar(45) default NULL, `AC_PWD` varchar(45) default NULL, PRIMARY KEY (`AC_NO`)
import math import array import _mysql import numpy import scipy import matplotlib.pyplot as plt import astropysics as astro import pylab import sys answer = "no" db = _mysql.connect(host="localhost", user="******", passwd="atlas") while (answer != "exit"): # ask which id to delete answer = raw_input('Which ID to delete ?') print "\nentered : ", answer, "\n" sql1 = ("delete from atlas_dr3.cdfs_coords where id='" + answer + "'") print sql1 db.query(sql1) sql2 = ("delete from atlas_dr3.cdfs_deconv where id='" + answer + "'") print sql2 db.query(sql2) sql3 = ("delete from atlas_dr3.cdfs_name where id='" + answer + "'") print sql3 db.query(sql3)
import _mysql

# Candidate credentials to try against the local MySQL server; every
# (user, password) combination is attempted and the outcome reported.
username = ["root", "toor"]
password = ["root", "toor"]
for i in username:
    for j in password:
        try:
            db = _mysql.connect(host="localhost", user=i, passwd=j, db="mysql")
        except _mysql.MySQLError:
            # FIX: narrowed from a bare except that also swallowed
            # KeyboardInterrupt and programming errors.
            print(i, " username ve ", j, " parola icin giris yapilamadi")
        else:
            print(i, " username ve ", j, " parola icin giris yapildi")
            # FIX: close successful connections instead of leaking them.
            db.close()
#print(ByteToHex(s[i])) if string.find("0x83", ByteToHex(s[i])) != -1: if len(s) - i >= 3: if string.find("0x00", ByteToHex( s[i + 1])) != -1 and string.find( "0x03", ByteToHex(s[i + 2])) != -1: # print "Last Timestamp: "+time.strftime("%H:%M:%S") newtime = time.time() # print lasttime # print newtime if newtime - lasttime > 2: print "Button Pressed! Timestamp: " + time.strftime( "%H:%M:%S") try: db = _mysql.connect(host="localhost", user=config.USER, passwd=config.PASSWD, db=config.DB) db.query( """INSERT INTO events(type) VALUES('remote')""" ) db.close() except _mysql.Error: print "Shit! DB Error!" except NameError: print "No DB was initialized" buttonPressed = True doortime = newtime print "Door opened" openDoor("fellow p-spacer") lasttime = newtime done = 1
#!/usr/local/bin/python import os import time import _mysql while True : version = -1 con = None try : con = _mysql.connect('localhost','root','','ip_distribution'); con.query("select max(version) from configure where state = 0"); result = con.use_result() v = [] while True: rows = result.fetch_row() if not rows : break v.append(rows[0]) for i in enumerate(v): version = i[1][0] print version except _mysql.Error, e : print "Error %d: %s" % (e.args[0] , e.args[1]) sys.exit(1) finally:
'2TTCTTCTTTC', '1TTCC', '2TTTCTT', '1CTTT', '2CTTC', '1CTTT', '2T', '1CTTT' ], 'DXS10148': ['1GGAA', '1AAGA', '1AAAG', '1AAGG'], 'HPRTB': ['1AGAT'] } Locus = sys.argv[1] Allele = sys.argv[2] #Locus = 'D22S1045' #Allele = '15' #db connection db = _mysql.connect(host="127.0.0.1", user="******", passwd="100resu", db="fxbio") if __name__ == "__main__": db.query("SELECT * FROM ngs_data WHERE Locus = '" + Locus + "' && Allele = '" + Allele + "' ORDER BY Sequence;") query_LocusResult = db.store_result() row = query_LocusResult.fetch_row(1, 2) while row is not None: if (str(row) == '()'): break sequence = str(row[0]['ngs_data.Sequence'])[2:-1] #print('\n' + str(row[0]['ngs_data.Sample_Year']) + " " + str(row[0]['ngs_data.Sample_ID'])[2:-1] + " " + sequence) array_iterate = 0 sequence_iterate = 0 count = 0
def main(): text = False database = False usernames_blocked = [] site = pywikibot.Site() catname = u'Catégorie:Demande de déblocage' #DEBUG if debug: userlist = list() userlist.append(pywikibot.Page(site, u'Utilisateur:Toto Azéro')) #categ=catlib.Category(site, catname) #userlist=categ.articlesList() else: categ = pywikibot.Category(site, catname) userlist = categ.articles() # Step one: remove invalid entries generator = iter(userlist) preloadingGen = pagegenerators.PreloadingGenerator(generator, 60) #step two: add new entries user_list = list() userlist = list() for page in generator: userpage = page.title(withNamespace=False) if page.namespace() != 3: pywikibot.output( u'Page %s is not in the user talk namespace, skipping.' % userpage) continue (requester, oldid) = find_add(page) pywikibot.output(u'Request for unblock has been made by %s in id %i' % (requester, oldid)) if not requester.username in userpage.split('/')[0]: pywikibot.output( u'Request for unblock has been made by %s, who is not the owner of the page %s: skipping' % (requester, userpage)) continue username = re.split(u'/', userpage, 1)[0] pywikibot.output(u'Processing %s' % username) userlist.append(username) user = pywikibot.User(site, username) #try: # Désactivé : un utilisateur peut être bloqué par un blocage collatéral, or le bot # ne reporte pas sa demande si il ne détecte pas de blocage. 
# blocked=user.isBlocked() blocked = True #except userlib.InvalidUser: # params = { # 'action' :'query', # 'list' :'blocks', # 'bkip' :username, # } # # result=query.GetData(params, encodeTitle = False) # blocked=len(result['query']['blocks']) if (blocked or debug) and not check_open_section(username): pywikibot.output("%s is blocked" % username) if not database: database = _mysql.connect( host='tools-db', db='s51245__totoazero', read_default_file="/data/project/totoazero/replica.my.cnf") sqlusername = re.sub(u'\'', u'\'\'', username) database.query('SELECT date FROM unblocks WHERE username=\'%s\'' % sqlusername.encode('utf-8')) results = database.store_result() if results: result = results.fetch_row(maxrows=0) if len(result): for res in result: #date=res[0] #now=datetime.datetime.now() #delta=now-date #print delta #if (delta.days > 0): print res date = time.strptime(res[0], '%Y-%m-%d %H:%M:%S') now = time.gmtime() if (time.mktime(now) - time.mktime(date)) > (24 * 3600): update = True usernames_blocked.append(username) database.query( 'DELETE FROM unblocks WHERE username="******"' % sqlusername.encode('utf-8')) else: update = False else: update = True usernames_blocked.append(username) else: update = False if update: if not text: text = get_text() text += u'\n{{subst:Utilisateur:ZéroBot/Déblocage|%s|oldid=%i}}' % ( username, oldid) database.query( 'INSERT INTO unblocks VALUES (NULL , \'%s\', CURRENT_TIMESTAMP)' % sqlusername.encode('utf-8')) elif not check_open_section(username): pywikibot.output("%s is not blocked" % username) if not debug: usertext = page.get() newtext = re.sub(u'\{\{[Dd]éblocage\|(?!nocat)', u'{{Déblocage|nocat=oui|', usertext) page.put(newtext, u'Blocage terminé') else: pywikibot.output( "%s is blocked but a request has already been made" % username) if not database: database = _mysql.connect( host='tools-db', db='s51245__totoazero', read_default_file="/data/project/totoazero/replica.my.cnf") database.query('SELECT username FROM unblocks') 
results = database.store_result() result = results.fetch_row(maxrows=0) for res in result: if res[0].decode('utf-8') not in userlist: pywikibot.output("Removing %s" % res[0]) database.query('DELETE FROM unblocks WHERE username="******"' % res[0]) if text: put_text(text, usernames_blocked)
# CGI page bootstrap for the "Water Profiles" view (brewerslab).
from cloudNG import *

# Parse incoming CGI form fields.
form = cgi.FieldStorage()

# Site theme / page-layout helper provided by cloudNG.
theme = webTheme()
theme.bgcolor = "#ffffff"

# Emit the HTTP header before any page content is written.
sys.stdout.write("Content-Type:text/html\n\n")

# Only requests recognized as coming from a local user may edit profiles.
editable = False
if theme.localUser:
    editable = True

# Accumulator for the page's water-profile grid (filled later in the file).
grid = {}

# Three separate handles to the same database.
# NOTE(review): one connection would likely suffice — presumably each is
# used for an independent concurrent result set; confirm with the callers
# further down the file before consolidating.
db = _mysql.connect(host="localhost", user="******", passwd='beer', db="brewerslab")
db2 = _mysql.connect(host="localhost", user="******", passwd='beer', db="brewerslab")
db3 = _mysql.connect(host="localhost", user="******", passwd='beer', db="brewerslab")

import time

# Render the page chrome; the profile content follows in the rest of the file.
theme.bodytitle = "Water Profiles"
theme.presentHead()
theme.presentBody()
"Activation Loop End": "activation_loop_end", "aC Helix Start": "achelix_start", "aC Helix End": "achelix_end", "Bridge Glutamine": "bridge_glut_offset", "Bridge Lysine": "bridge_lys_offset", "Bridge Closest Atoms": "bridge_distance", "DxxxxG Start": "dxxxxg_motif_offset", "R-Spine B4": "rspine_start_offset", "R-Spine aC": "rspine_ac_offset" } query_attributes = list(header_map.values()) query_attributes.remove("gene") query_attributes.remove("offset") db = _mysql.connect(host = "acbbdb1.picr.man.ac.uk", user = "******", passwd = "nsmdP123", db = "nsmd") if sys.argv[2] == "auto": manual = False elif sys.argv[2] == "manual": manual = True else: raise Exception("Second argument: Must specify either auto or manual") force = False if len(sys.argv) >= 4 and sys.argv[3] == "force": force = True with open(sys.argv[1], "r") as f: sheet_lines = [l.strip() for l in f.readlines()]
# Get data from fields name = form.getvalue("name") search = str(name) print "Content-type:text/html\r\n\r\n" vm_count = 0 if(len(sys.argv)>1): debug = sys.argv[1] else: debug = 0 try: con = _mysql.connect(host = "localhost", user = "******", passwd = "", port=3306, db = "standards", unix_socket="/opt/lampp/var/mysql/mysql.sock") con.query("select * from standards.cust_octet where cust_name like '%" + search +"%'") result = con.store_result() octet = result.fetch_row()[0][1] con.close() con = _mysql.connect(host = "172.19.254.21", user = "******", passwd = "em7admin", port=int(7706), db = "master_dev") con.query("select distinct(device) from legend_device where ip like '" +octet+ "%' and (device like '%em7pr%' or device like '%em7mc%' or device like '%em7dc%' or device like '%em7db%' or device like '%spld%' or device like '%splm%' or device like '%splsr%' or device like '%splin%' or device like '%rly%') order by device ") result_1 = con.store_result() #con.query("select distinct(device) from legend_device where ip like '" +octet+ "%' and (device like '%spld%' or device like '%splm%' or device like '%splsr%' or device like '%splin%' or device like '%rly%') order by device ") #result_2 = con.store_result()
#!/usr/bin/python # -*- coding: utf-8 -*- import _mysql import sys try: con = _mysql.connect('localhost', 'root', 'root', 'TESTDB') con.query("SELECT VERSION()") result = con.use_result() print "MySQL version: %s" % \ result.fetch_row()[0] except _mysql.Error, e: print "Error %d: %s" % (e.args[0], e.args[1]) sys.exit(1)
import _mysql from dealFile import * #Get of Domains which has more than 2 interfaces have 16-20 examples db = _mysql.connect(host="localhost", user="******", passwd="zxcv4321", db="DDI") #db.query("""select COUNT(*) from PPI inner join example on (ID = PPI_ID) where domain1="ACT" and domain2="ACT" and topology_1 = 6 and topology_2 = 6""") #db.query("""select * from PPI inner join example on (ID = PPI_ID) where domain1="ACT" and domain2="ACT" """) ddiList = readDDIsFile('listOfFolders15OCT.txt') ddis = [] #Number of Domains which has 2 interfaces have more than 15 examples for ddi in ddiList: [domain1, domain2] = ddi if domain1 == domain2: continue #print i #print domain1 #print domain2 #query='SELECT DISTINCT topology_1,topology_2 from DDItopology WHERE domain1="'+domain1+'" AND domain2="'+domain2+'"' #query='SELECT DISTINCT topology_1,topology_2 from DDItopology WHERE domain1="'+domain1+'" AND domain2="'+domain2+'"' query = 'SELECT COUNT(DISTINCT topology_1,topology_2) from DDItopology WHERE domain1="' + domain1 + '" AND domain2="' + domain2 + '"' #print query #query='select domain1,domain2 from DDI1' db.query(query) result = db.store_result() numTopology = result.fetch_row(0) print numTopology[0][0] if numTopology[0][0] < 2: break try: query = 'SELECT COUNT(*) from DDItopology WHERE domain1="' + domain1 + '" AND domain2="' + domain2 + '"' #print query db.query(query)
], 'DXS10135': ['AAGA', 'GAAAG', 'GAAA'], 'DXS10146': [ 'TTCC', 'T', 'TTCC', 'TTTC', 'CTCCCTTCC', 'TTCC', 'TCCC', 'TTCTTCTTTC', 'TTCC', 'TTTCTT', 'CTTT', 'CTTC', 'CTTT', 'T', 'CTTT' ], 'DXS10148': ['GGAA', 'AAGA', 'AAAG', 'AAGG'], 'HPRTB': ['AGAT'] } Locus = sys.argv[1] Allele = sys.argv[2] #db connection db = _mysql.connect(host="localhost", user="******", passwd="Uq42=Tc8", db="fxbio") if __name__ == "__main__": db.query("SELECT * FROM ngs_data WHERE Locus = '" + Locus + "' && Allele = '" + Allele + "' ORDER BY Sequence;") query_LocusResult = db.store_result() row = query_LocusResult.fetch_row(1, 2) while row is not None: if (str(row) == '()'): break sequence = str(row[0]['ngs_data.Sequence'])[2:-1] #print('\n' + str(row[0]['ngs_data.Sample_Year']) + " " + str(row[0]['ngs_data.Sample_ID'])[2:-1] + " " + sequence) array_iterate = 0 sequence_iterate = 0 count = 0
def get_lock(process_name): global lock_socket lock_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) try: lock_socket.bind('\0' + process_name) except socket.error: print 'lock exists' sys.exit() config.config.anontune.users.administrator.username logging.basicConfig(level=logging.WARNING, filename="error_log") log = logging.getLogger('mai_logger') the_mutex = threading.Lock() try: db = _mysql.connect(config.config.database.host, config.config.database.username, config.config.database.password, "anontune2") except: print "Failed to connect to database server." exit() def insert_playlist(name, username): global the_mutex if the_mutex == None: return try: name = php_lib.url_escape(str(name), 1) username = php_lib.url_escape(username, 1) auth_username = php_lib.url_escape(str(config.config.anontune.users.administrator.username), 1) auth_password = php_lib.url_escape(str(config.config.anontune.users.administrator.password), 1) api_call = "http://www.anontune.com/api.php?c=insert_playlist&username=%s&auth_username=%s&auth_password=%s&name=%s&parent_id=0&cmd=0" % (username, auth_username, auth_password, name)
from optparse import OptionParser FILE_out = open("out.txt","w") usage = "usage: %prog [options] arg1" parser = OptionParser(usage=usage) parser.add_option("-i", "--input", dest="excel_file", help="path to the multi-sheet excel file of Australian Government Departments/Agencies - e.g. output from WriteCSVMultiTabExcel.py") (options, args) = parser.parse_args() #print(len(args)) #if len(args) != 1: # parser.error("Incorrect number of arguments. Try --help for usage.") FILE_out.write("Opening: %s" % options.excel_file) db=_mysql.connect(host="localhost",user="******", passwd="root",db="Australian_government") if len(options.excel_file) < 1: parser.error("Invalid file name. Try --help for usage") if (options.excel_file).find(".xls") < 0: parser.error("An .xls file is required: e.g. output from WriteCSVMultiTabExcel.py. Try --help for usage") class DataSet: def displayDataSet(self): print("DeptId : ", self.DeptId, ", Name: ", self.Name, ", SubDepartment: ", self.SubDepartment, ", ContactName: ", self.ContactName, ", JiraReference: ", self.JiraReference, ", ProjectId: ", self.ProjectId, ", ProjectName: ", self.ProjectName, ", Other: ", self.Other) def attributeName(col): if col == 0: return "DataSet"
if len(sys.argv) != 2: print >> sys.stderr, "Usage: kinase-offset-from-dirname.py dirname" sys.exit(1) base = os.path.basename(sys.argv[1]) bits = base.split("-") if len(bits) == 2: print bits[1] elif len(bits) == 1: db = _mysql.connect(host="acbbdb1", user="******", passwd="nsmdP123", db="nsmd") db.query( "select offset_canonical from genes join kinases on gene_id = genes.id where gene = '%s'" % base) rows = db.store_result().fetch_row(maxrows=0) if len(rows) == 0: raise Exception("No such gene %s" % base) elif len(rows) > 1: raise Exception("Unexpected response %s" % rows) print rows[0][0] else:
# # Connects to twitter stream API for geo-localised tweets and dumps tweets in a db # from twitter import * from datetime import datetime import operator import _mysql ##### max_tweet_to_treat = 100000000 number_of_tweets_in_mysql_insert_batch = 10 # db = _mysql.connect(host="127.0.0.1", user="******", passwd="python", db="streaming") ##### auth = OAuth( consumer_key='xxxxxx', consumer_secret='xxxx', token='xxxxxxxx', token_secret='xxxxxx' ) tweet_treated = 0 tweet_to_insert = 0 query = 'INSERT INTO streaming.tweets_geo VALUES' twitter_sample_stream = TwitterStream(auth=auth, domain='stream.twitter.com') for msg in twitter_sample_stream.statuses.filter( locations='-180,-90,180,90', language = 'en'): #for msg in twitter_sample_stream.statuses.sample( locations='-180,-90,180,90'): try: if 'text' in msg:
#!/usr/bin/env python import _mysql import json import cgi from datetime import datetime db = _mysql.connect(host="localhost", user="******", passwd="1", db="eden") db.query("SET NAMES utf8;") def insertEvent(eventid, title, dates, description, latitude, longitude): #insert event in database db.query("""INSERT INTO events (eventid, title, dates, description, coords) VALUES ("{0}", "{1}", "{2}", "{3}", GeomFromText('POINT({4} {5})') ); """.format(eventid, title, dates, description, latitude, longitude)) def getLocalEvents(latitude, longitude, radius): #get all events within raduis of provided point db.query(""" SELECT eventid, title, dates, description, X(coords) AS "latitude", Y(coords) AS "longitude", GLength( LineStringFromWKB(LineString(coords, GeomFromText('POINT({0} {1})'))) )*111300 AS distance
import pylab
import sys

# Database connection settings.
# NOTE(review): `global` at module level is a no-op in Python — these
# statements only matter inside a function body; harmless but redundant here.
global db_host
db_host = 'localhost'
global db_user
db_user = '******'
global db_passwd
db_passwd = 'atlas'

print "\nStarting Plot Science"

# Connect to the local database with the atlas uid
db = _mysql.connect(host=db_host, user=db_user, passwd=db_passwd)

# select from matches the IR Flux, Radio Flux, Redshift etc so we can do some science
db.query(
    "select flux1/flux2,ang_sep_arcsec/log10(flux1+flux2) from v_0_3.radio_pairs where flux1/flux2 > 1.0 limit 0,10000;"
)

# store_result() returns the entire result set to the client immediately.
# The other is to use use_result(), which keeps the result set in the server
# and sends it row-by-row when you fetch.
#r=db.store_result()
# ...or...
# use_result() chosen here: rows are pulled lazily from the server as the
# rest of the script iterates over `r`.
r = db.use_result()
def main(): config.use_mwparserfromhell = False locale.setlocale(locale.LC_ALL, 'fr_FR.utf-8') db = False global test global dry dry = False # À activer seulement pour les tests test = False # À activer seulement pour tester le script sur une seule page ns_test = False recurse_test = False for arg in pywikibot.handleArgs(): if arg == "-dry": dry = True pywikibot.output(u'(dry is ON)') elif arg[0:6] == "-test:": test = True titre_page_test = arg[6:] elif arg[0:4] == "-ns:": ns_test = True namespaces_test_value = [int(i) for i in arg[4:].split(',')] elif arg[0:9] == "-recurse:": recurse_test = True recurse_test_value = bool(arg[9:]) comment_modele = u"%(nombre_articles)i articles) (Bot: Mise à jour de la liste des articles récents (%(precision_pages)s)" site = pywikibot.Site() titre_modele = u"Articles récents" modele = pywikibot.Page(site, titre_modele, ns=10) gen = pagegenerators.ReferringPageGenerator(modele, onlyTemplateInclusion=True) matchDebut1 = u"<!-- Ce tableau est créé automatiquement par un robot. Articles Récents DEBUT -->" matchFin1 = u"\n<!-- Ce tableau est créé automatiquement par un robot. Articles Récents FIN -->" matchDebut2 = u"<!-- Ce tableau est créé automatiquement par un robot. Articles Récents Liste DEBUT -->" matchFin2 = u"\n<!-- Ce tableau est créé automatiquement par un robot. 
Articles Récents Liste FIN -->" if test: pywikibot.output(u'(test is ON)') gen = [pywikibot.Page(site, titre_page_test)] if ns_test: pywikibot.output(u'(ns_test is ON)') for main_page in gen: try: comment = comment_modele pywikibot.output( u"\n========================\nTraitement de %s\n========================" % main_page.title()) text = main_page.get() ##################### ### Récupération des informations sur la page ##################### templates = textlib.extract_templates_and_params(text) template_in_use = None for tuple in templates: if tuple[0] != u'Articles récents': continue else: template_in_use = tuple[1] break if not template_in_use: pywikibot.output( u"Aucun modèle {{Articles récents}} détecté sur la page %s" % main_page.title()) continue titre_categorie = check_and_return_parameter( template_in_use, u'catégorie') if not titre_categorie: continue cat = pywikibot.Category(site, titre_categorie) nbMax = check_and_return_parameter(template_in_use, 'nbMax', 10) try: nbMax = int(nbMax) except: pywikibot.output(u'Erreur : nbMax incorrect') continue namespaces = check_and_return_parameter(template_in_use, 'namespaces', '0') namespaces = namespaces.split(',') try: namespaces = [int(k) for k in namespaces] except: pywikibot.output( u'Erreur : des namespaces spécifiés ne sont pas des entiers' ) continue recurse = check_and_return_parameter(template_in_use, 'recurse', '0') if recurse.lower().strip() in ('oui', '1'): recurse = True else: recurse = False delai_creation = check_and_return_parameter( template_in_use, 'delai', '0') try: delai_creation = int(delai_creation) except: pywikibot.output(u'Erreur : delai incorrect') continue format_date = check_and_return_parameter(template_in_use, u'date') or None if format_date: try: test_date = datetime.datetime.now() test_date.strftime(format_date) except: format_date = None pywikibot.output(u'Erreur : format de date incorrect') puce = check_and_return_parameter(template_in_use, 'puces', '#') listeRecents = 
text[(text.index(matchDebut1) + len(matchDebut1)):text.index(matchFin1)] # Permet d'enlever le premier élément (vide) de la liste listeRecents = listeRecents.split('\n%s ' % puce)[1:] listeRecents_old = [page for page in listeRecents] listeRecents = list() dico_dates_presentes = {} for recent in listeRecents_old: r = re.search(u"(\[\[.*\]\]) ?(\(.+\))?", recent) if r: listeRecents.append(r.group(1)) if r.group(2): dico_dates_presentes[r.group(1)] = r.group(2)[1:-1] else: pass text = re.sub( re.compile(u"%s.*%s" % (matchDebut2, matchFin2), re.S), u"%s%s" % (matchDebut2, matchFin2), text) ##################### # Au cas où il n'y aurait aucune nouvelle page mais # une ou des pages ayant été supprimée(s) exception_maj = False # Pour préciser le résumé d'édition precisions_comment = u"" pywikibot.output('stade 0') ##################### ### Vérification des pages récentes actuelles (en cas de suppression) ##################### for titre_article in listeRecents: try: page = pywikibot.Page( site, re.sub(u"\[\[(.*)\]\]", "\\1", titre_article )) # Pour enlever les crochets : [[…]]. # Si la page existe toujours et n'est pas une # redirection, on la laisse dans la liste… page.get() if format_date and not dico_dates_presentes.has_key( titre_article) and find_date(page, cat): # Date trouvée alors qu'elle n'y était pas. exception_maj = True dico_dates_presentes[titre_article] = find_date( page, cat).strftime(format_date) except pywikibot.NoPage: pywikibot.output(u"La page %s n'existe plus." % page.title(asLink=True)) pywikibot.output( u"Suppression de la page %s de la liste listeRecents" % page.title(asLink=True)) precisions_comment += (u"; - %s" % titre_article) listeRecents.remove(titre_article) # On force la mise à jour de la page, même si aucun nouvel article # récent n'est trouvé. exception_maj = True except pywikibot.IsRedirectPage: pywikibot.output( u"La page %s n'est plus qu'une redirection." 
% page.title(asLink=True)) try: nouvelle_page = page.getRedirectTarget() pywikibot.output( u"Modification du titre la page %s (renommée en %s)" % (page.title(asLink=True), nouvelle_page.title(asLink=True, withSection=False))) precisions_comment += ( u"; - %s ; + %s" % (titre_article, nouvelle_page.title(asLink=True, withSection=False))) if not nouvelle_page.title( asLink=True, withSection=False) in listeRecents: listeRecents[listeRecents.index( titre_article)] = nouvelle_page.title( asLink=True, withSection=False) else: pywikibot.output( u"La page destination était déjà présente dans la liste" ) listeRecents.pop(listeRecents.index(titre_article)) # On force la mise à jour de la page, même si aucun nouvel article # récent n'est trouvé. exception_maj = True except: pywikibot.output( u"an error occured (CircularRedirect?)") #except KeyboardInterrupt: # pywikibot.stopme() except: try: pywikibot.output( u"Erreur inconnue lors du traitement de la page %s" % page.title(asLink=True)) except: pywikibot.output( u"Erreur inconnue lors du traitement d'une page") else: # Si pas d'erreur : on passe à la page suivante continue if precisions_comment: precisions_comment = precisions_comment[ 2:] # Pour supprimer le '; ' ##################### ##################### ### Recherches des articles nouveaux ##################### precisions_comment2 = u"" # Récupération de la dernière mise à jour de la page par le bot db = _mysql.connect( host='tools-db', db='s51245__totoazero', read_default_file="/data/project/totoazero/replica.my.cnf") results = db.query( 'SELECT last FROM maj_articles_recents WHERE page="%s"' % main_page.title().replace('"', '\\"').encode('utf-8')) results = db.store_result() result = results.fetch_row(maxrows=0) pywikibot.output(("last check was " + str(result))) if result: first_passage = False t = result[0][0] timestamp = pywikibot.Timestamp.strptime( t, "%Y-%m-%d %H:%M:%S") # Permet de ne générer que la liste des articles ajoutés à la # catégorie après la dernière 
modification de la page # contenant le modèle {{Articles récents}}. #list_new.extend([page for page in site.categorymembers(cat, starttime=timestamp, sortby='timestamp', namespaces=[0])]) list_new = [ page for page in cat.articles(starttime=timestamp, sortby='timestamp', namespaces=namespaces, recurse=recurse) ] list_new.reverse() else: # nouvelle page, premier passage du bot first_passage = True timestamp = main_page.editTime() if delai_creation > 0: timestamp -= datetime.timedelta(hours=delai_creation) # Génération de la première liste, pour éviter si possible de # laisser la page vide. list_new = [page for page in cat.newest_pages(total=nbMax)] # TODO : mieux ? #list_new = [page for page in cat.articles(sortby='timestamp', namespaces=namespaces, recurse=recurse)] pywikibot.output('stade 2') now = datetime.datetime.now() # NB : exception_maj peut être passer à True si un article # a été supprimé de la catégorie. if len(list_new) == 0 and not exception_maj: # Inutile d'aller plus loin s'il n'y a aucun nouvel article. 
end_page(main_page, now, first_passage) continue # Liste des pages pour requête SQL sur base frwiki_p list_new_str = '("' list_new_str += '", "'.join([ page.title(asLink=False, underscore=True).replace('"', '\\"') for page in list_new ]) list_new_str += '")' pywikibot.output(list_new_str) # Fonctionne uniquement avec les pages du ns 0 pour le moment frwiki_p = _mysql.connect( host='frwiki.labsdb', db='frwiki_p', read_default_file="/data/project/totoazero/replica.my.cnf") pywikibot.output( 'SELECT page_title, page_id FROM page where page_title IN %s AND page_namespace=0' % list_new_str.encode('utf-8')) results = frwiki_p.query( 'SELECT page_title, page_id FROM page where page_title IN %s AND page_namespace=0' % list_new_str.encode('utf-8')) results = frwiki_p.store_result() result = results.fetch_row(maxrows=0) pywikibot.output(result) dico_result = {} for tuple in result: title = tuple[0] id = tuple[1] dico_result[title] = id pywikibot.output(dico_result) dico_timestamp = {} pywikibot.output('stade 3') frwiki_p = _mysql.connect( host='frwiki.labsdb', db='frwiki_p', read_default_file="/data/project/totoazero/replica.my.cnf") for key in dico_result: id = dico_result[key] pywikibot.output( 'SELECT cl_from, cl_timestamp FROM categorylinks WHERE cl_from = %s AND cl_to = "%s"' % (id.encode('utf-8'), cat.title(asLink=False, underscore=True, withNamespace=False).encode('utf-8'))) results = frwiki_p.query( 'SELECT cl_from, cl_timestamp FROM categorylinks WHERE cl_from = %s AND cl_to = "%s"' % (id.encode('utf-8'), cat.title(asLink=False, underscore=True, withNamespace=False).encode('utf-8'))) results = frwiki_p.store_result() result = results.fetch_row(maxrows=0) if result: dico_timestamp[key.decode( 'utf-8')] = pywikibot.Timestamp.strptime( result[0][1], "%Y-%m-%d %H:%M:%S") else: pywikibot.output(u"pas de date trouvée pour %s" % key.decode('utf-8')) pywikibot.output(dico_timestamp) # Permet de mettre les nouvelles pages comme des titres : # nécessaires plus loin ! 
list_new = [page.title(asLink=True) for page in list_new] # Permet de récupérer des infos sur la catégorie. # NB : Si ralentit le script, l'item cat_info['pages'] # correspondant au nombre de pages contenues # dans la catégorie doit pouvoir être remplacé # par len(listeCategorie) + len(list_new). cat_info = site.categoryinfo(cat) pywikibot.output(cat_info) pywikibot.output('stade 4') list_new_old = list() list_new_old.extend(list_new) pywikibot.output('delai_creation is %s' % delai_creation) #for titre_page in list_new_old: # print titre_page for titre_page in list_new_old: # NB : titre_page est du type [[Nom de la page]] pywikibot.output("----------") pywikibot.output(u"Page récemment ajoutée : %s" % titre_page) if not titre_page in listeRecents: if delai_creation: # Délai imposé (en heures) depuis la création de l'article, # au-delà duquel l'article récemment ajouté à la catégorie # ne doit pas figurer dans la liste. # Exemple : delai_creation = 24 # => le bot liste uniquement les articles créés il y # a moins de 24h. page = pywikibot.Page(site, titre_page[2:-2]) # NB : date_creation et date_plus_petite_requise # sont du type pywikibot.Timestamp date_creation = page.getVersionHistory()[-1][1] pywikibot.output(date_creation) if delai_creation > 0: date_plus_petite_requise = pywikibot.Timestamp.now( ) - datetime.timedelta(hours=delai_creation) elif delai_creation == -1: # 'timestamp' a été défini plus haut comme étant la date de dernière # édition du bot sur la page. date_plus_petite_requise = timestamp pywikibot.output(date_plus_petite_requise) if date_plus_petite_requise > date_creation: pywikibot.output(u"Vérification du délai : Non") pywikibot.output( u"La page ne satisfait pas le délai depuis la création imposé." ) list_new.remove(titre_page) continue else: pywikibot.output(u"Vérification du délai : OK") precisions_comment2 += (u"; + %s" % titre_page) else: # Si l'article se trouve déjà dans la liste listeRecents # il est inutile de le rajouter à nouveau. 
list_new.remove(titre_page) pywikibot.output( u"L'article était déjà présent sur la page.") # Re-vérification pour voir si list_new contient toujours # au moins une page. if len(list_new) == 0 and not exception_maj: # Inutile d'aller plus loin s'il n'y a aucun nouvel article. pywikibot.output('Nothing left.') continue # Re-vérification pour voir si list_new contient toujours # au moins une page. if len(list_new) == 0 and not exception_maj: # Inutile d'aller plus loin s'il n'y a aucun nouvel article. end_page(main_page, now, first_passage) continue if precisions_comment: # Si precisions_comment contient déjà des infos (suppression de pages) precisions_comment += precisions_comment2 else: precisions_comment = precisions_comment2[ 2:] # Pour supprimer le '; ' pywikibot.output('stade 5') # Pour compléter le résumé d'édition comment = comment % { 'nombre_articles': cat_info['pages'], 'precision_pages': precisions_comment } ##################### ### Création de la liste des articles récents ##################### liste_nouveaux_recents = list() liste_nouveaux_recents.extend(list_new) # Si le nombre d'articles nouveaux est strictement au nombre maximum # d'articles récents qui doivent figurer. if len(liste_nouveaux_recents) < nbMax: i = 0 while len(liste_nouveaux_recents) != nbMax: if len(listeRecents) < i + 1: # Dans le cas où la liste listeRecents ne contiendrait pas # assez d'éléments. break liste_nouveaux_recents.append(listeRecents[i]) i += 1 if i == len(listeRecents ): # Pourrait provoquer une erreur de longueur break elif len(liste_nouveaux_recents) > nbMax: liste_nouveaux_recents = liste_nouveaux_recents[0:(nbMax - 1)] # La liste liste_nouveaux_recents contient désormais # nbMax articles récents exactement pywikibot.output('stade 6') liste_nouveaux_recents_string = u"<!-- Ce tableau est créé automatiquement par un robot. 
Articles Récents DEBUT -->" for titre_article in liste_nouveaux_recents: liste_nouveaux_recents_string += u'\n%s %s' % (puce, titre_article) if format_date and dico_timestamp.has_key( titre_article[2:-2].replace(' ', '_')): pywikibot.output('stade 6-1') pywikibot.output( dico_timestamp[titre_article[2:-2].replace( ' ', '_')].strftime(format_date)) try: liste_nouveaux_recents_string += ( ' (' + dico_timestamp[titre_article[2:-2].replace( ' ', '_')].strftime(format_date).decode('utf-8') + ')') except: try: liste_nouveaux_recents_string += ( ' (' + dico_timestamp[titre_article[2:-2].replace( ' ', '_')].strftime(format_date) + ')') except: raise "erreur au stade 6-1" elif dico_dates_presentes.has_key(titre_article): pywikibot.output('stade 6-2') pywikibot.output(dico_dates_presentes[titre_article]) try: liste_nouveaux_recents_string += ( ' (' + dico_dates_presentes[titre_article] + ')') except: # UnicodeEncodeError: try: liste_nouveaux_recents_string += ( ' (' + dico_dates_presentes[titre_article].decode( 'utf-8') + ')') except: raise "erreur au stade 6-2" liste_nouveaux_recents_string += u"\n<!-- Ce tableau est créé automatiquement par un robot. 
Articles Récents FIN -->" ##################### ##################### ### Mise à jour du contenu de la page ##################### new_text = text pywikibot.output('stade 7') # Mise à jour de la liste des articles récents (listeRecents) new_text = re.sub( re.compile(u'%s.*%s' % (matchDebut1, matchFin1), re.S), liste_nouveaux_recents_string, new_text) pywikibot.output(new_text) pywikibot.output(u'Commentaire: %s' % comment) if not dry: main_page.put(new_text, comment=comment) end_page(main_page, now, first_passage) else: pywikibot.showDiff(main_page.get(), new_text) ##################### except Exception, myexception: pywikibot.output("Erreur lors du traitement de la page %s" % main_page.title(asLink=True)) almalog2.error( u'maj_articles_recents', u'traitement de %s : %s %s' % (main_page.title(asLink=True), type(myexception), myexception.args))
# --- crawler configuration via command-line options ---
# (`parser` is created earlier in this file, before this chunk.)
parser.add_argument('--dbuser', type=str, help='DB user name')
parser.add_argument('--dbpassword', type=str, help='password for DB user')
parser.add_argument(
    '--hostlist',
    type=str,
    help=
    'path to a textfile containing the the host addresses. Each Line one host address. No http(s)://',
    default='hosts.txt')
parser.add_argument('--requestinterval',
                    type=int,
                    help='time to wait between requests in seconds',
                    default=10)
parser.add_argument('--refreshinterval',
                    type=int,
                    help='time to wait before revisit a link in hours',
                    default=24)
# Flag: drop and recreate the crawler tables before starting.
parser.add_argument('--resetdb', action='store_true')

args = parser.parse_args()

# Shared DB handle used by the helper functions defined elsewhere in this
# file (reset_db / init_db / main).
# NOTE(review): credentials are taken verbatim from the CLI — presumably
# this runs only in a trusted environment; confirm.
db = mysql.connect(host=args.dbhost,
                   user=args.dbuser,
                   passwd=args.dbpassword,
                   db=args.dbname)

# Optionally wipe, then (re)create, the schema before crawling.
reset_db(args.resetdb)
init_db()

# One host address per line; note a trailing newline in the file yields an
# empty string at the end of `urls` — downstream code must tolerate it.
with open(args.hostlist, 'r') as hostlist:
    urls = hostlist.read().split('\n')

main()
print(helperHTML.get_html_init()) print(helperHTML.get_html_start_preset()) if(helperSession.any_session_active() == False): #Show session inactive.. mlog.debug(TAG,"In show_remomve_payee with no active session. So prompting to sign in again..") print(helperHTML.get_html_invalid_session_preset()) print(helperHTML.get_html_end_preset()) sys.exit() #ELSE: Show list of payees to remove account_number = helperSession.get_session_accout_no() try: mlog.debug(TAG, "Establishing database connection..") conn = _mysql.connect(db_config.db_host,db_config.db_user,db_config.db_password,db_config.db_name) except Exception as e: mlog.error(TAG,"Unable to conenct to MyBanking Database.") print(''' <h4>Unable to conenct to MyBanking Database.</h4> <div id="entry" > <a href="show_dashboard.py">BACK</a> </div> ''') print(helperHTML.get_html_end_preset()) # print(e) sys.exit() #--------------------------------------------MENU DETAILS---------------------------------------------------- #Show MENU.. print('''
line = line.replace('$', '') line = line.replace('"', '') key = line.split('=') if key[0] == 'dbhost': dbhost = key[1] if key[0] == 'dbname': dbname = key[1] if key[0] == 'dbuser': dbuser = key[1] if key[0] == 'dbpass': dbpass = key[1] sense = SenseHat() now = datetime.datetime.now() con = _mysql.connect(dbhost, dbname, dbuser, dbpass) try: sql = "select power from energy1 where timestamp='" + now.strftime( "%Y-%m-%d %H:%M:00") + "'" con.query(sql) result = con.use_result() row = result.fetch_row()[0] power = row[0] con.close value = power + " Watt" sense.set_rotation(180) sense.show_message(value, scroll_speed=0.1,