def account(bot, update):
    """Conversation entry point: show the linked Last.fm account or start sign-up.

    Returns CHANGE when an account already exists (offering a change button),
    or LOGIN after registering the Telegram user and asking for the login.
    """
    user = update.message.from_user
    db = database.Connection()
    found, user_data = db.check_user(user.username)
    if found:
        text = 'Текущий аккаунт Last.fm:\n\n`{}`'.format(user_data['login'])
        keyboard_change = [[
            InlineKeyboardButton('Изменить аккаунт', callback_data='account_change')
        ]]
        reply_markup = InlineKeyboardMarkup(keyboard_change)
        update.message.reply_text(text, parse_mode=ParseMode.MARKDOWN,
                                  reply_markup=reply_markup)
        return CHANGE
    else:
        # Fixed typo in the user-facing message: "акканут" -> "аккаунт".
        text = ('Чтобы добавить аккаунт Last.fm, отправь мне логин и пароль. '
                'Не бойся, я не буду хранить твой пароль в открытом виде.')
        update.message.reply_text(text)
        update.message.reply_text('Логин:')
        # Reuse the connection opened above instead of creating a second one.
        db.add_user(user.username)
        return LOGIN
def query_flag_data():
    """Fetch the single flag row (id = 1) from the blockindex database."""
    connection = database.Connection(host="127.0.0.1",
                                     database='blockindex',
                                     user='******',
                                     password='******')
    return connection.get("SELECT * FROM flag WHERE id = 1")
def execute_pg_sleep(conn_string):
    """Simple helper to call inside thread."""
    connection = database.Connection(conn_string)
    result = connection.get('SELECT now(), pg_sleep(60)')
    LOG.info(result)
def load_data(hostname, date_str=None):
    """Load one day of telemetry for *hostname* with trailing 5-minute averages.

    date_str: day as 'YYYY-MM-DD', or None for the current day.
    Returns (starttime, endtime, rows) where each row is
    (timestamp, tuple_of_measurement_columns).
    """
    with database.Connection() as cur:
        # Resolve the [starttime, endtime) one-day window.
        if date_str is None:
            cur.execute(
                "select convert(current_date(), datetime), convert(current_date(), datetime) + interval 1 day"
            )
        else:
            cur.execute(
                "select convert(str_to_date(%s,'%%Y-%%m-%%d'), datetime), convert(str_to_date(%s,'%%Y-%%m-%%d'), datetime) + interval 1 day",
                (date_str, date_str))
        starttime, endtime = cur.fetchone()
        # data1: the day's rows. data2: same columns plus a 5-minute lead-in,
        # used below as the source for sliding-window averages.
        cur.execute(
            "create temporary table data1(t datetime,piv float,pia float,piw float,pov float,poa float,loadw float,temp float,kwh float,lkwh float,soc float,aiw float) engine memory"
        )
        cur.execute(
            "create temporary table data2(t datetime,piv float,pia float,piw float,pov float,poa float,loadw float,temp float) engine memory"
        )
        cur.execute(
            "insert into data1(t,piv,pia,piw,pov,poa,loadw,temp,kwh,lkwh,soc,aiw) select t,piv,pia,piw,pov,poa,loadw,temp,kwh,lkwh,soc,aiw from data where hostname = %s and data.t >= %s and data.t < %s",
            (hostname, starttime, endtime))
        cur.execute(
            "insert into data2(t,piv,pia,piw,pov,poa,loadw,temp) select t,piv,pia,piw,pov,poa,loadw,temp from data where hostname = %s and data.t >= %s - interval 5 minute and data.t < %s",
            (hostname, starttime, endtime))
        # Index speeds up the BETWEEN window join below.
        cur.execute("create index idx_t on data2(t) using btree")
        # Pair each instant row with the average over its trailing 5 minutes.
        cur.execute(
            "select data1.t,data1.piv,avg(data2.piv),data1.pia,avg(data2.pia),data1.piw,avg(data2.piw),data1.pov,avg(data2.pov),data1.poa,avg(data2.poa),data1.loadw,avg(data2.loadw),data1.temp,avg(data2.temp),kwh,lkwh,soc,aiw from data1,data2 where data2.t between data1.t - interval 5 minute and data1.t group by data1.t,data1.piv,data1.pia,data1.piw,data1.pov,data1.poa,data1.loadw,data1.temp,kwh,lkwh,soc,aiw order by data1.t"
        )
        return (starttime, endtime, [(row[0], row[1:]) for row in cur])
def insert_mb_report(sql, index="test-index", doc_type="mb_report"):
    """Run *sql*, enrich each row with the person name, and index it into ES.

    Rows whose person lookup fails are skipped (best-effort behavior kept).
    Removed the dead commented-out CSV-loading code from the old docstring.
    """
    person_sql = '''
        select a.psnname from bd_person a where a.pk_user=:1
    '''
    db = database.Connection()
    rows = db.query(sql)
    for line in rows:
        line["VDATE"] = convert_date(line["VDATE"].strip())
        try:
            line["PSNNAME"] = db.get(person_sql, line["PK_USER"])["PSNNAME"]
        except Exception:
            # Was a bare `except:` — keep skip-on-failure but stop swallowing
            # SystemExit / KeyboardInterrupt.
            continue
        data = json.dumps(line)
        print(data)
        es.index(index=index, doc_type=doc_type, body=data)
def initDb(): global _db try: _db = database.Connection(**db_config.gtc_traffic_db) except Exception, e: print "Exception:", e return False
def get_next(self, database_connection=None):
    """Return the timestamp FRAMES_GAP frames ahead, resolving it lazily.

    Frames cycle 1..4; wrapping past 4 advances the time by one second.
    Opens (and closes) its own DB connection when none is supplied.
    """
    if self.next is not None:
        # Already resolved earlier — no database round trip needed.
        return self.next
    owns_connection = database_connection is None
    if owns_connection:
        database_connection = db.Connection()
    t = self.time
    f = self.frame
    for _ in range(FRAMES_GAP):
        f += 1
        if f > 4:
            f = 1
            t = t.addSecs(1)
        candidate = TimeStamp(self.date, t, self.cam, f)
        if candidate.exists(database_connection):
            self.next = candidate
            candidate.previous = self
            break
    if owns_connection:
        database_connection.close()
    return self.next
def initDb(): global _db try: _db = database.Connection(**iz_db) except Exception, e: print "Exception:", e return False
def q_index():
    """Return every (createtime, idx) row of bindex serialized as JSON."""
    connection = database.Connection(host="127.0.0.1",
                                     database='blockindex',
                                     user='******',
                                     password='******')
    rows = connection.query("SELECT createtime,idx FROM bindex")
    return json.dumps(rows, default=encoder)
def __init__(self):
    """Register the node/relationship/stat routes and open the graph DB."""
    routes = [
        (r"/node", NodeHandler),
        (r"/node/([^/]+)/relationships", RelationshipHandler),
        (r"/node/([^/]+)/stats", StatHandler),
    ]
    tornado.web.Application.__init__(self, routes, **settings)
    self.db = database.Connection(options.graph_path)
def get_recent_pv_power():
    """Return [{"nodename": ..., "piw": ...}] — peak PV input power per host
    over the last 10 seconds, highest first."""
    with database.Connection() as cur:
        cur.execute(
            "select hostname,max(piw) as maxpiw from data where t > now() - interval 10 second group by hostname order by maxpiw desc"
        )
        return [{"nodename": hostname, "piw": float(max_piw)}
                for hostname, max_piw in cur]
def get(self, timestamp, database_connection=None):
    """Return the detection set for *timestamp*, caching it in self.store.

    A connection is opened on demand when the caller did not supply one.
    """
    if timestamp not in self.store:
        if database_connection is None:
            database_connection = db.Connection()
        detections = database_connection.get_detections_on_timestamp(timestamp)
        self.store[timestamp] = detections
    return self.store[timestamp]
def __init__(self):
    """Render every HTML template and open one DB connection per secret entry."""
    self.templates = {
        filename.split('templates/')[1].rsplit('.html')[0]: render_template(filename)
        for filename in glob.glob('templates/*.html')
    }
    self.connections = {
        name: database.Connection(conn)
        for name, conn in service.secrets['connections'].items()
    }
def crawl_from_db():
    """Return the 'data' column of every row in the crawler bk table."""
    connection = database.Connection(host="127.0.0.1", database='crawler',
                                     user='******', password='******')
    rows = connection.query("select * from bk")
    # List comprehension matches the Py2 `map(...)` result (a list).
    payload = [row['data'] for row in rows]
    connection.close()
    return payload
def __init__(self):
    """Compile the index template and connect to the four environments."""
    with open('index.html', 'rb') as f:
        self.indexTemplate = mako.template.Template(f.read())
    # Dict comprehension replaces the original dict(map(lambda ...)) chain.
    self.connections = {
        env: database.Connection(service.secrets['connections'][env])
        for env in ['Development', 'Integration', 'Archive', 'Production']
    }
def __init__(self):
    """Register user routes and seed the users table with two sample rows."""
    routes = [
        (r"/", GetUserHandler),
        (r"/user/new", NewUserHandler),
        (r"/user", GetUserHandler),
    ]
    app_settings = {}
    tornado.web.Application.__init__(self, routes, **app_settings)
    self.db = database.Connection()
    self.db.execute('create table users (id integer, name char(20));')
    self.db.execute('insert into users (id, name) values (1,"jack");')
    self.db.execute('insert into users (id, name) values (2,"jill");')
def then_connection_strings_are(context, cluster):
    """ Function to check connection strings """
    connection = database.Connection(
        PLPROXY_CONN_STRING.format(port=context.plproxy_port))
    partitions = connection.get_func('plproxy.get_cluster_partitions',
                                     i_cluster_name=cluster)
    LOG.info(context.table.rows)
    helpers.assert_results_are_equal(context.table, partitions)
def set_value(nodename, key, value): with database.Connection() as cur: cur.execute("select count(*) from nodes where nodename=%s", (nodename, )) if cur.fetchone()[0] == 0: print "No such node defined" return False cur.execute( "replace into schedule(nodename,`key`,int_value,created_at) values(%s,%s,%s,now())", (nodename, key, value)) return True
def capture(device_number, width, height, skip_frames):
    """Grab one frame from a capture device, guarded by a MySQL advisory lock.

    Returns the last frame read after skipping *skip_frames* frames, or
    None when the lock is unavailable or no frame was read.
    """
    with database.Connection() as cur:
        cur.execute("select get_lock(%s,%s)", (LOCK_NAME, LOCK_TIMEOUT))
        if cur.fetchone()[0] != 1:
            return None
        # else
        cap = cv2.VideoCapture(device_number)
        try:
            cap.set(3, width)   # property 3: frame width
            cap.set(4, height)  # property 4: frame height
            frame = None  # was unbound (NameError) when skip_frames == 0
            for i in range(skip_frames):
                r, frame = cap.read()
            return frame
        finally:
            cap.release()  # was leaked: the device stayed open after return
def account_login(bot, update):
    """Store the Last.fm login the user sent, then prompt for the password."""
    user = update.message.from_user
    database.Connection().update_login(user.username, update.message.text)
    update.message.reply_text('Пароль:')
    return PASSWORD
def when_action_replay(context, action, container_name):  # pylint: disable=W0613
    """ Function that performs actions with WAL replay on replica """
    conn_string = moby.container_conn_string(container_name)
    statement = 'SELECT pg_wal_replay_{action}()'.format(action=action)
    result = database.Connection(conn_string).get(statement)
    if result.errcode:
        LOG.info(result)
        raise RuntimeError('Could not execute statement')
def reset_email():
    """GET: render the reset form.  POST: clear the flag and set its 'next' value.

    The old code interpolated the form value straight into the SQL string
    ("... next=%s ..." % next) — an SQL-injection vector on untrusted input.
    It now passes the value as a bound query parameter instead.
    """
    if request.method == 'GET':
        return render_template("reset.html")
    else:
        next_value = request.form.get("next")  # renamed: `next` shadowed the builtin
        db = database.Connection(host="127.0.0.1",
                                 database='blockindex',
                                 user='******',
                                 password='******')
        affect = db.execute_rowcount(
            "UPDATE flag set flag = 0, next=%s WHERE id = 1", next_value)
        return jsonify({'code': 200, 'affect': affect})
def start(self):
    """Run a validation pass over frames FRAME_START..FRAME_END.

    Resets every module, pre-links the timestamp chain for the range,
    then feeds each detection (with its updated/truth ids and path
    number) to every module, finally printing each module's result.
    """
    self.dset_store.clear()
    for m in self.modules:
        m.reset()
    start_timestamp = ds.TimeStamp(config.FRAME_START, config.CAM)
    duration = config.FRAME_END - config.FRAME_START + 1
    # Pre-link consecutive timestamps so get_next() can walk the chain below.
    previous_timestamp = start_timestamp
    for x in range(config.FRAME_START + 1, config.FRAME_END + 1):
        timestamp = ds.TimeStamp(x, config.CAM)
        timestamp.connect_with_previous(previous_timestamp)
        previous_timestamp = timestamp
    print 'start validation'
    print(' date = ' + str(config.DATE[0]) + '/' + str(config.DATE[1]) + '/' +
          str(config.DATE[2]) + ', ' + 'time = ' + str(config.TIME[0]) + ':' +
          str(config.TIME[1]))
    print ' cam = ' + str(config.CAM) + ', frames = ' + str(
        config.FRAME_START) + ' til ' + str(config.FRAME_END)
    database_connection = db.Connection()
    timestamp = start_timestamp
    for x in range(0, duration):
        print 'processing timestamp ' + timestamp.time_name
        dset = self.dset_store.get(timestamp, database_connection)
        for d in dset.detections:
            updated_id = database_connection.get_updated_id(d)
            truth_id = database_connection.get_truth_id(d)
            path_number = database_connection.get_path_number(d)
            for m in self.modules:
                m.update(d, updated_id, truth_id, path_number)
        # Stop early when the chain ends before `duration` frames.
        timestamp = timestamp.get_next()
        if timestamp is None:
            break
    print 'validation finished'
    print '--------------------------------'
    result_text = ''
    for m in self.modules:
        result_text += m.get_result()
    print result_text
def __init__(self, handlers):
    """Build the tornado Application and attach backends named in `config`.

    Depending on which keys exist in the module-level ``config``:
    'mysql' or 'mongo' -> self.db, 'redis' -> self.redis,
    'memcache' -> self.cache, 'beanstalk' -> self.jobs.  Also sets up an
    OAuth1 server and records the singleton in Application.instance.
    """
    settings = {
        'template_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "templates")),
        'static_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "static"))
    }
    # Any 'tornado' section in config overrides the defaults above.
    if 'tornado' in config:
        tornado_settings = config['tornado']
        for key in tornado_settings.keys():
            settings[key] = tornado_settings[key]
    tornado.web.Application.__init__(self, handlers, **settings)
    if 'mysql' in config:
        log.info("--> tornado initializing mysql")
        import database
        try:
            self.db = database.Connection()
        except Exception as e:
            # Connection failure is logged, not fatal: self.db stays unset.
            log.error("Could not connect to MySQL: %s" % log.exc(e))
    elif 'mongo' in config:
        log.info("--> tornado initializing mongo")
        try:
            mongo = config['mongo']
            import pymongo
            connection = pymongo.Connection(mongo['host'])
            self.db = connection[mongo['database']]
        except Exception as e:
            log.error("Could not connect to mongo: %s" % log.exc(e))
    if 'redis' in config:
        log.info("--> tornado initializing redis")
        import redis
        self.redis = redis.StrictRedis()
    if 'memcache' in config:
        # NOTE(review): "torando" typo is in the original log message; left
        # unchanged because runtime strings must not be altered here.
        log.info("--> torando initializing memcache")
        import memcache
        self.cache = memcache.Client([config['memcache']['address'] + ":" + str(config['memcache']['port'])])
    self.jobs = None
    if 'beanstalk' in config:
        log.info("--> tornado initializing beanstalk")
        import jobs
        self.jobs = jobs.Jobs()
    # intialize oauth server
    try:
        self.oauth_server = oauth2.Server(signature_methods={'HMAC-SHA1': oauth2.SignatureMethod_HMAC_SHA1()})
    except ImportError:
        self.oauth_server = None
    Application.instance = self
def insert_data(sql, index="test-index", doc_type="pdcust", convert_d=False):
    """Query rows via *sql* and index each one into Elasticsearch.

    convert_d: whether to convert the date format; per the original
    (Chinese) note this flag is currently not passed by callers and is
    kept only for compatibility.
    """
    db = database.Connection()
    for row in db.query(sql):
        if convert_d:
            row["VDATE"] = convert_date(row["VDATE"].strip())
        print(row)
        es.index(index=index, doc_type=doc_type, body=row)
def store_to_db(data):
    """Replace the entire bk table with the given iterable of unicode strings."""
    db = database.Connection(host="127.0.0.1", database='crawler',
                             user='******', password='******')
    affect = db.execute_rowcount("DELETE FROM bk")
    logger.info("delete bk affect :%s" % str(affect))
    # Values are escaped via MySQLdb before being joined into one INSERT.
    quoted = ['("%s")' % MySQLdb.escape_string(item.encode("utf8"))
              for item in data]
    affect = db.execute_rowcount(
        "INSERT INTO bk(data) VALUES %s" % ','.join(quoted))
    logger.info("insert bk affect :%s" % str(affect))
    db.close()
def start( self ):
    """Run a validation pass starting at config.START_TIMESTAMP.

    Walks FRAMES_DURATION consecutive timestamps, feeding each detection
    (with its updated/truth ids and path number) to every module, then
    prints the accumulated results.  Aborts early when the starting
    timestamp is not present in the database.
    """
    self.dset_store.clear()
    for m in self.modules:
        m.reset()
    timestamp = config.START_TIMESTAMP
    duration = config.FRAMES_DURATION
    print 'start validation'
    print ' host = ' + config.DB_HOST + ', date = ' + timestamp.date_name + ', cam = ' + str(timestamp.cam)
    print ' start time = ' + timestamp.time_name + ', duration = ' + str(duration) + ' frames'
    database_connection = db.Connection()
    # Bail out early when the first timestamp has no data at all.
    if not timestamp.exists( database_connection ):
        database_connection.close()
        print 'timestamp ' + timestamp.time_name + ' not found'
        print 'validation stopped'
        print '--------------------------------'
        return
    for x in range( 0, duration ):
        print 'processing timestamp ' + timestamp.time_name
        dset = self.dset_store.get( timestamp, database_connection )
        for d in dset.detections:
            updated_id = database_connection.get_updated_id( d )
            truth_id = database_connection.get_truth_id( d )
            path_number = database_connection.get_path_number( d )
            for m in self.modules:
                m.update( d, updated_id, truth_id, path_number )
        # Stop early when the timestamp chain ends before `duration` frames.
        timestamp = timestamp.get_next()
        if timestamp is None:
            break
    database_connection.close()
    print 'validation finished'
    print '--------------------------------'
    result_text = ''
    for m in self.modules:
        result_text += m.get_result()
    print result_text
def account_password(bot, update):
    """Hash the password the user sent, store it, and end the conversation."""
    user = update.message.from_user
    hashed = pylast.md5(update.message.text)
    database.Connection().update_password(user.username, hashed)
    text = (
        'Готово, теперь ты можешь загружать журналы! '
        'Для этого тебе нужно просто отправить мне файл.\n\n'
        'И да, для безопасности советую удалить сообщение с паролем из чата.')
    update.message.reply_text(text)
    return ConversationHandler.END
def get_node_config(nodename):
    """Return {"bt", "bc"[, "gpio"]} for *nodename*, or None if unknown."""
    with database.Connection() as cur:
        cur.execute(
            "select battery_type,battery_capacity,gpio from nodes where nodename=%s",
            (nodename, ))
        row = cur.fetchone()
    if row is None:
        return None
    battery_type, battery_capacity, gpio = row
    # battery_type : Battery type(1=Sealed,2=Gel,3=Flooded,4=3S/6S,5=7S)
    # battery_capacity : Battery capacity in Ah
    node_config = {"bt": int(battery_type), "bc": int(battery_capacity)}
    if gpio is not None:
        node_config["gpio"] = 1 if gpio else 0
    return node_config
def downloadFiles(beginHash, endHash, outputPath):
    '''Downloads files from the production database with their original
    filenames, between two given hashes in time (inclusive).
    '''
    connection = database.Connection(service.secrets['connections']['pro'])
    list_query = '''
        select fileHash, fileName
        from files
        where creationTimestamp >= (
            select creationTimestamp from files where fileHash = :s
        ) and creationTimestamp <= (
            select creationTimestamp from files where fileHash = :s
        ) and state = 'Acknowledged'
        order by creationTimestamp
    '''
    fileList = connection.fetch(list_query, (beginHash, endHash))
    if not fileList:
        logging.error(
            'Empty file list. Make sure you typed the correct hashes and that the beginHash was first in time.'
        )
        return -1
    logging.info('File list:')
    for fileHash, fileName in fileList:
        logging.info(' %s %s', fileHash, fileName)
    # Ask for interactive confirmation before any data is transferred.
    if 'y' != raw_input('Download? [y] ').strip().lower():
        logging.error('Aborted by user.')
        return -1
    content_query = '''
        select fileContent from files where fileHash = :s
    '''
    for fileHash, fileName in fileList:
        logging.info('Downloading %s %s...', fileHash, fileName)
        content = connection.fetch(content_query, (fileHash, ))[0][0]
        with open(os.path.join(outputPath, '%s.tar.bz2' % fileName), 'wb') as f:
            f.write(content)