def parse_results(self, results, cp, benchmark):
    data = self.__workload_parser(results)
    print(data)
    wn = benchmark.workload_name + benchmark.tipology
    # `st` is expected to come from the enclosing module's scope.
    database.database(st, cp.name, cp.vm_user, cp.size_id,
                      benchmark.tool_name, wn, cp.platform, data,
                      None, None, None, None, None, None, None, None, None,
                      None, None, None, None, None, None, None, None, None,
                      None, None, None, None, None, None, None, None, None,
                      None, None, None, None, None, None, None, None, None)

def parse_results(self, results, cp, benchmark):
    data = self.__workload_parser(results)
    wn = benchmark.workload_name + benchmark.tipology
    database.database(st, cp.name, cp.vm_user, cp.size_id,
                      benchmark.tool_name, wn, cp.platform,
                      data[0], data[1], data[2], data[3], data[4], data[5],
                      data[6], data[7], data[8], data[9], data[10], data[11],
                      data[12], data[13], data[14], data[15], data[16],
                      data[17], data[18],
                      None, None, None, None, None, None, None,
                      None, None, None, None, None, None)

def parse_results(self, results, cp, benchmark):
    data = self.__workload_parser(results)
    database.database(st, cp.name, cp.vm_user, cp.size_id,
                      benchmark.tool_name, benchmark.workload_name, cp.platform,
                      data[0], data[1], data[2], data[3], data[4], data[5],
                      None, None, None, None, None, None, None, None, None,
                      None, None, None, None, None, None, None, None, None,
                      None, None, None, None, None, None, None, None)

def all():
    try:
        utils.dirtycheck()
    except exceptions.DirtyRepository as e:
        print("Warning: " + str(e))
    base()
    parse_kconfig()
    gen_fixed()
    checkmeasure()
    database.database()  # check that the database is initialized

def __init__(self, dbpath, report_date):
    self.report_date = report_date
    self.batch_limit = 5000
    # Connect once; the original reconnected a second time and discarded
    # the first connection.
    self.db = database(dbpath)
    self.db.connect()
    logging.debug("init: database connection completed")
    self.write_batch = self.db.write_batch

def load_submissions_live(self):
    self.db = database()
    self.db.connect()
    for submission in self.subreddit.get_unmoderated(limit=1000):
        if submission.fullname in self.done:
            continue
        try:
            self.db.insert_submission(submission)
            print("inserting")
        except Exception:
            print(bcolors.FAIL + "Unable to insert submission "
                  + submission.fullname + bcolors.ENDC)
            continue
        if submission.approved_by is not None:
            continue
        print("flood detection...")
        self.flood_protection(submission)
        print("max submission...")
        self.max_submissions(submission)
        # append, not +=: += would splice the string in character by character
        self.done.append(submission.fullname)
        time.sleep(0.5)

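# Note on the membership test above: `fullname in self.done` is O(n) on a
# list. When insertion order is irrelevant, a set is the usual choice; a
# minimal, self-contained sketch of the difference:
seen = set()
seen.add("t3_abc123")
assert "t3_abc123" in seen  # O(1) average lookup, vs O(n) for a list
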
def _update(self):
    while True:
        for dp in self.datapaths.values():
            parser = dp.ofproto_parser
            ofproto = dp.ofproto
            self.logger.info("dpid=" + str(dp.id))
            if dp.id == 161:
                self.logger.info("Check SQL Database")
                db = database()
                for data in db.db_getList():
                    print(data["id"], type(data["id"]),
                          data["address"], type(data["address"]),
                          data["access"], type(data["access"]))
                    if data["access"] == 1:
                        self.logger.info("DENY " + str(data["address"]))
                        match = parser.OFPMatch(eth_type=0x0800,
                                                ipv4_src=str(data["address"]))
                        actions = []  # an empty action list drops matching packets
                        self.add_flow(dp, data["id"] + 10, match, actions)
                    if data["access"] == 0:
                        self.logger.info("ALLOW " + str(data["address"]))
                        match = parser.OFPMatch(eth_type=0x0800,
                                                ipv4_src=str(data["address"]))
                        mod = parser.OFPFlowMod(dp, command=ofproto.OFPFC_DELETE,
                                                out_port=ofproto.OFPP_ANY,
                                                out_group=ofproto.OFPG_ANY,
                                                match=match)
                        dp.send_msg(mod)
        hub.sleep(10)

def boot(config, to_database=True):
    try:
        out = utils.callsubprocess('boot', conf.boot_command, conf.boot_output,
                                   True, timeout=conf.boot_timeout)
        result = 'nominal'
    except exceptions.ProcessFailed as e:
        result = 'failed'
        out = e.output
        traceback.print_exc()
    except exceptions.ProcessTimeout as e:
        result = 'timeout'
        out = e.output
        traceback.print_exc()
    value = None
    try:
        res = utils.callsubprocess('parse_command', conf.parse_command,
                                   conf.parse_output, True, stdin=out)
        value = float(res[0])
    except Exception as e:
        print("W: parse exception: " + str(e))
    if to_database:
        dtb = database.database()
        # Strip NUL bytes from the captured output before storing it.
        txt = ''
        for ln in out:
            for c in ln:
                if not c.encode(sys.getdefaultencoding()) == b'\0':
                    txt += c
            txt += '\n'
        dtb.add_measure(txt, result, config.id, value)

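# The character-by-character NUL stripping above can be collapsed into one
# expression. A sketch under the assumption that `lines` is a list of str,
# as `out` appears to be; `strip_nuls` is a hypothetical helper name:
def strip_nuls(lines):
    """Join lines, dropping NUL characters and terminating each with \\n."""
    return ''.join(ln.replace('\0', '') + '\n' for ln in lines)
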
def _mac(self, req, **kwargs):
    ip_addr = kwargs['ip_addr'].encode('utf-8')
    db = database(DB_PATH)
    dpid_list = db.getDPIDLIST()
    bflag = False
    info = []
    mac_addr = ''
    logined = 1
    for dpid in dpid_list:
        rcd_dpid = db.findDPIDByX(dpid, 'IP_ADDR', ip_addr)
        for rcd in rcd_dpid:
            mac_addr = rcd[0].encode('utf-8')
            bflag = True
            break
        if bflag:
            break
    info.append(mac_addr)
    rcd_dev = db.findDEVICEByX('MAC_ADDR', mac_addr)
    if len(rcd_dev) == 0:
        logined = 0
    info.append(logined)
    body = json.dumps(info)
    return Response(content_type='application/json', body=body)

def prepare(): """Prepare for measuring Outcome is Linux image for generated configuration.""" def get(): confs = dtb.get_unmeasured() for pr in __confs_prepared__: for cn in confs.copy(): if pr == cn.hash: confs.remove(cn) break return confs print("Preparing new image.") global __confs_unmeasured__ if len(__confs_unmeasured__) == 0: dtb = database.database() confs = get() if len(confs) == 0: configurations.generate() confs = get() if len(confs) == 0: raise exceptions.NoApplicableConfiguration() __confs_unmeasured__ = list(confs) con = __confs_unmeasured__.pop() kernel.config(con.config) img = kernel.make(con.hash) print("Prepared image: " + img) __confs_prepared__.append(con.hash) return img, con
def __init__(self, debug=False):
    self.title = "SimpleRSS {0}".format(VERSION)
    # Instantiate helper classes.
    if debug:
        self.logwriter = logWriter()
    else:
        self.logwriter = logWriter("/dev/null")
    self.config = self.getConfigs()
    self.database = database(os.path.join(self.getConfigPath(), 'database.db3'),
                             VERSION)
    self.screen = screen(self.config)
    self.rssworker = rss()
    self.screen.setWindowTitle(self.title)
    self.moveUpKeys = [KEY_UP, ord('k')]
    self.moveDownKeys = [KEY_DOWN, ord('j')]
    try:
        self.showFirstPage()
    except Exception:
        self.screen.close()  # restore the terminal before printing the traceback
        print("simpleRSS crashed:")
        traceback.print_exc()
        return

def __init__(self):
    self.db = database.database()
    self.t = CursesTimer.CursesTimer()
    self.u = Utils.Utils()
    self.NofPomodoro = 4    # pomodoros before a long break
    self.workTime = 25      # minutes per pomodoro
    self.sBreakTime = 5     # short break, in minutes
    self.lBreakTime = 15    # long break, in minutes
    self.minute = 60        # seconds in a minute
    self.workComment = "Working"
    self.shortBreakComment = "Break"
    self.longBreakComment = "LongBreak"
    self.audio = Audio.Audio()
    self.audio.setAudio_file("bell.mp3")
    self.pomodoroCount = self.db.getTodaysPomodoro()  # pomodoros done today
    self.dayend = 0         # hour (0-23) at which the day rolls over
    today = datetime.datetime.today()
    # Next end of day: today at `dayend` if we are still before that hour,
    # otherwise tomorrow.
    self.nextEndDatetime = today.replace(hour=self.dayend, minute=0,
                                         second=0, microsecond=0)
    if today.hour >= self.dayend:
        # timedelta handles month/year boundaries, unlike replace(day=day + 1)
        self.nextEndDatetime += datetime.timedelta(days=1)

def load_submissions_live(self):
    self.db = database()
    self.db.connect()
    for submission in praw.helpers.submission_stream(
            self.r, self.config['reddit']['subreddit'],
            limit=None, verbosity=2):
        self.db.insert_submission(submission)

def get_modlog(self):
    db = database()
    db.connect()
    duplicates = 0
    exit_at = 100
    for x in self.subreddit.get_mod_log(
            limit=int(self.config["reddit"]["max_requests"])):
        if x is None:
            continue
        result = db.insert_modlog(x)
        if not hasattr(x, "target_fullname"):
            return
        try:
            # target_fullname looks like "t3_abc123": kind prefix, then id.
            kind, item_id = x.target_fullname.split("_")
            if kind == "t3":
                db.insert_submission(self.get_submission(item_id))
        except Exception as e:
            print(str(e))
            print("couldn't extract type")
        if result is False:
            duplicates += 1
            if duplicates == exit_at:
                print("OK I've had enough")
                break

def __init__(self, sock):
    self.shutdown = False
    self.socket = sock
    self.socket.setblocking(1)
    self.db = database.database()
    self.debug = True
    self.rec_id = None

def __calchash__(con):
    dt = database.database()
    csort = dt.get_configsort()
    cstr = ""
    # Concatenate truthy options in the persistent sort order.
    for c in csort:
        try:
            if con[c]:
                cstr += c
        except KeyError:
            pass
    # Register any keys not yet in the persistent sort order.
    for key, val in con.items():
        try:
            csort.index(key)
        except ValueError:
            csort.append(key)
            dt.add_configsort(key)
            if val:
                cstr += key
    hsh = hashlib.md5(bytes(cstr, 'UTF-8'))
    return hsh.hexdigest()

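# Standalone sketch of the idea behind __calchash__ above. CONFIG_SORT stands
# in for the database-backed configsort table (an assumption): a persistent,
# append-only key order makes the digest independent of dict iteration order.
import hashlib

CONFIG_SORT = []  # persisted key order, grows append-only

def calchash(con):
    cstr = ""
    for key in CONFIG_SORT:        # known keys first, in stored order
        if con.get(key):
            cstr += key
    for key, val in con.items():   # register keys seen for the first time
        if key not in CONFIG_SORT:
            CONFIG_SORT.append(key)
            if val:
                cstr += key
    return hashlib.md5(cstr.encode("UTF-8")).hexdigest()

# Same truthy options hash identically regardless of insertion order.
assert calchash({"CONFIG_A": 1, "CONFIG_B": 0}) == \
       calchash({"CONFIG_B": 0, "CONFIG_A": 1})
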
def process_item(self, item, spider):
    db = database.database()
    try:
        if item['url'] is None:
            db.log(item['source'], "Url is Null")
            return
        # Generate hash for the url - used as primary key in the database.
        sha1 = hashlib.sha1()
        sha1.update(item['url'].encode('utf-8'))
        url_hash = sha1.hexdigest()
        numTLD = db.countTLD(item['url'])
        # Persist item in database (1337 is the default value).
        db.addItem(item['source'], url_hash, item['url'], numTLD)
        # Reset the duplicate counter after a successful insert.
        self.dupCounter[item['source']] = 0
        #whois = whois_geoip.whois_geoip(item['url'])
        #db.addWhois(url_hash, item['source'], whois)
    except MySQLdb.Error as e:
        # Error 1062: duplicate key.
        if e.args[0] == 1062:
            if item['source'] in self.dupCounter:
                self.dupCounter[item['source']] += 1
            else:
                self.dupCounter[item['source']] = 1
            if self.dupCounter[item['source']] > 1000:
                self.crawler.engine.close_spider(spider,
                                                 'Pages have been crawled')

def googleIt(url):
    db = database()
    source = url
    langs = ["it", "zh-Hans", "fr", "nl", "es", "pt-BR", "ca", "pa", "qu",
             "mr", "mo", "mn", "ne", "pcm", "nn", "or", "qu"]
    random.shuffle(langs)
    search = pygoogle.pygoogle(hl=langs[0], query="site:" + source)
    urlList = search.get_urls()
    print(urlList)
    for eachUrl in urlList:
        # Generate hash for the url - used as primary key in the database.
        try:
            eachUrl = u"".join(eachUrl).encode('utf-8').strip()
            # A fresh sha1 object per URL: reusing one object across the loop
            # would hash the concatenation of every URL seen so far.
            url_hash = hashlib.sha1(eachUrl).hexdigest()
            numTLD = db.countTLD(eachUrl)
            # Persist item in database.
            db.addGoodUrl(source, url_hash, eachUrl, numTLD)
        except Exception:
            continue
    print('Done')
    db.close()

def _hosts(self, req, **kwargs):
    body = None
    db = database(DB_PATH)
    if 'dpid' in kwargs:
        dpid = dpid_lib.str_to_dpid(kwargs['dpid'])
        rcd_dpid = db.selectDPID(dpid=dpid)
        hosts_of_dpid = []
        tmp = {'mac': 'a', 'port': 0, 'ip': 'a', 'slave': 1}
        for x in rcd_dpid:
            tmp['mac'], tmp['port'], tmp['ip'], tmp['slave'] = \
                x[0].encode('utf-8'), x[1], x[2].encode('utf-8'), x[3]
            hosts_of_dpid.append(dict(tmp))
        body = json.dumps(hosts_of_dpid)
    else:
        rcd_dev = db.selectDEVICE()
        hosts = []
        tmp = {'user_id': 0, 'dpid': 0, 'mac': 'a', 'vlan': 0,
               'ip': 'a', 'port': 0}
        # Add the gateway.
        tmp['mac'] = db.selectGATEWAY()[0][0].encode('utf-8')
        tmp['ip'] = db.selectGATEWAY()[0][1].encode('utf-8')
        gw_dpid = db.getDPIDBySlave(mac_addr=tmp['mac'])
        if gw_dpid is not None:
            tmp['port'] = db.findDPIDByX(gw_dpid, 'MAC_ADDR', tmp['mac'])[0][0]
            tmp['dpid'] = dpid_lib.dpid_to_str(gw_dpid)
        hosts.append(dict(tmp))
        # Add the hosts.
        for dev in rcd_dev:
            tmp['mac'], tmp['vlan'], tmp['user_id'] = \
                dev[0].encode('utf-8'), dev[1], dev[2]
            dpid = db.getDPIDBySlave(mac_addr=tmp['mac'])
            tmp['dpid'] = dpid_lib.dpid_to_str(dpid)
            rcd_host = db.findDPIDByX(dpid, 'MAC_ADDR', tmp['mac'])
            tmp['port'], tmp['ip'] = rcd_host[0][0], rcd_host[0][1].encode('utf-8')
            hosts.append(dict(tmp))
        body = json.dumps(hosts)
    return Response(content_type='application/json', body=body)

def asignartarea(self, investigador="0"):
    database = db.database("basedatosCAP.db")
    investigadores = database.getNames("investigador")
    asistentes = database.getNames("asistente")
    proyectos = database.getWorkingPapers(int(investigador))
    _asist = ""
    _proy = ""
    _inv = '<option selected value="">-Investigador-</option>'
    # Creates a list with all researchers.
    if investigador != "0":
        _inv = "<option value = %d>%s</option>\n" % (
            int(investigador),
            database.getName("investigador", int(investigador)))
    for x in investigadores:
        if database.getId("investigador", x) != int(investigador):
            _inv = _inv + """<option value = %d>%s</option>\n""" % (
                database.getId("investigador", x), x)
    # Creates a list with all the assistants.
    for x in asistentes:
        _asist = _asist + """<option value = "%d"> %s</option>\n""" % (
            database.getId("asistente", x), x)
    # Creates a list with all the working papers.
    for x in proyectos:
        _proy = _proy + """<option value = %d>%s</option>\n""" % (
            database.getIdWP(x), x)
    globals()["name"] = "Asignar tarea"
    return [_header_Admin % globals(), _asignar % (_inv, _proy, _asist), _footer]

def enable(course_id):
    db = database()
    if session.get('login', 0) == 0:
        return redirect(url_for('login'))
    else:
        db.course_enable(session['user_id'], course_id, 1)
        return redirect(url_for('home'))

def sql2json():
    offlinedb = 'instant_info_dq'
    third_from = 'dadi'
    cinemasql = ('SELECT third_id, third_from, cinemas_id, name FROM '
                 + offlinedb + '.t_movie_poi WHERE cinemas_id != 0 '
                 "AND third_from LIKE '" + third_from + "' limit 1")
    logging.debug('cinema sql is ' + cinemasql)
    try:
        db = database.database('local').get_connection()
        curs = db.cursor()
        curs.execute(cinemasql)
        rows = curs.fetchall()
        desc = curs.description
        # Pair each row with the column names from the cursor description.
        records = [dict(zip([col[0] for col in desc], row)) for row in rows]
        for content in records:
            print(content)
        curs.close()
        db.close()
    except MySQLdb.Error as e:
        logging.error('mysql error msg is ' + str(e))

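# MySQLdb can also return dict rows directly, which removes the need for the
# zip over cursor.description above; a sketch assuming the connection `db`
# from sql2json and the standard MySQLdb.cursors module:
import MySQLdb.cursors
curs = db.cursor(MySQLdb.cursors.DictCursor)  # fetchall() now yields dicts
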
def switchoff():
    db = database()
    if session.get('login', 0) == 0:
        return redirect(url_for('login'))
    else:
        db.switch(session['user_id'], -1)
        return redirect(url_for('home'))

def file_disable(course_id):
    db = database()
    if session.get('login', 0) == 0:
        return redirect(url_for('login'))
    else:
        db.file_enable(session['user_id'], course_id, 0)
        return redirect('/course/' + course_id)

def home():
    db = database()
    if session.get('login', 0) == 0:
        return redirect(url_for('login'))
    else:
        user = db.get_user(session['user_id'])
        courses = db.get_courses(session['user_id'])
        return render('home.html', user=user, courses=courses)

def course(course_id):
    db = database()
    if session.get('login', 0) == 0:
        return redirect(url_for('login'))
    else:
        course = db.get_course_from_id(session['user_id'], course_id)
        homework = db.get_homework(session['user_id'], course_id)
        return render('course.html', course=course, homework=homework)

def workingpaper(self):
    database = db.database("basedatosCAP.db")
    investigadores = database.getNames("investigador")
    _inv = ""
    for x in investigadores:
        _inv = _inv + """<option value = "%d"> %s</option>""" % (
            database.getId("investigador", x), x)
    return [_header % ("Crear nuevo working paper"), _wp % _inv, _footer]

def __init__(self, ws):
    ws.url_map = Map([
        Rule('/api/disk/info', endpoint='disk'),
        Rule('/api/database/info', endpoint='database'),
        Rule('/api/query/top/<count>', endpoint='top'),
        Rule('/', endpoint='index'),
    ])
    self.db = database()

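# A Map by itself does not serve requests; the usual Werkzeug companion is a
# WSGI dispatcher like the sketch below. Assumptions: the map ends up on the
# serving object as self.url_map, and handlers are named on_<endpoint>
# (on_disk, on_database, on_top, on_index).
from werkzeug.wrappers import Request
from werkzeug.exceptions import HTTPException

def wsgi_app(self, environ, start_response):
    request = Request(environ)
    adapter = self.url_map.bind_to_environ(environ)
    try:
        endpoint, values = adapter.match()
        response = getattr(self, 'on_' + endpoint)(request, **values)
    except HTTPException as e:
        response = e  # HTTPException is itself a valid WSGI response
    return response(environ, start_response)
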
def update_tests():
    db = database()
    for filepath in TESTS_DIR.walkfiles('*.json'):
        with open(filepath) as f:
            print("Reading {0}".format(filepath))
            data = f.read()
        json_dict = json.loads(data)
        db['tests'].update({'name': json_dict['name']}, json_dict, upsert=True)

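# If db['tests'] is a pymongo collection, update(..., upsert=True) is the
# legacy pre-3.0 API; the modern equivalent (assuming pymongo >= 3 is
# available) is:
db['tests'].replace_one({'name': json_dict['name']}, json_dict, upsert=True)
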
def spider_closed(self, spider):
    db = database.database()
    keys = self.crawler.stats.get_stats().keys()
    for eachKey in keys:
        if "exception_type_count" in eachKey:
            items = eachKey.split("/")
            db.log(spider.source, items[-1])
    db.close()

import re
import json
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pylab
from datetime import datetime, timedelta
from database import database

dat = database()

############# get graphs
# dat.tNOC()
# dat.tNOU()
# dat.tNOP()
# dat.tNOCPU()   # plot
# dat.aNOCPU()
# dat.aNOCPP()
# dat.sDOCPU()
# dat.tNOCPP()   # plot
# dat.nOCPED()   # plot
# dat.nOCPH()    # plot
# dat.nODUPP()   # plot
# dat.nODUCPH()  # plot

plt.show()
dat.close()

def fill_all_posX_posY():
    """Read the longitude, latitude, calibration center and north values,
    calculate posX and posY, and fill them in the database."""
    db = database.database()
    datetime_group = db.get_field_time_interval("groups", "Datetime",
                                                "1940-03-01 00:00",
                                                "2018-05-31 23:00")
    for el in set(datetime_group):
        try:
            calibrated = db.get_field_datetime("drawings", "Calibrated", el)[0]
            if calibrated > 0:
                longitude = db.get_field_datetime("groups", "Dipole2Long", el)
                latitude = db.get_field_datetime("groups", "Dipole2Lat", el)
                zurich = db.get_field_datetime("groups", "Zurich", el)
                x_center = db.get_field_datetime("calibrations", "CenterX", el)[0]
                y_center = db.get_field_datetime("calibrations", "CenterY", el)[0]
                x_north = db.get_field_datetime("calibrations", "NorthX", el)[0]
                y_north = db.get_field_datetime("calibrations", "NorthY", el)[0]
                angle_P = db.get_field_datetime("drawings", "AngleP", el)[0]
                angle_B0 = db.get_field_datetime("drawings", "AngleB", el)[0]
                angle_L0 = db.get_field_datetime("drawings", "AngleL", el)[0]
                height = get_height_from_image_lst(archdrawing_path, el)
                center = coordinates.Cartesian(x_center, y_center)
                north = coordinates.Cartesian(x_north, y_north)
                for group_el in range(len(longitude)):
                    if zurich[group_el] in ['B', 'C', 'D', 'E', 'F', 'G', 'X']:
                        longitude_group = longitude[group_el]
                        latitude_group = latitude[group_el]
                        theta = (angle_L0 * math.pi / 180.) - longitude_group
                        phi = math.pi / 2 - latitude_group  # colatitude
                        (x_upper, y_upper, z_upper) = \
                            coordinates.cartesian_from_HGC_upper_left_origin(
                                x_center, y_center, x_north, y_north,
                                longitude_group, latitude_group,
                                angle_P, angle_B0, angle_L0, height)
                        db.write_field_datetime_group(
                            'groups', 'Dipole2_PosX',
                            "{:.0f}".format(x_upper), el, group_el)
                        db.write_field_datetime_group(
                            'groups', 'Dipole2_PosY',
                            "{:.0f}".format(y_upper), el, group_el)
                        # Check that the inverse calculation recovers the same
                        # latitude and longitude:
                        # long2, lat2 = coordinates.heliographic_from_drawing(
                        #     x_center, height - y_center,
                        #     x_north, height - y_north,
                        #     x_upper, height - y_upper,
                        #     angle_P, angle_B0, angle_L0)
        except IndexError:
            print("there is an index error for the date: {}".format(el))

import logging

from database import database
from qiwi import qiwi
from telegram import InlineKeyboardButton, InlineKeyboardMarkup
from telegram.ext import (Updater, CommandHandler, CallbackQueryHandler,
                          ConversationHandler, PicklePersistence)

logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO)
logger = logging.getLogger(__name__)

# Conversation states.
MENU, CHOOSING, LOOKING, BUY, KEY, CHECK, LOOKING_KEYS = range(7)

db = database()
payments = qiwi()

menu_keyboard = [
    # "What is there to go to?"
    [InlineKeyboardButton("На что можно сходить?", callback_data='catalog')],
    # "My purchases"
    [InlineKeyboardButton("Мои покупки", callback_data='purchases')],
    # "Reviews" (the url is a placeholder reading "coming soon")
    [InlineKeyboardButton("Отзывы", url="скоро добавим")],
    # "Support"
    [InlineKeyboardButton("Поддержка", callback_data='he')],
]
# Note: one_time_keyboard is a ReplyKeyboardMarkup option; inline keyboards
# do not take it, so it is dropped here.
menu_markup = InlineKeyboardMarkup(menu_keyboard)


def start(update, context):
    # "Main menu"
    update.message.reply_text("Главное меню", reply_markup=menu_markup)
    return MENU

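# The states and PicklePersistence import above suggest a ConversationHandler;
# a minimal, hypothetical wiring sketch for python-telegram-bot v12. TOKEN and
# menu_router are assumptions, not part of the original module.
def main():
    updater = Updater(TOKEN, use_context=True,
                      persistence=PicklePersistence(filename='bot.pickle'))
    conv = ConversationHandler(
        entry_points=[CommandHandler('start', start)],
        states={MENU: [CallbackQueryHandler(menu_router)]},  # menu_router: assumed
        fallbacks=[CommandHandler('start', start)],
        name='shop', persistent=True)
    updater.dispatcher.add_handler(conv)
    updater.start_polling()
    updater.idle()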