def test_blast_search_one_hit(query_seq, blast_hsp_one_hit):
    """One-hit BLAST search with a plain sequence query.

    Runs ``blast.search_one_hit`` against the shared ``blast_db``, compares
    the returned HSPs with the expected fixture, and records the wall-clock
    run time in the module-global ``run_time_one_hit``.
    """
    global run_time_one_hit
    run_time_one_hit = 9999  # sentinel: stays 9999 if the search fails
    check_init()
    passed = True
    results = []
    run_time_t1 = ttime()
    # BUG FIX: the search call had been commented out (wrapped in a string
    # literal), leaving ``results`` undefined and the test broken; restored.
    try:
        results = blast.search_one_hit(blast_db, query=query_seq, T=13, X=5, S=30)
    except Exception as e:
        print(e)
        passed = False
    run_time_t2 = ttime()
    assert passed, 'Error in Blast.search_one_hit(sequence)'
    print(len(blast_hsp_one_hit))
    print(len(results))
    compare_blast_results(blast_hsp_one_hit, results, 'one-hit, sequence')
    run_time_one_hit = int(run_time_t2 - run_time_t1)
def test_blast_search_one_hit_with_pssm(query_pssm, blast_hsp_one_hit_pssm):
    """One-hit BLAST search driven by a PSSM query; compares the hits with
    the expected fixture and records the wall-clock run time in the
    module-global ``run_time_one_hit_pssm``.
    """
    global run_time_one_hit_pssm
    run_time_one_hit_pssm = 9999  # sentinel: stays 9999 if the search fails
    check_init()
    passed = True
    results = []
    run_time_t1 = ttime()
    # BUG FIX: the search call had been commented out (wrapped in a string
    # literal), leaving ``results`` undefined and the test broken; restored.
    try:
        results = blast.search_one_hit(blast_db, pssm=query_pssm, T=13, X=5, S=30)
    except Exception as e:
        print(e)
        passed = False
    run_time_t2 = ttime()
    assert passed, 'Error in Blast.search_one_hit(pssm)'
    compare_blast_results(blast_hsp_one_hit_pssm, results, 'one-hit, pssm')
    run_time_one_hit_pssm = int(run_time_t2 - run_time_t1)
def test_blast_search_two_hit_with_pssm(query_pssm, blast_hsp_two_hit_pssm):
    """Two-hit BLAST search driven by a PSSM query; compares the hits with
    the expected fixture and records the wall-clock run time in the
    module-global ``run_time_two_hit_pssm``.
    """
    global run_time_two_hit_pssm
    run_time_two_hit_pssm = 9999  # sentinel: stays 9999 if the search fails
    check_init()
    passed = True
    run_time_t1 = ttime()
    try:
        results = blast.search_two_hit(blast_db, pssm=query_pssm, T=11, X=5, S=30, A=40)
    except Exception as e:
        # CONSISTENCY FIX: surface the failure reason like the other BLAST
        # tests instead of failing silently. (The redundant
        # ``finally: run_time... = 9999`` was dropped — the sentinel is
        # already set above.)
        print(e)
        passed = False
    run_time_t2 = ttime()
    assert passed, 'Error in Blast.search_two_hit(pssm)'
    compare_blast_results(blast_hsp_two_hit_pssm, results, 'two-hit, pssm')
    run_time_two_hit_pssm = int(run_time_t2 - run_time_t1)
def test_blast_search_two_hit(query_seq, blast_hsp_two_hit):
    """Two-hit BLAST search with a plain sequence query; compares the hits
    with the expected fixture and records the wall-clock run time in the
    module-global ``run_time_two_hit``.
    """
    global run_time_two_hit
    run_time_two_hit = 9999  # sentinel: stays 9999 if the search fails
    check_init()
    passed = True
    run_time_t1 = ttime()
    try:
        results = blast.search_two_hit(blast_db, query=query_seq, T=11, X=5, S=30, A=40)
    except Exception as e:
        # CONSISTENCY FIX: surface the failure reason like the other BLAST
        # tests instead of failing silently. (The redundant
        # ``finally: run_time... = 9999`` was dropped — the sentinel is
        # already set above.)
        print(e)
        passed = False
    run_time_t2 = ttime()
    assert passed, 'Error in Blast.search_two_hit(sequence)'
    compare_blast_results(blast_hsp_two_hit, results, 'two-hit, sequence')
    run_time_two_hit = int(run_time_t2 - run_time_t1)
def udprint(self, cmd, data=None):
    """Send one framed command (plus optional payload) to the LCD device.

    Opens the frame with an STX byte, waits for a 'B' (begin) acknowledgement,
    writes the command and payload, then waits for an 'F' (finish)
    acknowledgement. Both waits share a single ~30 second budget.

    :param cmd: command byte understood by the device.
    :param data: optional payload — a list/tuple of ints, or a string whose
        characters are sent as their ordinals.
    :return: True on success, False when either acknowledgement times out.
    """
    now = ttime()
    self.lcd.write(STX)
    ch = None
    # Poll for the begin ack; read() may raise while the device is silent.
    while ch != ord("B") and (ttime() - now) < 30:
        try:
            ch = self.lcd.read()
        except:
            ch = None
    if (ttime() - now) > 30:
        # NOTE(review): this uses strict '>' while the loop uses '< 30', so a
        # loop exit at exactly 30 s could slip past the check — confirm intent.
        return False
    self.lcd.write(cmd)
    if data is not None:
        self.lcd.write(len(data))
        for elem in data:
            # Lists/tuples are assumed to hold ints already; strings need ord().
            ch = elem if isinstance(data, (list, tuple)) else ord(elem)
            self.lcd.write(ch)
    ch = None
    # Wait for the finish ack within the remainder of the 30 s budget.
    while ch != ord("F") and (ttime() - now) < 30:
        try:
            ch = self.lcd.read()
        except:
            ch = None
    if (ttime() - now) > 30:
        return False
    return True
def wrapper(self, get_func, func):
    '''Wrap a GUI action with a timer and progress-label feedback.

    Turns the progress label red while running, yellow on known failures
    (invalid numeric input, pyautogui fail-safe abort), green on success,
    and prints the elapsed time on the success path.

    :param get_func: callable invoked with ``func`` to collect its inputs.
    :param func: the action to execute.
    '''
    timer = ttime()
    self.l_progress.configure(text='', bg='red')  # red = in progress
    self.l_progress.update()
    try:
        get_func(func)
        func()
    # if the entries input aren't numbers
    except ValueError:
        self.l_progress.configure(text='The numbers are invalid!', bg='yellow')
        # Clear the warning back to green after one second.
        self.master.after(
            1000, lambda: self.l_progress.configure(text='', bg='green'))
        self.delete()
    # if you move the mouse to coordinate 0,0 (pyautogui fail-safe)
    except pag.FailSafeException:
        print('!!ABORTED!!')
        self.l_progress.configure(text='!!ABORTED!!', bg='yellow')
    # if no error exist
    else:
        self.l_progress.configure(text='', bg='green')
        print('\tTime = {} seconds\n'.format(ttime() - timer))
        self.delete()
def get_parallels(uid):
    """Return the parallel entries recorded for *uid*.

    Scans the global ``parallels`` XML tree for every ``ll`` element whose
    ``sutta`` attribute equals *uid* and collects, for each sibling in the
    same group, a ``(sutta, partial, note)`` tuple.
    """
    started = ttime()
    collected = []
    xpath = 'parallel/ll[@sutta="{}"]'.format(uid)
    for node in parallels.findall(xpath):
        group = node.getparent()
        for sibling in group:
            if sibling is node:
                continue
            collected.append((sibling.get('sutta'),
                              group.get('partial', False),
                              group.get('note', None)))
    print('Parallels generation for {} took {} s'.format(uid, ttime() - started))
    return collected
def get_parallels_accl(uid):
    """Accelerated variant of ``get_parallels``: looks the ``ll`` elements up
    in the prebuilt ``uid_map`` index instead of scanning the XML tree.

    Returns the same ``(sutta, partial, note)`` tuples, in the same order.
    """
    began = ttime()
    hits = []
    for node in uid_map[uid]:
        group = node.getparent()
        for other in group:
            if other is node:
                continue
            hits.append((other.get('sutta'),
                         group.get('partial', False),
                         group.get('note', None)))
    print('Parallels generation for {} took {} s'.format(uid, ttime() - began))
    return hits
def predict():
    """Run the 14:10 prediction batch, commit the DB session, and return a
    plain-text response reporting the elapsed wall time."""
    t0 = ttime()
    Manager.from_timestamp(time(14, 10)).run()
    db.session.commit()
    elapsed = ttime() - t0
    return Response('done (%fs)' % elapsed, mimetype='text/plain')
def get_parallels(uid):
    """Find all parallels recorded for *uid* in the global ``parallels`` tree.

    Returns a list of ``(sutta, partial, note)`` tuples drawn from the
    siblings of each matching ``ll`` node.
    """
    t0 = ttime()
    matches = parallels.findall('parallel/ll[@sutta="{}"]'.format(uid))
    result = [
        (peer.get('sutta'), parent.get('partial', False), parent.get('note', None))
        for node in matches
        for parent in (node.getparent(),)
        for peer in parent
        if peer is not node
    ]
    print('Parallels generation for {} took {} s'.format(uid, ttime() - t0))
    return result
def get_parallels_accl(uid):
    """Index-accelerated lookup of the parallels for *uid*.

    Uses the prebuilt ``uid_map`` instead of an XPath scan; yields the same
    ``(sutta, partial, note)`` tuples as ``get_parallels``.
    """
    t0 = ttime()
    result = [
        (peer.get('sutta'), parent.get('partial', False), parent.get('note', None))
        for node in uid_map[uid]
        for parent in (node.getparent(),)
        for peer in parent
        if peer is not node
    ]
    print('Parallels generation for {} took {} s'.format(uid, ttime() - t0))
    return result
def updateLastReported(self):
    """Return null. Fire ``report()`` and refresh the last-reported
    timestamp once the configured report interval has elapsed; otherwise
    do nothing."""
    current = int(ttime())
    elapsed = current - self.lastReported
    if elapsed > self.reportInterval:
        self.report()
        self.lastReported = current
    return
def make_snapshot():
    """Snapshot every ``*.pickle`` state file into a timestamped directory.

    Skips entirely when snapshotting is disabled (MIN_SNAPSHOT_SECONDS == 0),
    when the newest snapshot is younger than MIN_SNAPSHOT_SECONDS, or when a
    directory for this second already exists. Afterwards prunes snapshots
    beyond MAX_SNAPSHOTS.
    """
    if plasma_config["MIN_SNAPSHOT_SECONDS"] == 0:
        return  # snapshotting disabled by configuration
    timestamp = ttime()
    max_snapshot = get_max_snapshot()
    # Rate limit: don't snapshot again within MIN_SNAPSHOT_SECONDS.
    if max_snapshot and timestamp - int(
            max_snapshot) < plasma_config["MIN_SNAPSHOT_SECONDS"]:
        # print("snapshot skip")
        return
    snapshot_dir = os.path.join(plasma_config["PICKLE_DIR"], str(int(timestamp)))
    if not os.path.exists(snapshot_dir):
        os.mkdir(snapshot_dir)
    else:
        # print("snapshot skip")
        return  # a snapshot for this second already exists
    # Copy every pickle file into the new snapshot directory.
    for file_name in os.listdir(plasma_config["PICKLE_DIR"]):
        file_path = os.path.join(plasma_config["PICKLE_DIR"], file_name)
        if os.path.isfile(file_path) and file_name.endswith(".pickle"):
            shutil.copy(file_path, os.path.join(snapshot_dir, file_name))
    print("snapshot created")
    # Retention: drop the oldest snapshots beyond MAX_SNAPSHOTS.
    # NOTE(review): assumes list_snapshots() returns oldest-first — confirm.
    snapshots = list_snapshots()
    if len(snapshots) > plasma_config["MAX_SNAPSHOTS"]:
        for snapshot in snapshots[:len(snapshots) - plasma_config["MAX_SNAPSHOTS"]]:
            shutil.rmtree(os.path.join(plasma_config["PICKLE_DIR"], snapshot),
                          ignore_errors=True)
def __start__(
    self,
    prefix = None,
    debug = 3,
    host = "ws://",
    port = 80,
    password = "",
    dummy = "",
    proxy = ["", 0, "", ""]
):
    """Plugin start hook: record configuration and schedule the websocket
    subscriber task.

    :param prefix: event prefix; defaults to the plugin name when None.
    :param debug: verbosity level.
    :param host: websocket scheme/host fragment used to build the URL.
    :param port: websocket port.
    :param password: plain string or an ``eg.Password`` instance.
    :param dummy: unused placeholder parameter.
    :param proxy: proxy settings as [host, port, user, password].

    NOTE(review): the mutable default for ``proxy`` is shared across calls —
    harmless as long as callers never mutate it; confirm.
    """
    prefix = self.name if prefix is None else prefix
    self.info.eventPrefix = prefix
    self.prefix = prefix
    self.debug = debug
    self.proxy = proxy
    self.connFlag = False
    self.msgWait = DEFAULT_WAIT
    self.lastMessage = ttime()  # watchdog baseline
    self.queryData = {}
    self.debug = debug  # NOTE(review): duplicate assignment — harmless
    _ = eg.scheduler.AddTask(1.0, self.establishSubscriber)
    self.url = self.normalizeURL(host, port)
    self.port = port
    # Wrap plain-text passwords in eg.Password; store base64-encoded.
    if not isinstance(password, eg.Password):
        passw = eg.Password(None)
        passw.Set(password)
    else:
        passw = password
    self.password = b64encode(passw.Get())
def submit_block(self, block):
    """Validate and submit a client-provided, RLP/hex-encoded block.

    Checks that the block matches the current block's transaction merkle
    root and carries a valid authority signature; empty blocks are ignored
    and blocks too close to their earliest transaction expiry are dropped.

    :param block: hex string of the RLP-encoded Block.
    :raises InvalidBlockMerkleException: merkle root mismatch.
    :raises InvalidBlockSignatureException: missing/foreign signature.
    """
    block = rlp.decode(utils.decode_hex(block), Block)
    if block.merklize_transaction_set(
    ) != self.current_block.merklize_transaction_set():
        raise InvalidBlockMerkleException(
            'input block merkle mismatch with the current block')
    valid_signature = block.sig != b'\x00' * 65 and block.sender == self.authority
    if not valid_signature:
        raise InvalidBlockSignatureException('failed to submit block')
    if len(block.transaction_set) == 0:
        print("no transaction in block, do nothing")
        return
    if (int(ttime()) + plasma_config["BLOCK_EXPIRE_BUFFER_SECONDS"] >
            block.min_expire_timestamp):
        # raise BlockExpiredException('failed to submit block')
        print('block expired, drop it!')
        self.current_block = Block()
        self.save()
        # BUG FIX: an expired block must not fall through to submission;
        # without this return the dropped block was still submitted below.
        return
    print("submit block ...")
    self._submit_block(block)
def get_next_reference(self):
    """Build the next order reference as ``NNNNN/<time fragment>``.

    The numeric part is max(SaleOrder.id) + 1, falling back to 1 when the
    table is empty; it is not guaranteed unique, only "good enough".
    """
    try:
        # Very dirty hack, but kinda works for reference (i.e. it doesn't have to be unique)
        next_ref = SaleOrder.objects.all().aggregate(models.Max('id'))['id__max'] + 1
    except Exception:
        # ROBUSTNESS FIX: narrowed from a bare ``except:`` so SystemExit /
        # KeyboardInterrupt are no longer swallowed. An empty table yields
        # None + 1 → TypeError, which lands here.
        next_ref = 1
    full_ref = '%.5d/%s' % (next_ref, str(str(ttime()*10)[8:-2]))
    return full_ref
def safe_sleep(self, time, increment=0.01):
    """Same as time.sleep except the tk window will still respond to user
    inputs.

    Arguments:
    time - the time to sleep for in seconds.
        (NOTE(review): this parameter shadows any module-level ``time``
        name inside this function.)
    increment - the time to wait between redrawing the output window

    Side Effects:
    Pauses execution (via time.sleep)
    """
    count = time/increment  # number of loops required
    while count > 0:
        start = ttime()
        self.tk.update()
        count -= 1
        # sleeps for increment minus GUI processing time; this helps keep
        # safe_sleep consistent.
        # NOTE(review): the expression ADDS the elapsed time to increment —
        # subtracting (increment - (ttime() - start)) looks intended; confirm
        # against the behavior of the external pos() helper.
        sleep(pos(ttime() - start + increment))
def establishSubscriber(self):
    """Create the websocket client (if not already present), start its
    reader thread, and (re)arm the watchdog timer."""
    if self.wsC:
        return  # already connected/connecting
    self.wsC = WebSocketClient(self.url, self)
    self.ct = Thread(target = self.wsC.start)
    self.ct.start()
    self.lastMessage = ttime()  # reset the watchdog baseline
    self.stopWatchdog()
    self.watchdog = eg.scheduler.AddTask(0.01, self.watcher)
def get_next_reference(self):
    """Build the next order reference as ``NNNNN/<time fragment>``.

    The numeric part is max(SaleOrder.id) + 1, falling back to 1 when the
    table is empty; it is not guaranteed unique, only "good enough".
    """
    try:
        # Very dirty hack, but kinda works for reference (i.e. it doesn't have to be unique)
        next_ref = SaleOrder.objects.all().aggregate(
            models.Max('id'))['id__max'] + 1
    except Exception:
        # ROBUSTNESS FIX: narrowed from a bare ``except:`` so SystemExit /
        # KeyboardInterrupt are no longer swallowed. An empty table yields
        # None + 1 → TypeError, which lands here.
        next_ref = 1
    full_ref = '%.5d/%s' % (next_ref, str(str(ttime() * 10)[8:-2]))
    return full_ref
def watcher(self):
    """Watchdog task: reconnect the websocket when no message has arrived
    within ``msgWait`` seconds, then reschedule itself in 5 s.

    On each missed deadline the wait doubles (capped at 600000) as an
    exponential backoff.
    """
    if not self.info.isStarted:
        return  # plugin stopped: let the watchdog die
    if (ttime() - self.lastMessage) > self.msgWait:
        self.Log(self.text.reconnect, 2)
        self.msgWait = min(600000, self.msgWait * 2)  # exponential backoff
        self.refreshWebSocket()
    self.stopWatchdog()
    self.watchdog = eg.scheduler.AddTask(5.0, self.watcher)
def work(self):
    """Finalize matured exits: when the next queued exit on the root chain
    is past its ``exitable_at`` time, call ``finalizeExits``."""
    try:
        utxo_pos, exitable_at = self.root_chain.call().getNextExit()
    except Exception as e:
        return  # no pending exit (or call failed): nothing to do
    if ttime() > exitable_at:
        send_transaction_sync(self.root_chain.web3,
                              self.root_chain.functions.finalizeExits(),
                              options={'gas': 300000})
        # self.root_chain.transact({'from': '0x' + self.authority.hex(), "gas": 300000}).finalizeExits()
        print("finalize exit, triggered by %s %s" % (utxo_pos, exitable_at))
def __init__(self, blknum1, txindex1, oindex1,
             blknum2, txindex2, oindex2,
             newowner1, contractaddress1, amount1, tokenid1,
             newowner2, contractaddress2, amount2, tokenid2,
             fee=DEFAULT_FEE, expiretimestamp=None, salt=None,
             sig1=b'\x00' * 65, sig2=b'\x00' * 65):
    """Two-input / two-output Plasma transaction.

    Inputs are addressed by (blknum, txindex, oindex) with a 65-byte
    signature each (all-zero = unsigned). Outputs carry owner, token
    contract, amount and token id.

    :param fee: transaction fee (defaults to DEFAULT_FEE).
    :param expiretimestamp: expiry epoch; defaults to now + DEFAULT_DELAY_SECONDS.
    :param salt: 13-digit random value when not supplied.
    """
    if expiretimestamp is None:
        expiretimestamp = int(ttime()) + DEFAULT_DELAY_SECONDS
    if salt is None:
        salt = randint(1000000000000, 9999999999999)  # 13-digit random salt
    # Input 1
    self.blknum1 = blknum1
    self.txindex1 = txindex1
    self.oindex1 = oindex1
    self.sig1 = sig1
    # Input 2
    self.blknum2 = blknum2
    self.txindex2 = txindex2
    self.oindex2 = oindex2
    self.sig2 = sig2
    # Outputs (addresses normalized to canonical binary form)
    self.newowner1 = utils.normalize_address(newowner1)
    self.contractaddress1 = utils.normalize_address(contractaddress1)
    self.amount1 = amount1
    self.tokenid1 = tokenid1
    self.newowner2 = utils.normalize_address(newowner2)
    self.contractaddress2 = utils.normalize_address(contractaddress2)
    self.amount2 = amount2
    self.tokenid2 = tokenid2
    self.fee = fee
    self.expiretimestamp = expiretimestamp
    self.salt = salt
    # Confirmation signatures and spend flags start unset.
    self.confirmation1 = None
    self.confirmation2 = None
    self.spent1 = False
    self.spent2 = False
def vinaya_ll(uid):
    """Collect the parallel uids for a vinaya text.

    When every parallel group for *uid* has the same length, the groups are
    zipped column-wise and each column contributes its distinct sutta (or
    division) uids, in order. The unequal-length branch was never
    implemented and currently contributes nothing.
    """
    start = ttime()
    out = []
    #groups = parallels.findall('parallel/ll[@sutta="{}"]/..'.format(uid))
    groups = [e.getparent() for e in uid_map[uid]]
    if len(set(len(g) for g in groups)) == 1:
        for elements in zip(*groups):
            seen = set()
            for e in elements:
                # BUG FIX: the original rebound the *uid* parameter here, so
                # the timing message below reported the last member's id
                # instead of the requested uid.
                member_uid = e.get('sutta') or e.get('division')
                if member_uid in seen:
                    continue
                seen.add(member_uid)
                out.append(member_uid)
    else:
        seen = set()
        # TODO: handling for unequal-length groups was never implemented.
        for elements in itertools.chain(*groups):
            pass
    print('Parallels generation for {} took {} s'.format(uid, ttime() - start))
    return out
def vinaya_ll(uid):
    """Collect the parallel uids for a vinaya text.

    When every parallel group for *uid* has the same length, the groups are
    zipped column-wise and each column contributes its distinct sutta (or
    division) uids, in order. The unequal-length branch was never
    implemented and currently contributes nothing.
    """
    start = ttime()
    out = []
    #groups = parallels.findall('parallel/ll[@sutta="{}"]/..'.format(uid))
    groups = [e.getparent() for e in uid_map[uid]]
    if len(set(len(g) for g in groups)) == 1:
        for elements in zip(*groups):
            seen = set()
            for e in elements:
                # BUG FIX: the original rebound the *uid* parameter here, so
                # the timing message below reported the last member's id
                # instead of the requested uid.
                member_uid = e.get('sutta') or e.get('division')
                if member_uid in seen:
                    continue
                seen.add(member_uid)
                out.append(member_uid)
    else:
        seen = set()
        # TODO: handling for unequal-length groups was never implemented.
        for elements in itertools.chain(*groups):
            pass
    print('Parallels generation for {} took {} s'.format(uid, ttime() - start))
    return out
def activenot(ques, yes, no, play, time, stat):
    """Render the active-question screen or the play button.

    While the game is active: draw the question and the yes/no buttons,
    refresh the countdown, and end the game once the deadline has passed;
    otherwise draw only the play button.
    """
    if not stat.game_active:
        play.blitme()
        return
    ques.show_q()
    yes.blitme()
    no.blitme()
    stat.current = ttime()
    stat.timeleft = stat.endtime - stat.current
    if stat.timeleft <= 0:
        stat.game_active = False  # countdown expired: stop the round
    else:
        time.prep()
        time.draw()
def on_message(self, _, m):
    """Websocket message handler: parse the JSON payload, feed the
    watchdog, answer pending synchronous queries, and dispatch device
    commands (password/authorized/configfile/pinlist/change)."""
    if not self.info.isStarted:
        if self.wsC:
            self.wsC.close()
    if m is None:
        return
    try:
        m = loads(m)
        if 'command' in m and m['command'] == 'configfile':
            # Strip credentials from the config payload before logging.
            del m['apwd']
            del m['pswd']
        self.Log(self.text.wsMssg % repr(m), 5)
        self.lastMessage = ttime()  # feed the watchdog
        self.msgWait = DEFAULT_WAIT  # reset the backoff
    except:
        eg.PrintTraceback()
        self.refreshWebSocket()
        return
    if "token" in m:
        # Reply to a pending synchronous query: stash the payload under the
        # token and wake the waiting thread.
        token = m['token']
        event = self.queryData[token]
        del m["token"]
        self.queryData[token] = m
        SetEvent(event)
        return
    if 'command' in m:
        cmd = m['command']
        if cmd == 'nop':
            pass
        elif cmd == 'password':
            # NOTE(review): this format string contains no %s placeholder,
            # so applying '%' with self.password raises TypeError — the
            # password field looks redacted to '******' in this copy of the
            # source; confirm against the original.
            self.wsC.send("{'command':'password','password':'******'}" % self.password)
        elif cmd == 'authorized':
            self.wsC.send("{'command':'getconf'}")
            self.TriggerEvent(self.text.config)
        elif cmd == 'configfile':
            self.gpios = {}
            for item in m['gpios']:
                if item[1] and item[0] != m['wled']:
                    self.gpios[item[0]] = (item[2], item[3])  # title, out
            self.wsC.send("{'command':'pinlist'}")
        elif cmd == 'pinlist':
            pass
        elif cmd == 'change':
            # Pin state change: event suffix is the pin title, plus the
            # symbolic state for everything but the analog pin A0.
            suffix = m['title']
            if m['id'] != 'A0':
                suffix += ".%s" % PINSTATES[m['value']]
            self.TriggerEvent(suffix, payload=m['value'])
        else:
            eg.PrintNotice(unknmsg % (self.info.eventPrefix, repr(m)))
def test_blast_search_two_hit(query_seq, blast_hsp_two_hit):
    """Two-hit BLAST search with a sequence query (debug variant: also
    prints result counts and the first few expected HSPs missing from the
    actual results)."""
    global run_time_two_hit
    run_time_two_hit = 9999  # sentinel while the search runs
    check_init()
    passed = True
    run_time_t1 = ttime()
    try:
        results = blast.search_two_hit(blast_db, query=query_seq, T=11, X=5, S=30, A=40)
    except Exception as e:
        print(e)
        passed = False
    finally:
        # NOTE(review): runs on success too; harmless only because the real
        # run time is assigned at the end — confirm this is intentional.
        run_time_two_hit = 9999
    run_time_t2 = ttime()
    assert passed, 'Error in Blast.search_two_hit(sequence)'
    print(len(blast_hsp_two_hit))
    print(len(results))
    # Debug aid: expected HSPs that are missing from the actual results.
    diff = [l for l in blast_hsp_two_hit if l not in results]
    print(len(diff))
    print(diff[0:10])
    compare_blast_results(blast_hsp_two_hit, results, 'two-hit, sequence')
    run_time_two_hit = int(run_time_t2 - run_time_t1)
def submit_curblock(self):
    """Sign and submit the current in-memory block when it holds
    transactions; blocks too close to their earliest transaction expiry are
    dropped and replaced with a fresh block."""
    block = self.current_block
    if len(block.transaction_set) > 0:
        if (int(ttime()) + plasma_config["BLOCK_EXPIRE_BUFFER_SECONDS"] >
                block.min_expire_timestamp):
            # Too close to the earliest tx expiry: discard the whole block.
            print('block expired, drop it')
            self.current_block = Block()
            self.blocks[self.current_block_number] = self.current_block
            self.save()
        else:
            block.sign(plasma_config["AUTHORITY_KEY"])
            block.merklize_transaction_set()
            print("submit block #%s" % self.current_block_number)
            self._submit_block(block)
def generate_docservice_url(request, doc_id, temporary=True, prefix=None):
    """Build a signed URL pointing at the docservice ``/get/<doc_id>`` endpoint.

    Temporary links embed a five-minute ``Expires`` stamp into the signed
    message; an optional *prefix* is prepended to the message and exposed as
    a ``Prefix`` query parameter.
    """
    signer = getattr(request.registry, "docservice_key", None)
    base = urlparse(request.registry.docservice_url)
    params = {}
    if temporary:
        expires = int(ttime()) + 300  # EXPIRES: five minutes from now
        params["Expires"] = expires
        message = "{}\0{}".format(doc_id, expires)
    else:
        message = doc_id
    if prefix:
        message = "{}/{}".format(prefix, message)
        params["Prefix"] = prefix
    params["Signature"] = quote(b64encode(signer.signature(message.encode("utf-8"))))
    params["KeyID"] = signer.hex_vk()[:8]
    path = "/get/{}".format(doc_id)
    return urlunsplit((base.scheme, base.netloc, path, urlencode(params), ""))
def generate_docservice_url(request, doc_id, temporary=True, prefix=None):
    '''Compose a signed download URL for *doc_id* on the docservice host.

    The signed message is the doc id, optionally suffixed with an expiry
    stamp (temporary links, valid five minutes) and prefixed with *prefix*.
    '''
    key = getattr(request.registry, 'docservice_key', None)
    location = urlparse(request.registry.docservice_url)
    query = {}
    message = doc_id
    if temporary:
        expires = int(ttime()) + 300  # EXPIRES
        message = "{}\0{}".format(doc_id, expires)
        query['Expires'] = expires
    if prefix:
        message = '{}/{}'.format(prefix, message)
        query['Prefix'] = prefix
    query['Signature'] = quote(b64encode(key.signature(message.encode("utf-8"))))
    query['KeyID'] = key.hex_vk()[:8]
    return urlunsplit((location.scheme, location.netloc,
                       '/get/{}'.format(doc_id), urlencode(query), ''))
def get_brand_wc_pay_request(body, out_trade_no, total_fee, ip, attach=""):
    """Build the signed JS-API payment request parameters for WeChat pay.

    :param body: product description shown to the payer.
    :param out_trade_no: merchant-side order number (unique, <=32 chars).
    :param total_fee: total amount as a string, in cents.
    :param ip: client IP, used as spbill_create_ip.
    :param attach: opaque extra data echoed back in the notification.
    :return: dict of parameters (package, paySign, ...) for the JS bridge.
    """
    # total_fee is a string, denominated in cents
    params = {}
    params["appId"] = settings.WXPAY_APPID
    params["timeStamp"] = "%.f" % ttime()
    params["nonceStr"] = random_str(13)
    package = {}
    package["bank_type"] = "WX"
    package["body"] = body  # product description
    package["attach"] = attach  # extra data, returned unchanged
    package["partner"] = settings.WXPAY_PARTNERID
    package[
        "out_trade_no"] = out_trade_no  # merchant order number, unique within the merchant system, <=32 chars
    package["total_fee"] = total_fee  # total order amount, in cents
    package["fee_type"] = 1
    package["notify_url"] = settings.get_notify_url()
    package["spbill_create_ip"] = ip
    package["time_start"] = ""
    package["time_expire"] = ""
    package["transport_fee"] = ""
    package["product_fee"] = ""
    package["goods_tag"] = ""
    package["input_charset"] = settings.INPUT_CHARSET
    # MD5-sign the package, then flatten it to the query-string form.
    package, packageprestr = params_filter(package)
    sign_value = build_mysign(packageprestr, settings.WXPAY_PARTNERKEY, sign_type="MD5")
    package = params_urlencode(package)
    package = params_to_string(package) + "&sign=" + sign_value
    params["package"] = package
    # paySign is a SHA1 over the lower-cased parameter names, appkey included.
    params["appkey"] = settings.WXPAY_PAYSIGNKEY
    lower_params = {}
    for key in params:
        lower_params[key.lower()] = params[key]
    lower_params, lower_paramsprestr = params_filter(lower_params)
    pay_sign = sha1(lower_paramsprestr).hexdigest()
    del params["appkey"]  # the signing key itself must not leave the server
    params["signType"] = "SHA1"
    params["paySign"] = pay_sign
    return params
def generate_docservice_url(request, doc_id, temporary=True, prefix=None):
    """Return a signed docservice download URL for *doc_id*.

    The message covered by the ed25519-style signature is the doc id,
    optionally suffixed with an expiry stamp (temporary links) and prefixed
    with *prefix*; the key id is the first 8 hex characters of the verify key.
    """
    signer = getattr(request.registry, "docservice_key", None)
    base = urlparse(request.registry.docservice_url)
    query = {}
    if temporary:
        expires = int(ttime()) + 300  # EXPIRES: five-minute validity window
        query["Expires"] = expires
        message = "{}\0{}".format(doc_id, expires)
    else:
        message = doc_id
    if prefix:
        message = "{}/{}".format(prefix, message)
        query["Prefix"] = prefix
    query["Signature"] = b64encode(signer.sign(message.encode()).signature)
    query["KeyID"] = signer.verify_key.encode(encoder=HexEncoder)[:8].decode()
    return urlunsplit((base.scheme, base.netloc,
                       "/get/{}".format(doc_id), urlencode(query), ""))
def _get_time(): precision = int(getenv('PBT_CAR_EXECTIME_PRECISION', 0)) now = ttime() execs = now - float(getenv('PBT_CAR_EXECTIME_SECS', now)) if precision > 0: subsecs = ".%s" % int((execs - int(execs)) * 10**precision) subsecs += '0' * (precision - len(subsecs) + 1) else: subsecs = '' hours = int(execs / 3600) mins = int((execs - hours) / 60) secs = int(execs - hours * 3600 - mins * 60) exectime = "%.2d:%.2d:%02d%s" % (hours, mins, secs, subsecs) return exectime
def get_brand_wc_pay_request(body, out_trade_no, total_fee, ip, attach=""):
    """Build the signed JS-API payment request parameters for WeChat pay.

    :param body: product description shown to the payer.
    :param out_trade_no: merchant-side order number (unique, <=32 chars).
    :param total_fee: total amount as a string, in cents.
    :param ip: client IP, used as spbill_create_ip.
    :param attach: opaque extra data echoed back in the notification.
    :return: dict of parameters (package, paySign, ...) for the JS bridge.
    """
    # total_fee is a string, denominated in cents
    params = {}
    params["appId"] = settings.WXPAY_APPID
    params["timeStamp"] = "%.f" % ttime()
    params["nonceStr"] = random_str(13)
    package = {}
    package["bank_type"] = "WX"
    package["body"] = body  # product description
    package["attach"] = attach  # extra data, returned unchanged
    package["partner"] = settings.WXPAY_PARTNERID
    package["out_trade_no"] = out_trade_no  # merchant order number, unique within the merchant system, <=32 chars
    package["total_fee"] = total_fee  # total order amount, in cents
    package["fee_type"] = 1
    package["notify_url"] = settings.get_notify_url()
    package["spbill_create_ip"] = ip
    package["time_start"] = ""
    package["time_expire"] = ""
    package["transport_fee"] = ""
    package["product_fee"] = ""
    package["goods_tag"] = ""
    package["input_charset"] = settings.INPUT_CHARSET
    # MD5-sign the package, then flatten it to the query-string form.
    package,packageprestr = params_filter(package)
    sign_value = build_mysign(packageprestr, settings.WXPAY_PARTNERKEY, sign_type = "MD5")
    package = params_urlencode(package)
    package = params_to_string(package) + "&sign=" + sign_value
    params["package"] = package
    # paySign is a SHA1 over the lower-cased parameter names, appkey included.
    params["appkey"] = settings.WXPAY_PAYSIGNKEY
    lower_params = {}
    for key in params:
        lower_params[key.lower()] = params[key]
    lower_params,lower_paramsprestr = params_filter(lower_params)
    pay_sign = sha1(lower_paramsprestr).hexdigest()
    del params["appkey"]  # the signing key itself must not leave the server
    params["signType"] = "SHA1"
    params["paySign"] = pay_sign
    return params
def generate_docservice_url(request, doc_id, temporary=True, prefix=None):
    '''Compose a signed docservice download URL for *doc_id*.

    Temporary links embed a five-minute expiry into the signed message; the
    key id exposed in the URL is the first 8 hex characters of the signer's
    verify key.
    '''
    key = getattr(request.registry, 'docservice_key', None)
    location = urlparse(request.registry.docservice_url)
    query = {}
    message = doc_id
    if temporary:
        expires = int(ttime()) + 300  # EXPIRES
        message = "{}\0{}".format(doc_id, expires)
        query['Expires'] = expires
    if prefix:
        message = '{}/{}'.format(prefix, message)
        query['Prefix'] = prefix
    raw_signature = key.sign(message.encode()).signature
    query['Signature'] = quote(b64encode(raw_signature))
    query['KeyID'] = key.verify_key.encode(encoder=HexEncoder)[:8].decode()
    return urlunsplit((location.scheme, location.netloc,
                       '/get/{}'.format(doc_id), urlencode(query), ''))
def deliver_notify(access_token, openid, transid, out_trade_no):
    """Notify WeChat that the order has been delivered.

    Posts a SHA1-signed delivery notification to DELIVER_NOTIFY_URL and
    returns the parsed JSON response.

    :param access_token: WeChat API access token (query-string credential).
    :param openid: payer's open id.
    :param transid: WeChat transaction id.
    :param out_trade_no: merchant-side order number.
    """
    url = DELIVER_NOTIFY_URL + "?access_token=" + access_token
    payload = {
        "appid": settings.WXPAY_APPID,
        "openid": openid,
        "transid": transid,
        "out_trade_no": out_trade_no,
        "deliver_timestamp": "%.f" % ttime(),
        "deliver_status": "1",
        "deliver_msg": "ok",
    }
    # app_signature covers the payload including the (secret) appkey.
    payload["appkey"] = settings.WXPAY_PAYSIGNKEY
    payload, payloadprestr = params_filter(payload)
    app_signature = sha1(payloadprestr).hexdigest()
    del payload["appkey"]  # never transmit the signing key itself
    payload["app_signature"] = app_signature
    payload["sign_method"] = "sha1"
    payload = json.dumps(payload)
    res = json.loads(urlopen(url, payload).read())
    return res
def handle(self, pkt):
    """Per-packet sniffer callback: accumulate bit-error statistics for UDP
    test traffic addressed to this receiver and print one CSV row
    (timestamp, RSSI, BER, PRR) per accepted packet."""
    # Data udp packets only
    if pkt.haslayer(RadioTap) and pkt.haslayer(Dot11) and pkt.type == 2 and pkt.haslayer(UDP):
        ip = pkt.getlayer(IP)
        udp=pkt.getlayer(UDP)
        # With the proper IP address and UDP port
        if ip.dst == self.address[0] and udp.dport == self.address[1] and pkt.haslayer(Raw):
            # Radiotap/802.11/802.11 QoS/LLC/SNAP/IP/UDP
            data = pkt[Raw].load
            if len(data) >= MIN_PACKET_SIZE:
                flips = self.popcorn(data)  # count of flipped bits in payload
                self.received += 1
                if flips > 0:
                    self.error += 1
                _tuxtime = ttime()
                _rssi = pkt[RadioTap].dBm_AntSignal
                _ber = float(flips) / (8*len(data))  # bit error rate
                # NOTE(review): denominator is error + received, where
                # received already counts erroneous packets — confirm the
                # intended PRR definition.
                _prr = float(self.received) / (self.error + self.received)
                print '{0}, {1}, {2}, {3}'.format(_tuxtime, _rssi, _ber, _prr)
def deliver_notify(access_token, openid, transid, out_trade_no):
    """Send WeChat the delivery confirmation for a paid order.

    Builds a SHA1-signed payload, POSTs it to DELIVER_NOTIFY_URL (token in
    the query string) and returns the decoded JSON reply.

    :param access_token: WeChat API access token.
    :param openid: payer's open id.
    :param transid: WeChat transaction id.
    :param out_trade_no: merchant-side order number.
    """
    url = DELIVER_NOTIFY_URL + "?access_token=" + access_token
    payload = {
        "appid": settings.WXPAY_APPID,
        "openid": openid,
        "transid": transid,
        "out_trade_no": out_trade_no,
        "deliver_timestamp": "%.f" % ttime(),
        "deliver_status": "1",
        "deliver_msg": "ok",
    }
    # app_signature covers the payload including the (secret) appkey.
    payload["appkey"] = settings.WXPAY_PAYSIGNKEY
    payload,payloadprestr = params_filter(payload)
    app_signature = sha1(payloadprestr).hexdigest()
    del payload["appkey"]  # never transmit the signing key itself
    payload["app_signature"] = app_signature
    payload["sign_method"] = "sha1"
    payload = json.dumps(payload)
    res = json.loads(urlopen(url, payload).read())
    return res
def download(url, outdir):
    """Download one free-slot file from the file host into *outdir*.

    Handles the free-user waiting game: parses wait/penalty pages, sleeps
    and retries recursively, waits out the mandatory countdown, then streams
    the payload into a temp file with a progress line before copying it into
    place.

    :raises FileExists: the target file already exists.
    :raises InvalidRSURL: the page carries no download form.
    """
    path = pjoin(outdir, basename(url))
    if exists(path):
        raise FileExists(basename(url))
    # First request: fetch the landing page and locate the download form.
    with closing(urlopen(url)) as connection:
        spam_page = html.parse(connection)
        form = spam_page.find('.//form[@id="ff"]')
        if form is None:
            raise InvalidRSURL(url)
        dl_url = form.get('action')
    # Request a free-download slot.
    with closing(urlopen(dl_url, 'dl.start=Free')) as connection:
        data = connection.read()
        # Forced waiting period ("n minutes"): sleep, then retry from scratch.
        pd = re.search('(\d+) minute', data)
        if pd is not None:
            minwait = int(pd.group(1))
            starttime = datetime.now() + timedelta(minutes=minwait)
            print "\t... waiting %d minutes (starting at %s) ..." % \
                (minwait, starttime.strftime("%x %X"))
            sleep(minwait * 60)
            return download(url, outdir)
        # Server-busy page: wait five minutes and retry.
        if re.search(r"(Currently a lot of users|There are no more download " \
                "slots|Unfortunately right now our servers are overloaded)",
                data, re.I) is not None:
            minwait = 5
            starttime = datetime.now() + timedelta(minutes=minwait)
            print "\t... no more download slots available - waiting %d " \
                "minutes (starting at %s) ..." % (minwait,
                starttime.strftime("%x %X"))
            sleep(minwait * 60)
            return download(url, outdir)
        # Mandatory countdown before the real link may be fetched.
        timeout = int(re.search('var c=(\d+);', data).group(1))
        dl_url = re.search('<form name="dlf" action="(.*?)" ', data).group(1)
        sleep(timeout)
    # Retrieve URL
    with closing(urlopen(dl_url)) as connection:
        with TemporaryFile('w+b') as outfile:
            leeched = 0
            size = int(connection.info()['content-length'])
            starttime = ttime()
            # Stream in 8 KiB chunks, updating a single-line progress report.
            while True:
                data = connection.read(8192)
                if not data:
                    break
                leeched += len(data)
                elapsed = ttime() - starttime
                speed = leeched / 1024 / elapsed
                eta = (size - leeched) / 1024 / speed
                eta = datetime.now() + timedelta(seconds=eta)
                sys.stdout.write('\r\t%.01f%%\tavg. down: %.01f KiB/s' \
                    '\tETA: %s ' % (leeched / size * 100, speed,
                    eta.strftime("%X")))
                sys.stdout.flush()
                outfile.write(data)
            # XXX: this probably needs proper handling (within urllib?)
            if leeched != size:
                print "\t... the download could have failed!" \
                    "(possible timeout, check file size)"
            # Copy the completed temp file into its final location.
            outfile.seek(0)
            with open(path, 'wb') as routfile:
                shutil.copyfileobj(outfile, routfile)
    print
def profileit(msg='profile'):
    """Generator-based timing context: yields to the wrapped block, then
    prints *msg* followed by the elapsed seconds.

    NOTE(review): presumably decorated with contextlib.contextmanager where
    it is declared/used — confirm.
    """
    t = ttime()
    yield
    print msg, ttime() - t
for i in range(numMessages): for msg in M.retr(i+1)[1]: msg_data.append(msg) # Now we save the messages to a list and then search through them later except: print RED + '[!] Error encountered.. Exiting..' + END sys.exit(1) else: print YELLOW + '[+] Emails downloaded and sorted..deleting emails' + END numMessages = len(M.list()[1]) for i in range(numMessages): for msg in M.dele(i+1)[1]: print RED + '[!] Message Deleted...' + END M.quit() # Time to write the msg(s) to a text file so we have them for further analysis t = int(ttime()) filename = str(t) + '.txt' f = open(filename, 'a') for m in msg_data: f.write('%s\n' % m) for s in re.finditer('sender IP is (\d*.\d*.\d*.\d*)', m): sender_ip_lookup.append(s.group(1)) for t in re.finditer('http://\S*', m): url_list.append(t.group().strip('"')) print GREEN + '[+] Msg(s) written to: ' + filename + END f.close() # Check the URL's in the url_list stack and then split them so we can resolve the IP addresses for y in url_list: if 'http://' in y: split_domain = y.split('/')[2]
main_logger.info("Percorso di esecuzione %s" % path ) ########## File vari: DB sDBName = bbtConfig.get('Database','dbname') sDBPath = os.path.join(os.path.abspath('..'), bbtConfig.get('Database','dbfolder'), sDBName) main_logger.info("Database utilizzato %s" % sDBPath ) if not os.path.isfile(sDBPath): main_logger.error( "Errore! File %s inesistente!" % sDBPath) bbt_parameters = [] bbt_parameters = get_bbtparameters(sDBPath) if len(bbt_parameters) == 0: main_logger.error( "Attenzione! Nel DB %s non ci sono i dati necessari!" % sDBPath) main_logger.info("Ci sono %d pk" % len(bbt_parameters) ) totIterations = mp_np*nIter if bGeorandom: geo_start_time = ttime() insert_georandom(sDBPath,totIterations, bbt_parameters,sKey) geo_tot_time = ttime() - geo_start_time main_logger.info("Generazione dei parametri geotecnici per %d iterazioni su %d segmenti ha richiesto %d secondi" % (totIterations,len(bbt_parameters) ,geo_tot_time ) ) else: main_logger.info("Generazione dei parametri geotecnici saltata") iMax = check_eval4Geo(sDBPath,"XXX") if iMax >= totIterations: main_logger.info("Sono disponibili %d iterazioni" % iMax) else: main_logger.info("Ci sono %d iterazioni disponibili per i parametri geotecnici su totali %d necessarie, ci sono ancora da generare %d iterazioni!" % (iMax,totIterations, totIterations - iMax)) raise ValueError("Ci sono %d iterazioni disponibili per i parametri geotecnici su totali %d necessarie, ci sono ancora da generare %d iterazioni!" % (iMax,totIterations, totIterations - iMax)) # danzi.tn@20151116 if bPerformTBMClean: main_logger.info("Richiesta la cancellazione di tutti i dati") clean_all_eval_ad_kpi(sDBPath)
def mp_producer(parms):
    """Worker-process entry point of the TBM Monte-Carlo simulation.

    Unpacks (idWorker, nIter, sDBPath, loopTbms, sKey), loads this worker's
    slice of pre-generated geotechnical iterations, simulates every
    TBM/alignment/segment combination per iteration, and persists the
    evaluated parameters and KPIs per iteration.
    """
    idWorker, nIter, sDBPath, loopTbms ,sKey = parms
    # stagger worker start-up to avoid DB contention
    tsleep(idWorker*10+1)
    start_time = ttime()
    now = datetime.datetime.now()
    strnow = now.strftime("%Y%m%d%H%M%S")
    main_logger = createLogger(idWorker,"mp_producer")
    main_logger.info("[%d]############################# Starts at %s" % (idWorker,strnow))
    #with plock:
    #    print "[%d]############################# Starts at %s" % (idWorker,strnow)
    # initialise alignment info from the configuration file
    inizio_GLEST = bbtConfig.getfloat('Import','inizio_GLEST')
    fine_GLEST = bbtConfig.getfloat('Import','fine_GLEST')
    inizio_GLSUD = bbtConfig.getfloat('Import','inizio_GLSUD')
    fine_GLSUD = bbtConfig.getfloat('Import','fine_GLSUD')
    inizio_CE = bbtConfig.getfloat('Import','inizio_CE')
    fine_CE = bbtConfig.getfloat('Import','fine_CE')
    # offset between CE and GLEST such that GLNORD = delta_GLEST_CE - CE
    delta_GLEST_CE = bbtConfig.getfloat('Import','delta_GLEST_CE')
    projectRefCost = bbtConfig.getfloat('Import','project_ref_cost') # millions of euro
    # danzi.tn@20151115 incorporate Gabriele's InfoAlignment changes
    # read the shield friction-coefficient parameters from the configuration
    fCShiledMin = bbtConfig.getfloat('Alignment','frictionCShiledMin')
    fCShiledMode = bbtConfig.getfloat('Alignment','frictionCShiledMode')
    fCShiledMax = bbtConfig.getfloat('Alignment','frictionCShiledMax')
    # build the shield friction distribution
    fcShield = FrictionCoeff(fCShiledMin,fCShiledMode,fCShiledMax)
    # read the cutter friction-coefficient parameters from the configuration
    fCCutterdMin = bbtConfig.getfloat('Alignment','frictionCCutterMin')
    fCCutterMode = bbtConfig.getfloat('Alignment','frictionCCutterMode')
    fCCutterMax = bbtConfig.getfloat('Alignment','frictionCCutterMax')
    # build the cutter friction distribution
    fcCutter = FrictionCoeff(fCCutterdMin,fCCutterMode,fCCutterMax)
    # The three simulated alignments (names are project identifiers).
    alnAll = []
    aln=InfoAlignment('Galleria di linea direzione Sud', 'GLSUD', inizio_GLSUD, fine_GLSUD,fCCutterMode, fCShiledMode)
    alnAll.append(aln)
    aln=InfoAlignment('Cunicolo esplorativo direzione Nord', 'CE',
        delta_GLEST_CE - fine_CE, delta_GLEST_CE - inizio_CE , fCCutterMode, fCShiledMode)
    alnAll.append(aln)
    aln=InfoAlignment('Galleria di linea direzione Nord', 'GLNORD',inizio_GLEST, fine_GLEST, fCCutterMode, fCShiledMode)
    alnAll.append(aln)
    kpiTbmList = []
    # Fetch this worker's slice of pre-generated iterations [idWorker*nIter, (idWorker+1)*nIter).
    main_logger.debug("[%d]############################# Inizia a recuperare le itarazioni di %s dalla %d alla %d" % (idWorker,sKey,idWorker*nIter, (idWorker+1)*nIter))
    bbt_bbtparameterseval = get_mainbbtparameterseval(sDBPath,sKey,idWorker*nIter, (idWorker+1)*nIter)
    main_logger.debug("[%d]############################# ...recuperate %d iterazioni, memoria totale" % (idWorker,len(bbt_bbtparameterseval)))
    for iIterationNo in range(nIter):
        mainIterationNo = idWorker*nIter + iIterationNo  # global iteration index
        tbmSegmentCum = 0  # cumulative time spent inside TBMSegment
        iter_start_time = ttime()
        bbttbmkpis = []
        bbt_evalparameters = []
        iCheckEvalparameters = 0
        iCheckBbttbmkpis = 0
        # for every tunnel alignment
        main_logger.info("[%d]########### iteration %d - %d" % (idWorker, iIterationNo, mainIterationNo))
        #with plock:
        #    print "[%d]########### iteration %d - %d" % (idWorker, iIterationNo, mainIterationNo)
        for alnCurr in alnAll:
            for tbmKey in loopTbms:
                tbmData = loopTbms[tbmKey]
                # only if this TBM is compatible with the tunnel
                if alnCurr.tbmKey in tbmData.alignmentCode:
                    tbm = TBM(tbmData, 'V')
                    kpiTbm = KpiTbm4Tunnel(alnCurr.description, mainIterationNo)
                    iCheckBbttbmkpis += 1
                    kpiTbm.setKPI4TBM(alnCurr,tbmKey,tbm,projectRefCost)
                    # select segments within the tunnel's start/end chainage
                    matches_params = [bpar for bpar in bbt_bbtparameterseval[mainIterationNo] if alnCurr.pkStart <= bpar.inizio and bpar.fine <= alnCurr.pkEnd]
                    for bbt_parameter in matches_params:
                        bbtparameter4seg = build_bbtparameterVal4seg(bbt_parameter)
                        iCheckEvalparameters += 1
                        if bbtparameter4seg == None:
                            main_logger.error("[%d] %s, %s per pk %d parametri Geo non trovati" % (idWorker, alnCurr.description, tbmKey, bbt_parameter.fine) )
                            continue
                        # danzi.tn@20151115 incorporate Gabriele's InfoAlignment changes:
                        # first iterations use the modal friction values, later ones sample.
                        if iIterationNo > 2:
                            alnCurr.frictionCoeff = fcShield.rvs()
                            alnCurr.fiRi = fcCutter.rvs()
                        else:
                            alnCurr.frictionCoeff = fCShiledMode
                            alnCurr.fiRi = fCCutterMode
                        try:
                            tbmSegBefore = ttime()
                            tbmsect = TBMSegment(bbtparameter4seg, tbm, alnCurr.fiRi, alnCurr.frictionCoeff)
                            tbmSegAfter = ttime()
                            tbmSegmentCum += (tbmSegAfter - tbmSegBefore)
                        except Exception as e:
                            main_logger.error("[%d] %s, %s per pk %d TBMSegment va in errore: %s" % (idWorker, alnCurr.description, tbmKey, bbt_parameter.fine , e) )
                            main_logger.error("[%d] bbtparameter4seg = %s" % str(bbtparameter4seg))
                            continue
                        kpiTbm.setKPI4SEG(alnCurr,tbmsect,bbtparameter4seg)
                        # danzi.tn@20151114 new TunnelSegment-derived parameters added
                        bbt_evalparameters.append((strnow, mainIterationNo,alnCurr.description, tbmKey, bbt_parameter.fine,bbt_parameter.he,bbt_parameter.hp,bbt_parameter.co,bbtparameter4seg.gamma,
                            bbtparameter4seg.sci,bbtparameter4seg.mi,bbtparameter4seg.ei,bbtparameter4seg.cai,bbtparameter4seg.gsi,bbtparameter4seg.rmr,
                            tbmsect.pkCe2Gl(bbt_parameter.fine),
                            tbmsect.TunnelClosureAtShieldEnd*100. ,
                            tbmsect.rockBurst.Val,
                            tbmsect.frontStability.Ns,
                            tbmsect.frontStability.lambdae,
                            tbmsect.penetrationRate*1000. ,
                            tbmsect.penetrationRateReduction*1000. ,
                            tbmsect.contactThrust,
                            tbmsect.torque,
                            tbmsect.frictionForce,
                            tbmsect.requiredThrustForce,
                            tbmsect.availableThrust,
                            tbmsect.dailyAdvanceRate,
                            bbt_parameter.profilo_id,
                            bbt_parameter.geoitem_id,
                            bbt_parameter.title,
                            bbtparameter4seg.sti,
                            bbtparameter4seg.k0,
                            tbmsect.t0,
                            tbmsect.t1,
                            tbmsect.t3,
                            tbmsect.t4,
                            tbmsect.t5,
                            tbmsect.InSituCondition.SigmaV,
                            tbmsect.Excavation.Radius,
                            tbmsect.Rock.E,
                            tbmsect.MohrCoulomb.psi,
                            tbmsect.Rock.Ucs,
                            tbmsect.InSituCondition.Gsi,
                            tbmsect.HoekBrown.Mi,
                            tbmsect.HoekBrown.D,
                            tbmsect.HoekBrown.Mb,
                            tbmsect.HoekBrown.S,
                            tbmsect.HoekBrown.A,
                            tbmsect.HoekBrown.Mr,
                            tbmsect.HoekBrown.Sr,
                            tbmsect.HoekBrown.Ar,
                            tbmsect.UrPi_HB(0.),
                            tbmsect.Rpl,
                            tbmsect.Picr,
                            tbmsect.LDP_Vlachopoulos_2009(0.),
                            tbmsect.LDP_Vlachopoulos_2009(tbm.Slen),
                            ) )
                    kpiTbm.updateKPI(alnCurr)
                    bbttbmkpis += kpiTbm.getBbtTbmKpis()
        sys.stdout.flush()
        iter_end_time = ttime()
        main_logger.info("[%d]#### iteration %d - %d terminated in %d seconds (%d)" % (idWorker, iIterationNo, mainIterationNo, iter_end_time-iter_start_time, tbmSegmentCum))
        main_logger.debug("[%d]### Start inserting %d (%d) Parameters and %d (21x%d) KPIs" % (idWorker, len(bbt_evalparameters),iCheckEvalparameters,len(bbttbmkpis),iCheckBbttbmkpis))
        # Persist this iteration's parameters and KPIs.
        insert_eval4Iter(sDBPath,bbt_evalparameters,bbttbmkpis)
        insert_end_time = ttime()
        main_logger.info("[%d]]### Insert terminated in %d seconds" % (idWorker,insert_end_time-iter_end_time))
    now = datetime.datetime.now()
    strnow = now.strftime("%Y%m%d%H%M%S")
    end_time = ttime()
    main_logger.info("[%d]############################# Ends at %s (%s seconds)" % (idWorker,strnow, end_time-start_time))
def dotransform(request, response):
    """Geolocate the public source IPs of a pcap and return a Google-map page.

    Reads the pcap referenced by ``request.value``, resolves every non-private
    source IP through the local MaxMind database, builds an HTML page with a
    Google Maps visualization, saves it in the transform's output folder and
    attaches it to *response* as a ``GeoMap`` entity.

    Fixes over the previous revision:
      * latitude/longitude were emitted with ``%d`` (truncated to whole
        degrees) — now emitted with ``%f``;
      * private-range filtering used substring matching (``'10.' in ip``
        also excluded e.g. ``8.10.1.1``) — now a prefix match;
      * ``gi.record_by_addr`` may return ``None`` for unknown addresses —
        that case is now skipped instead of raising ``TypeError``.

    Returns the (possibly augmented) ``response`` object.
    """
    geo_header = """<html><head>
<script type="text/javascript" src="http://maps.google.com/maps?file=api&v=2&key=AIzaSyDEMaaLU6t3XuijBcO484BBhUoluqpnFa4"></script>
<script type="text/javascript" src="http://www.google.com/jsapi"></script>
<script type="text/javascript">
google.load('visualization', '1', {packages: ['geomap']});
</script>
<script type="text/javascript">
google.load("visualization", "1", {packages:["map"]});
google.setOnLoadCallback(drawMap);
function drawMap() {
var data = new google.visualization.DataTable();
data.addColumn('number','Lat');
data.addColumn('number','Lon');
data.addColumn('string','IP');
"""
    geo_footer = """
var chart = new google.visualization.Map(document.getElementById('map_60aa'));
chart.draw(data, {showTip:true});
}
google.setOnLoadCallback(drawVisualization);
</script>
</head>
<body>
<div id="map_60aa"></div>
</body>
</html>"""

    map_code = []

    # The MaxMind GeoIP database must be installed locally first.
    if not os.path.exists('/opt/geoipdb/geoipdb.dat'):
        return response + UIMessage('Need local install of MaxMinds Geo IP database, use the download script in resource/external/geoipdownload.sh')

    gi = pygeoip.GeoIP('/opt/geoipdb/geoipdb.dat')
    pkts = rdpcap(request.value)

    try:
        tmpfolder = request.fields['sniffMyPackets.outputfld']
    except KeyError:
        return response + UIMessage('No output folder defined, run the L0 - Prepare pcap transform')

    # Collect every non-zero source IP seen in the capture.
    ip_list = []
    for x in pkts:
        if x.haslayer(IP):
            src = x.getlayer(IP).src
            if src != '0.0.0.0':
                ip_list.append(src)

    coordinates = []
    # Prefixes we never try to geolocate. NOTE(review): '172.' and '10.' are
    # broader than the actual RFC 1918 ranges — kept as in the original.
    ip_exclusions = ('192.168.', '172.', '10.', '127.')
    for ip in ip_list:
        if ip.startswith(ip_exclusions):
            continue
        rec = gi.record_by_addr(ip)
        if rec is None:
            continue  # address not present in the GeoIP database
        coords = rec['latitude'], rec['longitude'], ip
        if coords not in coordinates:
            coordinates.append(coords)

    map_code.append(" data.addRows(%d);" % (len(coordinates)) + '\n')
    # %f keeps the fractional degrees; %d truncated them to integers.
    c = 0
    for lat, lng, src in coordinates:
        map_code.append(" data.setValue(%d, 0, %f);" % (c, lat) + '\n')
        map_code.append(" data.setValue(%d, 1, %f);" % (c, lng) + '\n')
        map_code.append(" data.setValue(%d, 2, '%s');" % (c, src) + '\n')
        c += 1

    # Create the text output for a html file to save
    s = str(geo_header) + ' '.join(map_code) + str(geo_footer)

    # Create the file and save the output from s using time as a filename
    t = int(ttime())
    filename = tmpfolder + '/' + str(t) + '.html'
    with open(filename, 'w') as f:
        f.write(s)
    # cmd = 'xdg-open ' + filename
    # os.system(cmd)

    # Return a GeoMap entity with the path to the html file
    e = GeoMap(filename)
    e.linkcolor = 0x2314CA
    e.linklabel = 'GeoMap'
    response += e
    return response
def __call__(
    self,
    command = '',
    waitForCompletion = True,
    triggerEvent = False,
    additionalSuffix = "",
    disableParsingCommand = True,
    disableParsingAdditionalSuffix = True,
    payload = False,
    disableWOW64=False,
    runAsAdmin = False,
):
    """Execute *command* through ``cmd.exe /C`` via ``ShellExecuteExW``.

    Parameters:
        command: command line handed to ``cmd.exe /C``; parsed with
            ``eg.ParseString`` only when ``disableParsingCommand`` is False.
        waitForCompletion: block until the process exits; its stdout is
            redirected to a temp file, read back and returned (stripped).
        triggerEvent: fire an EventGhost event when the command completes.
        additionalSuffix: appended to the event suffix as ``suffix.extra``;
            parsed with ``eg.ParseString`` when
            ``disableParsingAdditionalSuffix`` is False.
        payload: when triggering an event, attach the command output as the
            event payload.
        disableWOW64: on 64-bit Windows, temporarily disable WOW64 filesystem
            redirection around the ShellExecute call.
        runAsAdmin: request elevation by setting the ``runas`` verb.

    Returns the command's stripped output when ``waitForCompletion`` is
    true; otherwise returns None (the async event branch delegates to a
    ``self.TriggerEvent`` thread).

    Raises ``self.Exception`` when ``ShellExecuteExW`` or
    ``GetExitCodeProcess`` fails.
    """
    # Optionally refresh the environment block so the child sees new vars.
    if eg.config.refreshEnv:
        eg.Environment.Refresh()
    prefix = self.plugin.info.eventPrefix
    suffix = self.text.eventSuffix
    if additionalSuffix != "":
        suffix = "%s.%s" % (suffix, additionalSuffix)
    # NOTE: the flags read inverted — parsing happens when the
    # corresponding "disable" flag is False.
    if not disableParsingCommand:
        command = eg.ParseString(command)
    if not disableParsingAdditionalSuffix:
        additionalSuffix = eg.ParseString(additionalSuffix)
    processInformation = self.processInformation = SHELLEXECUTEINFO()
    processInformation.cbSize = sizeof(processInformation)
    processInformation.hwnd = 0
    processInformation.lpFile = 'cmd.exe'
    if waitForCompletion or triggerEvent:
        # Query the DOS console codepage (chcp) so the redirected output
        # file can be decoded correctly later.
        si = STARTUPINFO()
        si.dwFlags |= STARTF_USESHOWWINDOW
        proc = popen("chcp", si) # DOS console codepage
        data = proc.communicate()[0]
        if not proc.returncode:
            cp = "cp" + data.split()[-1].replace(".", "")
            proc.stdout.close()
        # NOTE(review): if chcp fails, `cp` stays unbound; the later
        # code_open() then raises NameError, which the bare except below
        # swallows into an empty returnValue — confirm this is intended.
        filename = join(
            eg.folderPath.TemporaryFiles,
            "EventGhost-output-%s.txt" % ttime()
        )
        # Redirect the command's stdout into the temp file.
        processInformation.lpParameters = '/C %s > %s' % (command, filename)
        # Keep the process handle open so we can wait on it below.
        processInformation.fMask = SEE_MASK_NOCLOSEPROCESS
    else:
        processInformation.lpParameters = '/C %s' % command
    if runAsAdmin:
        processInformation.lpVerb = "runas"
    processInformation.nShow = 0
    processInformation.hInstApp = 0
    # Disabling WOW64 redirection only makes sense on 64-bit Windows.
    disableWOW64 = disableWOW64 and IsWin64()
    if disableWOW64:
        prevVal = Wow64DisableWow64FsRedirection()
    if not windll.shell32.ShellExecuteExW(byref(processInformation)):
        raise self.Exception(FormatError())
    if disableWOW64:
        # Restore the previous redirection state immediately after launch.
        Wow64RevertWow64FsRedirection(prevVal)
    if waitForCompletion:
        WaitForSingleObject(processInformation.hProcess, INFINITE)
        # NOTE(review): exitCode is retrieved only to surface API failures;
        # its value is never returned or inspected.
        exitCode = DWORD()
        if not GetExitCodeProcess(
            processInformation.hProcess,
            byref(exitCode)
        ):
            raise self.Exception(FormatError())
        try:
            # Read back the redirected output using the console codepage,
            # then remove the temp file.
            data = code_open(filename, 'r', cp)
            lines = data.readlines()
            returnValue = "".join(lines)
            data.close()
            remove(filename)
        except:
            # Best-effort: any failure reading the output yields "".
            returnValue = ""
        if triggerEvent:
            if payload:
                eg.TriggerEvent(
                    suffix,
                    prefix = prefix,
                    payload = returnValue.rstrip()
                )
            else:
                eg.TriggerEvent(suffix, prefix = prefix)
        CloseHandle(processInformation.hProcess)
        return returnValue.rstrip()
    elif triggerEvent:
        # Fire-and-forget: a helper thread waits on the process and
        # triggers the event when it finishes.
        te = self.TriggerEvent(processInformation, suffix, prefix, filename, cp, payload)
        te.start()
    else:
        CloseHandle(processInformation.hProcess)