def createStakedAccounts(b, e):
    ramFunds = round(config['funds']['ram_funds'] * 10000)
    configuredMinStake = round(config['funds']['min_stake'] * 10000)
    maxUnstaked = round(config['funds']['max_unstaked'] * 10000)
    for i in range(b, e):
        a = accounts[i]
        funds = a['funds']
        print('#' * 80)
        print('# %d/%d %s %s' % (i, e, a['name'], intToCurrency(funds)))
        print('#' * 80)
        if funds < ramFunds:
            print('skipping %s: not enough funds to cover ram' % a['name'])
            continue
        minStake = min(funds - ramFunds, configuredMinStake)
        unstaked = min(funds - ramFunds - minStake, maxUnstaked)
        stake = funds - ramFunds - unstaked
        stakeNet = round(stake / 2)
        stakeCpu = stake - stakeNet
        print('%s: total funds=%s, ram=%s, net=%s, cpu=%s, unstaked=%s' % (
            a['name'], intToCurrency(a['funds']), intToCurrency(ramFunds),
            intToCurrency(stakeNet), intToCurrency(stakeCpu),
            intToCurrency(unstaked)))
        assert funds == ramFunds + stakeNet + stakeCpu + unstaked
        retry(
            config['cleos']['path'] +
            'system newaccount --transfer airdrops %s %s --stake-net "%s" --stake-cpu "%s" --buy-ram "%s" ' %
            (a['name'], a['pub'], intToCurrency(stakeNet),
             intToCurrency(stakeCpu), intToCurrency(ramFunds)))
        if unstaked:
            retry(config['cleos']['path'] + 'transfer airdrops %s "%s"' %
                  (a['name'], intToCurrency(unstaked)))
def result():
    if request.method == 'GET' and request.args.get('short'):
        urlID = request.args.get('short')
        url = URL.query.filter_by(short=urlID).first_or_404(
            description=f'There is no data with {urlID}')
        visits = json.dumps([visit.serialize() for visit in url.visits],
                            default=datetimeconverter)
        return render_template('short_url.html', url=url, visits=visits,
                               title='Shortened URL')

    shortID, urlObject = None, None

    def insertURL(url):
        nonlocal shortID, urlObject
        urlObject, shortID = URL().set_value(
            full=url,
            userid=current_user.id if current_user.is_authenticated else 1)
        db.session.add(urlObject)
        db.session.commit()

    form = MainURLForm(request.form)
    if form.validate_on_submit():
        try:
            insertURL(form.fullUrl.data)
        except exc.IntegrityError:
            db.session.rollback()
            retry(lambda: insertURL(form.fullUrl.data), 2)
        return redirect(url_for('main.result', short=[shortID]))
    return redirect(url_for('main.index'))
def save_page(page: pywikibot.page.BasePage, new_text: str, status: str,
              review_license: str) -> None:
    """Replaces the wikitext of the specified page with new_text.

    If the global simulate variable is true, the wikitext will be printed
    instead of saved to Commons.
    """
    summary = string.Template(config["review_summary"]).safe_substitute(
        status=status, review_license=review_license, version=__version__)
    if not simulate:
        utils.check_runpage(site, run_override)
        logger.info(f"Saving {page.title()}")
        utils.retry(
            utils.save_page,
            3,
            text=new_text,
            page=page,
            summary=summary,
            bot=False,
            minor=False,
        )
    else:
        logger.info("Saving disabled")
        logger.debug(summary)
        logger.debug(new_text)
def fail_warning(page: pywikibot.page.BasePage, review_license: str,
                 is_old: bool = False) -> None:
    user_talk = get_author_talk(page)
    message = string.Template(
        config["old_fail_warn"] if is_old else config["fail_warn"]
    ).safe_substitute(filename=page.title(with_ns=True),
                      review_license=review_license)
    summary = string.Template(config["review_summary"]).safe_substitute(
        status="fail", review_license=review_license, version=__version__)
    if not simulate:
        utils.check_runpage(site, run_override)
        logger.info(f"Saving {user_talk.title()}")
        utils.retry(
            utils.save_page,
            3,
            text=message,
            page=user_talk,
            summary=summary,
            bot=False,
            minor=False,
            mode="append",
        )
    else:
        logger.info("Saving disabled")
        logger.info(summary)
        logger.info(message)
def proxyVote(b, e):
    vote(firstProducer, firstProducer + 1)
    proxy = accounts[firstProducer]['name']
    retry(config['cleos']['path'] + 'system regproxy ' + proxy)
    sleep(1.0)
    for i in range(b, e):
        voter = accounts[i]['name']
        retry(config['cleos']['path'] + 'system voteproducer proxy ' +
              voter + ' ' + proxy)
def setSystem():
    retry(config['cleos']['path'] + 'set contract eosio ' +
          config['general']['contracts_dir'] + '/eosio.system/')
    sleep(1)
    run(config['cleos']['path'] + 'push action eosio setpriv' +
        jsonArg(['eosio.msig', 1]) + '-p eosio@active')
    run(config['cleos']['path'] + 'push action eosio init' +
        jsonArg(['0', '4,' + config['general']['symbol']]) + '-p eosio@active')
def scrape_jobs(self, domain):
    """
    Scrape all jobs iteratively through this domain.

    :param domain: Domain to scan jobs from
    :type domain: str
    :returns: Generator yielding Craigslist job links to their listings
    :rtype: generator<str>
    """
    JOB_XPATH = (".//div[contains(@class, 'content')]"
                 "//p[contains(@class, 'row')]")
    MAP_TAG_XPATH = ".//span[@class='maptag']"
    JOB_LINK_XPATH = ".//a[@class='hdrlnk']"
    MAX_RESULTS = 100

    payload = {"employment_type": "1", "s": 1}
    endpoint = os.path.join(domain, "search", "jjj")
    logger.debug("Hitting jobs endpoint %s w/payload %s", endpoint, payload)
    res = retry(functools.partial(requests.get, endpoint, params=payload),
                SLEEP_TIME)
    while True:
        try:
            # parse results
            doc = html.fromstring(res.text)
            listings = doc.xpath(JOB_XPATH)
            if not listings:
                logger.debug("Paginated scraping complete")
                break
            logger.debug("%s listings found", len(listings))
            for listing in listings:
                # this signifies we can get the lat & long when drilling
                if listing.find(MAP_TAG_XPATH) is not None:
                    anchor_elem = listing.find(JOB_LINK_XPATH)
                    # slice off the leading slash
                    link = "{}{}".format(domain, anchor_elem.get("href")[1:])
                    yield link
        except Exception:
            logger.exception("Encountered an issue while scraping jobs",
                             exc_info=True)
        finally:
            if len(listings) < MAX_RESULTS:
                logger.debug("Paginated scraping complete")
                break
            payload["s"] += MAX_RESULTS
            logger.debug("Sleeping Zzz")
            time.sleep(SLEEP_TIME)  # sleep to avoid being blacklisted
            logger.debug("Hitting jobs endpoint %s w/payload %s",
                         endpoint, payload)
            res = retry(functools.partial(requests.get, endpoint,
                                          params=payload), SLEEP_TIME)
def _do_iteration(self):
    log_info('Starting a new iteration')
    date = datetime.utcnow()

    # Update the solution tank state
    solution_tank_was_full = self.solution_tank_is_full
    self.solution_tank_is_full = self.solution_tank.is_full()

    # Volume is unknown and pH sensor can be dry
    if not self.solution_tank_is_full:
        raise Exception('Solution tank is empty')

    # Skip one more iteration to let the pH readings stabilize
    if not solution_tank_was_full:
        raise Exception('Solution tank has been empty for a while')

    temperature, _, ph = drop_uncertainty(*self.ph.get_t_v_ph())
    if not in_range(ph, self.valid_ph_range):
        raise FatalException('Invalid pH: {:~.3gP}'.format(ph))
    if not in_range(temperature, self.valid_ph_temperature_range):
        raise FatalException(
            'Invalid pH temperature: {:~.3gP}'.format(temperature))
    if hasattr(self, 'temperature'):
        temperature = self.temperature.get_temperature()

    supply_tank_volume = drop_uncertainty(self.supply_tank.get_volume())
    if not in_range(supply_tank_volume, self.valid_supply_tank_volume_range):
        raise FatalException('Invalid supply tank volume: {:~.3gP}'.format(
            supply_tank_volume))

    nutrients = self._estimate_nutrients(ph)

    data = {
        'date': date.strftime('%Y-%m-%dT%H:%M:%SZ'),
        'temperature_C': '%.1f' % temperature.m_as('degC'),
        'pH': '%.2f' % ph.m_as('pH'),
        'supply_tank_L': '%.0f' % supply_tank_volume.m_as('L'),
        'nutrients_mL': '%.1f' % nutrients.m_as('mL')
    }
    retry(lambda: self.database.append(data), 'Database append failed')
    # Data is already in DB, ignore Thingspeak errors
    retry(lambda: self.thingspeak.append(data), 'Thingspeak append failed',
          rethrow=False)

    # We only add nutrients after their amount was logged to DB
    self.pump_x.pump(nutrients)
    self.pump_y.pump(nutrients)
def double(prefix, far=True):
    topology = CPUTopology()
    if far:
        cpu, bgcpu = topology.cpus_no_ht[:2]
    else:
        cpu = topology.cpus_no_ht[0]
        bgcpu = topology.ht_siblings[cpu][0]
    with cgmgr:
        vm = cgmgr.start(str(cpu))
        bgvm = cgmgr.start(str(bgcpu))
        time.sleep(BOOT_TIME)
        rpc = retry(rpyc.connect, args=(str(vm.addr),),
                    kwargs={"port": 6666}, retries=10)
        bgrpc = retry(rpyc.connect, args=(str(bgvm.addr),),
                      kwargs={"port": 6666}, retries=10)
        RPopen = rpc.root.Popen
        BGRPopen = bgrpc.root.Popen
        remains = len(benchmarks) ** 2
        for bgname, bgcmd in benchmarks.items():
            log.debug("waiting for idleness")
            wait_idleness(IDLENESS * 3.3)
            log.warning("launching %s in bg" % bgname)
            bg = BGRPopen(bgcmd)
            log.debug("warming up for %s" % WARMUP_TIME)
            time.sleep(WARMUP_TIME)
            for name, cmd in benchmarks.items():
                print("remains %s tests" % remains)
                remains -= 1
                outdir = s("${prefix}/double/${bgname}/")
                try:
                    os.makedirs(outdir)
                except FileExistsError:
                    pass
                output = outdir + name
                perf_cmd = PERF_CMD.format(pid=vm.pid, t=MEASURE_TIME,
                                           events=events, output=output)
                log.debug("starting %s" % name)
                p = RPopen(cmd)
                log.debug("warming up for %s" % WARMUP_TIME)
                time.sleep(WARMUP_TIME)
                log.debug("starting measurements")
                run(perf_cmd)
                assert p.poll() is None, "test unexpectedly terminated"
                assert bg.poll() is None, "bg process suddenly died :("
                log.debug("finishing tests")
                p.killall()
                gc.collect()
            bg.killall()
            time.sleep(1)
def get_market_price(self):
    '''
    current close price for the configured symbol
    '''
    return u.retry(lambda: self.client.Instrument.Instrument_get(
        symbol=s.SYMBOL).result())[0]["lastPrice"]
def get_margin(self):
    '''
    account balance summary
    '''
    return u.retry(
        lambda: self.client.User.User_getMargin(currency="XBt").result())
def main(self):
    db_connection = MyCart.get_db_credentials()
    self.create_db_connection(**db_connection)
    self.create_all_table_if_not_exists()
    authentication_token = self.login()
    if authentication_token:
        self.user_id = authentication_token['user_id']
        self.is_admin = authentication_token['is_admin']
        user_name = authentication_token['user_name']
        print_successful_login_message(user_name)
        cart = Cart()
        if self.is_admin:
            self.admin_flow()
        else:
            self.customer_flow()
    else:
        print_unsuccessful_login_message()
        if retry():
            clear_screen()
            return self.main()
        else:
            quit()
def minion_key_accepted(master, minion, minion_key_cached):
    master.salt_key_accept(minion['id'])

    def accept():
        return minion['id'] in master.salt_key()['minions']

    assert retry(accept) is True
def test_pkg_refresh_db(minion):
    def test(minion):
        try:
            res = minion.salt_call('pkg.refresh_db')
            return res.get('testpackages', False) is True
        except TypeError:
            return False

    assert retry(partial(test, minion))
def get_margin(self):
    '''
    account balance
    '''
    self.margin = u.retry(lambda: self.client
                          .User.User_getMargin(currency="XBt").result())
    return self.margin
def __enter__(self):
    vms = {}
    for n in self.names:
        vms[n] = cgmgr.start_vm(n)
    time.sleep(cfg.vmstart)
    for vm in vms.values():
        rpc = retry(rpyc.connect, args=(str(vm.addr),),
                    kwargs={"port": 6666}, retries=10)
        vm.rpc = rpc
    return vms
def get_position(self):
    '''
    current position, including open and close positions;
    returns None if there is no position
    '''
    ret = u.retry(lambda: self.client.Position.Position_get(
        filter=json.dumps({"symbol": s.SYMBOL})).result())
    if ret:
        return ret[0]
    else:
        return None
def cancel_all(self):
    """
    cancel all orders, including stop orders
    """
    orders = u.retry(lambda: self.client.Order.Order_cancelAll().result())
    for order in orders:
        logger.info(
            f"Cancel Order : (orderID, orderType, side, orderQty, limit, stop) = "
            f"({order['clOrdID']}, {order['ordType']}, {order['side']}, {order['orderQty']}, "
            f"{order['price']}, {order['stopPx']})")
    logger.info("Cancel All Order")
def get_position(self):
    """
    current position; returns None if there is no open position
    """
    ret = u.retry(lambda: self.client
                  .Position.Position_get(filter=json.dumps({"symbol": s.SYMBOL})).result())
    if len(ret) > 0:
        self.position = ret[0]
    else:
        self.position = None
    return self.position
def get_latest_ohlcv(self, bin_size, length):
    '''
    get open-high-low-close-volume (OHLCV) data as an array

    bin_size: data interval, available options: [1m, 5m, 1h, 1d]
    length: must be less than 750, the maximum size per request allowed
        by Bitmex. This is enough for most strategies; if more data is
        needed, consider using start_time and end_time. Looping over
        start/count pairs to fetch data was found to be unstable.
    '''
    source = u.retry(lambda: self.client.Trade.Trade_getBucketed(
        symbol=s.SYMBOL, binSize=bin_size, count=length,
        reverse=True).result())
    source = u.to_data_frame(source, reverse=True)
    return source
def premine():
    ram_funds = 10000
    retry(
        config['cleos']['path'] +
        'system newaccount --transfer eosio priv %s --stake-net "%s" --stake-cpu "%s" --buy-ram "%s"' %
        (config['accounts']['priv_incentive_public_key'],
         intToCurrency(privateIncentivePremine / 2 - ram_funds / 2),
         intToCurrency(privateIncentivePremine / 2 - ram_funds / 2),
         intToCurrency(ram_funds)))
    retry(
        config['cleos']['path'] +
        'system newaccount --transfer eosio team %s --stake-net "%s" --stake-cpu "%s" --buy-ram "%s" ' %
        (config['accounts']['team_holding_public_key'],
         intToCurrency(teamHoldingPremine / 2 - ram_funds / 2),
         intToCurrency(teamHoldingPremine / 2 - ram_funds / 2),
         intToCurrency(ram_funds)))
    retry(config['cleos']['path'] + 'transfer eosio app "%s" ' %
          (intToCurrency(commercialApplicationsPremine)))
    retry(config['cleos']['path'] + 'transfer eosio market "%s" ' %
          (intToCurrency(marketPremine)))
    retry(config['cleos']['path'] + 'transfer eosio airdrops "%s" ' %
          (intToCurrency(airdropsPremine)))
def test_pkg_latest_version_already_installed(setup):
    config, initconfig = setup
    master = config['masters'][0]
    minion = master['minions'][1]

    def test(master, minion):
        try:
            resp = master['fixture'].salt(minion['id'], 'state.apply latest-again')
            assert resp
            assert minion['id'] in resp
            assert resp[minion['id']][
                'pkg_|-latest-version_|-test-package_|-latest']['result'] is True
            return True
        except TypeError:
            return False

    assert retry(partial(test, master, minion))
def compare_photo_hashes(page: pywikibot.FilePage, photo: iNaturalistID) -> bool:
    """Compares the photo on iNaturalist to the hash of the Commons file"""
    sha1sum = hashlib.sha1()
    try:
        image = utils.retry(get_ina_image, 3, photo=photo)
    except Exception as err:
        logger.exception(err)
        return False
    sha1sum.update(image)
    com_hash = page.latest_file_info.sha1
    ina_hash = sha1sum.hexdigest()
    logger.debug(f"Commons sha1sum: {com_hash}")
    logger.debug(f"iNaturalist sha1sum: {ina_hash}")
    return compare_digest(com_hash, ina_hash)
def order(self, orderQty, stop=0):
    '''
    place a 'Market' order (or a 'Stop' order if stop is given):
    'buy' if orderQty is positive, 'sell' if orderQty is negative
    '''
    clOrdID = 'Daxiang_' + u.random_str()
    side = 'Buy' if orderQty > 0 else 'Sell'
    if stop == 0:
        # market order
        orderType = 'Market'
        u.retry(lambda: self.client.Order.Order_new(
            symbol=s.SYMBOL, ordType=orderType, clOrdID=clOrdID,
            side=side, orderQty=orderQty).result())
        u.logging_order(id=clOrdID, type=orderType, side=side,
                        qty=orderQty, price=self.get_market_price())
    else:
        # stop order
        orderType = 'Stop'
        u.retry(lambda: self.client.Order.Order_new(
            symbol=s.SYMBOL, ordType=orderType, clOrdID=clOrdID,
            side=side, orderQty=orderQty, stopPx=stop).result())
        u.logging_order(id=clOrdID, type=orderType, side=side,
                        qty=orderQty, stop=stop)
def get_open_orders(self):
    """
    fetch all of my open orders
    """
    open_orders = u.retry(lambda: self.client.Order.Order_getOrders(
        filter=json.dumps({
            "symbol": s.SYMBOL,
            "open": True
        })).result())
    open_orders = [
        o for o in open_orders if o["clOrdID"].startswith('Daxiang')
    ]
    if len(open_orders) > 0:
        return open_orders
    else:
        return None
def compare_ssim(orig: Image, photo: iNaturalistID,
                 min_ssim: float = 0.0) -> Tuple[bool, float]:
    """Compares an iNaturalist photo to the Commons file using an SSIM score"""
    if not min_ssim:
        min_ssim = config.get("min_ssim", 0.9)
    assert min_ssim > 0 and min_ssim < 1

    try:
        image = utils.retry(get_ina_image, 3, photo=photo)
    except Exception as err:
        logger.exception(err)
        return False, 0.0

    ina_image = Image.open(BytesIO(image))
    ssim = compute_ssim(orig, ina_image)
    logger.debug(f"SSIM value: {ssim}")
    return (ssim > min_ssim, ssim)
def scrape_single_job_details(self, endpoint):
    """
    Scrapes a single job's page for details.

    :param endpoint: The endpoint where to get job details from
    :type endpoint: str
    :returns: Job details including title, company, latitude, and longitude
    :rtype: tuple
    """
    TITLE_XPATH = ".//span[@id='titletextonly']"
    MAP_XPATH = ".//div[@id='map']"

    title = company = latitude = longitude = None
    try:
        logger.debug("Hitting single job endpoint %s", endpoint)
        res = retry(functools.partial(requests.get, endpoint), SLEEP_TIME)

        # parse results
        doc = html.fromstring(res.text)
        title_elem = doc.find(TITLE_XPATH)
        if title_elem is not None:
            title = title_elem.text
        else:
            raise MissingDataException("Title is missing from job listing")
        # XXX no reliable way to parse this from CL
        company = "A Company That's Hiring"
        map_elem = doc.find(MAP_XPATH)
        if map_elem is not None:
            latitude = map_elem.get("data-latitude")
            longitude = map_elem.get("data-longitude")
        else:
            raise MissingDataException("Map coordinates are missing from job listing")
        return title, company, latitude, longitude
    except MissingDataException:
        # raises are for failing fast
        pass
    except Exception:
        logger.exception("Encountered an issue while scraping single job details",
                         exc_info=True)
def scrape_domains(self):
    """
    Scrape for all Craigslist domains and output into CSV files.

    :returns: Generator yielding Craigslist cities & links to their listings
    :rtype: generator<str, str>
    """
    DOMAINS_ENDPOINT = "http://www.craigslist.org/about/sites"
    DOMAIN_XPATH = ".//div[contains(@class, 'box')]//a"

    logger.debug("Scraping domains, hitting endpoint %s", DOMAINS_ENDPOINT)
    res = retry(functools.partial(requests.get, DOMAINS_ENDPOINT), SLEEP_TIME)
    doc = html.fromstring(res.text)
    for anchor_elem in doc.xpath(DOMAIN_XPATH):
        href = "http:{}".format(anchor_elem.get("href"))
        city = anchor_elem.text
        yield city, href
def __scrap_odds(self, info, match_id, xhash):
    for out_id, out_kind, handicap, bet_name in [
            (out_1x2_id, 'x3', 0, 'o1x2'),
            (out_ou_id, 'x2', 2.5, 'ou25'),
            (out_ah_id, 'x2', -0.5, 'ahmin05')]:
        match_data = f'/feed/match/1-{soccer_id}-{match_id}-{out_id}-{soccer_data_id}-{xhash}.dat'
        json_value = retry(self.fetch_odds, match_data, fetch_max_retry)
        out_key = f'E-{out_id}-{soccer_data_id}-0-{handicap}-0'
        opening_odds, opening_change_time, closing_odds, closing_change_time = {}, {}, {}, {}
        if json_value is not None and out_key in json_value:
            json_value = json_value[out_key]
            opening_odds, opening_change_time, closing_odds, closing_change_time = \
                json_value['opening_odds'], json_value['opening_change_time'], \
                json_value['odds'], json_value['change_time']
        for book, book_name in bookmakers_map.items():
            self.fill_info(out_kind, info, opening_odds, book, f'{bet_name}_{book_name}_opening')
            self.fill_info(out_kind, info, closing_odds, book, f'{bet_name}_{book_name}_closing')
            self.fill_info_time(info, opening_change_time, book, f'{bet_name}_{book_name}_opening')
            self.fill_info_time(info, closing_change_time, book, f'{bet_name}_{book_name}_closing')
def test_ping_minion(master, minion):
    def ping():
        return master.salt(minion['id'], "test.ping")[minion['id']]

    assert retry(ping)
def test_ping_proxyminion(master, minion):
    def ping():
        return master.salt(minion['id'], "test.ping")[minion['id']] is True

    assert retry(ping) is True
def test_pkg_list_updates(minion):
    res = minion.salt_call('pkg.list_updates', 'test-package')
    assert retry(partial(_pkg_list_updates, minion))
def regProducers(b, e):
    for i in range(b, e):
        a = accounts[i]
        retry(config['cleos']['path'] + 'system regproducer ' + a['name'] +
              ' ' + a['pub'] + ' https://' + a['name'] + '.com' + '/' + a['pub'])
def test_pkg_info_available(minion):
    assert retry(partial(_pkg_info_available, minion),
                 definition_of_success=_pkg_info_available_dos)
def minion_key_cached(master, minion):
    def cache():
        return minion['id'] in master.salt_key(minion['id'])['minions_pre']

    assert retry(cache) is True
def test_archive_extracted(master, minion):
    '''
    Test if the archive.extracted overwrites the destination.
    '''
    assert retry(partial(_archextract, master, minion))
def req(credentials, user_data):
    # libvirt openAuth credential callback: fill in username and passphrase
    for cred in credentials:
        if cred[0] == libvirt.VIR_CRED_AUTHNAME:
            cred[4] = constants.SASL_USERNAME
        elif cred[0] == libvirt.VIR_CRED_PASSPHRASE:
            cred[4] = passwd
    return 0

auth = [[libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_PASSPHRASE],
        req, None]

with __connectionLock:
    conn = __connections.get(id(cif))
    if not conn:
        libvirtOpenAuth = functools.partial(libvirt.openAuth,
                                            'qemu:///system', auth, 0)
        conn = utils.retry(libvirtOpenAuth, timeout=10, sleep=0.2)
        __connections[id(cif)] = conn
        if cif is not None:
            for ev in (libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE,
                       libvirt.VIR_DOMAIN_EVENT_ID_REBOOT,
                       libvirt.VIR_DOMAIN_EVENT_ID_RTC_CHANGE,
                       libvirt.VIR_DOMAIN_EVENT_ID_IO_ERROR_REASON,
                       libvirt.VIR_DOMAIN_EVENT_ID_GRAPHICS,
                       libvirt.VIR_DOMAIN_EVENT_ID_BLOCK_JOB,
                       libvirt.VIR_DOMAIN_EVENT_ID_WATCHDOG):
                conn.domainEventRegisterAny(None, ev, __eventCallback, (cif, ev))
        for name in dir(libvirt.virConnect):
            method = getattr(conn, name)
            if callable(method) and name[0] != '_':
                setattr(conn, name, wrapMethod(method))