def run(self):
    """
    Main loop: cancels stale loan offers, turns auto-renew off on
    active loans, and creates new loan offers at the optimum price.
    """
    while self._running:
        try:
            # Check for old offers
            self.cancelOldOffers()
            # Create new offers (if possible)
            self.createLoanOffers()
            # Show active loans
            active = self.api.returnActiveLoans()['provided']
            logger.info(GR('Active Loans:-----------------------------'))
            for i in active:
                logger.info('%s|%s:%s-[rate:%s]-[fees:%s]',
                            BL(i['date']),
                            OR(i['currency']),
                            RD(i['amount']),
                            GY(str(float(i['rate']) * 100) + '%'),
                            GR(i['fees']))
        except Exception as e:
            logger.exception(e)
        finally:
            # Sleep with one eye open: check once per second whether
            # we have been asked to stop.
            for i in range(int(self.delay)):
                if not self._running:
                    break
                sleep(1)
def run(protocol, csvfile):
    for path in sorted(glob.glob('../../data/grid4/*.json')):
        state = tools.load_json(path)
        (node_count, link_count) = tools.json_count(state)

        print(f'run {protocol} on {path}')

        network.apply(state=state, link_command=get_tc_command, remotes=remotes)

        tools.sleep(10)

        software_start_ms = tools.millis()
        software.start(protocol, remotes)
        software_startup_ms = tools.millis() - software_start_ms

        tools.sleep(30)

        paths = tools.get_random_paths(state, 2 * link_count)
        paths = tools.filter_paths(state, paths, min_hops=2, path_count=link_count)
        ping_result = tools.ping_paths(remotes=remotes, paths=paths, duration_ms=30000, verbosity='verbose')

        sysload_result = tools.sysload(remotes)

        software.clear(remotes)

        # add data to csv file
        extra = (['node_count', 'software_startup_ms'],
                 [node_count, software_startup_ms])
        tools.csv_update(csvfile, '\t', extra, ping_result.getData(), sysload_result)

        network.clear(remotes)

        # abort the benchmark when less than 40% of the pings arrive
        if (ping_result.received / ping_result.transmitted) < 0.4:
            break
def getPeerList(self, url, data):
    if self.status == 'stopped':
        return
    try:
        page = yield getPage(url + '?' + data)
    except Exception as error:
        print 'failed to connect to tracker: ', url
        yield sleep(self.interval)
        self.getPeerList(url, data)
    else:
        res = bdecode(page)
        if len(res) == 1:
            print 'tracker: ', res
            return
        peers = res['peers']
        peers_list = []
        while peers:
            addr = socket.inet_ntoa(peers[:4])
            port = struct.unpack('!H', peers[4:6])[0]
            peers_list.append((addr, port))
            peers = peers[6:]
        print 'got %d peers from %s' % (len(peers_list), url)
        self.reciever.updateTrackerPeers(peers_list)
        interval = res.get('interval', self.interval)
        yield sleep(interval)
        self.getPeerList(url, data)
def send_keep_alive(self):
    # Send an empty message every minute to keep the connection open.
    yield sleep(60.0)
    while self.connected:
        self.send_data('')
        print '<<-- keep alive'
        yield sleep(60.0)
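
# Note: this snippet, like several others here, yields a sleep(...) helper
# from inside a Twisted inlineCallbacks generator instead of blocking the
# reactor. The helper itself never appears in these snippets; a minimal
# sketch of the usual Deferred-based implementation it implies (an
# assumption, not code taken from this corpus):
from twisted.internet import defer, reactor

def sleep(seconds):
    # Return a Deferred that fires after `seconds`, so a generator
    # decorated with @defer.inlineCallbacks can `yield sleep(n)`
    # while the reactor keeps servicing other events.
    d = defer.Deferred()
    reactor.callLater(seconds, d.callback, None)
    return d

# The same helper explains `yield sleep(0)` (see conncectPeers further
# below): a zero-delay Deferred simply hands control back to the reactor
# between steps.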
def run(protocol, csvfile):
    tools.seed_random(23)

    node_count = 50
    state = topology.create_nodes(node_count)
    mobility.randomize_positions(state, xy_range=1000)
    mobility.connect_range(state, max_links=150)

    # create the network and start the routing software
    network.apply(state=state, link_command=get_tc_command)
    software.start(protocol)

    tools.sleep(30)

    for step_distance in [50, 100, 150, 200, 250, 300, 350, 400]:
        print(f'{protocol}: step_distance {step_distance}')

        traffic_beg = tools.traffic()

        for n in range(0, 6):
            #with open(f'graph-{step_distance}-{n}.json', 'w+') as file:
            #    json.dump(state, file, indent='  ')

            wait_beg_ms = tools.millis()

            # update the network representation: move nodes, then
            # connect all nodes that are in range
            mobility.move_random(state, distance=step_distance)
            mobility.connect_range(state, max_links=150)

            # update network
            network.apply(state=state, link_command=get_tc_command)

            # wait until the 10 second window is over, else raise an error
            tools.wait(wait_beg_ms, 10)

            paths = tools.get_random_paths(state, 2 * 200)
            paths = tools.filter_paths(state, paths, min_hops=2, path_count=200)
            ping_result = tools.ping_paths(paths=paths, duration_ms=2000, verbosity='verbose')
            packets_arrived_pc = 100 * (ping_result.received / ping_result.send)
            traffic_end = tools.traffic()

            # add data to csv file
            extra = (['node_count', 'time_ms', 'step_distance_m', 'n', 'packets_arrived_pc'],
                     [node_count, tools.millis() - wait_beg_ms, step_distance, n, packets_arrived_pc])
            tools.csv_update(csvfile, '\t', extra,
                             (traffic_end - traffic_beg).getData(), ping_result.getData())

            traffic_beg = traffic_end

    software.clear()
    network.clear()
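
# Note: the comment on tools.wait above says it waits until the 10 second
# window is over, else raises an error. The tools module is not part of this
# corpus; a plausible minimal sketch of that pair of helpers, with names and
# error behavior assumed rather than taken from the real module:
import time

def millis():
    # Current wall-clock time in milliseconds.
    return int(time.time() * 1000)

def wait(beg_ms, seconds):
    # Sleep out the remainder of a fixed window of `seconds` that started
    # at beg_ms; if the window is already over, the caller fell behind.
    rest_ms = beg_ms + 1000 * seconds - millis()
    if rest_ms < 0:
        raise Exception(f'wait(): already {-rest_ms}ms past the window')
    time.sleep(rest_ms / 1000.0)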
def getPeerList(self, url, data):
    """TODO: This is in serious need of refactoring..."""
    if self.status == 'stopped':
        return
    try:
        page = yield getPage(url + '?' + data)
    except Exception as error:
        log.err('Failed to connect to tracker: {0}'.format(url))
        yield sleep(self.interval)
        self.getPeerList(url, data)
    else:
        try:
            res = bdecode(page)
        except ValueError:
            log.err('Received an invalid peer list from the tracker: '
                    '{0}'.format(url))
        else:
            if len(res) == 1:
                log.msg('Tracker: {0}'.format(res))  # TODO: What is this?
                return
            peers = res['peers']
            peers_list = []
            try:
                # Try parsing in binary (compact) format first
                while peers:
                    addr = socket.inet_ntoa(peers[:4])
                    port = struct.unpack('!H', peers[4:6])[0]
                    peers_list.append((addr, port))
                    peers = peers[6:]
            except Exception:
                # Now try parsing in dictionary format
                try:
                    for p in peers:
                        peers_list.append((p["ip"], p["port"]))
                except Exception:
                    log.err('Received an invalid peer list from the '
                            'tracker: {0}'.format(url))
            log.msg('Received {0} peers from tracker: {1}'.format(
                len(peers_list), url))
            self.btm.add_peers(peers_list)
            interval = res.get('interval', self.interval)
            yield sleep(interval)
            self.getPeerList(url, data)
def run(protocol, files, csvfile):
    tools.seed_random(1234)

    for path in sorted(glob.glob(files)):
        state = tools.load_json(path)
        (node_count, link_count) = tools.json_count(state)

        print(f'run {protocol} on {path}')

        network.apply(state=state, link_command=get_tc_command)

        tools.sleep(10)

        for offset in range(0, 60, 2):
            tmp_ms = tools.millis()
            traffic_beg = tools.traffic()
            traffic_ms = tools.millis() - tmp_ms

            tmp_ms = tools.millis()
            software.start(protocol)
            software_ms = tools.millis() - tmp_ms

            # wait out the offset before measuring
            tools.sleep(offset)

            paths = tools.get_random_paths(state, 2 * 200)
            paths = tools.filter_paths(state, paths, min_hops=2, path_count=200)
            ping_result = tools.ping_paths(paths=paths, duration_ms=2000, verbosity='verbose')

            traffic_end = tools.traffic()
            sysload_result = tools.sysload()

            software.clear()

            # add data to csv file
            extra = (['node_count', 'traffic_ms', 'software_ms', 'offset_ms'],
                     [node_count, traffic_ms, software_ms, offset * 1000])
            tools.csv_update(csvfile, '\t', extra,
                             (traffic_end - traffic_beg).getData(),
                             ping_result.getData(), sysload_result)

        network.clear()
def work(self):
    w = 56.835567
    l = 60.590891
    move_by_w = move_by_l = 0
    speed = random.random()
    _w = 0.001
    _l = 0.001

    yield task.deferLater(reactor, 0, self.login)
    while True:
        # Occasionally pick a new direction on each axis (-1, 0 or 1).
        if random.randrange(10) > 8:
            move_by_w = random.randrange(-1, 2)
        if random.randrange(10) > 8:
            move_by_l = random.randrange(-1, 2)
        # If either axis stalled, force movement along one of them.
        if not (move_by_w and move_by_l):
            if random.randrange(10) > 5:
                move_by_w = 1
            else:
                move_by_l = 1
        if random.randrange(10) > 5:
            speed = random.random()
        w += _w * move_by_w * speed
        l += _l * move_by_l * speed
        # Clamp the position to the allowed bounding box.
        if w < 56.672508:
            w = 56.672508
        if w > 57.009542:
            w = 57.009542
        if l < 60.235048:
            l = 60.235048
        if l > 60.850268:
            l = 60.850268
        yield task.deferLater(reactor, 0, self.send_location, w, l)
        yield sleep(settings.WORKER_SLEEP_TIME)
    yield task.deferLater(reactor, 0, self.logout)
def run(protocol, files, csvfile):
    for path in sorted(glob.glob(files)):
        state = tools.load_json(path)
        (node_count, link_count) = tools.json_count(state)

        # limit the node count to 300
        if node_count > 300:
            continue

        print(f'run {protocol} on {path}')

        network.apply(state=state, link_command=get_tc_command, remotes=remotes)

        tools.sleep(10)

        software_start_ms = tools.millis()
        software.start(protocol, remotes)
        software_startup_ms = tools.millis() - software_start_ms

        tools.sleep(300)

        start_ms = tools.millis()
        traffic_beg = tools.traffic(remotes)

        paths = tools.get_random_paths(state, 2 * 200)
        paths = tools.filter_paths(state, paths, min_hops=2, path_count=200)
        ping_result = tools.ping_paths(remotes=remotes, paths=paths, duration_ms=300000, verbosity='verbose')

        traffic_ms = tools.millis() - start_ms
        traffic_end = tools.traffic(remotes)

        sysload_result = tools.sysload(remotes)

        software.clear(remotes)
        network.clear(remotes)

        # add data to csv file
        extra = (['node_count', 'traffic_ms', 'software_startup_ms'],
                 [node_count, traffic_ms, software_startup_ms])
        tools.csv_update(csvfile, '\t', extra,
                         (traffic_end - traffic_beg).getData(),
                         ping_result.getData(), sysload_result)
def getPeerList(self, url, data):
    """TODO: This is in serious need of refactoring..."""
    if self.status == "stopped":
        return
    try:
        page = yield getPage(url + "?" + data)
    except Exception as error:
        log.err("Failed to connect to tracker: {0}".format(url))
        yield sleep(self.interval)
        self.getPeerList(url, data)
    else:
        try:
            res = bdecode(page)
        except BTError:
            log.err("Received an invalid peer list from the tracker: "
                    "{0}".format(url))
        else:
            if len(res) == 1:
                log.msg("Tracker: {0}".format(res))  # TODO: What is this?
                return
            peers = res["peers"]
            peers_list = []
            try:
                # Try parsing in binary (compact) format first
                while peers:
                    addr = socket.inet_ntoa(peers[:4])
                    port = struct.unpack("!H", peers[4:6])[0]
                    peers_list.append((addr, port))
                    peers = peers[6:]
            except Exception:
                # Now try parsing in dictionary format
                try:
                    for p in peers:
                        peers_list.append((p["ip"], p["port"]))
                except Exception:
                    log.err("Received an invalid peer list from the "
                            "tracker: {0}".format(url))
            log.msg("Received {0} peers from tracker: {1}".format(len(peers_list), url))
            self.btm.add_peers(peers_list)
            interval = res.get("interval", self.interval)
            yield sleep(interval)
            self.getPeerList(url, data)
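
# Note: all three getPeerList variants above decode the tracker's compact
# peers blob inline. For reference, the same 6-bytes-per-peer layout (a
# 4-byte IPv4 address followed by a 2-byte big-endian port, per BEP 23) as
# a standalone function; the name parse_compact_peers is mine, not from
# this code:
import socket
import struct

def parse_compact_peers(blob):
    # Walk the blob in 6-byte strides; any trailing partial entry is ignored.
    peers = []
    for off in range(0, len(blob) - len(blob) % 6, 6):
        addr = socket.inet_ntoa(blob[off:off + 4])
        (port,) = struct.unpack('!H', blob[off + 4:off + 6])
        peers.append((addr, port))
    return peers

# Example: parse_compact_peers(b'\x7f\x00\x00\x01\x1a\xe1') -> [('127.0.0.1', 6881)]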
def deamon_read(self):
    while self.status == 'started':
        size = len(self.buffer)
        if size > self.buffer_max_size:
            # Evict the oldest entries, plus an extra 20% of headroom,
            # so we do not have to trim again on every pass.
            remove_count = size - self.buffer_max_size
            remove_count += self.buffer_max_size // 5
            for idx in self.buffer_record[:remove_count]:
                del self.buffer[idx]
            del self.buffer_record[:remove_count]
        yield sleep(10)
def run(protocol, csvfile):
    tools.seed_random(1377)

    for path in sorted(glob.glob('../../data/freifunk/*.json')):
        state = tools.load_json(path)
        (node_count, link_count) = tools.json_count(state)
        dataset_name = '{}-{:04d}'.format(os.path.basename(path)[9:-5], node_count)

        # limit to what the host can handle
        if node_count > 310:
            continue

        print(f'run {protocol} on {path}')

        state = network.apply(remotes=remotes, state=state, link_command=get_tc_command)

        tools.sleep(10)

        software.start(protocol, remotes)

        tools.sleep(300)

        start_ms = tools.millis()
        traffic_beg = tools.traffic(remotes)

        paths = tools.get_random_paths(state, 2 * node_count)
        paths = tools.filter_paths(state, paths, min_hops=2, path_count=node_count)
        ping_result = tools.ping_paths(remotes=remotes, paths=paths, duration_ms=300000, verbosity='verbose')

        sysload_result = tools.sysload(remotes)

        traffic_ms = tools.millis() - start_ms
        traffic_end = tools.traffic(remotes)

        software.clear(remotes)

        # add data to csv file
        extra = (['dataset_name', 'node_count', 'traffic_ms'],
                 [dataset_name, node_count, traffic_ms])
        tools.csv_update(csvfile, '\t', extra,
                         (traffic_end - traffic_beg).getData(),
                         ping_result.getData(), sysload_result)

        network.clear(remotes)
def __sendTaskRequest(self, new_task, timeout=None):
    if not new_task:
        return
    if timeout is None:
        # Allow one minute per requested piece before checking for timeouts.
        timeout = len(new_task) * 60
    for task in new_task:
        i, (beg, size) = task
        self.protocol.send_request(i, beg, size)
        self.piece_doing.append(task)
    yield sleep(timeout)
    self.__checkTimeout(new_task)
def connectRetry(self, connector):
    addr = self.getPeerAddr(connector)
    if addr in self.peers_retry:
        retry = self.peers_retry[addr]
    else:
        retry = 0
    if retry > 50:
        # Give up on this peer after 50 attempts.
        self.peers_failed.add(addr)
        self.peers_connecting.remove(addr)
        del self.peers_retry[addr]
    else:
        yield sleep(5)
        connector.connect()
        retry += 1
        self.peers_retry[addr] = retry
def work(self):
    # Coordinates are kept as micro-degrees (degrees * 10**6) until sent.
    w = 56835567
    l = 60590891
    while True:
        yield task.deferLater(reactor, 0, self.login)
        w += random.randint(-10000000, 10000000) / 1000.0
        l += random.randint(-10000000, 10000000) / 1000.0
        # Clamp the position to the allowed bounding box.
        if w < 56838388:
            w = 56838388
        if w > 56839803:
            w = 56839803
        if l < 60552843:
            l = 60552843
        if l > 60574815:
            l = 60574815
        yield task.deferLater(reactor, 0, self.send_location, w / 1000000.0, l / 1000000.0)
        yield task.deferLater(reactor, 0, self.logout)
        yield sleep(settings.WORKER_SLEEP_TIME)
def start(self):
    self.status = 'started'

    info_hash = self.btm.metainfo.info_hash
    peer_id = self.btm.my_peer_id
    port = self.btm.app.btServer.listenPort
    request = {
        'info_hash': info_hash,
        'peer_id': peer_id,
        'port': port,
        'compact': 1,
        'key': 'abcd',
        'uploaded': 0,
        'downloaded': 0,
        'left': 100,
        'event': 'started'
    }
    request_encode = urlencode(request)
    for url in self.btm.metainfo.announce_list:
        self.getPeerList(url, request_encode)
        yield sleep(1)
def start(self):
    self.status = 'started'

    info_hash = self.btm.metainfo.info_hash
    peer_id = self.btm.my_peer_id
    port = self.btm.app.btServer.listen_port
    request = {
        'info_hash': info_hash,
        'peer_id': peer_id,
        'port': port,
        'compact': 1,
        #'key': 'abcd',  # This is optional anyway
        'uploaded': 0,
        'downloaded': 0,
        'left': 100,
        'event': 'started'
    }
    request_encode = urlencode(request)
    for url in self.btm.metainfo.announce_list:
        self.getPeerList(url, request_encode)
        yield sleep(1)
def start(self):
    self.status = "started"

    info_hash = self.btm.metainfo.info_hash
    peer_id = self.btm.my_peer_id
    port = self.btm.app.btServer.listen_port
    request = {
        "info_hash": info_hash,
        "peer_id": peer_id,
        "port": port,
        "compact": 1,
        #'key': 'abcd',  # This is optional anyway
        "uploaded": 0,
        "downloaded": 0,
        "left": 100,
        "event": "started",
    }
    request_encode = urlencode(request)
    for url in self.btm.metainfo.announce_list:
        self.getPeerList(url, request_encode)
        yield sleep(1)
def send_keep_alive(self):
    # Send an empty message every minute to keep the connection open.
    yield sleep(60.0)
    while self.connected:
        self.send_data('')
        yield sleep(60.0)
def deamon_write(self):
    # Flush pending writes every 10 seconds while running.
    while self.status == 'started':
        self.__thread_write()
        yield sleep(10)
def conncectPeers(self, peers):
    for addr, port in peers:
        reactor.connectTCP(addr, port, self)
        # Yield control back to the reactor between connection attempts.
        yield sleep(0)
def main(API):
    response = API.LastJson
    check(response, status='get API')
    t0 = time.clock()
    count_follower = 0
    like_count = 0
    reset_count = 0
    count_error = 0
    for i in range(0, 100):
        # Alternate between the popular feed and a hashtag feed.
        if i % 2 != 0:
            choice = 'popular'
            print "popular feed"
            API.getPopularFeed()  # API call
            json = API.LastJson
            check(json, status='popular feed')
            max = 100
        else:
            choice = 'hashtags'
            print "hashtag feed"
            API.getHashtagFeed('french')  # API call
            json = API.LastJson
            check(json, status='hashtags feed')
            max = 100
        for media in json['items'][0:max]:
            sleep(3)
            if choice == 'hashtags':
                if media['like_count'] < 5:
                    print 'fewer than 5 likes'
                    count_error += 1
                else:
                    print(str(media['like_count']) + ' likes on this publication OK')
                    print 'SUCCESS after ' + str(count_error) + ' trials'
                    count_error = 0
            else:
                print 'no verification'
            print 'USER : https://instagram.com/' + str(media['user']['username'])
            like(API=API, media=media, like_count=like_count)
            print str(media['image_versions2']['candidates'][0]['url']) + ' liked!'
            API.getUserFeed(media['caption']['user_id'])  # API call
            feed = API.LastJson
            for media_feed in feed['items'][4:5]:
                like(API=API, media=media_feed, like_count=like_count)
                print str(media_feed['image_versions2']['candidates'][0]['url']) + ' liked!'
            if reset_count == 10:
                API.follow(media['caption']['user_id'])
                print str(media['user']['username']) + ' followed!'
                count_follower += 1
                reset_count = 0
                print 'number: ' + str(count_follower)
                print 'Security break ON...'
                time_process = (time.clock() - t0) / 60.
                print '%.2f min since the beginning' % time_process
                print str(count_follower) + ' users followed'
                print str(like_count) + ' media liked'
                sleep(60 + random.randint(1, 10))
                print '\nSecurity break OFF'
            print 'count: ' + str(reset_count) + '/10'
            reset_count += 1
def main(self):
    print(' - Server will run on port %s - ' % self.port)
    while True:
        #self.recv()
        tools.sleep(self.timeout_time / 2)
        self.garbage_grabber()
def start(self):
    self.status = 'started'
    yield sleep(1)
def getPeerList(self, url, data):
    yield sleep(self.interval)
def main():
    count = 0
    for i in range(0, 3000):
        driver = webdriver.Chrome(path_to_chrome_driver)
        #driver = webdriver.PhantomJS(path_to_phantom_driver)
        driver.get("https://instagram.com")
        email = driver.find_element_by_xpath(
            "//input[@aria-label='Mobile Number or Email']")
        full_name = driver.find_element_by_xpath(
            "//input[@aria-label='Full Name']")
        username = driver.find_element_by_xpath(
            "//input[@aria-label='Username']")
        password = driver.find_element_by_xpath(
            "//input[@aria-label='Password']")
        info_generate, prime = info_generation()
        email.send_keys(info_generate['email'])
        sleep(3)
        full_name.send_keys(info_generate['full_name'])
        sleep(3)
        username.send_keys(info_generate['username'])
        sleep(3)
        password.send_keys(info_generate['password'])
        #driver.find_element_by_link_text("Sign up").click()
        test = driver.find_element_by_xpath(
            "//button[contains(text(), 'Sign up')]")
        sleep(3)
        test.click()
        sleep(4)
        try:
            driver.find_element_by_id('ssfErrorAlert')
            error = True
            print 'click error'
        except:
            error = False
        sleep(4)
        count_error = 0
        # Keep clicking "Sign up", backing off a bit longer each time,
        # until the error alert no longer appears.
        while error:
            count_error += 1
            sleep(3)
            test.click()
            sleep(int(count_error))
            try:
                driver.find_element_by_id('ssfErrorAlert')
                error = True
                print 'click error'
            except:
                error = False
                print 'click ok'
        sleep(4)
        button_close = driver.find_element_by_xpath(
            "//button[contains(text(), 'Close')]")
        button_close.click()
        sleep(2)
        button_search = driver.find_element_by_xpath(
            "//span[contains(text(), 'search')]")
        button_search.click()
        sleep(2)
        search_bar = driver.find_element_by_xpath(
            "//input[@placeholder='Search']")
        search_bar.send_keys(my_username)
        count += 1
        print str(count) + ' accounts created'
        save_info(info_generate, prime)
        driver.close()
        sleep(2)
        self.state = 'invalid'
        self.errorTime = tools.now()  # for the time delay before the next check
        self.errorCount += 1
        logger.warning('ASIC #%d %s failed after %s (%s)',
                       self.num, action, self.step, result['ERROR'])

    # Indicates whether a check is currently possible
    def isValid(self):
        # not quit early, still exists, or not checked yet
        # self.test.get('TestExist', True) and \
        return self.state != 'stop' and \
            (self.errorTime == 0 or
             tools.getSecondsAfter(self.errorTime) > DELAY_SECONDS)  # 10 min passed since the last attempt


# execute module
if __name__ == '__main__':
    print('Started an endless miner monitoring loop with an interval of %d minutes'
          % (DELAY_SECONDS / 60))
    s9list = []
    for i in range(1, 22):
        s9list.append(S9Manager(i))
    while True:
        # check the router
        if router.rebootIfNoInternet(logger):
            tools.sleep(60)  # wait 1 min for the reboot
        # cycled check
        for s9 in s9list:
            s9.check()
        # delay 10 min
        tools.sleep(DELAY_SECONDS)
                                         on_error=self.on_error,
                                         on_close=self.on_close)
        self.ws.on_open = self.on_open
        self.t = Thread(target=self.ws.run_forever)
        self.t.daemon = True
        self.t._running = True
        self.t.start()
        logger.info('Ticker process started')

    def stop(self):
        self.t._running = False
        self.ws.close()
        self.t.join()
        logger.info('Ticker process stopped/joined')


if __name__ == "__main__":
    import pprint
    from tools import sleep

    logging.basicConfig(level=logging.DEBUG)
    # websocket.enableTrace(True)
    ticker = Ticker()
    try:
        ticker.start()
        for i in range(3):
            sleep(5)
            pprint.pprint(ticker('USDT_BTC'))
    except Exception as e:
        logger.exception(e)
    ticker.stop()
            '_id': self.summaryTime,
            'summary': summarize_blocks(self.trollbox)
        })

    async def main(self):
        self.api = Poloniex(jsonNums=float)
        self.tickDb = getMongoDb('markets')
        #self.trollDb = getMongoDb('trollbox')
        #self.summaryTime = self.trollDb.find_one()
        #if not self.summaryTime:
        #    logger.info('No summary found.')
        #    self.summaryTime = time()
        #else:
        #    logger.info(self.summaryTime['summary'])
        #    self.summaryTime = self.summaryTime['_id']
        #logger.info('Last summary time: %s', str(self.summaryTime))
        self.populateTicker()
        #self.push.subscribe(topic="trollbox", handler=self.onTroll)
        self.push.subscribe(topic="ticker", handler=self.onTick)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    app = Pushy()
    app.run()
    while True:
        try:
            sleep(1)
        except:
            app.stop()