def request(self, type=None, data=None, additional=None):
    """ Builds and sends a request

        Args:
            type: Paths enum member
            data: Dict of data for the request
            additional: Dict mapping placeholder substrings in the URL
                template to their replacement values

        Returns:
            After sending the request, one of the following:
                JSON-decoded object: response from the API
                False: something went wrong with the call

            JSON-decoded object example:
                [
                    {
                        "error": {
                            "type": 101,
                            "address": "/",
                            "description": "link button not pressed"
                        }
                    }
                ]

        Raises:
            ExceptionNoToken: no token has been set on the Huuey() instance
    """
    address_set = self.address is not None
    type_set = type is not None
    token_set = self.token is not None

    if not address_set or not type_set:
        return False

    if type is Paths.Api:
        url = "".join([self.address, type.value[1]])
    else:
        if not token_set:
            raise ExceptionNoToken('Token missing from api call')

        url = "".join([self.address, "/api/", self.token, type.value[1]])

    if additional:
        for key in additional:
            url = url.replace(key, additional[key])

    request = Requester.request(url=url, method=type.value[0], data=data)

    return request
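# Standalone sketch of the URL-building logic above (illustrative, not part
# of the library): Paths-style enum members hold (HTTP method, path template)
# tuples, and `additional` replaces placeholder keys inside the built URL.
# The `Light` member and the "<id>" placeholder are hypothetical names.
from enum import Enum

class Paths(Enum):
    Api = ("GET", "/api")
    Light = ("PUT", "/lights/<id>/state")

address = "192.168.1.2"
token = "secret-token"
url = "".join([address, "/api/", token, Paths.Light.value[1]])
for key, value in {"<id>": "1"}.items():
    url = url.replace(key, value)
print(url)  # 192.168.1.2/api/secret-token/lights/1/state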
def run(self):
    req = Requester()
    proc = Processor()
    pri = Printer()

    commandCrypto = CommandCrypto(proc)
    req.commandCrypto = commandCrypto

    commandQuit = CommandQuit(sys)
    req.commandQuit = commandQuit

    commandError = CommandError(None)
    req.commandError = commandError

    while True:
        command = req.request()
        result = command.execute()

        if result != '':
            pri.print(result)
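# The loop above relies on a command-pattern contract: Requester.request()
# returns an object exposing execute(), which yields a printable string (or
# '' when there is nothing to print). A minimal, hypothetical command
# satisfying that contract could look like this:
class EchoCommand:
    def __init__(self, payload):
        self.payload = payload

    def execute(self):
        # Return '' to signal that there is nothing to print
        return self.payload or ''

print(EchoCommand("hello").execute())  # hello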
class Worker:
    def __init__(self, host: str, port: int, config: dict, tbb_path):
        """
        Stores the work queue URL and various configuration options
        """
        # Build the work queue URL
        self.work_url = "http://{}:{}".format(host, port)
        # Track how many pieces of work have been completed per type
        self.work_type_counts = {'normal': 0, 'tor': 0}
        # Create a requests session
        self.session = requests.Session()
        # Disable keepalive
        self.session.keep_alive = False
        # Get a logger
        self.logger = logging.getLogger()
        # Store the given config
        self.config = config
        self.tbb_path = tbb_path
        # Initialize members that will be created later
        self.client_id = None
        self.tcpdump = None
        self.proxy = None
        self.requester = None

    def __enter__(self):
        """
        Ensures the server is available,
        requests and stores a client ID from the server,
        and gets a connection to the tcpdump daemon

        :raises Exception: if the client ID request fails
        """
        # Send requests to the URLs service until the status
        # page returns a response
        waiting = True
        while waiting:
            try:
                self.logger.info("Attempting to contact work queue")
                self.session.get("{}/status".format(self.work_url))
                waiting = False
            except Exception:
                self.logger.info(
                    "Attempt to contact work queue failed. Retrying")

        # Request a client ID
        # TODO: look into renaming this "register"
        self.logger.info("Registering client with server")
        # TODO: work types as part of config
        response = self.session.post(
            "{}/client/add".format(self.work_url),
            json={'work_types': ['tor', 'normal']})
        # Parse the response as JSON
        response = response.json()
        # Extract the client ID from the response
        if response['success']:
            self.client_id = response['client_id']
        else:
            raise Exception(response['error'])

        # Start up a connection to the tcpdump daemon
        # TODO: parameterize socket path
        self.tcpdump = TcpDump('/tmp/tcpdump.socket')
        # Instantiate the proxy object
        self.proxy = Proxy(self.tbb_path, self.config["tor"])
        # Instantiate the requester object
        self.requester = Requester(self.config["firefox"],
                                   self.config["tor"]["port"])
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """
        Informs the server that the client has stopped

        :param exc_type:
        :param exc_value:
        :param traceback:
        """
        # If the program completed without error
        if exc_type is None:
            self.logger.info("Worker program finished without error")
        else:
            # Log the error
            self.logger.error("%s %s %s", exc_type, exc_value, traceback)

        # Indicate to the server that the client has stopped
        self.logger.info("Deregistering client from server")
        self.session.post(
            "{}/client/remove".format(self.work_url),
            json={'client_id': self.client_id})

        # Stop the tcpdump daemon
        self.tcpdump.shutdown()

    def request_work(self):
        """
        Requests a piece of work from the server
        """
        # Make a request to the server to get a URL to navigate to
        try:
            # Make a request for work
            response = self.session.post(
                "{}/work/get".format(self.work_url),
                json={'client_id': self.client_id})

            # 204 means no more URLs
            if response.status_code == 204:
                self.logger.info("No more URLs")
                return None

            # This will throw an exception if it fails, which is handled below
            work = response.json()
            return work
        except Exception as exc:
            self.logger.error("Failed to request work: %s", exc)
            return None

    def perform_work(self, work: dict):
        """
        Performs a piece of work given by the server

        :param work: work as received from the server
        """
        # Extract required variables from the work
        mode = work["work_type"]
        # Once the type is extracted, limit the scope of work
        work = work["work"]
        filename = work["filename"]
        url = "https://{}".format(work["url"])
        global_index = work["index"]

        # Increment the per-type work counter
        self.work_type_counts[mode] += 1

        # Variables set inside the try block
        error = None
        fatal = False

        # Store the starting timestamp (nanoseconds)
        start_time = int(time.time() * 1e9)

        try:
            # Start packet capture
            self.tcpdump.start(filename)
            # Start proxy
            self.proxy.start(mode)
            # Start requester
            self.requester.start(mode)

            # Perform the request in the requester
            self.logger.info(
                "Navigating to %s in %s mode (local: %d) (global: %d)",
                url, mode, self.work_type_counts[mode], global_index)
            self.requester.request(url)

            # Stop requester
            self.requester.stop()
            # Stop proxy
            self.proxy.stop()
            # Stop packet capture
            self.tcpdump.stop()
        except TcpDumpError as err:
            self.logger.error(str(err))
            error = err
            fatal = True
        except Exception as err:
            self.logger.error(str(err))
            error = err

        # Store the ending timestamp (nanoseconds)
        finish_time = int(time.time() * 1e9)

        # Create the report
        report = {
            'success': error is None,
            'work_type': mode,
            'work': work,
            'type_index': self.work_type_counts[mode],
            'start_time': start_time,
            'finish_time': finish_time,
            # This will be stripped
            'fatal': fatal
        }

        # Store the error if given
        if error is not None:
            report['error'] = str(error)

        # Return the report
        return report

    def send_report(self, report: dict):
        # Stringify the error, if present
        if 'error' in report:
            report['error'] = str(report['error'])

        # Send the report
        self.session.post("{}/work/report".format(self.work_url),
                          json=report)

        # FIXME: make a dummy request to the server to enforce the shutdown.
        # Allow this to fail.
        try:
            self.session.post("{}/status".format(self.work_url))
        except Exception:
            pass
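# A plausible driver loop for the Worker above (a sketch, not taken from the
# source): __enter__/__exit__ make Worker a context manager, request_work()
# returns None when the queue is empty, and perform_work() sets a 'fatal'
# flag in its report for unrecoverable errors such as TcpDumpError. The
# host, port, config and tbb_path values here are placeholders.
def main():
    config = {"tor": {"port": 9050}, "firefox": {}}
    with Worker("localhost", 8080, config, "/opt/tbb") as worker:
        while True:
            work = worker.request_work()
            if work is None:
                break  # queue empty or request failed
            report = worker.perform_work(work)
            worker.send_report(report)
            if report['fatal']:
                break  # unrecoverable error, stop working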
class Controller:
    '''
    Instantiates the app components and controls the REP
    (request-execute-print) loop

    :seqdiag_note Entry point of the business layer
    '''
    def __init__(self, printer, configMgr):
        if os.name == 'posix':
            FILE_PATH = '/sdcard/cryptopricer.ini'
        else:
            FILE_PATH = 'c:\\temp\\cryptopricer.ini'

        self.configMgr = configMgr
        self.priceRequester = PriceRequester()
        self.crypCompTranslator = CrypCompExchanges()
        self.processor = Processor(self.configMgr,
                                   self.priceRequester,
                                   self.crypCompTranslator)
        self.requester = Requester(self.configMgr)

        self.commandPrice = CommandPrice(self.processor, self.configMgr)
        self.commandCrypto = CommandCrypto(self.processor)
        self.requester.commandPrice = self.commandPrice
        self.requester.commandCrypto = self.commandCrypto

        self.commandQuit = CommandQuit(sys)
        self.requester.commandQuit = self.commandQuit

        self.commandError = CommandError(None)
        self.requester.commandError = self.commandError

        self.printer = printer

    def run(self):
        '''
        Used essentially by the command line version of CryptoPricer.

        :return: nothing
        '''
        while True:
            command = self.requester.request()
            result = command.execute()

            if result != '':
                strToPrint = self.printer.getPrintableData(result)
                print(strToPrint)

    def getPrintableResultForInput(self, inputStr):
        '''
        Returns the printable request result, the full request command
        without any command option, the full request command with any non
        save mode option, and the full request command with any specified
        save mode option (the variant which is stored in the command
        history list).

        :param inputStr:
        :seqdiag_return printResult, fullCommandStr, fullCommandStrWithOptions, fullCommandStrWithSaveModeOptions

        :return: 1/ printable request result
                 2/ full request command without any command option
                 3/ full request command with any non save command option
                 4/ full request command with any specified save mode option,
                    None if no save mode option is in effect

                 Ex: 1/ 0.1 ETH/36 USD on Bitfinex: 21/11/17 10:00 360
                     2/ eth usd 0 bitfinex
                     3/ None (value command with save mode in effect !)
                     4/ eth usd 0 bitfinex -vs0.1eth

                     1/ 0.1 ETH/36 USD on Bitfinex: 21/11/17 10:00 360
                     2/ eth usd 0 bitfinex
                     3/ eth usd 0 bitfinex -v0.1eth
                     4/ None (no value command save option in effect)

                     1/ ETH/USD on Bitfinex: 21/11/17 10:00 360
                     2/ eth usd 0 bitfinex
                     3/ None (no value command in effect)
                     4/ None (no value command save option in effect)
        '''
        command = self.requester.getCommand(inputStr)
        result = command.execute()

        if result != '':
            printResult = self.printer.getPrintableData(result)
            fullCommandStr, fullCommandStrWithOptions, fullCommandStrWithSaveModeOptions = \
                self.printer.getFullCommandString(result)

            return printResult, fullCommandStr, fullCommandStrWithOptions, fullCommandStrWithSaveModeOptions
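# Sketch of how a caller (e.g. a GUI front end) might consume
# getPrintableResultForInput() above. This is illustrative only: the
# handle_input helper and the history-selection rule are assumptions based
# on the docstring, which says the save-mode variant is what goes into the
# command history list and that entries 3/ and 4/ may be None.
def handle_input(controller, inputStr):
    printResult, fullCmd, fullCmdWithOptions, fullCmdWithSaveMode = \
        controller.getPrintableResultForInput(inputStr)
    print(printResult)
    # Prefer the save-mode variant for the history, then the option variant,
    # then the bare command
    historyEntry = fullCmdWithSaveMode or fullCmdWithOptions or fullCmd
    return historyEntry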
def discover(self):
    """ Grabs the list of bridges from meethue.com """
    if Requester.verifyconnection():
        self.bridges = Requester.request('www.meethue.com/api/nupnp', 'GET')
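# Usage sketch (hypothetical instance and field access, not from the
# source): the meethue.com N-UPnP service returns a JSON list of bridges
# found on the local network, so after discovery self.bridges is expected
# to hold entries shaped roughly like
# {"id": "001788fffe100491", "internalipaddress": "192.168.1.2"}
# (values here are made up).
# hue = Huuey()
# hue.discover()
# for bridge in hue.bridges:
#     print(bridge["internalipaddress"])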
class LinkFinder:
    error_with_link_flag = False
    error_with_link_detail = None

    def __init__(self, base_url, page_url, proxies, gather_titles,
                 search_text, timeout, delay):
        self.base_url = base_url
        self.page_url = page_url
        self.delay = delay  # Delay in seconds between each HTTP request
        self.urls = set()
        self.url_with_title = dict()
        self.gather_titles = gather_titles
        self.page_title = None
        self.search_text = search_text
        self.requester = Requester(page_url=page_url,
                                   host=parse.urlparse(self.page_url).netloc,
                                   proxies=proxies,
                                   timeout=timeout)

    def find_urls(self):
        response = self.requester.request()
        time.sleep(self.delay)

        if isinstance(response, tuple):
            # Some error occurred with the page and we did not get any
            # response content. Return it to the spider to handle.
            LinkFinder.error_with_link_flag = True
            LinkFinder.error_with_link_detail = response
        else:
            bs4_object = BeautifulSoup(response, 'html.parser')

            if self.search_text is not None:
                if self.search_text.lower() in bs4_object.find('body').text.lower():
                    print("Given search text was found in - ", self.page_url)

            self.page_title = bs4_object.find("title").text
            all_anchor_tags = bs4_object.find_all('a')

            for anchor_tag in all_anchor_tags:
                href = anchor_tag.get('href')

                if href != "" and href is not None:
                    # If the href value is a relative link, build the full
                    # link from the base URL; if it is already an absolute
                    # link, leave it unchanged. urljoin handles both cases
                    # automatically.
                    url = parse.urljoin(self.base_url, href)

                    if parse.urlparse(self.base_url).netloc not in parse.urlparse(url).netloc:
                        # Don't go outside the given site
                        continue

                    if "#" in href:
                        continue

                    self.urls.add(url)

    def get_urls(self):
        self.find_urls()

        if LinkFinder.error_with_link_flag:
            LinkFinder.error_with_link_flag = False
            return LinkFinder.error_with_link_detail
        else:
            to_return = dict()
            to_return["page_url"] = self.page_url
            to_return["page_title"] = self.page_title
            to_return["urls_in_page"] = self.urls
            return to_return
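# Sketch of how a spider might consume get_urls() above (illustrative; the
# constructor arguments are placeholder values): on failure the method
# returns the error tuple produced by the Requester, otherwise a dict with
# the page URL, page title and the set of same-site links found on the page.
finder = LinkFinder(base_url="https://example.com",
                    page_url="https://example.com/about",
                    proxies=None, gather_titles=True,
                    search_text=None, timeout=10, delay=1)
result = finder.get_urls()
if isinstance(result, tuple):
    print("Error fetching page:", result)
else:
    for url in result["urls_in_page"]:
        print(url)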