def nextMutation(self):
    """Advance the shared bisection cursor to the next mutation.

    ``self.factory.mutation`` is a two-element list ``[step_idx,
    mutation_idx]`` shared across connections; this method advances it
    in place. When every step has been exhausted, the bisection is
    declared failed and the connection is dropped.
    """
    log.debug("Moving onto next mutation")
    # [step_idx, mutation_idx]
    c_step_idx, c_mutation_idx = self.factory.mutation
    log.debug("[%s]: c_step_idx: %s | c_mutation_idx: %s" %
              (self.role, c_step_idx, c_mutation_idx))

    if c_step_idx >= (len(self.steps) - 1):
        log.err("No censorship fingerprint bisected.")
        # BUG FIX: log message previously read "Givinig up."
        log.err("Giving up.")
        self.transport.loseConnection()
        return

    # This means we have mutated all bytes in the step
    # we should proceed to mutating the next step.
    log.debug("steps: %s | %s" % (self.steps, self.steps[c_step_idx]))
    # NOTE: .values()[0] relies on Python 2 dict.values() returning a
    # list; assumes each step dict has a single payload entry — confirm.
    if c_mutation_idx >= (len(self.steps[c_step_idx].values()[0]) - 1):
        log.debug("Finished mutating step")
        # increase step
        self.factory.mutation[0] += 1
        # reset mutation idx
        self.factory.mutation[1] = 0
    else:
        log.debug("Mutating next byte in step")
        # increase mutation index
        self.factory.mutation[1] += 1
def createTables():
    """
    XXX this is to be refactored and only exists for experimentation.

    Iterate over every model declared in ``oonib.db.models.__all__`` and
    create its backing table via the transactor.
    """
    from oonib.db import models
    for model_name in models.__all__:
        try:
            # BUG FIX: the module is imported as `models`, not `m`;
            # `getattr(m, ...)` raised NameError for every model.
            model = getattr(models, model_name)
        except Exception as e:
            log.err("Error in db initting")
            log.err(e)
            # BUG FIX: without this, `model` was unbound (or stale from
            # the previous iteration) in the block below.
            continue
        try:
            log.debug("Creating %s" % model)
            yield tables.runCreateTable(model, transactor, database)
        except Exception as e:
            # Best-effort: table may already exist; log and move on.
            log.debug(str(e))
def get(self, inputID):
    """Serve the metadata of the input descriptor identified by inputID.

    Raises e.InputDescriptorNotFound when the descriptor file is absent
    or lacks a required field.
    """
    desc_filename = os.path.basename(inputID) + ".desc"
    desc_path = os.path.join(config.main.input_dir, desc_filename)
    try:
        desc_file = open(desc_path)
    except IOError:
        log.err("No Input Descriptor found for id %s" % inputID)
        raise e.InputDescriptorNotFound
    with desc_file:
        descriptor = yaml.safe_load(desc_file)

    response = {'id': inputID}
    for field in ('name', 'description', 'version', 'author', 'date'):
        try:
            response[field] = descriptor[field]
        except KeyError:
            log.err("Invalid Input Descriptor found for id %s" % inputID)
            raise e.InputDescriptorNotFound
    self.write(response)
def get(self, deckID):
    """Serve the deck descriptor identified by deckID.

    deckID is already pattern-validated by the routing layer, so no
    further sanitization is needed here. Raises e.MissingDeck when the
    descriptor file is absent and e.MissingDeckKeys when a required
    field is missing.
    """
    desc_name = os.path.basename(deckID + '.desc')
    desc_path = os.path.join(config.main.deck_dir, desc_name)
    try:
        with open(desc_path) as desc_file:
            deck = yaml.safe_load(desc_file)
            reply = {}
            for field in ('name', 'description', 'version', 'author', 'date'):
                reply[field] = deck[field]
            self.write(reply)
    except IOError:
        log.err("Deck %s missing" % deckID)
        raise e.MissingDeck
    except KeyError:
        log.err("Deck %s missing required keys!" % deckID)
        raise e.MissingDeckKeys
def get(self, inputID):
    """Serve one input descriptor as JSON, or a JSON error payload.

    Responds 404 when the descriptor file is missing and 500 when it
    cannot be parsed or lacks a required field.
    """
    desc_name = os.path.basename(inputID) + ".desc"
    desc_path = os.path.join(config.main.input_dir, desc_name)
    try:
        with open(desc_path) as desc_file:
            descriptor = yaml.safe_load(desc_file)
            reply = {}
            for field in ("name", "description", "version", "author", "date"):
                reply[field] = descriptor[field]
            reply["id"] = inputID
            self.write(reply)
    except IOError:
        log.err("No Input Descriptor found for id %s" % inputID)
        self.set_status(404)
        self.write({"error": "missing-input"})
    except Exception as exc:
        log.exception(exc)
        log.err("Invalid Input Descriptor found for id %s" % inputID)
        self.set_status(500)
        self.write({"error": "invalid-input-descriptor"})
def get(self, inputID):
    """Serve one input descriptor as JSON, or a JSON error payload.

    404 with {'error': 'missing-input'} when the file is absent; 500
    with {'error': 'invalid-input-descriptor'} on any parse/key failure.
    """
    descriptor_path = os.path.join(config.main.input_dir,
                                   os.path.basename(inputID) + ".desc")
    try:
        with open(descriptor_path) as fp:
            parsed = yaml.safe_load(fp)
            wanted = ['name', 'description', 'version', 'author', 'date']
            # KeyError from a missing field propagates to the broad
            # handler below, exactly like an explicit item-by-item copy.
            out = dict((k, parsed[k]) for k in wanted)
            out['id'] = inputID
            self.write(out)
    except IOError:
        log.err("No Input Descriptor found for id %s" % inputID)
        self.set_status(404)
        self.write({'error': 'missing-input'})
    except Exception as exc:
        log.exception(exc)
        log.err("Invalid Input Descriptor found for id %s" % inputID)
        self.set_status(500)
        self.write({'error': 'invalid-input-descriptor'})
def get(self, inputID):
    """Serve the input descriptor identified by inputID as JSON.

    Responds 404 with {'error': 'missing-input'} when the descriptor
    file does not exist, 500 with {'error': 'invalid-input-descriptor'}
    when a required field is missing, and the descriptor dict (plus its
    id) on success.
    """
    bn = os.path.basename(inputID) + ".desc"
    try:
        f = open(os.path.join(config.main.input_dir, bn))
    except IOError:
        log.err("No Input Descriptor found for id %s" % inputID)
        self.set_status(404)
        self.write({'error': 'missing-input'})
        return
    with f:
        inputDesc = yaml.safe_load(f)
    response = {'id': inputID}
    for k in ['name', 'description', 'version', 'author', 'date']:
        try:
            response[k] = inputDesc[k]
        except KeyError as e:
            # Narrowed from `except Exception` per the original XXX note:
            # a missing required key is the failure being handled here.
            log.exception(e)
            log.err("Invalid Input Descriptor found for id %s" % inputID)
            self.set_status(500)
            self.write({'error': 'invalid-input-descriptor'})
            return
    # BUG FIX: the success path previously fell off the end without
    # ever sending the response (cf. the sibling handlers).
    self.write(response)
def lineReceived(self, line):
    """Accumulate HTTP header lines, handling length limits, the request
    line, rfc2616 folding, and end-of-headers detection."""
    too_long = (len(self.__header) + len(line)) >= self.maxHeaderLineLength
    if too_long and not self.__first_line:
        log.err("Maximum header length reached.")
        return self.transport.loseConnection()

    if self.__first_line:
        # The very first line is the request line, not a header.
        self.requestLine = line
        self.__first_line = 0
        return
    if line == '':
        # Blank line: end of the header section.
        if self.__header:
            self.headerReceived(self.__header)
        self.__header = ''
        self.allHeadersReceived()
        self.setRawMode()
        return
    if line[0] in ' \t':
        # This is to support header field value folding over multiple lines
        # as specified by rfc2616.
        self.__header += '\n' + line
        return
    # A new header field begins; flush any pending one first.
    if self.__header:
        self.headerReceived(self.__header)
    self.__header = line
def lineReceived(self, line):
    """Feed one received line into the HTTP header state machine:
    request line first, then headers (with rfc2616 folding) until a
    blank line switches the protocol to raw mode."""
    if not self.__first_line and \
            (len(self.__header) + len(line)) >= self.maxHeaderLineLength:
        log.err("Maximum header length reached.")
        return self.transport.loseConnection()

    if self.__first_line:
        self.__first_line = 0
        self.requestLine = line
    elif line == '':
        # End of headers: flush the pending field and hand off.
        if self.__header:
            self.headerReceived(self.__header)
        self.__header = ''
        self.allHeadersReceived()
        self.setRawMode()
    elif line.startswith((' ', '\t')):
        # This is to support header field value folding over multiple lines
        # as specified by rfc2616.
        self.__header = self.__header + '\n' + line
    else:
        if self.__header:
            self.headerReceived(self.__header)
        self.__header = line
def headerReceived(self, line):
    """Parse one complete header line into a (name, value) pair and
    enforce the maximum-header-count limit.

    A line with no ':' separator is logged and discarded rather than
    aborting the connection.
    """
    try:
        header, data = line.split(':', 1)
    except ValueError:
        # No ':' present: split yields a single element and the tuple
        # unpacking fails. Narrowed from a bare `except:`, which also
        # swallowed KeyboardInterrupt/SystemExit.
        log.err("Got malformed HTTP Header request field")
        log.err("%s" % line)
    else:
        self.headers.append((header, data.strip()))
    if len(self.headers) >= self.maxHeaders:
        log.err("Maximum number of headers received.")
        self.closeConnection()
def txSetupFailed(self, failure):
    """Log that the transmission setup failed, with its traceback.

    :param failure: the error that aborted setup — presumably a Twisted
        Failure delivered via an errback; verify against the caller.
    """
    log.err("Setup failed")
    log.exception(failure)
def http_request(self, url, http_request_headers, include_http_responses=False):
    """Fetch ``url`` with the given headers and return a page-info dict
    (body_length, status_code, headers, title, failure, responses),
    serving and populating a cache keyed on url+headers.

    Twisted inlineCallbacks-style generator (yields Deferreds, returns
    via ``defer.returnValue``). Unless ``include_http_responses`` is
    True, the raw 'responses' entry is stripped from what is returned —
    but it is always stored in the cache.
    """
    # Cache key: URL plus the serialized request headers.
    key = url + json.dumps(http_request_headers)
    cached_value = yield self.lookup('http_request', key)
    if cached_value is not None:
        if include_http_responses is not True:
            cached_value.pop('responses', None)
        defer.returnValue(cached_value)

    # Defaults indicate "no response obtained yet".
    page_info = {
        'body_length': -1,
        'status_code': -1,
        'headers': {},
        'failure': None
    }

    # gzip-decoding agent that follows redirects but refuses redirects
    # into private address space (ignorePrivateRedirects=True).
    agent = ContentDecoderAgent(
        FixedRedirectAgent(TrueHeadersAgent(reactor),
                           ignorePrivateRedirects=True),
        [('gzip', GzipDecoder)]
    )
    try:
        retries = 0
        while True:
            try:
                response = yield agent.request('GET', url,
                                               TrueHeaders(http_request_headers))
                headers = {}
                for name, value in response.headers.getAllRawHeaders():
                    # Keep only the first value of each header; decode
                    # leniently (Python 2 `unicode`).
                    headers[name] = unicode(value[0], errors='ignore')
                body_length = -1
                body = None
                try:
                    body = yield readBody(response)
                    body_length = len(body)
                except PartialDownloadError as pde:
                    # A truncated body is still useful: keep what we got.
                    if pde.response:
                        body_length = len(pde.response)
                        body = pde.response
                page_info['body_length'] = body_length
                page_info['status_code'] = response.code
                page_info['headers'] = headers
                page_info['title'] = extractTitle(body)
                response.body = body
                page_info['responses'] = encodeResponses(response)
                break
            except:
                # Retry any request failure up to self.http_retries times;
                # after that, re-raise so the outer handlers classify it.
                if retries > self.http_retries:
                    raise
                retries += 1
    except DNSLookupError:
        page_info['failure'] = 'dns_lookup_error'
    except TimeoutError:
        page_info['failure'] = 'generic_timeout_error'
    except ConnectionRefusedError:
        page_info['failure'] = 'connection_refused_error'
    except ConnectError:
        page_info['failure'] = 'connect_error'
    except Exception as exc:
        # XXX map more failures
        page_info['failure'] = 'unknown_error'
        log.err("Unknown error occurred")
        log.exception(exc)

    # Cache the full record (including 'responses'), then strip the raw
    # responses from the returned copy unless explicitly requested.
    yield self.cache_value('http_request', key, page_info)
    if include_http_responses is not True:
        page_info.pop('responses', None)
    defer.returnValue(page_info)