def _tor_startup_failure(self, failure):
    log.err("Failed to start tor")
    log.exception(failure)
    self._reset_tor_state()
    self.notify(DirectorEvent("error", "Failed to start Tor"))
    return failure
def getNetTests(self):
    nettests = {}

    def is_nettest(filename):
        return not filename == '__init__.py' and filename.endswith('.py')

    for category in self.categories:
        dirname = os.path.join(config.nettest_directory, category)
        # print path to all filenames.
        for filename in os.listdir(dirname):
            if is_nettest(filename):
                net_test_file = os.path.join(dirname, filename)
                try:
                    nettest = getNetTestInformation(net_test_file)
                except:
                    log.err("Error processing %s" % filename)
                    continue
                nettest['category'] = category.replace('/', '')

                if nettest['id'] in nettests:
                    log.err("Found two tests with the same name %s, %s" %
                            (net_test_file,
                             nettests[nettest['id']]['path']))
                else:
                    category = dirname.replace(config.nettest_directory, '')
                    nettests[nettest['id']] = nettest

    return nettests
def _setUp(self):
    log.debug("Setting up HTTPTest")
    try:
        import OpenSSL
    except:
        log.err("Warning! pyOpenSSL is not installed. https websites will "
                "not work")

    self.control_agent = Agent(reactor, sockshost="127.0.0.1",
                               socksport=config.advanced.tor_socksport)

    sockshost, socksport = (None, None)
    if self.localOptions['socksproxy']:
        self.report['socksproxy'] = self.localOptions['socksproxy']
        sockshost, socksport = self.localOptions['socksproxy'].split(':')
        socksport = int(socksport)

    self.agent = Agent(reactor, sockshost=sockshost,
                       socksport=socksport)

    if self.followRedirects:
        try:
            from twisted.web.client import RedirectAgent
            self.control_agent = RedirectAgent(self.control_agent)
            self.agent = RedirectAgent(self.agent)
        except:
            log.err("Warning! You are running an old version of twisted "
                    "(<= 10.1). I will not be able to follow redirects. "
                    "This may make the testing less precise.")
            self.report['errors'].append("Could not import RedirectAgent")

    self.processInputs()
    log.debug("Finished test setup")
def notify(self, event):
    for handler in self._subscribers:
        try:
            handler(event)
        except Exception as exc:
            log.err("Failed to run handler")
            log.exception(exc)
def errback(failure):
    failure.trap(ConnectionRefusedError, SOCKSError)
    if type(failure.value) is ConnectionRefusedError:
        log.err("Connection refused. The backend may be down")
    else:
        log.err("SOCKS error. The SOCKS proxy may be down")
    self.report["failure"] = str(failure.value)
def _cbResponse(self, response, request, headers_processor, body_processor):
    if not response:
        log.err("Got no response")
        return
    else:
        log.debug("Got response %s" % response)

    if str(response.code).startswith('3'):
        self.processRedirect(response.headers.getRawHeaders('Location')[0])

    # [!] We are passing to the headers_processor the headers dict and
    # not the Headers() object
    response_headers_dict = list(response.headers.getAllRawHeaders())
    if headers_processor:
        headers_processor(response_headers_dict)
    else:
        self.processResponseHeaders(response_headers_dict)

    finished = defer.Deferred()
    response.deliverBody(BodyReceiver(finished))
    finished.addCallback(self._processResponseBody, request,
                         response, body_processor)
    return finished
def loadDeck(self, deckFile, global_options={}):
    with open(deckFile) as f:
        self.id = sha256(f.read()).hexdigest()
        f.seek(0)
        test_deck = yaml.safe_load(f)

    for test in test_deck:
        try:
            nettest_path = nettest_to_path(test['options']['test_file'])
        except e.NetTestNotFound:
            log.err("Could not find %s" % test['options']['test_file'])
            log.msg("Skipping...")
            continue

        annotations = test['options'].get('annotations', {})
        if global_options.get('annotations') is not None:
            annotations = global_options["annotations"]

        collector_address = test['options'].get('collector', None)
        if global_options.get('collector') is not None:
            collector_address = global_options['collector']

        net_test_loader = NetTestLoader(test['options']['subargs'],
                                        annotations=annotations,
                                        test_file=nettest_path)
        if collector_address is not None:
            net_test_loader.collector = CollectorClient(
                collector_address
            )
        if test['options'].get('bouncer', None) is not None:
            self.bouncer = self._BouncerClient(test['options']['bouncer'])
            if self.bouncer.backend_type == "onion":
                self.requiresTor = True
        self.insert(net_test_loader)
def getReachableTestHelper(self, test_helper_name, test_helper_address,
                           test_helper_alternate):
    # For the moment we look for alternate addresses only of
    # web_connectivity test helpers.
    if test_helper_name == 'web-connectivity':
        for web_connectivity_settings in self.sortAddressesByPriority(
                test_helper_address, test_helper_alternate):
            web_connectivity_test_helper = WebConnectivityClient(
                settings=web_connectivity_settings)

            if not web_connectivity_test_helper.isSupported():
                log.err("Unsupported %s web_connectivity test_helper "
                        "%s" % (
                            web_connectivity_settings['type'],
                            web_connectivity_settings['address']
                        ))
                continue

            reachable = yield web_connectivity_test_helper.isReachable()
            if not reachable:
                log.err("Unreachable %s web_connectivity test helper %s" % (
                    web_connectivity_settings['type'],
                    web_connectivity_settings['address']
                ))
                continue

            defer.returnValue(web_connectivity_settings)
        raise e.NoReachableTestHelpers
    else:
        defer.returnValue(test_helper_address.encode('ascii'))
def IPToLocation(ipaddr):
    from ooni.settings import config

    country_file = config.get_data_file_path('GeoIP/GeoIP.dat')
    asn_file = config.get_data_file_path('GeoIP/GeoIPASNum.dat')

    location = {'city': None, 'countrycode': 'ZZ', 'asn': 'AS0'}
    if not asn_file or not country_file:
        log.err("Could not find GeoIP data file in data directories. "
                "Try running ooniresources or "
                "edit your ooniprobe.conf")
        return location

    country_dat = GeoIP(country_file)
    asn_dat = GeoIP(asn_file)

    country_code = country_dat.country_code_by_addr(ipaddr)
    if country_code is not None:
        location['countrycode'] = country_code

    asn = asn_dat.org_by_addr(ipaddr)
    if asn is not None:
        location['asn'] = asn.split(' ')[0]

    return location
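# A minimal usage sketch for the lookup above. It assumes the GeoIP .dat
# files are installed and that IPToLocation is importable from ooni.geoip;
# the module path and the sample address are illustrative assumptions, not
# taken from the snippet above.
from ooni.geoip import IPToLocation

location = IPToLocation('8.8.8.8')
print location['countrycode'], location['asn']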
def runWithDirector():
    """
    Instance the director, parse command line options and start an ooniprobe
    test!
    """
    global_options = parseOptions()
    log.start(global_options['logfile'])

    # contains (test_cases, options, cmd_line_options)
    test_list = []

    if global_options['testdeck']:
        test_deck = yaml.safe_load(open(global_options['testdeck']))
        for test in test_deck:
            test_list.append(NetTestLoader(test['options']))
    else:
        log.debug("No test deck detected")
        test_list.append(NetTestLoader(global_options))

    # check each test's usageOptions
    for net_test_loader in test_list:
        try:
            net_test_loader.checkOptions()
        except MissingRequiredOption, option_name:
            log.err('Missing required option: "%s"' % option_name)
            print net_test_loader.usageOptions().getUsage()
            sys.exit(2)
        except usage.UsageError, e:
            log.err(e)
            print net_test_loader.usageOptions().getUsage()
            sys.exit(2)
def post_director_start(_):
    for net_test_loader in test_list:
        yaml_reporter = YAMLReporter(net_test_loader.testDetails)
        reporters = [yaml_reporter]

        if global_options['collector']:
            try:
                oonib_reporter = OONIBReporter(net_test_loader.testDetails,
                                               global_options['collector'])
                reporters.append(oonib_reporter)
            except InvalidOONIBCollectorAddress:
                log.err("Invalid format for oonib collector address.")
                log.msg("Should be in the format "
                        "http://<collector_address>:<port>")
                log.msg("for example: ooniprobe -c "
                        "httpo://nkvphnp3p6agi5qq.onion")
                sys.exit(1)

        # Select one of the baked-in reporters unless the user has
        # requested otherwise
        if not global_options['no-default-reporter']:
            with open('collector') as f:
                reporter_url = random.choice(f.readlines())
                reporter_url = reporter_url.split('#')[0].strip()
                oonib_reporter = OONIBReporter(net_test_loader.testDetails,
                                               reporter_url)
                reporters.append(oonib_reporter)

        log.debug("adding callback for startNetTest")
        d.addCallback(director.startNetTest,
                      net_test_loader, reporters)
    d.addCallback(shutdown)
def runTestCases(test_cases, options, cmd_line_options):
    log.debug("Running %s" % test_cases)
    log.debug("Options %s" % options)
    log.debug("cmd_line_options %s" % dict(cmd_line_options))

    test_inputs = options['inputs']

    oonib_reporter = OONIBReporter(cmd_line_options)
    yaml_reporter = YAMLReporter(cmd_line_options)

    if cmd_line_options['collector']:
        log.msg("Using remote collector, please be patient while we "
                "create the report.")
        try:
            yield oonib_reporter.createReport(options)
        except OONIBReportError:
            log.err("Error in creating new report")
            log.msg("We will only create reports to a file")
            oonib_reporter = None
    else:
        oonib_reporter = None

    yield yaml_reporter.createReport(options)
    log.msg("Reporting to file %s" % yaml_reporter._stream.name)

    try:
        input_unit_factory = InputUnitFactory(test_inputs)
        input_unit_factory.inputUnitSize = int(cmd_line_options['parallelism'])
    except Exception, e:
        log.exception(e)
def writeReportEntry(self, entry):
    log.debug("Writing report with OONIB reporter")
    content = '---\n'
    content += safe_dump(entry)
    content += '...\n'

    url = self.collectorAddress + '/report'

    request = {'report_id': self.reportID,
               'content': content}

    log.debug("Updating report with id %s (%s)" % (self.reportID, url))
    request_json = json.dumps(request)
    log.debug("Sending %s" % request_json)

    bodyProducer = StringProducer(json.dumps(request))

    try:
        response = yield self.agent.request("PUT", url,
                                            bodyProducer=bodyProducer)
    except:
        # XXX we must trap this in the runner and make sure to report the
        # data later.
        log.err("Error in writing report entry")
        raise errors.OONIBReportUpdateError
def run():
    options = Options()
    try:
        options.parseOptions()
    except usage.UsageError as error_message:
        print "%s: %s" % (sys.argv[0], error_message)
        print "%s: Try --help for usage details." % (sys.argv[0])
        sys.exit(1)

    if not any(options.values()):
        print("%s: no command specified" % sys.argv[0])
        print options
        sys.exit(1)

    if options['update-inputs']:
        print "Downloading inputs"
        try:
            yield update.download_inputs()
        except Exception as exc:
            log.err("failed to download input files")
            log.exception(exc)

    if options['update-geoip']:
        print "Downloading geoip files"
        try:
            yield update.download_geoip()
        except Exception as exc:
            log.err("failed to download geoip files")
            log.exception(exc)
def ebRequestWriting(err):
    if self._state == "TRANSMITTING":
        self._state = "GENERATION_FAILED"
        self.transport.loseConnection()
        self._finishedRequest.errback(
            Failure(RequestGenerationFailed([err])))
    else:
        log.err(err, "Error writing request, but not in valid state "
                     "to finalize request: %s" % self._state)
def getX509Name(certificate, get_components=False):
    """Get the DER-encoded form of the Name fields of an X509 certificate.

    @param certificate: A :class:`OpenSSL.crypto.X509Name` object.
    @param get_components: A boolean. If True, returns a list of tuples of
        the (name, value)s of each Name field in the :param:`certificate`.
        If False, returns the DER encoded form of the Name fields of the
        :param:`certificate`.
    """
    x509_name = None

    try:
        assert isinstance(certificate, crypto.X509Name), \
            "getX509Name takes OpenSSL.crypto.X509Name as first argument!"
        x509_name = crypto.X509Name(certificate)
    except AssertionError as ae:
        log.err(ae)
    except Exception as exc:
        log.exception(exc)

    if x509_name is not None:
        if not get_components:
            return x509_name.der()
        else:
            return x509_name.get_components()
    else:
        log.debug("getX509Name: got None for ivar x509_name")
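# A brief, hedged usage sketch for getX509Name, assuming pyOpenSSL is
# installed and a PEM certificate is available on disk; the file name is
# illustrative only.
from OpenSSL import crypto

with open('server.pem') as pem:
    cert = crypto.load_certificate(crypto.FILETYPE_PEM, pem.read())

# DER-encoded subject Name, or its (name, value) components:
subject_der = getX509Name(cert.get_subject())
subject_fields = getX509Name(cert.get_subject(), get_components=True)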
def loadResumeFile():
    """
    Sets the singleton stateDict object to the content of the resume file.
    If the file is empty then it will create an empty one.

    Raises:

        :class:ooni.runner.InvalidResumeFile if the resume file is not valid

    """
    if not config.stateDict:
        try:
            with open(config.resume_filename) as f:
                config.stateDict = yaml.safe_load(f)
        except:
            log.err("Error loading YAML file")
            raise InvalidResumeFile

    if not config.stateDict:
        with open(config.resume_filename, "w+") as f:
            yaml.safe_dump(dict(), f)
        config.stateDict = dict()

    elif isinstance(config.stateDict, dict):
        return
    else:
        log.err("The resume file is of the wrong format")
        raise InvalidResumeFile
def _setup_failed(self, failure):
    self.tor_output.seek(0)
    map(log.debug, self.tor_output.readlines())
    self.tor_output.seek(0)

    if len(self.retry_with) == 0:
        self.started.errback(errors.UnableToStartTor())
        return

    while len(self.retry_with) > 0:
        self._reset_tor_config()
        self.tor_config.UseBridges = 1
        transport = self.retry_with.pop(0)
        log.msg("Failed to start Tor. Retrying with {0}".format(transport))

        try:
            bridge_lines = getattr(
                constants, '{0}_BRIDGES'.format(transport).upper())
        except AttributeError:
            continue

        try:
            self.tor_config.ClientTransportPlugin = \
                get_client_transport(transport)
        except UninstalledTransport:
            log.err("Pluggable transport {0} is not installed".format(
                transport))
            continue
        except UnrecognizedTransport:
            log.err("Unrecognized transport type")
            continue

        self.tor_config.Bridge = bridge_lines
        self.launch()
        break
def gotError(failure):
    log.err("Failed to perform lookup")
    log.exception(failure)
    failure.trap(gaierror, TimeoutError)
    DNSTest.addToReport(self, query, resolver=dns_server,
                        query_type='PTR', failure=failure)
    return None
def setupGlobalOptions(logging, start_tor, check_incoherences):
    global_options = parseOptions()

    config.global_options = global_options
    config.set_paths()
    config.initialize_ooni_home()
    try:
        config.read_config_file(check_incoherences=check_incoherences)
    except errors.ConfigFileIncoherent:
        sys.exit(6)

    if global_options['verbose']:
        config.advanced.debug = True

    if not start_tor:
        config.advanced.start_tor = False

    if logging:
        log.start(global_options['logfile'])

    if config.privacy.includepcap or global_options['pcapfile']:
        from ooni.utils.net import hasRawSocketPermission
        if hasRawSocketPermission():
            from ooni.utils.txscapy import ScapyFactory
            config.scapyFactory = ScapyFactory(config.advanced.interface)
        else:
            log.err("Insufficient Privileges to capture packets."
                    " See ooniprobe.conf privacy.includepcap")
            sys.exit(2)

    global_options['check_incoherences'] = check_incoherences
    return global_options
def writeReportEntry(self, entry):
    log.debug("Writing report with OONIB reporter")
    url = self.collectorAddress + '/report/' + self.reportID

    if "json" in self.supportedFormats:
        serialisation_format = 'json'
    else:
        serialisation_format = 'yaml'

    request = {
        'format': serialisation_format,
        'content': self.serializeEntry(entry, serialisation_format)
    }

    log.debug("Updating report with id %s (%s)" % (self.reportID, url))
    request_json = json.dumps(request)
    log.debug("Sending %s" % request_json)

    bodyProducer = StringProducer(request_json)

    try:
        yield self.agent.request("POST", str(url),
                                 bodyProducer=bodyProducer)
    except Exception as exc:
        log.err("Error in writing report entry")
        log.exception(exc)
        raise errors.OONIBReportUpdateError
def _test_http_request(self):
    http_blocked = True
    for dc_id, address in TELEGRAM_DCS:
        if http_blocked == False:
            break
        for port in [80, 443]:
            url = 'http://{}:{}'.format(address, port)
            try:
                response = yield self.doRequest(url, 'POST')
            except Exception as exc:
                failure_string = failureToString(defer.failure.Failure(exc))
                log.err("Failed to connect to {}: {}".format(url,
                                                             failure_string))
                continue
            log.debug("Got back status code {}".format(response.code))
            log.debug("{}".format(response.body))
            if response.code == 501:
                http_blocked = False
                break

    if http_blocked == True:
        self.report['telegram_http_blocking'] = True
        log.msg("Telegram servers are BLOCKED based on HTTP")
    else:
        self.report['telegram_http_blocking'] = False
        log.msg("Telegram servers are not blocked based on HTTP")
def setUp(self):
    log.debug("Setting up HTTPTest")
    try:
        import OpenSSL
    except:
        log.err("Warning! pyOpenSSL is not installed. https websites will "
                "not work")

    from twisted.web.client import Agent
    from twisted.internet import reactor

    self.agent = Agent(reactor)

    if self.followRedirects:
        try:
            from twisted.web.client import RedirectAgent
            self.agent = RedirectAgent(self.agent)
        except:
            log.err("Warning! You are running an old version of twisted "
                    "(<= 10.1). I will not be able to follow redirects. "
                    "This may make the testing less precise.")
            self.report['errors'].append("Could not import RedirectAgent")

    self.request = {}
    self.response = {}
    log.debug("Finished test setup")
def IPToLocation(ipaddr):
    city_file = os.path.join(config.advanced.geoip_data_dir,
                             'GeoLiteCity.dat')
    country_file = os.path.join(config.advanced.geoip_data_dir,
                                'GeoIP.dat')
    asn_file = os.path.join(config.advanced.geoip_data_dir,
                            'GeoIPASNum.dat')

    location = {'city': None, 'countrycode': None, 'asn': None}
    try:
        city_dat = GeoIP(city_file)
        try:
            location['city'] = city_dat.record_by_addr(ipaddr)['city']
        except TypeError:
            location['city'] = None

        country_dat = GeoIP(country_file)
        location['countrycode'] = country_dat.country_code_by_addr(ipaddr)
        if not location['countrycode']:
            location['countrycode'] = 'ZZ'

        asn_dat = GeoIP(asn_file)
        try:
            location['asn'] = asn_dat.org_by_addr(ipaddr).split(' ')[0]
        except AttributeError:
            location['asn'] = 'AS0'

    except IOError:
        log.err("Could not find GeoIP data files. Go into %s "
                "and run make geoip or change the geoip_data_dir "
                "in the config file" % config.advanced.geoip_data_dir)
        raise GeoIPDataFilesNotFound

    return location
def set_paths(self, ooni_home=None):
    if ooni_home:
        self._custom_home = ooni_home

    self.nettest_directory = abspath(
        os.path.join(__file__, '..', 'nettests'))

    self.ooni_home = os.path.join(
        expanduser('~' + self.current_user), '.ooni')
    if self._custom_home:
        self.ooni_home = self._custom_home
    self.inputs_directory = os.path.join(self.ooni_home, 'inputs')
    self.decks_directory = os.path.join(self.ooni_home, 'decks')
    self.reports_directory = os.path.join(self.ooni_home, 'reports')
    self.report_log_file = os.path.join(self.ooni_home, 'reporting.yml')
    self.resources_directory = os.path.join(self.data_directory,
                                            "resources")

    if self.global_options.get('configfile'):
        config_file = self.global_options['configfile']
        self.config_file = expanduser(config_file)
    else:
        self.config_file = os.path.join(self.ooni_home, 'ooniprobe.conf')

    if 'logfile' in self.basic:
        self.basic.logfile = expanduser(
            self.basic.logfile.replace('~', '~' + self.current_user))

    if not os.path.exists(self.data_directory):
        log.err("Data directory %s does not exist" % self.data_directory)
        log.err("Edit data_dir inside of %s" % self.config_file)
def remove_public_relays(state, bridges):
    """
    Remove bridges from our bridge list which are also listed as public
    relays. This must be called after Tor has fully bootstrapped and we have
    a :class:`ooni.lib.txtorcon.TorState` with the
    :attr:`ooni.lib.txtorcon.TorState.routers` attribute assigned.

    XXX Does state.router.values() have all of the relays in the consensus,
    or just the ones we know about so far?

    XXX FIXME: There is a problem in that Tor needs a Bridge line to already
    be configured in order to bootstrap. However, after bootstrapping, we
    grab the microdescriptors of all the relays and check if any of our
    bridges are listed as public relays. Because of this, the first bridge
    does not get checked for being a relay.
    """
    IPs = map(lambda addr: addr.split(':', 1)[0], bridges['all'])
    both = set(state.routers.values()).intersection(IPs)
    if len(both) > 0:
        try:
            updated = map(lambda node: remove_node_from_list(node), both)
            log.debug("Bridges in both: %s" % both)
            log.debug("Updated = %s" % updated)
            #if not updated:
            #    defer.returnValue(state)
            #else:
            #    defer.returnValue(state)
            return state
        except Exception, e:
            log.err("Removing public relays %s from bridge list failed:\n%s"
                    % (both, e))
def dns_resolve(self, hostname, nameserver=None):
    """
    Resolves hostname(s) though nameserver to corresponding address(es).
    hostname may be either a single hostname string, or a list of strings.
    If nameserver is not given, use local DNS resolver, and if that fails
    try using 8.8.8.8.
    """
    if isinstance(hostname, str):
        hostname = [hostname]

    response = []
    answer = None

    for hn in hostname:
        try:
            answer = yield self.performALookup(hn)
            if not answer:
                answer = yield self.performALookup(hn, ('8.8.8.8', 53))
        except error.DNSNameError:
            log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
            response.append('NXDOMAIN')
        except Exception:
            log.err("DNS Resolution failed")
        finally:
            if not answer:
                defer.returnValue(response)
            for addr in answer:
                response.append(addr)
    defer.returnValue(response)
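# A hedged sketch of how dns_resolve might be driven from another
# inlineCallbacks method of the same test class, assuming dns_resolve is
# decorated with defer.inlineCallbacks as in the original template; the
# hostnames are illustrative.
@defer.inlineCallbacks
def resolve_targets(self):
    addresses = yield self.dns_resolve(['torproject.org', 'ooni.org'])
    log.msg("Resolved addresses: %s" % addresses)
    defer.returnValue(addresses)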
def _cbResponse(self, response, headers_processor, body_processor):
    log.debug("Got response %s" % response)
    if not response:
        self.report['response'] = None
        log.err("We got an empty response")
        return

    self.response['headers'] = list(response.headers.getAllRawHeaders())
    self.response['code'] = response.code
    self.response['length'] = response.length
    self.response['version'] = response.version

    if str(self.response['code']).startswith('3'):
        self.processRedirect(response.headers.getRawHeaders('Location')[0])

    if headers_processor:
        headers_processor(self.response['headers'])
    else:
        self.processResponseHeaders(self.response['headers'])

    finished = defer.Deferred()
    response.deliverBody(BodyReceiver(finished))
    finished.addCallback(self._processResponseBody, body_processor)
    return finished
def checkAllTasksDone(self):
    log.debug("Checking all tasks for completion %s == %s" %
              (self.doneTasks, self.tasks))
    if self.completedScheduling and self.doneTasks == self.tasks:
        if self.allTasksDone.called:
            log.err("allTasksDone was already called. "
                    "This is probably a bug.")
        else:
            self.allTasksDone.callback(self.doneTasks)
def nextMutation(self):
    log.debug("Moving onto next mutation")
    # [step_idx, mutation_idx]
    c_step_idx, c_mutation_idx = self.factory.mutation
    log.debug("[%s]: c_step_idx: %s | c_mutation_idx: %s" %
              (self.role, c_step_idx, c_mutation_idx))

    if c_step_idx >= (len(self.steps) - 1):
        log.err("No censorship fingerprint bisected.")
        log.err("Giving up.")
        self.transport.loseConnection()
        return

    # This means we have mutated all bytes in the step
    # we should proceed to mutating the next step.
    log.debug("steps: %s | %s" % (self.steps, self.steps[c_step_idx]))
    if c_mutation_idx >= (len(self.steps[c_step_idx].values()[0]) - 1):
        log.debug("Finished mutating step")
        # increase step
        self.factory.mutation[0] += 1
        # reset mutation idx
        self.factory.mutation[1] = 0
    else:
        log.debug("Mutating next byte in step")
        # increase mutation index
        self.factory.mutation[1] += 1
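# A toy, standalone sketch of advancing a [step_idx, mutation_idx] cursor of
# the kind used above, with made-up two-step data; it only illustrates the
# stepping order, not the give-up condition of nextMutation.
steps = [{'hello': '\x00\x01\x02'}, {'world': '\x00\x01'}]
mutation = [0, 0]  # [step_idx, mutation_idx]
order = []
while mutation[0] < len(steps):
    order.append(tuple(mutation))
    if mutation[1] >= len(steps[mutation[0]].values()[0]) - 1:
        mutation[0] += 1  # finished this step, move to the next one
        mutation[1] = 0   # reset the byte index
    else:
        mutation[1] += 1  # mutate the next byte in the same step
print order  # [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1)]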
def runTestCases(test_cases, options, cmd_line_options, yamloo_filename):
    try:
        assert len(options) != 0, "Length of options is zero!"
    except AssertionError, ae:
        test_inputs = []
        log.err(ae)
def err(err):
    log.err("Failed to get descriptor for input %s" % input_hash)
    log.exception(err)
def createReport(self):
    """
    Creates a report on the oonib collector.
    """
    # XXX we should probably be setting this inside of the constructor,
    # however config.tor.socks_port is not set until Tor is started and the
    # reporter is instantiated before Tor is started. We probably want to
    # do this with some deferred kung foo or instantiate the reporter after
    # tor is started.
    from ooni.utils.hacks import SOCKS5Agent
    from twisted.internet import reactor

    if self.collectorAddress.startswith('httpo://'):
        self.collectorAddress = \
            self.collectorAddress.replace('httpo://', 'http://')
        proxyEndpoint = TCP4ClientEndpoint(reactor, '127.0.0.1',
                                           config.tor.socks_port)
        self.agent = SOCKS5Agent(reactor, proxyEndpoint=proxyEndpoint)
    elif self.collectorAddress.startswith('https://'):
        # XXX add support for securely reporting to HTTPS collectors.
        log.err("HTTPS based collectors are currently not supported.")

    url = self.collectorAddress + '/report'

    content = '---\n'
    content += safe_dump(self.testDetails)
    content += '...\n'

    request = {
        'software_name': self.testDetails['software_name'],
        'software_version': self.testDetails['software_version'],
        'probe_asn': self.testDetails['probe_asn'],
        'test_name': self.testDetails['test_name'],
        'test_version': self.testDetails['test_version'],
        'input_hashes': self.testDetails['input_hashes'],
        # XXX there is a bunch of redundancy in the arguments getting sent
        # to the backend. This may need to get changed in the client and
        # the backend.
        'content': content
    }

    log.msg("Reporting %s" % url)
    request_json = json.dumps(request)
    log.debug("Sending %s" % request_json)

    bodyProducer = StringProducer(json.dumps(request))

    log.msg("Creating report with OONIB Reporter. Please be patient.")
    log.msg("This may take up to 1-2 minutes...")

    try:
        response = yield self.agent.request("POST", url,
                                            bodyProducer=bodyProducer)
    except ConnectionRefusedError:
        log.err("Connection to reporting backend failed "
                "(ConnectionRefusedError)")
        raise errors.OONIBReportCreationError
    except errors.HostUnreachable:
        log.err("Host is not reachable (HostUnreachable error)")
        raise errors.OONIBReportCreationError
    except Exception, e:
        log.err("Failed to connect to reporter backend")
        log.exception(e)
        raise errors.OONIBReportCreationError
from yaml.representer import SafeRepresenter
from yaml.emitter import Emitter
from yaml.serializer import Serializer
from yaml.resolver import Resolver

from twisted.python.util import untilConcludes
from twisted.internet import defer
from twisted.internet.error import ConnectionRefusedError
from twisted.python.failure import Failure
from twisted.internet.endpoints import TCP4ClientEndpoint

from ooni.utils import log
from ooni.tasks import Measurement

try:
    from scapy.packet import Packet
except ImportError:
    log.err("Scapy is not installed.")

    class Packet(object):
        pass

from ooni import errors
from ooni import otime
from ooni.utils import pushFilenameStack, generate_filename
from ooni.utils.net import BodyReceiver, StringProducer

from ooni.settings import config

from ooni.tasks import ReportEntry
def createReport(self):
    """
    Creates a report on the oonib collector.
    """
    # XXX we should probably be setting this inside of the constructor,
    # however config.tor.socks_port is not set until Tor is started and the
    # reporter is instantiated before Tor is started. We probably want to
    # do this with some deferred kung foo or instantiate the reporter after
    # tor is started.
    if self.collectorAddress.startswith('httpo://'):
        self.collectorAddress = \
            self.collectorAddress.replace('httpo://', 'http://')
        proxyEndpoint = TCP4ClientEndpoint(reactor, '127.0.0.1',
                                           config.tor.socks_port)
        self.agent = SOCKS5Agent(reactor, proxyEndpoint=proxyEndpoint)

    url = self.collectorAddress + '/report'

    request = {
        'software_name': self.testDetails['software_name'],
        'software_version': self.testDetails['software_version'],
        'probe_asn': self.testDetails['probe_asn'],
        'probe_cc': self.testDetails['probe_cc'],
        'test_name': self.testDetails['test_name'],
        'test_version': self.testDetails['test_version'],
        'test_start_time': self.testDetails['test_start_time'],
        'input_hashes': self.testDetails['input_hashes'],
        'data_format_version': self.testDetails['data_format_version'],
        'format': 'json'
    }

    # import values from the environment
    request.update([(k.lower(), v) for (k, v) in os.environ.iteritems()
                    if k.startswith('PROBE_')])

    log.msg("Reporting %s" % url)
    request_json = json.dumps(request)
    log.debug("Sending %s" % request_json)

    bodyProducer = StringProducer(request_json)

    log.msg("Creating report with OONIB Reporter. Please be patient.")
    log.msg("This may take up to 1-2 minutes...")

    try:
        response = yield self.agent.request("POST", url,
                                            bodyProducer=bodyProducer)
    except ConnectionRefusedError:
        log.err("Connection to reporting backend failed "
                "(ConnectionRefusedError)")
        raise errors.OONIBReportCreationError
    except errors.HostUnreachable:
        log.err("Host is not reachable (HostUnreachable error)")
        raise errors.OONIBReportCreationError
    except Exception, e:
        log.err("Failed to connect to reporter backend")
        log.exception(e)
        raise errors.OONIBReportCreationError
def test_a_lookup(self):
    """
    We perform an A lookup on the DNS test servers for the domains to be
    tested and an A lookup on the known good DNS server.

    We then compare the results from test_resolvers and that from
    control_resolver and see if they match up.
    If they match up then no censorship is happening (tampering: false).

    If they do not match, we do a reverse lookup (PTR) on the
    test_resolvers and the control resolver for every IP address we got
    back and check to see if any one of them matches the control ones.

    If they do, then we take note of the fact that censorship is probably
    not happening (tampering: reverse-match).

    If they do not match then censorship is probably going on
    (tampering: true).
    """
    log.msg("Doing the test lookups on %s" % self.input)
    hostname = self.input

    self.report['tampering'] = {}

    try:
        control_answers = yield self.performALookup(
            hostname, self.control_dns_server)
        if not control_answers:
            log.err("Got no response from control DNS server %s:%d, "
                    "perhaps the DNS resolver is down?"
                    % self.control_dns_server)
            self.report['tampering'][
                "%s:%d" % self.control_dns_server] = 'no_answer'
    except:
        self.report['tampering'][
            "%s:%d" % self.control_dns_server] = 'error'
        control_answers = None

    for test_resolver in self.test_resolvers:
        log.msg("Testing resolver: %s" % test_resolver)
        test_dns_server = (test_resolver, 53)

        try:
            experiment_answers = yield self.performALookup(
                hostname, test_dns_server)
        except Exception:
            log.err("Problem performing the DNS lookup")
            self.report['tampering'][test_resolver] = 'dns_lookup_error'
            continue

        if not experiment_answers:
            log.err("Got no response, perhaps the DNS resolver is down?")
            self.report['tampering'][test_resolver] = 'no_answer'
            continue
        else:
            log.debug("Got the following A lookup answers %s from %s" %
                      (experiment_answers, test_resolver))

        def lookup_details():
            """
            A closure useful for printing test details.
            """
            log.msg("test resolver: %s" % test_resolver)
            log.msg("experiment answers: %s" % experiment_answers)
            log.msg("control answers: %s" % control_answers)

        log.debug("Comparing %s with %s" %
                  (experiment_answers, control_answers))
        if not control_answers:
            log.msg("Skipping control resolver comparison")
            self.report['tampering'][test_resolver] = None

        elif set(experiment_answers) & set(control_answers):
            lookup_details()
            log.msg("tampering: false")
            self.report['tampering'][test_resolver] = False

        else:
            log.msg("Trying to do reverse lookup")
            experiment_reverse = yield self.performPTRLookup(
                experiment_answers[0], test_dns_server)
            control_reverse = yield self.performPTRLookup(
                control_answers[0], self.control_dns_server)

            if experiment_reverse == control_reverse:
                log.msg("Further testing has eliminated false positives")
                lookup_details()
                log.msg("tampering: reverse_match")
                self.report['tampering'][test_resolver] = 'reverse_match'
            else:
                log.msg("Reverse lookups do not match")
                lookup_details()
                log.msg("tampering: true")
                self.report['tampering'][test_resolver] = True
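# A hedged, standalone restatement of the comparison rule described in the
# docstring above; the resolver answers below are illustrative sample data.
def classify_tampering(experiment_answers, control_answers,
                       experiment_reverse=None, control_reverse=None):
    # Any overlap between experiment and control A records means no
    # tampering was observed.
    if set(experiment_answers) & set(control_answers):
        return False
    # Otherwise fall back to comparing the PTR lookups of the first answers.
    if experiment_reverse is not None and \
            experiment_reverse == control_reverse:
        return 'reverse_match'
    return True

print classify_tampering(['93.184.216.34'], ['93.184.216.34'])  # False
print classify_tampering(['10.10.34.36'], ['93.184.216.34'])    # True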
def run_vendor_tests(self, *a, **kw):
    """
    These are several vendor tests used to detect the presence of
    a captive portal. Each test compares HTTP status code and
    content to the control results and has its own User-Agent
    string, in order to emulate the test as it would occur on the
    device it was intended for. Vendor tests are defined in the
    format:
    [exp_url, ctrl_result, ctrl_code, ua, test_name]
    """
    vendor_tests = [['http://www.apple.com/library/test/success.html',
                     'Success',
                     '200',
                     'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
                     'Apple HTTP Captive Portal'],
                    ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
                     '428 Network Authentication Required',
                     '428',
                     'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
                     'W3 Captive Portal'],
                    ['http://www.msftncsi.com/ncsi.txt',
                     'Microsoft NCSI',
                     '200',
                     'Microsoft NCSI',
                     'MS HTTP Captive Portal']]

    cm = self.http_content_match_fuzzy_opt
    sm = self.http_status_code_match
    snm = self.http_status_code_no_match

    @defer.inlineCallbacks
    def compare_content(status_func, fuzzy, experiment_url, control_result,
                        control_code, headers, test_name):
        log.msg("")
        log.msg("Running the %s test..." % test_name)

        content_match, experiment_code, experiment_headers = \
            yield cm(experiment_url, control_result, headers, fuzzy)
        status_match = status_func(experiment_code, control_code)

        if status_match and content_match:
            log.msg("The %s test was unable to detect" % test_name)
            log.msg("a captive portal.")
            defer.returnValue(True)
        else:
            log.msg("The %s test shows that your network" % test_name)
            log.msg("is filtered.")
            defer.returnValue(False)

    result = {}
    for vt in vendor_tests:
        report = {}

        experiment_url = vt[0]
        control_result = vt[1]
        control_code = vt[2]
        headers = {'User-Agent': vt[3]}
        test_name = vt[4]

        args = (experiment_url, control_result, control_code, headers,
                test_name)

        if test_name == "MS HTTP Captive Portal":
            report['result'] = yield compare_content(sm, False, *args)

        elif test_name == "Apple HTTP Captive Portal":
            report['result'] = yield compare_content(sm, True, *args)

        elif test_name == "W3 Captive Portal":
            report['result'] = yield compare_content(snm, True, *args)

        else:
            log.err("Ooni is trying to run an undefined CP vendor test.")

        report['URL'] = experiment_url
        report['http_status_summary'] = control_result
        report['http_status_number'] = control_code
        report['User_Agent'] = vt[3]
        result[test_name] = report

    defer.returnValue(result)
def error():
    log.err("Could not find GeoIP data file in %s. "
            "Try running ooniresources --update-geoip or "
            "edit your ooniprobe.conf" % config.advanced.geoip_data_dir)
def test_psiphon(self):
    log.debug('PsiphonTest.test_psiphon')
    self.createCommand()
    if not os.path.exists(self.psiphonpath):
        log.err('psiphon path does not exist, is it installed?')
        self.report['psiphon_found'] = False
        log.debug("Adding %s to report" % self.report)

        # XXX: the original code written by juga0 read
        # > return defer.succeed(None)
        # but this caused `ooniprobe -ng` to hang forever, so I
        # rewrote the code to return a deferred and simulate calling
        # its callback method, to trigger an event.
        # -sbs
        reactor.callLater(0.0, self.bootstrapped.callback, None)
        return self.bootstrapped

    self.report['psiphon_found'] = True
    log.debug("Adding %s to report" % self.report)

    # Using pty to see output lines as soon as they get written in the
    # buffer, otherwise the test might not see lines until the buffer is
    # full with some block size and therefore the test would
    # terminate with error
    finished = self.run(self.command,
                        env=dict(PYTHONPATH=self.psiphonpath),
                        path=self.psiphonpath,
                        usePTY=1)

    # here psiphon command has been run, and if it finds the text
    # 'Press Ctrl-C to terminate' in handleRead it will write to the
    # report self.report['bootstrapped_success'] = True
    self.report['bootstrapped_success'] = False

    def callDoRequest(_):
        log.debug("PsiphonTest.callDoRequest: %r" % (_,))
        d = self.doRequest(self.url)

        def addSuccessToReport(res):
            log.debug("PsiphonTest.callDoRequest.addSuccessToReport")
            if res.body.startswith(self.localOptions['expected-body']):
                self.report['request_success'] = True
            else:
                self.report['request_success'] = False
            return res

        d.addCallback(addSuccessToReport)

        def addFailureToReport(res):
            log.debug("PsiphonTest.callDoRequest.addFailureToReport. "
                      "res=%r" % (res,))
            self.report['request_success'] = False
            return res

        d.addErrback(addFailureToReport)
        return d

    self.bootstrapped.addCallback(callDoRequest)

    def cleanup(_):
        log.debug('PsiphonTest:cleanup')
        try:
            self.processDirector.transport.signalProcess('INT')
        except ProcessExitedAlready:
            pass
        os.remove(self.command[1])
        return finished

    self.bootstrapped.addBoth(cleanup)
    return self.bootstrapped
def getNonLoopbackIfaces(platform_name=None):
    try:
        ifaces = getIfaces(platform_name)
    except UnsupportedPlatform, up:
        log.err(up)
class OONIBReporter(OReporter):

    def __init__(self, test_details, collector_address):
        self.collectorAddress = collector_address
        self.validateCollectorAddress()

        self.reportID = None
        self.supportedFormats = ["yaml"]

        if self.collectorAddress.startswith('https://'):
            # not sure if there's something else it needs. Seems to work.
            # Very difficult to get it to work with self-signed certs.
            self.agent = Agent(reactor)

        elif self.collectorAddress.startswith('http://'):
            log.msg("Warning using unencrypted collector")
            self.agent = Agent(reactor)

        OReporter.__init__(self, test_details)

    def validateCollectorAddress(self):
        """
        Will raise an :class:`ooni.errors.InvalidOONIBCollectorAddress`
        exception if the oonib collector address is not valid.
        """
        regexp = '^(http|https|httpo):\/\/[a-zA-Z0-9\-\.]+(:\d+)?$'
        if not re.match(regexp, self.collectorAddress):
            raise errors.InvalidOONIBCollectorAddress

    def serializeEntry(self, entry, serialisation_format="yaml"):
        if serialisation_format == "json":
            if isinstance(entry, Measurement):
                report_entry = {
                    'input': entry.testInstance.report.pop('input', None),
                    'id': str(uuid.uuid4()),
                    'test_start_time': entry.testInstance.report.pop(
                        'test_start_time', None),
                    'measurement_start_time': entry.testInstance.report.pop(
                        'measurement_start_time', None),
                    'test_runtime': entry.testInstance.report.pop(
                        'test_runtime', None),
                    'test_keys': entry.testInstance.report
                }
            elif isinstance(entry, dict):
                report_entry = {
                    'input': entry.pop('input', None),
                    'id': str(uuid.uuid4()),
                    'test_start_time': entry.pop('test_start_time', None),
                    'measurement_start_time': entry.pop(
                        'measurement_start_time', None),
                    'test_runtime': entry.pop('test_runtime', None),
                    'test_keys': entry
                }
            else:
                raise Exception("Failed to serialise entry")
            report_entry.update(self.testDetails)
            return report_entry
        else:
            content = '---\n'
            if isinstance(entry, Measurement):
                report_entry = entry.testInstance.report
            elif isinstance(entry, dict):
                report_entry = entry
            else:
                raise Exception("Failed to serialise entry")
            content += safe_dump(report_entry)
            content += '...\n'
            return content

    @defer.inlineCallbacks
    def writeReportEntry(self, entry):
        log.debug("Writing report with OONIB reporter")
        url = self.collectorAddress + '/report/' + self.reportID

        if "json" in self.supportedFormats:
            serialisation_format = 'json'
        else:
            serialisation_format = 'yaml'

        request = {
            'format': serialisation_format,
            'content': self.serializeEntry(entry, serialisation_format)
        }

        log.debug("Updating report with id %s (%s)" % (self.reportID, url))
        request_json = json.dumps(request)
        log.debug("Sending %s" % request_json)

        bodyProducer = StringProducer(request_json)

        try:
            yield self.agent.request("POST", str(url),
                                     bodyProducer=bodyProducer)
        except Exception as exc:
            log.err("Error in writing report entry")
            log.exception(exc)
            raise errors.OONIBReportUpdateError

    @defer.inlineCallbacks
    def createReport(self):
        """
        Creates a report on the oonib collector.
        """
        # XXX we should probably be setting this inside of the constructor,
        # however config.tor.socks_port is not set until Tor is started and
        # the reporter is instantiated before Tor is started. We probably
        # want to do this with some deferred kung foo or instantiate the
        # reporter after tor is started.
        if self.collectorAddress.startswith('httpo://'):
            self.collectorAddress = \
                self.collectorAddress.replace('httpo://', 'http://')
            proxyEndpoint = TCP4ClientEndpoint(reactor, '127.0.0.1',
                                               config.tor.socks_port)
            self.agent = SOCKS5Agent(reactor, proxyEndpoint=proxyEndpoint)

        url = self.collectorAddress + '/report'

        request = {
            'software_name': self.testDetails['software_name'],
            'software_version': self.testDetails['software_version'],
            'probe_asn': self.testDetails['probe_asn'],
            'probe_cc': self.testDetails['probe_cc'],
            'test_name': self.testDetails['test_name'],
            'test_version': self.testDetails['test_version'],
            'test_start_time': self.testDetails['test_start_time'],
            'input_hashes': self.testDetails['input_hashes'],
            'data_format_version': self.testDetails['data_format_version'],
            'format': 'json'
        }

        # import values from the environment
        request.update([(k.lower(), v) for (k, v) in os.environ.iteritems()
                        if k.startswith('PROBE_')])

        log.msg("Reporting %s" % url)
        request_json = json.dumps(request)
        log.debug("Sending %s" % request_json)

        bodyProducer = StringProducer(request_json)

        log.msg("Creating report with OONIB Reporter. Please be patient.")
        log.msg("This may take up to 1-2 minutes...")

        try:
            response = yield self.agent.request("POST", url,
                                                bodyProducer=bodyProducer)
        except ConnectionRefusedError:
            log.err("Connection to reporting backend failed "
                    "(ConnectionRefusedError)")
            raise errors.OONIBReportCreationError
        except errors.HostUnreachable:
            log.err("Host is not reachable (HostUnreachable error)")
            raise errors.OONIBReportCreationError
        except Exception, e:
            log.err("Failed to connect to reporter backend")
            log.exception(e)
            raise errors.OONIBReportCreationError

        # This is a little trick to allow us to unspool the response. We
        # create a deferred and call yield on it.
        response_body = defer.Deferred()
        response.deliverBody(BodyReceiver(response_body))

        backend_response = yield response_body

        try:
            parsed_response = json.loads(backend_response)
        except Exception, e:
            log.err("Failed to parse collector response %s" %
                    backend_response)
            log.exception(e)
            raise errors.OONIBReportCreationError
def control_err(failure):
    failure_string = failureToString(failure)
    log.err("Failed to perform control lookup: %s" % failure_string)
    self.report['control_failure'] = failure_string
def _task_failed(self, failure, task):
    """
    Fired when a task failed to run due to an error.
    """
    log.err("Failed to run {0}".format(task.identifier))
    log.exception(failure)
class OONIBReporter(OReporter):

    def __init__(self, test_details, collector_address):
        self.collectorAddress = collector_address
        self.validateCollectorAddress()

        self.reportID = None

        OReporter.__init__(self, test_details)

    def validateCollectorAddress(self):
        """
        Will raise an :class:`ooni.errors.InvalidOONIBCollectorAddress`
        exception if the oonib collector address is not valid.
        """
        regexp = '^(http|httpo):\/\/[a-zA-Z0-9\-\.]+(:\d+)?$'
        if not re.match(regexp, self.collectorAddress):
            raise errors.InvalidOONIBCollectorAddress

    @defer.inlineCallbacks
    def writeReportEntry(self, entry):
        log.debug("Writing report with OONIB reporter")
        content = '---\n'
        if isinstance(entry, Measurement):
            content += safe_dump(entry.testInstance.report)
        elif isinstance(entry, Failure):
            content += entry.value
        elif isinstance(entry, dict):
            content += safe_dump(entry)
        content += '...\n'

        url = self.collectorAddress + '/report'

        request = {'report_id': self.reportID,
                   'content': content}

        log.debug("Updating report with id %s (%s)" % (self.reportID, url))
        request_json = json.dumps(request)
        log.debug("Sending %s" % request_json)

        bodyProducer = StringProducer(json.dumps(request))

        try:
            yield self.agent.request("PUT", url,
                                     bodyProducer=bodyProducer)
        except:
            # XXX we must trap this in the runner and make sure to report
            # the data later.
            log.err("Error in writing report entry")
            raise errors.OONIBReportUpdateError

    @defer.inlineCallbacks
    def createReport(self):
        """
        Creates a report on the oonib collector.
        """
        # XXX we should probably be setting this inside of the constructor,
        # however config.tor.socks_port is not set until Tor is started and
        # the reporter is instantiated before Tor is started. We probably
        # want to do this with some deferred kung foo or instantiate the
        # reporter after tor is started.
        from ooni.utils.hacks import SOCKS5Agent
        from twisted.internet import reactor

        if self.collectorAddress.startswith('httpo://'):
            self.collectorAddress = \
                self.collectorAddress.replace('httpo://', 'http://')
            proxyEndpoint = TCP4ClientEndpoint(reactor, '127.0.0.1',
                                               config.tor.socks_port)
            self.agent = SOCKS5Agent(reactor, proxyEndpoint=proxyEndpoint)
        elif self.collectorAddress.startswith('https://'):
            # XXX add support for securely reporting to HTTPS collectors.
            log.err("HTTPS based collectors are currently not supported.")

        url = self.collectorAddress + '/report'

        content = '---\n'
        content += safe_dump(self.testDetails)
        content += '...\n'

        request = {
            'software_name': self.testDetails['software_name'],
            'software_version': self.testDetails['software_version'],
            'probe_asn': self.testDetails['probe_asn'],
            'test_name': self.testDetails['test_name'],
            'test_version': self.testDetails['test_version'],
            'input_hashes': self.testDetails['input_hashes'],
            # XXX there is a bunch of redundancy in the arguments getting
            # sent to the backend. This may need to get changed in the
            # client and the backend.
            'content': content
        }

        log.msg("Reporting %s" % url)
        request_json = json.dumps(request)
        log.debug("Sending %s" % request_json)

        bodyProducer = StringProducer(json.dumps(request))

        log.msg("Creating report with OONIB Reporter. Please be patient.")
        log.msg("This may take up to 1-2 minutes...")

        try:
            response = yield self.agent.request("POST", url,
                                                bodyProducer=bodyProducer)
        except ConnectionRefusedError:
            log.err("Connection to reporting backend failed "
                    "(ConnectionRefusedError)")
            raise errors.OONIBReportCreationError
        except errors.HostUnreachable:
            log.err("Host is not reachable (HostUnreachable error)")
            raise errors.OONIBReportCreationError
        except Exception, e:
            log.err("Failed to connect to reporter backend")
            log.exception(e)
            raise errors.OONIBReportCreationError

        # This is a little trick to allow us to unspool the response. We
        # create a deferred and call yield on it.
        response_body = defer.Deferred()
        response.deliverBody(BodyReceiver(response_body))

        backend_response = yield response_body

        try:
            parsed_response = json.loads(backend_response)
        except Exception, e:
            log.err("Failed to parse collector response %s" %
                    backend_response)
            log.exception(e)
            raise errors.OONIBReportCreationError
def err(err):
    log.err("Failed to get descriptor for deck %s" % deck_hash)
    print err
    log.exception(err)
def oonib_report_failed(result):
    log.err("Failed to close oonib report.")
from twisted.internet import reactor, defer

from ooni.utils import log
from ooni import errors

try:
    from pygeoip import GeoIP
except ImportError:
    try:
        import GeoIP as CGeoIP

        def GeoIP(database_path, *args, **kwargs):
            return CGeoIP.open(database_path, CGeoIP.GEOIP_STANDARD)
    except ImportError:
        log.err("Unable to import pygeoip. We will not be able to run "
                "geo IP related measurements")


class GeoIPDataFilesNotFound(Exception):
    pass


def ip_to_location(ipaddr):
    from ooni.settings import config

    country_file = config.get_data_file_path(
        'resources/maxmind-geoip/GeoIP.dat')
    asn_file = config.get_data_file_path(
        'resources/maxmind-geoip/GeoIPASNum.dat')
def err(err):
    log.err("Failed to download the input file %s" % input_hash)
    log.exception(err)
def test_parasitic_tcp_traceroute(self):
    """
    Establishes a TCP stream, then sequentially sends TCP packets with
    increasing TTL until we reach the ttl of the destination.

    Requires the backend to respond with an ACK to our SYN packet (i.e.
    the port must be open)

    XXX this currently does not work properly. The problem lies in the
    fact that we are currently using the scapy layer 3 socket. This
    socket makes packets received be trapped by the kernel TCP stack,
    therefore when we send out a SYN and get back a SYN-ACK the kernel
    stack will reply with a RST because it did not send a SYN.

    The quick fix to this would be to establish a TCP stream using
    socket calls and then "cannibalizing" the TCP session with scapy.

    The real fix is to make scapy use libpcap instead of raw sockets
    obviously as we previously did... arg.
    """
    sport = self.get_sport()
    dport = self.dport
    ipid = int(RandShort())

    ip_layer = IP(dst=self.dst_ip,
                  id=ipid, ttl=self.max_ttl)

    syn = ip_layer / TCP(sport=sport, dport=dport, flags="S", seq=0)

    log.msg("Sending...")
    syn.show2()

    synack = yield self.sr1(syn)

    if not synack:
        log.err("Got no response. Try increasing max_ttl")
        return

    log.msg("Got response...")
    synack.show2()

    if synack[TCP].flags == 11:
        log.msg("Got back a FIN ACK. The destination port is closed")
        return

    elif synack[TCP].flags == 18:
        log.msg("Got a SYN ACK. All is well.")
    else:
        log.err("Got an unexpected result")
        return

    ack = ip_layer / TCP(sport=synack.dport,
                         dport=dport, flags="A",
                         seq=synack.ack, ack=synack.seq + 1)

    yield self.send(ack)

    self.report['hops'] = []
    # For the time being we make the assumption that we are NATted and
    # that the NAT will forward the packet to the destination even if
    # the TTL has
    for ttl in range(1, self.max_ttl):
        log.msg("Sending packet with ttl of %s" % ttl)
        ip_layer.ttl = ttl
        empty_tcp_packet = ip_layer / TCP(sport=synack.dport,
                                          dport=dport, flags="A",
                                          seq=synack.ack,
                                          ack=synack.seq + 1)

        answer = yield self.sr1(empty_tcp_packet)
        if not answer:
            log.err("Got no response for ttl %s" % ttl)
            continue

        try:
            icmp = answer[ICMP]
            report = {'ttl': empty_tcp_packet.ttl,
                      'address': answer.src,
                      'rtt': answer.time - empty_tcp_packet.time}
            log.msg("%s: %s" % (dport, report))
            self.report['hops'].append(report)

        except IndexError:
            if answer.src == self.dst_ip:
                answer.show()
                log.msg("Reached the destination. We have finished the "
                        "traceroute")
                return
def err(err):
    log.err("Failed to download the deck %s" % deck_hash)
    print err
    log.exception(err)
def generateMeasurements(self):
    """
    This is a generator that yields measurements and registers the
    callbacks for when a measurement is successful or has failed.

    FIXME: If this generator throws an exception, the TaskManager
    scheduler is irreversibly damaged.
    """
    self._startTime = time.time()

    for test_class, test_methods in self.testCases:
        # load a singular input processor for all instances
        all_inputs = test_class.inputs
        for test_input in all_inputs:
            measurements = []
            test_instance = test_class()
            # Set each instances inputs to a singular input processor
            test_instance.inputs = all_inputs
            test_instance._setUp()
            test_instance.summary = self.summary
            for method in test_methods:
                try:
                    measurement = self.makeMeasurement(
                        test_instance, method, test_input)
                except Exception:
                    log.exception(failure.Failure())
                    log.err('Failed to run %s %s %s' %
                            (test_instance, method, test_input))
                    continue  # it's better to skip single measurement...
                log.debug("Running %s %s" % (test_instance, method))
                measurements.append(measurement.done)
                self.state.taskCreated()
                yield measurement

            # This is to skip setting callbacks on measurements that
            # cannot be run.
            if len(measurements) == 0:
                continue

            # When the measurement.done callbacks have all fired
            # call the postProcessor before writing the report
            if self.report:
                post = defer.DeferredList(measurements)

                @post.addBoth
                def set_runtime(results):
                    runtime = time.time() - test_instance._start_time
                    for _, m in results:
                        m.testInstance.report['test_runtime'] = runtime
                    test_instance.report['test_runtime'] = runtime
                    return results

                # Call the postProcessor, which must return a single report
                # or a deferred
                post.addCallback(test_instance.postProcessor)

                def noPostProcessor(failure, report):
                    failure.trap(e.NoPostProcessor)
                    return report
                post.addErrback(noPostProcessor, test_instance.report)
                post.addCallback(self.report.write)

            if self.report and self.director:
                # ghetto hax to keep the NetTestState counts accurate
                [post.addBoth(self.doneReport) for _ in measurements]

    self.state.allTasksScheduled()
def failed(self, failure):
    log.err("Failed to lookup via %s" % self.url)
    log.exception(failure)
    return failure
def errReceived(self, data):
    log.err('Script error: %s' % data)
    self.transport.signalProcess('KILL')
def director_startup_handled_failures(failure):
    log.err("Could not start the director")
    failure.trap(errors.TorNotRunning,
                 errors.InvalidOONIBCollectorAddress,
                 errors.UnableToLoadDeckInput,
                 errors.CouldNotFindTestHelper,
                 errors.CouldNotFindTestCollector,
                 errors.ProbeIPUnknown,
                 errors.InvalidInputFile,
                 errors.ConfigFileIncoherent)

    if isinstance(failure.value, errors.TorNotRunning):
        log.err("Tor does not appear to be running")
        log.err("Reporting with the collector %s is not possible" %
                global_options['collector'])
        log.msg("Try with a different collector or disable collector "
                "reporting with -n")

    elif isinstance(failure.value, errors.InvalidOONIBCollectorAddress):
        log.err("Invalid format for oonib collector address.")
        log.msg("Should be in the format http://<collector_address>:<port>")
        log.msg("for example: ooniprobe -c httpo://nkvphnp3p6agi5qq.onion")

    elif isinstance(failure.value, errors.UnableToLoadDeckInput):
        log.err("Unable to fetch the required inputs for the test deck.")
        log.msg("Please file a ticket on our issue tracker: "
                "https://github.com/thetorproject/ooni-probe/issues")

    elif isinstance(failure.value, errors.CouldNotFindTestHelper):
        log.err("Unable to obtain the required test helpers.")
        log.msg("Try with a different bouncer or check that Tor is "
                "running properly.")

    elif isinstance(failure.value, errors.CouldNotFindTestCollector):
        log.err("Could not find a valid collector.")
        log.msg("Try with a different bouncer, specify a collector with -c "
                "or disable reporting to a collector with -n.")

    elif isinstance(failure.value, errors.ProbeIPUnknown):
        log.err("Failed to lookup probe IP address.")
        log.msg("Check your internet connection.")

    elif isinstance(failure.value, errors.InvalidInputFile):
        log.err("Invalid input file \"%s\"" % failure.value)

    elif isinstance(failure.value, errors.ConfigFileIncoherent):
        log.err("Incoherent config file")

    if config.advanced.debug:
        log.exception(failure)
        tmp_test_case_object = obj()
        tmp_test_case_object._processOptions(options)

    except usage.UsageError, e:
        test_name = tmp_test_case_object.name
        print "There was an error in running %s!" % test_name
        print "%s" % e
        options.opt_help()
        raise usage.UsageError("Error in parsing command line args for %s"
                               % test_name)

    if obj.requiresRoot:
        try:
            checkForRoot()
        except NotRootError:
            log.err("%s requires root to run" % obj.name)
            sys.exit(1)
    return obj


def isTestCase(obj):
    try:
        return issubclass(obj, NetTestCase)
    except TypeError:
        return False


def findTestClassesFromConfig(cmd_line_options):
    """
    Takes as input the command line config parameters and returns the test
def __init__(self, pcap_filename, *arg, **kw):
    log.err("Initializing DummyPcapWriter. "
            "We will not actually write to a pcapfile")
def director_startup_other_failures(failure):
    log.err("An unhandled exception occurred while starting the director!")
    log.exception(failure)
        return ifup
    else:
        raise IfaceError


def getNonLoopbackIfaces(platform_name=None):
    try:
        ifaces = getIfaces(platform_name)
    except UnsupportedPlatform, up:
        log.err(up)

    if not ifaces:
        log.msg("Unable to discover network interfaces...")
        return None
    else:
        found = [{i[0]: i[2]} for i in ifaces if i[0] != 'lo']
        log.debug("getNonLoopbackIfaces: Found non-loopback interfaces: %s"
                  % found)
        try:
            interfaces = checkInterfaces(found)
        except IfaceError, ie:
            log.err(ie)
            return None
        else:
            return interfaces


def getLocalAddress():
    default_iface = getDefaultIface()
    return default_iface.ipaddr
def runWithDirector(logging=True, start_tor=True, check_incoherences=True):
    """
    Instance the director, parse command line options and start an ooniprobe
    test!
    """
    global_options = parseOptions()
    config.global_options = global_options
    config.set_paths()
    config.initialize_ooni_home()
    try:
        config.read_config_file(check_incoherences=check_incoherences)
    except errors.ConfigFileIncoherent:
        sys.exit(6)

    if global_options['verbose']:
        config.advanced.debug = True

    if not start_tor:
        config.advanced.start_tor = False

    if logging:
        log.start(global_options['logfile'])

    if config.privacy.includepcap:
        if hasRawSocketPermission():
            from ooni.utils.txscapy import ScapyFactory
            config.scapyFactory = ScapyFactory(config.advanced.interface)
        else:
            log.err("Insufficient Privileges to capture packets."
                    " See ooniprobe.conf privacy.includepcap")
            sys.exit(2)

    director = Director()
    if global_options['list']:
        print "# Installed nettests"
        for net_test_id, net_test in director.getNetTests().items():
            print "* %s (%s/%s)" % (net_test['name'],
                                    net_test['category'],
                                    net_test['id'])
            print " %s" % net_test['description']
        sys.exit(0)

    elif global_options['printdeck']:
        del global_options['printdeck']
        print "# Copy and paste the lines below into a test deck to run the specified test with the specified arguments"
        print yaml.safe_dump([{'options': global_options}]).strip()
        sys.exit(0)

    if global_options.get('annotations') is not None:
        annotations = {}
        for annotation in global_options["annotations"].split(","):
            pair = annotation.split(":")
            if len(pair) == 2:
                key = pair[0].strip()
                value = pair[1].strip()
                annotations[key] = value
            else:
                log.err("Invalid annotation: %s" % annotation)
                sys.exit(1)
        global_options["annotations"] = annotations

    if global_options['no-collector']:
        log.msg("Not reporting using a collector")
        global_options['collector'] = None
        start_tor = False
    else:
        start_tor = True

    deck = Deck(no_collector=global_options['no-collector'])
    deck.bouncer = global_options['bouncer']
    if global_options['collector']:
        start_tor |= True

    try:
        if global_options['testdeck']:
            deck.loadDeck(global_options['testdeck'])
        else:
            log.debug("No test deck detected")
            test_file = nettest_to_path(global_options['test_file'], True)
            net_test_loader = NetTestLoader(global_options['subargs'],
                                            test_file=test_file)
            if global_options['collector']:
                net_test_loader.collector = global_options['collector']
            deck.insert(net_test_loader)
    except errors.MissingRequiredOption as option_name:
        log.err('Missing required option: "%s"' % option_name)
        incomplete_net_test_loader = option_name.net_test_loader
        print incomplete_net_test_loader.usageOptions().getUsage()
        sys.exit(2)
    except errors.NetTestNotFound as path:
        log.err('Requested NetTest file not found (%s)' % path)
        sys.exit(3)
    except errors.OONIUsageError as e:
        log.err(e)
        print e.net_test_loader.usageOptions().getUsage()
        sys.exit(4)
    except Exception as e:
        if config.advanced.debug:
            log.exception(e)
        log.err(e)
        sys.exit(5)

    start_tor |= deck.requiresTor
    d = director.start(start_tor=start_tor,
                       check_incoherences=check_incoherences)

    def setup_nettest(_):
        try:
            return deck.setup()
        except errors.UnableToLoadDeckInput as error:
            return defer.failure.Failure(error)

    def director_startup_handled_failures(failure):
        log.err("Could not start the director")
        failure.trap(errors.TorNotRunning,
                     errors.InvalidOONIBCollectorAddress,
                     errors.UnableToLoadDeckInput,
                     errors.CouldNotFindTestHelper,
                     errors.CouldNotFindTestCollector,
                     errors.ProbeIPUnknown,
                     errors.InvalidInputFile,
                     errors.ConfigFileIncoherent)

        if isinstance(failure.value, errors.TorNotRunning):
            log.err("Tor does not appear to be running")
            log.err("Reporting with the collector %s is not possible" %
                    global_options['collector'])
            log.msg("Try with a different collector or disable collector "
                    "reporting with -n")

        elif isinstance(failure.value, errors.InvalidOONIBCollectorAddress):
            log.err("Invalid format for oonib collector address.")
            log.msg("Should be in the format "
                    "http://<collector_address>:<port>")
            log.msg("for example: ooniprobe -c "
                    "httpo://nkvphnp3p6agi5qq.onion")

        elif isinstance(failure.value, errors.UnableToLoadDeckInput):
            log.err("Unable to fetch the required inputs for the test deck.")
            log.msg("Please file a ticket on our issue tracker: "
                    "https://github.com/thetorproject/ooni-probe/issues")

        elif isinstance(failure.value, errors.CouldNotFindTestHelper):
            log.err("Unable to obtain the required test helpers.")
            log.msg("Try with a different bouncer or check that Tor is "
                    "running properly.")

        elif isinstance(failure.value, errors.CouldNotFindTestCollector):
            log.err("Could not find a valid collector.")
            log.msg("Try with a different bouncer, specify a collector "
                    "with -c or disable reporting to a collector with -n.")

        elif isinstance(failure.value, errors.ProbeIPUnknown):
            log.err("Failed to lookup probe IP address.")
            log.msg("Check your internet connection.")

        elif isinstance(failure.value, errors.InvalidInputFile):
            log.err("Invalid input file \"%s\"" % failure.value)

        elif isinstance(failure.value, errors.ConfigFileIncoherent):
            log.err("Incoherent config file")

        if config.advanced.debug:
            log.exception(failure)

    def director_startup_other_failures(failure):
        log.err("An unhandled exception occurred while starting "
                "the director!")
        log.exception(failure)

    # Wait until director has started up (including bootstrapping Tor)
    # before adding tests
    def post_director_start(_):
        for net_test_loader in deck.netTestLoaders:
            # Decks can specify different collectors
            # for each net test, so that each NetTest
            # may be paired with a test_helper and its collector
            # However, a user can override this behavior by
            # specifying a collector from the command-line (-c).
            # If a collector is not specified in the deck, or the
            # deck is a singleton, the default collector set in
            # ooniprobe.conf will be used
            collector = None
            if not global_options['no-collector']:
                if global_options['collector']:
                    collector = global_options['collector']
                elif 'collector' in config.reports \
                        and config.reports['collector']:
                    collector = config.reports['collector']
                elif net_test_loader.collector:
                    collector = net_test_loader.collector

            if collector and collector.startswith('httpo:') \
                    and (not (config.tor_state or config.tor.socks_port)):
                raise errors.TorNotRunning

            test_details = net_test_loader.testDetails
            test_details['annotations'] = global_options['annotations']

            director.startNetTest(net_test_loader,
                                  global_options['reportfile'],
                                  collector)
        return director.allTestsDone

    def start():
        d.addCallback(setup_nettest)
        d.addCallback(post_director_start)
        d.addErrback(director_startup_handled_failures)
        d.addErrback(director_startup_other_failures)
        return d

    return start()