def dns_resolve(self, hostname, nameserver=None):
    """
    Resolves hostname(s) though nameserver to corresponding
    address(es). hostname may be either a single hostname string,
    or a list of strings. If nameserver is not given, use local DNS
    resolver, and if that fails try using 8.8.8.8.

    :return: (via defer.returnValue) a list of resolved addresses,
        with the string 'NXDOMAIN' appended for names that do not exist.
    """
    if isinstance(hostname, str):
        hostname = [hostname]

    response = []
    for hn in hostname:
        # BUG FIX: reset per hostname. Previously `answer` was set once
        # before the loop, so a stale answer from an earlier iteration
        # leaked into the NXDOMAIN/error paths, either appending duplicate
        # addresses or masking a failed lookup.
        answer = None
        try:
            answer = yield self.performALookup(hn)
            if not answer:
                # Fall back to Google's public resolver.
                answer = yield self.performALookup(hn, ('8.8.8.8', 53))
        except error.DNSNameError:
            log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
            response.append('NXDOMAIN')
            continue
        except Exception:
            log.err("DNS Resolution failed")
            continue
        # BUG FIX: the original called defer.returnValue() from inside a
        # `finally` clause whenever `answer` was empty, aborting the whole
        # loop (and swallowing the remaining hostnames) on the first empty
        # answer. An empty answer now just skips this hostname.
        if answer:
            for addr in answer:
                response.append(addr)
    defer.returnValue(response)
def run_traceroute(self, protocol):
    """
    Run a traceroute of the given protocol ('UDP', 'TCP' or 'ICMP')
    towards the configured backend, recording sent, answered and
    matched packets in the report.
    """
    tracer = MPTraceroute()
    if self.localOptions['maxttl']:
        tracer.ttl_max = int(self.localOptions['maxttl'])
    if self.localOptions['dstport']:
        tracer.dst_ports = [int(self.localOptions['dstport'])]
    if self.localOptions['interval']:
        tracer.interval = float(self.localOptions['interval'])

    backend = self.localOptions['backend']
    log.msg("Running %s traceroute towards %s" % (protocol, backend))
    log.msg("This will take about %s seconds" % tracer.timeout)

    config.scapyFactory.registerProtocol(tracer)
    # Dispatch to e.g. UDPTraceroute / TCPTraceroute / ICMPTraceroute.
    yield getattr(tracer, protocol + 'Traceroute')(backend)

    tracer.stopListening()
    tracer.matchResponses()

    for pkt in tracer.sent_packets:
        self.report['sent_packets'].append(scapyt.representPacket(pkt))
    for pkt in tracer.matched_packets.values():
        self.report['answered_packets'].append(scapyt.representPacket(pkt))

    for ttl in xrange(tracer.ttl_min, tracer.ttl_max):
        for pkt in [p for p in tracer.matched_packets.keys()
                    if p.ttl == ttl]:
            for response in tracer.matched_packets[pkt]:
                self.addToReport(pkt, response)
def test_control_a_lookup(self):
    """
    Send a single DNS A query for self.hostname to the control resolver
    using a raw scapy IP/UDP/DNS packet and wait for the first response.
    """
    # Recursion-desired (rd=1) query for an A record in the IN class.
    question = IP(dst=self.controlResolverAddr) / \
               UDP() / \
               DNS(rd=1, qd=DNSQR(qtype="A", qclass="IN",
                                  qname=self.hostname))
    log.msg("Performing query to %s with %s:%s" % (self.hostname,
            self.controlResolverAddr, self.controlResolverPort))
    # sr1-style helper: send the packet and yield the first answer.
    yield self.sr1(question)
def setup_complete(proto):
    """
    Callback fired when the Tor bootstrap completed: terminate the
    managed Tor process and record success in the report.
    """
    try:
        proto.transport.signalProcess('TERM')
    except error.ProcessExitedAlready:
        # The process is already gone; just drop the transport.
        proto.transport.loseConnection()
    log.msg("Successfully connected to Tor")
    self.report['success'] = True
def process_a_answers(self, message, resolver_address):
    """
    Record the answers of a DNS A lookup performed through
    resolver_address (or through the control resolver when
    resolver_address == 'control') into the report.

    :param message: a DNS response message with an `answers` list.
    :param resolver_address: the resolver the query went through,
        or the string 'control'.
    """
    log.msg("Processing A answers for %s" % resolver_address)
    log.debug("These are the answers I got %s" % message.answers)

    all_a = []  # string form of every answer record
    a_a = []    # dotted-quad addresses from A records only

    for answer in message.answers:
        # BUG FIX: `answer.type is 1` relied on CPython small-int
        # interning; use equality for the A record type check.
        if answer.type == 1:  # A type query
            r = answer.payload.dottedQuad()
            self.report['a_lookups'][resolver_address] = r
            a_a.append(r)
        lookup = str(answer.payload)
        all_a.append(lookup)

    if resolver_address == 'control':
        self.report['control_server'] = self.localOptions['backend']
        self.report['control_lookup'] = all_a
        self.control_a_lookups = a_a
    else:
        self.test_a_lookups[resolver_address] = a_a
        self.report['test_lookups'][resolver_address] = all_a
    log.msg("Done")
def _setup_failed(self, failure):
    """
    Errback for a failed Tor bootstrap: dump Tor's output to the debug
    log, then retry the launch with the next pluggable transport in
    self.retry_with, or fail self.started with UnableToStartTor when
    no candidates remain.
    """
    self.tor_output.seek(0)
    map(log.debug, self.tor_output.readlines())
    self.tor_output.seek(0)

    if len(self.retry_with) == 0:
        # Nothing left to try: propagate the failure.
        self.started.errback(errors.UnableToStartTor())
        return

    while len(self.retry_with) > 0:
        self._reset_tor_config()
        self.tor_config.UseBridges = 1
        transport = self.retry_with.pop(0)
        log.msg("Failed to start Tor. Retrying with {0}".format(transport))
        try:
            # Bridge lines are looked up on the constants module by
            # convention, e.g. OBFS4_BRIDGES for transport "obfs4".
            bridge_lines = getattr(constants,
                                   '{0}_BRIDGES'.format(transport).upper())
        except AttributeError:
            # Unknown transport name: move on to the next candidate.
            continue
        try:
            self.tor_config.ClientTransportPlugin = \
                get_client_transport(transport)
        except UninstalledTransport:
            log.err("Pluggable transport {0} is not installed".format(
                transport))
            continue
        except UnrecognizedTransport:
            log.err("Unrecognized transport type")
            continue
        self.tor_config.Bridge = bridge_lines
        # Only one transport is attempted per invocation; a further
        # failure re-enters this errback with the remaining candidates.
        self.launch()
        break
def tryInterfaces(self, ifaces):
    """
    Probe each (ifname, ifaddr) pair by sending an ICMP echo to the
    interface's own address, and collect the interfaces that answered.

    :param ifaces: iterable of (ifname, ifaddr) tuples.
    :return: dict mapping responsive interface names to their addresses.
    :raises SystemExit: when scapy is not installed.
    :raises PermissionsError: when sending fails with EPERM.
    """
    try:
        from scapy.all import sr, IP, ICMP  ## we want this check to be blocking
        # BUG FIX: `sr` is what is called below; `sr1` was imported instead.
    except ImportError:
        log.msg("This test requires scapy: www.secdev.org/projects/scapy")
        raise SystemExit

    ifup = {}
    # BUG FIX: the original `while ifaces:` never consumed `ifaces`, so it
    # looped forever; a single pass over the pairs is what was intended.
    for ifname, ifaddr in ifaces:
        log.debug("Currently testing network capabilities of interface"
                  + "%s by sending a packet to our address %s"
                  % (ifname, ifaddr))
        try:
            pkt = IP(dst=ifaddr) / ICMP()
            ans, unans = sr(pkt, iface=ifname, timeout=self.timeout)
        except Exception as e:
            # BUG FIX: exception objects have no .find(); inspect str(e),
            # and raise PermissionsError only for EPERM ("Errno 1").
            if "Errno 1" in str(e):
                raise PermissionsError
            log.err(e)
        else:
            ## xxx i think this logic might be wrong
            log.debug("Interface test packet\n%s\n\n%s"
                      % (pkt.summary(), pkt.show2()))
            if ans.summary():
                log.info("Received answer for test packet on interface"
                         + "%s :\n%s" % (ifname, ans.summary()))
                # BUG FIX: dict.update(ifname, ifaddr) raises TypeError;
                # assign the entry directly.
                ifup[ifname] = ifaddr
            else:
                log.info("Our interface test packet was unanswered:\n%s"
                         % unans.summary())
    # BUG FIX: callers (getPosixIfaces / getWindowsIfaces) use the return
    # value; the original implicitly returned None.
    return ifup
def readmsg(_, channel, queue_object, consumer_tag, counter):
    # Wait for a message and decode it.
    # NOTE(review): lifetime, finished, director, start_tor and
    # global_options are expected to be bound in the enclosing scope —
    # this only works as a closure.
    if counter >= lifetime:
        # Consumed the configured number of messages: cancel the AMQP
        # consumer and resolve the `finished` deferred.
        log.msg("Counter")
        queue_object.close(LifetimeExceeded())
        yield channel.basic_cancel(consumer_tag=consumer_tag)
        finished.callback(None)
    else:
        log.msg("Waiting for message")
        try:
            ch, method, properties, body = yield queue_object.get()
            log.msg("Got message")
            data = json.loads(body)
            counter += 1
            log.msg("Received %d/%d: %s" % (counter, lifetime,
                                            data['url'],))
            # acknowledge the message
            ch.basic_ack(delivery_tag=method.delivery_tag)
            d = runTestWithDirector(director=director,
                                    start_tor=start_tor,
                                    global_options=global_options,
                                    url=data['url'].encode('utf8'))
            # When the test has been completed, go back to waiting for a message.
            d.addCallback(readmsg, channel, queue_object, consumer_tag,
                          counter+1)
        except exceptions.AMQPError, v:
            # Any AMQP failure aborts the consume loop.
            log.msg("Error")
            log.exception(v)
            finished.errback(v)
def postProcessor(self, measurements):
    """
    Compare the experiment and control HTTP measurements and populate
    the report with body-length and header comparisons.

    :param measurements: iterable of (status, measurement) pairs; the
        relevant measurements are the ones whose net test method is
        test_get_experiment or test_get_control.
    :return: the (mutated) report dict.
    """
    experiment = control = None
    for status, measurement in measurements:
        net_test_method = measurement.netTestMethod.im_func.func_name
        if net_test_method == "test_get_experiment":
            if isinstance(measurement.result, failure.Failure):
                self.report['experiment_failure'] = failureToString(
                    measurement.result)
            else:
                experiment = measurement.result
        elif net_test_method == "test_get_control":
            if isinstance(measurement.result, failure.Failure):
                self.report['control_failure'] = failureToString(
                    measurement.result)
            else:
                control = measurement.result

    # Comparisons only make sense when both requests succeeded.
    if experiment and control:
        if hasattr(experiment, 'body') and hasattr(control, 'body') \
                and experiment.body and control.body:
            self.report['control_cloudflare'] = False
            # A CloudFlare block page in the control would skew the
            # body-length comparison; flag it.
            if 'Attention Required! | CloudFlare' in control.body:
                log.msg("The control body contains a blockpage from "
                        "cloudflare. This will skew our results.")
                self.report['control_cloudflare'] = True
            self.compare_body_lengths(len(control.body),
                                      len(experiment.body))
        if hasattr(experiment, 'headers') and hasattr(control, 'headers') \
                and experiment.headers and control.headers:
            self.compare_headers(control.headers, experiment.headers)
    return self.report
def startSniffing(self, testDetails):
    """
    Start sniffing with Scapy. Exits if required privileges (root) are
    not available.

    :param testDetails: dict describing the running test; its
        'test_name' keys the sniffer in self.sniffers.
    """
    from ooni.utils.txscapy import ScapySniffer, ScapyFactory

    if config.scapyFactory is None:
        config.scapyFactory = ScapyFactory(config.advanced.interface)

    # Use the configured pcap prefix when set, otherwise 'report'.
    prefix = config.reports.pcap or 'report'

    # Idiom fix: dict.get() replaces the `in ....keys()` membership test
    # plus conditional expression.
    filename = config.global_options.get('reportfile')
    filename_pcap = generate_filename(testDetails, filename=filename,
                                      prefix=prefix, extension='pcap')

    if len(self.sniffers) > 0:
        # Concurrent net tests write into separate pcap files; warn that
        # captures may interleave.
        pcap_filenames = set(sniffer.pcapwriter.filename
                             for sniffer in self.sniffers.values())
        pcap_filenames.add(filename_pcap)
        log.msg("pcap files %s can be messed up because several netTests "
                "are being executed in parallel."
                % ','.join(pcap_filenames))

    sniffer = ScapySniffer(filename_pcap)
    self.sniffers[testDetails['test_name']] = sniffer
    config.scapyFactory.registerProtocol(sniffer)
    log.msg("Starting packet capture to: %s" % filename_pcap)
def _test_http_request(self):
    """
    Try an HTTP POST against every Telegram DC on ports 80 and 443 and
    record in the report whether Telegram's web-facing servers appear
    blocked.

    A DC is considered reachable when any request yields a 501 response
    (Telegram answers unsupported methods with 501 Not Implemented).
    """
    http_blocked = True
    for dc_id, address in TELEGRAM_DCS:
        # Idiom fix: was `if http_blocked == False`.
        if not http_blocked:
            break
        for port in [80, 443]:
            url = 'http://{}:{}'.format(address, port)
            try:
                response = yield self.doRequest(url, 'POST')
            except Exception as exc:
                failure_string = failureToString(
                    defer.failure.Failure(exc))
                log.err("Failed to connect to {}: {}".format(
                    url, failure_string))
                continue
            log.debug("Got back status code {}".format(response.code))
            log.debug("{}".format(response.body))
            if response.code == 501:
                http_blocked = False
                break
    # Idiom fix: was `if http_blocked == True`.
    if http_blocked:
        self.report['telegram_http_blocking'] = True
        log.msg("Telegram servers are BLOCKED based on HTTP")
    else:
        self.report['telegram_http_blocking'] = False
        log.msg("Telegram servers are not blocked based on HTTP")
def createDeck(global_options, url=None):
    """
    Build a Deck from the command-line options: either load the given
    test deck file, or wrap the single requested net test (optionally
    targeting `url`) in a fresh deck.

    Exits the process with a distinct status code for each class of
    configuration error.

    :param global_options: parsed command-line options.
    :param url: optional URL passed to the net test as `-u`.
    :return: the populated Deck.
    """
    from ooni.nettest import NetTestLoader
    from ooni.deck import Deck, nettest_to_path

    if url:
        log.msg("Creating deck for: %s" % (url))

    if global_options['no-yamloo']:
        log.msg("Will not write to a yamloo report file")

    deck = Deck(bouncer=global_options['bouncer'],
                no_collector=global_options['no-collector'])

    try:
        if global_options['testdeck']:
            deck.loadDeck(global_options['testdeck'], global_options)
        else:
            log.debug("No test deck detected")
            test_file = nettest_to_path(global_options['test_file'], True)
            # `-u url` is appended after any explicit subargs.
            if url is not None:
                args = ('-u', url)
            else:
                args = tuple()
            if any(global_options['subargs']):
                args = global_options['subargs'] + args
            net_test_loader = NetTestLoader(
                args,
                test_file=test_file,
                annotations=global_options['annotations'])
            if global_options['collector']:
                net_test_loader.collector = \
                    CollectorClient(global_options['collector'])
            deck.insert(net_test_loader)
    except errors.MissingRequiredOption as option_name:
        log.err('Missing required option: "%s"' % option_name)
        incomplete_net_test_loader = option_name.net_test_loader
        print incomplete_net_test_loader.usageOptions().getUsage()
        sys.exit(2)
    except errors.NetTestNotFound as path:
        log.err('Requested NetTest file not found (%s)' % path)
        sys.exit(3)
    except errors.OONIUsageError as e:
        log.err(e)
        print e.net_test_loader.usageOptions().getUsage()
        sys.exit(4)
    except errors.HTTPSCollectorUnsupported:
        log.err("HTTPS collectors require a twisted version of at least 14.0.2.")
        sys.exit(6)
    except errors.InsecureBackend:
        log.err("Attempting to report to an insecure collector.")
        log.err("To enable reporting to insecure collector set the "
                "advanced->insecure_backend option to true in "
                "your ooniprobe.conf file.")
        sys.exit(7)
    except Exception as e:
        # Catch-all boundary: anything unexpected is logged and mapped
        # to exit code 5.
        if config.advanced.debug:
            log.exception(e)
        log.err(e)
        sys.exit(5)
    return deck
def checkInterfaces(ifaces=None, timeout=1):
    """
    Ping each supplied interface's own address and report which ones
    answered.

    @param ifaces: A dictionary in the form of ifaces['if_name'] = 'if_addr'
        (an iterable of (if_name, if_addr) pairs is also accepted).
    @param timeout: seconds to wait for each probe.
    @return: dict mapping responsive interface names to their addresses,
        or None when scapy is unavailable or no interfaces were given.
    """
    try:
        from scapy.all import IP, ICMP
        from scapy.all import sr  ## we want this check to be blocking
        # BUG FIX: `sr` is what is called below; `sr1` was imported.
    except ImportError:
        # BUG FIX: the bare except used to swallow the error and fall
        # through to a NameError further down; bail out explicitly.
        log.msg(("Scapy required: www.secdev.org/projects/scapy"))
        return None

    ifup = {}
    if not ifaces:
        log.debug("checkInterfaces(): no interfaces specified!")
        return None

    # BUG FIX: the original nested loop (`for iface in ifaces: for
    # ifname, ifaddr in iface:`) cannot unpack either a dict's keys or a
    # list of pairs; walk the (name, addr) pairs directly.
    iface_pairs = ifaces.items() if isinstance(ifaces, dict) else ifaces
    for ifname, ifaddr in iface_pairs:
        # BUG FIX: .format() was applied to only half of a concatenated
        # message, leaving a literal "{}" in the log output.
        log.debug("checkInterfaces(): testing iface {0} by pinging"
                  " local address {1}".format(ifname, ifaddr))
        try:
            pkt = IP(dst=ifaddr) / ICMP()
            # BUG FIX: honour the `timeout` parameter instead of a
            # hard-coded 5 seconds.
            ans, unans = sr(pkt, iface=ifname, timeout=timeout, retry=3)
        except Exception as e:
            # BUG FIX: exception objects have no .find(); inspect str(e)
            # and raise PermissionsError only for EPERM ("Errno 1").
            if "Errno 1" in str(e):
                raise PermissionsError
            log.err(e)
        else:
            if ans.summary():
                # BUG FIX: mixed %-placeholders with .format(); use one
                # consistent formatting style.
                log.debug("checkInterfaces(): got answer on interface {0}"
                          ":\n{1}".format(ifname, ans.summary()))
                # BUG FIX: dict.update(ifname, ifaddr) raises TypeError;
                # assign the entry directly.
                ifup[ifname] = ifaddr
            else:
                log.debug("Interface test packet was unanswered:\n%s"
                          % unans.summary())
    # BUG FIX: the responsive-interface map was built but never returned.
    return ifup
def test_daphn3(self):
    """
    Connect to the daphn3 backend and replay the mutated steps from
    self.input, marking the input as censored when the connection fails.

    :return: a deferred firing when the payload exchange completes.
    """
    host = self.localOptions['host']
    port = int(self.localOptions['port'])

    def connection_failed(reason):
        # Renamed from `failure`, which shadowed its own argument.
        log.msg("Failed to connect")
        self.report['censored'] = True
        self.report['mutation'] = 0
        raise Exception("Error in connection, perhaps the backend is censored")
        # BUG FIX: the unreachable `return` after the raise was removed.

    def connection_made(protocol):
        log.msg("Successfully connected")
        protocol.sendPayload()
        return protocol.d

    log.msg("Connecting to %s:%s" % (host, port))
    endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
    daphn3_factory = Daphn3ClientFactory()
    daphn3_factory.steps = self.input
    daphn3_factory.report = self.report
    d = endpoint.connect(daphn3_factory)
    d.addErrback(connection_failed)
    d.addCallback(connection_made)
    return d
def startNetTest(self, _, net_test_loader, reporters):
    """
    Create the Report for the NetTest and start the report NetTest.

    Args:
        net_test_loader:
         an instance of :class:ooni.nettest.NetTestLoader

    _: #XXX very dirty hack
    """
    if config.privacy.includepcap:
        log.msg("Starting")
        if not config.reports.pcap:
            config.reports.pcap = config.generatePcapFilename(
                net_test_loader.testDetails)
        # NOTE(review): called without arguments here, while other
        # variants of startSniffing take the test details — confirm
        # the signature in use.
        self.startSniffing()

    report = Report(reporters, self.reportEntryManager)
    net_test = NetTest(net_test_loader, report)
    net_test.director = self

    yield net_test.report.open()
    # Feed the generated measurements to the measurement manager and
    # track the running test until its `done` deferred fires.
    self.measurementManager.schedule(net_test.generateMeasurements())
    self.activeNetTests.append(net_test)
    net_test.done.addBoth(report.close)
    net_test.done.addBoth(self.netTestDone, net_test)
    yield net_test.done
def start_sniffing(self, test_details):
    """
    Begin capturing packets with Scapy for the given test. Exits if
    required privileges (root) are not available.
    """
    from ooni.utils.txscapy import ScapySniffer, ScapyFactory

    if config.scapyFactory is None:
        config.scapyFactory = ScapyFactory(config.advanced.interface)

    # XXX this is dumb option to have in the ooniprobe.conf. Drop it in
    # the future.
    prefix = 'report' if config.reports.pcap is None else config.reports.pcap

    capture_path = config.global_options.get('pcapfile', None)
    if capture_path is None:
        capture_path = generate_filename(test_details, prefix=prefix,
                                         extension='pcap')

    if self.sniffers:
        # Warn when concurrent net tests may interleave their captures.
        in_use = set(s.pcapwriter.filename for s in self.sniffers.values())
        in_use.add(capture_path)
        log.msg("pcap files %s can be messed up because several netTests are being executed in parallel." % ','.join(in_use))

    sniffer = ScapySniffer(capture_path)
    self.sniffers[test_details['test_name']] = sniffer
    config.scapyFactory.registerProtocol(sniffer)
    log.msg("Starting packet capture to: %s" % capture_path)
def test_search_bad_request(self):
    """
    Attempts to perform a request with a random invalid HTTP method.

    If we are being MITMed by a Transparent Squid HTTP proxy we will
    get back a response containing the X-Squid-Error header.
    """
    def process_headers(headers):
        log.debug("Processing headers in test_search_bad_request")
        if 'X-Squid-Error' in headers:
            log.msg("Detected the presence of a transparent HTTP "\
                    "squid proxy")
            # BUG FIX: this wrote to 'trans_http_proxy', leaving the
            # 'transparent_http_proxy' key (used everywhere else in this
            # test) stuck at None on detection.
            self.report['transparent_http_proxy'] = True
        else:
            log.msg("Did not detect the presence of transparent HTTP "\
                    "squid proxy")
            self.report['transparent_http_proxy'] = False

    log.msg("Testing Squid proxy presence by sending a random bad request")
    headers = {}
    #headers["Host"] = [self.input]
    method = utils.randomSTR(10, True)
    self.report['transparent_http_proxy'] = None
    return self.doRequest(self.localOptions['backend'],
                          method=method,
                          headers=headers,
                          headers_processor=process_headers)
def resumeTest(test_filename, input_unit_factory):
    """
    Returns the an input_unit_factory that is at the index of the previous
    run of the test for the specified test_filename.

    Args:
        test_filename (str): the filename of the test that is being run,
            including the .py extension.

        input_unit_factory (:class:ooni.inputunit.InputUnitFactory): with
            the same input of the past run.

    Returns:
        :class:ooni.inputunit.InputUnitFactory that is at the index of the
        previous test run.
    """
    try:
        previous_index = config.stateDict[test_filename]
        # Fast-forward the factory past the inputs already processed.
        for _ in range(previous_index):
            try:
                input_unit_factory.next()
            except StopIteration:
                log.msg("Previous run was complete")
                return input_unit_factory
        return input_unit_factory
    except KeyError:
        # First run for this test: record a zero index.
        log.debug("No resume key found for selected test name. It is therefore 0")
        config.stateDict[test_filename] = 0
        return input_unit_factory
def get_preferred_bouncer():
    """
    Build a BouncerClient for the backend type configured in
    advanced.preferred_backend (defaults to "onion").

    Cloudfront bouncers need an (address, front) pair; every other
    type is addressed directly.
    """
    log.msg("getting prefered bouncer...")
    preferred_backend = config.advanced.get(
        "preferred_backend", "onion"
    )
    # BUG FIX: this log line had a bare %s placeholder with no argument
    # and printed a literal "%s"; supply the backend name.
    log.msg("prefered bouncer: step 2... %s" % preferred_backend)
    log.msg("CANONICAL_BOUNCER_{0}".format(
        preferred_backend.upper()
    ))
    bouncer_address = getattr(
        constants, "CANONICAL_BOUNCER_{0}".format(
            preferred_backend.upper()
        )
    )
    log.msg("prefered bouncer: step 3...")
    if preferred_backend == "cloudfront":
        return BouncerClient(
            settings={
                'address': bouncer_address[0],
                'front': bouncer_address[1],
                'type': 'cloudfront'
            })
    else:
        log.msg("prefered bouncer: step 4...")
        return BouncerClient(bouncer_address)
def measurementFailed(self, failure, measurement):
    """
    Bookkeeping errback for a failed measurement: account for its
    runtime, bump the failure counter and stash the (failure,
    measurement) pair. Returns None so the failure is considered
    handled.
    """
    log.msg("Failed doing measurement: %s" % measurement)
    self.failures.append((failure, measurement))
    self.failedMeasurements += 1
    self.totalMeasurementRuntime += measurement.runtime
    return None
def upload(report_file, collector=None, bouncer=None):
    """
    Upload the entries of report_file to a collector. When no collector
    is given, one is discovered via the bouncer or read back from the
    report log.
    """
    print "Attempting to upload %s" % report_file

    with open(config.report_log_file) as f:
        report_log = yaml.safe_load(f)

    report = parser.ReportLoader(report_file)
    if bouncer:
        oonib_client = OONIBClient(bouncer)
        collector = yield oonib_client.lookupTestCollector(
            report.header["test_name"])

    if collector is None:
        try:
            collector = report_log[report_file]["collector"]
            if collector is None:
                raise KeyError
        except KeyError:
            raise Exception("No collector or bouncer specified and collector not in report log.")

    oonib_reporter = OONIBReporter(report.header, collector)
    log.msg("Creating report for %s with %s" % (report_file, collector))
    report_id = yield oonib_reporter.createReport()
    # NOTE(review): oonib_report_log is not defined in this function; a
    # newer variant of upload() instantiates OONIBReportLog() locally —
    # confirm it is available as a module-level name here.
    yield oonib_report_log.created(report_file, collector, report_id)
    for entry in report:
        print "Writing entry"
        yield oonib_reporter.writeReportEntry(entry)
    log.msg("Closing report.")
    yield oonib_reporter.finish()
    yield oonib_report_log.closed(report_file)
def setUp(self):
    """
    Initialise the per-run report keys and load the list of DNS
    resolvers to test; falls back to 8.8.8.8 when no resolver file was
    given.

    :raises usage.UsageError: when the resolver file cannot be read.
    """
    self.report['test_lookups'] = {}
    self.report['test_reverse'] = {}
    self.report['control_lookup'] = []
    self.report['a_lookups'] = {}
    self.report['tampering'] = {}

    self.test_a_lookups = {}
    self.control_a_lookups = []
    self.control_reverse = None
    self.test_reverse = {}

    if not self.localOptions['testresolvers']:
        log.msg("You did not specify a file of DNS servers to test!",
                "See the '--testresolvers' option.")
        self.test_resolvers = ['8.8.8.8']
        return

    try:
        # BUG FIX: narrowed the bare except to I/O errors and switched to
        # a context manager so the file is closed even when reading fails.
        with open(self.localOptions['testresolvers']) as fp:
            self.test_resolvers = [x.strip() for x in fp.readlines()]
    except IOError:
        raise usage.UsageError("Invalid test resolvers file")
def do_reverse_lookups(self, result):
    """
    Take a resolved address in the form "176.139.79.178.in-addr.arpa."
    and attempt to reverse the domain with both the control and test
    DNS servers to see if they match.

    :param result: A resolved domain name.
    :return: a DeferredList of the pending reverse lookups.
    """
    log.msg("Doing the reverse lookups %s" % self.input)
    list_of_ds = []

    resolver = [(self.localOptions['controlresolver'], 53)]
    res = self.createResolver(servers=resolver)

    test_reverse = self.reverse_lookup(self.control_a_lookups[0], res,
                                       timeout=self.lookupTimeout)
    test_reverse.addCallback(self.process_ptr_answers, 'control')
    test_reverse.addErrback(self.ptr_lookup_error, 'control')
    list_of_ds.append(test_reverse)

    for test_resolver in self.test_resolvers:
        try:
            ip = self.test_a_lookups[test_resolver][0]
        except (KeyError, IndexError):
            # BUG FIX: a resolver with no successful A lookup used to
            # `break` (with a bare except), silently skipping every
            # remaining resolver; move on to the next one instead.
            continue
        d = self.reverse_lookup(ip, res)
        d.addCallback(self.process_ptr_answers, test_resolver)
        d.addErrback(self.ptr_lookup_error, test_resolver)
        list_of_ds.append(d)

    dl = defer.DeferredList(list_of_ds)
    return dl
def process_headers(headers):
    """
    Checks if any of the headers that squid is known to add match the
    squid regexp.

    We are looking for something that looks like this:

        via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
        x-cache: MISS from cache_server
        x-cache-lookup: MISS from cache_server:3128
    """
    # NOTE(review): this function references `self`, `log` and `re`
    # without receiving or importing them, so it only works as a closure
    # inside a test method — confirm the enclosing scope provides them.
    squid_headers = {'via': r'.* \((squid.*)\)',
                     'x-cache': r'MISS from (\w+)',
                     'x-cache-lookup': r'MISS from (\w+:?\d+?)'
                     }

    self.report['transparent_http_proxy'] = False
    for key in squid_headers.keys():
        if key in headers:
            log.debug("Found %s in headers" % key)
            m = re.search(squid_headers[key], headers[key])
            if m:
                log.msg("Detected the presence of squid transparent"\
                        " HTTP Proxy")
                self.report['transparent_http_proxy'] = True
def getWindowsIfaces():
    """
    Enumerate the Windows network interfaces and return the ones that
    answer a probe (see tryInterfaces).
    """
    from twisted.internet.test import _win32ifaces
    log.msg("Attempting to discover network interfaces...")
    return tryInterfaces(_win32ifaces._interfaces())
def post_director_start(_):
    """
    Once the director is up, pair every loaded net test with its
    collector and reporters and start it.
    """
    for net_test_loader in deck.netTestLoaders:
        # Decks can specify different collectors
        # for each net test, so that each NetTest
        # may be paired with a test_helper and its collector
        # However, a user can override this behavior by
        # specifying a collector from the command-line (-c).
        # If a collector is not specified in the deck, or the
        # deck is a singleton, the default collector set in
        # ooniprobe.conf will be used
        collector = None
        if not global_options["no-collector"]:
            if global_options["collector"]:
                collector = global_options["collector"]
            elif net_test_loader.collector:
                collector = net_test_loader.collector

        # Hidden-service collectors require a running Tor.
        if collector and collector.startswith("httpo:") \
                and (not (config.tor_state or config.tor.socks_port)):
            raise errors.TorNotRunning

        test_details = net_test_loader.testDetails
        yaml_reporter = YAMLReporter(test_details)
        reporters = [yaml_reporter]

        if collector:
            log.msg("Reporting using collector: %s" % collector)
            try:
                oonib_reporter = OONIBReporter(test_details, collector)
                reporters.append(oonib_reporter)
            except errors.InvalidOONIBCollectorAddress, e:
                raise e

        log.debug("adding callback for startNetTest")
        director.startNetTest(net_test_loader, reporters)
def experiment(self, args): log.msg("Doing test") # What you return here gets handed as input to control from ooni.lib.txtorcon import TorProtocolFactory, TorConfig, TorState from ooni.lib.txtorcon import DEFAULT_VALUE, launch_tor def updates(prog, tag, summary): log.msg("%d%%: %s" % (prog, summary)) return def setup_failed(args): log.msg("Setup Failed.") report.update({'failed': args}) return report def setup_complete(proto): log.msg("Setup Complete.") report.update({'success': args}) return report config = TorConfig() import random config.SocksPort = random.randint(1024, 2**16) config.ControlPort = random.randint(1024, 2**16) if 'bridge' in args: config.UseBridges = 1 config.Bridge = args['bridge'] config.save() print config.create_torrc() report = {'tor_config': config.config} log.msg("Starting Tor") d = launch_tor(config, self.reactor, progress_updates=updates) d.addCallback(setup_complete) d.addErrback(setup_failed) return d
def experiment_tcp_connect(self, socket):
    """
    Attempt a TCP connection to the "ip:port" string `socket` and append
    the outcome to the tcp_connect section of the report.
    """
    log.msg("* connecting to {}".format(socket))
    ip_address, port = socket.split(":")
    port = int(port)
    result = {
        'ip': ip_address,
        'port': port,
        'status': {
            'success': None,
            'failure': None,
            'blocked': None
        }
    }

    endpoint = TCP4ClientEndpoint(reactor, ip_address, port)
    d = endpoint.connect(TCPConnectFactory())

    def on_connected(protocol):
        result['status']['success'] = True
        result['status']['blocked'] = False
        self.report['tcp_connect'].append(result)

    def on_failed(failure):
        result['status']['success'] = False
        result['status']['failure'] = failureToString(failure)
        self.report['tcp_connect'].append(result)

    d.addCallback(on_connected)
    d.addErrback(on_failed)
    return d
def getPosixIfaces():
    """
    Enumerate the POSIX network interfaces and return the ones that
    answer a probe (see tryInterfaces).
    """
    from twisted.internet.test import _posixifaces
    log.msg("Attempting to discover network interfaces...")
    return tryInterfaces(_posixifaces._interfaces())
def loadPolicy(self):
    """
    Fetch the nettest and input policies from the OONIB backend at
    self.address and store them on the instance.
    """
    # XXX implement caching of policies
    oonibclient = OONIBClient(self.address)
    log.msg("Looking up nettest policy for %s" % self.address)
    self.nettest_policy = yield oonibclient.getNettestPolicy()
    log.msg("Looking up input policy for %s" % self.address)
    self.input_policy = yield oonibclient.getInputPolicy()
def post_director_start(_):
    """
    Once the director is up, chain a startNetTest call for every loaded
    net test, wiring up the YAML reporter and (optionally) an OONIB
    collector reporter.
    """
    for net_test_loader in test_list:
        collector = global_options['collector']
        test_details = net_test_loader.testDetails

        yaml_reporter = YAMLReporter(test_details)
        reporters = [yaml_reporter]

        # NOTE(review): this checks startswith('httpo') while the other
        # variant of this function checks "httpo:" — confirm the intended
        # scheme prefix.
        if collector and collector.startswith('httpo') \
                and (not (config.tor_state or config.tor.socks_port)):
            raise errors.TorNotRunning
        elif collector:
            log.msg("Reporting using collector: %s" % collector)
            try:
                oonib_reporter = OONIBReporter(test_details, collector)
                reporters.append(oonib_reporter)
            except errors.InvalidOONIBCollectorAddress, e:
                raise e

        log.debug("adding callback for startNetTest")
        d.addCallback(director.startNetTest, net_test_loader, reporters)
class TxtorconImportError(ImportError):
    """
    Raised when ooni.lib.txtorcon cannot be imported from. Checks our
    current working directory and the path given to see if txtorcon
    has been initialized via /ooni/lib/Makefile.
    """
    # NOTE(review): everything below runs once at class-definition time
    # (it lives directly in the class body), not when the exception is
    # raised — confirm that is intentional.
    from os import getcwd, path
    cwd, tx = getcwd(), 'lib/txtorcon/torconfig.py'
    try:
        log.msg("Unable to import from ooni.lib.txtorcon")
        # Locate torconfig.py relative to wherever we were started from.
        if cwd.endswith('ooni'):
            check = path.join(cwd, tx)
        elif cwd.endswith('utils'):
            check = path.join(cwd, '../'+tx)
        else:
            check = path.join(cwd, 'ooni/'+tx)
        assert path.isfile(check)
    except:
        log.msg("Error: Some OONI libraries are missing!")
        log.msg("Please go to /ooni/lib/ and do \"make all\"")
def dns_resolve_match(self, experiment_hostname, control_address):
    """
    Resolve experiment_hostname, and check to see that it returns an
    experiment_address which matches the control_address. If they match,
    returns True and experiment_address; otherwise returns False and
    experiment_address (or None when the lookup itself failed).
    """
    experiment_address = yield self.dns_resolve(experiment_hostname)
    if not experiment_address:
        log.debug("dns_resolve() for %s failed" % experiment_hostname)
        defer.returnValue((None, experiment_address))

    # Any overlap between the resolved set and the control address
    # counts as a match.
    if set(experiment_address) & set([control_address]):
        defer.returnValue((True, experiment_address))

    log.msg("DNS comparison of control '%s' does not" % control_address)
    log.msg("match experiment response '%s'" % experiment_address)
    defer.returnValue((False, experiment_address))
def test_parasitic_tcp_traceroute(self):
    """
    Establishes a TCP stream and send the packets inside of such
    stream. Requires the backend to respond with an ACK to our SYN
    packet.
    """
    max_ttl, timeout = self.max_ttl_and_timeout()
    sport = self.get_sport()
    dport = self.dport
    # Random IP id so replies can be matched back to this probe.
    ipid = int(RandShort())
    packet = IP(dst=self.localOptions['backend'],
                ttl=max_ttl,
                id=ipid) / TCP(sport=sport, dport=dport,
                               flags="S", seq=0)
    # NOTE(review): this logs the destination *port*, not the backend
    # host — confirm the message is intended.
    log.msg("Sending SYN towards %s" % dport)
    try:
        answered, unanswered = yield self.sr(packet, timeout=timeout)
    except Exception, e:
        log.exception(e)
def update_url_lists(self, country_code):
    """
    Regenerate the citizenlab URL-list inputs for the global list and,
    when available, the given country, from the bundled CSV resources.

    :param country_code: two-letter country code; "ZZ" means unknown,
        in which case only the global list is processed.
    """
    countries = ["global"]
    if country_code != "ZZ":
        countries.append(country_code)

    for cc in countries:
        cc = cc.lower()
        in_file = self.resources.child("citizenlab-test-lists").child(
            "{0}.csv".format(cc))
        if not in_file.exists():
            # Try to fetch the missing list once before giving up.
            yield check_for_update(country_code)
        if not in_file.exists():
            log.msg("Could not find input for country "
                    "{0} in {1}".format(cc, in_file.path))
            continue

        # XXX maybe move this to some utility function.
        # It's duplicated in oonideckgen.
        data_fname = "citizenlab-test-lists_{0}.txt".format(cc)
        desc_fname = "citizenlab-test-lists_{0}.desc".format(cc)
        out_file = self.path.child("data").child(data_fname)
        # Keep only the URL column (row[0]) of the CSV.
        write_txt_from_csv(in_file, out_file,
                           lambda row: "{}\n".format(row[0])
                           )
        desc_file = self.path.child("descriptors").child(desc_fname)
        if cc == "global":
            name = "List of globally accessed websites"
        else:
            # XXX resolve this to a human readable country name
            country_name = cc
            name = "List of websites for {0}".format(country_name)
        write_descriptor(desc_file, name,
                         "citizenlab_{0}_urls".format(cc),
                         out_file.path,
                         "file/url")
    # Mark cached inputs as stale so they get re-read.
    self._cache_stale = True
    yield defer.succeed(None)
def lookup_details():
    """
    A closure useful for printing test details.
    """
    # NOTE(review): relies on test_resolver, experiment_answers and
    # control_answers being bound in the enclosing scope.
    log.msg("test resolver: %s" % test_resolver)
    log.msg("experiment answers: %s" % experiment_answers)
    log.msg("control answers: %s" % control_answers)
def createReport(self):
    """
    Creates a report on the oonib collector.

    :raises errors.OONIBReportCreationError: on any failure to create
        the remote report.
    """
    log.msg("Creating report with OONIB Reporter. Please be patient.")
    log.msg("This may take up to 1-2 minutes...")

    try:
        response = yield self.collector_client.createReport(
            self.testDetails)
    except ConnectionRefusedError:
        log.err("Connection to reporting backend failed "
                "(ConnectionRefusedError)")
        raise errors.OONIBReportCreationError
    except errors.HostUnreachable:
        # BUG FIX: the log message was missing its closing parenthesis.
        log.err("Host is not reachable (HostUnreachable error)")
        raise errors.OONIBReportCreationError
    except (errors.OONIBInvalidInputHash,
            errors.OONIBInvalidNettestName):
        log.err("The specified input or nettests cannot be submitted to "
                "this collector.")
        log.msg("Try running a different test or try reporting to a "
                "different collector.")
        raise errors.OONIBReportCreationError
    except Exception as e:
        # Catch-all boundary: log and map to the creation error.
        log.err("Failed to connect to reporter backend")
        log.exception(e)
        raise errors.OONIBReportCreationError
def refresh_deck_list(self):
    """
    Sync the scheduler with the currently enabled decks.

    Collects the enabled decks that carry a schedule, drops from that
    set the ones already scheduled, unschedules RunDeck tasks whose deck
    is no longer enabled, and schedules the remainder for the next
    scheduler cycle.
    """
    to_enable = []
    for deck_id, deck in deck_store.list_enabled():
        if deck.schedule is None:
            continue
        to_enable.append((deck_id, deck.schedule))

    # If we are not initialized we should not enable anything
    if not config.is_initialized():
        log.msg("We are not initialized skipping setup of decks")
        to_enable = []

    # Iterate over a snapshot since unschedule() mutates the task list.
    for task in self._scheduled_tasks[:]:
        if not isinstance(task, RunDeck):
            continue

        info = (task.deck_id, task.schedule)
        if info in to_enable:
            # Already scheduled; nothing to do for this deck.
            log.debug("The deck {0} is already scheduled".format(task.deck_id))
            to_enable.remove(info)
        else:
            # Scheduled but no longer amongst the enabled decks: drop it.
            log.debug("The deck task {0} should be disabled".format(task.deck_id))
            self.unschedule(task)

    for deck_id, schedule in to_enable:
        log.debug("Scheduling to run {0}".format(deck_id))
        self.schedule(RunDeck(self.director, deck_id, schedule))
def connectionSucceeded(connection, host, timeout): """If we have created a connection, set the socket options, and log the connection state and peer name. :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`. :param tuple host: A tuple of the remote host's IP address as a string, and an integer specifying the remote host port, i.e. ('1.1.1.1',443) """ ## xxx TODO to get this to work with a non-blocking socket, see how ## twisted.internet.tcp.Client handles socket objects. connection.setblocking(1) ## Set the timeout on the connection: ## ## We want to set SO_RCVTIMEO and SO_SNDTIMEO, which both are ## defined in the socket option definitions in <sys/socket.h>, and ## which both take as their value, according to socket(7), a ## struct timeval, which is defined in the libc manual: ## https://www.gnu.org/software/libc/manual/html_node/Elapsed-Time.html timeval = struct.pack('ll', int(timeout), 0) connection.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, timeval) connection.setsockopt(socket.SOL_SOCKET, socket.SO_SNDTIMEO, timeval) ## Set the connection state to client mode: connection.set_connect_state() peer_name, peer_port = connection.getpeername() if peer_name: log.msg("Connected to %s" % peer_name) else: log.debug("Couldn't get peer name from connection: %s" % host) log.msg("Connected to %s" % host) log.debug("Connection state: %s " % connection.state_string()) return connection
def __init__(self, test_details, report_destination='.', report_filename=None):
    """
    Prepare the YAML report file path inside report_destination,
    rotating any pre-existing report with the same name out of the way.

    :raises errors.InvalidDestination: when report_destination is not a
        directory.
    """
    self.reportDestination = report_destination

    if not os.path.isdir(report_destination):
        raise errors.InvalidDestination

    report_filename = generate_filename(test_details,
                                        filename=report_filename,
                                        prefix='report',
                                        extension='yamloo')
    destination_path = os.path.join(self.reportDestination,
                                    report_filename)

    if os.path.exists(destination_path):
        # Keep the old report around by pushing it onto the filename
        # stack before we reuse its path.
        log.msg("Report already exists with filename %s" % destination_path)
        pushFilenameStack(destination_path)

    self.report_path = os.path.abspath(destination_path)
    OReporter.__init__(self, test_details)
def upload(report_file, collector=None, bouncer=None):
    """
    Upload the entries of report_file to a collector. When no collector
    is given, one is looked up through the bouncer or read back from
    the report log.
    """
    oonib_report_log = OONIBReportLog()

    print "Attempting to upload %s" % report_file

    with open(config.report_log_file) as f:
        report_log = yaml.safe_load(f)

    report = parser.ReportLoader(report_file)
    if bouncer and not collector:
        # Ask the bouncer which collector serves this net test.
        oonib_client = OONIBClient(bouncer)
        net_tests = [{
            'test-helpers': [],
            'input-hashes': report.header['input_hashes'],
            'name': report.header['test_name'],
            'version': report.header['test_version'],
        }]
        result = yield oonib_client.lookupTestCollector(
            net_tests
        )
        collector = str(result['net-tests'][0]['collector'])

    if collector is None:
        # Fall back to the collector recorded for this report file.
        try:
            collector = report_log[report_file]['collector']
            if collector is None:
                raise KeyError
        except KeyError:
            raise Exception(
                "No collector or bouncer specified"
                " and collector not in report log."
            )

    oonib_reporter = OONIBReporter(report.header, collector)
    log.msg("Creating report for %s with %s" % (report_file, collector))
    report_id = yield oonib_reporter.createReport()
    yield oonib_report_log.created(report_file, collector, report_id)
    for entry in report:
        print "Writing entry"
        yield oonib_reporter.writeReportEntry(entry)
    log.msg("Closing report.")
    yield oonib_reporter.finish()
    yield oonib_report_log.closed(report_file)
def test_squid_headers(self):
    """
    Detects the presence of a squid transparent HTTP proxy based on
    the response headers it adds to the responses to requests.
    """
    def process_headers(headers):
        """
        Look for the response headers squid is known to add and match
        them against the squid regexp. We are looking for something
        that looks like this:

            via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
            x-cache: MISS from cache_server
            x-cache-lookup: MISS from cache_server:3128
        """
        signatures = {'via': r'.* \((squid.*)\)',
                      'x-cache': r'MISS from (\w+)',
                      'x-cache-lookup': r'MISS from (\w+:?\d+?)'
                      }
        self.report['transparent_http_proxy'] = False
        for header, pattern in signatures.items():
            if header not in headers:
                continue
            log.debug("Found %s in headers" % header)
            if re.search(pattern, headers[header]):
                log.msg("Detected the presence of squid transparent"\
                        " HTTP Proxy")
                self.report['transparent_http_proxy'] = True

    log.msg("Testing Squid proxy by looking at response headers")
    headers = {}
    #headers["Host"] = [self.input]
    method = "GET"
    self.report['transparent_http_proxy'] = None
    d = self.doRequest(self.localOptions['backend'],
                       method=method, headers=headers,
                       headers_processor=process_headers)
    return d
def runWithDirector(): """ Instance the director, parse command line options and start an ooniprobe test! """ global_options = parseOptions() log.start(global_options['logfile']) # contains (test_cases, options, cmd_line_options) test_list = [] if global_options['no-collector']: log.msg("Not reporting using a collector") global_options['collector'] = None if global_options['testdeck']: test_deck = yaml.safe_load(open(global_options['testdeck'])) for test in test_deck: test_list.append( NetTestLoader(test['options']['subargs'], test_file=test['options']['test_file'])) else: log.debug("No test deck detected") test_list.append( NetTestLoader(global_options['subargs'], test_file=global_options['test_file'])) # check each test's usageOptions for net_test_loader in test_list: try: net_test_loader.checkOptions() except MissingRequiredOption, option_name: log.err('Missing required option: "%s"' % option_name) print net_test_loader.usageOptions().getUsage() sys.exit(2) except usage.UsageError, e: log.err(e) print net_test_loader.usageOptions().getUsage() sys.exit(2)
def readmsg(_, channel, queue_object, consumer_tag, counter):
    # Wait for a message and decode it.
    #
    # Pulls one URL-measurement request off the AMQP queue, runs the
    # test for it, and re-schedules itself until `lifetime` messages
    # have been processed.  `lifetime`, `finished`, `director`,
    # `start_tor`, `global_options` and `check_incoherences` come from
    # the enclosing scope.
    if counter >= lifetime:
        # Quota of messages consumed: cancel the consumer and signal
        # completion through the `finished` deferred.
        log.msg("Counter")
        queue_object.close(LifetimeExceeded())
        yield channel.basic_cancel(consumer_tag=consumer_tag)
        finished.callback(None)
    else:
        log.msg("Waiting for message")
        try:
            ch, method, properties, body = yield queue_object.get()
            log.msg("Got message")
            data = json.loads(body)
            counter += 1
            log.msg("Received %d/%d: %s" % (
                counter, lifetime, data['url'],
            ))
            # acknowledge the message
            ch.basic_ack(delivery_tag=method.delivery_tag)
            d = runTestWithDirector(
                director=director,
                start_tor=start_tor,
                global_options=global_options,
                url=data['url'].encode('utf8'),
                check_incoherences=check_incoherences)
            # When the test has been completed, go back to waiting for
            # a message.
            d.addCallback(readmsg, channel, queue_object, consumer_tag,
                          counter + 1)
        except exceptions.AMQPError, v:
            # Any AMQP-level error aborts the whole consumption loop.
            log.msg("Error")
            log.exception(v)
            finished.errback(v)
def _test_dns_resolution(self):
    """Check whether DNS answers for the Facebook hostnames look
    consistent.

    Resolves every hostname in FB_HOSTNAMES and considers the answer
    consistent when at least one returned address belongs to Facebook's
    address space (per is_facebook_ip).  Records a per-hostname boolean
    in the report and returns, via defer.returnValue, a dict mapping
    each key to the Facebook-owned addresses found for it.
    """
    consistent_addresses = {}
    for key, hostname in FB_HOSTNAMES.items():
        consistent_addresses[key] = []
        consistent = False
        try:
            addresses = yield self.performALookup(hostname)
            for address in addresses:
                if is_facebook_ip(address):
                    consistent = True
                    consistent_addresses[key].append(address)
        except Exception:
            log.err("Failed to lookup {0}: {1}".format(key, hostname))
        finally:
            msg = "{0}: {1} appears to present ".format(key, hostname)
            # FIX: use truthiness instead of the non-idiomatic
            # `consistent == True` comparison.
            if consistent:
                msg += "consistent DNS"
            else:
                msg += "INCONSISTENT DNS"
            log.msg(msg)
            self.report['facebook_{0}_dns_consistent'.format(key)] = consistent
    defer.returnValue(consistent_addresses)
def initialize_ooni_home(self, custom_home=None):
    """Create the ooni home directory and its sub-directories if they
    do not exist yet.

    :param custom_home: optional alternative home path; when given it
        replaces the configured one and all paths are recomputed.
    """
    if custom_home:
        self._custom_home = custom_home
        self.set_paths()

    home = self.ooni_home
    if not os.path.isdir(home):
        log.msg("Ooni home directory does not exist")
        log.msg("Creating it in '%s'" % home)
        os.mkdir(home)

    # also ensure the subdirectories exist
    for directory in (self.inputs_directory,
                      self.decks_enabled_directory,
                      self.decks_available_directory,
                      self.scheduler_directory,
                      self.measurements_directory,
                      self.resources_directory):
        try:
            os.makedirs(directory)
        except OSError as exc:
            # An already-existing directory is fine.
            if exc.errno != errno.EEXIST:
                raise
def check_for_manipulation(self, response, payload, manipulation_type):
    """Compare the echoed response with the payload we sent and record
    in the report whether tampering occurred for this manipulation
    type."""
    log.debug("Checking if %s == %s" % (response, payload))
    tampered = response != payload
    if tampered:
        log.msg("{0}: Detected manipulation!".format(manipulation_type))
        log.msg(response)
    else:
        log.msg("{0}: No manipulation detected.".format(manipulation_type))
    self.report['tampering'] = tampered
def doneNetTest(self, result):
    """Print a per-test summary (when enabled) and the report ID."""
    if self.summary:
        name = self.testDetails['test_name']
        log.msg("Summary for %s" % name)
        log.msg("------------" + "-" * len(name))
        for klass in self.uniqueClasses():
            klass().displaySummary(self.summary)
    if self.testDetails["report_id"]:
        log.msg("Report ID: %s" % self.testDetails["report_id"])
def check_for_manipulation(self, response, payload):
    """Record in the report whether the response differs from the
    payload we sent (i.e. was tampered with in transit)."""
    log.debug("Checking if %s == %s" % (response, payload))
    if response == payload:
        log.msg("No manipulation detected.")
        self.report['tampering'] = False
    else:
        log.msg("Detected manipulation!")
        log.msg(response)
        self.report['tampering'] = True
def state_complete(state):
    """Called when we've got a TorState; logs version, PID and the
    current circuits, then passes the state through."""
    log.msg("We've completely booted up a Tor version %s at PID %d" %
            (state.protocol.version, state.tor_pid))
    log.msg("This Tor has the following %d Circuits:" %
            len(state.circuits))
    for circuit in state.circuits.values():
        log.msg("%s" % circuit)
    return state
def check_for_censorship(self, body, test_name):
    """
    XXX this is to be filled in with either a domclass based classified
    or with a rule that will allow to detect that the body of the result is
    that of a censored site.

    Sets `transparent_http_proxy` (and the per-test key *test_name*) in
    the report depending on whether *body* looks like the JSON echoed
    back by the HTTP Request Test Helper.
    """
    # If we don't see a json dict we know that something is wrong for
    # sure
    if not body.startswith("{"):
        log.msg("This does not appear to be JSON")
        self.report['transparent_http_proxy'] = True
        # FIX: the original recursively called
        # self.check_for_censorship(body) without the required
        # test_name argument, which raised a TypeError (and with the
        # argument supplied it would recurse forever).  The call is
        # dropped; the report key above already records the detection.
        return
    try:
        content = json.loads(body)
    except ValueError:
        # FIX: narrowed the bare `except:` to ValueError, which is what
        # json.loads raises on malformed input.
        log.msg("The json does not parse, this is not what we expected")
        self.report['transparent_http_proxy'] = True
        return

    # We base the determination of the presence of a transparent HTTP
    # proxy on the basis of the response containing the json that is to be
    # returned by a HTTP Request Test Helper
    if 'request_headers' in content and \
            'request_line' in content and \
            'headers_dict' in content:
        log.msg("Found the keys I expected in %s" % content)
        self.report['transparent_http_proxy'] = self.report[
            'transparent_http_proxy'] | False
        self.report[test_name] = False
    else:
        log.msg("Did not find the keys I expected in %s" % content)
        self.report['transparent_http_proxy'] = True
        if self.localOptions['content']:
            self.report[test_name] = True
            # Compare the response line-by-line against the known
            # censorship page; any differing line means this is not the
            # censorship page.  `with` ensures the file is closed even
            # if the comparison raises.
            with open(self.localOptions['content']) as censorship_page:
                response_page = iter(body.split("\n"))
                for censorship_line in censorship_page:
                    response_line = response_page.next()
                    if response_line != censorship_line:
                        self.report[test_name] = False
                        break
def circuit_failed(self, circuit, reason):
    """
    If building a circuit has failed, try to remove it from our list of
    :ivar:`waiting_circuits`, else request to build it.

    :param circuit:
        An item from :ivar:`ooni.lib.txtorcon.TorState.circuits`.
    :param reason:
        A :class:`twisted.python.fail.Failure` instance.
    :return: None
    """
    if not self.waiting_on(circuit):
        return
    log.msg("Circuit %s failed for reason %s" % (circuit.id, reason))
    circid = d = None
    for waiting in self.waiting_circuits:
        if waiting[0] == circuit.id:
            circid, d = waiting
    if d is None:
        raise Exception("Expected to find circuit.")
    self.waiting_circuits.remove((circid, d))
    log.msg("Trying to build a circuit for %s" % circid)
    self.request_circuit_build(d)
def finished(packets):
    """Append the sent/received packet pairs to the report, scrubbing
    source/destination IPs when the user opted out of including them."""
    answered, unanswered = packets
    if 'answered_packets' not in self.report:
        self.report['answered_packets'] = []
    if 'sent_packets' not in self.report:
        self.report['sent_packets'] = []
    for sent_packet, received_packet in answered:
        log.debug("Writing report for scapy test")
        if not config.privacy.includeip:
            log.msg("Detected you would not like to include your ip in the report")
            log.msg("Stripping source and destination IPs from the reports")
            sent_packet.src = '127.0.0.1'
            received_packet.dst = '127.0.0.1'
        self.report['answered_packets'].append(
            createPacketReport(received_packet))
        self.report['sent_packets'].append(
            createPacketReport(sent_packet))
        log.debug("Done")
    return packets
def test_cacheobject(self):
    """
    This detects the presence of a squid transparent HTTP proxy by
    sending a request for cache_object://localhost/info.

    The response to this request will usually also contain the squid
    version number.
    """
    log.debug("Running")

    def process_body(body):
        # Squid answers cache_object requests with "Access Denied."
        # unless the manager interface is open to us.
        self.report['transparent_http_proxy'] = "Access Denied." in body

    log.msg("Testing Squid proxy presence by sending a request for "\
            "cache_object")
    headers = {}
    #headers["Host"] = [self.input]
    self.report['trans_http_proxy'] = None
    method = "GET"
    body = "cache_object://localhost/info"
    return self.doRequest(self.localOptions['backend'], method=method,
                          body=body, headers=headers,
                          body_processor=process_body)
def _setup_failed(self, failure):
    """Handle a failed Tor launch by retrying with pluggable-transport
    bridges, one transport type at a time, until one launches or we run
    out of transports to try (in which case the `started` deferred
    errbacks with UnableToStartTor)."""
    log.msg("Starting Tor failed: {}".format(failure.value))
    self.tor_output.seek(0)
    map(log.debug, self.tor_output.readlines())
    self.tor_output.seek(0)

    if not self.retry_with:
        self.started.errback(errors.UnableToStartTor())
        return

    while self.retry_with:
        self._reset_tor_config()
        self.tor_config.UseBridges = 1
        transport = self.retry_with.pop(0)
        log.msg("Failed to start Tor. Retrying with {0}".format(transport))

        try:
            bridge_lines = getattr(
                constants, '{0}_BRIDGES'.format(transport).upper())
        except AttributeError:
            # No built-in bridge lines for this transport type.
            continue

        try:
            self.tor_config.ClientTransportPlugin = get_client_transport(
                transport)
        except UninstalledTransport:
            log.err("Pluggable transport {0} is not installed".format(
                transport))
            continue
        except UnrecognizedTransport:
            log.err("Unrecognized transport type")
            continue

        self.tor_config.Bridge = bridge_lines
        self.launch()
        break
def got_response(response):
    """Record whether the helper returned a peer and whether that peer
    was already known, appending newly discovered peers to the peer
    list file.

    :param response: the peer address returned by the helper, or ''
        when no peer is currently available.
    """
    log.msg("received response %s from helper" % response)
    if response == '':
        log.msg('no peer available at this moment')
        self.report['status'] = 'no peer found'
    else:
        self.report['status'] = ''
        with open(self.localOptions['peer_list'], 'a+') as peer_list:
            # FIX: in 'a+' mode the initial read position is not
            # reliably at the start of the file (it is
            # platform-dependent); without this seek the known-peer
            # scan could read nothing and re-append known peers.
            peer_list.seek(0)
            for peer in peer_list:
                # Strip the trailing newline before comparing.
                if peer[:-1] == response:
                    log.msg('we already know the peer')
                    self.report['status'] = 'known peer found: %s' % response
                    break
            if self.report['status'] == '':  # no repetition
                log.msg('new peer discovered')
                self.report['status'] = 'new peer found: %s' % response
                peer_list.write(response + '\n')
def compare_body_lengths(self, body_length_a, body_length_b):
    """Record the proportion between the two body lengths and whether
    they are close enough (proportion above self.factor) to be
    considered a match."""
    if body_length_b == 0:
        # Guard against division by zero: two empty bodies match
        # fully, otherwise the proportion collapses to zero.
        if body_length_a == 0:
            rel = float(1)
        else:
            rel = float(body_length_b) / float(body_length_a)
    else:
        rel = float(body_length_a) / float(body_length_b)

    # Normalise so that rel is always <= 1.
    if rel > 1:
        rel = 1 / rel

    self.report['body_proportion'] = rel
    if rel > float(self.factor):
        log.msg("The two body lengths appear to match")
        log.msg("censorship is probably not happening")
        self.report['body_length_match'] = True
    else:
        log.msg("The two body lengths appear to not match")
        log.msg("censorship could be happening")
        self.report['body_length_match'] = False
def lookup(self):
    """Resolve the probe's public IP address, first via Tor, then via a
    GeoIP service, returning the cached value when already known.

    Generator intended to be driven by Twisted inlineCallbacks; results
    are delivered through defer.returnValue (which raises, so the flat
    structure below is equivalent to the original if/else nesting).
    """
    if self.address:
        defer.returnValue(self.address)

    try:
        yield self.askTor()
        log.msg("Found your IP via Tor")
        self.resolveGeodata()
        defer.returnValue(self.address)
    except errors.TorStateNotFound:
        log.debug("Tor is not running. Skipping IP lookup via Tor.")
    except Exception:
        log.msg("Unable to lookup the probe IP via Tor.")

    try:
        yield self.askGeoIPService()
        log.msg("Found your IP via a GeoIP service")
        self.resolveGeodata()
        defer.returnValue(self.address)
    except Exception:
        log.msg("Unable to lookup the probe IP via GeoIPService")
        raise
def director_startup_failed(failure):
    """Log a helpful explanation for the known startup failures, then
    stop the reactor.  Unexpected failures are re-raised by trap()."""
    log.err("Failed to start the director")
    trapped = failure.trap(errors.TorNotRunning,
                           errors.InvalidOONIBCollectorAddress)
    if trapped == errors.TorNotRunning:
        log.err("Tor does not appear to be running")
        log.err("Reporting with the collector %s is not possible" %
                global_options['collector'])
        log.msg("Try with a different collector or disable collector reporting with -n")
    elif trapped == errors.InvalidOONIBCollectorAddress:
        log.err("Invalid format for oonib collector address.")
        log.msg("Should be in the format http://<collector_address>:<port>")
        log.msg("for example: ooniprobe -c httpo://nkvphnp3p6agi5qq.onion")
    reactor.stop()
def lookupTestHelpers(self):
    """Ask the bouncer for the test helpers (and default collector)
    required by the loaded net tests, and fill them into each
    NetTestLoader that did not already specify its own.

    Generator intended to be driven by Twisted inlineCallbacks; returns
    None early when there is nothing to look up.
    """
    self.oonibclient.address = self.bouncer

    # Helpers we still need an address for, and loaders that still
    # need a collector assigned.
    required_test_helpers = []
    requires_collector = []
    for net_test_loader in self.netTestLoaders:
        if not net_test_loader.collector and not self.no_collector:
            requires_collector.append(net_test_loader)

        for th in net_test_loader.requiredTestHelpers:
            # {'name':'', 'option':'', 'test_class':''}
            if th['test_class'].localOptions[th['option']]:
                # The helper address was already supplied via options.
                continue
            required_test_helpers.append(th['name'])

    if not required_test_helpers and not requires_collector:
        defer.returnValue(None)

    response = yield self.oonibclient.lookupTestHelpers(
        required_test_helpers)

    for net_test_loader in self.netTestLoaders:
        log.msg("Setting collector and test helpers for %s" %
                net_test_loader.testDetails['test_name'])

        # Loaders with no required helpers that still need a collector
        # get the bouncer's default collector.
        if not net_test_loader.requiredTestHelpers and \
                net_test_loader in requires_collector:
            log.msg("Using the default collector: %s" %
                    response['default']['collector'])
            net_test_loader.collector = response['default'][
                'collector'].encode('utf-8')
            continue

        for th in net_test_loader.requiredTestHelpers:
            # Only set helpers which are not already specified
            if th['name'] not in required_test_helpers:
                continue
            test_helper = response[th['name']]
            log.msg("Using this helper: %s" % test_helper)
            th['test_class'].localOptions[
                th['option']] = test_helper['address'].encode('utf-8')
            # A helper lookup also yields the collector to use for the
            # loaders that still need one.
            if net_test_loader in requires_collector:
                net_test_loader.collector = test_helper[
                    'collector'].encode('utf-8')