Example #1
    def __init__(self, options, test_file=None, test_string=None,
                 annotations=None):
        self.options = options
        if annotations is None:
            annotations = {}
        if not isinstance(annotations, dict):
            log.warn("BUG: Annotations is not a dictionary. Resetting it.")
            annotations = {}
        self.annotations = annotations
        self.annotations['platform'] = self.annotations.get('platform',
                                                            config.platform)

        self.requiresTor = False

        self.testName = ""
        self.testVersion = ""
        self.reportId = None

        self.testHelpers = {}
        self.missingTestHelpers = []
        self.usageOptions = None
        self.inputFiles = []

        self._testCases = []
        self.localOptions = None

        if test_file:
            self.loadNetTestFile(test_file)
        elif test_string:
            self.loadNetTestString(test_string)
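
The constructor above normalizes the annotations argument before recording a platform annotation. A minimal, standalone sketch of just that normalization step (the helper name and the default platform value are illustrative, not part of the original module):

def normalize_annotations(annotations, default_platform="linux"):
    # Mirrors the defaulting and type check done in __init__ above.
    if annotations is None:
        annotations = {}
    if not isinstance(annotations, dict):
        # Corresponds to the "BUG: Annotations is not a dictionary" branch.
        annotations = {}
    # Equivalent to annotations['platform'] = annotations.get('platform', ...)
    annotations.setdefault('platform', default_platform)
    return annotations

print(normalize_annotations(None))                          # {'platform': 'linux'}
print(normalize_annotations({'platform': 'macos-x86_64'}))  # caller's value is kept
print(normalize_annotations("not-a-dict"))                  # reset to {'platform': 'linux'}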
Example #2
    def task(self):
        if config.basic.measurement_quota is None:
            return
        maximum_bytes = human_size_to_bytes(config.basic.measurement_quota)
        used_bytes = directory_usage(config.measurements_directory)
        warning_path = os.path.join(config.running_path, 'quota_warning')

        if (float(used_bytes) / float(maximum_bytes)) >= self._warn_when:
            log.warn("You are about to reach the maximum allowed quota. Be careful")
            with open(warning_path, "w") as out_file:
                out_file.write("{0} {1}".format(used_bytes,
                                                maximum_bytes))
        else:
            try:
                os.remove(warning_path)
            except OSError as ose:
                if ose.errno != errno.ENOENT:
                    raise

        if float(used_bytes) < float(maximum_bytes):
            # We are within the allowed quota, so exit.
            return

        # We should begin to delete old reports
        amount_to_delete = float(used_bytes) - float(maximum_bytes)
        amount_deleted = 0
        measurement_path = FilePath(config.measurements_directory)

        kept_measurements = []
        stale_measurements = []
        remaining_measurements = []
        measurements_by_date = sorted(list_measurements(compute_size=True),
                                      key=lambda k: k['test_start_time'])
        for measurement in measurements_by_date:
            if measurement['keep'] is True:
                kept_measurements.append(measurement)
            elif measurement['stale'] is True:
                stale_measurements.append(measurement)
            else:
                remaining_measurements.append(measurement)

        # This is the order in which we should begin deleting measurements.
        ordered_measurements = (stale_measurements +
                                remaining_measurements +
                                kept_measurements)
        while amount_deleted < amount_to_delete:
            measurement = ordered_measurements.pop(0)
            log.warn("Deleting report {0}".format(measurement["id"]))
            measurement_path.child(measurement['id']).remove()
            amount_deleted += measurement['size']
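
The quota check relies on two helpers that are not shown in this snippet, human_size_to_bytes and directory_usage. The sketch below shows the behaviour they are assumed to have (binary units for sizes; the actual OONI implementations may differ):

import os

def human_size_to_bytes(size):
    # Assumed behaviour: convert strings like "10M" or "1.5G" into bytes.
    units = {'K': 2 ** 10, 'M': 2 ** 20, 'G': 2 ** 30, 'T': 2 ** 40}
    size = str(size).strip()
    if size and size[-1].upper() in units:
        return float(size[:-1]) * units[size[-1].upper()]
    return float(size)

def directory_usage(path):
    # Assumed behaviour: total size in bytes of all files below path.
    total = 0
    for dirpath, _dirnames, filenames in os.walk(path):
        for name in filenames:
            total += os.path.getsize(os.path.join(dirpath, name))
    return total

print(human_size_to_bytes("100M"))  # 104857600.0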
    def http_content_match_fuzzy_opt(self,
                                     experimental_url,
                                     control_result,
                                     headers=None,
                                     fuzzy=False):
        """
        Makes an HTTP request on port 80 for experimental_url, then
        compares the response_content of experimental_url with the
        control_result. Optionally, if the fuzzy parameter is set to
        True, the response_content is compared with a regex of the
        control_result. If the response_content from the
        experimental_url and the control_result match, returns True
        with the HTTP status code and headers; False, status code, and
        headers if otherwise.
        """

        if headers is None:
            default_ua = self.local_options['user-agent']
            headers = {'User-Agent': default_ua}

        response, response_headers = self.http_fetch(experimental_url, headers)
        response_content = response.read()
        response_code = response.code
        if response_content is None:
            log.warn("HTTP connection appears to have failed.")
            return False, False, False

        if fuzzy:
            pattern = re.compile(control_result)
            match = pattern.search(response_content)
            log.msg("Fuzzy HTTP content comparison for experiment URL")
            log.msg("'%s'" % experimental_url)
            if not match:
                log.msg("does not match!")
                return False, response_code, response_headers
            else:
                log.msg("and the expected control result yielded a match.")
                return True, response_code, response_headers
        else:
            if str(response_content) != str(control_result):
                log.msg("HTTP content comparison of experiment URL")
                log.msg("'%s'" % experimental_url)
                log.msg("and the expected control result do not match.")
                return False, response_code, response_headers
            else:
                return True, response_code, response_headers
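
The only difference between the fuzzy and exact branches above is the comparison itself: a regular-expression search versus strict string equality. Reduced to a standalone helper (names are illustrative):

import re

def content_matches(response_content, control_result, fuzzy=False):
    # Illustrative reduction of the comparison performed above.
    if fuzzy:
        # control_result is treated as a regular-expression pattern.
        return re.search(control_result, response_content) is not None
    return str(response_content) == str(control_result)

print(content_matches("<html>Success</html>", "Success", fuzzy=True))   # True
print(content_matches("<html>Success</html>", "Success", fuzzy=False))  # False
print(content_matches("Microsoft NCSI", "Microsoft NCSI"))              # True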
    def run_vendor_tests(self, *a, **kw):
        """
        These are several vendor tests used to detect the presence of
        a captive portal. Each test compares HTTP status code and
        content to the control results and has its own User-Agent
        string, in order to emulate the test as it would occur on the
        device it was intended for. Vendor tests are defined in the
        format:
        [exp_url, ctrl_result, ctrl_code, ua, test_name]
        """

        vendor_tests = [['http://www.apple.com/library/test/success.html',
                         'Success',
                         '200',
                         'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
                         'Apple HTTP Captive Portal'],
                        ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
                         '428 Network Authentication Required',
                         '428',
                         'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
                         'W3 Captive Portal'],
                        ['http://www.msftncsi.com/ncsi.txt',
                         'Microsoft NCSI',
                         '200',
                         'Microsoft NCSI',
                         'MS HTTP Captive Portal',]]

        cm = self.http_content_match_fuzzy_opt
        sm = self.http_status_code_match
        snm = self.http_status_code_no_match

        def compare_content(status_func, fuzzy, experiment_url, control_result,
                            control_code, headers, test_name):
            log.msg("")
            log.msg("Running the %s test..." % test_name)

            content_match, experiment_code, experiment_headers = cm(experiment_url,
                                                                    control_result,
                                                                    headers, fuzzy)
            status_match = status_func(experiment_code, control_code)

            if status_match and content_match:
                log.msg("The %s test was unable to detect" % test_name)
                log.msg("a captive portal.")
                return True
            else:
                log.msg("The %s test shows that your network" % test_name)
                log.msg("is filtered.")
                return False

        result = []
        for vt in vendor_tests:
            report = {}
            report['vt'] = vt

            experiment_url = vt[0]
            control_result = vt[1]
            control_code = vt[2]
            headers = {'User-Agent': vt[3]}
            test_name = vt[4]

            args = (experiment_url, control_result, control_code, headers, test_name)

            if test_name == "MS HTTP Captive Portal":
                report['result'] = compare_content(sm, False, *args)

            elif test_name == "Apple HTTP Captive Portal":
                report['result'] = compare_content(sm, True, *args)

            elif test_name == "W3 Captive Portal":
                report['result'] = compare_content(snm, True, *args)

            else:
                log.warn("Ooni is trying to run an undefined CP vendor test.")
            result.append(report)
        return result
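
The if/elif chain at the end pairs each vendor test with a comparator: the Microsoft test expects an exact body and a status match, the Apple test a fuzzy body and a status match, and the W3 test a fuzzy body together with a status mismatch check. A hypothetical way to express that dispatch as data rather than branches (not the module's actual API):

# Hypothetical lookup table equivalent to the if/elif dispatch above.
# The first element names the status comparator (sm or snm above),
# the second says whether the content comparison is fuzzy.
VENDOR_DISPATCH = {
    'MS HTTP Captive Portal':    ('status_match',    False),
    'Apple HTTP Captive Portal': ('status_match',    True),
    'W3 Captive Portal':         ('status_no_match', True),
}

def dispatch(test_name):
    try:
        return VENDOR_DISPATCH[test_name]
    except KeyError:
        raise ValueError("undefined CP vendor test: %s" % test_name)

print(dispatch('Apple HTTP Captive Portal'))  # ('status_match', True)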
Example #6
@defer.inlineCallbacks
def upload(report_file, collector=None, bouncer=None, measurement_id=None):
    oonib_report_log = OONIBReportLog()
    collector_client = None
    if collector:
        collector_client = CollectorClient(address=collector)

    try:
        # Try to guess the measurement_id from the file path
        measurement_id = report_path_to_id(report_file)
    except NoIDFound:
        pass

    log.msg("Attempting to upload %s" % report_file)

    if report_file.endswith(".njson"):
        report = NJSONReportLoader(report_file)
    else:
        log.warn("Uploading of YAML formatted reports will be dropped in "
                 "future versions")
        report = YAMLReportLoader(report_file)

    if bouncer and collector_client is None:
        collector_client = yield lookup_collector_client(
            report.header, bouncer)

    if collector_client is None:
        if measurement_id:
            report_log = yield oonib_report_log.get_report_log(measurement_id)
            collector_settings = report_log['collector']
            log.debug("Collector settings: %s" % collector_settings)
            if collector_settings is None or len(collector_settings) == 0:
                log.warn("Skipping uploading of %s since this measurement "
                         "was run by specifying no collector." % report_file)
                defer.returnValue(None)
            elif isinstance(collector_settings, dict):
                collector_client = CollectorClient(settings=collector_settings)
            elif isinstance(collector_settings, str):
                collector_client = CollectorClient(address=collector_settings)
        else:
            log.msg("Looking up collector with canonical bouncer." %
                    report_file)
            collector_client = yield lookup_collector_client(
                report.header, CANONICAL_BOUNCER_ONION)

    oonib_reporter = OONIBReporter(report.header, collector_client)
    log.msg("Creating report for %s with %s" %
            (report_file, collector_client.settings))
    report_id = yield oonib_reporter.createReport()
    report.header['report_id'] = report_id
    if measurement_id:
        log.debug("Marking it as created")
        yield oonib_report_log.created(measurement_id,
                                       collector_client.settings)
    log.msg("Writing report entries")
    for entry in report:
        yield oonib_reporter.writeReportEntry(entry)
        log.msg("Written entry")
    log.msg("Closing report")
    yield oonib_reporter.finish()
    if measurement_id:
        log.debug("Closing log")
        yield oonib_report_log.closed(measurement_id)
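
Since upload uses yield and defer.returnValue it is a Twisted inlineCallbacks-style coroutine, so it has to be driven from a running reactor. A minimal sketch of calling it, assuming Twisted is available; the report path and collector address are placeholders:

from twisted.internet import defer, task

@defer.inlineCallbacks
def main(reactor):
    # Placeholder arguments: substitute a real report file and collector.
    yield upload("my_report.njson", collector="httpo://example.onion")

if __name__ == "__main__":
    task.react(main)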
Example #7
    def addFailure(self, *args):
        OReporter.addFailure(self, *args)
        log.warn(args)