def analyze(self, rules=None):
    """
    Analyze the currently loaded headers against provided rules.

    Header values are lower-cased in place first so that rule matching
    is case-insensitive.

    :param rules: Override rules to compare headers against
    :type rules: dict
    :return: Audit report
    :rtype: list
    """
    # Normalise header values: plain strings and lists of strings are
    # lowered; isinstance is preferred over type() == comparisons and the
    # two cases are mutually exclusive, hence elif.
    for header, value in self.headers.items():
        if isinstance(value, str):
            self.headers[header] = value.lower()
        elif isinstance(value, list):
            self.headers[header] = [item.lower() for item in value]

    if not rules:
        rules = load_rules()
    for rule, config in rules.items():
        self.__validate_rules(config, header=rule)
        # Directive-level rules only apply when the header is present.
        if 'Directives' in config and rule in self.headers:
            for directive, d_config in config['Directives'].items():
                self.__validate_rules(d_config,
                                      header=rule,
                                      directive=directive)
    return self.report
Example #2
0
 def test_load_rules_custom(self):
     """Custom rules loaded from a file should replace the defaults."""
     rules_path = os.path.join(os.path.dirname(__file__),
                               'testfiles/custom_rules.yml')
     # The with-statement closes the file; the original explicit
     # f.close() inside the block was redundant and has been removed.
     with open(rules_path, 'r') as f:
         rules = load_rules(f)
     self.assertNotEqual(rules, self.default_rules['Headers'])
     self.assertEqual(rules, self.custom_rules['Headers'])
Example #3
0
def single(target_url, json_output, debug, rule_file, merge, junit):
    """
    Scan a single http(s) endpoint with drheader.

    NOTE: URL parameters are currently only supported on bulk scans.

    :param target_url: URL to scan.
    :param json_output: Emit the report as JSON instead of tables.
    :param debug: Enable debug logging and verbose exception messages.
    :param rule_file: Optional file object with override rules.
    :param merge: Merge override rules with the default ruleset.
    :param junit: Also write the report as a JUnit XML file.
    :return: 0 on completion.
    """
    if debug:
        logging.basicConfig(level=logging.DEBUG)

    logging.debug('Validating: {}'.format(target_url))
    if not validators.url(target_url):
        raise click.ClickException(
            message='"{}" is not a valid URL.'.format(target_url))

    rules = load_rules(rule_file, merge)

    try:
        logging.debug('Querying headers...')
        drheader_instance = Drheader(url=target_url)
    except Exception as e:
        # In debug mode surface the underlying exception; otherwise keep
        # the user-facing message generic.
        if debug:
            raise click.ClickException(e)
        else:
            raise click.ClickException('Failed to get headers.')

    try:
        logging.debug('Analyzing headers...')
        drheader_instance.analyze(rules)
    except Exception as e:
        if debug:
            raise click.ClickException(e)
        else:
            raise click.ClickException('Failed to analyze headers.')

    if json_output:
        click.echo(json.dumps(drheader_instance.report))
    else:
        click.echo()
        if not drheader_instance.report:
            click.echo('No issues found!')
        else:
            click.echo('{0} issues found'.format(
                len(drheader_instance.report)))
            for issue in drheader_instance.report:
                # Render each finding as a two-column key/value table.
                values = [[k, v] for k, v in issue.items()]
                click.echo('----')
                click.echo(tabulate(values, tablefmt="presto"))
    if junit:
        file_junit_report(rules, drheader_instance.report)
    return 0
Example #4
0
 def test_load_rules_bad_parameter(self):
     """A non-file argument to load_rules should raise AttributeError."""
     self.assertRaises(AttributeError, load_rules, 2)
Example #5
0
 def test_load_rules_default(self):
     """With no arguments, load_rules returns the default header rules."""
     self.assertEqual(load_rules(), self.default_rules['Headers'])
Example #6
0
def compare(file, json_output, debug, rule_file, rule_uri, merge):
    """
    If you have headers you would like to test with drheader, you can "compare" them with your ruleset using this command.

    This command requires a valid json file as input.

    Example:

        \b
        [
            {
                "url": "https://test.com",
                "headers": {
                    "X-XSS-Protection": "1; mode=block",
                    "Content-Security-Policy": "default-src 'none'; script-src 'self' unsafe-inline; object-src 'self';",
                    "Strict-Transport-Security": "max-age=31536000; includeSubDomains",
                    "X-Frame-Options": "SAMEORIGIN",
                    "X-Content-Type-Options": "nosniff",
                    "Referrer-Policy": "strict-origin",
                    "Cache-Control": "no-cache, no-store, must-revalidate",
                    "Pragma": "no-cache",
                    "Set-Cookie": ["HttpOnly; Secure"]
                },
                "status_code": 200
            },
            ...
        ]
    """
    exit_code = EXIT_CODE_NO_ERROR
    audit = []
    # JSON schema for the input: a list of scans, each requiring at least
    # a 'url' and a 'headers' mapping.
    schema = {
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "url": {
                    "type": "string",
                    "format": "uri"
                },
                "headers": {
                    "type": "object"
                },
                "status_code": {
                    "type": "integer"
                }
            },
            "required": ["headers", "url"]
        }
    }

    if debug:
        logging.basicConfig(level=logging.DEBUG)

    try:
        data = json.loads(file.read())
        jsonschema.validate(instance=data,
                            schema=schema,
                            format_checker=jsonschema.FormatChecker())
        logging.debug('Found {} URLs'.format(len(data)))
    except Exception as e:
        raise click.ClickException(e)

    # A rules URI is only consulted when no local rule file was supplied.
    if rule_uri and not rule_file:
        if not validators.url(rule_uri):
            raise click.ClickException(
                message='"{}" is not a valid URL.'.format(rule_uri))
        try:
            rule_file = get_rules_from_uri(rule_uri)
        except Exception as e:
            if debug:
                raise click.ClickException(e)
            else:
                raise click.ClickException(
                    'No content retrieved from rules-uri.')

    rules = load_rules(rule_file, merge)

    for entry in data:
        logging.debug('Analysing : {}'.format(entry['url']))
        drheader_instance = Drheader(url=entry['url'],
                                     headers=entry['headers'])
        drheader_instance.analyze(rules)
        audit.append({'url': entry['url'],
                      'report': drheader_instance.report})
        # Any finding for any URL makes the whole run a failure.
        if drheader_instance.report:
            exit_code = EXIT_CODE_FAILURE

    echo_bulk_report(audit, json_output)
    sys.exit(exit_code)
Example #7
0
def bulk(ctx, file, json_output, input_format, debug, rule_file, rule_uri,
         merge):
    """
    Scan multiple http(s) endpoints with drheader.

    The default file format is json:

        \b
        [
          {
            "url": "https://example.com",
            "params": {
                "example_parameter_key": "example_parameter_value"
            }
          },
          ...
        ]

    You can also use a txt file for input (using the "-ff txt" option):

        \b
        https://example.com
        https://example.co.uk

    NOTE: URL parameters are currently only supported on bulk scans.
    """
    exit_code = EXIT_CODE_NO_ERROR
    audit = []
    urls = []
    # JSON schema for the input file. 'params' is accepted either as a
    # query string or as a mapping; the original schema only allowed
    # "string", which rejected the documented object example above.
    schema = {
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "url": {
                    "type": "string",
                    "format": "uri"
                },
                "params": {
                    "type": ["string", "object"]
                },
            },
            "required": ["url"]
        }
    }

    if debug:
        logging.basicConfig(level=logging.DEBUG)

    if input_format == 'txt':
        # One URL per non-empty line of the text file.
        urls = [{'url': line} for line in file.read().splitlines() if line]
        for i, v in enumerate(urls):
            logging.debug('Found: {}'.format(v))
            if not validators.url(v['url']):
                raise click.ClickException(
                    message='[line {}] "{}" is not a valid URL.'.format(
                        i + 1, v['url']))
    else:
        try:
            urls = json.loads(file.read())
            jsonschema.validate(instance=urls,
                                schema=schema,
                                format_checker=jsonschema.FormatChecker())
        except Exception as e:
            raise click.ClickException(e)

    logging.debug('Found {} URLs'.format(len(urls)))

    # A rules URI is only consulted when no local rule file was supplied.
    if rule_uri and not rule_file:
        if not validators.url(rule_uri):
            raise click.ClickException(
                message='"{}" is not a valid URL.'.format(rule_uri))
        try:
            rule_file = get_rules_from_uri(rule_uri)
        except Exception as e:
            if debug:
                raise click.ClickException(e)
            else:
                raise click.ClickException(
                    'No content retrieved from rules-uri.')

    rules = load_rules(rule_file, merge)

    for i, v in enumerate(urls):
        logging.debug('Querying: {}...'.format(v))
        drheader_instance = Drheader(url=v['url'],
                                     params=v.get('params', None),
                                     verify=ctx.obj['verify'])
        logging.debug('Analysing: {}...'.format(v))
        drheader_instance.analyze(rules)
        audit.append({'url': v['url'], 'report': drheader_instance.report})
        # Any finding for any URL makes the whole run a failure.
        if drheader_instance.report:
            exit_code = EXIT_CODE_FAILURE

    echo_bulk_report(audit, json_output)
    sys.exit(exit_code)
Example #8
0
def single(ctx, target_url, json_output, debug, rule_file, rule_uri, merge,
           junit):
    """
    Scan a single http(s) endpoint with drheader.

    NOTE: URL parameters are currently only supported on bulk scans.

    :param ctx: Click context; ctx.obj['verify'] controls TLS verification.
    :param target_url: URL to scan.
    :param json_output: Emit the report as JSON instead of tables.
    :param debug: Enable debug logging and verbose exception messages.
    :param rule_file: Optional file object with override rules.
    :param rule_uri: Optional URI to fetch rules from (ignored if
        rule_file is given).
    :param merge: Merge override rules with the default ruleset.
    :param junit: Also write the report as a JUnit XML file.
    """
    exit_code = EXIT_CODE_NO_ERROR
    if debug:
        logging.basicConfig(level=logging.DEBUG)

    logging.debug('Validating: {}'.format(target_url))
    if not validators.url(target_url):
        raise click.ClickException(
            message='"{}" is not a valid URL.'.format(target_url))

    # A rules URI is only consulted when no local rule file was supplied.
    if rule_uri and not rule_file:
        if not validators.url(rule_uri):
            raise click.ClickException(
                message='"{}" is not a valid URL.'.format(rule_uri))
        try:
            rule_file = get_rules_from_uri(rule_uri)
        except Exception as e:
            if debug:
                raise click.ClickException(e)
            else:
                raise click.ClickException(
                    'No content retrieved from rules-uri.')

    rules = load_rules(rule_file, merge)

    try:
        logging.debug('Querying headers...')
        drheader_instance = Drheader(url=target_url, verify=ctx.obj['verify'])
    except Exception as e:
        # In debug mode surface the underlying exception; otherwise keep
        # the user-facing message generic.
        if debug:
            raise click.ClickException(e)
        else:
            raise click.ClickException('Failed to get headers.')

    try:
        logging.debug('Analyzing headers...')
        drheader_instance.analyze(rules)
    except Exception as e:
        if debug:
            raise click.ClickException(e)
        else:
            raise click.ClickException('Failed to analyze headers.')

    if drheader_instance.report:
        exit_code = EXIT_CODE_FAILURE

    if json_output:
        click.echo(json.dumps(drheader_instance.report))
    else:
        click.echo()
        if not drheader_instance.report:
            click.echo('No issues found!')
        else:
            click.echo('{0} issues found'.format(
                len(drheader_instance.report)))
            for issue in drheader_instance.report:
                # Render each finding as a two-column key/value table.
                values = [[k, v] for k, v in issue.items()]
                click.echo('----')
                click.echo(tabulate(values, tablefmt="presto"))
    if junit:
        file_junit_report(rules, drheader_instance.report)
    sys.exit(exit_code)