Code Example #1
def compare(file, json_output, debug, rule_file, rule_uri, merge):
    """
    If you have headers you would like to test with drheader, you can "compare" them against your ruleset with this command.

    This command requires a valid json file as input.

    Example:

        \b
        [
            {
                "url": "https://test.com",
                "headers": {
                    "X-XSS-Protection": "1; mode=block",
                    "Content-Security-Policy": "default-src 'none'; script-src 'self' unsafe-inline; object-src 'self';"
                    "Strict-Transport-Security": "max-age=31536000; includeSubDomains",
                    "X-Frame-Options": "SAMEORIGIN",
                    "X-Content-Type-Options": "nosniff",
                    "Referrer-Policy": "strict-origin",
                    "Cache-Control": "no-cache, no-store, must-revalidate",
                    "Pragma": "no-cache",
                    "Set-Cookie": ["HttpOnly; Secure"]
                },
                "status_code": 200
            },
            ...
        ]
    """
    exit_code = EXIT_CODE_NO_ERROR
    audit = []
    schema = {
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "url": {
                    "type": "string",
                    'format': 'uri'
                },
                "headers": {
                    "type": "object"
                },
                "status_code": {
                    "type": "integer"
                }
            },
            "required": ['headers', 'url']
        }
    }

    if debug:
        logging.basicConfig(level=logging.DEBUG)

    try:
        data = json.loads(file.read())
        jsonschema.validate(instance=data,
                            schema=schema,
                            format_checker=jsonschema.FormatChecker())
        logging.debug('Found {} URLs'.format(len(data)))
    except Exception as e:
        raise click.ClickException(e)

    if rule_uri and not rule_file:
        if not validators.url(rule_uri):
            raise click.ClickException(
                message='"{}" is not a valid URL.'.format(rule_uri))
        try:
            rule_file = get_rules_from_uri(rule_uri)
        except Exception as e:
            if debug:
                raise click.ClickException(e)
            else:
                raise click.ClickException(
                    'No content retrieved from rules-uri.')

    rules = load_rules(rule_file, merge)

    for i in data:
        logging.debug('Analysing: {}'.format(i['url']))
        drheader_instance = Drheader(url=i['url'], headers=i['headers'])
        drheader_instance.analyze(rules)
        audit.append({'url': i['url'], 'report': drheader_instance.report})
        if drheader_instance.report:
            exit_code = EXIT_CODE_FAILURE

    echo_bulk_report(audit, json_output)
    sys.exit(exit_code)
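Below is a minimal, self-contained sketch (not taken from drheader itself) of the validation step that compare() performs on its input file: the same jsonschema document accepts the documented JSON format and rejects an entry that is missing the required "headers" key. Only the jsonschema package is assumed; the sample data is invented for illustration.

# Minimal sketch: validating "compare" input with the same jsonschema document.
# Only the jsonschema package is assumed; the sample data is invented.
import jsonschema

schema = {
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "url": {"type": "string", "format": "uri"},
            "headers": {"type": "object"},
            "status_code": {"type": "integer"}
        },
        "required": ["headers", "url"]
    }
}

valid_input = [{
    "url": "https://test.com",
    "headers": {"X-Frame-Options": "SAMEORIGIN"},
    "status_code": 200
}]
invalid_input = [{"url": "https://test.com"}]  # missing the required "headers" key

# Passes silently.
jsonschema.validate(instance=valid_input, schema=schema,
                    format_checker=jsonschema.FormatChecker())

# Raises jsonschema.ValidationError, which compare() wraps in a click.ClickException.
try:
    jsonschema.validate(instance=invalid_input, schema=schema,
                        format_checker=jsonschema.FormatChecker())
except jsonschema.ValidationError as error:
    print(error.message)  # 'headers' is a required property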
Code Example #2
def bulk(ctx, file, json_output, input_format, debug, rule_file, rule_uri,
         merge):
    """
    Scan multiple http(s) endpoints with drheader.

    The default file format is json:

        \b
        [
          {
            "url": "https://example.com",
            "params": {
                "example_parameter_key": "example_parameter_value"
            }
          },
          ...
        ]

    You can also use a txt file for input (using the "-ff txt" option):

        \b
        https://example.com
        https://example.co.uk

    NOTE: URL parameters are currently only supported on bulk scans.
    """
    exit_code = EXIT_CODE_NO_ERROR
    audit = []
    urls = []
    schema = {
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "url": {
                    "type": "string",
                    'format': 'uri'
                },
                "params": {
                    "type": "string"
                },
            },
            "required": ['url']
        }
    }

    if debug:
        logging.basicConfig(level=logging.DEBUG)

    if input_format == 'txt':
        urls_temp = list(filter(None, file.read().splitlines()))
        for i in urls_temp:
            urls.append({'url': i})
        for i, v in enumerate(urls):
            logging.debug('Found: {}'.format(v))
            if not validators.url(v['url']):
                raise click.ClickException(
                    message='[line {}] "{}" is not a valid URL.'.format(
                        i + 1, v['url']))
    else:
        try:
            urls = json.loads(file.read())
            jsonschema.validate(instance=urls,
                                schema=schema,
                                format_checker=jsonschema.FormatChecker())
        except Exception as e:
            raise click.ClickException(e)

    logging.debug('Found {} URLs'.format(len(urls)))

    if rule_uri and not rule_file:
        if not validators.url(rule_uri):
            raise click.ClickException(
                message='"{}" is not a valid URL.'.format(rule_uri))
        try:
            rule_file = get_rules_from_uri(rule_uri)
        except Exception as e:
            if debug:
                raise click.ClickException(e)
            else:
                raise click.ClickException(
                    'No content retrieved from rules-uri.')

    rules = load_rules(rule_file, merge)

    for i, v in enumerate(urls):
        logging.debug('Querying: {}...'.format(v))
        drheader_instance = Drheader(url=v['url'],
                                     params=v.get('params', None),
                                     verify=ctx.obj['verify'])
        logging.debug('Analysing: {}...'.format(v))
        drheader_instance.analyze(rules)
        audit.append({'url': v['url'], 'report': drheader_instance.report})
        if drheader_instance.report:
            exit_code = EXIT_CODE_FAILURE

    echo_bulk_report(audit, json_output)
    sys.exit(exit_code)
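The following is a minimal, self-contained sketch (again not part of drheader) of the "-ff txt" branch in bulk(): blank lines are dropped, each remaining line is wrapped as a {'url': ...} entry, and every URL is checked with validators.url before scanning. Only the validators package is assumed, and the sample input text is invented.

# Minimal sketch of the "-ff txt" input handling used by bulk().
# Only the validators package is assumed; the input text is invented.
import validators

text = """https://example.com

https://example.co.uk
not-a-url
"""

# Drop blank lines and wrap each remaining line the way bulk() does.
urls = [{'url': line} for line in filter(None, text.splitlines())]

for i, entry in enumerate(urls):
    if not validators.url(entry['url']):
        # bulk() raises click.ClickException here; printing keeps the sketch standalone.
        print('[line {}] "{}" is not a valid URL.'.format(i + 1, entry['url']))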