def test_json_merge_patch():
    """
    Ensures all the extension's JSON Merge Patch files successfully patch and change the core JSON Schema, generating
    schemas that are valid JSON Schema Draft 4, use codelists correctly, and have the required metadata.
    """
    schemas = {}

    basenames = (
        'record-package-schema.json',
        'release-package-schema.json',
        'release-schema.json',
        'versioned-release-validation-schema.json',
    )

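    # Fetch the core schemas from the tagged release, or from the development copy when testing the development
    # version.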
    if ocds_version or not use_development_version:
        url_pattern = ocds_schema_base_url + ocds_tag + '/{}'
    else:
        url_pattern = development_base_url + '/{}'

    for basename in basenames:
        schemas[basename] = http_get(url_pattern.format(basename)).json()

        if basename == 'release-schema.json':
            path = os.path.join(extensiondir, 'extension.json')
            with open(path) as f:
                metadata = json.load(f, object_pairs_hook=rejecting_dict)
                schemas[basename] = extend_schema(basename,
                                                  schemas[basename],
                                                  metadata,
                                                  codelists=external_codelists)

    # This loop is somewhat unnecessary, as repositories contain at most one of each schema file.
    for path, name, text, data in walk_json_data(patch):
        if is_json_merge_patch(data):
            if name in basenames:
                unpatched = deepcopy(schemas[name])
                try:
                    patched = merge(unpatched, data)
                except Exception as e:
                    assert False, f'Exception: {e} {path}'

                # All metadata should be present.
                validate_json_schema(path,
                                     name,
                                     patched,
                                     metaschemas()['metaschema'],
                                     full_schema=True)

                # Empty patches aren't allowed. json_merge_patch mutates `unpatched`, so `schemas[name]` is tested.
                assert patched != schemas[name]


def test_indent():
    misindented_files_paths = [
        path for path in get_misindented_files() if "src/" not in path[0]
    ]
    warn_and_assert(
        misindented_files_paths,
        "{0} is not indented as expected",
        "Files are not indented as expected. See warnings below",
    )


def test_invalid_json():
    warn_and_assert(get_invalid_json_files(), "{0} is not valid JSON: {1}",
                    "JSON files are invalid. See warnings below.")


schemas = [
    (path, name, data)
    for path, name, _, data in walk_json_data(top=absolute_path_to_source_schema_dir)
    if is_json_schema(data)
    and not path.endswith('tests/schema/meta-schema.json')
]
with open(os.path.join(this_dir, 'schema', 'meta-schema.json')) as fp:
    metaschema = json.load(fp)


@pytest.mark.parametrize("path,name,data", schemas)
def test_schema_valid(path, name, data):

    # Skip schemas generated by tests for now.
    if "schema/testing" in path or "src/" in path:
        return
    validate_json_schema(path, name, data, metaschema)


def test_extension_json():
    """
    Ensures the extension's extension.json file is valid against extension-schema.json, all codelists are included, and
    all URLs resolve.
    """
    path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..',
                        'schema', 'extension-schema.json')
    if os.path.isfile(path):
        with open(path) as f:
            schema = json.load(f)
    else:
        url = 'https://raw.githubusercontent.com/open-contracting/standard-maintenance-scripts/main/schema/extension-schema.json'  # noqa: E501
        schema = http_get(url).json()

    expected_codelists = {
        name
        for _, name, _, _, _ in walk_csv_data(
            top=os.path.join(extensiondir, 'codelists'))
    }
    expected_schemas = {
        name
        for _, name, _, _ in walk_json_data(patch, top=extensiondir)
        if name.endswith('-schema.json')
    }

    path = os.path.join(extensiondir, 'extension.json')
    if os.path.isfile(path):
        with open(path) as f:
            data = json.load(f, object_pairs_hook=rejecting_dict)

        validate_json_schema(path, 'extension.json', data, schema)

        urls = data.get('dependencies', []) + data.get('testDependencies', [])
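        # A HEAD request suffices for dependencies, since only the response status code is checked.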
        for url in urls:
            try:
                status_code = http_head(url).status_code
            except requests.exceptions.ConnectionError as e:
                assert False, f'{e} on {url}'
            else:
                assert status_code == 200, f'HTTP {status_code} on {url}'

        urls = list(data['documentationUrl'].values())
        for url in urls:
            try:
                status_code = http_get(url).status_code  # allow redirects
            except requests.exceptions.ConnectionError as e:
                assert False, f'{e} on {url}'
            else:
                assert status_code == 200, f'HTTP {status_code} on {url}'

        actual_codelists = set(data.get('codelists', []))
        if actual_codelists != expected_codelists:
            added, removed = difference(actual_codelists, expected_codelists)
            assert False, f'{path} has mismatch with codelists{added}{removed}'

        actual_schemas = set(data.get('schemas', []))
        if actual_schemas != expected_schemas:
            added, removed = difference(actual_schemas, expected_schemas)
            assert False, f'{path} has mismatch with schema{added}{removed}'
    else:
        # This code is never reached, as the test is only run if there is an extension.json file.
        assert False, 'expected an extension.json file'
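

# For orientation, a minimal extension.json might look like this (illustrative values only; extension-schema.json is
# authoritative):
# {
#     "name": {"en": "Example Extension"},
#     "description": {"en": "Adds example fields."},
#     "documentationUrl": {"en": "https://example.com/docs"},
#     "codelists": ["exampleCodelist.csv"],
#     "schemas": ["release-schema.json"],
#     "dependencies": [],
#     "testDependencies": []
# }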


def patch(text):
    """
    Replaces an unreleased OCDS version tag in text, so that schema URLs resolve against the tagged release or the
    development version.
    """
    match = re.search(r'\d+__\d+__\d+', text)
    if match:
        tag = match.group(0)
        if tag not in ocds_tags:
            if ocds_version or not use_development_version:
                text = text.replace(tag, ocds_tag)
            else:
                text = text.replace(ocds_schema_base_url + tag,
                                    development_base_url)
    return text
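
# For example (illustrative values): if `text` contains an unreleased tag like '1__2__0', patch() replaces the tag
# with ocds_tag when testing a released version, or rewrites the schema base URL to development_base_url when testing
# the development version.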


excluded = ('.git', '.ve', '_static', 'build', 'fixtures', 'node_modules')
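# Note: repo_name is constant for a given run, so the condition below skips JSON Schema validation entirely for the
# named repositories, rather than filtering individual files.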
json_schemas = [
    (path, name, data)
    for path, name, _, data in walk_json_data(patch, excluded=excluded)
    if is_json_schema(data) and repo_name not in (
        'pelican-backend', 'sphinxcontrib-opencontracting')
]


def merge(*objs):
    """
    Copied from json_merge_patch.
    """
    result = objs[0]
    for obj in objs[1:]:
        result = _merge_obj(result, obj)
    return result
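

# A minimal sketch, for reference only, of the RFC 7386 merge semantics that json_merge_patch's _merge_obj is assumed
# to implement (the tests use the real, imported function). Like the real function, it mutates its first argument in
# place, which is why test_json_merge_patch deep-copies the schema before merging.
def _merge_obj_sketch(result, obj):
    if not isinstance(obj, dict):
        return obj  # a non-object patch value replaces the target wholesale
    if not isinstance(result, dict):
        result = {}  # merging an object into a non-object starts from empty
    for key, value in obj.items():
        if value is None:
            result.pop(key, None)  # a null patch value deletes the key
        else:
            result[key] = _merge_obj_sketch(result.get(key), value)
    return result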