Example #1
def patch_schema(basename='release-schema.json'):
    schema = http_get(url_prefix + '/' + basename).json()
    patched = extend_schema(basename, schema, read_metadata())
    with open(os.path.join(cwd, basename)) as f:
        json_merge_patch.merge(patched, json.load(f))

    return patched
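All of the examples on this page rely on RFC 7386 (JSON Merge Patch) semantics: json_merge_patch.merge combines two documents by merging nested objects key by key, replacing scalars and arrays outright, and deleting any key whose patch value is null. A minimal, self-contained sketch of that behaviour (the schema and patch values below are invented for illustration, not taken from any project above):

import json_merge_patch

schema = {
    'properties': {
        'title': {'type': 'string'},
        'status': {'type': 'string'},
    }
}
patch = {
    'properties': {
        'status': None,                  # null deletes the key
        'budget': {'type': 'number'},    # new keys are added
        'title': {'type': 'string', 'description': 'A title'},  # objects merge recursively
    }
}

patched = json_merge_patch.merge(schema, patch)
# expected: {'properties': {'title': {'type': 'string', 'description': 'A title'},
#                           'budget': {'type': 'number'}}}
print(patched)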
Example #2
    def patched_release_schema(self,
                               schema=None,
                               extension_field=None,
                               language='en'):
        """
        Returns the patched release schema.

        :param dict schema: the release schema
        :param str extension_field: the property with which to annotate each definition and field with the name of the
                                    extension in which the definition or field is defined
        :param str language: the language to use for the name of the extension
        """
        if not schema:
            schema = json.loads(
                self.get_standard_file_contents('release-schema.json'))

        json_merge_patch.merge(
            schema,
            self.release_schema_patch(extension_field=extension_field,
                                      language=language))

        if self.schema_base_url:
            schema['id'] = urljoin(self.schema_base_url, 'release-schema.json')

        return schema
Example #3
 def get_dependencies(extension, basename):
     dependencies = extension.get('dependencies', []) + extension.get(
         'testDependencies', [])
     for url in dependencies:
         dependency = requests.get(url).json()
         external_codelists.update(dependency.get('codelists', []))
         schema_url = '{}/{}'.format(url.rsplit('/', 1)[0], basename)
         json_merge_patch.merge(schemas[basename],
                                requests.get(schema_url).json())
         get_dependencies(dependency, basename)
Example #4
    def release_schema_patch(self):
        """
        Returns the consolidated release schema patch.
        """
        profile_patch = OrderedDict()

        # Replaces `null` with sentinel values, to preserve the null'ing of fields by extensions in the final patch.
        for extension in self.extensions():
            data = re.sub(r':\s*null\b', ': "REPLACE_WITH_NULL"',
                          extension.remote('release-schema.json'))
            json_merge_patch.merge(profile_patch, _json_loads(data))

        return _json_loads(
            json.dumps(profile_patch).replace('"REPLACE_WITH_NULL"', 'null'))
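The REPLACE_WITH_NULL sentinel above works around a consequence of those RFC 7386 semantics: when several extension patches are combined with merge(), a null in one of them is read as "delete this key" and silently disappears from the consolidated patch, so it would no longer delete the field when the consolidated patch is later applied to the release schema. A short sketch of the difference (the field name is illustrative):

import json
import json_merge_patch

# Merging the raw patch drops the null entirely.
combined = json_merge_patch.merge({}, {'planning': None})
assert combined == {}

# Round-tripping through a sentinel string keeps the deletion in the final patch.
combined = json_merge_patch.merge({}, {'planning': 'REPLACE_WITH_NULL'})
final = json.loads(json.dumps(combined).replace('"REPLACE_WITH_NULL"', 'null'))
assert final == {'planning': None}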
Example #5
 def execute(self, context):
     self._validate_all_body_fields()
     try:
         # Idempotence check (sort of) - we want to check whether the new template
         # is already created and, if it is, we assume it was created by a previous
         # run of the CopyTemplate operator - we do not check whether the content of
         # the template is as expected. Templates are immutable, so we cannot update
         # them anyway, and deleting/recreating is not worth the hassle, especially
         # since we cannot delete a template if it is already used by some Instance
         # Group Manager. We assume success if the template is simply present.
         existing_template = self._hook.get_instance_template(
             project_id=self.project_id, resource_id=self.body_patch['name'])
         self.log.info("The {} template already existed. It was likely "
                       "created by previous run of the operator. Assuming success.")
         return existing_template
     except HttpError as e:
         # We actually expect to get 404 / Not Found here as the template should
         # not yet exist
         if not e.resp.status == 404:
             raise e
     old_body = self._hook.get_instance_template(project_id=self.project_id,
                                                 resource_id=self.resource_id)
     new_body = deepcopy(old_body)
     self._field_sanitizer.sanitize(new_body)
     new_body = merge(new_body, self.body_patch)
     self.log.info("Calling insert instance template with updated body: {}".
                   format(new_body))
     self._hook.insert_instance_template(project_id=self.project_id,
                                         body=new_body,
                                         request_id=self.request_id)
     return self._hook.get_instance_template(project_id=self.project_id,
                                             resource_id=self.body_patch['name'])
Example #6
 def execute(self, context):
     self._validate_all_body_fields()
     try:
         # Idempotence check (sort of) - we want to check whether the new template
         # is already created and, if it is, we assume it was created by a previous
         # run of the CopyTemplate operator - we do not check whether the content of
         # the template is as expected. Templates are immutable, so we cannot update
         # them anyway, and deleting/recreating is not worth the hassle, especially
         # since we cannot delete a template if it is already used by some Instance
         # Group Manager. We assume success if the template is simply present.
         existing_template = self._hook.get_instance_template(
             resource_id=self.body_patch['name'],
             project_id=self.project_id)
         self.log.info(
             "The %s template already existed. It was likely created by previous run of the operator. "
             "Assuming success.", existing_template)
         return existing_template
     except HttpError as e:
         # We actually expect to get 404 / Not Found here as the template should
         # not yet exist
         if not e.resp.status == 404:
             raise e
     old_body = self._hook.get_instance_template(
         resource_id=self.resource_id, project_id=self.project_id)
     new_body = deepcopy(old_body)
     self._field_sanitizer.sanitize(new_body)
     new_body = merge(new_body, self.body_patch)
     self.log.info("Calling insert instance template with updated body: %s",
                   new_body)
     self._hook.insert_instance_template(body=new_body,
                                         request_id=self.request_id,
                                         project_id=self.project_id)
     return self._hook.get_instance_template(
         resource_id=self.body_patch['name'], project_id=self.project_id)
Example #7
def _patch_schema_recursive(schema, version, lang, include_test_dependencies=False):
    dependencies = version['metadata'].get('dependencies', [])
    if include_test_dependencies:
        dependencies += version['metadata'].get('testDependencies', [])

    extension_versions_by_base_url = _extension_versions_by_base_url()

    for url in dependencies:
        version = extension_versions_by_base_url[url[:-14]]  # remove "extension.json"
        patch = version['schemas']['release-schema.json'][lang]

        # Make it possible to determine the source of the definitions.
        for name, definition in patch.get('definitions', {}).items():
            if name not in schema['definitions']:
                definition['extension_explorer:source'] = {'identifier': version['id'], 'version': version['version']}

        json_merge_patch.merge(schema, patch)
        _patch_schema_recursive(schema, version, lang, include_test_dependencies=include_test_dependencies)
Example #8
    def apply_extensions(self, schema_obj):
        if not self.extensions:
            return
        for extensions_descriptor_url in self.extensions.keys():
            i = extensions_descriptor_url.rfind('/')
            url = '{}/{}'.format(extensions_descriptor_url[:i],
                                 'release-schema.json')

            try:
                extension = requests.get(url)
            except requests.exceptions.RequestException:
                self.invalid_extension[
                    extensions_descriptor_url] = 'fetching failed'
                continue
            if extension.ok:
                try:
                    extension_data = extension.json()
                except json.JSONDecodeError:
                    self.invalid_extension[
                        extensions_descriptor_url] = 'invalid JSON'
                    continue
            else:
                self.invalid_extension[
                    extensions_descriptor_url] = '{}: {}'.format(
                        extension.status_code, extension.reason.lower())
                continue

            schema_obj = json_merge_patch.merge(schema_obj, extension_data)
            extensions_descriptor = requests.get(
                extensions_descriptor_url).json()
            cur_language = translation.get_language()

            extension_description = {'url': url}

            # Section to be removed when extensions conform to new schema
            old_documentation_url = extensions_descriptor.get(
                'documentation_url', '')
            if old_documentation_url and 'documentationUrl' not in extensions_descriptor:
                extensions_descriptor['documentationUrl'] = {
                    'en': old_documentation_url
                }
            # End section

            for field in ['description', 'name', 'documentationUrl']:
                field_object = extensions_descriptor.get(field, {})
                if isinstance(field_object, str):
                    field_value = field_object
                else:
                    field_value = field_object.get(cur_language)
                    if not field_value:
                        field_value = field_object.get('en', '')
                extension_description[field] = field_value

            self.extensions[extensions_descriptor_url] = extension_description
            self.extended = True
Example #9
    def patched_release_schema(self):
        """
        Returns the patched release schema.
        """
        content = self.get_standard_file_contents('release-schema.json')
        patched = json_merge_patch.merge(_json_loads(content),
                                         self.release_schema_patch())
        if self.schema_base_url:
            patched['id'] = urljoin(self.schema_base_url,
                                    'release-schema.json')

        return patched
Example #10
    def release_schema_patch(self, extension_field=None, language='en'):
        """
        Returns the consolidated release schema patch.

        :param str extension_field: the property with which to annotate each definition and field with the name of the
                                    extension in which the definition or field is defined
        :param str language: the language to use for the name of the extension
        """
        output = {}

        # Replaces `null` with sentinel values, to preserve the null'ing of fields by extensions in the final patch.
        for extension in self.extensions():
            patch = extension.remote('release-schema.json', default='{}')
            patch = json.loads(
                re.sub(r':\s*null\b', ': "REPLACE_WITH_NULL"', patch))
            if extension_field:
                _add_extension_field(patch,
                                     extension.metadata['name'][language],
                                     extension_field)
            json_merge_patch.merge(output, patch)

        return json.loads(
            json.dumps(output).replace('"REPLACE_WITH_NULL"', 'null'))
Example #11
def test_example_valid():
    """
    Ensures all JSON snippets in the extension's documentation are snippets of OCDS data with no additional fields.
    """
    def set_additional_properties_false(data):
        if isinstance(data, list):
            for item in data:
                set_additional_properties_false(item)
        elif isinstance(data, dict):
            if 'properties' in data:
                data['additionalProperties'] = False
            for value in data.values():
                set_additional_properties_false(value)

    for basename in ('release-schema.json', 'release-package-schema.json',
                     'record-package-schema.json'):
        if os.path.isfile(os.path.join(cwd, basename)):
            patched = patch_schema(basename)
            break
    else:
        return

    set_additional_properties_false(patched)

    for i, text, data in examples():
        # Skip packages (only occurs once in ocds_ppp_extension).
        if 'releases' in data:
            continue

        release = deepcopy(minimal_release)
        json_merge_patch.merge(release, data)
        if 'tender' in release and 'id' not in release['tender']:
            release['tender']['id'] = '1'

        errors = validate_schema('README.md', release, patched)

        assert not errors, f'README.md: JSON block {i} is invalid. See warnings below.'
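In outline, the test merges each documented example into a minimal release and validates it against the patched schema once additionalProperties has been forced to false everywhere, so any field the documentation uses but the extended schema does not define becomes a validation error. A condensed sketch of that mechanism, using jsonschema's Draft4Validator in place of the project's validate_schema helper, with an invented two-field schema and example:

import copy
import json_merge_patch
from jsonschema import Draft4Validator

patched = {
    'type': 'object',
    'properties': {
        'tender': {
            'type': 'object',
            'properties': {'id': {'type': 'string'}},
            'additionalProperties': False,
        },
    },
    'additionalProperties': False,
}
minimal_release = {'tender': {'id': '1'}}
data = {'tender': {'undocumentedField': 'x'}}  # as it might appear in a README example

release = json_merge_patch.merge(copy.deepcopy(minimal_release), data)
errors = list(Draft4Validator(patched).iter_errors(release))
assert errors  # 'undocumentedField' is not defined, so validation fails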
Example #12
def setup(app):
    app.add_config_value(
        'recommonmark_config',
        {
            #'url_resolver': lambda url: github_doc_root + url,
            'auto_toc_tree_section': 'Contents',
            'enable_eval_rst': True
        },
        True)
    app.add_transform(AutoStructify)

    import json
    import json_merge_patch
    import os
    import glob
    from collections import OrderedDict

    schema = OrderedDict()

    for file in glob.glob("../schema/firstmilefarmdataframe/*.json"
                          ) + glob.glob("../schema/components/*.json"):
        try:
            with open(file, 'r') as schema_file:
                print("Merging " + file)
                schema_element = json.loads(schema_file.read(),
                                            object_pairs_hook=OrderedDict)
                schema = json_merge_patch.merge(schema,
                                                schema_element,
                                                position='last')
        except Exception:
            print("Problem merging from " + file)
            pass

    with open("_static/first-mile-farm-data-schema.json", "w") as outfile:
        outfile.write(json.dumps(schema, indent=2))

    print("Full schema in first-mile-schema.json updated")

    from flattentool import create_template, unflatten, flatten
    create_template(schema="_static/first-mile-farm-data-schema.json",
                    output_name="_static/flattened")

    global html_static_path
    for file in glob.glob("extensions/*/*.json"):
        html_static_path = html_static_path + [file]

    for file in glob.glob("../schema/components/*.json"):
        html_static_path = html_static_path + [file]
Example #13
    def take_action(self, parsed_args):
        vnf_client = self.app.eclsdk.conn.virtual_network_appliance

        target = vnf_client.get_virtual_network_appliance(
            parsed_args.virtual_network_appliance)

        rows = ['ID', 'Name', 'Description', 'Tags']
        row_headers = rows

        # serialize request parameter as JSON
        requested_param = {}
        if hasattr(parsed_args, 'name') and parsed_args.name is not None:
            requested_param['name'] = parsed_args.name
        if hasattr(parsed_args,
                   'description') and parsed_args.description is not None:
            requested_param['description'] = parsed_args.description
        if hasattr(parsed_args, 'tags') and parsed_args.tags is not None:
            tags = parsed_args.tags or '{}'
            try:
                requested_param['tags'] = json.loads(tags)
            except Exception:
                msg = _("You must specify JSON object format")
                raise exceptions.CommandError(msg)

        # serialize current parameter as JSON
        current_param = {
            'name': target.name,
            'description': target.description,
            'tags': target.tags,
        }
        origin_param = copy.deepcopy(current_param)
        merged_param = jmp.merge(current_param, requested_param)
        patch = jmp.create_patch(origin_param, merged_param)

        if 'tags' in requested_param and target.tags != requested_param['tags']:
            patch['tags'] = requested_param['tags']

        if not patch:
            msg = _('No change will be expected')
            raise exceptions.CommandError(msg)

        data = vnf_client.update_virtual_network_appliance(
            parsed_args.virtual_network_appliance, **patch)

        _set_interface_names_for_display(data)

        return row_headers, utils.get_item_properties(data, rows)
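The merge-then-create_patch pattern above is how these CLI commands compute a minimal update: the requested values are merged into a copy of the current state, and create_patch then produces the smallest RFC 7386 patch that turns the original into the merged document, so unchanged fields are omitted from the API call. A stripped-down sketch with invented values, assuming jmp is the json_merge_patch module (as the merge/create_patch calls suggest):

import copy
import json_merge_patch as jmp

current = {'name': 'vna-1', 'description': 'old description', 'tags': {}}
requested = {'name': 'vna-1', 'description': 'new description'}

origin = copy.deepcopy(current)
merged = jmp.merge(current, requested)
patch = jmp.create_patch(origin, merged)
print(patch)  # expected: {'description': 'new description'} - unchanged keys drop out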
Example #14
def merge(files, output, position):
    result = None
    for file in files:
        with open(file) as input_file:
            input_json = json.load(input_file, object_pairs_hook=OrderedDict)
        if result is None:
            result = input_json
        else:
            result = json_merge_patch.merge(result, input_json, position=position)

    merged = json.dumps(result, indent=4)

    if output:
        with open(output, 'w+') as output_file:
            output_file.write(merged)
    else:
        print(merged)
Example #15
 def patch(self) -> Entry:
     """
     Modify a Phonebook Entry.
     The existing content of the Entry will be patched with the JSON request body according to RFC 7386 (https://tools.ietf.org/html/rfc7386).
     :return: the modified Phonebook Entry
     """
     actual = objToDict(self._db_adapter.get(self._entry_id))
     if actual is None:
         abort(404,
               detail='Entry does not exist with ID: ' + self._entry_id)
     patch = request.json
     if 'id' in patch and patch['id'] != self._entry_id:
         abort(400, detail='Entry ID in URL and body mismatch')
     result = json_merge_patch.merge(actual, patch)
     result.pop('id')
     self._db_adapter.modify(Entry(self._entry_id, **result))
     return self._db_adapter.get(self._entry_id)
Example #16
def load_config(
    config_content: str,
    merge_content: Optional[str] = None,
    patch_content: Optional[str] = None,
) -> _JSONDict:
    config_data = yaml.safe_load(config_content)
    if config_data is None:
        config_data = {}
    if not isinstance(config_data, dict):
        raise SystemExit(f"Invalid configuration format: {type(config_data)!r}")

    if merge_content is not None:
        merge_data = yaml.safe_load(merge_content)
        config_data = merge(config_data, merge_data)

    if patch_content is not None:
        patch_data = yaml.safe_load(patch_content)
        json_patch = JsonPatch(patch_data)
        config_data = json_patch.apply(config_data)
    return cast(_JSONDict, config_data)
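load_config() above accepts both patch styles, and they behave quite differently: an RFC 7386 merge patch is a declarative overlay (nested objects merge, null deletes a key), while an RFC 6902 JSON Patch is an ordered list of explicit operations, applied here by the JsonPatch class (presumably from the jsonpatch package). A small sketch of the contrast, using an invented configuration document:

import copy
import json_merge_patch
from jsonpatch import JsonPatch

config = {'server': {'host': 'localhost', 'port': 8080}, 'debug': True}

# RFC 7386 merge patch: declarative overlay; null removes a key.
merged = json_merge_patch.merge(copy.deepcopy(config),
                                {'debug': None, 'server': {'port': 9090}})
# expected: {'server': {'host': 'localhost', 'port': 9090}}

# RFC 6902 JSON Patch: explicit operations, applied in order.
patched = JsonPatch([
    {'op': 'replace', 'path': '/server/port', 'value': 9090},
    {'op': 'remove', 'path': '/debug'},
]).apply(copy.deepcopy(config))
# expected: {'server': {'host': 'localhost', 'port': 9090}}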
Example #17
    def take_action(self, parsed_args):
        vnf_client = self.app.eclsdk.conn.virtual_network_appliance

        target = vnf_client.get_virtual_network_appliance(
            parsed_args.virtual_network_appliance)

        rows = ROWS_FOR_SHOW
        row_headers = rows

        aaps = []
        adds = parsed_args.adds or []
        delete_aaps = []
        deletes = parsed_args.deletes or []

        if len(adds) == 0 and len(deletes) == 0:
            msg = _("No options are specified.")
            raise exceptions.CommandError(msg)

        VALID_KEYS = [
            'interface-slot-no', 'ip-address', 'mac-address', 'type', 'vrid'
        ]
        for aap_str in adds:
            aap_info = {}
            aap_info.update(utils.parse_vna_interface(aap_str, VALID_KEYS))
            aaps.append(aap_info)

        for aap_str in deletes:
            slot_no = re.sub(r'^interface-slot-no=(\d+)$', '\\1', aap_str)
            if not re.match(r'^\d+$', slot_no):
                msg = 'No interface number is specified.'
                raise exceptions.CommandError(msg)

            delete_aaps.append(slot_no)

        # duplicate interfaces check
        tmp_aaps = []
        for aap in aaps:
            tmp_aaps.append(aap.get('interface-slot-no'))

        for slot_no in delete_aaps:
            tmp_aaps.append(slot_no)

        if len(tmp_aaps) != len(set(tmp_aaps)):
            msg = _("Interfaces are duplicates")
            raise exceptions.CommandError(msg)

        requested_aap_object = {}
        for aap in aaps:
            slot_no = aap.get('interface-slot-no', None)
            if not slot_no:
                msg = 'No interface number is specified.'
                raise exceptions.CommandError(msg)

            # create key <-> value if not exist.
            if_key = 'interface_' + str(slot_no)
            requested_aap_object.setdefault(if_key,
                                            {'allowed_address_pairs': []})

            ip_address = aap.get('ip-address')
            mac_address = aap.get('mac-address')
            aap_type = aap.get('type')
            vrid = aap.get('vrid')

            each_aap_info = {'mac_address': '', 'type': '', 'vrid': None}

            if ip_address:
                each_aap_info.update({'ip_address': ip_address})

            if mac_address:
                each_aap_info.update({'mac_address': mac_address})

            if aap_type:
                each_aap_info.update({'type': aap_type})

            if vrid:
                try:
                    each_aap_info.update({'vrid': int(vrid)})
                except ValueError:
                    msg = 'vrid should be a positive number 1〜255'
                    raise exceptions.CommandError(msg)

            requested_aap_object[if_key]['allowed_address_pairs'].\
                append(each_aap_info)

            if mac_address and (aap_type == "vrrp" or vrid):
                msg = 'mac_address and vrrp type cannot be set ' \
                      'at the same time.'
                raise exceptions.CommandError(msg)

        for slot_no in delete_aaps:
            if_key = 'interface_' + slot_no
            requested_aap_object[if_key] = {'allowed_address_pairs': []}

        current_interface_object = copy.deepcopy(target.interfaces)
        merged_interface_object = jmp.merge(current_interface_object,
                                            requested_aap_object)
        patch = jmp.create_patch(target.interfaces, merged_interface_object)

        if not patch:
            msg = _('No change will be expected')
            raise exceptions.CommandError(msg)

        patch = {'interfaces': patch}

        data = vnf_client.update_virtual_network_appliance(
            parsed_args.virtual_network_appliance, **patch)

        _set_interfaces_for_display(data)

        return row_headers, utils.get_item_properties(data, rows)
Example #18
    def take_action(self, parsed_args):
        vnf_client = self.app.eclsdk.conn.virtual_network_appliance

        target = vnf_client.get_virtual_network_appliance(
            parsed_args.virtual_network_appliance)

        rows = ROWS_FOR_SHOW
        row_headers = rows

        interfaces = []
        VALID_KEYS = [
            'slot-no', 'name', 'description', 'tags', 'net-id', 'fixed-ips'
        ]
        for if_str in parsed_args.interface:
            # if_info = {"net-id": "", "fixed-ips": "",
            #            "slot-no": ""}
            if_info = {}
            if_info.update(utils.parse_vna_interface(if_str, VALID_KEYS))

            interfaces.append(if_info)

        # conflict interfaces
        tmp_interfaces = []
        for interface in interfaces:
            tmp_interfaces.append(interface.get('slot-no'))

        if len(tmp_interfaces) != len(set(tmp_interfaces)):
            msg = _("Interfaces are duplicates")
            raise exceptions.CommandError(msg)

        requested_interface_object = {}
        tag_flag = False
        for interface in interfaces:
            slot_no = interface.get('slot-no')
            if_key = 'interface_' + str(slot_no)

            network_id = interface.get('net-id')
            fixed_ips_tmp = interface.get('fixed-ips')

            each_if_info = {}

            if 'name' in interface:
                name = interface.get('name', '')
                each_if_info.update({'name': name})

            if 'description' in interface:
                description = interface.get('description', '')
                each_if_info.update({'description': description})

            if 'tags' in interface:
                tag_flag = True
                tags = interface.get('tags')
                tags = tags if tags else '{}'
                try:
                    obj = json.loads(tags)
                except Exception:
                    msg = _("You must specify JSON object format")
                    raise exceptions.CommandError(msg)

                each_if_info.update({'tags': obj})

            if 'net-id' in interface:
                each_if_info.update({'network_id': network_id})

            if 'fixed-ips' in interface:
                fixed_ips = []
                if fixed_ips_tmp:
                    fixed_ips = [{
                        'ip_address': ip
                    } for ip in fixed_ips_tmp.split(':')]
                each_if_info.update({'fixed_ips': fixed_ips})

            interface_tmp = {if_key: each_if_info}
            requested_interface_object.update(interface_tmp)

        current_interface_object = copy.deepcopy(target.interfaces)
        merged_interface_object = jmp.merge(current_interface_object,
                                            requested_interface_object)
        patch = jmp.create_patch(target.interfaces, merged_interface_object)

        if not patch and not tag_flag:
            msg = _('No change will be expected')
            raise exceptions.CommandError(msg)

        # remove keys whose value is None from the nested patch
        def __ridding_none_value(current, json_keys):
            for json_key in json_keys:
                if isinstance(current, dict):
                    next_current = current[json_key]
                    if next_current is None:
                        del current[json_key]
                    elif isinstance(next_current, dict):
                        next_keys = list(next_current.keys())
                        if len(next_keys) > 0:
                            current[json_key] = __ridding_none_value(
                                next_current, next_keys)
            return current

        if len(patch.keys()) > 0:
            patch = __ridding_none_value(patch, list(patch.keys()))

        # replacing patched tags with requested tags
        for if_key in requested_interface_object.keys():
            interface = requested_interface_object[if_key]
            if 'tags' in interface:
                patch.setdefault(if_key, {})['tags'] = interface.get('tags')

        patch = {'interfaces': patch}
        data = vnf_client.update_virtual_network_appliance(
            parsed_args.virtual_network_appliance, **patch)

        _set_interfaces_for_display(data)

        return (row_headers, utils.get_item_properties(data, rows))
Example #19
    def take_action(self, parsed_args):
        vnf_client = self.app.eclsdk.conn.virtual_network_appliance

        target = vnf_client.get_virtual_network_appliance(
            parsed_args.virtual_network_appliance)

        rows = [
            'ID',
            'Name',
            'Description',
            'Interface Names',
        ]
        row_headers = rows

        interfaces = []
        VALID_KEYS = ['slot-no', 'name']

        if parsed_args.interface:
            for if_str in parsed_args.interface:
                if_names = {}
                if_names.update(utils.parse_vna_interface(if_str))
                for k in if_names.keys():
                    if k not in VALID_KEYS:
                        msg = 'Invalid key %s is specified.' % k
                        raise exceptions.CommandError(msg)
                interfaces.append(if_names)

        # serialize request parameter as JSON
        requested_param = {}
        if hasattr(parsed_args, 'name'):
            requested_param['name'] = parsed_args.name
        if hasattr(parsed_args, 'description'):
            requested_param['description'] = parsed_args.description
        for interface in interfaces:
            if 'interfaces' not in requested_param:
                requested_param['interfaces'] = {}
            slot_no = interface.get('slot-no')
            if_key = 'interface_%s' % slot_no
            name = interface.get('name')
            each_if_info = {}
            if name:
                each_if_info.update({'name': name})
            requested_param['interfaces'].update({if_key: each_if_info})

        # serialize current parameter as JSON
        current_param = {
            'name': target.name,
            'description': target.description,
        }
        current_param['interfaces'] = copy.deepcopy(target.interfaces)
        origin_param = copy.deepcopy(current_param)
        merged_param = jmp.merge(current_param, requested_param)
        patch = jmp.create_patch(origin_param, merged_param)

        if not patch:
            msg = _('No change will be expected')
            raise exceptions.CommandError(msg)

        data = vnf_client.update_virtual_network_appliance(
            parsed_args.virtual_network_appliance, **patch)

        _set_interface_names_for_display(data)

        return row_headers, utils.get_item_properties(data, rows)
Example #20
    def apply_extensions(self, schema_obj):
        if not self.extensions:
            return
        for extension_metadata_url in self.extensions.keys():
            if not extension_metadata_url:
                self.invalid_extension[extension_metadata_url] = "extension metadata URL is empty"
                continue

            try:
                response = get_request(extension_metadata_url, config=self.config)
            except requests.exceptions.RequestException:
                self.invalid_extension[extension_metadata_url] = "fetching failed"
                continue
            if response.ok:
                try:
                    extension_metadata = response.json()
                except json.JSONDecodeError:
                    self.invalid_extension[extension_metadata_url] = "extension metadata is not valid JSON"
                    continue
            else:
                self.invalid_extension[extension_metadata_url] = f"{response.status_code}: {response.reason.lower()}"
                continue

            i = extension_metadata_url.rfind("/")
            release_schema_patch_url = f"{extension_metadata_url[:i]}/release-schema.json"

            try:
                response = get_request(release_schema_patch_url, config=self.config)
            except requests.exceptions.RequestException:
                self.invalid_extension[extension_metadata_url] = "failed to get release schema patch"
                continue
            if response.ok:
                try:
                    release_schema_patch = response.json()
                except json.JSONDecodeError:
                    self.invalid_extension[extension_metadata_url] = "release schema patch is not valid JSON"
                    continue
            # Codelist-only extensions are allowed.
            elif response.status_code == 404:
                release_schema_patch_url = None
                release_schema_patch = {}
            else:
                self.invalid_extension[extension_metadata_url] = f"{response.status_code}: {response.reason.lower()}"
                continue

            schema_obj = json_merge_patch.merge(schema_obj, release_schema_patch)
            current_language = self.config.config["current_language"]

            extension_description = {
                "url": extension_metadata_url,
                "schema_url": release_schema_patch_url,
                "failed_codelists": {},
            }

            for field in ["description", "name", "documentationUrl"]:
                field_object = extension_metadata.get(field, {})
                if isinstance(field_object, str):
                    field_value = field_object
                else:
                    field_value = field_object.get(current_language)
                    if not field_value:
                        field_value = field_object.get("en", "")
                extension_description[field] = field_value

            codelists = extension_metadata.get("codelists")
            if codelists:
                extension_description["codelists"] = codelists

            self.extensions[extension_metadata_url] = extension_description
            self.extended = True
Example #21
def make_metaschema():
    with open(draft4_path, 'r') as draft4, open(patch_path, 'r') as patch:
        draft4_schema = json.load(draft4, object_pairs_hook=OrderedDict)
        patch_schema = json.load(patch, object_pairs_hook=OrderedDict)

    return json_merge_patch.merge(draft4_schema, patch_schema)
Example #22
    def take_action(self, parsed_args):
        vnf_client = self.app.eclsdk.conn.virtual_network_appliance

        target = vnf_client.get_virtual_network_appliance(
            parsed_args.virtual_network_appliance)

        rows = ROWS_FOR_SHOW
        row_headers = rows

        interfaces = []
        VALID_KEYS = ['slot-no', 'net-id', 'fixed-ips']
        for if_str in parsed_args.interface:
            # if_info = {"net-id": "", "fixed-ips": "",
            #            "slot-no": ""}
            if_info = {}
            if_info.update(utils.parse_vna_interface(if_str))
            for k in if_info.keys():
                if k not in VALID_KEYS:
                    msg = 'Invalid key %s is specified.' % k
                    raise exceptions.CommandError(msg)

            interfaces.append(if_info)

        requested_interface_object = {}
        for interface in interfaces:
            slot_no = interface.get('slot-no')
            if_key = 'interface_' + str(slot_no)

            network_id = interface.get('net-id')
            fixed_ips_tmp = interface.get('fixed-ips')

            each_if_info = {}

            if network_id:
                each_if_info.update({'network_id': network_id})

            if fixed_ips_tmp:
                fixed_ips = [{
                    'ip_address': ip
                } for ip in fixed_ips_tmp.split(':')]
                each_if_info.update({'fixed_ips': fixed_ips})

            interface_tmp = {if_key: each_if_info}
            requested_interface_object.update(interface_tmp)

        current_interface_object = copy.deepcopy(target.interfaces)
        merged_interface_object = jmp.merge(current_interface_object,
                                            requested_interface_object)
        patch = jmp.create_patch(target.interfaces, merged_interface_object)

        if not patch:
            msg = _('No change will be expected')
            raise exceptions.CommandError(msg)

        patch = {'interfaces': patch}

        data = vnf_client.update_virtual_network_appliance(
            parsed_args.virtual_network_appliance, **patch)

        _set_interfaces_for_display(data)

        return (row_headers, utils.get_item_properties(data, rows))
Example #23
def get_metaschema():
    return json_merge_patch.merge(
        json_load('metaschema/json-schema-draft-4.json'),
        json_load('metaschema/meta-schema-patch.json'))
Example #24
import json
import json_merge_patch
from collections import OrderedDict
import sys
import urllib.request
import requests
import copy

core_schema_path = sys.argv[1]

with open(core_schema_path) as core_schema_file:
    core_schema = json.load(core_schema_file, object_pairs_hook=OrderedDict)

with urllib.request.urlopen(
        "http://standard.open-contracting.org/extension_registry/master/extensions.json"
) as url:
    extension_registry = json.loads(url.read().decode())

for extension in extension_registry["extensions"]:
    core_schema_copy = copy.deepcopy(core_schema)
    if extension["core"] == True:
        with urllib.request.urlopen(extension["url"] +
                                    "release-schema.json") as url:
            extension_schema = json.loads(url.read().decode(),
                                          object_pairs_hook=OrderedDict)
            extended_schema = json_merge_patch.merge(core_schema_copy,
                                                     extension_schema)
            with open("extension_schemas/" + extension["slug"] + ".json",
                      "w") as fp:
                json.dump(extended_schema, fp, indent=4)
Example #25
for codelist in glob.glob('base-codelists/*.csv'):
    codelist_location = '../compiledCodelists/' + codelist.split('/')[-1]
    shutil.copy(codelist, codelist_location)
    append_extension(codelist_location, {'name': {
        'en': 'OCDS Core'
    }},
                     codelist.split('/')[-1])

for extension in extension_json['extensions']:
    try:
        if extension['slug'] in extensions_to_merge:
            print("Merging " + extension['slug'])
            extension_patch = requests.get(extension['url'].rstrip("/") + "/" +
                                           "release-schema.json").json()
            schema = json_merge_patch.merge(schema, extension_patch)
            ppp_extension = json_merge_patch.merge(ppp_extension,
                                                   extension_patch)

            extension_readme = requests.get(extension['url'].rstrip("/") +
                                            "/" + "README.md")
            with open('../docs/extensions/' + extension['slug'] + '.md',
                      'w') as readme:
                readme.write(extension_readme.text)
        else:
            print("Missing {}".format(extension['slug']))
            continue
    except KeyError:
        continue

    #skip codelists for ppp as we get these locally.
Example #26
def get_metaschema():
    """
    Patches and returns the JSON Schema Draft 4 metaschema.
    """
    return json_merge_patch.merge(json_load('metaschema/json-schema-draft-4.json'),
                                  json_load('metaschema/meta-schema-patch.json'))
Example #27
import json
import json_merge_patch
import os
import glob
from collections import OrderedDict
from flattentool import create_template, unflatten, flatten

schema = OrderedDict()

for file in glob.glob("firstmilefarmdataframe/*.json") + glob.glob(
        "components/*.json"):
    try:
        with open(file, 'r') as schema_file:
            print("Merging " + file)
            schema_element = json.loads(schema_file.read(),
                                        object_pairs_hook=OrderedDict)
            schema = json_merge_patch.merge(schema,
                                            schema_element,
                                            position='last')
    except Exception:
        print("Problem merging from " + file)
        pass

with open("first-mile-farm-data-schema.json", "w") as outfile:
    outfile.write(json.dumps(schema, indent=2))

create_template(schema="first-mile-farm-data-schema.json",
                output_name="../csv",
                output_format='csv')

print("Full schema in first-mile-farm-data-schema.json updated")
Example #28
    def take_action(self, parsed_args):
        vnf_client = self.app.eclsdk.conn.virtual_network_appliance

        target = vnf_client.get_virtual_network_appliance(
            parsed_args.virtual_network_appliance)

        rows = ROWS_FOR_SHOW
        row_headers = rows

        aaps = []
        VALID_KEYS = [
            'interface-slot-no', 'ip-address', 'mac-address', 'type', 'vrid'
        ]
        for aap_str in parsed_args.allowed_address_pair:
            aap_info = {}
            aap_info.update(utils.parse_vna_interface(aap_str))

            for k in aap_info.keys():
                if k not in VALID_KEYS:
                    msg = 'Invalid key %s is specified.' % k
                    raise exceptions.CommandError(msg)

            aaps.append(aap_info)

        requested_aap_object = {}
        for aap in aaps:
            slot_no = aap.get('interface-slot-no', None)
            if not slot_no:
                msg = 'No interface number is specified.'
                raise exceptions.CommandError(msg)

            # create key <-> value if not exist.
            if_key = 'interface_' + str(slot_no)
            requested_aap_object.setdefault(if_key,
                                            {'allowed_address_pairs': []})

            ip_address = aap.get('ip-address')
            mac_address = aap.get('mac-address')
            aap_type = aap.get('type')
            vrid = aap.get('vrid')

            each_aap_info = {'mac_address': '', 'type': '', 'vrid': None}

            if ip_address:
                each_aap_info.update({'ip_address': ip_address})

            if mac_address:
                each_aap_info.update({'mac_address': mac_address})

            if aap_type:
                each_aap_info.update({'type': aap_type})

            if vrid:
                try:
                    each_aap_info.update({'vrid': int(vrid)})
                except ValueError:
                    msg = 'vrid should be a positive number 1〜255'
                    raise exceptions.CommandError(msg)

            requested_aap_object[if_key]['allowed_address_pairs'].\
                append(each_aap_info)

            if mac_address and (aap_type == "vrrp" or vrid):
                msg = 'mac_address and vrrp type cannot be set ' \
                      'at the same time.'
                raise exceptions.CommandError(msg)

        current_interface_object = copy.deepcopy(target.interfaces)
        merged_interface_object = jmp.merge(current_interface_object,
                                            requested_aap_object)
        patch = jmp.create_patch(target.interfaces, merged_interface_object)

        if not patch:
            msg = _('No change will be expected')
            raise exceptions.CommandError(msg)

        patch = {'interfaces': patch}

        data = vnf_client.update_virtual_network_appliance(
            parsed_args.virtual_network_appliance, **patch)

        _set_interfaces_for_display(data)

        return row_headers, utils.get_item_properties(data, rows)
Example #29
    def apply_extensions(self, schema_obj):
        if not self.extensions:
            return
        for extensions_descriptor_url in self.extensions.keys():

            try:
                response = requests.get(extensions_descriptor_url)
                if not response.ok:
                    # extension descriptor is required to proceed
                    self.invalid_extension[extensions_descriptor_url] = '{}: {}'.format(
                        response.status_code, response.reason.lower())
                    continue
            except requests.exceptions.RequestException:
                self.invalid_extension[extensions_descriptor_url] = 'fetching failed'
                continue

            i = extensions_descriptor_url.rfind('/')
            url = '{}/{}'.format(extensions_descriptor_url[:i], 'release-schema.json')

            try:
                if self.cache_schema:
                    extension = cached_get_request(url)
                else:
                    extension = requests.get(url)
            except requests.exceptions.RequestException:
                continue

            if extension.ok:
                try:
                    extension_data = extension.json()
                except ValueError:  # would be json.JSONDecodeError for Python 3.5+
                    self.invalid_extension[extensions_descriptor_url] = 'release schema invalid JSON'
                    continue
            elif extension.status_code == 404:
                url = None
                extension_data = {}
            else:
                self.invalid_extension[extensions_descriptor_url] = '{}: {}'.format(
                    extension.status_code, extension.reason.lower())
                continue

            schema_obj = json_merge_patch.merge(schema_obj, extension_data)
            try:
                if self.cache_schema:
                    response = cached_get_request(extensions_descriptor_url)
                else:
                    response = requests.get(extensions_descriptor_url)
                extensions_descriptor = response.json()

            except ValueError:  # would be json.JSONDecodeError for Python 3.5+
                self.invalid_extension[extensions_descriptor_url] = 'invalid JSON'
                continue
            cur_language = self.lib_cove_ocds_config.config['current_language']

            extension_description = {'url': extensions_descriptor_url, 'release_schema_url': url}

            for field in ['description', 'name', 'documentationUrl']:
                field_object = extensions_descriptor.get(field, {})
                if isinstance(field_object, str):
                    field_value = field_object
                else:
                    field_value = field_object.get(cur_language)
                    if not field_value:
                        field_value = field_object.get('en', '')
                extension_description[field] = field_value
            extension_description['failed_codelists'] = {}
            codelists = extensions_descriptor.get('codelists')
            if codelists:
                extension_description['codelists'] = codelists

            self.extensions[extensions_descriptor_url] = extension_description
            self.extended = True
Example #30
File: view_mixins.py Project: dewdad/laxy
    def _try_json_patch(self, request, field='metadata'):
        """
        Partial update of the 'metadata' field on an object.

        If the header `Content-Type: application/merge-patch+json` is set,
        the `metadata` field is patched as per the specification in
        [RFC 7386](https://tools.ietf.org/html/rfc7386). e.g., if the existing
        metadata was:

        ```json
        {"metadata": {"tags": ["A"], "name": "seqs.fastq.gz", "path": "/tmp"}}
        ```

        The patch in a request:

        ```json
        {"metadata": {"tags": ["B", "C"], "path": null}}
        ```

        Would change it to:

        ```json
        {"metadata": {"tags": ["B", "C"], "name": "seqs.fastq.gz"}}
        ```

        If `Content-Type: application/json-patch+json` is set, `metadata`
        should be an array of mutation operations to apply as per
        [RFC 6902](https://tools.ietf.org/html/rfc6902).

        <!--
        :param request:
        :type request:
        :return:
        :rtype:
        -->
        """
        content_type = get_content_type(request)
        if content_type in ['application/merge-patch+json',
                            'application/json-patch+json']:
            obj = self.get_object()
            if obj is None:
                return Response(status=status.HTTP_404_NOT_FOUND)

            if 'id' in request.data:
                return HttpResponse(status=status.HTTP_400_BAD_REQUEST,
                                    reason="id cannot be updated")

            if not hasattr(obj, field):
                return HttpResponse(status=status.HTTP_400_BAD_REQUEST,
                                    reason=f"Invalid field for this object type: {field}")

            metadata = request.data.get(field, None)
            if metadata is not None:
                if isinstance(metadata, list):
                    patch = [OrderedDict(op) for op in metadata]
                else:
                    patch = OrderedDict(metadata)

                # https://tools.ietf.org/html/rfc7386
                if content_type == 'application/merge-patch+json':
                    request.data[field] = json_merge_patch.merge(
                        OrderedDict(getattr(obj, field)),
                        patch)
                # https://tools.ietf.org/html/rfc6902
                if content_type == 'application/json-patch+json':
                    request.data[field] = jsonpatch.apply_patch(
                        OrderedDict(getattr(obj, field)),
                        patch)

            logger.debug(f"_try_json_patch - patched {field}: {request.data}")
            if hasattr(self, 'request_serializer'):
                serializer_method = self.request_serializer
            else:
                serializer_method = self.get_serializer
            serializer = serializer_method(instance=obj,
                                           data=request.data,
                                           context={'request': request},
                                           partial=True)
            if serializer.is_valid():
                serializer.save()
                return Response(status=status.HTTP_204_NO_CONTENT)

        return None