Example 1
 def inner(*args, **kwargs):
     if accepts is not None or responds is not None:
         if request.args.get(self.help_keyword, None) is not None:
             return new_endpoint.help_dict
     if accepts is not None:
         payload = request.get_json()
         if payload is None:
             if new_endpoint.payload_fully_optional:
                 return endpoint(*args, **kwargs)
             return self.handle_error_response(
                 400, 'No JSON payload.')
         try:
             # We don't bother to check for SchemaError, since
             # that *should* cause the program to crash. A Schema
             # error is caused by incorrectly formatting the
             # schema dict, and so should be considered an
             # application breaking bug, as the whole point of
             # this middleware is to marry documentation and
             # code.
             validate_json(payload, new_endpoint.validation_schema)
         except ValidationError as e:
             return self.handle_error_response(400, e.message)
         for key in new_endpoint.schema_base:
             if key in payload:
                 kwargs[key] = payload[key]
     return endpoint(*args, **kwargs)
Example 2
def create():
    if request.method == 'POST':
        dataset_dict = request.get_json()
        if not dataset_dict:
            return jsonify(
                success=False,
                error="Data must be submitted in JSON format.",
            ), 400

        try:
            validate_json(dataset_dict, dataset.DATASET_JSON_SCHEMA)
        except ValidationError as e:
            return jsonify(
                success=False,
                error=str(e),
            ), 400

        dataset_id, error = dataset.create_from_dict(dataset_dict,
                                                     current_user.id)
        if dataset_id is None:
            return jsonify(
                success=False,
                error=str(error),
            ), 400

        return jsonify(
            success=True,
            dataset_id=dataset_id,
        )
    return render_template('datasets/editor.html', mode="create")
Example 3
    async def validate_body(self):
        if self.Meta.multipart:
            if self.request.content_type != 'multipart/form-data':
                logger.debug('Expected a multipart request but received "{}"'.format(self.request.content_type))
                error = ApiError().InvalidFormat('multipart/form-data content type is required')
                raise ViewError(errors=error)
            return

        if not self.Meta.body_data_schema:
            return

        try:
            data = await self.request.json()
        except JSONDecodeError as e:
            logger.debug('Bad request: {}, error: {}'.format(await self.request.text(), e.args))
            raise ViewError(errors=ApiError().InvalidFormat('Invalid json'))

        try:
            validate_json(data, self.Meta.body_data_schema)
        except ValidationError as e:
            logger.debug('Bad request data: {}, error: {}'.format(data, e.message))
            error = ApiError().InvalidDataSchema(e.message).Pointer(e.path)
            raise ViewValidationError(errors=error)

        self.body_data = data
Example 4
    def from_json(json_value: dict) -> LibraryDescription:
        validate_json(instance=json_value,
                      schema=LibraryDescription.json_schema)

        library = LibraryDescription()

        if "sdk_version" in json_value:
            library._sdk_version = LibraryVersion.from_json(
                json_value["sdk_version"])

        if "documentation" in json_value:
            library._documentation = json_value["documentation"]

        if "variant" in json_value:
            library._variant = LibraryVariant(json_value["variant"])

        library._library = Library.from_json(json_value)

        if "patches" in json_value:
            for patch in json_value["patches"]:
                library._patches.append(LibraryPatch.from_json(patch))

        if "groups" in json_value:
            library._groups = json_value["groups"]

        return library
Example 5
    def get_dependencies(dependencies_json, container_directory, onerror=None):
        result = {}
        for dependency_name in dependencies_json:
            try:
                validate_json(instance=dependencies_json[dependency_name],
                              schema=Dependency.DEPENDENCY_SCHEMA)

                source_url = dependencies_json[dependency_name].get('url')
                source_url_type = dependencies_json[dependency_name].get(
                    'url_type')
                if source_url_type is not None:
                    source_url_type = DependencySourceType.from_snake_case_name(
                        source_url_type)

                dependency = Dependency(dependency_name, source_url,
                                        source_url_type,
                                        dependencies_json[dependency_name],
                                        container_directory)

                result[dependency_name] = dependency
            except:
                if onerror is not None:
                    onerror(dependency_name)
                continue

        return result
Example 6
    def _collect_binaries(self):
        binaries = self.args.get('binaries', list())
        root_binary_path = self.destination_path / 'bin'

        count = 0
        for binary in binaries:
            try:
                validate_json(instance=binary,
                              schema=Dependency.BINARY_JSON_SCHEMA)
            except:
                logger.exception(
                    f'{self.colourized_name} - Invalid binary entry for dependency of name \'{self.name}\'.'
                )
                continue

            # filepaths in the json are given relative to the dependency destination directory ("<containing_directory>/<dependency_name>")
            absolute_filepath = self.destination_path / binary['filepath']

            for config in binary['configs']:
                for platform in binary['platforms']:
                    binary_path = root_binary_path / config / platform

                    # create the binary file destination tree
                    binary_path.mkdir(exist_ok=True, parents=True)
                    shutil.copy(absolute_filepath,
                                binary_path / absolute_filepath.name)

            count += 1

        logger.info(
            f'{self.colourized_name} - Finished binary collection. Collected {count} binaries.'
        )
Example 7
    def _get_dependencies_config(directory):
        """
        Retrieves the dependencies.json data in the specified directory.

        :param directory:
            The directory to search for dependencies.json.
        :returns:
            A `dict` object containing the deserialized JSON data of the dependencies.json file or `None`
            if the file could not be found.

        """

        dependencies_config = DependencyDirectory._get_dependencies_config_path(
            directory)
        if dependencies_config is None:
            logger.error(
                f'Could not find \'dependencies.json\' file in {directory}.')
            return None

        with open(dependencies_config.absolute(), 'r') as dependencies_file:
            json_data = json.load(dependencies_file)

            try:
                validate_json(
                    instance=json_data,
                    schema=DependencyDirectory.DEPENDENCIES_CONFIG_SCHEMA)
                return json_data
            except:
                logger.exception(
                    f'Invalid dependencies.json file (\'{dependencies_config.absolute()}\').'
                )
                return None
Example 8
def validate(json: dict, schema: str) -> Tuple[bool, Union[str, None]]:
    """
    Validates the given JSON document against the given JSON schema, as well as the logic
    constraints on the input data.
    :param json: JSON document (a dict) whose structure must be validated
    :param schema: JSON schema that defines what must be validated
    :return: A tuple of 2 elements (bool, str | None).
             The first element is True if and only if the validation succeeded.
             The second element is a string containing the validation error
             message, if any was raised.
    """
    try:
        # validate the given JSON against the validation JSON schema
        validate_json(instance=json, schema=schema)

        # the length of `duration` and `expected_finish` must be equal to the value of `n_batches`
        if len(json['duration']) != len(json['expected_finish']) or len(json['duration']) != json['n_batches']:
            return False, 'The length of the `duration` and `expected_finish` lists must equal the value of `n_batches`'

        if not is_asc_sorted(json['expected_finish']):
            return False, 'The values in the `expected_finish` list must be sorted in ascending order'

        return True, None
    except ValidationError as err:
        logging.log(logging.ERROR, err)
        return False, str(err)
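A minimal usage sketch of the function above. The schema and payload are hypothetical, chosen only to exercise the `n_batches`/`duration`/`expected_finish` constraints it checks; the schema is passed as a plain dict, as `jsonschema` expects.

example_schema = {
    'type': 'object',
    'properties': {
        'n_batches': {'type': 'integer'},
        'duration': {'type': 'array', 'items': {'type': 'number'}},
        'expected_finish': {'type': 'array', 'items': {'type': 'number'}},
    },
    'required': ['n_batches', 'duration', 'expected_finish'],
}

# Two batches with ascending expected finish times: passes both the
# schema check and the length/ordering constraints.
ok, error = validate(
    {'n_batches': 2, 'duration': [3.5, 4.0], 'expected_finish': [10.0, 20.0]},
    example_schema,
)
if not ok:
    print(error)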
Example 9
 def test_color_taxon(self):
     # Note: requires 5×5 grids!
     response = self.client.get(url_for("gn_synthese.get_color_taxon"))
     assert response.status_code == 200
     data = response.get_json()
     validate_json(instance=data,
                   schema={
                       'type': 'array',
                       'minItems': 1,
                       'items': {
                           'type': 'object',
                           'properties': {
                               'cd_nom': {
                                   'type': 'integer',
                               },
                               'id_area': {
                                   'type': 'integer',
                               },
                               'color': {
                                   'type': 'string',
                               },
                               'nb_obs': {
                                   'type': 'integer',
                               },
                               'last_date': {
                                   'type': 'string',
                               },
                           },
                           'minProperties': 5,
                           'additionalProperties': False,
                       },
                   })
Example 10
    def request_to_json(self, verbose=False):
        if verbose:
            print(
                f'- waiting for remote response (since {time.strftime("%Y-%m-%d %H:%M:%S")}), please wait for {self.url}/{self.run_analysis_handle}'
            )

        try:
            timeout = getattr(self, 'timeout', 120)

            self.last_request_t0 = time.time()
            response = requests.get(
                "%s/%s" % (self.url, self.run_analysis_handle),
                params=self.parameters_dict_payload,
                cookies=self.cookies,
                headers={
                    'Request-Timeout': str(timeout),
                    'Connection-Timeout': str(timeout),
                },
                timeout=timeout,
            )
            self.last_request_t_complete = time.time()

            response_json = self._decode_res_json(response)

            validate_json(response_json, self.dispatcher_response_schema)

            return response_json
        except json.decoder.JSONDecodeError as e:
            print(f"{C.RED}{C.BOLD}unable to decode json from response:{C.NC}")
            print(f"{C.RED}{response.text}{C.NC}")
            raise
Example 11
File: api.py Project: kubked/pestca
def get_request_json(schema):
    msg = request.get_json()
    if msg is None:
        raise ErrorHandler.not_json_format()
    try:
        validate_json(msg, schema)
    except ValidationError:
        raise ErrorHandler.wrong_msg_format()
    return msg
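A hypothetical Flask route built on this helper; the `USER_SCHEMA` constant and the `/users` endpoint are illustrative only, and the usual Flask `app` and `jsonify` are assumed to be in scope.

USER_SCHEMA = {
    'type': 'object',
    'properties': {'name': {'type': 'string'}},
    'required': ['name'],
}

@app.route('/users', methods=['POST'])
def create_user():
    # get_request_json parses the body and raises the project's error
    # handlers if the payload is missing or violates the schema.
    msg = get_request_json(USER_SCHEMA)
    return jsonify(name=msg['name']), 201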
Example 12
 def _validate_data(self, obj, model_cls):
     """Validate data according JSON schema for the model class."""
     schema_fn = '{}.json'.format(model_cls.__name__.lower())
     here = os.path.dirname(os.path.abspath(__file__))
     schemas_dir = os.path.join(os.path.dirname(here), 'schemas')
     schema_path = os.path.join(schemas_dir, schema_fn)
     with open(schema_path) as json_file:
         schema = json.load(json_file)
         validate_json(obj, schema)
Example 13
    def _collect_includes(self):
        INCLUDE_ENTRY_JSON_SCHEMA = {
            'type': 'object',
            'properties': {
                'source': {
                    'type': 'string'
                },
                'destination': {
                    'type': 'string'
                }
            },
            'required': ['source', 'destination']
        }

        includes = self.args.get('includes', list())
        root_include_path = self.destination_path / self.args.get(
            'include_path', 'include')

        count = 0
        for include in includes:
            destination_path = root_include_path
            if not isinstance(include, str):
                # validate the include object
                try:
                    validate_json(instance=include,
                                  schema=INCLUDE_ENTRY_JSON_SCHEMA)
                except:
                    logger.exception(
                        f'{self.colourized_name} - Invalid include entry for dependency of name \'{self.name}\'.'
                    )
                    continue

                include_path = self.destination_path / include['source']
                destination_path = (root_include_path /
                                    include['destination']).resolve()
            else:
                include_path = self.destination_path / include

            if not include_path.is_dir():
                if not include_path.is_file():
                    logger.error(
                        f'{self.colourized_name} - Invalid include (\'{include}\') provided for dependency of name \'{self.name}\'.'
                    )
                    continue

                # the include specified is a file...
                destination_path.mkdir(exist_ok=True, parents=True)
                shutil.copy(include_path, destination_path)
            else:
                distutils.dir_util.copy_tree(str(include_path.resolve()),
                                             str(destination_path.resolve()))

            count += 1

        logger.info(
            f'{self.colourized_name} - Finished include directory collection. Collected {count} includes.'
        )
Example 14
    def from_json(json_value: dict) -> LibraryVersion:
        validate_json(instance=json_value, schema=LibraryVersion.json_schema)
        from_version = None
        to_version = None

        if "from" in json_value:
            from_version = Version.from_string(json_value["from"])
        if "to" in json_value:
            to_version = Version.from_string(json_value["to"])

        return LibraryVersion(from_version=from_version, to_version=to_version)
Example 15
    def from_json(json_value: dict):
        validate_json(instance=json_value, schema=Property.json_schema)
        prop = Property()
        for access in Access.PUBLIC.matches:
            if access.value in json_value:
                prop._items[access] = set(json_value[access.value])
            if Access.PUBLIC.value in json_value:
                prop._items[access].update(set(
                    json_value[Access.PUBLIC.value]))

        return prop
Example 16
    def from_json(json_value: dict) -> LibraryPatch:
        validate_json(instance=json_value,
                      schema=LibraryPatch.json_schema)

        patch = LibraryPatch(
            LibraryOperation(json_value["operation"]),
            LibraryVersion.from_json(json_value["sdk_version"])
        )
        patch._library = Library.from_json(json_value)

        return patch
Example 17
 def test_get_area_intersection(self):
     response = self.client.post(url_for("ref_geo.getAreasIntersection"),
                                 json={
                                     'geometry': polygon,
                                 })
     assert response.status_code == 200
     validate_json(instance=response.json,
                   schema={
                       'type': 'object',
                       'patternProperties': {
                           '[0-9]*': {
                               'type': 'object',
                               'properties': {
                                   'type_code': {
                                       'type': 'string',
                                   },
                                   'type_name': {
                                       'type': 'string',
                                   },
                                   'areas': {
                                       'type': 'array',
                                       'items': {
                                           'type': 'object',
                                           'properties': {
                                               'area_code': {
                                                   'type': 'string',
                                               },
                                               'area_name': {
                                                   'type': 'string',
                                               },
                                               'id_area': {
                                                   'type': 'integer',
                                               },
                                               'id_type': {
                                                   'type': 'integer',
                                               },
                                           },
                                           'additionalProperties': False,
                                       }
                                   },
                               },
                               'additionalProperties': False,
                           },
                       },
                       'additionalProperties': False,
                   })
     communes_type = BibAreasTypes.query.filter_by(type_code='COM').one()
     communes = {
         area['area_name']
         for area in response.json[str(communes_type.id_type)]['areas']
     }
     assert communes == self.expected_communes
Example 18
 def json_validator(self):
     try:
         cache_key = '{}:{}'.format(doc_class, role)
         json_schema = json_body.memoized_schemas.get(cache_key)
         if not json_schema:
             json_schema = doc_class.get_schema(role=role)
             json_body.memoized_schemas[cache_key] = json_schema
         validate_json(self.req.json, json_schema)
     except ValueError:
         raise exc.ValidationError()
     except JsonValidationError:
         raise exc.ValidationError()
     return func(self)
Example 19
    def from_json(json_value: dict) -> Library:
        validate_json(instance=json_value, schema=Library.json_schema)

        library_props = Library()
        if "sources" in json_value:
            library_props._sources = set(json_value["sources"])

        for property_name in LibraryProperty:
            if property_name.value in json_value:
                library_props._props[property_name] = Property.from_json(
                    json_value[property_name.value])

        return library_props
Example 21
 def json_validator(self):
     try:
         key = doc_class.__name__ + role
         json_schema = json_body.memoized_schemas.get(key)
         if not json_schema:
             json_schema = doc_class.get_schema(role=role)
             json_body.memoized_schemas[key] = json_schema
         validate_json(self.request.json, json_schema)
     except ValueError:
         raise Exception  # TODO: raise proper API exception
     except JsonValidationError:
         raise Exception  # TODO raise proper API exception
     return func(self)
Example 22
    def from_json(json_value: dict) -> Example:
        validate_json(instance=json_value, schema=Example.json_schema)

        example = Example(
            json_value["path"],
            json_value["local_path"],
            Version.from_string(json_value["sdk_version"]),
        )

        for prop in ExampleProperty:
            if prop.value in json_value:
                example._props[prop] = set(json_value[prop.value])
            else:
                example._props[prop] = set()

        return example
Example 23
    def validate(self, schema_path=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema.json')):
        """
        Validate the playlist contents

        Args:
            schema_path (optional, default='./schema.json') - The path to the schema to validate the playlist against.

        Returns:
            void
        """
        with open(schema_path) as f:
            schema = json.load(f)

            try:
                validate_json(self.playlist_contents, schema)
            except ValidationError as e:
                # Encapsulate the error so we can hide the implementation to the client.
                raise PlaylistValidationError(e)
Example 24
def get_registration(ipfs: ipfsapi.client.Client, register: Contract,
                     address: str) -> Optional[Dict[str, Any]]:
    """ Retrieve a user's registration """
    global registration_cache

    hex_hash = register.functions.getUserFile(address).call()
    user_json = ipfs_get_json(ipfs, hex_hash)

    schema = load_registration_schema()
    try:
        validate_json(user_json, schema)
    except SchemaError as err:
        log.error("Error validation Registration schema.  Invalid schema.")
        raise err
    except ValidationError as err:
        log.exception("Invalid Registration schema.")
        return None

    return user_json
Example 25
    def _validate_json_response(response: Response, schema: Dict = None) -> Union[Dict, List, str]:
        """
        Validates that the response is JSON and adheres to the provided schema (optional).

        :param response: The response object to validate.
        :param schema: A schema to match the JSON against (optional).
        :return: Parsed JSON data from the response object.
        :raises UnexpectedResponseError: In case the response is malformed in any way.
        """
        if not is_response_json(response):
            raise ExpectedJsonException
        res = response.json()

        # If a schema is provided, make sure it matches
        if schema is not None:
            try:
                validate_json(res, schema)
            except js_exceptions.ValidationError:
                raise InvalidJsonSchemaException

        return res
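A sketch of how such a helper might be called, assuming it lives as a static method on a client class (`Client` here is an illustrative name) and that a plain `requests` response and JSON Schema dict are valid inputs.

import requests

response = requests.get('https://example.com/api/items')
# Raises the module's exceptions if the body is not JSON or does not
# match the schema; otherwise returns the parsed data.
items = Client._validate_json_response(
    response,
    schema={'type': 'array', 'items': {'type': 'object'}},
)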
Example 26
def edit(id):
    ds = dataset.get(id)
    if not ds:
        raise NotFound("Can't find this dataset.")

    if ds['author'] and ds['author'] != current_user.id:
        raise Unauthorized("You can't edit this dataset.")

    if request.method == 'POST':
        dataset_dict = request.get_json()
        if not dataset_dict:
            return jsonify(
                success=False,
                error="Data must be submitted in JSON format.",
            ), 400

        try:
            validate_json(dataset_dict, dataset.DATASET_JSON_SCHEMA)
        except ValidationError as e:
            return jsonify(
                success=False,
                error=str(e),
            ), 400

        error = dataset.update(str(id), dataset_dict, current_user.id)
        if error:
            return jsonify(
                success=False,
                error=str(error),
            ), 400

        return jsonify(
            success=True,
            dataset_id=id,
        )

    return render_template('datasets/editor.html',
                           mode="edit",
                           dataset_id=str(id),
                           dataset_name=ds['name'])
Example 27
    def validate(self, response: Response) -> bool:
        """

        :param response:
        :return:
        """
        media_type = MediaType(media_type=response.headers.get('Content-Type'))

        if not media_type.matches_type(pattern=self.valid_type_pattern):
            raise InvalidContentType('{!s} is an invalid type'.format(media_type))

        schema_path = self.schema_finder.find_schema_for(media_type=media_type)

        data = self._extract_response_data(response)

        try:
            with open(schema_path) as schema_file:
                validate_json(data, schema=json.load(schema_file))
        except ValidationError as err:

            output = '{0},\n\ndata: {1}\n\nschema: {2}'.format(err.message,
                                                               self._format_json_str(err.instance),
                                                               self._format_json_str(err.schema))
            raise ValidationError(output)
Example 28
    def _get(self):
        """
        Retrieve and extract the dependency.
        """

        if self.source_url_type == DependencySourceType.Git:
            # TODO: Implement git dependencies.
            raise UnsupportedSourceTypeError(DependencySourceType.Git)
        elif self.source_url_type == DependencySourceType.Archive:
            # Extract and build filesystem
            with tempfile.NamedTemporaryFile(delete=False) as tmp_file_handle:
                logger.info(
                    f'{self.colourized_name} - Downloading archive ({self.source_url})'
                )

                start_time = time.time()
                response = requests.get(self.source_url, stream=True)
                total_length = response.headers.get('content-length')

                # no content length header
                if total_length is None:
                    tmp_file_handle.write(response.content)
                else:
                    total_length = int(total_length)
                    with click.progressbar(length=total_length,
                                           label='Downloading...') as bar:
                        for chunk in response.iter_content(chunk_size=4096):
                            tmp_file_handle.write(chunk)
                            bar.update(len(chunk))

            try:
                if not zipfile.is_zipfile(tmp_file_handle.name):
                    raise zipfile.BadZipFile()
            except:
                logger.exception(
                    f'Invalid archive file provided for \'{self.name}\' dependency.'
                )
                return False

            logger.info(f'{self.colourized_name} - Extracting archive')
            with zipfile.ZipFile(tmp_file_handle.name) as zip_file:
                archive_extract_items = self.args.get('archive_extract_items',
                                                      None)
                file_list = []

                ARCHIVE_EXTRACT_ITEMS_SCHEMA = {
                    'type': 'object',
                    'properties': {
                        'dirs': {
                            'type': 'array',
                            'items': {
                                'type': 'string'
                            }
                        },
                        'files': {
                            'type': 'array',
                            'items': {
                                'type': 'string'
                            }
                        }
                    }
                }

                try:
                    validate_json(instance=archive_extract_items,
                                  schema=ARCHIVE_EXTRACT_ITEMS_SCHEMA)
                    dirs = archive_extract_items.get('dirs', list())
                    files = archive_extract_items.get('files', list())

                    if len(dirs) == len(files) == 0:
                        # nothing explicitly listed; trigger the bare except
                        # below so the whole archive gets extracted
                        raise

                    for target_dir in dirs:
                        for file in zip_file.namelist():
                            if file.startswith(target_dir):
                                file_list.append(file)

                    file_list += files
                except:
                    file_list = zip_file.namelist()

                with click.progressbar(file_list,
                                       label='Extracting...') as bar:
                    for name in bar:
                        zip_file.extract(name, self.destination_path)

            # Delete temporary file
            tmp_file_path = Path(tmp_file_handle.name)
            if tmp_file_path.is_file():
                tmp_file_path.unlink()

        return True
Example 29
 def _validate_against_schema(request: Request, data: Dict) -> None:
     schema = get_schema(get_schema_name(request.content_type))
     validate_json(data, schema=schema)