Example No. 1
def check_json_matches_schema(jsondata,
                              schema_filename: str,
                              base_path: str = "",
                              base_uri: str = ""):
    """
    Check the given json data against the jsonschema in the given schema file,
    raising an exception on error.  The exception text includes one or more
    validation error messages.

    schema_filename is relative to the schema root directory.

    May raise SchemaError or ValidationError.
    """
    set_schema_base_path(base_path=base_path, base_uri=base_uri)

    try:
        validator = Validator(_load_json_schema(schema_filename))
    except SchemaError as e:
        raise SchemaError('{} is invalid: {}'.format(schema_filename, e))

    err_msg_l = []
    for error in validator.iter_errors(jsondata):
        err_msg_l.append('{}: {}'.format(
            ' '.join([str(word) for word in error.path]), error.message))
    if err_msg_l:
        raise ValidationError(' + '.join(err_msg_l))
    else:
        return True
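A minimal usage sketch for the helper above, assuming a hypothetical schema file "dataset.json" under the schema root directory; SchemaError and ValidationError are the jsonschema exceptions the docstring mentions.

from jsonschema.exceptions import SchemaError, ValidationError

record = {"name": "sample", "count": 3}
try:
    # "dataset.json" is a hypothetical file relative to the schema root
    check_json_matches_schema(record, "dataset.json")
except (SchemaError, ValidationError) as err:
    print(f"schema check failed: {err}")
else:
    print("record matches the schema")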
Example No. 2
def parse_json(schema_str: str):
    try:
        js = loads(schema_str)
        Draft7Validator.check_schema(js)
        assert "type" in js
        return TypedSchema(Draft7Validator(js), SchemaType.JSONSCHEMA)
    except (JSONDecodeError, SchemaError, AssertionError) as e:
        raise InvalidSchema from e
Example No. 3
def validate(data: Dict, filename: Path) -> List[SchemaValidationError]:
    schema = load_schema(filename)
    v = Draft7Validator(schema)
    return [
        {'path': "/".join(str(x) for x in e.path),
         "message": e.message} for e in v.iter_errors(data)
    ]
Example No. 4
def get_input_body_validator(
    provider_description: ActionProviderDescription,
) -> ActionInputValidatorType:
    """
    Inspects the provider_description's input_schema to determine whether it
    is a str, dict, or pydantic model, and selects the matching validation
    function.

    If the input_schema is a str or dict, raw json_schema validation will
    be used. A jsonschema Draft7Validator is created and applied to
    json_schema_input_validation, creating a new partial which can be called
    by simply supplying the input to validate.

    If the input_schema is a pydantic BaseModel subclass, we apply the
    input_schema to pydantic_input_validation creating a new partial which can
    be called by simply supplying the input to validate.
    """
    input_schema = provider_description.input_schema

    if isinstance(input_schema, str):
        input_schema = json.loads(input_schema)
    elif isinstance(input_schema, dict):
        pass
    elif inspect.isclass(input_schema) and issubclass(input_schema, BaseModel):
        return partial(pydantic_input_validation, validator=input_schema)
    else:
        raise ActionProviderError(
            "Unable to determine input schema from ActionProviderDescription")

    return partial(json_schema_input_validation,
                   validator=Draft7Validator(input_schema))
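To illustrate the partial mechanism described in the docstring, here is a self-contained sketch; json_schema_input_validation below is a simplified stand-in for the real helper, and the schema is invented.

from functools import partial

from jsonschema import Draft7Validator
from jsonschema.exceptions import ValidationError

def json_schema_input_validation(action_input, validator):
    # Simplified stand-in: collect all violations and fail loudly if any exist.
    errors = [e.message for e in validator.iter_errors(action_input)]
    if errors:
        raise ValidationError("; ".join(errors))

schema = {"type": "object", "required": ["echo_string"]}
validate_input = partial(json_schema_input_validation,
                         validator=Draft7Validator(schema))

# The partial can now be called with just the input body, as the docstring describes.
validate_input({"echo_string": "hello"})  # passes silently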
Example No. 5
def validate(data, filename):
    schema = load_schema(filename)
    v = Draft7Validator(schema)
    return [{
        'path': "/".join(str(x) for x in e.path),
        "message": e.message
    } for e in v.iter_errors(data)]
Example No. 6
    def compare_schema(object, schema_name):

        url_schema = "./schemas/{schema}.json".format(schema=schema_name)
        with open(url_schema) as file:
            schema = json.load(file)
        validator = Draft7Validator(schema)
        errors = sorted(validator.iter_errors(object), key=lambda e: e.path)
        return errors
Example No. 7
    def _validate_with_schema(self, data_field, schema_field):
        schema = getattr(self, schema_field)
        data = getattr(self, data_field)

        # If schema is None, then no schema has been specified on the instance and thus no validation should occur.
        if schema:
            try:
                Draft7Validator(schema.data_schema, format_checker=draft7_format_checker).validate(data)
            except JSONSchemaValidationError as e:
                raise ValidationError({data_field: [f"Validation using the JSON Schema {schema} failed.", e.message]})
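For context, a self-contained sketch of what the format_checker argument adds: without it, Draft7Validator treats "format" purely as an annotation, while draft7_format_checker makes malformed values such as the e-mail below show up as errors. The schema and data are illustrative.

from jsonschema import Draft7Validator, draft7_format_checker

schema = {"type": "object",
          "properties": {"contact": {"type": "string", "format": "email"}}}
data = {"contact": "not-an-email"}

# Without a format checker the invalid e-mail passes silently
assert not list(Draft7Validator(schema).iter_errors(data))

# With the format checker attached, the same value is flagged
checked = Draft7Validator(schema, format_checker=draft7_format_checker)
assert any("email" in e.message for e in checked.iter_errors(data))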
Example No. 8
    def clean(self):
        """
        Validate the schema
        """
        super().clean()

        try:
            Draft7Validator.check_schema(self.data_schema)
        except SchemaError as e:
            raise ValidationError({"data_schema": e.message})

        if (type(self.data_schema) is not dict
                or "properties" not in self.data_schema
                or self.data_schema.get("type") != "object"):
            raise ValidationError({
                "data_schema":
                "Nautobot only supports context data in the form of an object and thus the "
                "JSON schema must be of type object and specify a set of properties."
            })
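An illustrative data_schema that would satisfy both checks in clean() above: it is a valid Draft 7 schema, its type is "object", and it declares a properties block. The field names are made up.

from jsonschema import Draft7Validator

data_schema = {
    "type": "object",
    "properties": {
        "ntp_servers": {"type": "array", "items": {"type": "string"}},
        "snmp_community": {"type": "string"},
    },
}

Draft7Validator.check_schema(data_schema)   # no SchemaError raised
assert data_schema.get("type") == "object"  # object type, as required
assert "properties" in data_schema          # properties block present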
Example No. 9
    def process_item(self, item, spider):
        if not hasattr(item, "jsonschema"):
            return item
        item_dict = dict(item)
        item_dict["start"] = item_dict["start"].isoformat()[:19]
        item_dict["end"] = item_dict["end"].isoformat()[:19]
        validator = Draft7Validator(item.jsonschema)
        props = list(item.jsonschema["properties"].keys())
        errors = list(validator.iter_errors(item_dict))
        error_props = [self._get_prop_from_error(error) for error in errors]
        for prop in props:
            self.error_count[prop] += 1 if prop in error_props else 0
        self.item_count += 1
        return item
Example No. 10
def validate_data(data: Dict[str, Any],
                  validator: Draft7Validator) -> ValidationResult:
    error_messages = []
    for error in validator.iter_errors(data):
        if error.path:
            # Elements of the error path may be integers or other non-string types,
            # but we need strings for use with join()
            error_path_for_message = ".".join([str(x) for x in error.path])
            error_message = f"'{error_path_for_message}' invalid due to {error.message}"
        else:
            error_message = error.message
        error_messages.append(error_message)

    error_msg = "; ".join(error_messages) if error_messages else None
    result = ValidationResult(errors=error_messages, error_msg=error_msg)
    return result
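A usage sketch for validate_data, assuming it sits in a module together with the ValidationResult NamedTuple shown in Example No. 15; the schema and payload below are invented.

from jsonschema import Draft7Validator

validator = Draft7Validator({
    "type": "object",
    "properties": {"port": {"type": "integer"}},
    "required": ["port"],
})

result = validate_data({"port": "not-a-number"}, validator)
print(result.errors)
# ["'port' invalid due to 'not-a-number' is not of type 'integer'"]
print(result.error_msg is None)  # False: at least one violation was found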
Example No. 11
def json_validation(data: dict, schema: dict, log: list):
    """
    Validate the given data.
    Takes JSON data, a schema to validate against,
    and a list used for subsequent logging.
    """
    result_errors_list = []
    validator = Draft7Validator(schema)
    errors = validator.iter_errors(data['data'])
    for error in errors:
        if error.path:
            error_path = ' '.join(map(str, error.path))
            result_errors_list.append(
                f'{error.message}, check the key {error_path}')
        else:
            result_errors_list.append(error.message)
    if result_errors_list:
        log.append([file, result_errors_list])
Example No. 12
def validate_json(weedcoco, schema="weedcoco", schema_dir=SCHEMA_DIR):
    """Check that the weedcoco matches its JSON schema"""
    if schema not in MAIN_SCHEMAS:
        raise ValueError(f"schema should be one of {sorted(MAIN_SCHEMAS)}")
    # Allow the links between schemas to rely on local schema files
    try:
        # memoise the schema
        ref_store = validate_json.ref_store
    except AttributeError:
        schema_objects = [
            yaml.safe_load(path.open()) for path in schema_dir.glob("*.yaml")
        ]
        validate_json.ref_store = {obj["$id"]: obj for obj in schema_objects}
        ref_store = validate_json.ref_store
    schema_uri = MAIN_SCHEMAS[schema]
    main_schema = ref_store[schema_uri]
    validator = Draft7Validator(main_schema, format_checker=FORMAT_CHECKER)
    validator.resolver = RefResolver(schema_uri, main_schema, store=ref_store)
    errors = [error for error in validator.iter_errors(weedcoco)]
    if len(errors):
        raise JsonValidationError(
            f"{len(errors)} violations found: {' '.join(err.message for err in errors)}",
            errors,
        )
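A self-contained sketch of the same local-resolution idea: every sub-schema is stored under its "$id", and a RefResolver created with store= lets "$ref" links between them resolve without fetching anything over the network. The two toy schemas are invented for illustration.

from jsonschema import Draft7Validator, RefResolver

category_schema = {
    "$id": "https://example.org/category.yaml",
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}
main_schema = {
    "$id": "https://example.org/main.yaml",
    "type": "object",
    "properties": {"category": {"$ref": "https://example.org/category.yaml"}},
}

ref_store = {s["$id"]: s for s in (category_schema, main_schema)}
resolver = RefResolver(main_schema["$id"], main_schema, store=ref_store)
validator = Draft7Validator(main_schema, resolver=resolver)

# "name" is required by the referenced category schema, so this instance fails
assert list(validator.iter_errors({"category": {}}))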
Example No. 13
def test_local_metadata_passes(schema_local_dict, sample_metadata_dict_local):
    validator = Draft7Validator(schema_local_dict)
    validator.validate(sample_metadata_dict_local)
Example No. 14
def test_remote_metadata_passes(schema_remote_dict,
                                sample_metadata_dict_remote):
    validator = Draft7Validator(schema_remote_dict)
    validator.validate(sample_metadata_dict_remote)
Example No. 15
import logging
from pathlib import Path
from typing import Any, Dict, List, NamedTuple, Optional

import yaml
from jsonschema.validators import Draft7Validator

log = logging.getLogger(__name__)

_schema_to_file_map = {
    "ActionRequest": "action_request.yaml",
    "ActionStatus": "action_status.yaml",
}
_validator_map: Dict[str, Draft7Validator] = {}

HERE: Path = Path(__file__).parent
for schema_name, yaml_file in _schema_to_file_map.items():
    with open(HERE / yaml_file, "r", encoding="utf-8") as specfile:
        jsonschema = yaml.safe_load(specfile)
        _validator_map[schema_name] = Draft7Validator(jsonschema)


class ValidationRequest(NamedTuple):
    provider_doc_type: str
    request_data: Dict[str, Any]


class ValidationResult(NamedTuple):
    errors: List[str]
    error_msg: Optional[str]


def request_validator(request: ValidationRequest) -> ValidationResult:
    schema = _validator_map.get(request.provider_doc_type)
    if schema is None:
Example No. 16
def test_passing_json_schema_validation():
    validator = Draft7Validator(action_provider_json_input_schema)
    data_in = {"echo_string": "hello"}
    json_schema_input_validation(data_in, validator)
Example No. 17
def preprocess(json_input):
    """Check whether there are errors related to JSON input format"""

    # 1. Set validator variable to validate the input according to the draft version 7 JSON schema
    validator = Draft7Validator(schema=json_schema)

    # 2. Show input errors in a readable, user-friendly way
    readable_input_errors = []
    global error
    for error in validator.iter_errors(instance=json_input):
        if len(error.path) == 0:
            readable_input_errors.append(f"In 'properties': {error.message}. ")
        else:
            readable_input_errors.append(
                f"In '{str(error.path[-1])}': {error.message}. ")

    # 3. Input validation. If no errors are found, preprocess the input data.
    global message
    message = ""

    if readable_input_errors:
        error = True
        message = readable_input_errors

    else:
        error = False
        message = "SUCCESS: Your data is valid."
        """Once data is valid, start preprocessing"""
        # 1. Handle with null (None) values: replace by default ones

        jid = json_input["data"]

        boolean_variables = [
            "garden",
            "equipped-kitchen",
            "swimmingpool",
            "furnished",
            "open-fire",
            "terrace",
        ]
        integer_variables = [
            "terrace-area",
            "garden-area",
            "facades-number",
            "land-area",
        ]

        # Numeric variables
        for num1 in integer_variables:
            try:
                if jid[num1] < 1:
                    jid[num1] = 0
                elif num1 == "facades-number":
                    if jid[num1] > 4:
                        jid[num1] = 4
            except (KeyError, TypeError):  # missing key or non-numeric value
                jid[num1] = 1

        # Boolean variables

        for bool1 in boolean_variables:
            try:
                if jid[bool1] in [True]:
                    jid[bool1] = 1
                else:
                    jid[bool1] = 0
            except KeyError:  # boolean field missing
                jid[bool1] = 0

        # Property-type
        try:
            jid_pt = jid["property-subtype"]
        except KeyError:
            jid_pt = jid["property-subtype"] = 0
        conn = sqlite3.connect("db/mydatabase.db")
        cursor = conn.cursor()
        cursor.execute("""select * from property_subtype""")
        df = pd.DataFrame(cursor.fetchall())
        conn.commit()
        conn.close()
        for col1 in df[0]:
            # for col1 in subtype:
            try:
                if jid_pt == col1:
                    jid["col1_" + col1] = 1
                else:
                    jid["col1_" + col1] = 0
            except Exception:
                jid["col1_" + col1] = 0

        # Building-state

        try:
            jid_bs = jid["building-state"]
        except KeyError:
            jid_bs = jid["building-state"] = 0
        conn = sqlite3.connect("db/mydatabase.db")
        cursor = conn.cursor()
        cursor.execute("""select * from building_state_agg""")
        df = pd.DataFrame(cursor.fetchall())
        conn.commit()
        conn.close()
        # if int(ids[i]) not in [int(x) for x in df[0]]:
        for col2 in df[0]:
            # for col2 in ["AS_NEW", "GOOD", "JUST_RENOVATED", "TO_RENOVATE", "TO_RESTORE"]:
            # print("col2 in df[0]", col2)
            try:
                if jid_bs == col2:
                    jid["col2_" + col2] = 1
                else:
                    jid["col2_" + col2] = 0
            except Exception:
                jid["col2_" + col2] = 0

        # Full-address
        try:  # 186,Kloosterstraat,Dilbeek,1702 = format
            if jid["full-address"] != "":
                longitude, latitude = longitude_latitude(jid["full-address"])
                jid["latitude"] = latitude
                jid["longitude"] = longitude
            else:
                jid["latitude"] = 0
                jid["longitude"] = 0
        except Exception:  # geocoding may fail for many reasons
            jid["latitude"] = 0
            jid["longitude"] = 0
            jid["full-address"] = ""
        # print("jid['latitude'], jid['longitude']",
        #   jid["latitude"], jid["longitude"])
        jid.pop("property-subtype")
        jid.pop("building-state")
        jid.pop("facades-number")
        # jid.pop("property-type")
        jid.pop("full-address")
    # print("###################################### JSON",
    #       len(json_input["data"].keys()))
    return error, message, json_input
Example No. 18
def test_failing_json_schema_validation():
    validator = Draft7Validator(action_provider_json_input_schema)
    with pytest.raises(BadActionRequest):
        json_schema_input_validation({}, validator)
Example No. 19
    def get_extra_context(self, request, instance):
        """
        Reuse the model tables for config context, device, and virtual machine but inject
        the `ConfigContextSchemaValidationStateColumn` and an object edit action button.
        """
        # Prep the validator with the schema so it can be reused for all records
        validator = Draft7Validator(instance.data_schema)

        # Config context table
        config_context_table = tables.ConfigContextTable(
            data=instance.configcontext_set.all(),
            orderable=False,
            extra_columns=[
                (
                    "validation_state",
                    tables.ConfigContextSchemaValidationStateColumn(validator, "data", empty_values=()),
                ),
                ("actions", ButtonsColumn(model=ConfigContext, buttons=["edit"])),
            ],
        )
        paginate = {
            "paginator_class": EnhancedPaginator,
            "per_page": get_paginate_count(request),
        }
        RequestConfig(request, paginate).configure(config_context_table)

        # Device table
        device_table = DeviceTable(
            data=instance.device_set.prefetch_related(
                "tenant", "site", "rack", "device_type", "device_role", "primary_ip"
            ),
            orderable=False,
            extra_columns=[
                (
                    "validation_state",
                    tables.ConfigContextSchemaValidationStateColumn(validator, "local_context_data", empty_values=()),
                ),
                ("actions", ButtonsColumn(model=Device, buttons=["edit"])),
            ],
        )
        paginate = {
            "paginator_class": EnhancedPaginator,
            "per_page": get_paginate_count(request),
        }
        RequestConfig(request, paginate).configure(device_table)

        # Virtual machine table
        virtual_machine_table = VirtualMachineTable(
            data=instance.virtualmachine_set.prefetch_related("cluster", "role", "tenant", "primary_ip"),
            orderable=False,
            extra_columns=[
                (
                    "validation_state",
                    tables.ConfigContextSchemaValidationStateColumn(validator, "local_context_data", empty_values=()),
                ),
                ("actions", ButtonsColumn(model=VirtualMachine, buttons=["edit"])),
            ],
        )
        paginate = {
            "paginator_class": EnhancedPaginator,
            "per_page": get_paginate_count(request),
        }
        RequestConfig(request, paginate).configure(virtual_machine_table)

        return {
            "config_context_table": config_context_table,
            "device_table": device_table,
            "virtual_machine_table": virtual_machine_table,
            "active_tab": "validation",
        }
Example No. 20
def add_validator(response_data):
    with open("schema2.json", 'r') as f:
        dict_schema = json.load(f)
    va = Draft7Validator(dict_schema)
    va.validate(response_data)
Example No. 21
def test_input_schema_is_valid():
    Draft7Validator.check_schema(input_schema)
Example No. 22
def compare_schema(object):
    with open('../PivotalTrackerAT09/schemas/project_schema.json') as file:
        schema = json.load(file)
    validator = Draft7Validator(schema)
    errors = sorted(validator.iter_errors(object), key=lambda e: e.path)
    return errors
Example No. 23
    def __init__(self, fields_json_schema):
        self.validator = Draft7Validator(fields_json_schema)
Example No. 24
class GametaContext(object):
    """
    GametaContext for the current Gameta session

    Attributes:
        __schema__ (Dict): JSON Schema for Gameta .meta file
        validators (Dict[str, jsonschema.Draft7Validator]): JSON Schema validators for each object component
        reserved_params (Dict[str, List[str]]): Reserved parameters for each object group

        project_dir (Optional[str]): Project directory
        is_metarepo (bool): Project is a metarepo
        gameta_data (Dict): Gameta data extracted and exported
        repositories (Dict[str, Dict]): Data of all the repositories contained in the metarepo
        tags (Dict[str, List[str]]): Repository data organised according to tags
        constants (Dict[str, Union[str, int, bool, float]]): Gameta constants data extracted
        commands (Dict): Gameta commands data extracted
        gitignore_data (List[str]): Gitignore data extracted from the .gitignore file
        env_vars (Dict): Extracted environment variables with keys prefixed with $
        files (Dict[str, File]): File formats supported
    """
    __schema__: Dict = {
        '$schema': "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "properties": {
            "repositories": {
                "$ref": "#/definitions/repositories"
            },
            "commands": {
                "$ref": "#/definitions/commands"
            },
            "constants": {
                "$ref": "#/definitions/constants"
            },
        },
        "required": ["repositories"],
        'definitions': {
            "repositories": {
                "type": "object",
                "properties": {
                    "url": {
                        "type": ["string", "null"],
                        "format": "uri"
                    },
                    "path": {
                        "type": "string"
                    },
                    "tags": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "__metarepo__": {
                        "type": "boolean"
                    }
                },
                "required": ["url", "path", "__metarepo__"]
            },
            "commands": {
                "type": "object",
                "properties": {
                    "commands": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                    },
                    "description": {
                        "type": "string"
                    },
                    "raise_errors": {
                        "type": "boolean"
                    },
                    "shell": {
                        "type": "boolean"
                    },
                    "python": {
                        "type": "boolean"
                    },
                    "verbose": {
                        "type": "boolean"
                    },
                    "repositories": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                    },
                    "tags": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                    }
                },
                "minProperties": 6,
                "maxProperties": 8,
                "additionalProperties": False,
            },
            "constants": {
                "type": "object",
                "propertyNames": {
                    "pattern": "^[$A-Z0-9_-]"
                }
            }
        }
    }

    validators = {
        'meta': Draft7Validator(__schema__),
        'repositories':
        Draft7Validator(__schema__['definitions']['repositories']),
        'commands': Draft7Validator(__schema__['definitions']['commands']),
        'constants': Draft7Validator(__schema__['definitions']['constants'])
    }

    reserved_params: Dict[str, List[str]] = {
        'repositories':
        list(__schema__['definitions']['repositories']['properties'].keys()),
        'commands':
        list(__schema__['definitions']['commands']['properties'].keys())
    }

    def __init__(self):
        self.project_dir: Optional[str] = None
        self.gitignore_data: List[str] = []
        self.is_metarepo: bool = False
        self.gameta_data: Dict = {}
        self.constants: Dict[str, Union[str, int, bool, float]] = {}
        self.commands: Dict = {}
        self.repositories: Dict[str, Dict] = {}
        self.tags: Dict[str, List[str]] = {}

        self.env_vars: Dict = {'$' + k.upper(): v for k, v in environ.items()}

        self.files: Dict[str, File] = {
            'meta': Meta(self),
            'gitignore': GitIgnore(self)
        }

    @property
    def project_name(self) -> str:
        """
        Returns the name of the project

        Returns:
            str: Name of the project
        """
        return basename(self.project_dir)

    @property
    def meta(self) -> str:
        """
        Returns the path to the .meta file of the project, i.e. where it should be if the Project has not been
        initialised

        Returns:
            str: Path to the project's .meta file
        """
        return self.files['meta'].file

    @property
    def gitignore(self) -> str:
        """
        Returns the path to the .gitignore file of the project, i.e. where it should be if the Project has not been
        initialised

        Returns:
            str: Path to the project's .gitignore file
        """
        return self.files['gitignore'].file

    def add_gitignore(self, path: str) -> None:
        """
        Adds the path to the gitignore_data

        Args:
            path (str): Path to be added

        Returns:
            None
        """
        self.gitignore_data.append(path + '/\n')

    def remove_gitignore(self, path: str) -> None:
        """
        Removes the path from the gitignore_data

        Args:
            path (str): Path to be removed

        Returns:
            None
        """
        try:
            self.gitignore_data.remove(path + '/\n')
        except ValueError:
            return

    def is_primary_metarepo(self, repo: str) -> bool:
        """
        Returns a boolean if the repository is a primary meta-repository

        Args:
            repo (str): Repository to check

        Returns:
            bool: Flag to indicate if repository is a primary meta-repository
        """
        return abspath(self.repositories[repo]["path"]) == self.project_dir

    def load(self) -> None:
        """
        Loads data from all supported file formats

        Returns:
            None
        """
        for file, interface in self.files.items():
            interface.load()

    def export(self) -> None:
        """
        Exports data to all supported file formats

        Returns:
            None
        """
        for file, interface in self.files.items():
            interface.export()

    def generate_tags(self) -> None:
        """
        Updates the tag indexes of the repositories

        Returns:
            None
        """
        for repo, details in self.repositories.items():
            for tag in details.get('tags', []):
                if tag in self.tags:
                    self.tags[tag].append(repo)
                else:
                    self.tags[tag] = [repo]

    def apply(
        self,
        commands: List[str],
        repos: List[str] = (),
        shell: bool = False,
        python: bool = False,
    ) -> Generator[Tuple[str, str], None, None]:
        """
        Yields a list of commands to all repositories or a selected set of them, substitutes relevant parameters stored
        in .meta file

        Args:
            commands (List[str]): Commands to be applied
            repos (List[str]): Selected set of repositories
            shell (bool): Flag to indicate if a separate shell should be used
            python (bool): Flag to indicate if commands are to be tokenised as Python commands

        Returns:
            None
        """
        repositories: List[Tuple[str, Dict[str, str]]] = \
            [(repo, details) for repo, details in self.repositories.items() if repo in repos] or \
            list(self.repositories.items())

        for repo, details in repositories:
            # Generate complete set of parameters for substitution

            with self.cd(details['path']):
                repo_commands: List[str] = [
                    c.format(**self.generate_parameters(repo, details, python))
                    for c in deepcopy(commands)
                ]
                if python:
                    command: List[str] = self.python(repo_commands)
                elif shell:
                    command: List[str] = self.shell(repo_commands)
                else:
                    command: List[str] = self.tokenise(
                        ' && '.join(repo_commands))
                yield repo, command

    def generate_parameters(self,
                            repo: str,
                            repo_details: Dict,
                            python: bool = False) -> Dict:
        """
        Generates the set of parameters for each repository to be substituted into command strings. 
        
        Args:
            repo (str): Repository name of parameters to be generated
            repo_details (Dict): Repository details from .meta file
            python (bool): Flag to indicate if Python variables should be generated, defaults to False

        Returns:
            Dict: Generated set of parameters
        """

        combined_details: Dict = {
            k: v.format(**self.env_vars) if isinstance(v, str) else v
            for k, v in deepcopy(repo_details).items()
        }
        if python:
            repositories: Dict = deepcopy(self.repositories)
            repositories[repo] = deepcopy(combined_details)
            combined_details.update({
                '__repos__':
                json.dumps(repositories).replace("true", "True").replace(
                    "false", "False").replace("null", "None")
            })
        combined_details.update(self.constants)
        combined_details.update(self.env_vars)
        return combined_details

    @staticmethod
    def tokenise(command: str) -> List[str]:
        """
        Tokenises the commands into a form that is readily acceptable by subprocess

        Args:
            command (str): Constructed commands to be tokenised

        Returns:
            List[str]: Tokenised commands
        """
        return shlex.split(command)

    @contextmanager
    def cd(self, sub_directory: str) -> Generator[str, None, None]:
        """
        Changes directory to a subdirectory within the project

        Args:
            sub_directory (str): Relative subdirectory within the project

        Returns:
            Generator[str, None, None]: Path to current directory
        """
        cwd = getcwd()
        path = normpath(join(self.project_dir, sub_directory.lstrip('/')))
        chdir(path)
        yield path
        chdir(cwd)

    def shell(self, commands: List[str]) -> List[str]:
        """
        Prepares commands to be executed in a separate shell as subprocess does not natively handle piping

        Args:
            commands (List[str]): User-defined commands

        Returns:
            List[str]: Shell command string to be executed by subprocess
        """
        return self.tokenise(f'{SHELL} -c "' + ' && '.join(commands) + '"')

    def python(self, commands: List[str]) -> List[str]:
        """
        Prepares commands to be executed by Python interpreter via shell

        Args:
            commands (List[str]): Python scripts

        Returns:
            List[str]: Python prepared commands to be executed by subprocess
        """
        return self.shell([
            "python3 -c \'{}\'".format(command.replace('"', '\\\"'))
            for command in commands
        ])
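For illustration, the component validators declared above can be used directly against a single configuration block; the repository entry below is invented and satisfies the "repositories" definition (url, path, __metarepo__).

repo_entry = {
    "url": "https://github.com/example/repo.git",
    "path": ".",
    "__metarepo__": True,
}
# No violations expected: all required keys are present with the right types
assert not list(GametaContext.validators["repositories"].iter_errors(repo_entry))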
Example No. 25
                'maxItems': 2,
            },
            'minItems': 3,
        },
        'POI': {
            'type': 'object',
            'additionalProperties': False,
            'properties': {
                'startTime': {
                    'type': 'string'
                },
                'endTime': {
                    'type': 'string'
                },
            },
            'required': ['startTime', 'endTime'],
        },
        'PlatformID': {
            'type': 'array',
            'items': {
                'type': 'number'
            },
            'minItems': 1,
        },
    },
    'required': ['TargetArea', 'POI'],
}

SEARCH_QUERY_VALIDATOR = Draft7Validator(SEARCH_SCHEMA)
OPPORTUNITY_QUERY_VALIDATOR = Draft7Validator(OPPORTUNITY_SCHEMA)
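A hedged sketch of how module-level validators like these are typically used at request time; since SEARCH_SCHEMA is truncated above, a tiny stand-in schema is defined here and the query payload is invented.

from jsonschema import Draft7Validator

# Stand-in for the (truncated) SEARCH_SCHEMA above
SEARCH_SCHEMA = {
    "type": "object",
    "required": ["TargetArea", "POI"],
    "properties": {
        "POI": {"type": "object", "required": ["startTime", "endTime"]},
    },
}
SEARCH_QUERY_VALIDATOR = Draft7Validator(SEARCH_SCHEMA)

query = {"TargetArea": [[0.0, 0.0], [1.0, 1.0]],
         "POI": {"startTime": "2021-01-01T00:00:00Z"}}
problems = ["/".join(map(str, e.path)) + ": " + e.message
            for e in SEARCH_QUERY_VALIDATOR.iter_errors(query)]
print(problems)  # e.g. ["POI: 'endTime' is a required property"]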