def test_get_scalar_loader():
    """Exercise ``get_scalar_loader`` for both creation modes and its error paths."""
    document = DescriptionDocument(
        content=YAML(
            value={"foo": {"value": YAML(value=1), "unit": YAML(value="m")}}
        ),
        file_path=Path(),
    )
    expected_scalar = Scalar(1.0, "m", "length")

    # A loader built from ``category`` reads value/unit from the document.
    loader_from_category = get_scalar_loader(category="length")
    assert loader_from_category(key="foo", alfacase_content=document) == expected_scalar

    # A loader built from ``from_unit`` yields the same scalar.
    loader_from_unit = get_scalar_loader(from_unit="m")
    assert loader_from_unit(key="foo", alfacase_content=document) == expected_scalar

    # Omitting both parameters is rejected.
    expected_msg = "Either 'category' or 'from_unit' parameter must be defined"
    with pytest.raises(ValueError, match=expected_msg):
        get_scalar_loader()

    # Supplying both parameters is also rejected.
    expected_msg = "Both parameters 'category' and 'from_unit' were provided, only one must be informed"
    with pytest.raises(ValueError, match=expected_msg):
        get_scalar_loader(category="length", from_unit="m")
        def generate_description(self, alfacase_config: AlfacaseTestConfig):
            """
            Helper method to generate a "Description" from the given alfacase_config

            Round-trips ``alfacase_config.description_expected``: serializes it to
            ALFAcase (YAML) text, parses the text back against the expected schema,
            and hands the result to the configured load function.
            """
            alfacase_string = convert_description_to_alfacase(
                alfacase_config.description_expected
            )
            # dirty_load tolerates flow style, which the serializer may emit.
            alfacase_content = strictyaml.dirty_load(
                yaml_string=alfacase_string,
                schema=alfacase_config.schema,
                allow_flow_style=True,
            )

            # 'LoadPvtModelsDescription' is special case and the DescriptionDocument doesn't need a FakeKey
            skip_dict = (
                alfacase_config.load_function_name == "load_pvt_models_description"
            )

            # Wrap the parsed content the way the load function expects it:
            # a list for sequence loaders, or under a placeholder dict key.
            if alfacase_config.is_sequence:
                alfacase_content = [alfacase_content]
            elif alfacase_config.is_dict and not skip_dict:
                alfacase_content = YAML(
                    CommentedMap({YAML("FakeKey"): alfacase_content})
                )

            description_document = DescriptionDocument(
                content=alfacase_content, file_path=self.tmp_path / "test_case.alfacase"
            )
            # Resolve the loader by name on the alfacase_to_case module.
            return getattr(alfacase_to_case, alfacase_config.load_function_name)(
                description_document
            )
  def _ValidateEntityContent(self, entity: syaml.YAML) -> None:
    """Validates the contents of a single entity.

    The schema applied is chosen from the container's config_mode and, in
    UPDATE mode, from the EntityOperation declared on the entity (if any).

    Args:
      entity: YAML object for the entityContents

    Raises:
      KeyError: if self._config_mode is not set to a known value, or if a
        translation is present without a cloud_device_id.
    """
    mode = self._config_mode
    if mode == ConfigMode.INITIALIZE:
      selected = _ENTITY_INIT_SCHEMA
    elif mode == ConfigMode.UPDATE:
      selected = _ENTITY_UPDATE_SCHEMA
      if ENTITY_OPERATION_KEY in entity:
        operation = entity[ENTITY_OPERATION_KEY]
        if operation == EntityOperation.ADD.value:
          selected = _ENTITY_ADD_SCHEMA
        elif operation == EntityOperation.DELETE.value:
          selected = _ENTITY_DELETE_SCHEMA
    else:
      raise KeyError('No valid _config_mode is set')

    entity.revalidate(syaml.Map(selected))

    # A translated entity must also carry its cloud device id.
    if TRANSLATION_KEY in entity.data.keys():
      if ENTITY_CLOUD_DEVICE_ID_KEY not in entity.data.keys():
        raise KeyError('cloud_device_id required when translation is present.')
def test_load_pvt_tables_with_pvt_model_selector(
    description_document_for_pvt_tables_test, tmp_path
):
    """
    PvtModelsDescription.tables must let the user pick one of several pvt
    models stored inside a single file, through absolute or relative paths.

    # Ex.: " <tab_file> | <pvt_model_name> "
    """
    doc = description_document_for_pvt_tables_test
    base_dir = doc.file_path.parent

    # Same "file|label" selector, once absolute and once relative.
    doc.content["tables"]["acme"] = YAML(str(base_dir / "acme.tab|SOMELABEL"))
    doc.content["tables"]["acme_2"] = YAML("acme.tab|SOMELABEL")

    loaded = load_pvt_models_description(document=doc)
    expected_path = base_dir / "acme.tab|SOMELABEL"
    assert loaded.tables == {"acme": expected_path, "acme_2": expected_path}

    # Ensure that the pvt file has the pvt_model GaveaDST
    case_description.CaseDescription(
        pvt_models=loaded
    ).ensure_valid_references()
  def _ValidateEntityContent(self, entity: syaml.YAML) -> None:
    """Validates the contents of a single entity.

    The schema applied is chosen from the container's config_mode and, in
    UPDATE mode, from the operation declared on the entity (if any).

    Args:
      entity: YAML object for the entityContents

    Raises:
      KeyError: if self._config_mode is not set to a known value.
    """
    mode = self._config_mode
    if mode == ConfigMode.INITIALIZE:
      selected = _ENTITY_INIT_SCHEMA
    elif mode == ConfigMode.UPDATE:
      selected = _ENTITY_UPDATE_SCHEMA
      if _ENTITY_MODE_KEY in entity:
        operation = entity[_ENTITY_MODE_KEY]
        if operation == EntityOperation.ADD.value:
          selected = _ENTITY_ADD_SCHEMA
        elif operation == EntityOperation.DELETE.value:
          selected = _ENTITY_DELETE_SCHEMA
    else:
      raise KeyError('No valid _config_mode is set')

    entity.revalidate(syaml.Map(selected))
  def _ValidateEntityBlock(self, block: syaml.YAML) -> None:
    """Validates a block of entities and adds them to the validated blocks.

    Args:
      block: YAML representing one or more entities

    Raises:
      ValueError: if block contains a key that has already been found.
    """
    for key in block.keys():
      if key in self._validated_entities:
        # Bug fix: the message was a plain string literal, so the offending
        # key was never interpolated; it needs the f-string prefix.
        raise ValueError(f'Duplicate key {key}')
      self._ValidateEntityContent(block.get(key))
    self._validated_entities.update(block.data)
# Example #7
# 0
def convert_description_to_alfacase(
        alfacase_description,
        *,
        enable_flow_style_on_numpy: bool = False) -> str:
    """
    Serialize a case description (an attrs-decorated object) to YAML text.

    strictyaml's "as_yaml" requires every dict item to already be a string;
    ``convert_dict_to_valid_alfacase_format`` performs that conversion first.

    :param enable_flow_style_on_numpy:
        Signalize that numpy arrays should dumped with flow style enabled.

        enable_flow_style_on_numpy=False
        .. code-block:: python

            pressure:
                - 1
                - 2

        enable_flow_style_on_numpy=True
        .. code-block:: python

            pressure: [1, 2]

    """
    import attr
    from strictyaml import YAML
    from .case_to_alfacase import convert_dict_to_valid_alfacase_format

    raw_dict = attr.asdict(alfacase_description, recurse=False)
    formatted_dict = convert_dict_to_valid_alfacase_format(
        raw_dict, enable_flow_style_on_numpy=enable_flow_style_on_numpy
    )
    return YAML(formatted_dict).as_yaml()
def _ParseTypeString(type_str: syaml.YAML) -> Tuple[str, str]:
  """Parses an entity type string into a namespace and type name.

  Args:
    type_str: entity type string from YAML

  Returns:
    Type namespace string
    Type name string
  """

  type_parse = type_str.split('/')

  if len(type_parse) == 1:
    print('Type improperly formatted, a namespace is missing: ', type_str)
    raise TypeError(
        f'Type improperly formatted, a namespace is missing: {type_str}\n' +
        'Proper formatting is: NAMESPACE/TYPE_NAME')

  if len(type_parse) > 2:
    print('Type improperly formatted: ', type_str)
    raise TypeError(f'Type improperly formatted: {type_str}\n' +
                    'Proper formatting is: NAMESPACE/TYPE_NAME')

  return type_parse[0], type_parse[1]
def test_load_pvt_tables_with_absolute_file(
    description_document_for_pvt_tables_test, tmp_path
):
    """
    PvtModelsDescription.tables should accept absolute path to a tab file
    """
    doc = description_document_for_pvt_tables_test

    target_dir = tmp_path / "new_folder"
    target_dir.mkdir()

    shutil.copy2(
        src=tmp_path / "acme.tab",
        dst=tmp_path / "new_folder/acme.tab",
    )

    # Point the YAML at the copied, valid PVT file via an absolute path.
    doc.content["tables"]["acme"] = YAML(
        str(doc.file_path.parent / "new_folder/acme.tab")
    )

    loaded = load_pvt_models_description(document=doc)
    assert loaded.tables == {
        "acme": doc.file_path.parent / "new_folder/acme.tab",
        "acme_2": doc.file_path.parent / "acme.tab",
    }
# Example #10
# 0
def test_read_valid_explicit_full_blown_pipelines_config():
    """A fully explicit two-pipeline config parses and validates."""
    yaml_pipelines_config = """
    pipelines:
    - name: pytest
      type: test
      coverage: .coverage
      commands:
        partial-scope: pytest --cov=tia {tests}
        full-scope: pytest --cov=tia tests
      dirs:
      - path:       /foo_dir
        full-scope: yes
      - path:       /bar_dir
        full-scope: no
      files:
      - path:       foo_file.py
        full-scope: yes
      - path:       bar_file.py
        full-scope: no
    - name: pylint
      type: analyzer
      commands:
        partial-scope: pylint {files}
        full-scope: pylint tia
      dirs:
      - path:       /baz_dir
        full-scope: no
      files:
      - path:       baz_file.ini
        full-scope: yes
    """
    yaml_pipelines = read_and_validate_config(yaml_pipelines_config)
    expected_yaml_instance = YAML(
        OrderedDict([
            ('pipelines', [
                OrderedDict([
                    ('name', 'pytest'),
                    ('type', 'test'),
                    ('coverage', '.coverage'),
                    ('commands', OrderedDict([
                        ('partial-scope', 'pytest --cov=tia {tests}'),
                        ('full-scope', 'pytest --cov=tia tests'),
                    ])),
                    ('dirs', [
                        OrderedDict([('path', '/foo_dir'), ('full-scope', True)]),
                        OrderedDict([('path', '/bar_dir'), ('full-scope', False)]),
                    ]),
                    ('files', [
                        OrderedDict([('path', 'foo_file.py'), ('full-scope', True)]),
                        OrderedDict([('path', 'bar_file.py'), ('full-scope', False)]),
                    ]),
                ]),
                OrderedDict([
                    ('name', 'pylint'),
                    ('type', 'analyzer'),
                    ('commands', OrderedDict([
                        ('partial-scope', 'pylint {files}'),
                        ('full-scope', 'pylint tia'),
                    ])),
                    ('dirs', [OrderedDict([('path', '/baz_dir'), ('full-scope', False)])]),
                    ('files', [OrderedDict([('path', 'baz_file.ini'), ('full-scope', True)])]),
                ]),
            ])
        ])
    )
    # Comparing a bool with `== True` is an anti-pattern (flake8 E712);
    # assert the value directly.
    assert is_pipelines_config_valid(yaml_pipelines)
    assert yaml_pipelines == expected_yaml_instance
# Example #11
# 0
    def _validate(cls, content: YAML):
        """
        Does a second pass validation of file content.

        Also applies this feature:
                - polar: foo:bar
            is translated to:
                - polar:
                    CL: foo:bar:CL
                    CD: foo:bar:CD

        Errors are raised if file content is incorrect.

        :param content: parsed YAML content of the mission definition file
        """
        # Phase names are collected first so route steps can be checked
        # against them below.
        step_names = set(content[PHASE_DEFINITIONS_TAG].keys())

        # Expand the polar shorthand in every phase and in each of its steps.
        for phase_definition in content[PHASE_DEFINITIONS_TAG].values():
            cls._process_polar_definition(phase_definition)
            for segment_definition in phase_definition[STEPS_TAG]:
                cls._process_polar_definition(segment_definition)

        for route_definition in content[ROUTE_DEFINITIONS_TAG].values():
            # Routes are expected to contain some phases and ONE cruise phase
            cruise_step_count = 0
            for step in route_definition[STEPS_TAG]:
                cls._process_polar_definition(step)
                # Each route step must be either a phase or a cruise segment.
                Ensure(step.keys()).contains_one_of(
                    [PHASE_TAG, CRUISE_TYPE_TAG])

                if PHASE_TAG in step:
                    Ensure(step[PHASE_TAG]).is_in(step_names)
                    YAML(step).revalidate(
                        Map(cls._get_phase_in_route_mapping()))
                else:  # CRUISE_TYPE_TAG in step
                    cruise_step_count += 1
                    YAML(step).revalidate(
                        Map(cls._get_segment_mapping(CRUISE_TYPE_TAG)))
            Ensure(cruise_step_count).is_less_than_or_equal_to(1)

        # Every mission step must reference a known phase or a known route.
        for step in content[MISSION_DEFINITION_TAG][STEPS_TAG]:
            step_type, step_name = tuple(*step.items())
            if step_type == PHASE_TAG:
                Ensure(step_name).is_in(content[PHASE_DEFINITIONS_TAG].keys())
            elif step_type == ROUTE_TAG:
                Ensure(step_name).is_in(content[ROUTE_DEFINITIONS_TAG].keys())
  def _ValidateMetadataContent(self, metadata_block: syaml.YAML) -> None:
    """Validates the metadata block and extracts the operation mode.

    Falls back to the default config mode when no mode key is present.

    Args:
      metadata_block: YAML contents of the config metadata block
    """
    mode_str = metadata_block.get(_CONFIG_MODE_KEY, None)
    self._config_mode = (
        ConfigMode.Default() if mode_str is None
        else ConfigMode.FromString(mode_str)
    )
def revalidate_typeschema(type_schema: YAML):
    """Re-validate every requirement entry of ``type_schema`` IN PLACE.

    THIS MUTATES `type_schema`! Calling the function can change
    {"number_of_columns": {"value": "1"}} into {"number_of_columns": {"value": 1}}.

    Perform validation on each dictionary in both lists. This is required due
    to limitations in strictyaml (see the strictyaml documentation on
    revalidation for details): the initial validation only checks that the map
    is in the right general format, while this pass checks that the provided
    values are valid together.
    """
    # Fixed the misspelled loop variable (`requriment_type` -> `requirement_type`).
    for requirement_type in RequirementTypes:
        for requirement in type_schema.get(str(requirement_type), []):
            field = Fields.from_string(requirement.data["field"])
            requirement.revalidate(field.to_requirements(requirement_type))
        def generate_description(
            self,
            alfacase_config: AlfacaseTestConfig,
            remove_redundant_input_type_data: bool = False,
        ):
            """
            Helper method to generate a "Description" from the given alfacase_config

            Serializes ``description_expected`` to ALFAcase text, parses it back
            against the expected schema, then loads it with the named load
            function or, failing that, with an instance loader for the
            description's class.
            """
            alfacase_string = convert_description_to_alfacase(
                alfacase_config.description_expected,
                remove_redundant_input_type_data=
                remove_redundant_input_type_data,
            )
            # dirty_load tolerates flow style, which the serializer may emit.
            alfacase_content = strictyaml.dirty_load(
                yaml_string=alfacase_string,
                schema=alfacase_config.schema,
                allow_flow_style=True,
            )

            # 'LoadPvtModelsDescription' is special case and the DescriptionDocument doesn't need a FakeKey
            skip_dict = (alfacase_config.load_function_name ==
                         "load_pvt_models_description")

            # Wrap the parsed content the way the load function expects it:
            # a list for sequence loaders, or under a placeholder dict key.
            if alfacase_config.is_sequence:
                alfacase_content = [alfacase_content]
            elif alfacase_config.is_dict and not skip_dict:
                alfacase_content = YAML(
                    CommentedMap({YAML("FakeKey"): alfacase_content}))

            description_document = DescriptionDocument(
                content=alfacase_content,
                file_path=self.tmp_path / "test_case.alfacase")
            # Prefer a load function published on the module; otherwise fall
            # back to a generic loader keyed on the description's class.
            if hasattr(alfacase_to_case, alfacase_config.load_function_name):
                loader = getattr(alfacase_to_case,
                                 alfacase_config.load_function_name)
            else:
                loader = alfacase_to_case.get_instance_loader(
                    class_=alfacase_config.description_expected.__class__)

            return loader(description_document)
def test_get_array_loader():
    """Exercise ``get_array_loader`` for both creation modes and its error path."""
    document = DescriptionDocument(
        content=YAML(
            value={"foo": {"values": YAML(value=[1, 2]), "unit": YAML(value="m")}}
        ),
        file_path=Path(),
    )
    expected_array = Array("length", [1.0, 2.0], "m")

    # A loader built from ``category`` reads values/unit from the document.
    loader_from_category = get_array_loader(category="length")
    assert loader_from_category(key="foo", alfacase_content=document) == expected_array

    # A loader built from ``from_unit`` yields the same array.
    loader_from_unit = get_array_loader(from_unit="m")
    assert loader_from_unit(key="foo", alfacase_content=document) == expected_array

    # Omitting both parameters is rejected.
    expected_msg = "Either 'category' or 'from_unit' parameter must be defined"
    with pytest.raises(ValueError, match=expected_msg):
        get_array_loader()
# Example #16
# 0
def is_pipelines_config_valid(strictyaml_pipelines: YAML) -> bool:
    """
    Return True when `strictyaml_pipelines` revalidates against the pipelines
    schema, False otherwise.

    Bug fix: the return annotation claimed ``-> YAML`` although the function
    only ever returns a bool.

    TODO: Refactor to test and analyzer specific config validation.
    """
    pipelines_schema = Map({
        "pipelines": Seq(
            Map({
                "name": Str(),
                "type": Enum(["test", "analyzer"]),
                Optional("coverage"): Str(),
                Optional("commands"): Map({
                    "partial-scope": Str(),
                    "full-scope": Str(),
                }),
                Optional("dirs"): Seq(
                    Map({
                        "path": Str(),
                        Optional("full-scope", default=False): Bool(),
                    })),
                Optional("files"): Seq(
                    Map({
                        "path": Str(),
                        Optional("full-scope", default=False): Bool(),
                    })),
            })),
    })
    try:
        strictyaml_pipelines.revalidate(pipelines_schema)
        return True
    except YAMLValidationError:
        return False
def test_load_pvt_tables_with_invalid_file(
        description_document_for_pvt_tables_test, tmp_path):
    """
    PvtModelsDescription.tables should raise a RuntimeError when the tab file
    is not found.
    """
    doc = description_document_for_pvt_tables_test

    # Point the YAML at a PVT model file that does not exist.
    doc.content["tables"]["gavea"] = YAML("Foo.tab")

    expected_msg = (
        "The PVT Table Foo.tab must be place within the test_case.alfacase file on *"
    )
    with pytest.raises(RuntimeError, match=expected_msg):
        load_pvt_models_description(document=doc)
def _ParseLinks(links_body: syaml.YAML) -> Set[link.Link]:
  """Parses YAML defining links between the fields of one entity and another.

  Links are always defined on the target entity.

  Args:
    links_body: YAML body for the entity links

  Returns:
    A set of Link instances
  """

  return {
      link.Link(source_entity, field_map)
      for source_entity, field_map in links_body.items()
  }
def _ParseTranslation(
    translation_body: syaml.YAML
) -> Dict[str, field_translation.FieldTranslation]:
  """Parses YAML defining the translation of an entity's points.

  Args:
    translation_body: YAML body for the entity translation

  Returns:
    A dictionary from field names to FieldTranslation instances, or the body
    itself when it is a plain string.
    NOTE(review): the str passthrough contradicts the declared return type;
    confirm callers handle a bare string.
  """

  # A plain string body (shorthand form) is returned unchanged.
  if isinstance(translation_body, str):
    return translation_body

  translation = {}
  # TODO(b/176094783): reuse the tuple from the ontology validator
  for std_field_name in translation_body.keys():
    # A string value appears to mean a direct mapping; no FieldTranslation
    # is built for it.
    if isinstance(translation_body[std_field_name], str):
      continue
    # TODO(b/176097512): Manually defined non UDMI translations should be
    #  accepted by the validator
    ft = translation_body[std_field_name]

    # Strip the present_value/points wrapper text to recover the raw field
    # name.
    raw_field_name = str(ft[PRESENT_VALUE])\
      .replace(PRESENT_VALUE, '')\
      .replace(POINTS, '')\
      .replace('.', '')

    # Units may appear under either of two alternate keys; default empty.
    units = dict()
    if UNITS_KEY in ft.keys():
      units = ft[UNITS_KEY][VALUES_KEY]
    elif UNIT_VALUES_KEY in ft.keys():
      units = ft[UNIT_VALUES_KEY]

    states = dict()
    if STATES_KEY in ft.keys():
      states = ft[STATES_KEY]

    translation[std_field_name] = field_translation.FieldTranslation(
        std_field_name, raw_field_name, units, states)

  return translation
def _ParseConnections(
    connections_body: syaml.YAML) -> Set[connection.Connection]:
  """Parses YAML defining connections between one entity and other.

  Connections are always defined on the target entity.

  Args:
    connections_body: YAML body for the entity connections

  Returns:
    A set of Connection instances
  """

  return {
      connection.Connection(connection_type, source_entity)
      for source_entity, connection_type in connections_body.items()
  }
# Example #21
# 0
def convert_description_to_alfacase(
    alfacase_description: case_description.CaseDescription,
    *,
    enable_flow_style_on_numpy: bool = False,
    remove_redundant_input_type_data: bool = True,
) -> str:
    """
    Serialize a case description (an attrs-decorated object) to YAML text.

    strictyaml's "as_yaml" requires every dict item to already be a string;
    ``convert_dict_to_valid_alfacase_format`` performs that conversion first.

    :param alfacase_description:
        Alfasim case description.

    :param enable_flow_style_on_numpy:
        Signalize that numpy arrays should dumped with flow style enabled.

        enable_flow_style_on_numpy=False
        .. code-block:: python

            pressure:
                - 1
                - 2

        enable_flow_style_on_numpy=True
        .. code-block:: python

            pressure: [1, 2]

    :param remove_redundant_input_type_data:
        For transient entries remove input type selector, and the unused constant or curve entries.

    """
    import attr
    from strictyaml import YAML
    from .case_to_alfacase import convert_dict_to_valid_alfacase_format

    raw_dict = attr.asdict(alfacase_description, recurse=False)
    formatted_dict = convert_dict_to_valid_alfacase_format(
        raw_dict,
        enable_flow_style_on_numpy=enable_flow_style_on_numpy,
        remove_redundant_input_type_data=remove_redundant_input_type_data,
    )
    return YAML(formatted_dict).as_yaml()
# Example #22
# 0
def test_read_valid_single_pipeline_with_files_only_config():
    """A single pipeline listing only `files` entries parses and validates."""
    yaml_pipelines_config = """
    pipelines:
    - name: pytest
      type: test
      coverage: .coverage
      files:
      - path:       foo_file.py
        full-scope: yes
      - path:       bar_file.py
        full-scope: no
    """
    yaml_pipelines = read_and_validate_config(yaml_pipelines_config)
    expected_yaml_instance = YAML(
        OrderedDict([
            ('pipelines', [
                OrderedDict([
                    ('name', 'pytest'),
                    ('type', 'test'),
                    ('coverage', '.coverage'),
                    ('files', [
                        OrderedDict([('path', 'foo_file.py'), ('full-scope', True)]),
                        OrderedDict([('path', 'bar_file.py'), ('full-scope', False)]),
                    ]),
                ])
            ])
        ])
    )
    # Comparing a bool with `== True` is an anti-pattern (flake8 E712);
    # assert the value directly.
    assert is_pipelines_config_valid(yaml_pipelines)
    assert yaml_pipelines == expected_yaml_instance
# Example #23
# 0
def test_read_valid_parent_key_config():
    """A config containing only the bare `pipelines:` key still parses."""
    yaml_config = """
    pipelines:
    """
    parsed = read_and_validate_config(yaml_config)
    assert parsed == YAML(OrderedDict([('pipelines', '')]))