Code example #1
 def _add_integration_object(self, filename: Path, file: object,
                             error: RecordFluxError) -> None:
     try:
         self._packages[filename.stem] = IntegrationFile.parse_obj(file)
     except ValidationError as e:
         error.extend([(f"{e}", Subsystem.PARSER, Severity.ERROR,
                        self._to_location(filename.stem))])
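This method converts a parsed .rfi object into the pydantic model IntegrationFile and turns any ValidationError into a (message, subsystem, severity, location) entry on the shared RecordFluxError. Below is a self-contained sketch of that conversion pattern using a toy pydantic model rather than the RecordFlux types; it assumes pydantic v1, matching the parse_obj call above.

from typing import Dict

from pydantic import BaseModel, ValidationError


class ToyBufferSize(BaseModel):  # illustrative stand-in, not the RecordFlux schema
    Default: int


class ToyIntegrationFile(BaseModel):  # illustrative stand-in
    Session: Dict[str, ToyBufferSize]


collected = []
try:
    ToyIntegrationFile.parse_obj({"Session": {"S": {"Default": "not a number"}}})
except ValidationError as e:
    # Mirror the (message, subsystem, severity, location) tuple used above.
    collected.append((f"{e}", "PARSER", "ERROR", "p.rfi:0:0"))

# Under pydantic v1 the message contains "value is not a valid integer".
assert "not a valid integer" in collected[0][0]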
Code example #2
def test_rfi_get_size() -> None:
    integration = Integration()
    session_object = {
        "Session": {
            "S": {
                "Buffer_Size": {
                    "Default": 1024,
                    "Global": {
                        "Y": 2048,
                        "Z": 512,
                    },
                    "Local": {
                        "S": {
                            "Y": 8192,
                        }
                    },
                }
            }
        }
    }
    error = RecordFluxError()
    # pylint: disable = protected-access
    integration._add_integration_object(Path("p.rfi"), session_object, error)
    error.propagate()
    assert integration.get_size(ID("P::S"), None, None) == 1024
    assert integration.get_size(ID("P::S"), ID("X"), ID("S")) == 1024
    assert integration.get_size(ID("P::S"), ID("X"), ID("S")) == 1024
    assert integration.get_size(ID("P::S"), ID("X"), None) == 1024
    assert integration.get_size(ID("P::S2"), ID("X"), None) == 4096
    assert integration.get_size(ID("P::S"), ID("Y"), None) == 2048
    assert integration.get_size(ID("P::S"), ID("Y"), ID("S")) == 8192
    assert integration.get_size(ID("P::S"), ID("Z"), None) == 512
    assert integration.get_size(ID("P::S"), ID("Z"), ID("S")) == 512
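The assertions above pin down the lookup order for buffer sizes: a Local entry for the current state takes precedence over a Global entry, which takes precedence over Default, and a session without any integration entry falls back to a project-wide size (4096 here). The following standalone sketch reproduces that precedence; the function and constant names are illustrative and not the Integration.get_size internals.

from typing import Optional

FALLBACK_SIZE = 4096  # assumed project-wide default for sessions without an entry


def resolve_size(entry: Optional[dict], variable: Optional[str], state: Optional[str]) -> int:
    # Precedence implied by the test: Local[state][variable] > Global[variable] > Default.
    if entry is None:
        return FALLBACK_SIZE
    if state is not None and variable is not None:
        local = entry.get("Local", {}).get(state, {})
        if variable in local:
            return local[variable]
    if variable is not None and variable in entry.get("Global", {}):
        return entry["Global"][variable]
    return entry["Default"]


buffer_size = {
    "Default": 1024,
    "Global": {"Y": 2048, "Z": 512},
    "Local": {"S": {"Y": 8192}},
}
assert resolve_size(buffer_size, None, None) == 1024
assert resolve_size(buffer_size, "X", "S") == 1024
assert resolve_size(buffer_size, "Y", None) == 2048
assert resolve_size(buffer_size, "Y", "S") == 8192
assert resolve_size(buffer_size, "Z", "S") == 512
assert resolve_size(None, "X", None) == FALLBACK_SIZE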
Code example #3
File: declaration.py Project: Componolit/RecordFlux
    def check_type(self, declaration_type: rty.Type,
                   typify_variable: Callable[[Expr], Expr]) -> RecordFluxError:
        self.type_ = declaration_type
        expression = self.expression.substituted(typify_variable)
        assert isinstance(expression, Selected)
        self.expression = expression

        error = self.expression.prefix.check_type_instance(rty.Message)
        if error.errors:
            return error

        assert isinstance(self.expression.prefix.type_, rty.Message)

        error = RecordFluxError()
        for r in self.expression.prefix.type_.refinements:
            if ID(r.field) == self.expression.selector and r.sdu.is_compatible(
                    declaration_type):
                break
        else:
            error.extend([
                (
                    f'invalid renaming to "{self.identifier}"',
                    Subsystem.MODEL,
                    Severity.ERROR,
                    self.location,
                ),
                (
                    f'refinement for message "{self.expression.prefix.type_.identifier}"'
                    " would make operation legal",
                    Subsystem.MODEL,
                    Severity.INFO,
                    self.location,
                ),
            ], )
        return error + self.expression.check_type(rty.OPAQUE)
Code example #4
File: cli.py Project: Componolit/RecordFlux
def parse(
    files: Sequence[Path],
    skip_verification: bool = False,
    workers: int = 1,
    integration_files_dir: Optional[Path] = None,
) -> Tuple[Model, Integration]:
    parser = Parser(skip_verification,
                    cached=True,
                    workers=workers,
                    integration_files_dir=integration_files_dir)
    error = RecordFluxError()
    present_files = []

    for f in files:
        if not f.is_file():
            error.extend([(f'file not found: "{f}"', Subsystem.CLI,
                           Severity.ERROR, None)])
            continue

        present_files.append(Path(f))

    try:
        parser.parse(*present_files)
    except RecordFluxError as e:
        error.extend(e)

    try:
        model = parser.create_model()
    except RecordFluxError as e:
        error.extend(e)

    error.propagate()
    return model, parser.get_integration()
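A hedged usage sketch for this entry point; the rflx.cli and rflx.error import paths are assumptions based on the file names shown in these snippets, and the specification file name is illustrative.

from pathlib import Path

from rflx.cli import parse               # assumed module path (file shown as cli.py)
from rflx.error import RecordFluxError   # assumed module path

try:
    model, integration = parse([Path("message.rflx")], workers=4)
except RecordFluxError as e:
    print(e)  # propagate() raised with all collected errors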
Code example #5
File: typing_.py Project: Componolit/RecordFlux
def check_type(
    actual: Type,
    expected: ty.Union[Type, ty.Tuple[Type, ...]],
    location: ty.Optional[Location],
    description: str,
) -> RecordFluxError:
    assert expected, "empty expected types"

    if actual == Undefined():
        return _undefined_type(location, description)

    error = RecordFluxError()

    expected_types = [expected] if isinstance(expected,
                                              Type) else list(expected)

    if Undefined() not in [actual, expected] and all(
            not actual.is_compatible(t) for t in expected_types):
        desc = (" or ".join(map(str, expected_types)) if isinstance(
            expected, tuple) else str(expected))
        error.extend([
            (f"expected {desc}", Subsystem.MODEL, Severity.ERROR, location),
            (f"found {actual}", Subsystem.MODEL, Severity.INFO, location),
        ], )

    return error
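check_type returns an error container instead of raising, so callers can accumulate the results of several checks and raise only once. A sketch of that accumulate-then-propagate pattern, using only the RecordFluxError API visible in these snippets (the rflx.error import path is an assumption):

from rflx.error import RecordFluxError, Severity, Subsystem  # assumed module path

error = RecordFluxError()
error.extend([("expected Integer", Subsystem.MODEL, Severity.ERROR, None)])
error.extend([("found Message", Subsystem.MODEL, Severity.INFO, None)])

try:
    error.propagate()  # expected to raise here, since an ERROR entry was added
except RecordFluxError as e:
    print(e)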
Code example #6
 def _validate_states(package: str, integration: SessionIntegration,
                      session: Session, error: RecordFluxError) -> None:
     if integration.buffer_size.local_ is None:
         return
     for state_name, state_entry in integration.buffer_size.local_.items():
         state = None
         for s in session.states:
             if str(s.identifier.name) == state_name:
                 state = s
         if state is None:
             error.extend([(
                 (f'unknown state "{state_name}" in session '
                  f'"{session.identifier.name}"'),
                 Subsystem.PARSER,
                 Severity.ERROR,
                 Integration._to_location(package),
             )])
             return
         state_declaration_vars = [
             str(x.name) for x in state.declarations.keys()
         ]
         for var_name in state_entry.keys():
             if var_name not in state_declaration_vars:
                 error.extend([(
                     (f'unknown variable "{var_name}" in state '
                      f'"{state_name}" of session "{session.identifier.name}"'
                      ),
                     Subsystem.PARSER,
                     Severity.ERROR,
                     Integration._to_location(package),
                 )])
Code example #7
    def __init__(self,
                 identifier: Union[str, Sequence[str], ID],
                 location: Location = None) -> None:
        self._parts: Sequence[str]
        self.location = location

        if isinstance(identifier, str):
            self._parts = re.split(r"\.|::", identifier)
        elif isinstance(identifier, Sequence):
            self._parts = identifier
        elif isinstance(identifier, ID):
            self._parts = list(identifier.parts)
            self.location = location or identifier.location
        else:
            assert False, f'unexpected identifier type "{type(identifier).__name__}"'

        error = RecordFluxError()
        if not self._parts:
            fatal_fail("empty identifier", Subsystem.ID, Severity.ERROR,
                       location)
        elif "" in self._parts:
            fatal_fail(f'empty part in identifier "{self}"', Subsystem.ID,
                       Severity.ERROR, location)
        else:
            for c in [" ", ".", ":"]:
                if any(c in part for part in self._parts):
                    fatal_fail(
                        f'"{c}" in identifier parts of "{self}"',
                        Subsystem.ID,
                        Severity.ERROR,
                        location,
                    )
        error.propagate()
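The re.split call above accepts both "." and "::" as part separators. A quick standalone check of that behavior:

import re

assert re.split(r"\.|::", "Package::Message") == ["Package", "Message"]
assert re.split(r"\.|::", "Package.Message.Field") == ["Package", "Message", "Field"]
assert re.split(r"\.|::", "A::B.C") == ["A", "B", "C"]  # mixed separators also split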
Code example #8
def parse_unproven_session(string: str) -> model.UnprovenSession:
    unit = lang.AnalysisContext().get_from_buffer(
        "<stdin>", string, rule=lang.GrammarRule.session_declaration_rule
    )
    error = RecordFluxError()
    if diagnostics_to_error(unit.diagnostics, error, STDIN):
        error.propagate()
    assert isinstance(unit.root, lang.SessionDecl)
    return create_unproven_session(unit.root, ID("Package"), Path("<stdin>"))
Code example #9
File: typing_.py Project: Componolit/RecordFlux
def _undefined_type(location: ty.Optional[Location],
                    description: str = "") -> RecordFluxError:
    error = RecordFluxError()
    error.extend([(
        "undefined" + (f" {description}" if description else ""),
        Subsystem.MODEL,
        Severity.ERROR,
        location,
    )], )
    return error
Code example #10
def parse(
    data: str,
    rule: str,
) -> Tuple[lang.RFLXNode, pathlib.Path]:
    unit = lang.AnalysisContext().get_from_buffer("<stdin>", data, rule=rule)
    error = RecordFluxError()
    if diagnostics_to_error(unit.diagnostics, error, STDIN):
        error.propagate()
    assert isinstance(unit.root, lang.RFLXNode)
    return (unit.root, STDIN)
Code example #11
 def _validate_globals(package: str, integration: SessionIntegration,
                       session: Session, error: RecordFluxError) -> None:
     if integration.buffer_size.global_ is None:
         return
     session_decl_vars = [str(x.name) for x in session.declarations.keys()]
     for var_name in integration.buffer_size.global_.keys():
         if var_name not in session_decl_vars:
             error.extend([(
                 (f'unknown global variable "{var_name}" '
                  f'in session "{session.identifier.name}"'),
                 Subsystem.PARSER,
                 Severity.ERROR,
                 Integration._to_location(package),
             )])
Code example #12
def check(spec_file: Path) -> RecordFluxError:
    error = RecordFluxError()

    with open(spec_file, encoding="utf-8", newline="") as f:
        specification = f.read()

    if not specification:
        return error

    lines = specification.split("\n")
    enabled_checks = _determine_enabled_checks(error, lines[0], spec_file)

    if not enabled_checks:
        return error

    blank_lines = 0

    for i, l in enumerate(lines, start=1):
        if Check.BLANK_LINES in enabled_checks:
            blank_lines = _check_blank_lines(error, l, i, spec_file, blank_lines, len(lines))
        if Check.CHARACTERS in enabled_checks:
            _check_characters(error, l, i, spec_file)
        if Check.INDENTATION in enabled_checks:
            _check_indentation(error, l, i, spec_file)
        if Check.LINE_LENGTH in enabled_checks:
            _check_line_length(error, l, i, spec_file)
        if Check.TOKEN_SPACING in enabled_checks:
            _check_token_spacing(error, l, i, spec_file)
        if Check.TRAILING_SPACES in enabled_checks:
            _check_trailing_spaces(error, l, i, spec_file)

    return error
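Because check returns the error container rather than raising, a caller can aggregate style findings across several specification files and propagate once. A hedged sketch (the rflx.error import path and the file names are assumptions; check is the function defined above):

from pathlib import Path

from rflx.error import RecordFluxError  # assumed module path

error = RecordFluxError()
for spec in [Path("a.rflx"), Path("b.rflx")]:
    error.extend(check(spec))  # extend with another RecordFluxError, as in the CLI snippets
error.propagate()  # raises if any style check reported an error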
Code example #13
def test_load_integration_file(
    tmp_path: Path, content: str, error_msg: Sequence[str], line: int, column: int
) -> None:
    test_rfi = tmp_path / "test.rfi"
    test_rfi.write_text(content)
    integration = Integration()
    error = RecordFluxError()
    regex = rf"^{test_rfi}:{line}:{column}: parser: error: "
    for elt in error_msg:
        regex += elt
        regex += rf'.*in "{test_rfi}", line [0-9]+, column [0-9]+.*'
    regex += "$"
    compiled_regex = re.compile(regex, re.DOTALL)
    with pytest.raises(RecordFluxError, match=compiled_regex):
        integration.load_integration_file(test_rfi, error)
        error.propagate()
Code example #14
File: parser.py Project: kug1977/RecordFlux
    def __parse(self,
                specfile: Path,
                transitions: List[Tuple[ID, ID]] = None) -> None:
        error = RecordFluxError()
        log.info("Parsing %s", specfile)

        if not transitions:
            transitions = []

        with open(specfile, "r") as filehandle:
            push_source(specfile)
            try:
                for specification in grammar.unit().parseFile(filehandle):
                    check_naming(error, specification.package, specfile.name)
                    self.__specifications.appendleft(specification)
                    for item in specification.context.items:
                        transition = (specification.package.identifier, item)
                        if transition in transitions:
                            error.append(
                                f'dependency cycle when including "{transitions[0][1]}"',
                                Subsystem.PARSER,
                                Severity.ERROR,
                                transitions[0][1].location,
                            )
                            error.extend([(
                                f'when including "{i}"',
                                Subsystem.PARSER,
                                Severity.INFO,
                                i.location,
                            ) for _, i in transitions[1:]])
                            continue
                        transitions.append(transition)
                        self.__parse(
                            specfile.parent / f"{str(item).lower()}.rflx",
                            transitions)
            except (ParseException, ParseFatalException) as e:
                error.append(
                    e.msg,
                    Subsystem.PARSER,
                    Severity.ERROR,
                    parser_location(e.loc, e.loc, e.pstr, specfile),
                )
            finally:
                pop_source()

        error.propagate()
Code example #15
def test_load_integration_path(tmp_path: Path) -> None:
    subfolder = tmp_path / "sub"
    subfolder.mkdir()
    test_rfi = subfolder / "test.rfi"
    test_rfi.write_text("{ Session: { Session : { Buffer_Size : 0 }}}")
    integration = Integration(integration_files_dir=subfolder)
    error = RecordFluxError()
    regex = re.compile(
        (
            r"test.rfi:0:0: parser: error: 1 validation error for IntegrationFile.*"
            r"value is not a valid dict \(type=type_error.dict\)"
        ),
        re.DOTALL,
    )
    with pytest.raises(RecordFluxError, match=regex):
        integration.load_integration_file(tmp_path / "test.rflx", error)
        error.propagate()
Code example #16
def test_rfi_add_integration(rfi_content: str, match_error: str) -> None:
    # pydantic messages end with the type of the error in parentheses.
    regex = re.compile(
        (
            "^test.rfi:0:0: parser: error: 1 validation error for "
            rf"IntegrationFile.*{match_error} \([^()]*\)$"
        ),
        re.DOTALL,
    )
    yaml = YAML()
    content = yaml.load(rfi_content)
    error = RecordFluxError()
    integration = Integration()
    with pytest.raises(RecordFluxError, match=regex):
        # pylint: disable = protected-access
        integration._add_integration_object(Path("test.rfi"), content, error)
        error.propagate()
Code example #17
File: declaration.py Project: Componolit/RecordFlux
    def check_type(self, declaration_type: rty.Type,
                   typify_variable: Callable[[Expr], Expr]) -> RecordFluxError:
        self.type_ = declaration_type

        if self.expression is not None:
            self.expression = self.expression.substituted(typify_variable)
            return self.expression.check_type(self.type_)

        return RecordFluxError()
Code example #18
def _append(
    error: RecordFluxError,
    message: str,
    row: int,
    col: int,
    spec_file: Path,
    check_type: Check = None,
) -> None:
    error.extend(
        [
            (
                message + (f" [{check_type.value}]" if check_type else ""),
                Subsystem.STYLE,
                Severity.ERROR,
                Location((row, col), spec_file),
            )
        ]
    )
Code example #19
 def validate(self, model: Model, error: RecordFluxError) -> None:
     for package, integration_file in self._packages.items():
         for session_name, integration in integration_file.session.items():
             matching_sessions = [
                 s for s in model.sessions
                 if package == str(s.package).lower()
                 and str(s.identifier.name) == session_name
             ]
             if not matching_sessions:
                 error.extend([(
                     f'unknown session "{session_name}"',
                     Subsystem.PARSER,
                     Severity.ERROR,
                     Integration._to_location(package),
                 )])
                 return
             assert len(matching_sessions) == 1
             session = matching_sessions[0]
             self._validate_globals(package, integration, session, error)
             self._validate_states(package, integration, session, error)
Code example #20
File: parser.py Project: kug1977/RecordFlux
def check_naming(error: RecordFluxError,
                 package: PackageSpec,
                 filename: str = None) -> None:
    if str(package.identifier).startswith("RFLX"):
        error.append(
            f'illegal prefix "RFLX" in package identifier "{package.identifier}"',
            Subsystem.PARSER,
            Severity.ERROR,
            package.identifier.location,
        )
    if package.identifier != package.end_identifier:
        error.append(
            f'inconsistent package identifier "{package.end_identifier}"',
            Subsystem.PARSER,
            Severity.ERROR,
            package.end_identifier.location,
        )
        error.append(
            f'previous identifier was "{package.identifier}"',
            Subsystem.PARSER,
            Severity.INFO,
            package.identifier.location,
        )
    if filename:
        expected_filename = f"{str(package.identifier).lower()}.rflx"
        if filename != expected_filename:
            error.append(
                f'file name does not match unit name "{package.identifier}",'
                f' should be "{expected_filename}"',
                Subsystem.PARSER,
                Severity.ERROR,
                package.identifier.location,
            )
    for t in package.types:
        if is_builtin_type(t.identifier.name):
            error.append(
                f'illegal redefinition of built-in type "{t.identifier.name}"',
                Subsystem.MODEL,
                Severity.ERROR,
                t.location,
            )
Code example #21
 def load_integration_file(self, spec_file: Path,
                           error: RecordFluxError) -> None:
     integration_file = (spec_file.with_suffix(".rfi")
                         if self._integration_files_dir is None else
                         self._integration_files_dir /
                         (spec_file.stem + ".rfi"))
     if integration_file.exists():
         yaml = YAML()
         try:
             content = yaml.load(integration_file)
         except MarkedYAMLError as e:
             location = Location(
                 start=((0, 0) if e.problem_mark is None else
                        (e.problem_mark.line + 1,
                         e.problem_mark.column + 1)),
                 source=integration_file,
             )
             error.extend([(str(e), Subsystem.PARSER, Severity.ERROR,
                            location)])
             return
         self._add_integration_object(integration_file, content, error)
Code example #22
def parse(files: List) -> Model:
    parser = Parser()

    error = RecordFluxError()
    for f in files:
        if not Path(f).is_file():
            error.append(f'file not found: "{f}"', Subsystem.CLI,
                         Severity.ERROR)
            continue
        try:
            parser.parse(Path(f))
        except RecordFluxError as e:
            error.extend(e)

    try:
        model = parser.create_model()
    except RecordFluxError as e:
        error.extend(e)

    error.propagate()
    return model
Code example #23
File: typing_.py Project: Componolit/RecordFlux
def check_type_instance(
    actual: Type,
    expected: ty.Union[ty.Type[Type], ty.Tuple[ty.Type[Type], ...]],
    location: ty.Optional[Location],
    description: str = "",
) -> RecordFluxError:
    assert expected, "empty expected types"

    if actual == Undefined():
        return _undefined_type(location, description)

    error = RecordFluxError()

    if not isinstance(actual, expected) and actual != Any():
        desc = (" or ".join(e.DESCRIPTIVE_NAME for e in expected)
                if isinstance(expected, tuple) else expected.DESCRIPTIVE_NAME)
        error.extend([
            (f"expected {desc}", Subsystem.MODEL, Severity.ERROR, location),
            (f"found {actual}", Subsystem.MODEL, Severity.INFO, location),
        ], )

    return error
Code example #24
File: parser.py Project: kug1977/RecordFlux
    def __evaluate_types(self, spec: Specification,
                         error: RecordFluxError) -> None:
        for t in spec.package.types:
            t.identifier = ID(f"{spec.package.identifier}.{t.name}",
                              t.identifier.location)

            if t.identifier in self.__types:
                error.append(
                    f'duplicate type "{t.identifier}"',
                    Subsystem.PARSER,
                    Severity.ERROR,
                    t.location,
                )
                error.append(
                    f'previous occurrence of "{t.identifier}"',
                    Subsystem.PARSER,
                    Severity.INFO,
                    self.__types[t.identifier].location,
                )
                continue

            new_type: Type

            try:
                if isinstance(t, Scalar):
                    new_type = t

                elif isinstance(t, ArraySpec):
                    new_type = create_array(t, self.__types)

                elif isinstance(t, MessageSpec):
                    new_type = create_message(t, self.__types)

                elif isinstance(t, DerivationSpec):
                    new_type = create_derived_message(t, self.__types)

                elif isinstance(t, RefinementSpec):
                    new_type = create_refinement(t, self.__types)

                else:
                    raise NotImplementedError(
                        f'unsupported type "{type(t).__name__}"')

                self.__types[t.identifier] = new_type
                error.extend(new_type.error)

            except RecordFluxError as e:
                error.extend(e)
Code example #25
File: parser.py Project: kug1977/RecordFlux
 def create_model(self) -> Model:
     error = RecordFluxError()
     for specification in self.__specifications:
         if specification.package.identifier in self.__evaluated_specifications:
             continue
         self.__evaluated_specifications.add(
             specification.package.identifier)
         try:
             self.__evaluate_specification(specification)
         except RecordFluxError as e:
             error.extend(e)
     try:
         result = Model(list(self.__types.values()))
     except RecordFluxError as e:
         error.extend(e)
     error.propagate()
     return result
Code example #26
def assert_error(filename: pathlib.Path, expected: Sequence[str]) -> None:
    p = specification.Parser()
    with pytest.raises(RecordFluxError) as excinfo:
        error = RecordFluxError()

        try:
            p.parse(filename)
        except RecordFluxError as e:
            error.extend(e)

        try:
            p.create_model()
        except RecordFluxError as e:
            error.extend(e)

        error.propagate()

    assert str(excinfo.value).split("\n") == expected
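A hypothetical call to assert_error; the file name and the expected message are illustrative only, but the message format follows the "<file>:<line>:<column>: parser: error: ..." pattern seen in the other tests here.

import pathlib

assert_error(
    pathlib.Path("invalid.rflx"),  # illustrative file, not part of the test data
    ['invalid.rflx:1:9: parser: error: illegal prefix "RFLX" in package identifier "RFLX_Test"'],
)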
Code example #27
    def __init__(self,
                 identifier: StrID,
                 location: Location = None,
                 error: RecordFluxError = None) -> None:
        identifier = ID(identifier)
        self.error = error or RecordFluxError()

        if len(identifier.parts) != 2:
            self.error.extend([(
                f'invalid format for identifier "{identifier}"',
                Subsystem.MODEL,
                Severity.ERROR,
                identifier.location,
            )], )

        self.identifier = identifier
        self.location = location
Code example #28
    def _check_duplicates(self) -> RecordFluxError:
        error = RecordFluxError()
        types: Dict[ID, type_.Type] = {}
        sessions: Dict[ID, session.Session] = {}

        for t in self._types:
            if t.identifier in types:
                error.extend(
                    [
                        (
                            f'conflicting refinement of "{t.pdu.identifier}" with'
                            f' "{t.sdu.identifier}"'
                            if isinstance(t, message.Refinement)
                            else f'name conflict for type "{t.identifier}"',
                            Subsystem.MODEL,
                            Severity.ERROR,
                            t.location,
                        ),
                        (
                            "previous occurrence of refinement"
                            if isinstance(t, message.Refinement)
                            else f'previous occurrence of "{t.identifier}"',
                            Subsystem.MODEL,
                            Severity.INFO,
                            types[t.identifier].location,
                        ),
                    ],
                )
            types[t.identifier] = t

        for s in self._sessions:
            if s.identifier in types or s.identifier in sessions:
                error.extend(
                    [
                        (
                            f'name conflict for session "{s.identifier}"',
                            Subsystem.MODEL,
                            Severity.ERROR,
                            s.location,
                        ),
                        (
                            f'previous occurrence of "{s.identifier}"',
                            Subsystem.MODEL,
                            Severity.INFO,
                            types[s.identifier].location
                            if s.identifier in types
                            else sessions[s.identifier].location,
                        ),
                    ],
                )
            sessions[s.identifier] = s

        return error
Code example #29
File: parser.py Project: kug1977/RecordFlux
def create_message(message: MessageSpec, types: Mapping[ID, Type]) -> Message:
    components = list(message.components)

    if components and components[0].name:
        components.insert(0, Component())

    field_types: Dict[Field, Type] = {}

    error = RecordFluxError()

    for component in components:
        if component.name and component.type_name:
            type_name = qualified_type_name(component.type_name,
                                            message.package)
            if type_name not in types:
                continue
            field_types[Field(component.name)] = types[type_name]

    structure: List[Link] = []

    for i, component in enumerate(components):
        if not component.name:
            error.extend([(
                "invalid first expression",
                Subsystem.PARSER,
                Severity.ERROR,
                then.first.location,
            ) for then in component.thens if then.first != UNDEFINED])

        source_node = Field(component.name) if component.name else INITIAL

        if not component.thens:
            name = components[i + 1].name if i + 1 < len(components) else None
            target_node = Field(name) if name else FINAL
            structure.append(Link(source_node, target_node))

        for then in component.thens:
            target_node = Field(then.name) if then.name else FINAL
            if then.name and target_node not in field_types.keys():
                error.append(
                    f'undefined field "{then.name}"',
                    Subsystem.PARSER,
                    Severity.ERROR,
                    then.name.location if then.name else None,
                )
                continue
            structure.append(
                Link(source_node, target_node, then.condition, then.length,
                     then.first, then.location))

    return (UnprovenMessage(message.identifier, structure, field_types,
                            message.location, error).merged().proven())
Code example #30
File: parser.py Project: kug1977/RecordFlux
 def parse_string(self, string: str) -> None:
     error = RecordFluxError()
     try:
         for specification in grammar.unit().parseString(string):
             self.__specifications.appendleft(specification)
             check_naming(error, specification.package)
     except (ParseException, ParseFatalException) as e:
         error.append(
             e.msg,
             Subsystem.PARSER,
             Severity.ERROR,
             parser_location(e.loc, e.loc, e.pstr),
         )
     error.propagate()
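A hedged usage sketch for parse_string; the Parser construction mirrors the other snippets, and the minimal specification text follows the "package ... is ... end ...;" syntax visible in check_naming above (an empty package is assumed to be valid).

parser = Parser()
parser.parse_string("package Test is end Test;")  # propagate() raises on parse or naming errors
model = parser.create_model()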