Example No. 1
def test_merge():
    """Tests merge operator."""
    # taken from spec: http://yaml.org/type/merge.html
    document = ("anchors:\n"
                "  - &CENTER { x: 1, y: 2 }\n"
                "  - &LEFT { x: 0, y: 2 }\n"
                "  - &BIG { r: 10 }\n"
                "  - &SMALL { r: 1 }\n"
                "maps:\n"
                "  - # Merge one map\n"
                "    << : *CENTER\n"
                "    r: 10\n"
                "  - # Merge multiple maps\n"
                "    << : [ *CENTER, *BIG ]\n"
                "  - # Override\n"
                "    << : [ *BIG, *LEFT, *SMALL ]\n"
                "    x: 1\n")
    loader = Loader()
    root = loader.load(document)
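    # after merging, every entry under 'maps' should resolve to x: 1, y: 2, r: 10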

    assert root.get_node_at_path('/maps/0/x').value == 1
    assert root.get_node_at_path('/maps/0/y').value == 2
    assert root.get_node_at_path('/maps/0/r').value == 10
    assert root.get_node_at_path('/maps/1/x').value == 1
    assert root.get_node_at_path('/maps/1/y').value == 2
    assert root.get_node_at_path('/maps/1/r').value == 10
    assert root.get_node_at_path('/maps/2/x').value == 1
    assert root.get_node_at_path('/maps/2/y').value == 2
    assert root.get_node_at_path('/maps/2/r').value == 10
Example No. 2
def test_anchor_alias():
    """Tests the YAML reference using anchor-alias."""
    doc = ("map: &map\n"
           "  value: &val 42\n"
           "  sequence: &seq\n"
           "    - 1\n"
           "    - 2\n"
           "map2: *map\n"
           "seq2: *seq\n"
           "val2: *val")
    loader = Loader()
    root = loader.load(doc)
    equal_values = [
        ('/map/value', '/map2/value'),
        ('/map/sequence/0', '/map2/sequence/0'),
        ('/map/sequence/1', '/map2/sequence/1'),
        ('/map/sequence/0', '/seq2/0'),
        ('/map/sequence/1', '/seq2/1'),
        ('/map/value', '/val2'),
    ]
    for path1, path2 in equal_values:
        value1 = root.get_node_at_path(path1).value
        value2 = root.get_node_at_path(path2).value
        assert value1 == value2
Example No. 3
def test_alias_errors():
    """Tests invalid aliases."""
    document = ("- &r text\n" "- *x\n" "- *r\n" "- *y")
    loader = Loader()
    loader.load(document)
    assert len(loader.notification_handler.notifications) == 2
Example No. 4
def test_extract_node_properties():
    """Test extracting `TextValue` of node's tag and anchor."""
    doc = ("simple_tag: !tag 1\n"
           "tag_and_anchor: !tag &anchor 1\n"
           "anchor_and_tag: &anchor !tag 1\n"
           "multiline_tag_anchor:\n"
           "  !tag\n"
           "  &anchor\n"
           "  - 1")
    loader = Loader()
    loader._document = doc
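    # the mocked event start marks are zero-based (PyYAML convention); the extracted tag/anchor spans below are one-based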

    loader._event = Mock(tag='tag', start_mark=Mock(line=0, column=12))
    tag = loader._extract_tag()
    assert tag.span.start.line == 1
    assert tag.span.start.column == 14
    assert tag.span.end.line == 1
    assert tag.span.end.column == 17

    loader._event = Mock(tag='tag',
                         anchor='anchor',
                         start_mark=Mock(line=1, column=16))
    tag = loader._extract_tag()
    anchor = loader._extract_anchor()
    assert tag.span.start.line == 2
    assert tag.span.start.column == 18
    assert tag.span.end.line == 2
    assert tag.span.end.column == 21
    assert anchor.span.start.line == 2
    assert anchor.span.start.column == 23
    assert anchor.span.end.line == 2
    assert anchor.span.end.column == 29

    loader._event = Mock(tag='tag',
                         anchor='anchor',
                         start_mark=Mock(line=2, column=16))
    tag = loader._extract_tag()
    anchor = loader._extract_anchor()
    assert tag.span.start.line == 3
    assert tag.span.start.column == 26
    assert tag.span.end.line == 3
    assert tag.span.end.column == 29
    assert anchor.span.start.line == 3
    assert anchor.span.start.column == 18
    assert anchor.span.end.line == 3
    assert anchor.span.end.column == 24

    loader._event = Mock(tag='tag',
                         anchor='anchor',
                         start_mark=Mock(line=3, column=20))
    tag = loader._extract_tag()
    anchor = loader._extract_anchor()
    assert tag.span.start.line == 5
    assert tag.span.start.column == 4
    assert tag.span.end.line == 5
    assert tag.span.end.column == 7
    assert anchor.span.start.line == 6
    assert anchor.span.start.column == 4
    assert anchor.span.end.line == 6
    assert anchor.span.end.column == 10
Example No. 5
def test_parse(request=None):
    error_handler = NotificationHandler()
    mockcfg.set_empty_config()

    if request is not None:

        def fin_test_config():
            mockcfg.clean_config()

        request.addfinalizer(fin_test_config)
    loader = Loader(error_handler)

    # parse mapping, scalar
    document = ("format: ascii\n" "file: dual_sorp.vtk")
    root = loader.load(document)
    assert root.children[0].value == 'ascii'

    # parse sequence, scalar
    document = ("- ascii\n" "- utf-8")
    root = loader.load(document)
    assert root.children[1].value == 'utf-8'

    # test complex structure
    mockcfg.load_complex_structure_to_config()

    # test values - are scalars converted to the correct type?
    assert cfg.root.children[0].children[0].children[0].value is None
    assert cfg.root.children[1].children[1].children[0].value is True
    assert cfg.root.children[0].children[1].children[1].value == 0.5
    assert (cfg.root.children[1].children[1].children[1].children[0].
            children[1].value) == 'ALL'
    assert (cfg.root.children[1].children[1].children[1].children[1].
            children[0].value) == 0

    # test node spans - try to get node at certain positions
    assert cfg.root.get_node_at_position(Position(
        5, 5)) == (cfg.root.children[0].children[0].children[0])
    assert cfg.root.get_node_at_position(Position(
        5, 9)) == (cfg.root.children[0].children[0].children[0])
    assert cfg.root.get_node_at_position(Position(
        13, 18)) == (cfg.root.children[1].children[1].children[0])
    assert cfg.root.get_node_at_position(Position(15, 22)) == (
        cfg.root.children[1].children[1].children[1].children[0].children[0])
    assert cfg.root.get_node_at_position(Position(15, 33)) == (
        cfg.root.children[1].children[1].children[1].children[0].children[1])

    # test absolute_path, get_node_at_path
    assert cfg.root.get_node_at_path('/') == cfg.root
    input_fields_node = cfg.root.get_node_at_path(
        '/problem/primary_equation/input_fields')
    assert input_fields_node == cfg.root.children[1].children[1].children[1]
    assert input_fields_node.get_node_at_path('.') == input_fields_node
    assert (input_fields_node.get_node_at_path('./0/r_set') ==
            input_fields_node.children[0].children[1])
    assert (input_fields_node.get_node_at_path(
        '/problem/primary_equation/input_fields/0/r_set') ==
            input_fields_node.children[0].children[1])
    assert input_fields_node.get_node_at_path('../../..') == cfg.root

    with pytest.raises(LookupError):
        cfg.root.get_node_at_path('/invalid/path')

    # test parser error
    document = ("format: ascii\n" "- file: dual_sorp.vtk")
    loader.load(document)
    assert len(loader.notification_handler.notifications) == 1

    # test tag parsing
    document = ("problem: !SequentialCoupling\n" "  test: 1")
    root = loader.load(document)
    assert root.children[0].type.value == 'SequentialCoupling'
    assert root.get_node_at_position(Position(
        1, 11)).type.value == 'SequentialCoupling'

    mockcfg.load_valid_structure_to_config()

    # test get_node_at_path
    assert cfg.root.get_node_at_path('/') == cfg.root
    assert (cfg.root.get_node_at_path('/problem/mesh/mesh_file').value ==
            'input/dual_por.msh')
    assert (cfg.root.children[0].children[0].get_node_at_path(
        '../primary_equation/balance').value is True)

    # test tag
    assert cfg.root.children[0].type.value == 'SequentialCoupling'
    assert cfg.root.children[0].type.span.start.line == 6
    assert cfg.root.children[0].type.span.start.column == 11
    assert cfg.root.children[0].type.span.end.line == 6
    assert cfg.root.children[0].type.span.end.column == 29

    # test ref
    input_fields = cfg.root.children[0].children[1].children[1]
    assert input_fields.children[0].children[0].value == 0
    assert input_fields.children[2].children[0].value == 0

    # test empty abstract record
    node = cfg.root.get_node_at_path('/problem/primary_equation/solver')
    assert node.implementation == DataNode.Implementation.mapping
    assert node.type.value == 'Petsc'

    # test ref errors
    document = ("- &r text\n" "- *x\n" "- *r\n" "- *y")
    loader.notification_handler.clear()
    root = loader.load(document)
    assert len(loader.notification_handler.notifications) == 2
Example No. 6
    def parse(self, yaml_file):
        """
        Parse regions, params and active processes from the .yaml file.
        Also compute hashes of the .yaml and input files.
        """
        err = []

        dir_name = os.path.dirname(yaml_file)

        document = ""
        try:
            try:
                with codecs.open(yaml_file, 'r', 'utf-8') as file_d:
                    document = file_d.read().expandtabs(tabsize=2)
            except UnicodeDecodeError:
                with open(yaml_file, 'r') as file_d:
                    document = file_d.read().expandtabs(tabsize=2)
        except (RuntimeError, IOError) as e:
            err.append("Can't open .yaml file: {0}".format(e))
            return err
        loader = Loader(notification_handler)
        validator = Validator(notification_handler)
        notification_handler.clear()
        root = loader.load(document)
        
        root_input_type, new_err = self._get_root_input_type()
        if len(new_err) > 0:
            err.extend(new_err)
            return err

        # autoconvert
        root = autoconvert(root, root_input_type)

        # validate
        if not validator.validate(root, root_input_type):
            err.append(".yaml file does not have a valid format")
            #return err

        # mesh file
        try:
            node = root.get_node_at_path('/problem/mesh/mesh_file')
            self._mesh_file = node.value
        except LookupError:
            err.append("Can't find node '/problem/mesh/mesh_file'")
            return err

        # active processes
        self._active_processes = {}
        try:
            problem_node = root.get_node_at_path('/problem')
        except LookupError:
            err.append("Can't find node '/problem'")
            return err

        # flow equation
        if "flow_equation" in problem_node.children_keys:
            data = {}

            # output stream file
            try:
                node = problem_node.get_node_at_path('flow_equation/output_stream/file')
                data["output_stream_file"] = node.value
            except LookupError:
                pass

            # observed quantities
            oq = {}
            oq.update(self._get_observed_quantities(problem_node, 'flow_equation/output/observe_fields'))
            if len(oq) > 0:
                data["observed_quantities"] = oq

            # balance file
            try:
                node = problem_node.get_node_at_path('flow_equation/balance/file')
                data["balance_file"] = node.value
            except LookupError:
                data["balance_file"] = "water_balance.txt"
            self._active_processes["flow_equation"] = data

        # solute equation
        if "solute_equation" in problem_node.children_keys:
            data = {}

            # output stream file
            try:
                node = problem_node.get_node_at_path('solute_equation/output_stream/file')
                data["output_stream_file"] = node.value
            except LookupError:
                pass

            # observed quantities
            oq = {}
            oq.update(self._get_observed_quantities(problem_node, 'solute_equation/transport/output/observe_fields'))
            oq.update(self._get_observed_quantities(problem_node, 'solute_equation/reaction/output/observe_fields'))
            oq.update(self._get_observed_quantities(problem_node, 'solute_equation/reaction/reaction_mobile/output/observe_fields'))
            oq.update(self._get_observed_quantities(problem_node, 'solute_equation/reaction/reaction_immobile/output/observe_fields'))
            if len(oq) > 0:
                data["observed_quantities"] = oq

            # balance file
            try:
                node = problem_node.get_node_at_path('solute_equation/balance/file')
                data["balance_file"] = node.value
            except LookupError:
                data["balance_file"] = "mass_balance.txt"

            # substances
            sub = []
            try:
                node = problem_node.get_node_at_path('solute_equation/substances')
                for child in node.children:
                    name = child.get_child("name")
                    if name is not None:
                        sub.append(name.value)
            except LookupError:
                pass
            data["substances"] = sub

            self._active_processes["solute_equation"] = data

        # heat equation
        if "heat_equation" in problem_node.children_keys:
            data = {}

            # output stream file
            try:
                node = problem_node.get_node_at_path('heat_equation/output_stream/file')
                data["output_stream_file"] = node.value
            except LookupError:
                pass

            # observed quantities
            oq = {}
            oq.update(self._get_observed_quantities(problem_node, 'heat_equation/output/observe_fields'))
            if len(oq) > 0:
                data["observed_quantities"] = oq

            # balance file
            try:
                node = problem_node.get_node_at_path('heat_equation/balance/file')
                data["balance_file"] = node.value
            except LookupError:
                data["balance_file"] = "energy_balance.txt"

            self._active_processes["heat_equation"] = data

        # params
        self._params = sorted(list(set(RE_PARAM.findall(document))))

        # regions
        mesh_dict = {}
        mesh_file_path = os.path.join(dir_name, os.path.normpath(self._mesh_file))
        try:
            with open(mesh_file_path, 'r') as file_d:
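                # scan the mesh file for the $PhysicalNames section; each entry maps a quoted region name to its numeric ID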
                line = file_d.readline()
                while (len(line) > 0) and (line.split()[0] != "$PhysicalNames"):
                    line = file_d.readline()
                line = file_d.readline()
                if len(line) > 0:
                    for i in range(int(line)):
                        s = file_d.readline().split()
                        mesh_dict[s[2][1:-1]] = s[1]
        except (RuntimeError, IOError) as e:
            err.append("Can't open mesh file: {0}".format(e))
            return err

        self._regions = list(mesh_dict.keys())
        self._regions.append(".IMPLICIT_BOUNDARY")
        self._regions.append("ALL")
        self._regions.sort()

        # input files
        self._input_files = []

        def crawl(node):
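            # recursively collect unique values of nodes whose input type is an input FileName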
            for child in node.children:
                it = child.input_type
                if (it is not None) and (it["base_type"] == "FileName") and \
                        (it["file_mode"] == "input") and (child.value not in self._input_files):
                    self._input_files.append(child.value)
                crawl(child)

        crawl(root)

        # .yaml file hash
        e, self._yaml_file_hash = self.file_hash(yaml_file)
        err.extend(e)

        # input files hashes
        self._input_files_hashes = {}
        for file in self._input_files:
            file_path = os.path.join(dir_name, os.path.normpath(file))
            e, hash = self.file_hash(file_path)
            err.extend(e)
            self._input_files_hashes[file] = hash

        return err
Example No. 7
 def transform(self, yaml, cfg):
     """transform yaml file"""
     self.err = []
     notification_handler = NotificationHandler()
     loader = Loader(notification_handler)
     root = loader.load(yaml)
     lines = yaml.splitlines(False)
     changes = False
     text = cfg.get_curr_format_text()
     root_input_type = get_root_input_type_from_json(text)
     for aaction in self._transformation['actions']:
         if changes:
             root, lines = self.refresh(root_input_type, yaml,
                                        notification_handler, loader)
             changes = False
         for action in self._replace_wildchars(root, aaction):
             if changes:
                 root, lines = self.refresh(root_input_type, yaml,
                                            notification_handler, loader)
             if action['action'] == "delete-key":
                 changes = self._delete_key(root, lines, action)
             elif action['action'] == "move-key":
                 changes = self._move_key(root, lines, action)
                 if changes and "set_type_path" in action['parameters']:
                     yaml = "\n".join(lines)
                     root, lines = self.refresh(root_input_type, yaml,
                                                notification_handler,
                                                loader)
                     changes = False
                     try:
                         node = root.get_node_at_path(
                             action['parameters']['set_type_path'])
                         if node.type is not None:
                             StructureChanger.change_tag(
                                 lines, node, node.type.value,
                                 action['parameters']['new_type'].strip())
                         else:
                             StructureChanger.add_tag(
                                 lines, node,
                                 action['parameters']['new_type'].strip())
                         changes = True
                     except:
                         pass
             elif action['action'] == "rename-type":
                 changes = self._rename_type(root, lines, action)
             elif action['action'] == "move-key-forward":
                 changes = self._move_key_forward(root, lines, action)
             elif action['action'] == "change-value":
                 changes = self._change_value(root, lines, action)
             elif action['action'] == "replace-value":
                 changes = self._replace_value(root, lines, action)
             elif action['action'] == "merge-arrays":
                 changes = self._add_array(root, lines, action)
                 if 'destination_path' in action['parameters']:
                     if changes:
                         yaml = "\n".join(lines)
                         root, lines = self.refresh(root_input_type, yaml,
                                                    notification_handler,
                                                    loader)
                     action['parameters']['keep-source'] = False
                     changes = self._move_key(root, lines, action)
             elif action['action'] == "add-key":
                 changes = self._add_key(root, lines, action)
             elif action['action'] == "scale-value":
                 changes = self._scale_value(root, lines, action)
             if changes:
                 yaml = "\n".join(lines)
     yaml = "\n".join(lines)
     return yaml
Example No. 8
def loader():
    return Loader()
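This looks like a pytest fixture (the decorator is not shown in this excerpt). Below is a minimal usage sketch, assuming pytest and the Loader API seen in the other examples; the test name is illustrative, the import path follows Example No. 12, and the document and assertion mirror Example No. 5:

import pytest

from gm_base.model_data import Loader  # import path as in Example No. 12


@pytest.fixture
def loader():
    # hypothetical fixture registration mirroring the snippet above
    return Loader()


def test_loads_scalar_mapping(loader):
    # pytest injects the fixture above by parameter name
    root = loader.load("format: ascii\n" "file: dual_sorp.vtk")
    assert root.children[0].value == 'ascii'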
Example No. 9
class MEConfig:
    """Static data class"""
    notification_handler = notification_handler
    """error handler for reporting and buffering errors"""
    autocomplete_helper = AutocompleteHelper()
    """helpers for handling autocomplete options in editor"""
    format_files = []
    """Array of format files"""
    transformation_files = []
    """Array of transformation files"""
    curr_format_file = None
    """selected format file"""
    config = _Config.open()
    """Serialized variables"""
    curr_file = None
    """Name of open file"""
    curr_file_timestamp = None
    """
    Timestamp of the opened file; None if the editor text is imported or new.
    """
    imported_file_name = None
    """if a file was imported, this is its suggested name"""
    root = None
    """root DataNode structure"""
    document = ""
    """text set by the editor after a significant change"""
    main_window = None
    """parent of dialogs"""
    notifications = []
    """array of notifications"""
    changed = False
    """whether the file has been changed"""
    loader = Loader(notification_handler)
    """loader of YAML files"""
    validator = Validator(notification_handler)
    """data validator"""
    root_input_type = None
    """input type of the whole tree, parsed from format"""
    resource_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                'resources')
    """path to a folder containing ME resources"""
    format_dir = os.path.join(resource_dir, '..', '..', 'gm_base', 'resources',
                              'ist')
    """path to a folder containing IST files"""
    transformation_dir = os.path.join(resource_dir, 'transformation')
    """path to a folder containing transformation files"""
    stylesheet_dir = os.path.join(resource_dir, 'css')
    """path to a folder containing Qt stylesheets"""
    info_text_html_root_dir = os.path.join(resource_dir, 'ist_html')
    """path to a root folder for InfoText"""
    logger = logging.getLogger(LOGGER_PREFIX + constants.CONTEXT_NAME)
    """root context logger"""

    DEFAULT_IMPORT_FORMAT_FILE = '1.8.3'
    """default IST version to be used for imported con files"""
    @classmethod
    def init(cls, main_window):
        """Initialize class data with this static method."""
        cls._read_format_files()
        cls._read_transformation_files()
        cls.main_window = main_window
        if len(cls.config.format_files) > 0:
            cls.curr_format_file = cls.config.format_files[0]
        elif len(cls.format_files) > 0:
            cls.curr_format_file = cls.format_files[0]
        cls.update_format()

    @classmethod
    def _read_format_files(cls):
        """read names of format files in format files directory"""
        from os import listdir
        from os.path import isfile, join
        for file_name in sorted(listdir(cls.format_dir)):
            if (isfile(join(cls.format_dir, file_name))
                    and file_name[-5:].lower() == ".json"):
                cls.format_files.append(file_name[:-5])
        # reverse the sorted order so the newest version comes first
        cls.format_files = cls.format_files[::-1]

    @classmethod
    def _read_transformation_files(cls):
        """read names of transformation files in the transformation files directory"""
        from os import listdir
        from os.path import isfile, join
        for file_name in listdir(cls.transformation_dir):
            if (isfile(join(cls.transformation_dir, file_name))
                    and file_name[-5:].lower() == ".json"):
                cls.transformation_files.append(file_name[:-5])

    @classmethod
    def get_curr_format_text(cls):
        """return current format file text"""
        from os.path import join
        file_name = join(cls.format_dir, cls.curr_format_file + ".json")
        try:
            with open(file_name, 'r') as file_d:
                return file_d.read()
        except (RuntimeError, IOError) as err:
            if cls.main_window is not None:
                cls._report_error(
                    "Can't open format file '" + cls.curr_format_file + "'",
                    err)
            else:
                raise err
        return None

    @classmethod
    def get_transformation_text(cls, file):
        """return transformation file text"""
        from os.path import join
        file_name = join(cls.transformation_dir, file + ".json")
        try:
            with open(file_name, 'r') as file_d:
                return file_d.read()
        except (RuntimeError, IOError) as err:
            if cls.main_window is not None:
                cls._report_error(
                    "Can't open transformation file '" + file + "'", err)
            else:
                raise err
        return None

    @classmethod
    def get_data_node(cls, position):
        """
        Returns DataNode at given `class::Position` position.
        """
        # the file may be empty or contain only comments
        if cls.root is None:
            return None

        return cls.root.get_node_at_position(position)

    @classmethod
    def set_current_format_file(cls, file_name):
        """
        set current format file
        """
        if file_name not in cls.format_files:
            return
        cls.curr_format_file = file_name
        cls.update_format()

    @classmethod
    def confront_file_timestamp(cls):
        """
        Compare the stored file timestamp with the file's current modification
        time; if they differ, show a dialog and reload the file content.
        :return: True if the file was reloaded
        """
        if cls.curr_file_timestamp is not None and \
            cls.curr_file is not None:
            try:
                timestamp = os.path.getmtime(cls.curr_file)
                if timestamp != cls.curr_file_timestamp:
                    from PyQt5 import QtWidgets
                    msg = QtWidgets.QMessageBox()
                    msg.setText(
                        "File has been modified outside of Model editor. Do you want to reload it?"
                    )
                    msg.setStandardButtons( QtWidgets.QMessageBox.Ignore | \
                        QtWidgets.QMessageBox.Reset)
                    msg.button(QtWidgets.QMessageBox.Reset).setText("Reload")
                    msg.setDefaultButton(QtWidgets.QMessageBox.Ignore)
                    ret = msg.exec_()
                    if ret == QtWidgets.QMessageBox.Reset:
                        cls.document = cls.read_file(cls.curr_file)
                        cls.curr_file_timestamp = timestamp
                        cls.update()
                        return True
            except OSError:
                pass
        return False

    @classmethod
    def _set_file(cls, file, imported=False):
        """
        save file name and timestamp
        """
        if imported:
            base_name = os.path.splitext(os.path.basename(file))[0]
            cls.imported_file_name = base_name
            i = 1
            dir_path = cls.config.current_working_dir + os.path.sep
            while os.path.isfile(dir_path + cls.imported_file_name + '.yaml'):
                if i > 999:
                    break
                cls.imported_file_name = "{0}{1:03d}".format(base_name, i)
                i += 1
            cls.imported_file_name = dir_path + cls.imported_file_name + '.yaml'
            cls.curr_file = None
            cls.curr_file_timestamp = None
        else:
            cls.curr_file = file
            cls.imported_file_name = None
            if file is None:
                cls.curr_file_timestamp = None
            else:
                try:
                    cls.curr_file_timestamp = os.path.getmtime(file)
                except OSError:
                    cls.curr_file_timestamp = None

    @classmethod
    def new_file(cls):
        """Create a new empty file."""
        cls.document = ""
        # if Analysis.current is not None:
        #     cls.curr_format_file = Analysis.current.flow123d_version
        #     if not cls.curr_format_file:
        #         cls.curr_format_file = sorted(cls.format_files, reverse=True)[0]
        cls.curr_format_file = sorted(cls.format_files, reverse=True)[0]
        cls.update_format()
        cls.changed = False
        cls._set_file(None)

    @classmethod
    def read_file(cls, file_name):
        try:
            # try utf-8
            try:
                with codecs.open(file_name, 'r', 'utf-8') as file_d:
                    return file_d.read().expandtabs(tabsize=2)
            except UnicodeDecodeError:
                pass

            # try windows-1250
            try:
                with codecs.open(file_name, 'r', 'cp1250') as file_d:
                    return file_d.read().expandtabs(tabsize=2)
            except UnicodeDecodeError:
                cls._report_error(
                    "Unknown encoding of the file %s. Should be UTF-8." %
                    file_name)

        except (RuntimeError, IOError) as err:
            cls._report_error("Can not open file: %s." % file_name, err)

    @classmethod
    def open_file(cls, file_name):
        """
        Read the file and load its content into the editor.
        """
        cls.document = cls.read_file(file_name)
        cls.config.update_current_working_dir(file_name)
        cls._set_file(file_name)
        cls.config.add_recent_file(file_name, cls.curr_format_file)
        cls.update()
        cls._set_format_file_from_data()
        cls.update_format()
        cls.changed = False
        cls.sync_analysis_for_curr_file()

    @classmethod
    def _set_format_file_from_data(cls):
        try:
            cls.curr_format_file = cls.root.get_node_at_path(
                '/flow123d_version').value
        except (LookupError, AttributeError):
            cls.curr_format_file = MEConfig.DEFAULT_IMPORT_FORMAT_FILE
        else:
            if cls.curr_format_file not in cls.format_files:
                # specified version not available, select next lower version
                def get_version(format_file):
                    vers = format_file.split('.')
                    major = vers[0]
                    minor = vers[1] if len(vers) > 1 else 0
                    rev = vers[2] if len(vers) > 2 else 0
                    return major, minor, rev

                req_version = get_version(cls.curr_format_file)
                for format_file in sorted(cls.format_files, reverse=True):
                    version = get_version(format_file)
                    if version > req_version:
                        continue
                    cls.curr_format_file = format_file
                    break

    @classmethod
    def import_file(cls, file_name):
        """
        Read a .con file and transform it into a YAML structure.

        :return: True if the file has a valid format (boolean)
        """
        try:
            con = cls.read_file(file_name)
            cls.document = parse_con(con)
            # find available file name
            cls._set_file(file_name, True)
            cls.update()
            cls.document = fix_intendation(cls.document, cls.root)
            cls.update()
            cls.document, need_move_forward = fix_tags(cls.document, cls.root)
            cls.update()
            cls.document = rewrite_comments(con, cls.document, cls.root)
            cls.update()
            data = {
                'actions': [{
                    'action': 'move-key-forward',
                    'parameters': {
                        'path': '/system'
                    }
                }, {
                    'action': 'delete-key',
                    'parameters': {
                        'path': '/system',
                        'deep': True
                    }
                }]
            }
            for path in need_move_forward:
                data['actions'].append({
                    'action': 'move-key-forward',
                    'parameters': {
                        'path': path
                    }
                })
            transformator = Transformator(None, data)
            cls.document = transformator.transform(cls.document, cls)
            cls.curr_format_file = MEConfig.DEFAULT_IMPORT_FORMAT_FILE
            # if Analysis.current is not None and \
            #     (Analysis.current.flow123d_version[:5] == '2.0.0' or
            #     Analysis.current.flow123d_version[:5] == '2.1.0'):
            #     cls.curr_format_file = MEConfig.DEFAULT_IMPORT_FORMAT_FILE
            #     cls.transform_con("flow123d_1.8.3_to_2.0.0_rc")
            cls.update_format()
            cls.changed = True
            return True
        except (RuntimeError, IOError) as err:
            if cls.main_window is not None:
                cls._report_error("Can't open import file", err)
            else:
                raise err
        except Exception as err:
            if cls.main_window is not None:
                cls._report_error("Can't import file from con format", err)
            else:
                raise err
        return False

    @classmethod
    def export_file(cls):
        """Export the current YAML document to CON format.

        :return: text of the exported file
        :rtype: str
        """
        return export_con(cls.root)

    @classmethod
    def open_recent_file(cls, file_name):
        """
        Read a file from the recent files list.

        :return: True if the file has a valid format (boolean)
        """
        # If we want to use this code, GUI has to be updated as well.
        # format_file = cls.config.get_format_file(file_name)
        # if format_file is not None:
        #     cls.curr_format_file = format_file

        cls.document = cls.read_file(file_name)
        cls.config.update_current_working_dir(file_name)
        cls._set_file(file_name)
        cls.config.add_recent_file(file_name, cls.curr_format_file)
        cls.update()
        cls._set_format_file_from_data()
        cls.update_format()
        cls.changed = False
        cls.sync_analysis_for_curr_file()
        return True

    @classmethod
    def update(cls):
        """reread yaml text and update node tree"""
        cls.notification_handler.clear()
        cls.root = cls.loader.load(cls.document)
        cls.autocomplete_helper.clear_anchors()
        for anchor in cls.loader.anchors:
            cls.autocomplete_helper.register_anchor(anchor)
        cls.notifications = cls.notification_handler.notifications
        if cls.root_input_type is None or cls.root is None:
            return
        cls.root = autoconvert(cls.root, cls.root_input_type)
        cls.validator.validate(cls.root, cls.root_input_type)

        # flow123d_version notifications
        try:
            node = cls.root.get_node_at_path('/flow123d_version')
        except LookupError:
            pass
        else:
            if node.value != cls.curr_format_file:
                ntf = Notification.from_name('Flow123dVersionMismatch',
                                             node.value, cls.curr_format_file)
                ntf.span = node.span
                cls.notification_handler.report(ntf)

        # handle parameters
        # if (Analysis.current is not None and
        #         Analysis.current.is_abs_path_in_analysis_dir(cls.curr_file)):
        #     Analysis.current.merge_params(cls.validator.params)

        StructureAnalyzer.add_node_info(cls.document, cls.root,
                                        cls.notification_handler)
        cls.notifications = cls.notification_handler.notifications

    @classmethod
    def update_format(cls):
        """reread json format file and update node tree"""
        if cls.curr_format_file is None:
            return
        try:
            text = cls.get_curr_format_text()
        except FileNotFoundError:
            # if format is not found, open the latest instead
            cls.curr_format_file = sorted(cls.format_files, reverse=True)[0]
            text = cls.get_curr_format_text()
        try:
            cls.root_input_type = get_root_input_type_from_json(text)
        except Exception as e:
            cls._report_error("Can't open format file", e)
        else:
            InfoTextGenerator.init(text)
            cls.autocomplete_helper.create_options(cls.root_input_type)
            cls.update()

    @classmethod
    def save_file(cls):
        """save file"""
        cls.update()
        try:
            with codecs.open(cls.curr_file, 'w', 'utf-8') as file_d:
                file_d.write(cls.document)
            # the current format is saved to the recent files list when the file is saved
            cls._set_file(cls.curr_file)
            cls.config.format_files[0] = cls.curr_format_file
            cls.changed = False
        except (RuntimeError, IOError) as err:
            cls._report_error("Can't save file", err)
        else:
            cls.sync_analysis_for_curr_file()

    @classmethod
    def save_as(cls, file_name):
        """save file as"""
        cls.update()
        try:
            with codecs.open(file_name, 'w', 'utf-8') as file_d:
                file_d.write(cls.document)
            cls.config.update_current_working_dir(file_name)
            cls._set_file(file_name)
            cls.config.add_recent_file(file_name, cls.curr_format_file)
            cls.changed = False
        except (RuntimeError, IOError) as err:
            if cls.main_window is not None:
                cls._report_error("Can't save file", err)
            else:
                raise err
        else:
            cls.sync_analysis_for_curr_file()

    @classmethod
    def sync_analysis_for_curr_file(cls):
        """Write current file and params to analysis file."""
        # if (Analysis.current is not None and
        #         Analysis.current.is_abs_path_in_analysis_dir(cls.curr_file)):
        #     Analysis.current.merge_params(cls.validator.params)
        #     params = [param.name for param in cls.validator.params]
        #     Analysis.current.add_file(cls.curr_file, params)
        #     if not Analysis.current.flow123d_version:
        #         Analysis.current.flow123d_version = cls.curr_format_file
        #     Analysis.current.save()
        pass

    @classmethod
    def update_yaml_file(cls, new_yaml_text):
        """update new editor text"""
        if new_yaml_text != cls.document:
            cls.document = new_yaml_text
            cls.changed = True
            return True
        return False

    @classmethod
    def transform_con(cls, file):
        """
        Run transformation according to the rules in the given file.
        Now used only for importing .con files.
        """
        cls.update()
        text = cls.get_transformation_text(file)
        try:
            transformator = Transformator(text)
        except (ValueError, TransformationFileFormatError) as err:
            if cls.main_window is not None:
                cls._report_error("Can't decode transformation file", err)
            else:
                raise err
            return
        if cls.main_window is None:
            if cls.curr_format_file != transformator.old_version:
                print("Transformed file format '" + cls.curr_format_file +
                      "' and format specified in transformation file '" +
                      transformator.old_version + "' are different")
        try:
            cls.document = transformator.transform(cls.document, cls)
            if len(transformator.err) > 0:
                if cls.main_window is not None:
                    cls._report_notify(transformator.err)
                else:
                    for err in transformator.err:
                        print(err)
        except TransformationFileFormatError as err:
            if cls.main_window is not None:
                cls._report_error("Transformation format error", err)
            else:
                raise err
            return
        if transformator.new_version in cls.format_files:
            cls.set_current_format_file(transformator.new_version)
        else:
            if cls.main_window is None:
                print(
                    "Cannot set new file format specified in transformation file '"
                    + transformator.new_version +
                    "'. Format file is not available.")
            cls.update()

    @classmethod
    def transform(cls, to_version):
        """Run transformation to version to_version."""
        cls.update()

        changes = make_changes()
        yml = get_yaml_serializer()
        tree = yml.load(cls.document)

        try:
            actions = changes.apply_changes(tree,
                                            to_version,
                                            map_insert=Changes.BEGINNING)
        except:
            cls._report_error("Transformation format error.")
            return

        stream = StringIO()
        yml.dump(tree, stream)
        cls.document = stream.getvalue()

        if to_version in cls.format_files:
            cls.set_current_format_file(to_version)
        else:
            cls.update()

    @classmethod
    def transform_get_version_list(cls):
        """Return the list of versions available for transformation."""
        changes = make_changes()
        return [v for v in changes.versions if v in cls.format_files]

    @classmethod
    def get_shortcut(cls, name):
        """Locate a keyboard shortcut by its action name.

        :param str name: name of the shortcut
        :return: the assigned shortcut
        :rtype: :py:class:`helpers.keyboard_shortcuts.KeyboardShortcut` or ``None``
        """
        shortcut = None
        if name in shortcuts_definition.SYSTEM_SHORTCUTS:
            shortcut = shortcuts_definition.SYSTEM_SHORTCUTS[name]
        elif name in cls.config.shortcuts:
            shortcut = cls.config.shortcuts[name]
        if shortcut:
            return shortcuts.get_shortcut(shortcut)
        return None

    @classmethod
    def _report_error(cls, mess, err=None):
        """Report an error with dialog."""
        from gm_base.geomop_dialogs import GMErrorDialog
        if cls.main_window is not None:
            err_dialog = GMErrorDialog(cls.main_window)
            err_dialog.open_error_dialog(mess, err)
        else:
            raise Exception(mess)

    @classmethod
    def _report_notify(cls, errs):
        """Report a list of notifications with a dialog."""
        from gm_base.geomop_dialogs import GMErrorDialog
        err_dialog = GMErrorDialog(cls.main_window)
        err_dialog.open_error_report_dialog(errs)
Example No. 10
def load_invalid_structure_to_config():
    with open(os.path.join(__sample_dir__, 'config_invalid.yaml')) as file:
        cfg.document = file.read()
    loader = Loader(NotificationHandler())
    cfg.root = loader.load(cfg.document)
Example No. 11
def test_validator():
    error_handler = NotificationHandler()
    loader = Loader(error_handler)
    validator = Validator(error_handler)

    it_int = dict(base_type='Integer', min=0, max=3)

    it_string = dict(base_type='String')

    it_array = dict(base_type='Array', subtype=it_int, min=1, max=4)

    it_record = dict(base_type='Record',
                     keys={
                         'a1': {
                             'default': {
                                 'type': 'obligatory'
                             },
                             'type': it_int
                         },
                         'a2': {
                             'default': {
                                 'type': 'obligatory'
                             },
                             'type': it_int
                         },
                         'b': {
                             'default': {
                                 'type': 'value at declaration'
                             },
                             'type': it_int
                         },
                         'c': {
                             'default': {
                                 'type': 'value at read time'
                             },
                             'type': it_int
                         },
                         'd': {
                             'default': {
                                 'type': 'optional'
                             },
                             'type': it_int
                         },
                         'TYPE': {
                             'type': it_string
                         }
                     },
                     name='MyRecord')

    it_record2 = dict(base_type='Record',
                      keys={
                          'b': {
                              'default': {
                                  'type': 'obligatory'
                              },
                              'type': it_int
                          },
                          'TYPE': {
                              'type': it_string
                          }
                      },
                      name='MyRecord2')

    it_abstract = dict(name='MyAbstract',
                       base_type='Abstract',
                       implementations={
                           'record1': it_record,
                           'record2': it_record2
                       })

    # validate scalar
    node = ScalarDataNode()
    node.value = 2
    assert validator.validate(node, it_int) is True

    error_handler.clear()
    node.value = 4
    assert validator.validate(node, it_int) is False
    assert len(error_handler.notifications) == 1

    # validate record
    document = ("a1: 1\n" "a2: 1")
    node = loader.load(document)
    assert validator.validate(node, it_record) is True

    error_handler.clear()
    document = ("a1: 1\n" "a2: 1\n" "d: 2\n" "e: 4")
    node = loader.load(document)
    assert validator.validate(node, it_record) is True
    assert len(error_handler.notifications) == 1

    # test array
    document = "[0, 1, 1, 2]"
    node = loader.load(document)
    assert validator.validate(node, it_array) is True

    error_handler.clear()
    document = "[0, 1, 1, 2, -1, 5]"
    node = loader.load(document)
    assert validator.validate(node, it_array) is False
    assert len(error_handler.notifications) == 3

    # validate abstract
    document = ("!record1\n" "a1: 1\n" "a2: 1\n")
    node = loader.load(document)
    assert validator.validate(node, it_abstract) is True

    node.type.value = 'record2'
    assert validator.validate(node, it_abstract) is False

    document = ("a1: 1\n" "a2: 1\n")
    node = loader.load(document)
    assert validator.validate(node, it_abstract) is False

    # validate abstract type by tag
    document = ("!record1\n" "a1: 1\n" "a2: 1\n")
    node = loader.load(document)
    assert validator.validate(node, it_abstract) is True

    # test validate
    document = ("!record1\n" "a1: 2\n" "a2: 1\n")
    node = loader.load(document)
    assert validator.validate(node, it_abstract) is True

    document = ("!record2\n" "b: 2\n")
    node = loader.load(document)
    assert validator.validate(node, it_abstract) is True

    error_handler.clear()
    document = ("!record1\n" "a1: 5\n" "a2: -1\n" "e: 4\n" "b: r")
    node = loader.load(document)
    assert validator.validate(node, it_abstract) is False
    assert len(error_handler.notifications) == 4
Example No. 12
def loader():
    from gm_base.model_data import Loader
    return Loader()