Example No. 1
    def fix_app_spec_locations(self, app_spec_config: t.MutableMapping,
                               config_path: pathlib.Path):
        """Fix any relative path in the AppSpec configuration.

        Relative paths are relative to the location of the configuration
        file.

        :param app_spec_config: The configuration of the application.
        :param config_path: The location of the configuration file.
        """
        # Aliases for proc_input and proc_output.
        # TODO: Deprecate these fields.
        if 'input' in app_spec_config:
            app_spec_config['proc_input'] = app_spec_config.pop('input')
        if 'output' in app_spec_config:
            app_spec_config['proc_output'] = app_spec_config.pop('output')

        proc_input = app_spec_config['proc_input']
        handler_type = proc_input['type']
        if handler_type in self.io_file_handler_types:
            # If input_location is absolute, base_path is discarded
            # automatically.
            input_location = proc_input['location']
            proc_input['location'] = str(config_path / input_location)

        proc_output = app_spec_config['proc_output']
        handler_type = proc_output['type']
        if handler_type in self.io_file_handler_types:
            output_location = proc_output['location']
            proc_output['location'] = str(config_path / output_location)
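
A quick, runnable illustration of the pathlib join semantics the comment above relies on: an absolute right-hand operand discards the base path.

import pathlib

pathlib.Path('/etc/app') / 'data.csv'       # PosixPath('/etc/app/data.csv')
pathlib.Path('/etc/app') / '/abs/data.csv'  # PosixPath('/abs/data.csv')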
Example No. 2
    def __setitem__(self, key: str, val: object) -> None:
        '''
        Writes an `ArrayFile`, `EncodedFile`, or `Artifact` to `self.path/key`

        `np.ndarray`-like objects are written as `ArrayFiles`, `Path`-like
        objects are written as `EncodedFile`s, and string-keyed mappings are
        written as subartifacts.

        Attribute access syntax is also supported, and occurrences of "__" in
        `key` are transformed into ".", to support accessing encoded files as
        attributes (i.e. `artifact['name.ext'] = val` is equivalent to
        `artifact.name__ext = val`).
        '''
        path = self.path / key

        # Copy an existing file.
        if isinstance(val, Path):
            assert path.suffix != ''
            _copy_file(path, val)

        # Write a subartifact.
        elif isinstance(val, (Mapping, Artifact)):
            assert path.suffix == ''
            MutableMapping.update(Artifact(path), val)  # type: ignore

        # Write an array.
        else:
            assert path.suffix == ''
            _write_h5(path.with_suffix('.h5'), val)
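
A minimal usage sketch for the method above (paths are hypothetical; assumes the Artifact class is importable and backed by a writable directory):

import numpy as np
from pathlib import Path

art = Artifact(Path('/tmp/experiment'))     # hypothetical artifact root
art['weights'] = np.zeros((3, 3))           # ndarray -> written as weights.h5
art['notes.txt'] = Path('notes.txt')        # Path -> the existing file is copied in
art['run1'] = {'loss': np.zeros(10)}        # string-keyed mapping -> subartifact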
Example No. 3
def override(target_dict: typing.MutableMapping,
             override_dict: typing.Mapping):
    """Apply the updates in override_dict to the dict target_dict. This is like
  dict.update, but recursive. i.e. if the existing element is a dict, then
  override elements of the sub-dict rather than wholesale replacing.

  One special case is added. If a key within override dict starts with '!' then
  it is interpretted as follows:
     - if the associated value is "REMOVE", the key is removed from the parent
       dict
     - use !! for keys that actually start with ! and shouldn't be removed.

  e.g.
  override(
    {
      'outer': { 'inner': { 'key': 'oldValue', 'existingKey': True } }
    },
    {
      'outer': { 'inner': { 'key': 'newValue' } },
      'newKey': { 'newDict': True },
    }
  )
  yields:
    {
      'outer': {
        'inner': {
           'key': 'newValue',
           'existingKey': True
        }
      },
      'newKey': { newDict: True }
    }
  """

    for key, value in override_dict.items():
        #
        # Handle special ! syntax:
        #   "!keyname" : "REMOVE",   --> remove the key 'keyname' from target_dict
        #   "!!keyname": ...         --> de-escape to the literal key '!keyname'
        #
        if key[0:1] == "!" and key[1:2] != "!":
            key = key[1:]
            if value == "REMOVE":
                target_dict.pop(key, None)
                continue
        elif key[0:2] == "!!":
            # Strip one '!' so keys escaped with '!!' are written literally.
            key = key[1:]

        current_value = target_dict.get(key)
        if not isinstance(current_value, Mapping):
            # Thing or Mapping overrides Thing or None
            target_dict[key] = value
        elif isinstance(value, Mapping):
            # Mapping overrides mapping, recurse
            target_dict[key] = override(current_value, value)
        else:
            # Thing overrides Mapping
            target_dict[key] = value

    return target_dict
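
A short demonstration of the '!' removal and '!!' escape syntax described in the docstring (a sketch, using the override function above):

base = {'keep': 1, 'gone': 2, '!literal': 3}
override(base, {'!gone': 'REMOVE', '!!literal': 'updated'})
# base is now {'keep': 1, '!literal': 'updated'}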
Example No. 4
    def _func(self, args: t.MutableMapping):
        # map args back onto the signature.
        pargs = []  # type: t.List[t.Any]
        for param in self.positionals:
            if param.kind == param.VAR_POSITIONAL:
                pargs.extend(args.pop(param.name))
            elif param.kind == param.POSITIONAL_OR_KEYWORD:
                pargs.append(args.pop(param.name))

        # Iterate over a snapshot of the keys: popping and inserting while
        # iterating over the live mapping would raise a RuntimeError.
        for key in list(args):
            if key.startswith('no_'):
                # Strip the 'no_' prefix so the value maps onto the real
                # parameter name.
                args[key[3:]] = args.pop(key)

        return (self.function or (lambda: None))(*pargs, **args)
Example No. 5
    def _pretty_print(cls,
                      values: typing.MutableMapping,
                      translate_dict: typing.Dict[str, AlgorithmProperty],
                      indent=0):
        res = ""
        for k, v in values.items():
            if k not in translate_dict:
                if isinstance(v, typing.MutableMapping):
                    res += " " * indent + f"{k}: {cls._pretty_print(v, {}, indent + 2)}\n"
                else:
                    res += " " * indent + f"{k}: {v}\n"
                continue
            desc = translate_dict[k]
            res += " " * indent + desc.user_name + ": "
            if issubclass(desc.value_type, Channel):
                res += str(Channel(v))
            elif issubclass(desc.value_type, AlgorithmDescribeBase):
                res += desc.possible_values[v["name"]].get_name()
                if v["values"]:
                    res += "\n"
                    res += cls._pretty_print(
                        v["values"],
                        desc.possible_values[v["name"]].get_fields_dict(),
                        indent + 2)
            elif isinstance(v, typing.MutableMapping):
                res += cls._pretty_print(v, {}, indent + 2)
            else:
                res += str(v)
            res += "\n"
        return res[:-1]
Example No. 6
def setup_struct(struct: typing.MutableMapping):
    # In key-value parse mode, derive the field list from the KV columns.
    if struct['options'][predef.PredefParseKVMode]:
        fields = get_struct_kv_fields(struct)
        struct['fields'] = fields
    # Fall back to the class comment option when the struct carries none.
    comment = struct.get("comment", "")
    if comment == "":
        comment = struct["options"].get(predef.PredefClassComment, "")
        if comment != "":
            struct["comment"] = comment
Example No. 7
    def _lookup(
        self,
        key: str,
        context: typing.MutableMapping,
    ) -> typing.Union[T, None, bool]:
        result = self.mapping.get(key)
        if result is not None:
            # Resolve the attribute for the active profile (falling back to
            # 'default'); the '__ignored__' sentinel maps to False.
            result = getattr(result, context.get('profile') or 'default')
            return result if result != '__ignored__' else False
        return None
Example No. 8
def recursive_worker(d: MutableMappingType,
                     u: MappingType) -> MutableMappingType:
    for k, v in u.items():
        dv = d.get(k, {})
        if not isinstance(dv, MutableMapping):
            # Existing value is not a mapping: replace it outright.
            d[k] = v
        elif isinstance(v, Mapping):
            # Both sides are mappings: merge recursively.
            d[k] = recursive_worker(dv, v)
        else:
            # A plain value overrides an existing mapping.
            d[k] = v
    return d
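
A brief usage sketch (assumes the snippet's MutableMapping import and type aliases), contrasting the recursive merge with plain dict.update:

cfg = {'db': {'host': 'localhost', 'port': 5432}}
recursive_worker(cfg, {'db': {'port': 6432}})
# cfg is now {'db': {'host': 'localhost', 'port': 6432}};
# plain dict.update would have replaced the whole 'db' sub-dict.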
Example No. 9
def setup_meta_kv_mode(meta: typing.MutableMapping):
    meta[predef.PredefParseKVMode] = False
    text = meta.get(predef.PredefKeyValueColumns, "")
    if text != "":
        columns = text.split(",")
        # Key, value-type and value columns are required; the comment
        # column is optional.
        assert len(columns) >= 3, text
        meta[predef.PredefParseKVMode] = True
        meta[predef.PredefKeyColumn] = int(columns[0])
        meta[predef.PredefValueTypeColumn] = int(columns[1])
        meta[predef.PredefValueColumn] = int(columns[2])
        if len(columns) > 3:
            meta[predef.PredefCommentColumn] = int(columns[3])
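
A hedged illustration, assuming the predef constants above are plain hashable keys:

meta = {predef.PredefKeyValueColumns: '1,2,3,4'}
setup_meta_kv_mode(meta)
# meta[predef.PredefParseKVMode] -> True; the key, value-type and value
# columns are 1, 2 and 3, and the optional comment column is 4.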
Example No. 10
    def prepare_meta(attrs: typing.MutableMapping):
        # pylint: disable=unsupported-membership-test,unsubscriptable-object
        if 'Meta' not in attrs:
            raise RuntimeError('no Meta')
        if not hasattr(attrs['Meta'], 'table'):
            raise RuntimeError('no table in Meta')
        if not isinstance(attrs['Meta'].table, Table):
            raise RuntimeError('Meta.table is not Table')

        frozen_meta = attrs.pop('Meta')
        # Freeze the Meta class: attribute writes and deletes on its
        # instances will now go through _frozen_setattrs.
        frozen_meta.__setattr__ = _frozen_setattrs
        frozen_meta.__delattr__ = _frozen_setattrs

        return frozen_meta
Example No. 11
    def __init__(
        self,
        message: str,
        context: typing.MutableMapping,
        exc_info: typing.Optional[str] = None,
    ) -> None:
        """
        :param exc_info: Exception traceback (if applicable).
        """
        super().__init__()

        self.message = message
        self.context = context
        self.code = context.get('code') or message
        self.exc_info = exc_info
Example No. 12
def get_encoding_from_headers(headers: typing.MutableMapping) -> typing.Optional[str]:
    """Returns the encoding from a given HTTP header dict.

    :param headers: dictionary to extract the encoding from.
    :rtype: str or None
    """
    content_type = headers.get("Content-Type")
    if not content_type:
        return None

    content_type, params = cgi.parse_header(content_type)
    if "charset" in params:
        return params["charset"].strip("'\"")

    # RFC 2616 defaults text/* content to ISO-8859-1 when no charset is set.
    if "text" in content_type:
        return "ISO-8859-1"

    return None
Example No. 13
    def handle_invalid_value(
        self,
        message: str,
        exc_info: bool,
        context: typing.MutableMapping,
    ) -> None:
        key = context.get('key', '')
        msg = FilterMessage(
            message=message,
            context=context,
            exc_info=format_exc() if exc_info else None,
        )

        try:
            self.messages[key].append(msg)
        except KeyError:
            self.messages[key] = [msg]
Example No. 14
def drop(_dict: MM, keys: Iterable) -> MM:
    """Remove each key in ``keys`` from ``_dict`` in place; missing keys
    are ignored."""
    for key in keys:
        _dict.pop(key, None)
    return _dict
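
Usage sketch; the pop default makes missing keys a no-op:

settings = {'host': 'localhost', 'debug': True, 'retries': 3}
drop(settings, ['debug', 'retries', 'absent'])
# settings is now {'host': 'localhost'}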
Example No. 15
def populate_feedstock_attributes(
    name: str,
    sub_graph: typing.MutableMapping,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    feedstock_dir=None,
) -> typing.MutableMapping:
    """Parse the various configuration information into something usable

    Notes
    -----
    If the return is bad hand the response itself in so that it can be parsed
    for meaning.
    """
    sub_graph.update({"feedstock_name": name, "bad": False})

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph
    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items() if k in {
                "provider",
                "min_r_ver",
                "min_py_ver",
                "max_py_ver",
                "max_r_ver",
                "compiler_stack",
                "bot",
            }
        }

    if (feedstock_dir is not None and
            len(glob.glob(os.path.join(feedstock_dir, ".ci_support",
                                       "*.yaml"))) > 0):
        recipe_dir = os.path.join(feedstock_dir, "recipe")
        ci_support_files = glob.glob(
            os.path.join(feedstock_dir, ".ci_support", "*.yaml"), )
        varient_yamls = []
        plat_arch = []
        for cbc_path in ci_support_files:
            cbc_name = os.path.basename(cbc_path)
            cbc_name_parts = cbc_name.replace(".yaml", "").split("_")
            plat = cbc_name_parts[0]
            if len(cbc_name_parts) == 1:
                arch = "64"
            else:
                if cbc_name_parts[1] in ["64", "aarch64", "ppc64le", "arm64"]:
                    arch = cbc_name_parts[1]
                else:
                    arch = "64"
            plat_arch.append((plat, arch))

            varient_yamls.append(
                parse_meta_yaml(
                    meta_yaml,
                    platform=plat,
                    arch=arch,
                    recipe_dir=recipe_dir,
                    cbc_path=cbc_path,
                ), )

            # collapse them down
            final_cfgs = {}
            for plat_arch, varyml in zip(plat_arch, varient_yamls):
                if plat_arch not in final_cfgs:
                    final_cfgs[plat_arch] = []
                final_cfgs[plat_arch].append(varyml)
            for k in final_cfgs:
                ymls = final_cfgs[k]
                final_cfgs[k] = _convert_to_dict(ChainDB(*ymls))
            plat_arch = []
            varient_yamls = []
            for k, v in final_cfgs.items():
                plat_arch.append(k)
                varient_yamls.append(v)
    else:
        plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
        for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
            if "_" in k:
                plat_arch.append(k.split("_"))
        varient_yamls = [
            parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
            for plat, arch in plat_arch
        ]

    # this makes certain that we have consistent ordering
    sorted_varient_yamls = [
        x for _, x in sorted(zip(plat_arch, varient_yamls))
    ]
    yaml_dict = ChainDB(*sorted_varient_yamls)
    if not yaml_dict:
        logger.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[
            f"{plat_arch_name}_requirements"], _ = extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = extract_requirements(meta_yaml)

    # handle multi outputs
    if "outputs" in yaml_dict:
        sub_graph["outputs_names"] = sorted(
            list({d.get("name", "")
                  for d in yaml_dict["outputs"]}), )

    # TODO: Write schema for dict
    # TODO: remove this
    req = get_requirements(yaml_dict)
    sub_graph["req"] = req

    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
    kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
    if kl:
        sub_graph["hash_type"] = kl[0]
    return sub_graph
Example No. 16
def filter_empty_fields(data: typing.MutableMapping) -> typing.MutableMapping:
    required = ('errors', ) if data.get('errors') else ('data', )
    return {
        key: value
        for key, value in data.items() if key in required or value
    }
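
For a GraphQL-style payload this keeps 'errors' when non-empty, otherwise 'data' plus any truthy fields, e.g.:

filter_empty_fields({'data': {'user': 1}, 'errors': [], 'extensions': None})
# -> {'data': {'user': 1}}
filter_empty_fields({'data': None, 'errors': ['boom']})
# -> {'errors': ['boom']}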
Example No. 17
    def _itemsetter(obj: tp.MutableMapping):
        # Set every positional key to `value`, then apply keyword overrides.
        for key, val in chain(zip(keys, repeat(value)), kwargs.items()):
            obj[key] = val
        return obj
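
A hedged sketch of the factory this closure plausibly belongs to; the itemsetter name and signature are inferred from the free variables keys, value and kwargs, not confirmed by the source:

from itertools import chain, repeat

def itemsetter(*keys, value=None, **kwargs):  # hypothetical enclosing factory
    def _itemsetter(obj):
        for key, val in chain(zip(keys, repeat(value)), kwargs.items()):
            obj[key] = val
        return obj
    return _itemsetter

itemsetter('retries', 'timeout', value=0, verbose=False)({})
# -> {'retries': 0, 'timeout': 0, 'verbose': False}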
Example No. 18
import logging

import os
import time
import datetime
import subprocess
import sys
import enum
from collections.abc import MutableMapping
from pathlib import Path
from typing import Optional, Union
from concurrent.futures import ThreadPoolExecutor

from . import exceptions

from classad import ClassAd

# Register ClassAd as a virtual subclass so isinstance checks against
# MutableMapping accept it.
MutableMapping.register(ClassAd)

logger = logging.getLogger(__name__)


class StrEnum(enum.Enum):
    def __str__(self):
        return self.value


def wait_for_path_to_exist(
    path: Path,
    timeout: Optional[Union[int, float, datetime.timedelta]] = None,
    wait_time: Optional[Union[int, float, datetime.timedelta]] = 1,
) -> None:
    """
Example No. 19
def populate_feedstock_attributes(
    name: str,
    sub_graph: typing.MutableMapping,
    meta_yaml: typing.Union[str, Response] = "",
    conda_forge_yaml: typing.Union[str, Response] = "",
    mark_not_archived=False,
    feedstock_dir=None,
) -> typing.MutableMapping:
    """Parse the various configuration information into something usable

    Notes
    -----
    If the return is bad hand the response itself in so that it can be parsed
    for meaning.
    """
    sub_graph.update({
        "feedstock_name": name,
        "bad": False,
        "branch": "master"
    })

    if mark_not_archived:
        sub_graph.update({"archived": False})

    # handle all the raw strings
    if isinstance(meta_yaml, Response):
        sub_graph["bad"] = f"make_graph: {meta_yaml.status_code}"
        return sub_graph

    # strip out old keys - this removes old platforms when one gets disabled
    for key in list(sub_graph.keys()):
        if key.endswith("meta_yaml") or key.endswith(
                "requirements") or key == "req":
            del sub_graph[key]

    sub_graph["raw_meta_yaml"] = meta_yaml

    # Get the conda-forge.yml
    if isinstance(conda_forge_yaml, str):
        sub_graph["conda-forge.yml"] = {
            k: v
            for k, v in yaml.safe_load(conda_forge_yaml).items()
            if k in CONDA_FORGE_YML_KEYS_TO_KEEP
        }

    if feedstock_dir is not None:
        LOGGER.debug(
            "# of ci support files: %s",
            len(glob.glob(os.path.join(feedstock_dir, ".ci_support",
                                       "*.yaml"))),
        )

    try:
        if (feedstock_dir is not None and len(
                glob.glob(os.path.join(feedstock_dir, ".ci_support",
                                       "*.yaml"))) > 0):
            recipe_dir = os.path.join(feedstock_dir, "recipe")
            ci_support_files = glob.glob(
                os.path.join(feedstock_dir, ".ci_support", "*.yaml"), )
            varient_yamls = []
            plat_arch = []
            for cbc_path in ci_support_files:
                cbc_name = os.path.basename(cbc_path)
                cbc_name_parts = cbc_name.replace(".yaml", "").split("_")
                plat = cbc_name_parts[0]
                if len(cbc_name_parts) == 1:
                    arch = "64"
                else:
                    if cbc_name_parts[1] in [
                            "64", "aarch64", "ppc64le", "arm64"
                    ]:
                        arch = cbc_name_parts[1]
                    else:
                        arch = "64"
                plat_arch.append((plat, arch))

                varient_yamls.append(
                    parse_meta_yaml(
                        meta_yaml,
                        platform=plat,
                        arch=arch,
                        recipe_dir=recipe_dir,
                        cbc_path=cbc_path,
                    ), )

                # sometimes the requirements come out as None, which ruins
                # the aggregated meta_yaml
                if "requirements" in varient_yamls[-1]:
                    for section in ["build", "host", "run"]:
                        # We make sure to set a section only if it is actually in
                        # the recipe. Adding a section when it is not there might
                        # confuse migrators trying to move CB2 recipes to CB3.
                        if section in varient_yamls[-1]["requirements"]:
                            val = varient_yamls[-1]["requirements"].get(
                                section, [])
                            varient_yamls[-1]["requirements"][
                                section] = val or []

                # collapse them down
                final_cfgs = {}
                for plat_arch, varyml in zip(plat_arch, varient_yamls):
                    if plat_arch not in final_cfgs:
                        final_cfgs[plat_arch] = []
                    final_cfgs[plat_arch].append(varyml)
                for k in final_cfgs:
                    ymls = final_cfgs[k]
                    final_cfgs[k] = _convert_to_dict(ChainDB(*ymls))
                plat_arch = []
                varient_yamls = []
                for k, v in final_cfgs.items():
                    plat_arch.append(k)
                    varient_yamls.append(v)
        else:
            plat_arch = [("win", "64"), ("osx", "64"), ("linux", "64")]
            for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
                if "_" in k:
                    plat_arch.append(k.split("_"))
            varient_yamls = [
                parse_meta_yaml(meta_yaml, platform=plat, arch=arch)
                for plat, arch in plat_arch
            ]
    except Exception as e:
        import traceback

        trb = traceback.format_exc()
        sub_graph["bad"] = f"make_graph: render error {e}\n{trb}"
        raise

    LOGGER.debug("platforms: %s", plat_arch)

    # this makes certain that we have consistent ordering
    sorted_varient_yamls = [
        x for _, x in sorted(zip(plat_arch, varient_yamls))
    ]
    yaml_dict = ChainDB(*sorted_varient_yamls)
    if not yaml_dict:
        LOGGER.error(f"Something odd happened when parsing recipe {name}")
        sub_graph["bad"] = "make_graph: Could not parse"
        return sub_graph

    sub_graph["meta_yaml"] = _convert_to_dict(yaml_dict)
    meta_yaml = sub_graph["meta_yaml"]

    for k, v in zip(plat_arch, varient_yamls):
        plat_arch_name = "_".join(k)
        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
        _, sub_graph[
            f"{plat_arch_name}_requirements"], _ = _extract_requirements(v)

    (
        sub_graph["total_requirements"],
        sub_graph["requirements"],
        sub_graph["strong_exports"],
    ) = _extract_requirements(meta_yaml)

    # handle multi outputs
    outputs_names = set()
    if "outputs" in yaml_dict:
        outputs_names.update(
            set(list({d.get("name", "")
                      for d in yaml_dict["outputs"]}), ), )
        # handle implicit meta packages
        if "run" in sub_graph.get("meta_yaml", {}).get("requirements", {}):
            outputs_names.add(meta_yaml["package"]["name"])
    # add in single package name
    else:
        outputs_names = {meta_yaml["package"]["name"]}
    sub_graph["outputs_names"] = outputs_names

    # TODO: Write schema for dict
    # TODO: remove this
    req = _get_requirements(yaml_dict)
    sub_graph["req"] = req

    # set name and version
    keys = [("package", "name"), ("package", "version")]
    missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
    for k in keys:
        if k[1] not in missing_keys:
            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]

    # set the url and hash
    sub_graph.pop("url", None)
    sub_graph.pop("hash_type", None)

    source = yaml_dict.get("source", [])
    if isinstance(source, collections.abc.Mapping):
        source = [source]
    source_keys: Set[str] = set()
    for s in source:
        if not sub_graph.get("url"):
            sub_graph["url"] = s.get("url")
        source_keys |= s.keys()

    kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
    if kl:
        sub_graph["hash_type"] = kl[0]

    return sub_graph
Example No. 20
def pick(_dict: MM,
         keys: Iterable,
         pop=True,
         default=None) -> Generator[Any, None, None]:
    """Yield the value of each key in ``keys``; missing keys yield
    ``default``. With ``pop=True`` the keys are removed as they are read."""
    for key in keys:
        yield _dict.pop(key, default) if pop else _dict.get(key, default)
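
Usage sketch; with pop=True the picked keys are removed as a side effect:

params = {'x': 1, 'y': 2, 'z': 3}
x, y, w = pick(params, ['x', 'y', 'w'])
# x == 1, y == 2, w is None; params is now {'z': 3}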
Example No. 21
File: abap.py Project: sahwar/abap
def merge(directory: pathlib.Path, data: typing.MutableMapping,
          yaml_data: typing.MutableMapping) -> typing.MutableMapping:

    result = copy.deepcopy(data)
    yaml_data = copy.deepcopy(yaml_data)
    yaml_items = collections.OrderedDict()

    def override(key):
        if key in yaml_data:
            result[key] = yaml_data[key]

    items_by_path = {}
    for item in result.get('items', []):
        items_by_path[item['path']] = item

    if ('title' in yaml_data and yaml_data['title'] != data['title']
            and 'slug' not in yaml_data):
        result['slug'] = slugify(yaml_data['title'])

    for key in ('title', 'authors', 'categories', 'description', 'slug'):
        override(key)

    for idx, item in enumerate(yaml_data.get('items', [])):
        item_path = directory / item['path']

        current_item = items_by_path.get(item_path)
        if current_item is None:
            LOG.warning(f'Unknown item: {item_path!s} in YAML data')
            continue

        overrides = {}
        for k in ('title', 'categories', 'description', 'chapters', 'sequence',
                  'explicit'):
            if k in item:
                overrides[k] = item[k]
        if overrides:
            current_item.update(overrides)

        # Retain index for sorting purposes.
        yaml_items[item_path] = idx

    # Decide sorting order.
    if len(items_by_path) == len(yaml_items):
        LOG.debug(
            'Manifest contains all the items, items without sequence will be '
            'sorted by their order in the manifest')
        sequences = {p: i for i, p in enumerate(yaml_items, start=1)}
    else:
        LOG.debug('Items without sequence will be sorted by path')
        sequences = {
            p: i
            for i, p in enumerate(sorted(items_by_path), start=1)
        }

    def sort_key(item):
        return item.get('sequence', sequences[item['path']])

    result.update({
        'items': sorted(result.get('items', []), key=sort_key),
    })

    return result
Example No. 22
    def rewrite(self, original_tree: cst.FunctionDef, env: SymbolTable,
                metadata: tp.MutableMapping) -> PASS_ARGS_T:
        if not isinstance(original_tree, cst.FunctionDef):
            raise TypeError('ssa must be run on a FunctionDef')

        # resolve position information necessary for generating symbol table
        wrapper = _wrap(to_module(original_tree))
        pos_info = wrapper.resolve(PositionProvider)

        # convert `elif cond:` to `else: if cond:`
        # (simplifies ssa logic)
        transformer = with_tracking(ElifToElse)()
        tree = original_tree.visit(transformer)

        # original node -> generated nodes
        # (node_tracking_table.i gives the inverse mapping:
        #  generated node -> original nodes)
        node_tracking_table = transformer.node_tracking_table

        wrapper = _wrap(to_module(tree))
        written_attr_visitor = WrittenAttrs()
        wrapper.visit(written_attr_visitor)

        replacer = with_tracking(AttrReplacer)()
        attr_format = gen_free_prefix(tree, env, '_attr') + '_{}_{}'
        init_reads = []
        names_to_attr = {}
        seen = set()

        for written_attr in written_attr_visitor.written_attrs:
            d_attr = DeepNode(written_attr)
            if d_attr in seen:
                continue
            if not isinstance(written_attr.value, cst.Name):
                raise NotImplementedError(
                    'writing non name nodes is not supported')

            seen.add(d_attr)

            attr_name = attr_format.format(
                written_attr.value.value,
                written_attr.attr.value,
            )

            # using a normal node instead of the original node
            # is safe as parentheses don't matter:
            #  (name).attr == (name.attr) == name.attr
            norm = d_attr.normal_node
            names_to_attr[attr_name] = norm
            name = cst.Name(attr_name)
            replacer.add_replacement(written_attr, name)
            read = to_stmt(make_assign(name, norm))
            init_reads.append(read)

        # Replace references to attr with the name generated above
        tree = tree.visit(replacer)

        node_tracking_table = replacer.trace_origins(node_tracking_table)

        # Rewrite conditions to be ssa
        cond_prefix = gen_free_prefix(tree, env, '_cond')
        wrapper = _wrap(tree)
        name_tests = NameTests(cond_prefix)
        tree = wrapper.visit(name_tests)

        node_tracking_table = name_tests.trace_origins(node_tracking_table)

        # Transform to single return format
        wrapper = _wrap(tree)
        single_return = SingleReturn(env, names_to_attr, self.strict)
        tree = wrapper.visit(single_return)

        node_tracking_table = single_return.trace_origins(node_tracking_table)

        # insert the initial reads / final writes / return
        body = tree.body
        body = body.with_changes(body=(*init_reads, *body.body,
                                       *single_return.tail))
        tree = tree.with_changes(body=body)

        # perform ssa
        wrapper = _wrap(to_module(tree))
        ctxs = wrapper.resolve(ExpressionContextProvider)
        # These names were constructed in such a way that they are
        # guaranteed to be ssa and shouldn't be touched by the
        # transformer
        final_names = single_return.added_names | name_tests.added_names
        ssa_transformer = SSATransformer(env,
                                         ctxs,
                                         final_names,
                                         single_return.returning_blocks,
                                         strict=self.strict)
        tree = tree.visit(ssa_transformer)

        node_tracking_table = ssa_transformer.trace_origins(
            node_tracking_table)

        tree.validate_types_deep()
        # generate symbol table
        start_ln = pos_info[original_tree].start.line
        end_ln = pos_info[original_tree].end.line
        visitor = GenerateSymbolTable(
            node_tracking_table,
            ssa_transformer.original_names,
            pos_info,
            start_ln,
            end_ln,
        )

        tree.visit(visitor)
        metadata.setdefault('SYMBOL-TABLE', list()).append(
            (type(self), visitor.symbol_table))
        return tree, env, metadata
Example No. 23
def assign(_dict: MM, *_dicts: Mapping) -> MM:
    """Update ``_dict`` in place with each mapping, left to right."""
    for __dict in _dicts:
        _dict.update(__dict)
    return _dict
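
Usage sketch; later mappings win, as with chained dict.update calls:

assign({}, {'a': 1}, {'a': 2, 'b': 3})
# -> {'a': 2, 'b': 3}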