Example #1
def mustache(html, data={}, default=None, *outside):
  global GLOBALS

  # Check for max iterations
  if len(outside) > MAX_ITERATIONS: log(0, 'Stack overflow')
  # Check for too large HTML page
  if len(html) > MAX_PAGE_SIZE: log(0, 'Page is too large')

  if REGEX_LAYOUT_NAME.match(html): html = layout(html)

  count = 0
  while count < MAX_BLOCK_LOOPS:
    count += 1
    mt = REGEX_MUSTACHE_BLOCK.search(html)
    if mt is None: break

    tag, key, compare = mt.group(0), mt.group(1), mt.group(2)
    i, l = html.find('{{/%s}}' % key), 5 + len(key)
    if i < 0: log(0, 'Layout error: No end tag for %s' % (tag,))
    start, blk, nblk, end = html[:mt.start()], html[mt.end():i], '', html[i+l:]
    if '{{^%s}}' % key in blk: blk, nblk = blk.split('{{^%s}}' % key, 1)

    val = _getValue(key, default, data, *outside)

    if compare:
      cp = data
      for k in compare[1:].split('.'):
        tmp = collections.ChainMap(cp, *outside, GLOBALS, FUNCTIONS)
        cp = tmp.get(k)
        if cp is None: break  # stop descending if an intermediate key is missing
      val = val == cp

    if not val or val == '[%s]' % key:
      ht = mustache(nblk, data, *outside)
    elif isinstance(val, (tuple, list, set)):
      ht = ''
      for vv in val:
        dt = data
        if isinstance(vv, dict): dt = vv
        elif isinstance(vv, (tuple, list)):
          dt = { '$%d' % (i + 1): n for i, n in enumerate(vv)}
        ht += mustache(blk, dt, vv, data, *outside)
    elif callable(val):
      argv = mustache(blk, data, *outside).split(',')
      try: #XXX
        ht = val(*argv)
      except TypeError as te: raise TypeError('%r' % (argv,)) #XXX
    else:
      dt = data
      if isinstance(val, dict): dt = val
      ht = mustache(blk, dt, val, data, *outside)
    html = start + ht + end

  if count >= MAX_BLOCK_LOOPS:
    raise Exception('Maximum loop count reached: %d' % count)

  count = 0
  while count < MAX_VARIABLE_LOOPS:
    count += 1
    mt = REGEX_MUSTACHE_VARIABLE.search(html)
    if mt is None: break

    tp, key, arg = mt.groups()
    tag, rep = mt.group(0), '[no data]'

    val = _getValue(key, default, data, *outside)

    if callable(val):
      argv = tuple()
      if arg and arg[0] == ':': argv = arg[1:].split(',')
      rep = val(*argv)
    elif isinstance(val, (tuple, list, set)):
      rep = ', '.join(map(str, val))
    else:
      rep = str(val)

    html = html.replace(tag, rep)

  if count >= MAX_VARIABLE_LOOPS:
    raise Exception('Maximum loop count reached: %d' % count)

  return html
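The renderer above resolves dotted keys (in the compare branch, and presumably inside _getValue as well) by stacking the current data, the enclosing scopes and the module-level GLOBALS/FUNCTIONS maps into a ChainMap. A reduced, hypothetical sketch that isolates just that scoped-lookup idea:

import collections
from collections.abc import Mapping

def lookup(dotted_key, data, *outer_scopes, default=None):
    # Innermost mapping is searched first, mirroring the renderer's precedence.
    value = collections.ChainMap(data, *outer_scopes)
    for part in dotted_key.split('.'):
        if not isinstance(value, Mapping):
            return default
        value = value.get(part, default)
    return value

print(lookup('user.name', {'user': {'name': 'Ada'}}, {'user': {}}))  # Ada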
Example #2
    def __init__(self, **kwargs) -> None:
        default_kwargs = {'id': next(self.discord_id), 'name': 'channel', 'guild': MockGuild()}
        super().__init__(**collections.ChainMap(kwargs, default_kwargs))

        if 'mention' not in kwargs:
            self.mention = f"#{self.name}"
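This example and Examples #3, #25 and #27 all rely on the same idiom: caller-supplied kwargs are layered over a defaults dict, so explicit arguments shadow the defaults. A minimal self-contained sketch of that idiom (class and field names here are invented for illustration):

import collections

class Widget:
    def __init__(self, **kwargs):
        defaults = {'name': 'widget', 'enabled': True}
        merged = collections.ChainMap(kwargs, defaults)  # kwargs are searched first
        self.name = merged['name']
        self.enabled = merged['enabled']

print(Widget(name='custom').name)  # custom
print(Widget().enabled)            # True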
Example #3
 def __init__(self, **kwargs) -> None:
     default_kwargs = {'attachments': []}
     super().__init__(**collections.ChainMap(kwargs, default_kwargs))
     self.author = kwargs.get('author', MockMember())
     self.channel = kwargs.get('channel', MockTextChannel())
Example #4
import collections

d1 = {
    "one": 1,
    "two": 2,
    "three": 3,
    "four": 4,
}

d2 = {
    "two": -2,
    "three": -3,
    "five": 5,
}

d3 = {
    "two": -4,
    "three": -6,
    "one": -1,
}

d_1 = d1.copy()
d_1.update(d2)
d_1.update(d3)
print(d_1)

d_2 = collections.ChainMap(d3, d2, d1)
print(d_2)

print(d_1 == d_2)
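A difference worth noting: the copy-and-update dict built above is a snapshot, while the ChainMap keeps live references to d1, d2 and d3, so later changes to the underlying dicts show through the chain. A small self-contained illustration:

import collections

base = {'colour': 'red'}
override = {}
view = collections.ChainMap(override, base)
snapshot = {**base, **override}

base['colour'] = 'blue'    # mutate an underlying dict afterwards
print(view['colour'])      # blue -- the ChainMap reads through live references
print(snapshot['colour'])  # red  -- the merged copy does not change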
Example #5
 def transform_one(self, x):
     """Passes the data through each transformer and packs the results together."""
     return dict(collections.ChainMap(*(
         transformer.transform_one(x)
         for transformer in self.values()
     )))
Example #6
import collections
import builtins

dict1 = {'a': 1, 'b': 2}
dict2 = {'b': 3, 'c': 4}
dict3 = {'f': 5}

ord_dict = collections.OrderedDict()
ord_dict.update({'one': 1})
ord_dict.update({'two': 2})
ord_dict.update({'eleven': 11})
print(ord_dict)

chain = collections.ChainMap(dict1, dict2)

print("All the ChainMap contents are: ")
print(chain.maps)
print("All keys of ChainMap are: ")
print(chain.keys())
print("All keys (list,no repeat) are: ")
print(list(chain.keys()))
print("All values of ChainMap are: ")
print(chain.values())
print("All values (list) of ChainMap are: ")
print(list(chain.values()))
chain = chain.new_child(dict3)
print("Chain.newchild(dict3): ", chain)
print("chain['b']: ", chain['b'])

print("\n----Counter:-----")
ct = collections.Counter('Abracadabra')
Example #7
    def __init__(self, prevlevel, mode):
        if prevlevel is None:
            self.env = None
            self.argmap = collections.OrderedDict()

            self.toplevel_stmt = None
            self.stmt = None
            self.rel = None
            self.rel_hierarchy = {}
            self.pending_query = None

            self.clause = None
            self.toplevel_clause = None
            self.expr_exposed = None
            self.volatility_ref = None
            self.group_by_rels = {}

            self.disable_semi_join = set()
            self.unique_paths = set()
            self.force_optional = set()

            self.path_scope = collections.ChainMap()
            self.scope_tree = None

        else:
            self.env = prevlevel.env
            self.argmap = prevlevel.argmap

            self.toplevel_stmt = prevlevel.toplevel_stmt
            self.stmt = prevlevel.stmt
            self.rel = prevlevel.rel
            self.rel_hierarchy = prevlevel.rel_hierarchy
            self.pending_query = prevlevel.pending_query

            self.clause = prevlevel.clause
            self.toplevel_clause = prevlevel.toplevel_clause
            self.expr_exposed = prevlevel.expr_exposed
            self.volatility_ref = prevlevel.volatility_ref
            self.group_by_rels = prevlevel.group_by_rels

            self.disable_semi_join = prevlevel.disable_semi_join.copy()
            self.unique_paths = prevlevel.unique_paths.copy()
            self.force_optional = prevlevel.force_optional.copy()

            self.path_scope = prevlevel.path_scope
            self.scope_tree = prevlevel.scope_tree

            if mode in {
                    ContextSwitchMode.SUBREL, ContextSwitchMode.NEWREL,
                    ContextSwitchMode.SUBSTMT
            }:
                if self.pending_query and mode == ContextSwitchMode.SUBSTMT:
                    self.rel = self.pending_query
                else:
                    self.rel = pgast.SelectStmt()
                    if mode != ContextSwitchMode.NEWREL:
                        self.rel_hierarchy[self.rel] = prevlevel.rel

                self.pending_query = None
                self.clause = 'result'

            if mode == ContextSwitchMode.SUBSTMT:
                self.stmt = self.rel

            if mode == ContextSwitchMode.NEWSCOPE:
                self.path_scope = prevlevel.path_scope.new_child()
Example #8
def function18(value=collections.ChainMap()):  # [dangerous-default-value]
    """mutable, dangerous"""
    return value
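Pylint flags this because the ChainMap default is created once, at function definition time, and then shared by every call. The usual fix, sketched here (not taken from the linted project), is a None sentinel:

import collections

def function18_fixed(value=None):
    """Mutable default replaced by a per-call ChainMap."""
    if value is None:
        value = collections.ChainMap()
    return value

assert function18_fixed() is not function18_fixed()  # each call gets a fresh mapping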
Example #9
    def __init__(
        self,
        prevlevel: Optional[CompilerContextLevel],
        mode: ContextSwitchMode,
        *,
        env: Optional[Environment] = None,
        scope_tree: Optional[irast.ScopeTreeNode] = None,
    ) -> None:
        if prevlevel is None:
            assert env is not None
            assert scope_tree is not None

            self.env = env
            self.argmap = collections.OrderedDict()
            self.next_argument = itertools.count(1)

            self.singleton_mode = False

            self.toplevel_stmt = NO_STMT
            self.stmt = NO_STMT
            self.rel = NO_STMT
            self.rel_hierarchy = {}
            self.dml_stmts = {}
            self.parent_rel = None
            self.pending_query = None

            self.expr_exposed = None
            self.volatility_ref = None
            self.group_by_rels = {}

            self.disable_semi_join = set()
            self.force_optional = set()
            self.join_target_type_filter = {}

            self.path_scope = collections.ChainMap()
            self.scope_tree = scope_tree
            self.type_rel_overlays = collections.defaultdict(list)
            self.ptr_rel_overlays = collections.defaultdict(list)
            self.enclosing_dml = None

        else:
            self.env = prevlevel.env
            self.argmap = prevlevel.argmap
            self.next_argument = prevlevel.next_argument

            self.singleton_mode = prevlevel.singleton_mode

            self.toplevel_stmt = prevlevel.toplevel_stmt
            self.stmt = prevlevel.stmt
            self.rel = prevlevel.rel
            self.rel_hierarchy = prevlevel.rel_hierarchy
            self.dml_stmts = prevlevel.dml_stmts
            self.parent_rel = prevlevel.parent_rel
            self.pending_query = prevlevel.pending_query

            self.expr_exposed = prevlevel.expr_exposed
            self.volatility_ref = prevlevel.volatility_ref
            self.group_by_rels = prevlevel.group_by_rels

            self.disable_semi_join = prevlevel.disable_semi_join.copy()
            self.force_optional = prevlevel.force_optional.copy()
            self.join_target_type_filter = prevlevel.join_target_type_filter

            self.path_scope = prevlevel.path_scope
            self.scope_tree = prevlevel.scope_tree
            self.type_rel_overlays = prevlevel.type_rel_overlays
            self.ptr_rel_overlays = prevlevel.ptr_rel_overlays
            self.enclosing_dml = prevlevel.enclosing_dml

            if mode in {ContextSwitchMode.SUBREL, ContextSwitchMode.NEWREL,
                        ContextSwitchMode.SUBSTMT}:
                if self.pending_query and mode == ContextSwitchMode.SUBSTMT:
                    self.rel = self.pending_query
                else:
                    self.rel = pgast.SelectStmt()
                    if mode != ContextSwitchMode.NEWREL:
                        if prevlevel.parent_rel is not None:
                            parent_rel = prevlevel.parent_rel
                        else:
                            parent_rel = prevlevel.rel
                        self.rel_hierarchy[self.rel] = parent_rel

                self.pending_query = None
                self.parent_rel = None

            if mode == ContextSwitchMode.SUBSTMT:
                self.stmt = self.rel

            if mode == ContextSwitchMode.NEWSCOPE:
                self.path_scope = prevlevel.path_scope.new_child()
Example #10
 def __new__(cls, *args, **kwds):
     if cls._gorg is ChainMap:
         return collections.ChainMap(*args, **kwds)
     return _generic_new(collections.ChainMap, cls, *args, **kwds)
Example #11
import builtins
import collections

key = 'len'  # example value: the name to look up across scopes

# this is one way
builtin_vars = vars(builtins)
if key in locals():
    value = locals()[key]
elif key in globals():
    value = globals()[key]
elif key in builtin_vars:
    value = builtin_vars[key]

# this is a better way 
mappings = collections.ChainMap(globals(), locals(), vars(builtins))
value = mappings[key]
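One detail the snippet leaves implicit: indexing the ChainMap raises KeyError when the name is missing from every map, so .get() (or a try/except) is the usual guard. Continuing the mappings chain built above:

print(mappings.get('len'))                # <built-in function len>
print(mappings.get('no_such_name', '?'))  # '?' -- missing everywhere, no KeyError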

Example #12
 def __new__(cls, *args, **kwds):
     if _geqv(cls, ChainMap):
         return collections.ChainMap(*args, **kwds)
     return _generic_new(collections.ChainMap, cls, *args, **kwds)
Example #13
class Converter:
    __CONVERSIONS = collections.ChainMap(
        _to_internal_conversions({
            int: marshmallow.fields.Integer,
            float: marshmallow.fields.Float,
            str: marshmallow.fields.String,
            bool: marshmallow.fields.Boolean,
            datetime.datetime: marshmallow.fields.DateTime,
            datetime.time: marshmallow.fields.Time,
            datetime.timedelta: marshmallow.fields.TimeDelta,
            datetime.date: marshmallow.fields.Date,
            decimal.Decimal: marshmallow.fields.Decimal,
            uuid.UUID: marshmallow.fields.UUID,
            typing.Any: marshmallow.fields.Raw,
            typing.Mapping: marshmallow.fields.Mapping,
            typing.MutableMapping: marshmallow.fields.Mapping,
            typing.Dict: marshmallow.fields.Dict,
            typing.List: marshmallow.fields.List,
            typing.Tuple: marshmallow.fields.Tuple,
            (typing.Tuple, list): mm.fields.VarTuple,
            typing.Callable: marshmallow.fields.Function,
            enum.Enum: mm.fields.Enum,
            typing.Union: mm.fields.Union,
            mm.typing.DateTime: marshmallow.fields.DateTime,
            mm.typing.AwareDateTime: marshmallow.fields.AwareDateTime,
            mm.typing.NaiveDateTime: marshmallow.fields.NaiveDateTime,
            mm.typing.Constant: marshmallow.fields.Constant,
            mm.typing.Pluck: marshmallow.fields.Pluck,
            mm.typing.Number: marshmallow.fields.Number,
            mm.typing.Url: marshmallow.fields.Url,
            mm.typing.Email: marshmallow.fields.Email,
        }))

    def __init__(self, conversions=None):
        if conversions is not None:
            conversions = _to_internal_conversions(conversions)
        self.conversions = type(self).__CONVERSIONS.new_child(conversions)

    @staticmethod
    def _optional_mutations(type_, metadata):
        metadata.setdefault("default", None)
        metadata.setdefault("missing", None)
        metadata["required"] = False

    @staticmethod
    def _new_type_mutations(type_, metadata):
        metadata.setdefault("description", type_.__name__)

    def _make_type(self, type_, metadata, arguments):
        if type_ not in self.conversions:
            raise ValueError(f"No conversion for type {type_}")
        class_ = self.conversions[type_]
        arguments = (self._convert(a, metadata=metadata) for a in arguments)
        if hasattr(class_, "from_typing"):
            return class_.from_typing(self, arguments, **metadata)
        return class_(*arguments, **metadata)

    def _handle_basic(self, type_, type_info, metadata):
        return self._make_type(type_, metadata,
                               type_info.args if type_info else ())

    def _handle_tuple(self, type_, type_info, metadata):
        tuple_type = typing.Tuple
        args = type_info.args if type_info else ()
        if not args or ... in args:
            tuple_type = (typing.Tuple, list)
            args = (args[0] if args else typing.Any, )
        return self._make_type(tuple_type, metadata, args)

    def _handle_default(self, type_, type_info, metadata):
        warnings.warn(f"Unknown type {type_!r}.")
        return self._make_type(typing.Any, metadata, ())

    def _handle_enum(self, type_, type_info, metadata):
        metadata.setdefault("enum", type_)
        return self._make_type(enum.Enum, metadata, ())

    def _handle_union(self, type_, type_info, metadata):
        args = type_info.args
        nonnone_args = [a for a in args if a is not NoneType]
        if not (type_info.unwrapped is typing.Optional
                or len(nonnone_args) == 1):
            return self._make_type(typing.Union, metadata, args)
        elif typing.Optional in self.conversions:
            return self._make_type(typing.Optional, metadata, args)
        else:
            new_type = (nonnone_args + [typing.Any])[0]
            self._optional_mutations(type_, metadata)
            return self._convert(new_type, metadata=metadata)

    def _is_new_type(self, type_):
        return getattr(type_, "__supertype__",
                       None) and inspect.isfunction(type_)

    def _handle_new_type(self, type_, type_info, metadata):
        self._new_type_mutations(type_, metadata)
        return self._convert(type_.__supertype__, metadata=metadata)

    def _handle_other(self, type_, type_info, metadata):
        raise ValueError(f"Unknown type {type_}.")

    def _convert(self, type_, *, metadata):
        type_info = typing_inspect_lib.get_type_info(type_)
        if type_info:
            unwrapped = type_info.unwrapped
        else:
            unwrapped = type_
        unwrapped = _LITERAL_CONVERT.get(unwrapped, unwrapped)

        if unwrapped is typing.Tuple:
            return self._handle_tuple(type_, type_info, metadata)

        if type_info and (type_info.unwrapped is typing.Optional
                          or type_info.unwrapped is typing.Union):
            return self._handle_union(type_, type_info, metadata)

        if unwrapped in self.conversions:
            return self._handle_basic(unwrapped, type_info, metadata)

        if isinstance(type_, enum.EnumMeta):
            return self._handle_enum(type_, type_info, metadata)

        if type_info is None:
            return self._handle_default(type_, type_info, metadata)

        if self._is_new_type(type_):
            return self._handle_new_type(type_, type_info, metadata)

        return self._handle_other(type_, type_info, metadata)

    def convert(self, type_, *, metadata):
        return self._convert(type_, metadata=metadata.copy())
Example #14
 def _load(self):
     for catalog in self._catalogs:
         catalog._load()
     self._entries = collections.ChainMap(*(catalog._entries
                                            for catalog in self._catalogs))
Example #15
 def sensors(self) -> typing.Mapping[str, NetworkSensor]:
     sensors = collections.ChainMap(*(n.sensors for n in self._nodes))
     return dict(sensors)
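Flattening with dict() keeps ChainMap's first-wins precedence: if two nodes expose a sensor under the same name, the node listed first supplies the value. A tiny sketch with invented node data:

import collections

node_a = {'temp': 'sensor-A'}
node_b = {'temp': 'sensor-B', 'humidity': 'sensor-B'}

merged = dict(collections.ChainMap(node_a, node_b))
print(merged)  # {'temp': 'sensor-A', 'humidity': 'sensor-B'}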
Example #16
 def enums(self) -> Mapping[str, wrappers.EnumType]:
     """Return a map of all enums available in the API."""
     return collections.ChainMap(
         {},
         *[p.all_enums for p in self.protos.values()],
     )
Example #17
 def aliasses(self):
     return dict(
         collections.ChainMap(
             AliasFile.get_file(local=True).aliasses,
             AliasFile.get_file(local=False).aliasses))
Example #18
 def messages(self) -> Mapping[str, wrappers.MessageType]:
     """Return a map of all messages available in the API."""
     return collections.ChainMap(
         {},
         *[p.all_messages for p in self.protos.values()],
     )
Example #19
x_factory_mapping = {
    List: list,
    Deque: collections.deque,
    Tuple: tuple,
    Set: set,
    FrozenSet: frozenset,
    MutableSet: set,
    Dict: lambda items: {k: v
                         for k, v in items},
    Mapping: lambda items: {k: v
                            for k, v in items},
    MutableMapping: lambda items: {k: v
                                   for k, v in items},
    ChainMap: lambda items: collections.ChainMap(*({
        k: v
    } for k, v in items))
}


# noinspection PyCallingNonCallable
def check_one_arg_generic(type_, value_info, use_bytes, use_enum,
                          use_datetime):
    x_type, x_value, x_value_dumped = value_info

    @dataclass
    class DataClass(DataClassDictMixin):
        x: type_[x_type]

    x_factory = x_factory_mapping[type_]
    x = x_factory([x_value for _ in range(3)])
Example #20
 def services(self) -> Mapping[str, wrappers.Service]:
     """Return a map of all services available in the API."""
     return collections.ChainMap(
         {},
         *[p.services for p in self.protos.values()],
     )
Example #21
    def _gen_combined_metadata(self) -> ChainMap[Metadata, MetadataTypes]:
        if self.parent:

            return collections.ChainMap(self.component_metadata,
                                        self.parent.metadata)
        return collections.ChainMap(self.component_metadata)
Example #22
 def api_enums(self) -> Mapping[str, wrappers.EnumType]:
     return collections.ChainMap(
         {},
         self.proto_enums,
         *[p.all_enums for p in self.prior_protos.values()],
     )
Example #23
 def get_dispatch_table(self):
     return collections.ChainMap({}, pickle.dispatch_table)
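The empty dict in front is the point of this pattern: writes land in that first map, so pickle's shared dispatch_table is read through but never mutated. A short sketch of the effect (the Point class is made up):

import collections
import pickle

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

table = collections.ChainMap({}, pickle.dispatch_table)
table[Point] = lambda p: (Point, (p.x, p.y))  # registration goes to the front map

print(Point in table)                  # True
print(Point in pickle.dispatch_table)  # False -- the global table is untouched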
Example #24
 def api_messages(self) -> Mapping[str, wrappers.MessageType]:
     return collections.ChainMap(
         {},
         self.proto_messages,
         *[p.all_messages for p in self.prior_protos.values()],
     )
Example #25
    def __init__(self, **kwargs) -> None:
        default_kwargs = {'name': 'user', 'id': next(self.discord_id), 'bot': False}
        super().__init__(**collections.ChainMap(kwargs, default_kwargs))

        if 'mention' not in kwargs:
            self.mention = f"@{self.name}"
Example #26
class Intel(linear.LinearlyMappedLayer):
    """Translation Layer for the Intel IA32 memory mapping."""

    _entry_format = "<I"
    _page_size_in_bits = 12
    _bits_per_register = 32
    # NOTE: _maxphyaddr is MAXPHYADDR as defined in the Intel specs *NOT* the maximum physical address
    _maxphyaddr = 32
    _maxvirtaddr = _maxphyaddr
    _structure = [('page directory', 10, False), ('page table', 10, True)]
    _direct_metadata = collections.ChainMap(
        {'architecture': 'Intel32'}, {'mapped': True},
        interfaces.layers.TranslationLayerInterface._direct_metadata)

    def __init__(self,
                 context: interfaces.context.ContextInterface,
                 config_path: str,
                 name: str,
                 metadata: Optional[Dict[str, Any]] = None) -> None:
        super().__init__(context=context,
                         config_path=config_path,
                         name=name,
                         metadata=metadata)
        self._base_layer = self.config["memory_layer"]
        self._swap_layers = []  # type: List[str]
        self._page_map_offset = self.config["page_map_offset"]

        # Assign constants
        self._initial_position = min(self._maxvirtaddr,
                                     self._bits_per_register) - 1
        self._initial_entry = self._mask(self._page_map_offset,
                                         self._initial_position, 0) | 0x1
        self._entry_size = struct.calcsize(self._entry_format)
        self._entry_number = self.page_size // self._entry_size

        # These can vary depending on the type of space
        self._index_shift = int(
            math.ceil(math.log2(struct.calcsize(self._entry_format))))

    @classproperty
    def page_size(cls) -> int:
        """Page size for the intel memory layers.

        All Intel layers work on 4096 byte pages
        """
        return 1 << cls._page_size_in_bits

    @classproperty
    def bits_per_register(cls) -> int:
        """Returns the bits_per_register to determine the range of an
        IntelTranslationLayer."""
        return cls._bits_per_register

    @classproperty
    def minimum_address(cls) -> int:
        return 0

    @classproperty
    def maximum_address(cls) -> int:
        return (1 << cls._maxvirtaddr) - 1

    @classproperty
    def structure(cls) -> List[Tuple[str, int, bool]]:
        return cls._structure

    @staticmethod
    def _mask(value: int, high_bit: int, low_bit: int) -> int:
        """Returns the bits of a value between highbit and lowbit inclusive."""
        high_mask = (1 << (high_bit + 1)) - 1
        low_mask = (1 << low_bit) - 1
        mask = (high_mask ^ low_mask)
        # print(high_bit, low_bit, bin(mask), bin(value))
        return value & mask

    @staticmethod
    def _page_is_valid(entry: int) -> bool:
        """Returns whether a particular page is valid based on its entry."""
        return bool(entry & 1)

    def _translate(self, offset: int) -> Tuple[int, int, str]:
        """Translates a specific offset based on paging tables.

        Returns the translated offset, the contiguous pagesize that the
        translated address lives in and the layer_name that the address
        lives in
        """
        entry, position = self._translate_entry(offset)

        # Now we're done
        if not self._page_is_valid(entry):
            raise exceptions.PagedInvalidAddressException(
                self.name, offset, position + 1, entry,
                "Page Fault at entry {} in page entry".format(hex(entry)))
        page = self._mask(entry, self._maxphyaddr - 1,
                          position + 1) | self._mask(offset, position, 0)

        return page, 1 << (position + 1), self._base_layer

    def _translate_entry(self, offset):
        """Translates a specific offset based on paging tables.

        Returns the translated entry value
        """
        # Setup the entry and how far we are through the offset
        # Position maintains the number of bits left to process
        # We or with 0x1 to ensure our page_map_offset is always valid
        position = self._initial_position
        entry = self._initial_entry

        # Run through the offset in various chunks
        for (name, size, large_page) in self._structure:
            # Check we're valid
            if not self._page_is_valid(entry):
                raise exceptions.PagedInvalidAddressException(
                    self.name, offset, position + 1, entry,
                    "Page Fault at entry " + hex(entry) + " in table " + name)
            # Check if we're a large page
            if large_page and (entry & (1 << 7)):
                # We're a large page, the rest is finished below
                # If we want to implement PSE-36, it would need to be done here
                break
            # Figure out how much of the offset we should be using
            start = position
            position -= size
            index = self._mask(offset, start, position + 1) >> (position + 1)

            # Grab the base address of the table we'll be getting the next entry from
            base_address = self._mask(entry, self._maxphyaddr - 1,
                                      size + self._index_shift)

            table = self._get_valid_table(base_address)
            if table is None:
                raise exceptions.PagedInvalidAddressException(
                    self.name, offset, position + 1, entry,
                    "Page Fault at entry " + hex(entry) + " in table " + name)

            # Read the data for the next entry
            entry_data = table[(
                index << self._index_shift):(index << self._index_shift) +
                               self._entry_size]

            # Read out the new entry from memory
            entry, = struct.unpack(self._entry_format, entry_data)

        return entry, position

    @functools.lru_cache(1025)
    def _get_valid_table(self, base_address: int) -> Optional[bytes]:
        """Extracts the table, validates it and returns it if it's valid."""
        table = self._context.layers.read(self._base_layer, base_address,
                                          self.page_size)

        # If the table is entirely duplicates, then mark the whole table as bad
        if (table == table[:self._entry_size] * self._entry_number):
            return None
        return table

    def is_valid(self, offset: int, length: int = 1) -> bool:
        """Returns whether the address offset can be translated to a valid
        address."""
        try:
            # TODO: Consider reimplementing this, since calls to mapping can call is_valid
            return all([
                self._context.layers[layer].is_valid(mapped_offset)
                for _, _, mapped_offset, _, layer in self.mapping(
                    offset, length)
            ])
        except exceptions.InvalidAddressException:
            return False

    def mapping(
        self,
        offset: int,
        length: int,
        ignore_errors: bool = False
    ) -> Iterable[Tuple[int, int, int, int, str]]:
        """Returns a sorted iterable of (offset, sublength, mapped_offset, mapped_length, layer)
        mappings.

        This allows translation layers to provide maps of contiguous
        regions in one layer
        """
        if length == 0:
            try:
                mapped_offset, _, layer_name = self._translate(offset)
                if not self._context.layers[layer_name].is_valid(
                        mapped_offset):
                    raise exceptions.InvalidAddressException(
                        layer_name=layer_name, invalid_address=mapped_offset)
            except exceptions.InvalidAddressException:
                if not ignore_errors:
                    raise
                return
            yield offset, length, mapped_offset, length, layer_name
            return
        while length > 0:
            try:
                chunk_offset, page_size, layer_name = self._translate(offset)
                chunk_size = min(page_size - (chunk_offset % page_size),
                                 length)
                if not self._context.layers[layer_name].is_valid(
                        chunk_offset, chunk_size):
                    raise exceptions.InvalidAddressException(
                        layer_name=layer_name, invalid_address=chunk_offset)
            except (exceptions.PagedInvalidAddressException,
                    exceptions.InvalidAddressException) as excp:
                if not ignore_errors:
                    raise
                # We can jump more if we know where the page fault failed
                if isinstance(excp, exceptions.PagedInvalidAddressException):
                    mask = (1 << excp.invalid_bits) - 1
                else:
                    mask = (1 << self._page_size_in_bits) - 1
                length_diff = (mask + 1 - (offset & mask))
                length -= length_diff
                offset += length_diff
            else:
                yield offset, chunk_size, chunk_offset, chunk_size, layer_name
                length -= chunk_size
                offset += chunk_size

    @property
    def dependencies(self) -> List[str]:
        """Returns a list of the lower layer names that this layer is dependent
        upon."""
        return [self._base_layer] + self._swap_layers

    @classmethod
    def get_requirements(
            cls) -> List[interfaces.configuration.RequirementInterface]:
        return [
            requirements.TranslationLayerRequirement(name='memory_layer',
                                                     optional=False),
            requirements.LayerListRequirement(name='swap_layers',
                                              optional=True),
            requirements.IntRequirement(name='page_map_offset',
                                        optional=False),
            requirements.IntRequirement(name='kernel_virtual_offset',
                                        optional=True),
            requirements.StringRequirement(name='kernel_banner', optional=True)
        ]
Example #27
 def __init__(self, **kwargs) -> None:
     default_kwargs = {'id': next(self.discord_id), 'recipient': MockUser(), "me": MockUser()}
     super().__init__(**collections.ChainMap(kwargs, default_kwargs))
Example #28
def scope_ast(nodes, scopes=None, return_types=None):
    if scopes is None:
        # We start off with a global scope.
        scopes = collections.ChainMap()

    if return_types is None:
        return_types = {}

    scoped_nodes = []
    returned_names = {}

    _varnames = ('literal%d' % i for i in itertools.count(1))

    def _create_variable(value, var_type):
        variable = next(_varnames)

        assert variable not in scopes
        scopes[variable] = var_type

        scoped_nodes.append(
            ast.Declaration(None, None, var_type, variable, value))

        return ast.Name(None, None, variable)

    def _store_literals(value):
        if isinstance(value, ast.ExpressionStatement):
            # TODO: Save the whole expression's result to a temporary var.
            value.expression = _store_literals(value.expression)
            return value
        elif isinstance(value, ast.FunctionCall):
            for j, arg in enumerate(value.arguments):
                value.arguments[j] = _store_literals(arg)

            if value.function.name in return_types:
                return _create_variable(value,
                                        return_types[value.function.name])
            # XXX: This return statement is only executed when the function is
            # a builtin, ergo it's not in return_types. Maybe we could
            # circumvent this by providing return_types with the builtins'
            # types?
            return value
        elif isinstance(value, ast.Return):
            value.value = _store_literals(value.value)
            returned_names[value.value.name] = len(scoped_nodes)
            return value
        elif not isinstance(value, (ast.String, ast.Integer)):
            print('*** NOT LITERAL:', value)
            # it's not a literal
            return value

        print('*** WOLO WOLO', value)
        name = _create_variable(value,
                                ast.Name(None, None, value.__class__.__name__))

        return name

    for node in nodes:
        if isinstance(node, ast.Declaration):
            variable = node.variable
            if variable in scopes:
                raise RuntimeError("Variable already declared!")
            else:
                scopes[variable] = node.type
        elif isinstance(node, ast.Return) and isinstance(node.value, ast.Name):
            # We have to account for the fact that the return statement is
            # added to scoped_nodes later.
            returned_names[node.value.name] = len(scoped_nodes) + 1
        elif isinstance(node, ast.FunctionDef):
            return_types[node.name] = node.returntype

        node = _store_literals(node)
        if isinstance(node, (ast.FunctionDef, ast.If)):
            # it creates a new scope
            node.body[:] = scope_ast(node.body, scopes.new_child())
        scoped_nodes.append(node)

    decrefs = [
        ast.DecRef(name) for name in scopes.maps[0]
        if name not in returned_names
    ]

    # XXX: Figure out why this works.
    if returned_names:
        for j in set(returned_names.values()) | {len(scoped_nodes) - 1}:
            scoped_nodes[j:j] = decrefs
    else:
        scoped_nodes.extend(decrefs)

    return scoped_nodes
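scope_ast leans on two ChainMap features: new_child() opens a nested scope for function and if bodies, and maps[0] lists only the names declared in the current scope (used for the DecRef cleanup). A stripped-down illustration of both:

import collections

global_scope = collections.ChainMap({'x': 'int'})
inner = global_scope.new_child()  # nested scope layered on top
inner['y'] = 'str'

print(inner['x'])           # int -- outer names remain visible
print(list(inner.maps[0]))  # ['y'] -- only names declared in this scope
print('y' in global_scope)  # False -- the child scope does not leak outward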
Example #29
import collections

a = {'a': 'A', 'c': 'C'}
b = {'b': 'B', 'c': 'D'}

m = collections.ChainMap(a, b)
print('Before:', m)
m['c'] = 'E'
print('After:', m)
print('a:', a)
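Because writes always go to the first map, the assignment above changes a in place. Chaining an empty dict in front via new_child() is the usual way to keep the source dicts untouched:

import collections

a = {'a': 'A', 'c': 'C'}
b = {'b': 'B', 'c': 'D'}

m = collections.ChainMap(a, b).new_child()  # empty dict in front receives writes
m['c'] = 'E'
print(m['c'])  # E
print(a['c'])  # C -- the original mapping is unchanged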
Example #30
 def _make_entries_container(self):
     return collections.ChainMap(*(catalog._entries
                                   for catalog in self._catalogs))