Example #1
    def __init__(self, entity_id: str, state: Any,
                 attributes: Optional[Dict] = None,
                 last_changed: Optional[datetime.datetime] = None,
                 last_updated: Optional[datetime.datetime] = None,
                 context: Optional[Context] = None,
                 # Temp, because database can still store invalid entity IDs
                 # Remove with 1.0 or in 2020.
                 temp_invalid_id_bypass: Optional[bool] = False) -> None:
        """Initialize a new state."""
        state = str(state)

        if not valid_entity_id(entity_id) and not temp_invalid_id_bypass:
            raise InvalidEntityFormatError((
                "Invalid entity id encountered: {}. "
                "Format should be <domain>.<object_id>").format(entity_id))

        if not valid_state(state):
            raise InvalidStateError((
                "Invalid state encountered for entity id: {}. "
                "State max length is 255 characters.").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = state  # type: str
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = last_updated or dt_util.utcnow()
        self.last_changed = last_changed or self.last_updated
        self.context = context or Context()
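
The `MappingProxyType(attributes or {})` idiom above accepts `None` and returns a read-only mapping, so a state's attributes cannot be mutated after construction. A minimal sketch of just that idiom, using only the standard library:

from types import MappingProxyType

def freeze_attributes(attributes=None):
    # None becomes an empty mapping; the proxy rejects all mutation.
    return MappingProxyType(attributes or {})

attrs = freeze_attributes({'friendly_name': 'Kitchen Light'})
print(attrs['friendly_name'])      # Kitchen Light
try:
    attrs['brightness'] = 255      # any write raises TypeError
except TypeError:
    print('attributes are read-only')
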
Example #2
    def __init__(self, key, mass_fractions, atomic_fractions, formula):
        """
        Private constructor. It should never be used.
        """
        if key != Composition._key:
            raise TypeError('Composition cannot be created using constructor')
        if set(mass_fractions.keys()) != set(atomic_fractions.keys()):
            raise ValueError('Mass and atomic fractions must have the same elements')

        self.mass_fractions = MappingProxyType(mass_fractions)
        self.atomic_fractions = MappingProxyType(atomic_fractions)
        self._formula = formula
Example #3
    def __init__(self, entity_id, state, attributes=None, last_changed=None,
                 last_updated=None):
        """Initialize a new state."""
        if not valid_entity_id(entity_id):
            raise InvalidEntityFormatError((
                "Invalid entity id encountered: {}. "
                "Format should be <domain>.<object_id>").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = str(state)
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = last_updated or dt_util.utcnow()
        self.last_changed = last_changed or self.last_updated
Example #4
def make_crud_args(args: argparse.Namespace,
                   presets: MappingProxyType = MappingProxyType({})):
    # Copy the selected preset so repeated calls never mutate the shared mapping.
    res = dict(presets.get(args.access_preset, {}))

    relevant_cli_arguments = {k: v for k, v in vars(args).items()
                              if k in ('aws_access_key_id',
                                       'bucket_name',
                                       'endpoint_url',
                                       'aws_secret_access_key') and v is not None}

    res.update(relevant_cli_arguments)

    return res
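
A hypothetical invocation of the function above (the preset name and credential values are illustrative, not part of the original): the preset supplies defaults, and any non-None CLI argument overrides them.

import argparse
from types import MappingProxyType

presets = MappingProxyType({
    'minio-local': {'endpoint_url': 'http://localhost:9000'},
})
args = argparse.Namespace(
    access_preset='minio-local',
    bucket_name='test-bucket',
    endpoint_url=None,                 # None values are filtered out
    aws_access_key_id='AKIA-EXAMPLE',
    aws_secret_access_key=None,
)
print(make_crud_args(args, presets))
# {'endpoint_url': 'http://localhost:9000', 'bucket_name': 'test-bucket',
#  'aws_access_key_id': 'AKIA-EXAMPLE'}
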
Example #5
    def __init__(self, entity_id, state, attributes=None, last_changed=None,
                 last_updated=None):
        """Initialize a new state."""
        if not valid_entity_id(entity_id):
            raise InvalidEntityFormatError((
                "Invalid entity id encountered: {}. "
                "Format should be <domain>.<object_id>").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = str(state)
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = dt_util.strip_microseconds(
            last_updated or dt_util.utcnow())

        # Strip microsecond from last_changed else we cannot guarantee
        # state == State.from_dict(state.as_dict())
        # This behavior occurs because to_dict uses datetime_to_str
        # which does not preserve microseconds
        self.last_changed = dt_util.strip_microseconds(
            last_changed or self.last_updated)
Example #6
    def __init__(self, entity_id: str, state: Any,
                 attributes: Optional[Dict] = None,
                 last_changed: Optional[datetime.datetime] = None,
                 last_updated: Optional[datetime.datetime] = None,
                 context: Optional[Context] = None) -> None:
        """Initialize a new state."""
        state = str(state)

        if not valid_entity_id(entity_id):
            raise InvalidEntityFormatError((
                "Invalid entity id encountered: {}. "
                "Format should be <domain>.<object_id>").format(entity_id))

        if not valid_state(state):
            raise InvalidStateError((
                "Invalid state encountered for entity id: {}. "
                "State max length is 255 characters.").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = state
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = last_updated or dt_util.utcnow()
        self.last_changed = last_changed or self.last_updated
        self.context = context or Context()
Example #7
cards_of_deck = MappingProxyType(OrderedDict((
    (deck['Miscellaneous Mayhem'], (
        card['Badyear Git'],
        card['Sprinkler Malfunction'],
        card['Eclipse'],
        card['Fanatic Invasion'],
        card['Friendly Fans'],
        card['Rowdy Fans'],
        card['Heckler'],
        card['Hometown Fans'],
        card['Incoming!'],
        card['Rogue Wizard'],
        card['Ball Clone'],
        card['Johnny Waterboy'],
        card['That Babe\'s Got Talent!'],
        )),
    (deck['Special Team Plays'], (
        card['Come To Papa!'],
        card['Dogged Defense'],
        card['Flea Flicker'],
        card['Fumblerooski'],
        card['Going the Extra Mile'],
        card['Heroic Leap'],
        card['New Blocking Scheme'],
        card['Perfect Kick'],
        card['Option Play'],
        card['Punt'],
        card['Spectacular Catch'],
        card['Suicide Blitz'],
        card['Wake Up Call'],
        )),
    (deck['Magic Items'], (
        card['Beguiling Bracers'],
        card['Belt of Invunerability'],
        card['Fawndough\'s Headband'],
        card['Force Shield'],
        card['Gikta\'s Strength of Da Bear'],
        card['Gloves of Holding'],
        card['Inertia Dampner'],
        card['Lucky Charm'],
        card['Magic Gloves of Jark Longarm'],
        card['Good Old Magic Codpiece'],
        card['Rabbit\'s Foot'],
        card['Ring of Teleportation'],
        card['Wand of Smashing'],
        )),
    (deck['Dirty Tricks'], (
        card['Blatant Foul'],
        card['Chop Block'],
        card['Custard Pie'],
        card['Distract'],
        card['Greased Shoes'],
        card['Gromskull\'s Exploding Runes'],
        card['Illegal Substitution'],
        card['Kicking Boots'],
        card['Pit Trap'],
        card['Spiked Ball'],
        card['Stolen Playbook'],
        card['Trampoline Trap'],
        card['Witch\'s Brew'],
        )),
    (deck['Good Karma'], (
        card['All Out Blitz'],
        card['Banana Skin'],
        card['Butterfingers'],
        card['Chainsaw'],
        card['Dazed and Confused'],
        card['Doc Bonesaw'],
        card['Extra Training'],
        card['Fan Uproar'],
        card['Hurry Up Offense'],
        card['Intensive Training'],
        card['Unsportsmanlike Conduct'],
        card['Knutt\'s Spell of Awesome Strength'],
        card['Lewd Maneuvers'],
        card['Lurve Potion'],
        card['Magic Helmet'],
        card['Miracle Worker'],
        card['One with the Kicker'],
        card['Razzle Dazzle'],
        card['Suitable Pitch'],
        card['Rune of Fear'],
        card['Scutt\'s Scroll of Weather Magic'],
        card['Stiletto'],
        card['Team Anthem'],
        card['The Fan'],
        card['The Wall'],
        card['Woof Woof!'],
        )),
    (deck['Random Events'], (
        card['Bad Habits'],
        card['Ballista'],
        card['Blackmail'],
        card['Buzzing'],
        card['Duh, Where Am I?'],
        card['Ego Trip'],
        card['Zap!'],
        card['Gimme That!'],
        card['Iron Man'],
        card['Kid Gloves'],
        card['Knuckledusters'],
        card['Magic Sponge'],
        card['Mine'],
        card['Not-So-Secret Weapon'],
        card['Orcidas Sponsorship'],
        card['Rakarth\'s Curse of Petty Spite'],
        card['Tackling Machine'],
        card['Get \'Em Lads!'],
        )),
    (deck['Desperate Measures'], (
        card['Assassin'],
        card['Doom and Gloom'],
        card['Da Freight Train'],
        card['Morley\'s Revenge'],
        card['I am the Greatest'],
        card['Mindblow'],
        card['Come On Boys!'],
        card['Mysterious Old Medicine Man'],
        )),
    )))
Example #8
 def __setstate__(self, state):
     self.mass_fractions = MappingProxyType(state.get('mass_fractions', {}))
     self.atomic_fractions = MappingProxyType(state.get('atomic_fractions', {}))
     self._formula = state.get('formula', '')
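
The re-wrapping in __setstate__ matters because a mappingproxy cannot be pickled directly (CPython raises TypeError), so the paired __getstate__ (see Example #9) stores plain dicts and the proxy is rebuilt on load. A self-contained sketch of the same round trip:

import pickle
from types import MappingProxyType

class Frozen:
    def __init__(self, data):
        self.data = MappingProxyType(dict(data))

    def __getstate__(self):
        return {'data': dict(self.data)}          # plain dict: picklable

    def __setstate__(self, state):
        self.data = MappingProxyType(state.get('data', {}))

clone = pickle.loads(pickle.dumps(Frozen({'a': 1})))
print(clone.data)                                  # mappingproxy({'a': 1})
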
Example #9
class Composition:
    """
    Defines a composition of a compound.

    To create a composition, use the class methods:

        - :meth:`from_pure`
        - :meth:`from_formula`
        - :meth:`from_mass_fractions`
        - :meth:`from_atomic_fractions`

    Use the following attributes to access the composition values:

- :attr:`mass_fractions`: :class:`dict` where the keys are atomic numbers and the values are weight fractions.
        - :attr:`atomic_fractions`: :class:`dict` where the keys are atomic numbers and the values are atomic fractions.
        - :attr:`formula`: chemical formula

    The composition object is immutable, i.e. it cannot be modified once created.
    Equality can be checked.
    It is hashable.
    It can be pickled or copied.
    """

    _key = object()
    PRECISION = 1e-9  # 1 ppb

    def __init__(self, key, mass_fractions, atomic_fractions, formula):
        """
        Private constructor. It should never be used.
        """
        if key != Composition._key:
            raise TypeError('Composition cannot be created using constructor')
        if set(mass_fractions.keys()) != set(atomic_fractions.keys()):
            raise ValueError('Mass and atomic fractions must have the same elements')

        self.mass_fractions = MappingProxyType(mass_fractions)
        self.atomic_fractions = MappingProxyType(atomic_fractions)
        self._formula = formula

    @classmethod
    def from_pure(cls, z):
        """
        Creates a pure composition.

        Args:
            z (int): atomic number
        """
        return cls(cls._key, {z: 1.0}, {z: 1.0}, pyxray.element_symbol(z))

    @classmethod
    def from_formula(cls, formula):
        """
        Creates a composition from a chemical formula.

        Args:
            formula (str): chemical formula
        """
        atomic_fractions = convert_formula_to_atomic_fractions(formula)
        return cls.from_atomic_fractions(atomic_fractions)

    @classmethod
    def from_mass_fractions(cls, mass_fractions, formula=None):
        """
        Creates a composition from a mass fraction :class:`dict`.

        Args:
            mass_fractions (dict): mass fraction :class:`dict`.
                The keys are atomic numbers and the values weight fractions.
                Wildcards are accepted, e.g. ``{5: '?', 25: 0.4}``, where boron
                will get a mass fraction of 0.6.
            formula (str): optional chemical formula for the composition.
                If ``None``, a formula will be generated for the composition.
        """
        mass_fractions = process_wildcard(mass_fractions)
        atomic_fractions = convert_mass_to_atomic_fractions(mass_fractions)
        if not formula:
            formula = generate_name(atomic_fractions)
        return cls(cls._key, mass_fractions, atomic_fractions, formula)

    @classmethod
    def from_atomic_fractions(cls, atomic_fractions, formula=None):
        """
        Creates a composition from an atomic fraction :class:`dict`.

        Args:
            atomic_fractions (dict): atomic fraction :class:`dict`.
                The keys are atomic numbers and the values atomic fractions.
                Wildcards are accepted, e.g. ``{5: '?', 25: 0.4}``, where boron
                will get an atomic fraction of 0.6.
            formula (str): optional chemical formula for the composition.
                If ``None``, a formula will be generated for the composition.
        """
        atomic_fractions = process_wildcard(atomic_fractions)
        mass_fractions = convert_atomic_to_mass_fractions(atomic_fractions)
        if not formula:
            formula = generate_name(atomic_fractions)
        return cls(cls._key, mass_fractions, atomic_fractions, formula)

    def __len__(self):
        return len(self.mass_fractions)

    def __contains__(self, z):
        return z in self.mass_fractions

    def __iter__(self):
        return iter(self.mass_fractions.keys())

    def __repr__(self):
        return '<{}({})>'.format(self.__class__.__name__, self.inner_repr())

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False

        if len(self) != len(other):
            return False

        for z in self.mass_fractions:
            if z not in other.mass_fractions:
                return False

            fraction = self.mass_fractions[z]
            other_fraction = other.mass_fractions[z]

            if not math.isclose(fraction, other_fraction, abs_tol=self.PRECISION):
                return False

        return True

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        out = []
        for z in sorted(self.mass_fractions):
            out.append(z)
            out.append(int(self.mass_fractions[z] / self.PRECISION))

        return hash(tuple(out))

    def __getstate__(self):
        return {'mass_fractions': dict(self.mass_fractions),
                'atomic_fractions': dict(self.atomic_fractions),
                'formula': self.formula}

    def __setstate__(self, state):
        self.mass_fractions = MappingProxyType(state.get('mass_fractions', {}))
        self.atomic_fractions = MappingProxyType(state.get('atomic_fractions', {}))
        self._formula = state.get('formula', '')

    def is_normalized(self):
        return math.isclose(sum(self.mass_fractions.values()), 1.0, abs_tol=self.PRECISION)

    def inner_repr(self):
        return ', '.join('{}: {:.4f}'.format(pyxray.element_symbol(z), mass_fraction) for z, mass_fraction in self.mass_fractions.items())

    @property
    def formula(self):
        return self._formula
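
The `_key = object()` sentinel above is what makes the constructor effectively private: only the classmethod factories hold the sentinel, so ad-hoc construction fails fast. A stripped-down sketch of the same pattern (the names are illustrative):

class Point:
    _key = object()   # module-private sentinel

    def __init__(self, key, x, y):
        if key is not Point._key:
            raise TypeError('Point cannot be created using constructor')
        self.x, self.y = x, y

    @classmethod
    def of(cls, x, y):
        return cls(cls._key, x, y)

p = Point.of(1, 2)        # OK: the factory supplies the sentinel
# Point(object(), 1, 2)   # raises TypeError: wrong sentinel

Since a bare object() compares by identity, `is not` and `!=` behave the same here; `is not` just states the intent more plainly.
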
Example #10
 def cached_hosts(self):
     """Read-only dict of cached DNS record."""
     return MappingProxyType(self._cached_hosts)
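
Returning MappingProxyType(self._cached_hosts) hands callers a read-only live view rather than a copy: the resolver can keep updating the cache, while callers can observe but never mutate it. A small demonstration of the view semantics:

from types import MappingProxyType

cache = {'example.com': '93.184.216.34'}
view = MappingProxyType(cache)      # a live read-only view, not a copy

cache['example.org'] = '93.184.216.35'
print('example.org' in view)        # True: writer updates show through
# view['evil.test'] = '0.0.0.0'     # would raise TypeError
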
Example #11
    async def format_and_send(
        self,
        *,
        destination: discord.TextChannel,
        response: feedparser.FeedParserDict,
        feed_name: str,
        feed_settings: dict,
        embed_default: bool,
        force: bool = False,
    ) -> Optional[List[int]]:
        """
        Formats and sends,
        returns the integer timestamp of latest entry in the feed which was sent
        """

        use_embed = feed_settings.get("embed_override", None)
        if use_embed is None:
            use_embed = embed_default

        assert isinstance(response.entries, list), "mypy"  # nosec

        match_rule = feed_settings.get("match_req", [])

        def meets_rule(entry):
            if not match_rule:
                return True

            field_name, term = match_rule

            d = getattr(entry, field_name, None)
            if not d:
                return False
            elif isinstance(d, list):
                for item in d:
                    if term in item:
                        return True
                return False
            elif isinstance(d, str):
                return term in d.casefold()

            return False

        if force:
            _to_send = next(filter(meets_rule, response.entries), None)
            if not _to_send:
                return None
            to_send = [_to_send]
        else:
            last = feed_settings.get("last", None)
            last = tuple((last or (0,))[:5])

            to_send = sorted(
                [
                    e
                    for e in response.entries
                    if self.process_entry_time(e) > last and meets_rule(e)
                ],
                key=self.process_entry_time,
            )

        last_sent = None
        roles = feed_settings.get("role_mentions", [])
        for entry in to_send:
            color = destination.guild.me.color

            kwargs = self.format_post(
                entry, use_embed, color, feed_settings.get("template", None), roles
            )
            try:
                r = discord.http.Route(
                    "POST", "/channels/{channel_id}/messages", channel_id=destination.id
                )
                if em := kwargs.pop("embed", None):
                    assert isinstance(em, discord.Embed), "mypy"  # nosec
                    kwargs["embed"] = em.to_dict()
                kwargs["allowed_mentions"] = {"parse": [], "roles": roles}

                await self.bot.http.request(r, json=kwargs)
            except discord.HTTPException as exc:
                debug_exc_log(log, exc, "Exception while sending feed.")
                self.bot.dispatch(
                    # If you want to use this, make your listener accept
                    # what you need from this + **kwargs to not break if I add more
                    # This listener is versioned.
                    # you should not mutate the feedparser classes.
                    #
                    # version: 1
                    # destination: discord.TextChannel
                    # feed_name: str
                    # feedparser_entry: feedparser.FeedParserDict
                    # feed_settings: MappingProxy
                    # forced_update: bool
                    "sinbadcogs_rss_send_fail",
                    listener_version=1,
                    destination=destination,
                    feed_name=feed_name,
                    feedparser_entry=entry,
                    feed_settings=MappingProxyType(feed_settings),
                    forced_update=force,
                )
Example #12
        [('hdf5 index', ''), ('ligand count', ''), ('opt', ''), ('settings', 1), ('settings', 2)],
        names=['index', 'sub index']
    )

    df = pd.DataFrame(None, index=idx, columns=columns)
    df['hdf5 index'] = -1
    df['ligand count'] = -1
    df['settings'] = 'str'
    df['opt'] = False
    df.to_csv(filename)


IDX_DTYPE: Mapping[str, np.dtype] = MappingProxyType({
    'core': BACKUP_IDX_DTYPE,
    'core_no_opt': BACKUP_IDX_DTYPE,
    'ligand': LIG_IDX_DTYPE,
    'ligand_no_opt': LIG_IDX_DTYPE,
    'qd': QD_IDX_DTYPE,
    'qd_no_opt': QD_IDX_DTYPE
})


DEFAULT_PROPERTIES: Mapping[str, Optional[Tuple[str, np.dtype]]] = MappingProxyType({
    'core': None,
    'core_no_opt': None,
    'ligand': ('formula', FORMULA_DTYPE),
    'ligand_no_opt': None,
    'qd': ('ligand count', LIG_COUNT_DTYPE),
    'qd_no_opt': None
})

Example #13
class GtirToNir(eve.NodeTranslator):
    REDUCE_OP_INIT_VAL: ClassVar[
        Mapping[gtir.ReduceOperator, common.BuiltInLiteral]
    ] = MappingProxyType({
        gtir.ReduceOperator.ADD: common.BuiltInLiteral.ZERO,
        gtir.ReduceOperator.MUL: common.BuiltInLiteral.ONE,
        gtir.ReduceOperator.MIN: common.BuiltInLiteral.MIN_VALUE,
        gtir.ReduceOperator.MAX: common.BuiltInLiteral.MAX_VALUE,
    })

    REDUCE_OP_TO_BINOP: ClassVar[
        Mapping[gtir.ReduceOperator, common.BinaryOperator]
    ] = MappingProxyType({
        gtir.ReduceOperator.ADD: common.BinaryOperator.ADD,
        gtir.ReduceOperator.MUL: common.BinaryOperator.MUL,
        # TODO
        # gtir.ReduceOperator.MIN: nir.BuiltInLiteral.MIN_VALUE,
        # gtir.ReduceOperator.MAX: nir.BuiltInLiteral.MAX_VALUE,
    })

    def visit_NeighborChain(self, node: gtir.NeighborChain, **kwargs):
        return nir.NeighborChain(elements=node.elements)

    def visit_HorizontalDimension(self, node: gtir.HorizontalDimension,
                                  **kwargs):
        return nir.HorizontalDimension(
            primary=node.primary,
            secondary=self.visit(node.secondary) if node.secondary else None)

    def visit_VerticalDimension(self, node: gtir.VerticalDimension, **kwargs):
        return nir.VerticalDimension()

    def visit_Dimensions(self, node: gtir.Dimensions, **kwargs):
        return nir.Dimensions(
            horizontal=self.visit(node.horizontal)
            if node.horizontal else None,
            vertical=self.visit(node.vertical) if node.vertical else None,
        )

    def visit_UField(self, node: gtir.UField, **kwargs):
        return nir.UField(name=node.name,
                          vtype=node.vtype,
                          dimensions=self.visit(node.dimensions))

    def visit_TemporaryField(self, node: gtir.UField, **kwargs):
        return nir.TemporaryField(name=node.name,
                                  vtype=node.vtype,
                                  dimensions=self.visit(node.dimensions))

    # TODO test
    # TODO discuss if this actually works: can we uniquely identify which ref
    #  in the field references which dimension, or do we need other techniques
    #  (e.g. refer to primary and secondary dimensions by name)?
    @staticmethod
    def order_location_refs(
        location_refs: List[gtir.LocationRef],
        location_comprehensions: Dict[str, gtir.LocationComprehension],
    ):
        """
        Returns a dict with ``primary``, ``secondary`` and (TODO) vertical entries.
        """
        result = {}

        decls = [location_comprehensions[ref.name] for ref in location_refs]

        # If there is a secondary dimension (sparse), then one of the LocationComprehensions references the other.
        for decl in decls:
            if not isinstance(decl.of, gtir.Domain) and decl.of.name in [
                    ref.name for ref in location_refs
            ]:
                assert "secondary" not in result
                result["secondary"] = decl.name
            else:
                assert "primary" not in result
                result["primary"] = decl.name

        return result

    def visit_FieldAccess(self, node: gtir.FieldAccess, *,
                          location_comprehensions, **kwargs):
        ordered_location_refs = self.order_location_refs(
            node.subscript, location_comprehensions)
        primary_chain = location_comprehensions[
            ordered_location_refs["primary"]].chain
        secondary_chain = (
            location_comprehensions[ordered_location_refs["secondary"]].chain
            if "secondary" in ordered_location_refs else None)

        return nir.FieldAccess(
            name=node.name,
            location_type=node.location_type,
            primary=primary_chain,
            secondary=secondary_chain,
        )

    def visit_NeighborReduce(self, node: gtir.NeighborReduce, *, last_block,
                             **kwargs):
        loc_comprehension = copy.deepcopy(kwargs["location_comprehensions"])
        assert node.neighbors.name not in loc_comprehension
        loc_comprehension[node.neighbors.name] = node.neighbors
        kwargs["location_comprehensions"] = loc_comprehension

        body_location = node.neighbors.chain.elements[-1]
        reduce_var_name = "local" + str(node.id_attr_)
        last_block.declarations.append(
            nir.LocalVar(
                name=reduce_var_name,
                vtype=common.DataType.FLOAT64,  # TODO
                location_type=node.location_type,
            ))
        last_block.statements.append(
            nir.AssignStmt(
                left=nir.VarAccess(name=reduce_var_name,
                                   location_type=node.location_type),
                right=nir.Literal(
                    value=self.REDUCE_OP_INIT_VAL[node.op],
                    location_type=node.location_type,
                    vtype=common.DataType.FLOAT64,  # TODO
                ),
                location_type=node.location_type,
            ), )
        body = nir.BlockStmt(
            declarations=[],
            statements=[
                nir.AssignStmt(
                    left=nir.VarAccess(name=reduce_var_name,
                                       location_type=body_location),
                    right=nir.BinaryOp(
                        left=nir.VarAccess(name=reduce_var_name,
                                           location_type=body_location),
                        op=self.REDUCE_OP_TO_BINOP[node.op],
                        right=self.visit(node.operand,
                                         in_neighbor_loop=True,
                                         **kwargs),
                        location_type=body_location,
                    ),
                    location_type=body_location,
                )
            ],
            location_type=body_location,
        )
        last_block.statements.append(
            nir.NeighborLoop(
                neighbors=self.visit(node.neighbors.chain),
                body=body,
                location_type=node.location_type,
            ))
        return nir.VarAccess(name=reduce_var_name,
                             location_type=node.location_type)  # TODO

    def visit_Literal(self, node: gtir.Literal, **kwargs):
        return nir.Literal(value=node.value,
                           vtype=node.vtype,
                           location_type=node.location_type)

    def visit_BinaryOp(self, node: gtir.BinaryOp, **kwargs):
        return nir.BinaryOp(
            left=self.visit(node.left, **kwargs),
            op=node.op,
            right=self.visit(node.right, **kwargs),
            location_type=node.location_type,
        )

    def visit_AssignStmt(self, node: gtir.AssignStmt, **kwargs):
        return nir.AssignStmt(
            left=self.visit(node.left, **kwargs),
            right=self.visit(node.right, **kwargs),
            location_type=node.location_type,
        )

    def visit_HorizontalLoop(self, node: gtir.HorizontalLoop, **kwargs):
        block = nir.BlockStmt(declarations=[],
                              statements=[],
                              location_type=node.stmt.location_type)
        stmt = self.visit(
            node.stmt,
            last_block=block,
            location_comprehensions={node.location.name: node.location})
        block.statements.append(stmt)
        return nir.HorizontalLoop(
            stmt=block,
            location_type=node.location.chain.elements[0],
        )

    def visit_VerticalLoop(self, node: gtir.VerticalLoop, **kwargs):
        return nir.VerticalLoop(
            horizontal_loops=[self.visit(h) for h in node.horizontal_loops],
            loop_order=node.loop_order,
        )

    def visit_Stencil(self, node: gtir.Stencil, **kwargs):
        return nir.Stencil(vertical_loops=[
            self.visit(loop) for loop in node.vertical_loops
        ], )
        # TODO

    def visit_Computation(self, node: gtir.Stencil, **kwargs):
        return nir.Computation(
            name=node.name,
            params=[self.visit(p) for p in node.params],
            declarations=[self.visit(decl) for decl in node.declarations]
            if node.declarations else [],
            stencils=[self.visit(s) for s in node.stencils],
        )
Example #14
 def types(self) -> 'Mapping[TypeConName, ConcreteType]':
     with self._lock:
         return MappingProxyType(dict(self._data_types))
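
Note the `dict(...)` copy before wrapping: combined with the lock, the caller receives an immutable snapshot that later mutations cannot change, unlike the live-view pattern in Example #10. A condensed sketch of the idiom:

import threading
from types import MappingProxyType

class Registry:
    def __init__(self):
        self._lock = threading.Lock()
        self._data = {}

    def add(self, name, value):
        with self._lock:
            self._data[name] = value

    def snapshot(self):
        with self._lock:
            # copy under the lock, then freeze: the result is a stable
            # snapshot, unaffected by later add() calls
            return MappingProxyType(dict(self._data))
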
Example #15
                return self.everything
            else:
                # *:Specific.Module:Entity means return all matches of the requested name, but
                # under any package
                return self.by_name_lookup.get(type_name, ())
        elif type_name == '*':
            # PKG_ID:* means return all matches in a specific package
            return reduce(add,
                          self.by_package_lookup.get(package_id, {}).values(),
                          ())
        else:
            # PKG_ID:Specific.Module:Entity; we are looking for a very specific type
            return self.by_package_lookup.get(package_id,
                                              {}).get(type_name, ())


def _immutable_mmc(mapping: 'Mapping[str, Mapping[str, Collection[T]]]') -> \
        'Mapping[str, Mapping[str, Collection[T]]]':
    """
    Create an immutable copy of :class:`TemplateStoreCache` data structures.
    """
    return MappingProxyType({
        k1: MappingProxyType({k2: tuple(v)
                              for k2, v in v1.items()})
        for k1, v1 in mapping.items()
    })


EMPTY_MAPPING = MappingProxyType({})
EMPTY_TYPE_CACHE = TypeCache((), EMPTY_MAPPING, EMPTY_MAPPING)
Example #16
    def __init__(self, in_dir: pathlib.Path):
        if not in_dir.is_dir():
            raise IOError("Input directory is not a directory")

        self._ldb = ccl_leveldb.RawLevelDb(in_dir)

        # If performance is a concern we should refactor this, but slow and steady for now

        # First collect the namespace (session/tab guid  + host) and map-ids together
        self._map_id_to_host = {}  # map_id: (guid, host)
        self._deleted_keys = set()

        for rec in self._ldb.iterate_records_raw():
            if rec.user_key.startswith(_NAMESPACE_PREFIX):
                if rec.user_key == _NAMESPACE_PREFIX:
                    continue  # bogus entry near the top usually
                try:
                    key = rec.user_key.decode("utf-8")
                except UnicodeDecodeError:
                    print(f"Invalid namespace key: {rec.user_key}")
                    continue

                split_key = key.split("-", 2)
                if len(split_key) != 3:
                    print(f"Invalid namespace key: {key}")
                    continue

                _, guid, host = split_key

                if not host:
                    continue  # TODO investigate why this happens

                # normalize host to lower just in case
                host = host.lower()
                guid_host_pair = guid, host

                if rec.state == ccl_leveldb.KeyState.Deleted:
                    self._deleted_keys.add(guid_host_pair)
                else:
                    try:
                        map_id = rec.value.decode("utf-8")
                    except UnicodeDecodeError:
                        print(f"Invalid namespace value: {key}")
                        continue

                    if not map_id:
                        continue  # TODO: investigate why this happens/do we want to keep the host around somewhere?

                    #if map_id in self._map_id_to_host_guid and self._map_id_to_host_guid[map_id] != guid_host_pair:
                    if map_id in self._map_id_to_host and self._map_id_to_host[
                            map_id] != host:
                        print("Map ID Collision!")
                        print(f"map_id: {map_id}")
                        print(f"Old host: {self._map_id_to_host[map_id]}")
                        print(f"New host: {guid_host_pair}")
                        raise ValueError("map_id collision")
                    else:
                        self._map_id_to_host[map_id] = host

        # freeze stuff
        self._map_id_to_host = MappingProxyType(self._map_id_to_host)
        self._deleted_keys = frozenset(self._deleted_keys)

        self._host_lookup = {}  # {host: {ss_key: [SessionStoreValue, ...]}}
        self._orphans = []  # list of (key, value) tuples where we can't get the host
        for rec in self._ldb.iterate_records_raw():
            if rec.user_key.startswith(_MAP_ID_PREFIX):
                try:
                    key = rec.user_key.decode("utf-8")
                except UnicodeDecodeError:
                    print(f"Invalid map id key: {rec.user_key}")
                    continue

                if rec.state == ccl_leveldb.KeyState.Deleted:
                    continue  # TODO: do we want to keep the key around because the presence is important?

                split_key = key.split("-", 2)
                if len(split_key) != 3:
                    print(f"Invalid map id key: {key}")
                    continue

                _, map_id, ss_key = split_key

                if not ss_key:
                    # TODO what does it mean when there is no key here?
                    #      The value will also be a single number (encoded utf-8)
                    continue

                try:
                    value = rec.value.decode("UTF-16-LE")
                except UnicodeDecodeError:
                    # print(f"Error decoding value for {key}")
                    # print(f"Raw Value: {rec.value}")
                    continue

                #guid_host_pair = self._map_id_to_host_guid.get(map_id)
                host = self._map_id_to_host.get(map_id)
                #if not guid_host_pair:
                if not host:
                    self._orphans.append(
                        (ss_key, SessionStoreValue(value, None, rec.seq)))
                else:
                    #guid, host = guid_host_pair
                    self._host_lookup.setdefault(host, {})
                    self._host_lookup[host].setdefault(ss_key, [])
                    self._host_lookup[host][ss_key].append(
                        SessionStoreValue(value, None, rec.seq))
Example #17
class SessionStoreDb:
    # todo: get all grouped by namespace by host?
    # todo: get all grouped by namespace by host.key?
    # todo: consider refactoring to only getting metadata on first pass and everything else on demand?
    def __init__(self, in_dir: pathlib.Path):
        if not in_dir.is_dir():
            raise IOError("Input directory is not a directory")

        self._ldb = ccl_leveldb.RawLevelDb(in_dir)

        # If performance is a concern we should refactor this, but slow and steady for now

        # First collect the namespace (session/tab guid  + host) and map-ids together
        self._map_id_to_host = {}  # map_id: (guid, host)
        self._deleted_keys = set()

        for rec in self._ldb.iterate_records_raw():
            if rec.user_key.startswith(_NAMESPACE_PREFIX):
                if rec.user_key == _NAMESPACE_PREFIX:
                    continue  # bogus entry near the top usually
                try:
                    key = rec.user_key.decode("utf-8")
                except UnicodeDecodeError:
                    print(f"Invalid namespace key: {rec.user_key}")
                    continue

                split_key = key.split("-", 2)
                if len(split_key) != 3:
                    print(f"Invalid namespace key: {key}")
                    continue

                _, guid, host = split_key

                if not host:
                    continue  # TODO investigate why this happens

                # normalize host to lower just in case
                host = host.lower()
                guid_host_pair = guid, host

                if rec.state == ccl_leveldb.KeyState.Deleted:
                    self._deleted_keys.add(guid_host_pair)
                else:
                    try:
                        map_id = rec.value.decode("utf-8")
                    except UnicodeDecodeError:
                        print(f"Invalid namespace value: {key}")
                        continue

                    if not map_id:
                        continue  # TODO: investigate why this happens/do we want to keep the host around somewhere?

                    #if map_id in self._map_id_to_host_guid and self._map_id_to_host_guid[map_id] != guid_host_pair:
                    if map_id in self._map_id_to_host and self._map_id_to_host[
                            map_id] != host:
                        print("Map ID Collision!")
                        print(f"map_id: {map_id}")
                        print(f"Old host: {self._map_id_to_host[map_id]}")
                        print(f"New host: {guid_host_pair}")
                        raise ValueError("map_id collision")
                    else:
                        self._map_id_to_host[map_id] = host

        # freeze stuff
        self._map_id_to_host = MappingProxyType(self._map_id_to_host)
        self._deleted_keys = frozenset(self._deleted_keys)

        self._host_lookup = {}  # {host: {ss_key: [SessionStoreValue, ...]}}
        self._orphans = []  # list of (key, value) tuples where we can't get the host
        for rec in self._ldb.iterate_records_raw():
            if rec.user_key.startswith(_MAP_ID_PREFIX):
                try:
                    key = rec.user_key.decode("utf-8")
                except UnicodeDecodeError:
                    print(f"Invalid map id key: {rec.user_key}")
                    continue

                if rec.state == ccl_leveldb.KeyState.Deleted:
                    continue  # TODO: do we want to keep the key around because the presence is important?

                split_key = key.split("-", 2)
                if len(split_key) != 3:
                    print(f"Invalid map id key: {key}")
                    continue

                _, map_id, ss_key = split_key

                if not ss_key:
                    # TODO what does it mean when there is no key here?
                    #      The value will also be a single number (encoded utf-8)
                    continue

                try:
                    value = rec.value.decode("UTF-16-LE")
                except UnicodeDecodeError:
                    # print(f"Error decoding value for {key}")
                    # print(f"Raw Value: {rec.value}")
                    continue

                #guid_host_pair = self._map_id_to_host_guid.get(map_id)
                host = self._map_id_to_host.get(map_id)
                #if not guid_host_pair:
                if not host:
                    self._orphans.append(
                        (ss_key, SessionStoreValue(value, None, rec.seq)))
                else:
                    #guid, host = guid_host_pair
                    self._host_lookup.setdefault(host, {})
                    self._host_lookup[host].setdefault(ss_key, [])
                    self._host_lookup[host][ss_key].append(
                        SessionStoreValue(value, None, rec.seq))

    def __contains__(self, item: typing.Union[str, typing.Tuple[str,
                                                                str]]) -> bool:
        """if item is a str, returns true if that host is present
        if item is a tuple of (str, str), returns True if that host and key pair are present"""
        if isinstance(item, str):
            return item in self._host_lookup
        elif isinstance(item, tuple) and len(item) == 2:
            host, key = item
            return host in self._host_lookup and key in self._host_lookup[host]
        else:
            raise TypeError("item must be a string or a tuple of (str, str)")

    def iter_hosts(self) -> typing.Iterable[str]:
        yield from self._host_lookup.keys()

    def get_all_for_host(self, host):
        if host not in self:
            return {}
        result_raw = dict(self._host_lookup[host])
        for ss_key in result_raw:
            result_raw[ss_key] = tuple(result_raw[ss_key])
        return result_raw

    def get_session_storage_key(self, host, key):
        if (host, key) not in self:
            return tuple()
        return tuple(self._host_lookup[host][key])

    def iter_orphans(self):
        yield from self._orphans

    def __getitem__(self, item: typing.Union[str, typing.Tuple[str, str]]):
        if item not in self:
            raise KeyError(item)

        if isinstance(item, str):
            return self.get_all_for_host(item)
        elif isinstance(item, tuple) and len(item) == 2:
            return self.get_session_storage_key(*item)
        else:
            raise TypeError("item must be a string or a tuple of (str, str)")

    def __iter__(self):
        """iterates the hosts present"""
        return self.iter_hosts()

    def close(self):
        self._ldb.close()
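
Typical usage of the class above; the input path is illustrative and must point at a Chromium "Session Storage" leveldb directory:

import pathlib

store = SessionStoreDb(pathlib.Path('profile/Session Storage'))
try:
    for host in store:                        # __iter__ yields the hosts present
        for ss_key, values in store[host].items():
            print(host, ss_key, len(values))
finally:
    store.close()
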
Example #18
class NaiveCodeGenerator(codegen.TemplatedGenerator):
    DATA_TYPE_TO_STR: ClassVar[Mapping[common.DataType, str]] = MappingProxyType({
        common.DataType.BOOLEAN: "bool",
        common.DataType.INT32: "int",
        common.DataType.UINT32: "unsigned_int",
        common.DataType.FLOAT32: "float",
        common.DataType.FLOAT64: "double",
    })

    LOCATION_TYPE_TO_STR_MAP: ClassVar[
        Mapping[LocationType, Mapping[str, str]]
    ] = MappingProxyType({
        LocationType.Node: MappingProxyType({"singular": "vertex", "plural": "vertices"}),
        LocationType.Edge: MappingProxyType({"singular": "edge", "plural": "edges"}),
        LocationType.Face: MappingProxyType({"singular": "cell", "plural": "cells"}),
    })

    @classmethod
    def apply(cls, root, **kwargs) -> str:
        generated_code = super().apply(root, **kwargs)
        formatted_code = codegen.format_source("cpp",
                                               generated_code,
                                               style="LLVM")
        return formatted_code

    def visit_DataType(self, node, **kwargs) -> str:
        return self.DATA_TYPE_TO_STR[node]

    def visit_LocationType(self, node, **kwargs) -> Mapping[str, str]:
        return self.LOCATION_TYPE_TO_STR_MAP[node]

    Node = as_mako(
        "${_this_node.__class__.__name__.upper()}")  # only for testing

    UnstructuredField = as_mako("""<%
loc_type = location_type["singular"]
sparseloc = "sparse_" if _this_node.sparse_location_type else ""
%>
dawn::${ sparseloc }${ loc_type }_field_t<LibTag, ${ data_type }>& ${ name };"""
                                )

    FieldAccessExpr = as_mako("""<%
sparse_index = "m_sparse_dimension_idx, " if _this_node.is_sparse else ""
field_acc_itervar = outer_iter_var if _this_node.is_sparse else iter_var
%>${ name }(deref(LibTag{}, ${ field_acc_itervar }), ${ sparse_index } k)""")

    AssignmentExpr = as_fmt("{left} = {right}")

    VarAccessExpr = as_fmt("{name}")

    BinaryOp = as_fmt("{left} {op} {right}")

    ExprStmt = as_fmt("\n{expr};")

    VarDeclStmt = as_fmt("\n{data_type} {name};")

    TemporaryFieldDeclStmt = as_mako("""using dawn::allocateEdgeField;
        auto ${ name } = allocate${ location_type['singular'].capitalize() }Field<${ data_type }>(mesh);"""
                                     )

    ForK = as_mako("""<%
if _this_node.loop_order == _this_module.common.LoopOrder.FORWARD:
    k_init = '0'
    k_cond = 'k < k_size'
    k_step = '++k'
else:
    k_init = 'k_size -1'
    k_cond = 'k >= 0'
    k_step = '--k'
%>for (int k = ${k_init}; ${k_cond}; ${k_step}) {
int m_sparse_dimension_idx;
${ "".join(horizontal_loops) }\n}""")

    HorizontalLoop = as_mako("""<%
loc_type = location_type['plural'].title()
%>for(auto const & t: get${ loc_type }(LibTag{}, mesh)) ${ ast }""")

    def visit_HorizontalLoop(self, node, **kwargs) -> str:
        return self.generic_visit(node, iter_var="t", **kwargs)

    BlockStmt = as_mako("{${ ''.join(statements) }\n}")

    ReduceOverNeighbourExpr = as_mako("""<%
right_loc_type = right_location_type["singular"].title()
loc_type = location_type["singular"].title()
%>(m_sparse_dimension_idx=0,reduce${ right_loc_type }To${ loc_type }(mesh, ${ outer_iter_var }, ${ init }, [&](auto& lhs, auto const& ${ iter_var }) {
lhs ${ operation }= ${ right };
m_sparse_dimension_idx++;
return lhs;
}))""")

    def visit_ReduceOverNeighbourExpr(self, node, *, iter_var,
                                      **kwargs) -> str:
        outer_iter_var = iter_var
        return self.generic_visit(
            node,
            outer_iter_var=outer_iter_var,
            iter_var="redIdx",
            **kwargs,
        )

    LiteralExpr = as_fmt("({data_type}){value}")

    Stencil = as_mako("""
void ${name}() {
using dawn::deref;

${ "\\n".join(declarations) if _this_node.declarations else ""}

${ "".join(k_loops) }
}
""")

    Computation = as_mako("""<%
stencil_calls = '\\n'.join("{name}();".format(name=s.name) for s in _this_node.stencils)
ctor_field_params = ', '.join(
    'dawn::{sparse_loc}{loc_type}_field_t<LibTag, {data_type}>& {name}'.format(
        loc_type=_this_generator.LOCATION_TYPE_TO_STR_MAP[p.location_type]['singular'],
        name=p.name,
        data_type=_this_generator.DATA_TYPE_TO_STR[p.data_type],
        sparse_loc="sparse_" if p.sparse_location_type else ""
    )
    for p in _this_node.params
)
ctor_field_initializers = ', '.join(
    '{name}({name})'.format(name=p.name) for p in _this_node.params
)
%>#define DAWN_GENERATED 1
#define DAWN_BACKEND_T CXXNAIVEICO
#include <driver-includes/unstructured_interface.hpp>
namespace dawn_generated {
namespace cxxnaiveico {
template <typename LibTag>
class generated {
private:
dawn::mesh_t<LibTag>& mesh;
int const k_size;

${ ''.join(params) }
${ ''.join(stencils) }

public:
generated(dawn::mesh_t<LibTag>& mesh, int k_size, ${ ctor_field_params }): mesh(mesh), k_size(k_size), ${ ctor_field_initializers } {}

void run() {
${ stencil_calls }
}
};
}
}

""")
Example #19
    def __init__(
        self,
        *_,  # swallow any positional arguments: the API is keyword-only
        embedding_size,
        output_size,
        representation_size=None,
        representation_type='lstm',
        representation_args=MappingProxyType({}),
        deidentifier_type='lstm',
        deidentifier_args=MappingProxyType({}),
        extra_input_size=0,
        adversaries=('discriminate-representations',
                     'discriminate-representation-embedding-pair'),
        adversary_args=MappingProxyType({}),
        optimizer='adam',
        optimizer_args=MappingProxyType({})):
        """ Initialize the adversarial model. It's components are
        - a representation model that transforms embeddings into a (noisy) representation
        - a deidentifier model that performs the deidentification task from the representation
        - an adversary model that tries to reconstruct information from the representation

        :param embedding_size: the representation input size
        :param output_size: the deidentifier output size
        :param representation_size: the representation size (or None to use the embedding size)
        :param representation_type: the type of representation model to use (see representer.py)
        :param representation_args: the kwargs for the representation model
        :param deidentifier_type: the type of deidentifier model to use (see deidentifier.py)
        :param deidentifier_args: the kwargs for the deidentifier model
        :param adversaries: a sequence of adversary type strings (see adversary.py)
        :param adversary_args: a dictionary of adversary args or a list of dictionaries (if every adversary should get
            its own args)
        :param optimizer: the type of optimizer to use (see optimizer.py)
        :param optimizer_args: the args passed to the optimizer
        """

        if representation_size is None:
            representation_size = embedding_size

        original_embeddings = Input(shape=(None, embedding_size))

        build_representer = get_representer(representation_type)
        self.train_representer = build_representer(
            embedding_size=embedding_size,
            representation_size=representation_size,
            apply_noise=True,
            **representation_args)

        train_representation = self.train_representer(original_embeddings)

        deidentifier, deidentifier_loss = get_deidentifier(deidentifier_type)(
            name='deidentifier',
            input_size=representation_size,
            output_size=output_size,
            extra_input_size=extra_input_size,
            **deidentifier_args)

        extra_input = Input(shape=(None, extra_input_size))
        if extra_input_size > 0:
            train_deidentifier_input = [train_representation, extra_input]
        else:
            train_deidentifier_input = train_representation

        train_deidentifier_output = deidentifier(train_deidentifier_input)
        self.pretrain_deidentifier = Model([original_embeddings, extra_input],
                                           train_deidentifier_output)
        self.pretrain_deidentifier.compile(
            optimizer=get_optimizer(optimizer)(**optimizer_args),
            loss=deidentifier_loss,
            metrics=['accuracy'])

        self.train_representer.trainable = False

        adv_embeddings = Input(shape=(None, embedding_size))
        adv_representation = self.train_representer(adv_embeddings)

        adv_fake_embeddings = Input(shape=(None, embedding_size))
        adv_fake_representation = self.train_representer(adv_fake_embeddings)

        adversary_models = []
        adversary_outputs = []
        if isinstance(adversary_args, dict):
            adversary_args = [adversary_args for _ in adversaries]

        for adversary_type, args in zip(adversaries, adversary_args):
            adversary = get_adversary(adversary_type)(
                inputs={
                    'train_representation': adv_representation,
                    'original_embeddings': adv_embeddings,
                    'fake_representation': adv_fake_representation
                },
                representation_size=representation_size,
                embedding_size=embedding_size,
                **args)
            adversary_models.append(adversary.model)
            adversary_outputs.append(adversary.model(adversary.inputs))
            adversary.model.summary()
        adversary_output = concatenate(adversary_outputs, axis=-1)
        adversary_output = Lambda(lambda x: K.mean(x, axis=-1, keepdims=True),
                                  name='adversary')(adversary_output)

        self.pretrain_adversary = Model([adv_embeddings, adv_fake_embeddings],
                                        adversary_output)
        self.pretrain_adversary.summary()
        self.pretrain_adversary.compile(
            optimizer=get_optimizer(optimizer)(**optimizer_args),
            loss='binary_crossentropy',
            metrics=['accuracy'])

        self.fine_tune_branches = Model([
            original_embeddings, extra_input, adv_embeddings,
            adv_fake_embeddings
        ], [train_deidentifier_output, adversary_output])
        self.fine_tune_branches.compile(
            optimizer=get_optimizer(optimizer)(**optimizer_args),
            loss=[deidentifier_loss, 'binary_crossentropy'],
            metrics=['accuracy'])

        self.train_representer.trainable = True
        deidentifier.trainable = False
        for adversary in adversary_models:
            adversary.trainable = False
        self.fine_tune_representer = Model([
            original_embeddings, extra_input, adv_embeddings,
            adv_fake_embeddings
        ], [train_deidentifier_output, adversary_output])
        self.fine_tune_representer.compile(
            optimizer=get_optimizer(optimizer)(**optimizer_args),
            loss=[deidentifier_loss, adversarial_objective],
            loss_weights=[1, 1],
            metrics=['accuracy'])
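
The MappingProxyType({}) defaults in the signature above sidestep Python's mutable-default-argument pitfall: a bare {} default is created once and shared across calls, whereas a mapping proxy cannot be mutated at all. A minimal illustration:

from types import MappingProxyType

def bad(options={}):              # classic pitfall: one dict shared by all calls
    options.setdefault('calls', 0)
    options['calls'] += 1
    return options['calls']

print(bad(), bad())               # 1 2 -- state leaks between calls

def good(options=MappingProxyType({})):
    # the default cannot be written to; callers must pass their own dict
    return dict(options)
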
Example #20
AQI: 'MappingProxyType[str, dict]' = MappingProxyType({
    # Key == `pm25_high`
    'pm25': {
        '12.0': {
            'aqi_high': 50,
            'aqi_low': 0,
            'pollutant_high': 12.0,
            'pollutant_low': 0.0,
        },
        '35.4': {
            'aqi_high': 100,
            'aqi_low': 51,
            'pollutant_high': 35.4,
            'pollutant_low': 12.1,
        },
        '55.4': {
            'aqi_high': 150,
            'aqi_low': 101,
            'pollutant_high': 55.4,
            'pollutant_low': 35.5,
        },
        '150.4': {
            'aqi_high': 200,
            'aqi_low': 151,
            'pollutant_high': 150.4,
            'pollutant_low': 55.5,
        },
        '250.4': {
            'aqi_high': 300,
            'aqi_low': 201,
            'pollutant_high': 250.4,
            'pollutant_low': 150.5,
        },
        '350.4': {
            'aqi_high': 400,
            'aqi_low': 301,
            'pollutant_high': 350.4,
            'pollutant_low': 250.5,
        },
        '500.4': {
            'aqi_high': 500,
            'aqi_low': 401,
            'pollutant_high': 500.4,
            'pollutant_low': 350.5,
        },
    },
    'pm10': {
        '54': {
            'aqi_high': 50,
            'aqi_low': 0,
            'pollutant_high': 54,
            'pollutant_low': 0,
        },
        '154': {
            'aqi_high': 100,
            'aqi_low': 51,
            'pollutant_high': 154,
            'pollutant_low': 55,
        },
        '254': {
            'aqi_high': 150,
            'aqi_low': 101,
            'pollutant_high': 254,
            'pollutant_low': 155,
        },
        '354': {
            'aqi_high': 200,
            'aqi_low': 151,
            'pollutant_high': 354,
            'pollutant_low': 255,
        },
        '424': {
            'aqi_high': 300,
            'aqi_low': 201,
            'pollutant_high': 424,
            'pollutant_low': 355,
        },
        '504': {
            'aqi_high': 400,
            'aqi_low': 301,
            'pollutant_high': 504,
            'pollutant_low': 425,
        },
        '604': {
            'aqi_high': 500,
            'aqi_low': 401,
            'pollutant_high': 604,
            'pollutant_low': 505,
        },
    },
})
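
The table above is shaped for the standard piecewise-linear AQI conversion: locate the bracket whose pollutant_high bounds the measured concentration, then interpolate between its AQI endpoints. A hedged sketch (the helper name and lookup strategy are ours, not part of the original; the arithmetic is the usual EPA formula):

def calculate_aqi(pollutant: str, concentration: float) -> int:
    # Walk the breakpoints in ascending order and interpolate linearly
    # within the first bracket that contains the reading.
    for bracket in AQI[pollutant].values():
        if concentration <= bracket['pollutant_high']:
            aqi_span = bracket['aqi_high'] - bracket['aqi_low']
            conc_span = bracket['pollutant_high'] - bracket['pollutant_low']
            offset = concentration - bracket['pollutant_low']
            return round(bracket['aqi_low'] + aqi_span * offset / conc_span)
    return 500  # beyond the top breakpoint: clamp to the scale maximum

print(calculate_aqi('pm25', 35.0))  # 99
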
Example #21
class State:
    """Object to represent a state within the state machine.

    entity_id: the entity that is represented.
    state: the state of the entity
    attributes: extra information on entity and state
    last_changed: last time the state was changed, not the attributes.
    last_updated: last time this object was updated.
    context: Context in which it was created
    """

    __slots__ = ['entity_id', 'state', 'attributes',
                 'last_changed', 'last_updated', 'context']

    def __init__(self, entity_id: str, state: Any,
                 attributes: Optional[Dict] = None,
                 last_changed: Optional[datetime.datetime] = None,
                 last_updated: Optional[datetime.datetime] = None,
                 context: Optional[Context] = None,
                 # Temp, because database can still store invalid entity IDs
                 # Remove with 1.0 or in 2020.
                 temp_invalid_id_bypass: Optional[bool] = False) -> None:
        """Initialize a new state."""
        state = str(state)

        if not valid_entity_id(entity_id) and not temp_invalid_id_bypass:
            raise InvalidEntityFormatError((
                "Invalid entity id encountered: {}. "
                "Format should be <domain>.<object_id>").format(entity_id))

        if not valid_state(state):
            raise InvalidStateError((
                "Invalid state encountered for entity id: {}. "
                "State max length is 255 characters.").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = state  # type: str
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = last_updated or dt_util.utcnow()
        self.last_changed = last_changed or self.last_updated
        self.context = context or Context()

    @property
    def domain(self) -> str:
        """Domain of this state."""
        return split_entity_id(self.entity_id)[0]

    @property
    def object_id(self) -> str:
        """Object id of this state."""
        return split_entity_id(self.entity_id)[1]

    @property
    def name(self) -> str:
        """Name of this state."""
        return (
            self.attributes.get(ATTR_FRIENDLY_NAME) or
            self.object_id.replace('_', ' '))

    def as_dict(self) -> Dict:
        """Return a dict representation of the State.

        Async friendly.

        To be used for JSON serialization.
        Ensures: state == State.from_dict(state.as_dict())
        """
        return {'entity_id': self.entity_id,
                'state': self.state,
                'attributes': dict(self.attributes),
                'last_changed': self.last_changed,
                'last_updated': self.last_updated,
                'context': self.context.as_dict()}

    @classmethod
    def from_dict(cls, json_dict: Dict) -> Any:
        """Initialize a state from a dict.

        Async friendly.

        Ensures: state == State.from_dict(state.as_dict())
        """
        if not (json_dict and 'entity_id' in json_dict and
                'state' in json_dict):
            return None

        last_changed = json_dict.get('last_changed')

        if isinstance(last_changed, str):
            last_changed = dt_util.parse_datetime(last_changed)

        last_updated = json_dict.get('last_updated')

        if isinstance(last_updated, str):
            last_updated = dt_util.parse_datetime(last_updated)

        context = json_dict.get('context')
        if context:
            context = Context(
                id=context.get('id'),
                user_id=context.get('user_id'),
            )

        return cls(json_dict['entity_id'], json_dict['state'],
                   json_dict.get('attributes'), last_changed, last_updated,
                   context)

    def __eq__(self, other: Any) -> bool:
        """Return the comparison of the state."""
        return (self.__class__ == other.__class__ and  # type: ignore
                self.entity_id == other.entity_id and
                self.state == other.state and
                self.attributes == other.attributes and
                self.context == other.context)

    def __repr__(self) -> str:
        """Return the representation of the states."""
        attrs = "; {}".format(util.repr_helper(self.attributes)) \
            if self.attributes else ""

        return "<state {}={}{} @ {}>".format(
            self.entity_id, self.state, attrs,
            dt_util.as_local(self.last_changed).isoformat())
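
A brief usage sketch of the class above (assuming it and Context are imported from Home Assistant's core module; the import path is an assumption):

state = State('light.kitchen', 'on', {'friendly_name': 'Kitchen Light'})

assert state.domain == 'light'
assert state.object_id == 'kitchen'
assert state.name == 'Kitchen Light'

# Round trip promised by the docstrings above:
assert state == State.from_dict(state.as_dict())

# attributes is a MappingProxyType, so it is read-only:
# state.attributes['brightness'] = 255 would raise TypeError.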
Example No. 22
def lifted(self):
    return MappingProxyType(self.__lifted)
Example No. 23
class State:
    """Object to represent a state within the state machine.

    entity_id: the entity that is represented.
    state: the state of the entity
    attributes: extra information on entity and state
    last_changed: last time the state was changed, not the attributes.
    last_updated: last time this object was updated.
    context: Context in which it was created
    """

    __slots__ = [
        'entity_id', 'state', 'attributes', 'last_changed', 'last_updated',
        'context'
    ]

    def __init__(self,
                 entity_id: str,
                 state: Any,
                 attributes: Optional[Dict] = None,
                 last_changed: Optional[datetime.datetime] = None,
                 last_updated: Optional[datetime.datetime] = None,
                 context: Optional[Context] = None) -> None:
        """Initialize a new state."""
        state = str(state)

        if not valid_entity_id(entity_id):
            raise InvalidEntityFormatError(
                ("Invalid entity id encountered: {}. "
                 "Format should be <domain>.<object_id>").format(entity_id))

        if not valid_state(state):
            raise InvalidStateError(
                ("Invalid state encountered for entity id: {}. "
                 "State max length is 255 characters.").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = state
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = last_updated or dt_util.utcnow()
        self.last_changed = last_changed or self.last_updated
        self.context = context or Context()

    @property
    def domain(self) -> str:
        """Domain of this state."""
        return split_entity_id(self.entity_id)[0]

    @property
    def object_id(self) -> str:
        """Object id of this state."""
        return split_entity_id(self.entity_id)[1]

    @property
    def name(self) -> str:
        """Name of this state."""
        return (self.attributes.get(ATTR_FRIENDLY_NAME)
                or self.object_id.replace('_', ' '))

    def as_dict(self) -> Dict:
        """Return a dict representation of the State.

        Async friendly.

        To be used for JSON serialization.
        Ensures: state == State.from_dict(state.as_dict())
        """
        return {
            'entity_id': self.entity_id,
            'state': self.state,
            'attributes': dict(self.attributes),
            'last_changed': self.last_changed,
            'last_updated': self.last_updated,
            'context': self.context.as_dict()
        }

    @classmethod
    def from_dict(cls, json_dict: Dict) -> Any:
        """Initialize a state from a dict.

        Async friendly.

        Ensures: state == State.from_dict(state.as_dict())
        """
        if not (json_dict and 'entity_id' in json_dict
                and 'state' in json_dict):
            return None

        last_changed = json_dict.get('last_changed')

        if isinstance(last_changed, str):
            last_changed = dt_util.parse_datetime(last_changed)

        last_updated = json_dict.get('last_updated')

        if isinstance(last_updated, str):
            last_updated = dt_util.parse_datetime(last_updated)

        context = json_dict.get('context')
        if context:
            context = Context(**context)

        return cls(json_dict['entity_id'], json_dict['state'],
                   json_dict.get('attributes'), last_changed, last_updated,
                   context)

    def __eq__(self, other: Any) -> bool:
        """Return the comparison of the state."""
        return (self.__class__ == other.__class__ and  # type: ignore
                self.entity_id == other.entity_id and
                self.state == other.state and self.attributes
                == other.attributes and self.context == other.context)

    def __repr__(self) -> str:
        """Return the representation of the states."""
        attrs = "; {}".format(util.repr_helper(self.attributes)) \
            if self.attributes else ""

        return "<state {}={}{} @ {}>".format(
            self.entity_id, self.state, attrs,
            dt_util.as_local(self.last_changed).isoformat())
Example No. 24
class JsonPatch(object):
    json_dumper = staticmethod(json.dumps)
    json_loader = staticmethod(_jsonloads)

    operations = MappingProxyType({
        'remove': RemoveOperation,
        'add': AddOperation,
        'replace': ReplaceOperation,
        'move': MoveOperation,
        'test': TestOperation,
        'copy': CopyOperation,
    })
    """A JSON Patch is a list of Patch Operations.

    >>> patch = JsonPatch([
    ...     {'op': 'add', 'path': '/foo', 'value': 'bar'},
    ...     {'op': 'add', 'path': '/baz', 'value': [1, 2, 3]},
    ...     {'op': 'remove', 'path': '/baz/1'},
    ...     {'op': 'test', 'path': '/baz', 'value': [1, 3]},
    ...     {'op': 'replace', 'path': '/baz/0', 'value': 42},
    ...     {'op': 'remove', 'path': '/baz/1'},
    ... ])
    >>> doc = {}
    >>> result = patch.apply(doc)
    >>> expected = {'foo': 'bar', 'baz': [42]}
    >>> result == expected
    True

    A JsonPatch object is iterable, so you can easily access each patch
    statement in a loop:

    >>> lpatch = list(patch)
    >>> expected = {'op': 'add', 'path': '/foo', 'value': 'bar'}
    >>> lpatch[0] == expected
    True
    >>> lpatch == patch.patch
    True

    A JsonPatch can also be converted directly to :class:`bool`, which tells
    whether it contains any operation statements:

    >>> bool(patch)
    True
    >>> bool(JsonPatch([]))
    False

    This behavior is very handy with :func:`make_patch` to write more readable
    code:

    >>> old = {'foo': 'bar', 'numbers': [1, 3, 4, 8]}
    >>> new = {'baz': 'qux', 'numbers': [1, 4, 7]}
    >>> patch = make_patch(old, new)
    >>> if patch:
    ...     # document has changed, do something useful
    ...     patch.apply(old)    #doctest: +ELLIPSIS
    {...}
    """
    def __init__(self, patch, pointer_cls=JsonPointer):
        self.patch = patch
        self.pointer_cls = pointer_cls

        # Verify that the structure of the patch document
        # is correct by retrieving each patch element.
        # Much of the validation is done in the initializer
        # though some is delayed until the patch is applied.
        for op in self.patch:
            self._get_operation(op)

    def __str__(self):
        """str(self) -> self.to_string()"""
        return self.to_string()

    def __bool__(self):
        return bool(self.patch)

    __nonzero__ = __bool__

    def __iter__(self):
        return iter(self.patch)

    def __hash__(self):
        return hash(tuple(self._ops))

    def __eq__(self, other):
        if not isinstance(other, JsonPatch):
            return False
        return self._ops == other._ops

    def __ne__(self, other):
        return not (self == other)

    @classmethod
    def from_string(cls, patch_str, loads=None, pointer_cls=JsonPointer):
        """Creates JsonPatch instance from string source.

        :param patch_str: JSON patch as raw string.
        :type patch_str: str

        :param loads: A function of one argument that loads a serialized
                      JSON string.
        :type loads: function

        :param pointer_cls: JSON pointer class to use.
        :type pointer_cls: Type[JsonPointer]

        :return: :class:`JsonPatch` instance.
        """
        json_loader = loads or cls.json_loader
        patch = json_loader(patch_str)
        return cls(patch, pointer_cls=pointer_cls)

    @classmethod
    def from_diff(
        cls,
        src,
        dst,
        optimization=True,
        dumps=None,
        pointer_cls=JsonPointer,
    ):
        """Creates JsonPatch instance based on comparison of two document
        objects. Json patch would be created for `src` argument against `dst`
        one.

        :param src: Data source document object.
        :type src: dict

        :param dst: Data destination document object.
        :type dst: dict

        :param dumps: A function of one argument that produces a serialized
                      JSON string.
        :type dumps: function

        :param pointer_cls: JSON pointer class to use.
        :type pointer_cls: Type[JsonPointer]

        :return: :class:`JsonPatch` instance.

        >>> src = {'foo': 'bar', 'numbers': [1, 3, 4, 8]}
        >>> dst = {'baz': 'qux', 'numbers': [1, 4, 7]}
        >>> patch = JsonPatch.from_diff(src, dst)
        >>> new = patch.apply(src)
        >>> new == dst
        True
        """
        json_dumper = dumps or cls.json_dumper
        builder = DiffBuilder(json_dumper, pointer_cls=pointer_cls)
        builder._compare_values('', None, src, dst)
        ops = list(builder.execute())
        return cls(ops, pointer_cls=pointer_cls)

    def to_string(self, dumps=None):
        """Returns patch set as JSON string."""
        json_dumper = dumps or self.json_dumper
        return json_dumper(self.patch)

    @property
    def _ops(self):
        return tuple(map(self._get_operation, self.patch))

    def apply(self, obj, in_place=False):
        """Applies the patch to a given object.

        :param obj: Document object.
        :type obj: dict

        :param in_place: Tweaks how the patch is applied: directly to the
                         specified `obj` or to its copy.
        :type in_place: bool

        :return: Modified `obj`.
        """

        if not in_place:
            obj = copy.deepcopy(obj)

        for operation in self._ops:
            obj = operation.apply(obj)

        return obj

    def _get_operation(self, operation):
        if 'op' not in operation:
            raise InvalidJsonPatch("Operation does not contain 'op' member",
                                   op=operation)

        op = operation['op']

        if not isinstance(op, basestring):
            raise InvalidJsonPatch("Operation must be a string", op=operation)

        if op not in self.operations:
            raise InvalidJsonPatch("Unknown operation {0!r}".format(op),
                                   op=operation)

        cls = self.operations[op]
        return cls(operation, pointer_cls=self.pointer_cls)
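
A short usage sketch of the class above, restating its doctests as plain code:

old = {'foo': 'bar', 'numbers': [1, 3, 4, 8]}
new = {'baz': 'qux', 'numbers': [1, 4, 7]}

patch = JsonPatch.from_diff(old, new)
result = patch.apply(old)       # `old` is deep-copied, not modified
assert result == new
assert old == {'foo': 'bar', 'numbers': [1, 3, 4, 8]}

patch.apply(old, in_place=True)  # now `old` itself is patched
assert old == new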
Example No. 25
from types import MappingProxyType

DEFAULT_STORAGE: str = 'django.core.files.storage.FileSystemStorage'

STORAGES: MappingProxyType = MappingProxyType({
    'LOCAL': DEFAULT_STORAGE,
    'AWS_S3': 'storages.backends.s3boto3.S3Boto3Storage'
})
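
A hedged sketch of how such a mapping might be consumed from a Django settings module; the FILE_STORAGE environment variable is an assumption for illustration:

import os

# Fall back to the local backend when the flag is unset or unknown.
DEFAULT_FILE_STORAGE: str = STORAGES.get(
    os.environ.get('FILE_STORAGE', 'LOCAL'), DEFAULT_STORAGE)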
Example No. 26
                    # what you need from this + **kwargs to not break if I add more
                    # This listener is versioned.
                    # you should not mutate the feedparser classes.
                    #
                    # version: 1
                    # destination: discord.TextChannel
                    # feed_name: str
                    # feedparser_entry: feedparser.FeedParserDict
                    # feed_settings: MappingProxy
                    # forced_update: bool
                    "sinbadcogs_rss_send",
                    listener_version=1,
                    destination=destination,
                    feed_name=feed_name,
                    feedparser_entry=entry,
                    feed_settings=MappingProxyType(feed_settings),
                    forced_update=force,
                )
            finally:
                last_sent = list(self.process_entry_time(entry))

        return last_sent

    def format_post(self, entry, embed: bool, color, template=None,
                    roles=None) -> dict:
        # Avoid a shared mutable default argument.
        roles = roles or []

        if template is None:
            if embed:
                _template = "[$title]($link)"
            else:
                _template = "$title: <$link>"
        else:
Example No. 27
    An immutable empty :class:`~collections.abc.Set`.

.. data:: EMPTY_SEQUENCE
    :type: Sequence
    :value: tuple()

    An immutable empty :class:`~collections.abc.Sequence`.

.. data:: EMPTY_MAPPING
    :type: Mapping
    :value: types.MappingProxyType({})

    An immutable empty :class:`~collections.abc.Mapping`.

"""

from types import MappingProxyType
from typing import Mapping, Collection, Sequence, AbstractSet, Container, Any

__all__ = [
    'EMPTY_SEQUENCE', 'EMPTY_MAPPING', 'EMPTY_COLLECTION', 'EMPTY_SET',
    'EMPTY_CONTAINER'
]

EMPTY_SEQUENCE: Sequence[Any] = ()
EMPTY_MAPPING: Mapping[Any, Any] = MappingProxyType({})
EMPTY_COLLECTION: Collection[Any] = frozenset()
EMPTY_SET: AbstractSet[Any] = frozenset()
EMPTY_CONTAINER: Container[Any] = frozenset()
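
These constants exist to serve as safe, shared defaults: unlike a literal {} or [], they cannot be mutated by accident. A small illustration with a hypothetical render helper:

from typing import Any, Mapping

def render(template: str, context: Mapping[str, Any] = EMPTY_MAPPING) -> str:
    # EMPTY_MAPPING is immutable and shared, so it avoids the classic
    # mutable-default-argument pitfall that a bare {} default would invite.
    return template.format_map(context)

assert render('hello') == 'hello'
assert render('hello {name}', {'name': 'world'}) == 'hello world'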
Example No. 28
def params(self):
    return MappingProxyType(self._params)
Example No. 29
    def _pack_value(
            self,
            fname,
            ftype,
            parent,
            value_name="value",
            metadata=MappingProxyType({}),
    ):

        overridden: typing.Optional[str] = None
        serialize_option = metadata.get("serialize")
        if serialize_option is None:
            strategy = metadata.get("serialization_strategy")
            if isinstance(strategy, SerializationStrategy):
                serialize_option = strategy.serialize
        if serialize_option is None:
            strategy = self.get_config().serialization_strategy.get(ftype)
            if isinstance(strategy, dict):
                serialize_option = strategy.get("serialize")
            elif isinstance(strategy, SerializationStrategy):
                serialize_option = strategy.serialize
        if callable(serialize_option):
            setattr(
                self.cls,
                f"__{fname}_serialize",
                staticmethod(serialize_option),
            )
            overridden = f"self.__{fname}_serialize({value_name})"

        with suppress(TypeError):
            if issubclass(ftype, SerializableType):
                return overridden or f"{value_name}._serialize()"

        origin_type = get_type_origin(ftype)
        if is_special_typing_primitive(origin_type):
            if origin_type is typing.Any:
                return overridden or value_name
            elif is_union(ftype):
                args = getattr(ftype, "__args__", ())
                if len(args) == 2 and args[1] == NoneType:  # it is Optional
                    return self._pack_value(fname,
                                            args[0],
                                            parent,
                                            metadata=metadata)
                else:
                    method_name = self._add_pack_union(fname, ftype, args,
                                                       parent, metadata)
                    return (f"self.{method_name}({value_name},"
                            f"{self.get_to_dict_flags()})")
            elif origin_type is typing.AnyStr:
                raise UnserializableDataError(
                    "AnyStr is not supported by mashumaro")
            elif is_type_var(ftype):
                raise UnserializableDataError(
                    "TypeVars are not supported by mashumaro")
            else:
                raise UnserializableDataError(
                    f"{ftype} as a field type is not supported by mashumaro")
        elif origin_type is int:
            return overridden or f"int({value_name})"
        elif origin_type is float:
            return overridden or f"float({value_name})"
        elif origin_type in (bool, NoneType):
            return overridden or value_name
        elif origin_type in (datetime.datetime, datetime.date, datetime.time):
            if overridden:
                return f"{value_name} if use_datetime else {overridden}"
            return (
                f"{value_name} if use_datetime else {value_name}.isoformat()")
        elif origin_type is datetime.timedelta:
            return overridden or f"{value_name}.total_seconds()"
        elif origin_type is datetime.timezone:
            return overridden or f"{value_name}.tzname(None)"
        elif origin_type is uuid.UUID:
            return overridden or f"str({value_name})"
        elif origin_type in [
                ipaddress.IPv4Address,
                ipaddress.IPv6Address,
                ipaddress.IPv4Network,
                ipaddress.IPv6Network,
                ipaddress.IPv4Interface,
                ipaddress.IPv6Interface,
        ]:
            return overridden or f"str({value_name})"
        elif origin_type is Decimal:
            return overridden or f"str({value_name})"
        elif origin_type is Fraction:
            return overridden or f"str({value_name})"
        elif issubclass(origin_type, typing.Collection) and not issubclass(
                origin_type, enum.Enum):
            args = getattr(ftype, "__args__", ())

            def inner_expr(arg_num=0, v_name="value", v_type=None):
                if v_type:
                    return self._pack_value(fname, v_type, parent, v_name)
                else:
                    return self._pack_value(fname, args[arg_num], parent,
                                            v_name)

            if issubclass(origin_type, typing.ByteString):
                specific = f"encodebytes({value_name}).decode()"
                return (
                    f"{value_name} if use_bytes else {overridden or specific}")
            elif issubclass(origin_type, str):
                return overridden or value_name
            elif issubclass(
                    origin_type,
                (typing.List, typing.Deque, typing.Tuple, typing.AbstractSet),
            ):
                if is_generic(ftype):
                    return (overridden
                            or f"[{inner_expr()} for value in {value_name}]")
                elif ftype is list:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.List[T] instead")
                elif ftype is collections.deque:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.Deque[T] instead")
                elif ftype is tuple:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.Tuple[T] instead")
                elif ftype is set:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.Set[T] instead")
                elif ftype is frozenset:
                    raise UnserializableField(
                        fname, ftype, parent,
                        "Use typing.FrozenSet[T] instead")
            elif issubclass(origin_type, typing.ChainMap):
                if ftype is collections.ChainMap:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.ChainMap[KT,VT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "ChainMaps with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden
                                or f'[{{{inner_expr(0,"key")}:{inner_expr(1)} '
                                f"for key,value in m.items()}} "
                                f"for m in value.maps]")
            elif PY_37_MIN and issubclass(origin_type, typing.OrderedDict):
                if ftype is collections.OrderedDict:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.OrderedDict[KT,VT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "OrderedDict with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden or
                                f'{{{inner_expr(0, "key")}: {inner_expr(1)} '
                                f"for key, value in {value_name}.items()}}")
            elif issubclass(origin_type, typing.Counter):
                if ftype is collections.Counter:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.Counter[KT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "Counter with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden or f'{{{inner_expr(0, "key")}: '
                                f"{inner_expr(1, v_type=int)} "
                                f"for key, value in {value_name}.items()}}")
            elif issubclass(origin_type, typing.Mapping):
                if ftype is dict:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.Dict[KT,VT] or Mapping[KT,VT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "Mappings with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden
                                or f'{{{inner_expr(0,"key")}: {inner_expr(1)} '
                                f"for key, value in {value_name}.items()}}")
            elif issubclass(origin_type, typing.Sequence):
                if is_generic(ftype):
                    return (overridden
                            or f"[{inner_expr()} for value in {value_name}]")
        elif issubclass(origin_type, os.PathLike):
            return overridden or f"{value_name}.__fspath__()"
        elif issubclass(origin_type, enum.Enum):
            specific = f"{value_name}.value"
            return f"{value_name} if use_enum else {overridden or specific}"
        elif is_dataclass_dict_mixin_subclass(ftype):
            flags = self.get_to_dict_flags(ftype)
            return overridden or f"{value_name}.to_dict({flags})"
        elif overridden:
            return overridden

        raise UnserializableField(fname, ftype, parent)
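
For context, a minimal sketch of the public API this code generator backs, assuming the mashumaro package (the Event dataclass and its fields are illustrative):

from dataclasses import dataclass
from datetime import datetime
from typing import List

from mashumaro import DataClassDictMixin

@dataclass
class Event(DataClassDictMixin):
    name: str
    when: datetime
    tags: List[str]

e = Event('deploy', datetime(2020, 1, 1), ['prod'])
# datetimes are packed via isoformat(), matching the branch above.
assert e.to_dict() == {
    'name': 'deploy',
    'when': '2020-01-01T00:00:00',
    'tags': ['prod'],
}
assert Event.from_dict(e.to_dict()) == e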
Example No. 30
skill_by_skillcat = MappingProxyType(OrderedDict((
    (skillcat['GENERAL'], frozenset((
        skill['Block'],
        skill['Dauntless'],
        skill['Dirty Player'],
        skill['Frenzy'],
        skill['Kick'],
        skill['Leader'],
        skill['Nerves of Steel'],
        skill['Pass Block'],
        skill['Pro'],
        skill['Shadowing'],
        skill['Strip Ball'],
        skill['Sure Hands'],
        skill['Tackle'],
        ))),
    (skillcat['AGILITY'], frozenset((
        skill['Catch'],
        skill['Diving Catch'],
        skill['Diving Tackle'],
        skill['Dodge'],
        skill['Jump Up'],
        skill['Leap'],
        skill['Side Step'],
        skill['Sprint'],
        skill['Sure Feet'],
        ))),
    (skillcat['STRENGTH'], frozenset((
        skill['Break Tackle'],
        skill['Guard'],
        skill['Mighty Blow'],
        skill['Multiple Block'],
        skill['Piling On'],
        skill['Stand Firm'],
        ))),
    (skillcat['PASSING'], frozenset((
        skill['Accurate'],
        skill['Dump-Off'],
        skill['Hail Mary'],
        skill['Pass'],
        skill['Safe Throw'],
        skill['Strong Arm'],
        ))),
    (skillcat['PHYSICAL'], frozenset((
        skill['Big Hand'],
        skill['Claw(s)'],
        skill['Extra Arms'],
        skill['Foul Appearance'],
        skill['Horns'],
        skill['Prehensile Tail'],
        skill['Razor Sharp Claws'],
        skill['Spikes'],
        skill['Tentacles'],
        skill['Thick Skull'],
        skill['Two Heads'],
        skill['Very Long Legs'],
        ))),
    (skillcat['RACIAL CHARACTERISTICS'], frozenset((
        skill['Always Hungry'],
        skill['Big Guy'],
        skill['Blood Lust'],
        skill['Bone Head'],
        skill['Easily Confused'],
        skill['Hypnotic Gaze'],
        skill['Nurgle\'s Rot'],
        skill['Really Stupid'],
        skill['Regeneration'],
        skill['Right Stuff'],
        skill['Stunty'],
        skill['Take Root'],
        skill['Throw Team-Mate'],
        skill['Thrud\'s Fans'],
        skill['Wild Animal'],
        ))),
    )))
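
If a reverse lookup is needed, the table above can be inverted into a hypothetical skill-to-category mapping, since each skill appears in exactly one category:

skillcat_by_skill = MappingProxyType({
    s: cat
    for cat, skills in skill_by_skillcat.items()
    for s in skills
})

assert skillcat_by_skill[skill['Dodge']] == skillcat['AGILITY']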
Example No. 31
    def _unpack_field_value(
            self,
            fname,
            ftype,
            parent,
            value_name="value",
            metadata=MappingProxyType({}),
    ):

        overridden: typing.Optional[str] = None
        deserialize_option = metadata.get("deserialize")
        if deserialize_option is None:
            strategy = metadata.get("serialization_strategy")
            if isinstance(strategy, SerializationStrategy):
                deserialize_option = strategy.deserialize
        if deserialize_option is None:
            strategy = self.get_config().serialization_strategy.get(ftype)
            if isinstance(strategy, dict):
                deserialize_option = strategy.get("deserialize")
            elif isinstance(strategy, SerializationStrategy):
                deserialize_option = strategy.deserialize
        if callable(deserialize_option):
            setattr(self.cls, f"__{fname}_deserialize", deserialize_option)
            overridden = f"cls.__{fname}_deserialize({value_name})"

        with suppress(TypeError):
            if issubclass(ftype, SerializableType):
                return (overridden
                        or f"{type_name(ftype)}._deserialize({value_name})")

        origin_type = get_type_origin(ftype)
        if is_special_typing_primitive(origin_type):
            if origin_type is typing.Any:
                return overridden or value_name
            elif is_union(ftype):
                args = getattr(ftype, "__args__", ())
                if len(args) == 2 and args[1] == NoneType:  # it is Optional
                    return self._unpack_field_value(fname,
                                                    args[0],
                                                    parent,
                                                    metadata=metadata)
                else:
                    method_name = self._add_unpack_union(
                        fname, ftype, args, parent, metadata)
                    return (f"cls.{method_name}({value_name},"
                            f"use_bytes,use_enum,use_datetime)")
            elif origin_type is typing.AnyStr:
                raise UnserializableDataError(
                    "AnyStr is not supported by mashumaro")
            elif is_type_var(ftype):
                raise UnserializableDataError(
                    "TypeVars are not supported by mashumaro")
            else:
                raise UnserializableDataError(
                    f"{ftype} as a field type is not supported by mashumaro")
        elif origin_type is int:
            return overridden or f"int({value_name})"
        elif origin_type is float:
            return overridden or f"float({value_name})"
        elif origin_type in (bool, NoneType):
            return overridden or value_name
        elif origin_type in (datetime.datetime, datetime.date, datetime.time):
            if overridden:
                return f"{value_name} if use_datetime else {overridden}"
            elif deserialize_option is not None:
                if deserialize_option == "ciso8601":
                    if ciso8601:
                        self.ensure_module_imported(ciso8601)
                        datetime_parser = "ciso8601.parse_datetime"
                    else:
                        raise ThirdPartyModuleNotFoundError(
                            "ciso8601", fname, parent)  # pragma no cover
                elif deserialize_option == "pendulum":
                    if pendulum:
                        self.ensure_module_imported(pendulum)
                        datetime_parser = "pendulum.parse"
                    else:
                        raise ThirdPartyModuleNotFoundError(
                            "pendulum", fname, parent)  # pragma no cover
                else:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        f"Unsupported deserialization engine "
                        f'"{deserialize_option}"',
                    )
                suffix = ""
                if origin_type is datetime.date:
                    suffix = ".date()"
                elif origin_type is datetime.time:
                    suffix = ".time()"
                return (f"{value_name} if use_datetime else "
                        f"{datetime_parser}({value_name}){suffix}")
            return (f"{value_name} if use_datetime else "
                    f"datetime.{origin_type.__name__}."
                    f"fromisoformat({value_name})")
        elif origin_type is datetime.timedelta:
            return overridden or f"datetime.timedelta(seconds={value_name})"
        elif origin_type is datetime.timezone:
            return overridden or f"parse_timezone({value_name})"
        elif origin_type is uuid.UUID:
            return overridden or f"uuid.UUID({value_name})"
        elif origin_type is ipaddress.IPv4Address:
            return overridden or f"ipaddress.IPv4Address({value_name})"
        elif origin_type is ipaddress.IPv6Address:
            return overridden or f"ipaddress.IPv6Address({value_name})"
        elif origin_type is ipaddress.IPv4Network:
            return overridden or f"ipaddress.IPv4Network({value_name})"
        elif origin_type is ipaddress.IPv6Network:
            return overridden or f"ipaddress.IPv6Network({value_name})"
        elif origin_type is ipaddress.IPv4Interface:
            return overridden or f"ipaddress.IPv4Interface({value_name})"
        elif origin_type is ipaddress.IPv6Interface:
            return overridden or f"ipaddress.IPv6Interface({value_name})"
        elif origin_type is Decimal:
            return overridden or f"Decimal({value_name})"
        elif origin_type is Fraction:
            return overridden or f"Fraction({value_name})"
        elif issubclass(origin_type, typing.Collection) and not issubclass(
                origin_type, enum.Enum):
            args = getattr(ftype, "__args__", ())

            def inner_expr(arg_num=0, v_name="value", v_type=None):
                if v_type:
                    return self._unpack_field_value(fname, v_type, parent,
                                                    v_name)
                else:
                    return self._unpack_field_value(fname, args[arg_num],
                                                    parent, v_name)

            if issubclass(origin_type, typing.ByteString):
                if origin_type is bytes:
                    specific = f"decodebytes({value_name}.encode())"
                    return (f"{value_name} if use_bytes else "
                            f"{overridden or specific}")
                elif origin_type is bytearray:
                    if overridden:
                        overridden = (
                            f"bytearray({value_name}) if use_bytes else "
                            f"{overridden}")
                    specific = (f"bytearray({value_name} if use_bytes else "
                                f"decodebytes({value_name}.encode()))")
                    return overridden or specific
            elif issubclass(origin_type, str):
                return overridden or value_name
            elif issubclass(origin_type, typing.List):
                if is_generic(ftype):
                    return (overridden
                            or f"[{inner_expr()} for value in {value_name}]")
                elif ftype is list:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.List[T] instead")
            elif issubclass(origin_type, typing.Deque):
                if is_generic(ftype):
                    return (overridden or f"collections.deque([{inner_expr()} "
                            f"for value in {value_name}])")
                elif ftype is collections.deque:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.Deque[T] instead")
            elif issubclass(origin_type, typing.Tuple):
                if is_generic(ftype):
                    return (
                        overridden or
                        f"tuple([{inner_expr()} for value in {value_name}])")
                elif ftype is tuple:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.Tuple[T] instead")
            elif issubclass(origin_type, typing.FrozenSet):
                if is_generic(ftype):
                    return (overridden or f"frozenset([{inner_expr()} "
                            f"for value in {value_name}])")
                elif ftype is frozenset:
                    raise UnserializableField(
                        fname, ftype, parent,
                        "Use typing.FrozenSet[T] instead")
            elif issubclass(origin_type, typing.AbstractSet):
                if is_generic(ftype):
                    return (overridden or
                            f"set([{inner_expr()} for value in {value_name}])")
                elif ftype is set:
                    raise UnserializableField(fname, ftype, parent,
                                              "Use typing.Set[T] instead")
            elif issubclass(origin_type, typing.ChainMap):
                if ftype is collections.ChainMap:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.ChainMap[KT,VT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "ChainMaps with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden or f"collections.ChainMap("
                                f'*[{{{inner_expr(0,"key")}:{inner_expr(1)} '
                                f"for key, value in m.items()}} "
                                f"for m in {value_name}])")
            elif PY_37_MIN and issubclass(origin_type, typing.OrderedDict):
                if ftype is collections.OrderedDict:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.OrderedDict[KT,VT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "OrderedDict with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden or f"collections.OrderedDict("
                                f'{{{inner_expr(0,"key")}: {inner_expr(1)} '
                                f"for key, value in {value_name}.items()}})")
            elif issubclass(origin_type, typing.Counter):
                if ftype is collections.Counter:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.Counter[KT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "Counter with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden or f"collections.Counter("
                                f'{{{inner_expr(0,"key")}: '
                                f"{inner_expr(1, v_type=int)} "
                                f"for key, value in {value_name}.items()}})")
            elif issubclass(origin_type, typing.Mapping):
                if ftype is dict:
                    raise UnserializableField(
                        fname,
                        ftype,
                        parent,
                        "Use typing.Dict[KT,VT] or Mapping[KT,VT] instead",
                    )
                elif is_generic(ftype):
                    if is_dataclass(args[0]):
                        raise UnserializableDataError(
                            "Mappings with dataclasses as keys "
                            "are not supported by mashumaro")
                    else:
                        return (overridden
                                or f'{{{inner_expr(0,"key")}: {inner_expr(1)} '
                                f"for key, value in {value_name}.items()}}")
            elif issubclass(origin_type, typing.Sequence):
                if is_generic(ftype):
                    return (overridden
                            or f"[{inner_expr()} for value in {value_name}]")
        elif issubclass(origin_type, os.PathLike):
            if overridden:
                return overridden
            elif issubclass(origin_type, pathlib.PosixPath):
                return f"pathlib.PosixPath({value_name})"
            elif issubclass(origin_type, pathlib.WindowsPath):
                return f"pathlib.WindowsPath({value_name})"
            elif issubclass(origin_type, pathlib.Path):
                return f"pathlib.Path({value_name})"
            elif issubclass(origin_type, pathlib.PurePosixPath):
                return f"pathlib.PurePosixPath({value_name})"
            elif issubclass(origin_type, pathlib.PureWindowsPath):
                return f"pathlib.PureWindowsPath({value_name})"
            elif issubclass(origin_type, pathlib.PurePath):
                return f"pathlib.PurePath({value_name})"
            elif origin_type is os.PathLike:
                return f"pathlib.PurePath({value_name})"
            else:
                return f"{type_name(origin_type)}({value_name})"
        elif issubclass(origin_type, enum.Enum):
            specific = f"{type_name(origin_type)}({value_name})"
            return f"{value_name} if use_enum else {overridden or specific}"
        elif is_dataclass_dict_mixin_subclass(ftype):
            return overridden or (
                f"{type_name(ftype)}.from_dict({value_name}, "
                f"use_bytes, use_enum, use_datetime)")
        elif overridden:
            return overridden

        raise UnserializableField(fname, ftype, parent)
Example No. 32
# FIXME: failed to load parent module when executing from another path
#from .configuration import S3Configuration

_AWS = 'aws'
_DIGITALOCEAN_PUBLIC = 'dig-public'
_DIGITALOCEAN_PRIVATE = 'dig-private'

_DIGITALOCEAN_ENDPOINT = 'https://ams3.digitaloceanspaces.com'

_DEFAULTS = MappingProxyType({
    _AWS: {
        'bucket_name': 's3.obus.hasadna.org.il'
    },
    _DIGITALOCEAN_PRIVATE: {
        'bucket_name': 'obus-do1',
        'endpoint_url': _DIGITALOCEAN_ENDPOINT
    },
    _DIGITALOCEAN_PUBLIC: {
        'bucket_name': 'obus-do2',
        'endpoint_url': _DIGITALOCEAN_ENDPOINT
    }
})
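
Note that MappingProxyType is a read-only view and the protection is shallow: the nested preset dicts above remain plain, mutable dicts. A quick demonstration under that assumption:

import copy

try:
    _DEFAULTS['new-preset'] = {}        # top-level writes are rejected
except TypeError:
    pass

# Nested dicts can still be mutated, so deep-copy before customizing:
presets = copy.deepcopy(dict(_DEFAULTS))
presets[_AWS]['bucket_name'] = 'my-bucket'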


class S3Crud:
    def __init__(self, **conn_args):
        self.bucket_name = conn_args.get('bucket_name')
        assert self.bucket_name is not None

        conn_args.pop('bucket_name')
Example No. 33
def _safeMakeMappingProxyType(data):
    if data is None:
        data = {}
    return MappingProxyType(data)
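
A small usage sketch of the helper above:

proxy = _safeMakeMappingProxyType(None)
assert len(proxy) == 0           # None is normalized to an empty mapping

proxy = _safeMakeMappingProxyType({'a': 1})
assert proxy['a'] == 1
# proxy['b'] = 2 would raise TypeError: the returned view is read-only.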
Example No. 34
def __init__(self, domain, service, data=None, call_id=None):
    """Initialize a service call."""
    self.domain = domain.lower()
    self.service = service.lower()
    self.data = MappingProxyType(data or {})
    self.call_id = call_id
Example No. 35
class State(object):
    """Object to represent a state within the state machine.

    entity_id: the entity that is represented.
    state: the state of the entity
    attributes: extra information on entity and state
    last_changed: last time the state was changed, not the attributes.
    last_updated: last time this object was updated.
    """

    __slots__ = ['entity_id', 'state', 'attributes',
                 'last_changed', 'last_updated']

    # pylint: disable=too-many-arguments
    def __init__(self, entity_id, state, attributes=None, last_changed=None,
                 last_updated=None):
        """Initialize a new state."""
        if not valid_entity_id(entity_id):
            raise InvalidEntityFormatError((
                "Invalid entity id encountered: {}. "
                "Format should be <domain>.<object_id>").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = str(state)
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = dt_util.strip_microseconds(
            last_updated or dt_util.utcnow())

        # Strip microseconds from last_changed, else we cannot guarantee
        # state == State.from_dict(state.as_dict()).
        # This happens because as_dict uses datetime_to_str,
        # which does not preserve microseconds.
        self.last_changed = dt_util.strip_microseconds(
            last_changed or self.last_updated)

    @property
    def domain(self):
        """Domain of this state."""
        return split_entity_id(self.entity_id)[0]

    @property
    def object_id(self):
        """Object id of this state."""
        return split_entity_id(self.entity_id)[1]

    @property
    def name(self):
        """Name of this state."""
        return (
            self.attributes.get(ATTR_FRIENDLY_NAME) or
            self.object_id.replace('_', ' '))

    def as_dict(self):
        """Return a dict representation of the State.

        To be used for JSON serialization.
        Ensures: state == State.from_dict(state.as_dict())
        """
        return {'entity_id': self.entity_id,
                'state': self.state,
                'attributes': dict(self.attributes),
                'last_changed': dt_util.datetime_to_str(self.last_changed),
                'last_updated': dt_util.datetime_to_str(self.last_updated)}

    @classmethod
    def from_dict(cls, json_dict):
        """Initialize a state from a dict.

        Ensures: state == State.from_dict(state.as_dict())
        """
        if not (json_dict and 'entity_id' in json_dict and
                'state' in json_dict):
            return None

        last_changed = json_dict.get('last_changed')

        if last_changed:
            last_changed = dt_util.str_to_datetime(last_changed)

        last_updated = json_dict.get('last_updated')

        if last_updated:
            last_updated = dt_util.str_to_datetime(last_updated)

        return cls(json_dict['entity_id'], json_dict['state'],
                   json_dict.get('attributes'), last_changed, last_updated)

    def __eq__(self, other):
        """Return the comparison of the state."""
        return (self.__class__ == other.__class__ and
                self.entity_id == other.entity_id and
                self.state == other.state and
                self.attributes == other.attributes)

    def __repr__(self):
        """Return the representation of the states."""
        attr = "; {}".format(util.repr_helper(self.attributes)) \
               if self.attributes else ""

        return "<state {}={}{} @ {}>".format(
            self.entity_id, self.state, attr,
            dt_util.datetime_to_local_str(self.last_changed))
Example No. 36
def write_mapping(f, key, value, dataset_kwargs=MappingProxyType({})):
    for sub_key, sub_value in value.items():
        write_attribute(f, f"{key}/{sub_key}", sub_value, dataset_kwargs)
Example No. 37
class SirToNaive(NodeTranslator):
    def __init__(self, **kwargs):
        super().__init__()
        self.isControlFlow = None
        # TODO: this is a dummy symbol table for stencil parameters;
        # elements are sir.Field.
        self.sir_stencil_params = {}
        self.current_loc_type_stack = []  # TODO experimental

    BINOPSTR_TO_ENUM: ClassVar[
        Mapping[str, common.BinaryOperator]] = MappingProxyType({
            "+": common.BinaryOperator.ADD,
            "-": common.BinaryOperator.SUB,
            "*": common.BinaryOperator.MUL,
            "/": common.BinaryOperator.DIV,
        })

    SIR_TO_NAIVE_LOCATION_TYPE: ClassVar[
        Mapping[sir.LocationType, naive.LocationType]] = MappingProxyType({
            sir.LocationType.Edge: naive.LocationType.Edge,
            sir.LocationType.Cell: naive.LocationType.Face,
            sir.LocationType.Vertex: naive.LocationType.Node,
        })

    def _get_field_location_type(self, field: sir.Field):
        if field.field_dimensions.horizontal_dimension.sparse_part:
            return self.SIR_TO_NAIVE_LOCATION_TYPE[
                field.field_dimensions.horizontal_dimension.sparse_part[0]]
        return self.SIR_TO_NAIVE_LOCATION_TYPE[
            field.field_dimensions.horizontal_dimension.dense_location_type]

    def _is_sparse_field(self, field: sir.Field):
        if field.field_dimensions.horizontal_dimension.sparse_part:
            return True
        else:
            return False

    def visit_Field(self, node: Node, **kwargs):
        assert (
            not node.field_dimensions.horizontal_dimension.sparse_part
        ) or (len(node.field_dimensions.horizontal_dimension.sparse_part) <= 1)
        sparse_location_type = None
        if node.field_dimensions.horizontal_dimension.sparse_part:
            sparse_location_type = self.SIR_TO_NAIVE_LOCATION_TYPE[
                node.field_dimensions.horizontal_dimension.sparse_part[0]]
        return naive.UnstructuredField(
            name=node.name,
            location_type=self.SIR_TO_NAIVE_LOCATION_TYPE[
                node.field_dimensions.horizontal_dimension.
                dense_location_type],
            sparse_location_type=sparse_location_type,
            data_type=common.DataType.FLOAT64,
        )

    def visit_Stencil(self, node: Node, **kwargs):
        # Needs to run before the body/ast as it creates a dummy symbol table for stencil parameters
        params = []
        for f in node.params:
            self.sir_stencil_params[f.name] = f
            params.append(self.visit(f))

        [declarations, k_loops] = self.visit(node.ast)
        return naive.Computation(
            params=params,
            stencils=[
                naive.Stencil(name=node.name,
                              k_loops=k_loops,
                              declarations=declarations)
            ],
        )

    def visit_VerticalRegion(self, node: Node, **kwargs):
        # TODO don't ignore interval
        [declarations, horizontal_loops] = self.visit(node.ast)
        return [
            declarations,
            [
                naive.ForK(
                    loop_order=node.loop_order,
                    horizontal_loops=horizontal_loops,
                )
            ],
        ]

    def visit_VerticalRegionDeclStmt(self, node: Node, **kwargs):
        return self.visit(node.vertical_region)

    def visit_FieldAccessExpr(self, node: Node, **kwargs):
        horizontal_offset = False  # TODO
        return naive.FieldAccessExpr(
            name=node.name,
            offset=(horizontal_offset, node.vertical_offset),
            location_type=self._get_field_location_type(
                self.sir_stencil_params[node.name]),
            is_sparse=self._is_sparse_field(
                self.sir_stencil_params[node.name]),
        )

    def visit_AssignmentExpr(self, node: Node, **kwargs):
        assert node.op == "="
        return naive.AssignmentExpr(left=self.visit(node.left),
                                    right=self.visit(node.right))

    def visit_ExprStmt(self, node: Node, **kwargs):
        return naive.ExprStmt(expr=self.visit(node.expr))

    def visit_VarAccessExpr(self, node: Node, **kwargs):
        loctype = ""
        if node.location_type:
            loctype = self.SIR_TO_NAIVE_LOCATION_TYPE[node.location_type]
        elif self.current_loc_type_stack:
            loctype = self.current_loc_type_stack[-1]
        else:
            raise ValueError("no location type")

        return naive.FieldAccessExpr(
            name=node.name,
            offset=(False, 0),
            location_type=loctype,
            is_sparse=False,
        )

    def visit_BlockStmt(self, node: Node, **kwargs):
        if self.isControlFlow:
            for s in node.statements:
                assert isinstance(s, sir.VerticalRegionDeclStmt)
                return self.visit(s)
        else:
            horizontal_loops = []
            declarations = []
            for s in node.statements:
                if isinstance(s, sir.VarDeclStmt):
                    # TODO this doesn't work: if we move the declaration out of the horizontal loop, we need to promote it to a field
                    [vardecl, initexpr] = self.visit(s)
                    declarations.append(vardecl)
                    transformed_stmt = naive.ExprStmt(
                        expr=naive.AssignmentExpr(
                            left=naive.FieldAccessExpr(
                                name=vardecl.name,
                                offset=(False, 0),
                                location_type=initexpr.location_type,
                                is_sparse=False,
                            ),
                            right=initexpr,
                        ))
                else:
                    transformed_stmt = self.visit(s)

                horizontal_loops.append(
                    naive.HorizontalLoop(ast=naive.BlockStmt(
                        statements=[transformed_stmt])))
            return [declarations, horizontal_loops]

    def visit_BinaryOperator(self, node: Node, **kwargs):
        return naive.BinaryOp(
            op=self.BINOPSTR_TO_ENUM[node.op],
            left=self.visit(node.left),
            right=self.visit(node.right),
        )

    def visit_VarDeclStmt(self, node: Node, **kwargs):
        assert node.op == "="
        assert node.dimension == 0
        assert len(node.init_list) == 1
        assert isinstance(node.data_type.data_type, sir.BuiltinType)

        loctype = ""
        if node.location_type:
            loctype = self.SIR_TO_NAIVE_LOCATION_TYPE[node.location_type]
            if not self.current_loc_type_stack or self.current_loc_type_stack[
                    -1] != loctype:
                self.current_loc_type_stack.append(loctype)
        else:
            raise ValueError("no location type")

        init = self.visit(node.init_list[0])
        return [
            naive.TemporaryFieldDeclStmt(
                data_type=node.data_type.data_type.type_id,
                name=node.name,
                location_type=loctype,
            ),
            init,
        ]

    def visit_LiteralAccessExpr(self, node: Node, **kwargs):
        loctype = ""
        if node.location_type:
            loctype = self.SIR_TO_NAIVE_LOCATION_TYPE[node.location_type]
        elif self.current_loc_type_stack:
            loctype = self.current_loc_type_stack[-1]
        else:
            raise ValueError("no location type")

        return naive.LiteralExpr(
            value=node.value,
            data_type=node.data_type.type_id,
            location_type=loctype,
        )

    def visit_ReductionOverNeighborExpr(self, node: Node, **kwargs):
        self.current_loc_type_stack.append(
            self.SIR_TO_NAIVE_LOCATION_TYPE[node.chain[-1]])
        right = self.visit(node.rhs)
        init = self.visit(node.init)
        self.current_loc_type_stack.pop()
        return naive.ReduceOverNeighbourExpr(
            operation=self.BINOPSTR_TO_ENUM[node.op],
            right=right,
            init=init,
            location_type=self.SIR_TO_NAIVE_LOCATION_TYPE[node.chain[0]],
        )

    def visit_AST(self, node: Node, **kwargs):
        assert isinstance(node.root, sir.BlockStmt)  # TODO add check to IR
        if self.isControlFlow is None:
            self.isControlFlow = True
            return self.visit(node.root)
        elif self.isControlFlow is True:
            self.isControlFlow = False
            return self.visit(node.root)
        else:
            raise "unreachable: there should not be an AST node in the stencil ast"
Example No. 38
PG_DIALECT = postgres_dialect.dialect()
RESERVED_WORDS = frozenset(postgres_dialect.RESERVED_WORDS)

CONSTRAINT_CONVENTIONS = {
    "ix": '%(column_0_label)s_idx',
    "uq": "%(table_name)s_%(column_0_name)s_key",
    "ck": "%(table_name)s_%(constraint_name)s_check",
    "fk": "%(table_name)s_%(column_0_name)s_%(referred_table_name)s_fkey",
    "pk": "%(table_name)s_pkey"
}

ALCHEMY_TO_PYTHON_DATA_TYPE = {
    ARRAY: list,
    INTEGER: int,
    BOOLEAN: bool,
    FLOAT: float,
    CHAR: str,
    DATE: date,
    VARCHAR: str,
    TEXT: str,
    NullType: None,
    UUID: uuid.UUID,
    TIMESTAMP: datetime
}

# swap uuid to custom type
POSTGRES_TO_ALCHEMY_TYPE = MappingProxyType(
    dict((k, v) if k != 'uuid' else (k, UUID)
         for k, v in ischema_names.items()))
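
The comprehension above rebuilds the whole mapping just to swap one key; an equivalent, arguably clearer formulation (a stylistic alternative, not the original code):

POSTGRES_TO_ALCHEMY_TYPE = MappingProxyType({**ischema_names, 'uuid': UUID})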
Example No. 39
class State:
    """Object to represent a state within the state machine.

    entity_id: the entity that is represented.
    state: the state of the entity
    attributes: extra information on entity and state
    last_changed: last time the state was changed, not the attributes.
    last_updated: last time this object was updated.
    context: Context in which it was created
    """

    __slots__ = [
        "entity_id",
        "state",
        "attributes",
        "last_changed",
        "last_updated",
        "context",
    ]

    def __init__(
        self,
        entity_id: str,
        state: str,
        attributes: Optional[Mapping] = None,
        last_changed: Optional[datetime.datetime] = None,
        last_updated: Optional[datetime.datetime] = None,
        context: Optional[Context] = None,
        # Temp, because database can still store invalid entity IDs
        # Remove with 1.0 or in 2020.
        temp_invalid_id_bypass: Optional[bool] = False,
    ) -> None:
        """Initialize a new state."""
        state = str(state)

        if not valid_entity_id(entity_id) and not temp_invalid_id_bypass:
            raise InvalidEntityFormatError(
                f"Invalid entity id encountered: {entity_id}. "
                "Format should be <domain>.<object_id>")

        if not valid_state(state):
            raise InvalidStateError(
                f"Invalid state encountered for entity id: {entity_id}. "
                "State max length is 255 characters.")

        self.entity_id = entity_id.lower()
        self.state = state
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = last_updated or dt_util.utcnow()
        self.last_changed = last_changed or self.last_updated
        self.context = context or Context()

    @property
    def domain(self) -> str:
        """Domain of this state."""
        return split_entity_id(self.entity_id)[0]

    @property
    def object_id(self) -> str:
        """Object id of this state."""
        return split_entity_id(self.entity_id)[1]

    @property
    def name(self) -> str:
        """Name of this state."""
        return self.attributes.get(
            ATTR_FRIENDLY_NAME) or self.object_id.replace("_", " ")

    def as_dict(self) -> Dict:
        """Return a dict representation of the State.

        Async friendly.

        To be used for JSON serialization.
        Ensures: state == State.from_dict(state.as_dict())
        """
        return {
            "entity_id": self.entity_id,
            "state": self.state,
            "attributes": dict(self.attributes),
            "last_changed": self.last_changed,
            "last_updated": self.last_updated,
            "context": self.context.as_dict(),
        }

    @classmethod
    def from_dict(cls, json_dict: Dict) -> Any:
        """Initialize a state from a dict.

        Async friendly.

        Ensures: state == State.from_dict(state.as_dict())
        """
        if not (json_dict and "entity_id" in json_dict
                and "state" in json_dict):
            return None

        last_changed = json_dict.get("last_changed")

        if isinstance(last_changed, str):
            last_changed = dt_util.parse_datetime(last_changed)

        last_updated = json_dict.get("last_updated")

        if isinstance(last_updated, str):
            last_updated = dt_util.parse_datetime(last_updated)

        context = json_dict.get("context")
        if context:
            context = Context(id=context.get("id"),
                              user_id=context.get("user_id"))

        return cls(
            json_dict["entity_id"],
            json_dict["state"],
            json_dict.get("attributes"),
            last_changed,
            last_updated,
            context,
        )

    def __eq__(self, other: Any) -> bool:
        """Return the comparison of the state."""
        return (  # type: ignore
            self.__class__ == other.__class__
            and self.entity_id == other.entity_id and self.state == other.state
            and self.attributes == other.attributes
            and self.context == other.context)

    def __repr__(self) -> str:
        """Return the representation of the states."""
        attrs = ("; {}".format(util.repr_helper(self.attributes))
                 if self.attributes else "")

        return "<state {}={}{} @ {}>".format(
            self.entity_id,
            self.state,
            attrs,
            dt_util.as_local(self.last_changed).isoformat(),
        )
Example No. 40
class State(object):
    """Object to represent a state within the state machine.

    entity_id: the entity that is represented.
    state: the state of the entity
    attributes: extra information on entity and state
    last_changed: last time the state was changed, not the attributes.
    last_updated: last time this object was updated.
    """

    __slots__ = ['entity_id', 'state', 'attributes',
                 'last_changed', 'last_updated']

    def __init__(self, entity_id, state, attributes=None, last_changed=None,
                 last_updated=None):
        """Initialize a new state."""
        if not valid_entity_id(entity_id):
            raise InvalidEntityFormatError((
                "Invalid entity id encountered: {}. "
                "Format should be <domain>.<object_id>").format(entity_id))

        self.entity_id = entity_id.lower()
        self.state = str(state)
        self.attributes = MappingProxyType(attributes or {})
        self.last_updated = last_updated or dt_util.utcnow()

        self.last_changed = last_changed or self.last_updated

    @property
    def domain(self):
        """Domain of this state."""
        return split_entity_id(self.entity_id)[0]

    @property
    def object_id(self):
        """Object id of this state."""
        return split_entity_id(self.entity_id)[1]

    @property
    def name(self):
        """Name of this state."""
        return (
            self.attributes.get(ATTR_FRIENDLY_NAME) or
            self.object_id.replace('_', ' '))

    def as_dict(self):
        """Return a dict representation of the State.

        Async friendly.

        To be used for JSON serialization.
        Ensures: state == State.from_dict(state.as_dict())
        """
        return {'entity_id': self.entity_id,
                'state': self.state,
                'attributes': dict(self.attributes),
                'last_changed': self.last_changed,
                'last_updated': self.last_updated}

    @classmethod
    def from_dict(cls, json_dict):
        """Initialize a state from a dict.

        Async friendly.

        Ensures: state == State.from_dict(state.as_dict())
        """
        if not (json_dict and 'entity_id' in json_dict and
                'state' in json_dict):
            return None

        last_changed = json_dict.get('last_changed')

        if isinstance(last_changed, str):
            last_changed = dt_util.parse_datetime(last_changed)

        last_updated = json_dict.get('last_updated')

        if isinstance(last_updated, str):
            last_updated = dt_util.parse_datetime(last_updated)

        return cls(json_dict['entity_id'], json_dict['state'],
                   json_dict.get('attributes'), last_changed, last_updated)

    def __eq__(self, other):
        """Return the comparison of the state."""
        return (self.__class__ == other.__class__ and
                self.entity_id == other.entity_id and
                self.state == other.state and
                self.attributes == other.attributes)

    def __repr__(self):
        """Return the representation of the states."""
        attr = "; {}".format(util.repr_helper(self.attributes)) \
               if self.attributes else ""

        return "<state {}={}{} @ {}>".format(
            self.entity_id, self.state, attr,
            dt_util.as_local(self.last_changed).isoformat())
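
Here the MappingProxyType wrapper on attributes is what keeps a stored state from being mutated in place; a quick sketch under the same environment assumption as above:

s = State("sensor.temp", "21.5", {"unit_of_measurement": "C"})
try:
    s.attributes["unit_of_measurement"] = "K"
except TypeError:
    print("attributes are read-only")
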
Example No. 41
    def __new__(cls, **kwargs):
        return super().__new__(cls, user_dict=MappingProxyType(kwargs))
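
On its own this method won't run: the zero-argument super() form needs an enclosing class, and the user_dict keyword implies a named-tuple-style base. A hypothetical reconstruction (the class names and field are assumptions):

from types import MappingProxyType
from typing import Any, Mapping, NamedTuple

class _Base(NamedTuple):
    user_dict: Mapping[str, Any]

class FrozenKwargs(_Base):
    """Store the constructor's keyword arguments as a read-only mapping."""

    def __new__(cls, **kwargs):
        return super().__new__(cls, user_dict=MappingProxyType(kwargs))

cfg = FrozenKwargs(host="localhost", port=8080)
print(cfg.user_dict["port"])  # 8080; assigning to it would raise TypeError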
Example No. 42
from types import MappingProxyType

d = {1: 'A'}

dproxy = MappingProxyType(d)  # read-only view of d
print(dproxy)

print(dproxy[1])
# dproxy[2] = 'x' would raise TypeError: the proxy rejects item assignment.
d[2] = 'B'  # writes to the underlying dict still show through the proxy
print(dproxy)

st = {"1", "2", "1"}  # duplicates collapse in a set
print(st)

st2inm = frozenset({'2', 'b', 'a', '2'})  # frozenset: immutable and deduplicated
print(st2inm)

no_repeatset = set('a la pradera')  # the unique characters of the string
no_repeatfset = frozenset('a la vereda')

print(no_repeatset)
print(no_repeatfset)
Example No. 43
import numpy as np
from types import MappingProxyType

# Imports above are assumed by this excerpt; `write_array` is defined
# elsewhere in the source module.
def write_scalar(f, key, value, dataset_kwargs=MappingProxyType({})):
    # h5py can't compress scalar datasets and would raise, so drop the option.
    if "compression" in dataset_kwargs:
        dataset_kwargs = dataset_kwargs.copy()  # copying a proxy yields a plain dict
        dataset_kwargs.pop("compression")
    write_array(f, key, np.array(value), dataset_kwargs)
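
The MappingProxyType({}) default is the point here: a bare {} default would be a single mutable object shared across calls. A runnable sketch with a hypothetical write_array stand-in (the real one lives elsewhere in the module):

import h5py

def write_array(f, key, value, dataset_kwargs):
    # Hypothetical stand-in for the module's real array writer.
    f.create_dataset(key, data=value, **dataset_kwargs)

with h5py.File("demo.h5", "w") as f:
    write_scalar(f, "answer", 42, {"compression": "gzip"})  # compression is dropped
    print(f["answer"][()])  # 42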