Example 1
async def test_switched_host(hass: HomeAssistantType):
    """Test cast device listens for changed hosts and disconnects old cast."""
    info = get_fake_chromecast_info()
    full_info = attr.evolve(info, model_name='google home',
                            friendly_name='Speaker', uuid=FakeUUID)

    with patch('pychromecast.dial.get_device_status',
               return_value=full_info):
        chromecast, _ = await async_setup_media_player_cast(hass, full_info)

    chromecast2 = get_fake_chromecast(info)
    with patch('pychromecast._get_chromecast_from_host',
               return_value=chromecast2) as get_chromecast:
        async_dispatcher_send(hass, cast.SIGNAL_CAST_DISCOVERED, full_info)
        await hass.async_block_till_done()
        assert get_chromecast.call_count == 0

        changed = attr.evolve(full_info, friendly_name='Speaker 2')
        async_dispatcher_send(hass, cast.SIGNAL_CAST_DISCOVERED, changed)
        await hass.async_block_till_done()
        assert get_chromecast.call_count == 0

        changed = attr.evolve(changed, host='host2')
        async_dispatcher_send(hass, cast.SIGNAL_CAST_DISCOVERED, changed)
        await hass.async_block_till_done()
        assert get_chromecast.call_count == 1
        assert chromecast.disconnect.call_count == 1
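Example 1 leans on attr.evolve to derive modified copies of an immutable discovery-info object without touching the original. A minimal, self-contained sketch of that pattern (CastInfo is an illustrative stand-in, not the class from the test):

import attr

@attr.s(frozen=True)
class CastInfo:
    host = attr.ib()
    friendly_name = attr.ib()

info = CastInfo(host="host1", friendly_name="Speaker")
changed = attr.evolve(info, host="host2")

assert info.host == "host1"        # the original is untouched
assert changed.host == "host2"     # the copy carries the change
assert changed.friendly_name == "Speaker"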
def union1(p: Union[A, B]):
    attr.fields(<warning descr="'attr.fields' method should be called on attrs types">p</warning>)
    attr.fields_dict(<warning descr="'attr.fields_dict' method should be called on attrs types">p</warning>)

    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
def union2(p: Union[Type[A], Type[B]]):
    attr.fields(p)
    attr.fields_dict(p)

    attr.asdict(<warning descr="'attr.asdict' method should be called on attrs instances">p</warning>)
    attr.astuple(<warning descr="'attr.astuple' method should be called on attrs instances">p</warning>)
    attr.assoc(<warning descr="'attr.assoc' method should be called on attrs instances">p</warning>)
    attr.evolve(<warning descr="'attr.evolve' method should be called on attrs instances">p</warning>)
def structural(p):
    print(len(p))
    attr.fields(p)
    attr.fields_dict(p)

    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
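The three functions above are IDE-inspection test data: the warning markup asserts that attr.fields()/attr.fields_dict() must be called on an attrs class, while attr.asdict(), attr.astuple(), attr.assoc() and attr.evolve() must be called on an attrs instance (and that nothing is reported for a plain structural parameter). A runnable sketch of the class/instance split, assuming a trivial attrs class:

import attr

@attr.s
class A:
    x = attr.ib(default=0)

attr.fields(A)           # introspection: takes the attrs *class*
attr.fields_dict(A)      # same: class-level
attr.asdict(A())         # conversion: takes an *instance*
attr.astuple(A())
attr.evolve(A(), x=1)    # evolve likewise takes an instance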
Example 5
 def test_unknown(self, C):
     """
     Wanting to change an unknown attribute raises a TypeError.
     """
     # No generated class will have a four letter attribute.
     with pytest.raises(TypeError) as e:
         evolve(C(), aaaa=2)
     expected = "__init__() got an unexpected keyword argument 'aaaa'"
     assert (expected,) == e.value.args
Example 6
def test_can_add_authorizers_to_dependencies(lambda_function):
    auth1 = evolve(lambda_function, resource_name='auth1')
    auth2 = evolve(lambda_function, resource_name='auth2')
    rest_api = models.RestAPI(
        resource_name='rest_api',
        swagger_doc={'swagger': '2.0'},
        api_gateway_stage='api',
        lambda_function=lambda_function,
        authorizers=[auth1, auth2],
    )
    assert rest_api.dependencies() == [lambda_function, auth1, auth2]
Example 7
    def test_validator_failure(self):
        """
        TypeError isn't swallowed when validation fails within evolve.
        """
        @attributes
        class C(object):
            a = attr(validator=instance_of(int))

        with pytest.raises(TypeError) as e:
            evolve(C(a=1), a="some string")
        m = e.value.args[0]
        assert m.startswith("'a' must be <{type} 'int'>".format(type=TYPE))
Example 8
    def test_private(self):
        """
        evolve() acts as `__init__` with regards to private attributes.
        """
        @attributes
        class C(object):
            _a = attr()

        assert evolve(C(1), a=2)._a == 2

        with pytest.raises(TypeError):
            evolve(C(1), _a=2)

        with pytest.raises(TypeError):
            evolve(C(1), a=3, _a=2)
Example 9
    def _async_update_device(self, device_id, *, config_entry_id=_UNDEF,
                             remove_config_entry_id=_UNDEF,
                             hub_device_id=_UNDEF):
        """Update device attributes."""
        old = self.devices[device_id]

        changes = {}

        config_entries = old.config_entries

        if (config_entry_id is not _UNDEF and
                config_entry_id not in old.config_entries):
            config_entries = old.config_entries | {config_entry_id}

        if (remove_config_entry_id is not _UNDEF and
                remove_config_entry_id in config_entries):
            config_entries = set(config_entries)
            config_entries.remove(remove_config_entry_id)

        if config_entries is not old.config_entries:
            changes['config_entries'] = config_entries

        if (hub_device_id is not _UNDEF and
                hub_device_id != old.hub_device_id):
            changes['hub_device_id'] = hub_device_id

        if not changes:
            return old

        new = self.devices[device_id] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return new
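The registry method above shows a pattern that recurs throughout these examples: collect only the attributes that actually differ into a changes dict, return the old entry unchanged when the dict stays empty, and otherwise store attr.evolve(old, **changes). A stripped-down sketch of the same idea, using None instead of the _UNDEF sentinel and a hypothetical Device class:

import attr

@attr.s(frozen=True)
class Device:
    name = attr.ib()
    sw_version = attr.ib()

def update_device(device, *, name=None, sw_version=None):
    changes = {}
    if name is not None and name != device.name:
        changes["name"] = name
    if sw_version is not None and sw_version != device.sw_version:
        changes["sw_version"] = sw_version
    if not changes:
        return device            # nothing changed: keep the old object
    return attr.evolve(device, **changes)

d = Device(name="lamp", sw_version="1.0")
assert update_device(d, name="lamp") is d
assert update_device(d, sw_version="1.1").sw_version == "1.1"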
Example 10
def _search_unicode_file(subsequence, sequence_file, search_params, search_class,
                         _chunk_size):
    if not subsequence:
        raise ValueError('subsequence must not be empty')

    CHUNK_SIZE = _chunk_size
    keep_chars = (
        len(subsequence) - 1 +
        search_class.extra_items_for_chunked_search(subsequence, search_params)
    )

    chunk = sequence_file.read(CHUNK_SIZE)
    offset = 0
    while chunk:
        for match in search_class.search(subsequence, chunk, search_params):
            yield attr.evolve(match,
                              start=match.start + offset,
                              end=match.end + offset)

        n_to_keep = min(keep_chars, len(chunk))
        offset += len(chunk) - n_to_keep
        if n_to_keep:
            chunk = chunk[-n_to_keep:] + sequence_file.read(CHUNK_SIZE)
            if len(chunk) == n_to_keep:
                break
        else:
            chunk = sequence_file.read(CHUNK_SIZE)
Example 11
    def remove(self, *types):
        """
        Produce a new checker with the given types forgotten.

        Arguments:

            types (~collections.Iterable):

                the names of the types to remove.

        Returns:

            A new `TypeChecker` instance

        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:

                if any given type is unknown to this object
        """

        checkers = self._type_checkers
        for each in types:
            try:
                checkers = checkers.remove(each)
            except KeyError:
                raise UndefinedTypeCheck(each)
        return attr.evolve(self, type_checkers=checkers)
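remove() treats the checker as immutable: it derives an updated mapping and returns attr.evolve(self, ...) instead of mutating in place. A minimal sketch of that copy-on-write method style, with a plain dict standing in for jsonschema's persistent map:

import attr

@attr.s(frozen=True)
class TypeChecker:
    _type_checkers = attr.ib(factory=dict)

    def remove(self, *types):
        checkers = dict(self._type_checkers)
        for each in types:
            if each not in checkers:
                raise KeyError(each)
            del checkers[each]
        # evolve() maps the private attribute back to the
        # ``type_checkers`` __init__ argument (leading underscore stripped).
        return attr.evolve(self, type_checkers=checkers)

tc = TypeChecker({"string": lambda inst: isinstance(inst, str)})
tc2 = tc.remove("string")
assert "string" not in tc2._type_checkers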
Example 12
    def async_update_entity(self, entity_id, *, name=_UNDEF):
        """Update properties of an entity."""
        old = self.entities[entity_id]

        changes = {}

        if name is not _UNDEF and name != old.name:
            changes['name'] = name

        if not changes:
            return old

        new = self.entities[entity_id] = attr.evolve(old, **changes)

        to_remove = []
        for listener_ref in new.update_listeners:
            listener = listener_ref()
            if listener is None:
                to_remove.append(listener_ref)  # keep the dead weakref so it can be pruned below
            else:
                try:
                    listener.async_registry_updated(old, new)
                except Exception:  # pylint: disable=broad-except
                    _LOGGER.exception('Error calling update listener')

        for ref in to_remove:
            new.update_listeners.remove(ref)

        self.async_schedule_save()

        return new
Example 13
 def test_can_update_lambda_function_code(self):
     function = create_function_resource('function_name')
     copy_of_function = attr.evolve(function)
     self.remote_state.declare_resource_exists(copy_of_function)
     # Now let's change the memory size and ensure we
     # get an update.
     function.memory_size = 256
     plan = self.determine_plan(function)
     existing_params = {
         'function_name': 'appname-dev-function_name',
         'role_arn': 'role:arn',
         'zip_contents': mock.ANY,
         'runtime': 'python2.7',
         'environment_variables': {},
         'tags': {},
         'timeout': 60,
     }
     expected_params = dict(memory_size=256, **existing_params)
     expected = models.APICall(
         method_name='update_function',
         params=expected_params,
     )
     self.assert_apicall_equals(plan[0], expected)
     assert list(self.last_plan.messages.values()) == [
         'Updating lambda function: appname-dev-function_name\n'
     ]
Example 14
    def remove_many(self, types):
        """
        Remove multiple types from the checkers that this object understands.

        Arguments:

            types (~collections.Iterable):

                An iterable of types to remove.

        Returns:

            A new `TypeChecker` instance

        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:
                if any of the types are unknown to this object
        """

        checkers = self._type_checkers
        for each in types:
            try:
                checkers = checkers.remove(each)
            except KeyError:
                raise UndefinedTypeCheck(each)
        return attr.evolve(self, type_checkers=checkers)
Example 15
    def test_no_changes(self, C):
        """
        No changes means a verbatim copy.
        """
        i1 = C()
        i2 = evolve(i1)

        assert i1 is not i2
        assert i1 == i2
Example 16
    def test_non_init_attrs(self):
        """
        evolve() handles `init=False` attributes.
        """
        @attributes
        class C(object):
            a = attr()
            b = attr(init=False, default=0)

        assert evolve(C(1), a=2).a == 2
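Because evolve() routes its changes through __init__, attributes declared with init=False are not copied from the source instance; they are re-initialized from their defaults. A small illustrative sketch:

import attr

@attr.s
class C:
    a = attr.ib()
    b = attr.ib(init=False, default=0)

c = C(1)
c.b = 99                 # mutate the non-init attribute
c2 = attr.evolve(c, a=2)
assert c2.a == 2
assert c2.b == 0         # b was re-created from its default, not copied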
Example 17
    def chdir(self, path):
        """
        Return a new executor where the working directory is different.

        :param path: new path
        :returns: new executor with a different working directory
        """
        new_shell = self._shell.clone()
        new_shell.chdir(path)
        return attr.evolve(self, shell=new_shell)
Example 18
    def _async_update_entity(self, entity_id, *, name=_UNDEF,
                             config_entry_id=_UNDEF, new_entity_id=_UNDEF,
                             device_id=_UNDEF):
        """Private facing update properties method."""
        old = self.entities[entity_id]

        changes = {}

        if name is not _UNDEF and name != old.name:
            changes['name'] = name

        if (config_entry_id is not _UNDEF and
                config_entry_id != old.config_entry_id):
            changes['config_entry_id'] = config_entry_id

        if (device_id is not _UNDEF and device_id != old.device_id):
            changes['device_id'] = device_id

        if new_entity_id is not _UNDEF and new_entity_id != old.entity_id:
            if self.async_is_registered(new_entity_id):
                raise ValueError('Entity is already registered')

            if not valid_entity_id(new_entity_id):
                raise ValueError('Invalid entity ID')

            if (split_entity_id(new_entity_id)[0] !=
                    split_entity_id(entity_id)[0]):
                raise ValueError('New entity ID should be same domain')

            self.entities.pop(entity_id)
            entity_id = changes['entity_id'] = new_entity_id

        if not changes:
            return old

        new = self.entities[entity_id] = attr.evolve(old, **changes)

        to_remove = []
        for listener_ref in new.update_listeners:
            listener = listener_ref()
            if listener is None:
                to_remove.append(listener_ref)  # keep the dead weakref so it can be pruned below
            else:
                try:
                    listener.async_registry_updated(old, new)
                except Exception:  # pylint: disable=broad-except
                    _LOGGER.exception('Error calling update listener')

        for ref in to_remove:
            new.update_listeners.remove(ref)

        self.async_schedule_save()

        return new
Example 19
    def patch_env(self, **kwargs):
        """
        Return a new executor where the environment is patched with the given attributes

        :param kwargs: new environment variables
        :returns: new executor with a shell with a patched environment.
        """
        new_shell = self._shell.clone()
        for key, value in kwargs.items():
            new_shell.setenv(key, value)
        return attr.evolve(self, shell=new_shell)
Example 20
async def test_entity_media_states(hass: HomeAssistantType):
    """Test various entity media states."""
    info = get_fake_chromecast_info()
    full_info = attr.evolve(info, model_name='google home',
                            friendly_name='Speaker', uuid=FakeUUID)

    with patch('pychromecast.dial.get_device_status',
               return_value=full_info):
        chromecast, entity = await async_setup_media_player_cast(hass, info)

    entity._available = True
    entity.schedule_update_ha_state()
    await hass.async_block_till_done()

    state = hass.states.get('media_player.speaker')
    assert state is not None
    assert state.name == 'Speaker'
    assert state.state == 'unknown'
    assert entity.unique_id == full_info.uuid

    media_status = MagicMock(images=None)
    media_status.player_is_playing = True
    entity.new_media_status(media_status)
    await hass.async_block_till_done()
    state = hass.states.get('media_player.speaker')
    assert state.state == 'playing'

    media_status.player_is_playing = False
    media_status.player_is_paused = True
    entity.new_media_status(media_status)
    await hass.async_block_till_done()
    state = hass.states.get('media_player.speaker')
    assert state.state == 'paused'

    media_status.player_is_paused = False
    media_status.player_is_idle = True
    entity.new_media_status(media_status)
    await hass.async_block_till_done()
    state = hass.states.get('media_player.speaker')
    assert state.state == 'idle'

    media_status.player_is_idle = False
    chromecast.is_idle = True
    entity.new_media_status(media_status)
    await hass.async_block_till_done()
    state = hass.states.get('media_player.speaker')
    assert state.state == 'off'

    chromecast.is_idle = False
    entity.new_media_status(media_status)
    await hass.async_block_till_done()
    state = hass.states.get('media_player.speaker')
    assert state.state == 'unknown'
Example 21
File: poker.py Project: jab/cards
def replace_rank(*cards, from_=(rAno, rAlo), to=rAhi):
    """

        >>> alo = evolve(cAh, rank=rAlo)
        >>> ahi = evolve(cAs, rank=rAhi)
        >>> check = replace_rank(alo, ahi, from_=(rAhi, rAlo), to=rAno)
        >>> expect = {cAh, cAs}
        >>> check == expect
        True

    """
    return {evolve(c, rank=to) if c.rank in from_ else c for c in cards}
Example 22
    def _async_update_device(self, device_id, *, add_config_entry_id=_UNDEF,
                             remove_config_entry_id=_UNDEF,
                             merge_connections=_UNDEF,
                             merge_identifiers=_UNDEF,
                             manufacturer=_UNDEF,
                             model=_UNDEF,
                             name=_UNDEF,
                             sw_version=_UNDEF,
                             hub_device_id=_UNDEF):
        """Update device attributes."""
        old = self.devices[device_id]

        changes = {}

        config_entries = old.config_entries

        if (add_config_entry_id is not _UNDEF and
                add_config_entry_id not in old.config_entries):
            config_entries = old.config_entries | {add_config_entry_id}

        if (remove_config_entry_id is not _UNDEF and
                remove_config_entry_id in config_entries):
            config_entries = config_entries - {remove_config_entry_id}

        if config_entries is not old.config_entries:
            changes['config_entries'] = config_entries

        for attr_name, value in (
                ('connections', merge_connections),
                ('identifiers', merge_identifiers),
        ):
            old_value = getattr(old, attr_name)
            # If not undefined, check if `value` contains new items.
            if value is not _UNDEF and not value.issubset(old_value):
                changes[attr_name] = old_value | value

        for attr_name, value in (
                ('manufacturer', manufacturer),
                ('model', model),
                ('name', name),
                ('sw_version', sw_version),
                ('hub_device_id', hub_device_id),
        ):
            if value is not _UNDEF and value != getattr(old, attr_name):
                changes[attr_name] = value

        if not changes:
            return old

        new = self.devices[device_id] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return new
Example 23
def test_validation_after_evolve_call():
    smaj = 1.5
    smin = 1.

    g1 = Gaussian2dParams(x_centre=0,
                          y_centre=0,
                          amplitude=1,
                          semimajor=smaj,
                          semiminor=smin,
                          theta=0.,
                          )
    with pytest.raises(ValueError):
        g2 = attr.evolve(g1, semimajor=smin - 0.1)
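The test above relies on evolve() re-running __init__, which means attrs validators fire again on the evolved copy; Gaussian2dParams presumably validates that semimajor is at least semiminor. A self-contained sketch of that behavior with a hypothetical validated class:

import attr

@attr.s(frozen=True)
class Positive:
    value = attr.ib()

    @value.validator
    def _check_positive(self, attribute, new_value):
        if new_value <= 0:
            raise ValueError("'value' must be positive")

p = Positive(1)
try:
    attr.evolve(p, value=-1)   # __init__ runs, so the validator fires again
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError")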
Example 24
    def test_empty(self, slots, frozen):
        """
        Empty classes without changes get copied.
        """
        @attributes(slots=slots, frozen=frozen)
        class C(object):
            pass

        i1 = C()
        i2 = evolve(i1)

        assert i1 is not i2
        assert i1 == i2
Example 25
def test_correlation_coefficient():
    smaj = 1.5
    smin = 1.
    g1 = Gaussian2dParams(x_centre=0,
                          y_centre=0,
                          amplitude=1,
                          semimajor=smaj,
                          semiminor=smin,
                          theta=0.,
                          )

    assert g1.correlation == 0

    cov = np.array([[smaj * smaj, 0],
                    [0, smin * smin]])
    assert_equal(g1.covariance, cov)

    cov_rotated = np.array([[smin * smin, 0],
                            [0, smaj * smaj]])
    g1 = attr.evolve(g1, theta=np.pi / 2)
    assert_allclose(g1.covariance, cov_rotated, atol=1e-10)

    smaj = 1e5
    g2 = Gaussian2dParams(x_centre=0,
                          y_centre=0,
                          amplitude=1,
                          semimajor=smaj,
                          semiminor=smin,
                          theta=0.,
                          )
    assert g2.correlation == 0
    g2 = attr.evolve(g2, theta=np.pi / 4)
    # print()
    # print(g2.correlation)
    # print(1.0 - g2.correlation)
    assert g2.correlation == approx(1.0, rel=1e-7)
    g2 = attr.evolve(g2, theta=-np.pi / 4)
    assert g2.correlation == approx(-1.0, rel=1e-7)
Example 26
def test_approx_equality():
    init_pars = dict(x_centre=48.24, y_centre=52.66,
                     amplitude=42.,
                     semimajor=1.5,
                     semiminor=1.4,
                     theta=1.1,
                     )
    g1 = Gaussian2dParams(**init_pars)
    g2 = Gaussian2dParams(**init_pars)
    assert g1 == g2
    g2 = attr.evolve(g2,
                     y_centre=g2.y_centre + 5e-9,
                     amplitude=g2.amplitude - 5e-9,
                     )

    assert g1 != g2
    assert g2.approx_equal_to(g1)

    g2 = attr.evolve(g2,
                     theta=g2.theta + g2.theta * 1e-4)
    # It appears that the correlation sensitivity matches reasonably well
    # to the order-of-change to theta (at least for these semimajor/minor):
    assert g2.comparable_params == approx(g1.comparable_params, rel=2e-4)
    assert g2.comparable_params != approx(g1.comparable_params, rel=1e-4)
Example 27
 def test_change(self, C, data):
     """
     Changes work.
     """
     # Take the first attribute, and change it.
     assume(fields(C))  # Skip classes with no attributes.
     field_names = [a.name for a in fields(C)]
     original = C()
     chosen_names = data.draw(st.sets(st.sampled_from(field_names)))
     # We pay special attention to private attributes, they should behave
     # like in `__init__`.
     change_dict = {name.replace('_', ''): data.draw(st.integers())
                    for name in chosen_names}
     changed = evolve(original, **change_dict)
     for name in chosen_names:
         assert getattr(changed, name) == change_dict[name.replace('_', '')]
Example 28
    def async_update(self, area_id: str, name: str) -> AreaEntry:
        """Update name of area."""
        old = self.areas[area_id]

        changes = {}

        if name == old.name:
            return old

        if self._async_is_registered(name):
            raise ValueError('Name is already in use')

        changes['name'] = name

        new = self.areas[area_id] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return new
Example 29
    def redefine_many(self, definitions=()):
        """
        Produce a new checker with the given types redefined.

        Arguments:

            definitions (dict):

                A dictionary mapping types to their checking functions.

        Returns:

            A new `TypeChecker` instance.
        """
        return attr.evolve(
            self, type_checkers=self._type_checkers.update(definitions),
        )
Example 30
    def async_update(self, device) -> ZhaDeviceEntry:
        """Update name of ZhaDeviceEntry."""
        ieee_str = str(device.ieee)
        old = self.devices[ieee_str]

        changes = {}

        if device.power_source != old.power_source:
            changes['power_source'] = device.power_source

        if device.manufacturer_code != old.manufacturer_code:
            changes['manufacturer_code'] = device.manufacturer_code

        changes['last_seen'] = device.last_seen

        new = self.devices[ieee_str] = attr.evolve(old, **changes)
        self.async_schedule_save()
        return new
Example 31
 def set_id(panel):
     return panel if panel.id else attr.evolve(panel, id=next(auto_ids))
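set_id presumably pairs with an itertools.count named auto_ids (as in grafanalib); a self-contained sketch of the same lazy-id-assignment idea, with an illustrative Panel class:

import itertools
import attr

auto_ids = itertools.count(1)

@attr.s(frozen=True)
class Panel:
    title = attr.ib()
    id = attr.ib(default=None)

def set_id(panel):
    return panel if panel.id else attr.evolve(panel, id=next(auto_ids))

assert set_id(Panel("cpu")).id == 1
assert set_id(Panel("mem", id=7)).id == 7   # existing ids are preserved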
Example 32
 def set_event_probs(self, **running_event_probs):
     self.running_event_probabilities = attr.evolve(
         self.running_event_probabilities, **running_event_probs)
Example 33
def main(
    options: optparse.Values,
    *,
    term: Optional[Terminal] = None,
    render_header: bool = True,
    render_footer: bool = True,
    width: Optional[int] = None,
    wait_on_actions: Optional[float] = None,
) -> None:
    data = pg_connect(
        options,
        password=os.environ.get("PGPASSWORD"),
        service=os.environ.get("PGSERVICE"),
        min_duration=options.minduration,
    )

    hostname = socket.gethostname()
    fs_blocksize = options.blocksize

    host = types.Host(
        data.pg_version,
        hostname,
        options.username,
        options.host,
        options.port,
        options.dbname,
    )

    is_local = data.pg_is_local() and data.pg_is_local_access()

    skip_sizes = options.nodbsize
    pg_db_info = data.pg_get_db_info(None,
                                     using_rds=options.rds,
                                     skip_sizes=options.nodbsize)

    flag = types.Flag.from_options(is_local=is_local, **vars(options))
    ui = types.UI.make(
        flag=flag,
        min_duration=options.minduration,
        duration_mode=int(options.durationmode),
        verbose_mode=int(options.verbosemode),
        max_db_length=min(max(int(pg_db_info["max_length"]), 8), 16),
    )

    if term is None:
        # Used in tests.
        term = Terminal()
    key, in_help = None, False
    sys_procs: Dict[int, types.SystemProcess] = {}
    pg_procs = types.SelectableProcesses([])
    activity_stats: types.ActivityStats

    msg_pile = utils.MessagePile(2)

    with term.fullscreen(), term.cbreak(), term.hidden_cursor():
        while True:
            if key == keys.HELP:
                in_help = True
            elif in_help and key is not None:
                in_help, key = False, None
            elif key == keys.EXIT:
                break
            elif not ui.interactive() and key == keys.SPACE:
                ui = ui.toggle_pause()
            elif options.nodbsize and key == keys.REFRESH_DB_SIZE:
                skip_sizes = False
            elif key is not None:
                if keys.is_process_next(key):
                    pg_procs.focus_next()
                    ui.start_interactive()
                elif keys.is_process_prev(key):
                    pg_procs.focus_prev()
                    ui.start_interactive()
                elif key == keys.SPACE:
                    pg_procs.toggle_pin_focused()
                elif key.name == keys.CANCEL_SELECTION:
                    pg_procs.reset()
                    ui.end_interactive()
                elif pg_procs.selected and key in (
                        keys.PROCESS_CANCEL,
                        keys.PROCESS_KILL,
                ):
                    action, color = {
                        keys.PROCESS_CANCEL: ("cancel", "yellow"),
                        keys.PROCESS_KILL: ("terminate", "red"),
                    }[key]
                    action_formatter = getattr(term, color)
                    pids = pg_procs.selected
                    if len(pids) > 1:
                        ptitle = f"processes {', '.join((str(p) for p in pids))}"
                    else:
                        ptitle = f"process {pids[0]}"
                    with term.location(x=0, y=term.height // 3):
                        print(
                            widgets.boxed(
                                term,
                                f"Confirm {action_formatter(action)} action on {ptitle}? (y/n)",
                                border_color=color,
                                center=True,
                                width=width,
                            ),
                            end="",
                        )
                        confirm_key = term.inkey(timeout=None)
                    if confirm_key.lower() == "y":
                        if action == "cancel":
                            for pid in pids:
                                data.pg_cancel_backend(pid)
                            msg_pile.send(
                                action_formatter(
                                    f"{ptitle.capitalize()} cancelled"))
                        elif action == "terminate":
                            for pid in pids:
                                data.pg_terminate_backend(pid)
                            msg_pile.send(
                                action_formatter(
                                    f"{ptitle.capitalize()} terminated"))
                        pg_procs.reset()
                        ui.end_interactive()
                        if wait_on_actions:
                            # Used in tests.
                            time.sleep(wait_on_actions)
                else:
                    pg_procs.reset()
                    ui.end_interactive()
                    changes = {
                        "duration_mode":
                        handlers.duration_mode(key, ui.duration_mode),
                        "verbose_mode":
                        handlers.verbose_mode(key, ui.verbose_mode),
                    }
                    if key in (keys.REFRESH_TIME_INCREASE,
                               keys.REFRESH_TIME_DECREASE):
                        changes["refresh_time"] = handlers.refresh_time(
                            key, ui.refresh_time)
                    query_mode = handlers.query_mode(key)
                    if query_mode is not None:
                        changes["query_mode"] = query_mode
                    else:
                        query_mode = ui.query_mode
                    sort_key = handlers.sort_key_for(key, query_mode, flag)
                    if sort_key is not None:
                        changes["sort_key"] = sort_key
                    ui = ui.evolve(**changes)

            if in_help:
                print(term.clear + term.home, end="")
                views.help(
                    term,
                    __version__,
                    is_local,
                    lines_counter=views.line_counter(term.height),
                )

            else:
                pg_db_info = data.pg_get_db_info(pg_db_info,
                                                 using_rds=options.rds,
                                                 skip_sizes=skip_sizes)
                if options.nodbsize and not skip_sizes:
                    skip_sizes = True

                dbinfo = types.DBInfo(
                    total_size=int(pg_db_info["total_size"]),
                    size_ev=int(pg_db_info["size_ev"]),
                )
                tps = int(pg_db_info["tps"])

                active_connections = data.pg_get_active_connections()
                memory, swap, load = activities.mem_swap_load()
                system_info = types.SystemInfo.default(memory=memory,
                                                       swap=swap,
                                                       load=load)

                if not ui.in_pause and not ui.interactive():
                    if is_local:
                        memory, swap, load = activities.mem_swap_load()
                        system_info = attr.evolve(
                            system_info,
                            memory=memory,
                            swap=swap,
                            load=load,
                        )

                    if ui.query_mode == types.QueryMode.activities:
                        pg_procs.set_items(
                            data.pg_get_activities(ui.duration_mode))
                        if is_local:
                            # TODO: Use this logic in waiting and blocking cases.
                            local_pg_procs, io_read, io_write = activities.ps_complete(
                                cast(List[types.RunningProcess],
                                     pg_procs.items),
                                sys_procs,
                                fs_blocksize,
                            )
                            system_info = attr.evolve(
                                system_info,
                                io_read=io_read,
                                io_write=io_write,
                                max_iops=activities.update_max_iops(
                                    system_info.max_iops,
                                    io_read.count,
                                    io_write.count,
                                ),
                            )
                            pg_procs.set_items(local_pg_procs)

                    else:
                        if ui.query_mode == types.QueryMode.blocking:
                            pg_procs.set_items(
                                data.pg_get_blocking(ui.duration_mode))
                        elif ui.query_mode == types.QueryMode.waiting:
                            pg_procs.set_items(
                                data.pg_get_waiting(ui.duration_mode))
                        else:
                            assert False  # help type checking

                    activity_stats = (
                        pg_procs,
                        system_info) if is_local else pg_procs  # type: ignore

                if options.output is not None:
                    with open(options.output, "a") as f:
                        utils.csv_write(f, map(attr.asdict, pg_procs.items))

                views.screen(
                    term,
                    ui,
                    host=host,
                    dbinfo=dbinfo,
                    tps=tps,
                    active_connections=active_connections,
                    activity_stats=activity_stats,
                    message=msg_pile.get(),
                    render_header=render_header,
                    render_footer=render_footer,
                    width=width,
                )

                if ui.interactive():
                    if not pg_procs.pinned:
                        ui.tick_interactive()
                elif pg_procs.selected:
                    pg_procs.reset()

            key = term.inkey(timeout=ui.refresh_time) or None
Example 34
    def _async_update_entity(
        self,
        entity_id,
        *,
        name=_UNDEF,
        icon=_UNDEF,
        config_entry_id=_UNDEF,
        new_entity_id=_UNDEF,
        device_id=_UNDEF,
        new_unique_id=_UNDEF,
        disabled_by=_UNDEF,
        capabilities=_UNDEF,
        supported_features=_UNDEF,
        device_class=_UNDEF,
        unit_of_measurement=_UNDEF,
        original_name=_UNDEF,
        original_icon=_UNDEF,
    ):
        """Private facing update properties method."""
        old = self.entities[entity_id]

        changes = {}

        for attr_name, value in (
            ("name", name),
            ("icon", icon),
            ("config_entry_id", config_entry_id),
            ("device_id", device_id),
            ("disabled_by", disabled_by),
            ("capabilities", capabilities),
            ("supported_features", supported_features),
            ("device_class", device_class),
            ("unit_of_measurement", unit_of_measurement),
            ("original_name", original_name),
            ("original_icon", original_icon),
        ):
            if value is not _UNDEF and value != getattr(old, attr_name):
                changes[attr_name] = value

        if new_entity_id is not _UNDEF and new_entity_id != old.entity_id:
            if self.async_is_registered(new_entity_id):
                raise ValueError("Entity is already registered")

            if not valid_entity_id(new_entity_id):
                raise ValueError("Invalid entity ID")

            if split_entity_id(new_entity_id)[0] != split_entity_id(
                    entity_id)[0]:
                raise ValueError("New entity ID should be same domain")

            self.entities.pop(entity_id)
            entity_id = changes["entity_id"] = new_entity_id

        if new_unique_id is not _UNDEF:
            conflict = next(
                (entity for entity in self.entities.values()
                 if entity.unique_id == new_unique_id and entity.domain ==
                 old.domain and entity.platform == old.platform),
                None,
            )
            if conflict:
                raise ValueError(
                    f"Unique id '{new_unique_id}' is already in use by "
                    f"'{conflict.entity_id}'")
            changes["unique_id"] = new_unique_id

        if not changes:
            return old

        new = self.entities[entity_id] = attr.evolve(old, **changes)

        self.async_schedule_save()

        data = {
            "action": "update",
            "entity_id": entity_id,
            "changes": list(changes)
        }

        if old.entity_id != entity_id:
            data["old_entity_id"] = old.entity_id

        self.hass.bus.async_fire(EVENT_ENTITY_REGISTRY_UPDATED, data)

        return new
Example 35
    def _async_update_entity(
        self,
        entity_id: str,
        *,
        name: Union[str, None, UndefinedType] = UNDEFINED,
        icon: Union[str, None, UndefinedType] = UNDEFINED,
        config_entry_id: Union[str, None, UndefinedType] = UNDEFINED,
        new_entity_id: Union[str, UndefinedType] = UNDEFINED,
        device_id: Union[str, None, UndefinedType] = UNDEFINED,
        area_id: Union[str, None, UndefinedType] = UNDEFINED,
        new_unique_id: Union[str, UndefinedType] = UNDEFINED,
        disabled_by: Union[str, None, UndefinedType] = UNDEFINED,
        capabilities: Union[Dict[str, Any], None, UndefinedType] = UNDEFINED,
        supported_features: Union[int, UndefinedType] = UNDEFINED,
        device_class: Union[str, None, UndefinedType] = UNDEFINED,
        unit_of_measurement: Union[str, None, UndefinedType] = UNDEFINED,
        original_name: Union[str, None, UndefinedType] = UNDEFINED,
        original_icon: Union[str, None, UndefinedType] = UNDEFINED,
    ) -> RegistryEntry:
        """Private facing update properties method."""
        old = self.entities[entity_id]

        changes = {}

        for attr_name, value in (
            ("name", name),
            ("icon", icon),
            ("config_entry_id", config_entry_id),
            ("device_id", device_id),
            ("area_id", area_id),
            ("disabled_by", disabled_by),
            ("capabilities", capabilities),
            ("supported_features", supported_features),
            ("device_class", device_class),
            ("unit_of_measurement", unit_of_measurement),
            ("original_name", original_name),
            ("original_icon", original_icon),
        ):
            if value is not UNDEFINED and value != getattr(old, attr_name):
                changes[attr_name] = value

        if new_entity_id is not UNDEFINED and new_entity_id != old.entity_id:
            if self.async_is_registered(new_entity_id):
                raise ValueError("Entity is already registered")

            if not valid_entity_id(new_entity_id):
                raise ValueError("Invalid entity ID")

            if split_entity_id(new_entity_id)[0] != split_entity_id(
                    entity_id)[0]:
                raise ValueError("New entity ID should be same domain")

            self.entities.pop(entity_id)
            entity_id = changes["entity_id"] = new_entity_id

        if new_unique_id is not UNDEFINED:
            conflict_entity_id = self.async_get_entity_id(
                old.domain, old.platform, new_unique_id)
            if conflict_entity_id:
                raise ValueError(
                    f"Unique id '{new_unique_id}' is already in use by "
                    f"'{conflict_entity_id}'")
            changes["unique_id"] = new_unique_id

        if not changes:
            return old

        self._remove_index(old)
        new = attr.evolve(old, **changes)
        self._register_entry(new)

        self.async_schedule_save()

        data = {
            "action": "update",
            "entity_id": entity_id,
            "changes": list(changes)
        }

        if old.entity_id != entity_id:
            data["old_entity_id"] = old.entity_id

        self.hass.bus.async_fire(EVENT_ENTITY_REGISTRY_UPDATED, data)

        return new
Example 36
 def rm_secret(self, name):
     return attr.evolve(self, secrets=_deleted(self._secrets, name))
Example 37
def dummy_config():
    return attr.evolve(sac_dummy_config(), framework=FrameworkType.PYTORCH)
Example 38
def main(
    output_handler: OutputHandler,
    target: List[str],
    pattern: str,
    lang: str,
    configs: List[str],
    no_rewrite_rule_ids: bool = False,
    jobs: int = 1,
    include: Optional[List[str]] = None,
    exclude: Optional[List[str]] = None,
    strict: bool = False,
    autofix: bool = False,
    dryrun: bool = False,
    disable_nosem: bool = False,
    dangerously_allow_arbitrary_code_execution_from_rules: bool = False,
    no_git_ignore: bool = False,
    timeout: int = DEFAULT_TIMEOUT,
    max_memory: int = 0,
    timeout_threshold: int = 0,
    skip_unknown_extensions: bool = False,
    severity: Optional[List[str]] = None,
) -> None:
    if include is None:
        include = []

    if exclude is None:
        exclude = []

    configs_obj, errors = get_config(pattern, lang, configs)
    all_rules = configs_obj.get_rules(no_rewrite_rule_ids)

    if severity is None or severity == []:
        filtered_rules = all_rules
    else:
        filtered_rules = [
            rule for rule in all_rules if rule.severity in severity
        ]

    output_handler.handle_semgrep_errors(errors)

    if errors and strict:
        raise SemgrepError(
            f"run with --strict and there were {len(errors)} errors loading configs",
            code=MISSING_CONFIG_EXIT_CODE,
        )

    if not pattern:
        plural = "s" if len(configs_obj.valid) > 1 else ""
        config_id_if_single = (list(configs_obj.valid.keys())[0]
                               if len(configs_obj.valid) == 1 else "")
        invalid_msg = (f"({len(errors)} config files were invalid)"
                       if len(errors) else "")
        logger.debug(
            f"running {len(filtered_rules)} rules from {len(configs_obj.valid)} config{plural} {config_id_if_single} {invalid_msg}"
        )

        if len(configs_obj.valid) == 0:
            raise SemgrepError(
                f"no valid configuration file found ({len(errors)} configs were invalid)",
                code=MISSING_CONFIG_EXIT_CODE,
            )

        notify_user_of_work(filtered_rules, include, exclude)

    respect_git_ignore = not no_git_ignore
    target_manager = TargetManager(
        includes=include,
        excludes=exclude,
        targets=target,
        respect_git_ignore=respect_git_ignore,
        output_handler=output_handler,
        skip_unknown_extensions=skip_unknown_extensions,
    )

    # actually invoke semgrep
    (
        rule_matches_by_rule,
        debug_steps_by_rule,
        semgrep_errors,
        all_targets,
        profiler,
    ) = CoreRunner(
        allow_exec=dangerously_allow_arbitrary_code_execution_from_rules,
        jobs=jobs,
        timeout=timeout,
        max_memory=max_memory,
        timeout_threshold=timeout_threshold,
    ).invoke_semgrep(target_manager, filtered_rules)

    output_handler.handle_semgrep_errors(semgrep_errors)

    rule_matches_by_rule = {
        rule: [
            attr.evolve(rule_match,
                        is_ignored=rule_match_nosem(rule_match, strict))
            for rule_match in rule_matches
        ]
        for rule, rule_matches in rule_matches_by_rule.items()
    }

    if not disable_nosem:
        rule_matches_by_rule = {
            rule: [
                rule_match for rule_match in rule_matches
                if not rule_match._is_ignored
            ]
            for rule, rule_matches in rule_matches_by_rule.items()
        }

    num_findings = sum(len(v) for v in rule_matches_by_rule.values())
    stats_line = f"ran {len(filtered_rules)} rules on {len(all_targets)} files: {num_findings} findings"

    output_handler.handle_semgrep_core_output(rule_matches_by_rule,
                                              debug_steps_by_rule, stats_line,
                                              all_targets, profiler)

    if autofix:
        apply_fixes(rule_matches_by_rule, dryrun)
Example 39
def test_semantic_reparse_simple() -> None:
    TEST_ACT = Act(
        identifier="2050. évi XD. törvény",
        publication_date=Date(2050, 3, 4),
        subject="A nyelvtani tesztelésről",
        preamble='',
        children=(
            Article(
                identifier="1",
                children=(Paragraph(
                    text=
                    "Fontos lesz később a tesztelés X tulajdonságiról szóló 2040. évi DX. törvény (a továbbiakban Xtv.) rövidítésének feloldása.",
                ), )),
            Article(
                identifier="2",
                children=(
                    Paragraph(
                        identifier="1",
                        text=
                        "Bekeverünk a tesztelés Y tulajdonságiról szóló 2041. évi X. törvény dolgaival.",
                    ),
                    Paragraph(
                        identifier="2",
                        text=
                        "Itt megemlítendő a 1. § és a tesztelés Z tulajdonságiról szóló 2041. évi XXX. törvény (a továbbiakban Ztv.)  1. §-a közötti különbség",
                    ),
                    Paragraph(
                        identifier="3",
                        intro="Az Xtv.",
                        wrap_up="szöveg lép.",
                        children=(
                            AlphabeticPoint(
                                identifier="a",
                                text=
                                "12. § (7) bekezdésében a „fontatlan” szövegrész helyébe a „nem fontos”,",
                            ),
                            AlphabeticPoint(
                                identifier="b",
                                text=
                                "11. §-ben a „nemigazán” szövegrész helyébe a „nem”",
                            ),
                        ),
                    ),
                ),
            ),
            Article(
                identifier="3",
                children=(Paragraph(
                    text=
                    "Ez a törvény a kihirdetését követő napon lép hatályba."),
                          )),
            Article(
                identifier="4",
                children=(Paragraph(
                    intro=
                    "A Ztv. 12. § (8) bekezdése helyébe a következő rendelkezés lép:",
                    children=(BlockAmendmentContainer(children=(Paragraph(
                        identifier='8',
                        text="Beillesztett referencia: 11. §, vajon lesz baj?"
                    ), ), ), ),
                ), ),
            ),
        ),
    )

    with_semantics_1 = ActSemanticsParser.add_semantics_to_act(TEST_ACT)
    assert with_semantics_1.is_semantic_parsed

    assert with_semantics_1.article('1').paragraph().act_id_abbreviations == (
        ActIdAbbreviation('Xtv.', '2040. évi DX. törvény'), )
    assert with_semantics_1.article('2').paragraph(
        '2').act_id_abbreviations == (ActIdAbbreviation(
            'Ztv.', '2041. évi XXX. törvény'), )
    assert with_semantics_1.article('1').paragraph().outgoing_references == (
        OutgoingReference(start_pos=55,
                          end_pos=76,
                          reference=Reference(act='2040. évi DX. törvény')), )
    assert with_semantics_1.article('2').paragraph(
        '1').outgoing_references == (OutgoingReference(
            start_pos=47,
            end_pos=67,
            reference=Reference(act='2041. évi X. törvény')), )
    assert with_semantics_1.article('2').paragraph('3').point(
        'a').semantic_data == (TextAmendment(position=Reference(
            act='2040. évi DX. törvény', article='12', paragraph='7'),
                                             original_text='fontatlan',
                                             replacement_text='nem fontos'), )
    assert with_semantics_1.article('3').paragraph().semantic_data == (
        EnforcementDate(position=None, date=DaysAfterPublication(days=1)), )
    assert with_semantics_1.article('4').paragraph().semantic_data == (
        BlockAmendment(position=Reference(act='2041. évi XXX. törvény',
                                          article='12',
                                          paragraph='8'), ), )

    a4_children = with_semantics_1.article('4').paragraph().children
    assert a4_children is not None
    the_block_amendment = a4_children[0]
    assert isinstance(the_block_amendment, BlockAmendmentContainer)

    assert the_block_amendment.semantic_data is None
    assert the_block_amendment.outgoing_references is None
    assert the_block_amendment.act_id_abbreviations is None

    with_semantics_2 = ActSemanticsParser.add_semantics_to_act(
        with_semantics_1)

    assert with_semantics_2 is with_semantics_1

    modified_paragraph = attr.evolve(
        with_semantics_1.article("2").paragraph("1"),
        text="Az 1. § és 3. § egészen fontos.",
        semantic_data=None,
        outgoing_references=None,
        act_id_abbreviations=None,
    )
    modified_article = attr.evolve(
        with_semantics_1.article("2"),
        children=(modified_paragraph, ) +
        with_semantics_1.article("2").children[1:],
    )
    modified_act = attr.evolve(
        with_semantics_1,
        children=(with_semantics_1.children[0], modified_article,
                  with_semantics_1.children[2], with_semantics_1.children[3]),
    )

    assert modified_act.article('1').is_semantic_parsed
    assert not modified_act.article('2').is_semantic_parsed
    assert modified_act.article('3').is_semantic_parsed
    assert modified_act.article('4').is_semantic_parsed
    assert not modified_act.is_semantic_parsed

    modified_with_semantics = ActSemanticsParser.add_semantics_to_act(
        modified_act)
    assert modified_with_semantics.is_semantic_parsed
    assert modified_with_semantics.article('2').paragraph(
        '1').outgoing_references == (
            OutgoingReference(start_pos=3,
                              end_pos=7,
                              reference=Reference(act=None, article='1')),
            OutgoingReference(start_pos=11,
                              end_pos=15,
                              reference=Reference(act=None, article='3')),
        )

    # Check if nothing else was touched but the modified part.
    assert with_semantics_1.article('1') is modified_with_semantics.article(
        '1')
    assert with_semantics_1.article('2').paragraph(
        '2') is modified_with_semantics.article('2').paragraph('2')
    assert with_semantics_1.article('2').paragraph(
        '3') is modified_with_semantics.article('2').paragraph('3')
    assert with_semantics_1.article('3') is modified_with_semantics.article(
        '3')
    assert with_semantics_1.article('4') is modified_with_semantics.article(
        '4')
Example 40
 def filter(self, filter):
     filters = self.filters.copy()
     filters.update(filter)
     return attr.evolve(self, filters=filters)
Example 41
 def _map_panels(self, f):
     return attr.evolve(self, rows=[r._map_panels(f) for r in self.rows])
Example 42
    def _async_update_device(
        self,
        device_id,
        *,
        add_config_entry_id=_UNDEF,
        remove_config_entry_id=_UNDEF,
        merge_connections=_UNDEF,
        merge_identifiers=_UNDEF,
        new_identifiers=_UNDEF,
        manufacturer=_UNDEF,
        model=_UNDEF,
        name=_UNDEF,
        sw_version=_UNDEF,
        entry_type=_UNDEF,
        via_device_id=_UNDEF,
        area_id=_UNDEF,
        name_by_user=_UNDEF,
    ):
        """Update device attributes."""
        old = self.devices[device_id]

        changes = {}

        config_entries = old.config_entries

        if (
            add_config_entry_id is not _UNDEF
            and add_config_entry_id not in old.config_entries
        ):
            config_entries = old.config_entries | {add_config_entry_id}

        if (
            remove_config_entry_id is not _UNDEF
            and remove_config_entry_id in config_entries
        ):
            if config_entries == {remove_config_entry_id}:
                self.async_remove_device(device_id)
                return

            config_entries = config_entries - {remove_config_entry_id}

        if config_entries != old.config_entries:
            changes["config_entries"] = config_entries

        for attr_name, value in (
            ("connections", merge_connections),
            ("identifiers", merge_identifiers),
        ):
            old_value = getattr(old, attr_name)
            # If not undefined, check if `value` contains new items.
            if value is not _UNDEF and not value.issubset(old_value):
                changes[attr_name] = old_value | value

        if new_identifiers is not _UNDEF:
            changes["identifiers"] = new_identifiers

        for attr_name, value in (
            ("manufacturer", manufacturer),
            ("model", model),
            ("name", name),
            ("sw_version", sw_version),
            ("entry_type", entry_type),
            ("via_device_id", via_device_id),
        ):
            if value is not _UNDEF and value != getattr(old, attr_name):
                changes[attr_name] = value

        if area_id is not _UNDEF and area_id != old.area_id:
            changes["area_id"] = area_id

        if name_by_user is not _UNDEF and name_by_user != old.name_by_user:
            changes["name_by_user"] = name_by_user

        if old.is_new:
            changes["is_new"] = False

        if not changes:
            return old

        new = attr.evolve(old, **changes)
        self._update_device(old, new)
        self.async_schedule_save()

        self.hass.bus.async_fire(
            EVENT_DEVICE_REGISTRY_UPDATED,
            {
                "action": "create" if "is_new" in changes else "update",
                "device_id": new.id,
            },
        )

        return new
Example 43
 def _map_panels(self, f):
     return attr.evolve(self, panels=list(map(f, self.panels)))
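Examples 41 and 43 together map a function over a nested dashboard structure, rebuilding each level with attr.evolve instead of mutating it. A compact, runnable sketch with illustrative Panel/Row/Dashboard stand-ins:

import attr

@attr.s(frozen=True)
class Panel:
    title = attr.ib()

@attr.s(frozen=True)
class Row:
    panels = attr.ib()

    def _map_panels(self, f):
        return attr.evolve(self, panels=[f(p) for p in self.panels])

@attr.s(frozen=True)
class Dashboard:
    rows = attr.ib()

    def _map_panels(self, f):
        return attr.evolve(self, rows=[r._map_panels(f) for r in self.rows])

dash = Dashboard(rows=[Row(panels=[Panel("cpu")])])
upper = dash._map_panels(lambda p: attr.evolve(p, title=p.title.upper()))
assert upper.rows[0].panels[0].title == "CPU"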
Example 44
def test_simple_sac(use_discrete):
    env = SimpleEnvironment([BRAIN_NAME], use_discrete=use_discrete)
    config = attr.evolve(SAC_CONFIG)
    _check_environment_trains(env, {BRAIN_NAME: config})
Example 45
def full_export(
    project,
    paths,
    target_directory,
    first_time=False,
    skip_sunspec=False,
    include_uuid_in_item=False,
):
    epcpm.cantosym.export(
        path=paths.can,
        can_model=project.models.can,
        parameters_model=project.models.parameters,
    )

    epcpm.parameterstohierarchy.export(
        path=paths.hierarchy,
        can_model=project.models.can,
        parameters_model=project.models.parameters,
    )

    epcpm.parameterstointerface.export(
        c_path=paths.interface_c,
        h_path=paths.interface_c.with_suffix(".h"),
        can_model=project.models.can,
        sunspec_model=project.models.sunspec,
        parameters_model=project.models.parameters,
        skip_sunspec=skip_sunspec,
        include_uuid_in_item=include_uuid_in_item,
    )

    epcpm.sunspectoxlsx.export(
        path=paths.spreadsheet,
        sunspec_model=project.models.sunspec,
        parameters_model=project.models.parameters,
        skip_sunspec=skip_sunspec,
    )

    epcpm.sunspectoxlsx.export(
        path=paths.spreadsheet_user,
        sunspec_model=project.models.sunspec,
        parameters_model=project.models.parameters,
        skip_sunspec=skip_sunspec,
        column_filter=attr.evolve(
            epcpm.sunspectoxlsx.attr_fill(epcpm.sunspectoxlsx.Fields, True),
            get=False,
            set=False,
            item=False,
        ),
    )

    epcpm.sunspectotablesc.export(
        c_path=paths.sunspec_tables_c,
        h_path=paths.sunspec_tables_c.with_suffix(".h"),
        sunspec_model=project.models.sunspec,
        skip_sunspec=skip_sunspec,
    )

    epcpm.parameterstosil.export(
        c_path=paths.sil_c,
        h_path=paths.sil_c.with_suffix(".h"),
        parameters_model=project.models.parameters,
    )

    epcpm.sunspectobitfieldsc.export(
        c_path=paths.sunspec_bitfields_c,
        h_path=paths.sunspec_bitfields_c.with_suffix(".h"),
        sunspec_model=project.models.sunspec,
        include_uuid_in_item=include_uuid_in_item,
    )

    if first_time and not skip_sunspec:
        epcpm.sunspectomanualc.export(
            path=paths.sunspec_c,
            sunspec_model=project.models.sunspec,
        )

        epcpm.sunspectomanualh.export(
            path=paths.sunspec_c,
            sunspec_model=project.models.sunspec,
        )

    run_generation_scripts(target_directory, skip_sunspec=skip_sunspec)
Example 46
 def async_update_area(self, area_id: str, changes: dict) -> AreaEntry:
     """Update existing self."""
     old = self.areas[area_id]
     new = self.areas[area_id] = attr.evolve(old, **changes)
     self.async_schedule_save()
     return new
Example 47
    def install(self, content_meta=None, force_overwrite=False):
        """extract the archive to the filesystem and write out install metadata.

        MUST be called after self.fetch()."""

        log.debug('install: content_meta=%s, force_overwrite=%s',
                  content_meta, force_overwrite)
        installed = []
        archive_parent_dir = None

        # FIXME: enum/constant/etc demagic
        # content_archive_type = 'multi'

        content_meta = content_meta or self.content_meta

        # FIXME: really need to move the fetch step elsewhere and do it before,
        #        install should get pass a content_archive (or something more abstract)
        # TODO: some useful exceptions for 'cant find', 'cant read', 'cant write'

        archive_path = self._fetch_results.get('archive_path', None)

        if not archive_path:
            raise exceptions.GalaxyClientError('No valid content data found for %s' % self.src)

        log.debug("installing from %s", archive_path)

        content_tar_file, archive_meta = content_archive.load_archive(archive_path)

        # TODO: do we still need to check the fetched version against the spec version?
        content_data = self._fetch_results.get('content', {})

        # If the requested namespace/version is different than the one we got via find()/fetch()...
        if content_data.get('fetched_version', content_meta.version) != content_meta.version:
            log.info('Version "%s" for %s was requested but fetch found version "%s"',
                     content_meta.version, '%s.%s' % (content_meta.namespace, content_meta.name),
                     content_data.get('fetched_version', content_meta.version))

            content_meta = attr.evolve(content_meta, version=content_data['fetched_version'])

        if content_data.get('content_namespace', content_meta.namespace) != content_meta.namespace:
            log.info('Namespace "%s" for %s was requested but fetch found namespace "%s"',
                     content_meta.namespace, '%s.%s' % (content_meta.namespace, content_meta.name),
                     content_data.get('content_namespace', content_meta.namespace))

            content_meta = attr.evolve(content_meta, namespace=content_data['content_namespace'])

        log.debug('archive_meta: %s', archive_meta)

        # we strip off any higher-level directories for all of the files contained within
        # the tar file here. The default is 'github_repo-target'. Gerrit instances, on the
        # other hand, do not have a parent directory at all.

        if not os.path.isdir(content_meta.path):
            log.debug('No content path (%s) found so creating it', content_meta.path)

            os.makedirs(content_meta.path)

        if archive_meta.archive_type == 'multi-content':
            self._content_type = 'all'
            self.display_callback('- extracting all content from "%s" to %s' % (content_meta.name, self.path))
        else:
            self.display_callback("- extracting all content in %s to content directories" % content_meta.name)

        log.info('Installing content from archive type: %s', archive_meta.archive_type)

        if archive_meta.archive_type == 'multi-content':
            log.info('Installing "%s" as an archive_type=%s content_type=%s install_type=%s ',
                     content_meta.name, archive_meta.archive_type, content_meta.content_type,
                     self.content_install_type)

            log.info('About to extract content_type=%s "%s" version=%s to %s',
                     content_meta.content_type, content_meta.name, content_meta.version, content_meta.path)

            log.debug('content_meta: %s', content_meta)

            res = self._install_contents(content_tar_file,
                                         archive_parent_dir,
                                         archive_meta.archive_type,
                                         content_meta,
                                         content_sub_name=self.sub_name,
                                         force_overwrite=force_overwrite)
            installed.append((content_meta, res))

        elif archive_meta.archive_type == 'role':
            log.info('Installing "%s" as a role content archive and content_type=%s (role)', content_meta.name, content_meta.content_type)

            # log.debug('archive_parent_dir: %s', archive_parent_dir)

            installed_from_role = self._install_role_archive(content_tar_file,
                                                             archive_meta=archive_meta,
                                                             content_meta=content_meta,
                                                             force_overwrite=force_overwrite)
            installed.extend(installed_from_role)

        install_datetime = datetime.datetime.utcnow()

        repo_info_path = os.path.join(content_meta.path,
                                      self.content_meta.namespace,
                                      self.content_meta.name,
                                      '.galaxy_install_info')

        repo_install_info = InstallInfo.from_version_date(version=content_meta.version,
                                                          install_datetime=install_datetime)

        log.debug('repo_info_path: %s', repo_info_path)
        install_info.save(repo_install_info, repo_info_path)

        # return the parsed yaml metadata
        if archive_meta.archive_type == 'multi-content':
            self.display_callback("- all content was successfully installed to %s" % self.path)
        else:
            self.display_callback("- %s was installed successfully to %s" % (str(self), self.path))

        # rm any temp files created when getting the content archive
        self._fetcher.cleanup()

        for item in installed:
            log.info('Installed content: %s', item[0])
            # log.debug('Installed files: %s', pprint.pformat(item[1]))

        return installed
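The two attr.evolve calls in install() reconcile the requested metadata with what fetch actually returned, without mutating the original content_meta. Condensed into a runnable sketch (field and key names assumed from the surrounding code):

import attr

@attr.s(frozen=True)
class ContentMeta:
    namespace = attr.ib()
    name = attr.ib()
    version = attr.ib()

requested = ContentMeta(namespace="acme", name="demo", version="1.0.0")
fetch_results = {"fetched_version": "1.0.1", "content_namespace": "acme"}

content_meta = requested
if fetch_results.get("fetched_version", content_meta.version) != content_meta.version:
    # Trust what was actually downloaded over what was asked for.
    content_meta = attr.evolve(content_meta, version=fetch_results["fetched_version"])
if fetch_results.get("content_namespace", content_meta.namespace) != content_meta.namespace:
    content_meta = attr.evolve(content_meta, namespace=fetch_results["content_namespace"])

assert content_meta.version == "1.0.1" and requested.version == "1.0.0"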
Example 48
def test_semantic_reparse_abbrevs() -> None:
    TEST_ACT = Act(
        identifier="2050. évi XD. törvény",
        publication_date=Date(2050, 3, 4),
        subject="A nyelvtani tesztelésről",
        preamble='',
        children=(
            Article(
                identifier="1",
                children=(Paragraph(
                    text=
                    "Fontos lesz később a tesztelés X tulajdonságiról szóló 2040. évi DX. törvény (a továbbiakban Xtv.) rövidítésének feloldása.",
                ), )),
            Article(
                identifier="2",
                children=(
                    Paragraph(
                        identifier="1",
                        text=
                        "Bekeverünk a tesztelés Y tulajdonságiról szóló 2041. évi X. törvény (a továbbiakban Ytv.) dolgaival",
                    ),
                    Paragraph(
                        identifier="2",
                        text=
                        "Itt megemlítendő az Ytv. 10. §-a és a tesztelés Z tulajdonságiról szóló 2041. évi XXX. törvény (a továbbiakban Ztv.) 1. §-a közötti különbség",
                    ),
                    Paragraph(
                        identifier="3",
                        intro="Mert később használatban van",
                        children=(
                            AlphabeticPoint(identifier="a",
                                            text="az Xtv. 1. § c) pontja, és"),
                            AlphabeticPoint(identifier="b",
                                            text="az Ytv. 2. §-a."),
                        ),
                    ),
                ),
            ),
            Article(
                identifier="3",
                children=(Paragraph(
                    text=
                    "Mégegyszer megemlítendő, hogy fontos az Xtv. 1. §-a, Ytv. 1. §-a, és Ztv. 1337. §-a.",
                ), )),
            Article(
                identifier="4",
                children=(Paragraph(
                    intro=
                    "Az Ytv. 12. § (8) bekezdése helyébe a következő rendelkezés lép:",
                    children=(BlockAmendmentContainer(children=(Paragraph(
                        identifier='8',
                        text="Beillesztett referencia: 12. §, vajon lesz baj?"
                    ), ), ), ),
                ), ),
            ),
        ),
    )

    with_semantics_1 = ActSemanticsParser.add_semantics_to_act(TEST_ACT)
    assert with_semantics_1.is_semantic_parsed

    assert with_semantics_1.article('1').paragraph().act_id_abbreviations == (
        ActIdAbbreviation('Xtv.', '2040. évi DX. törvény'), )
    assert with_semantics_1.article('2').paragraph(
        '1').act_id_abbreviations == (ActIdAbbreviation(
            'Ytv.', '2041. évi X. törvény'), )
    assert with_semantics_1.article('2').paragraph(
        '2').act_id_abbreviations == (ActIdAbbreviation(
            'Ztv.', '2041. évi XXX. törvény'), )
    assert with_semantics_1.article('3').paragraph().outgoing_references == (
        OutgoingReference(start_pos=40,
                          end_pos=44,
                          reference=Reference(act='2040. évi DX. törvény')),
        OutgoingReference(start_pos=45,
                          end_pos=51,
                          reference=Reference(act='2040. évi DX. törvény',
                                              article='1')),
        OutgoingReference(start_pos=53,
                          end_pos=57,
                          reference=Reference(act='2041. évi X. törvény')),
        OutgoingReference(start_pos=58,
                          end_pos=64,
                          reference=Reference(act='2041. évi X. törvény',
                                              article='1')),
        OutgoingReference(start_pos=69,
                          end_pos=73,
                          reference=Reference(act='2041. évi XXX. törvény')),
        OutgoingReference(start_pos=74,
                          end_pos=83,
                          reference=Reference(act='2041. évi XXX. törvény',
                                              article='1337')),
    )

    with_semantics_2 = ActSemanticsParser.add_semantics_to_act(
        with_semantics_1)
    # TODO: with_semantics_2 is with_semantics_1
    assert with_semantics_2 == with_semantics_1

    modified_paragraph = attr.evolve(
        with_semantics_1.article("2").paragraph("1"),
        text=
        "Bekeverünk a tesztelés Y új tulajdonságiról szóló 2057. évi X. törvény (a továbbiakban Ytv.) dolgaival",
        semantic_data=None,
        outgoing_references=None,
        act_id_abbreviations=None,
    )
    modified_article = attr.evolve(
        with_semantics_1.article("2"),
        children=(modified_paragraph, ) +
        with_semantics_1.article("2").children[1:],
    )
    modified_act = attr.evolve(
        with_semantics_1,
        children=(with_semantics_1.children[0], modified_article,
                  with_semantics_1.children[2], with_semantics_1.children[3]),
    )

    assert not modified_act.is_semantic_parsed

    modified_with_semantics = ActSemanticsParser.add_semantics_to_act(
        modified_act)
    assert modified_with_semantics.article('2').paragraph(
        '1').act_id_abbreviations == (ActIdAbbreviation(
            'Ytv.', '2057. évi X. törvény'), )
    assert modified_with_semantics.article('3').paragraph(
    ).outgoing_references == (
        OutgoingReference(start_pos=40,
                          end_pos=44,
                          reference=Reference(act='2040. évi DX. törvény')),
        OutgoingReference(start_pos=45,
                          end_pos=51,
                          reference=Reference(act='2040. évi DX. törvény',
                                              article='1')),
        OutgoingReference(start_pos=53,
                          end_pos=57,
                          reference=Reference(act='2057. évi X. törvény')),
        OutgoingReference(start_pos=58,
                          end_pos=64,
                          reference=Reference(act='2057. évi X. törvény',
                                              article='1')),
        OutgoingReference(start_pos=69,
                          end_pos=73,
                          reference=Reference(act='2041. évi XXX. törvény')),
        OutgoingReference(start_pos=74,
                          end_pos=83,
                          reference=Reference(act='2041. évi XXX. törvény',
                                              article='1337')),
    )
    assert modified_with_semantics.article('4').paragraph().semantic_data == (
        BlockAmendment(position=Reference(act='2057. évi X. törvény',
                                          article='12',
                                          paragraph='8'), ), )

    assert with_semantics_1.article('1') is modified_with_semantics.article(
        '1')
    # Note that because of the abbreviation change, everything else may be reparsed,
    # so no asserts for e.g. article('3')

    # No need to reparse BlockAmendments though
    a4_children = with_semantics_1.article('4').paragraph().children
    modified_a4_children = modified_with_semantics.article(
        '4').paragraph().children
    assert a4_children is not None
    assert modified_a4_children is not None
    assert a4_children[0] is modified_a4_children[0]
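The modified_paragraph / modified_article / modified_act sequence above is the standard bottom-up rebuild of an immutable tree: evolve the changed leaf, then evolve each ancestor with a new children tuple, while every untouched subtree keeps its identity. A generic sketch of the same rebuild:

import attr

@attr.s(frozen=True)
class Node:
    identifier = attr.ib()
    text = attr.ib(default=None)
    children = attr.ib(default=())

tree = Node("act", children=(Node("1", text="old"), Node("2", text="keep")))

new_leaf = attr.evolve(tree.children[0], text="new")
# Rebuild only the ancestors that (transitively) contain the changed leaf.
new_tree = attr.evolve(tree, children=(new_leaf,) + tree.children[1:])

assert new_tree.children[0].text == "new"
assert new_tree.children[1] is tree.children[1]  # untouched subtree is shared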
Example 49
 def _derelativize(self, metadata_url):
     return attr.evolve(
         self,
         artifact_url=derelativize_url(self.artifact_url, metadata_url),
     )
Example 50
def server_computation(
        # Tensor/Dataset arguments that will be supplied by TFF:
        server_state: ServerState,
        gen_delta: tf.keras.Model.weights,
        disc_delta: tf.keras.Model.weights,
        generator: tf.keras.Model,
        discriminator: tf.keras.Model,
        gen_optimizer: tf.keras.optimizers.Optimizer,
        disc_optimizer: tf.keras.optimizers.Optimizer) -> ServerState:
    """The computation to run on the server, training the generator.

  Args:
    server_state: The initial `ServerState` for the round.
    gen_inputs_ds: An infinite `tf.data.Dataset` of inputs to the `generator`.
    client_output: The (possibly aggregated) `ClientOutput`.
    generator:  The generator.
    discriminator: The discriminator.
    server_disc_update_optimizer: Optimizer used to `apply_gradients` based on
      the client_output delta.
    train_generator_fn: A function which takes the two networks and generator
      input and trains the generator.
    new_aggregation_state: The updated state of the (possibly DP) averaging
      aggregator.

  Returns:
    An updated `ServerState` object.
  """
    # A tf.function can't modify the structure of its input arguments,
    # so we make a semi-shallow copy:
    server_state = attr.evolve(server_state,
                               counters=dict(server_state.counters))

    tf.nest.map_structure(lambda a, b: a.assign(b), generator.weights,
                          server_state.generator_weights)
    tf.nest.map_structure(lambda a, b: a.assign(b), discriminator.weights,
                          server_state.discriminator_weights)
    tf.nest.assert_same_structure(disc_delta, discriminator.weights)
    grads_and_vars_disc = tf.nest.map_structure(lambda x, v: (-1.0 * x, v),
                                                disc_delta,
                                                discriminator.weights)
    grads_and_vars_gen = tf.nest.map_structure(lambda x, v: (-1.0 * x, v),
                                               gen_delta, generator.weights)
    disc_optimizer.apply_gradients(grads_and_vars_disc,
                                   name='server_update_disc')
    gen_optimizer.apply_gradients(grads_and_vars_gen, name='server_update_gen')

    gen_examples_this_round = tf.constant(0)

    numrounds = server_state.counters['num_rounds']
    round_mod = tf.math.floormod(numrounds, 2, name=None)
    server_state.meta_gen = tf.cond(tf.math.equal(round_mod, 0),
                                    lambda: generator.weights,
                                    lambda: server_state.generator_weights)
    server_state.meta_disc = tf.cond(
        tf.math.equal(round_mod, 0), lambda: discriminator.weights,
        lambda: server_state.discriminator_weights)

    server_state.counters[
        'num_generator_train_examples'] += gen_examples_this_round
    server_state.generator_weights = _weights(generator)
    server_state.discriminator_weights = _weights(discriminator)
    server_state.counters['num_rounds'] += 1
    return server_state
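The first evolve call in server_computation is worth isolating: attr.evolve makes a shallow copy, so a mutable field such as the counters dict must itself be copied before in-place updates, otherwise the original state would observe them. A TFF-free sketch of this semi-shallow copy:

import attr

@attr.s
class State:
    counters = attr.ib(factory=dict)

old = State(counters={"num_rounds": 1})
new = attr.evolve(old, counters=dict(old.counters))  # copy the mutable field
new.counters["num_rounds"] += 1

assert old.counters["num_rounds"] == 1  # original state is unaffected
assert new.counters["num_rounds"] == 2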
Example 51
    def __init__(self,
                 *,
                 connector=None,
                 loop=None,
                 cookies=None,
                 headers=None,
                 skip_auto_headers=None,
                 auth=None,
                 json_serialize=json.dumps,
                 request_class=ClientRequest,
                 response_class=ClientResponse,
                 ws_response_class=ClientWebSocketResponse,
                 version=http.HttpVersion11,
                 cookie_jar=None,
                 connector_owner=True,
                 raise_for_status=False,
                 read_timeout=sentinel,
                 conn_timeout=None,
                 timeout=sentinel,
                 auto_decompress=True,
                 trust_env=False,
                 trace_configs=None):

        implicit_loop = False
        if loop is None:
            if connector is not None:
                loop = connector._loop
            else:
                implicit_loop = True
                loop = asyncio.get_event_loop()

        if connector is None:
            connector = TCPConnector(loop=loop)

        if connector._loop is not loop:
            raise RuntimeError(
                "Session and connector have to use the same event loop")

        self._loop = loop

        # _source_traceback normally has a class-level default of None; set it
        # here as well so the check further down is safe in this excerpt.
        self._source_traceback = None
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        if implicit_loop and not loop.is_running():
            warnings.warn(
                "Creating a client session outside of coroutine is "
                "a very dangerous idea",
                stacklevel=2)
            context = {
                'client_session': self,
                'message': 'Creating a client session outside '
                'of coroutine'
            }
            if self._source_traceback is not None:
                context['source_traceback'] = self._source_traceback
            loop.call_exception_handler(context)

        if cookie_jar is None:
            cookie_jar = CookieJar(loop=loop)
        self._cookie_jar = cookie_jar

        if cookies is not None:
            self._cookie_jar.update_cookies(cookies)

        self._connector = connector
        self._connector_owner = connector_owner
        self._default_auth = auth
        self._version = version
        self._json_serialize = json_serialize
        if timeout is not sentinel:
            if read_timeout is not sentinel:
                raise ValueError("read_timeout and timeout parameters "
                                 "conflict, please setup "
                                 "timeout.read")
            if conn_timeout is not None:
                raise ValueError("conn_timeout and timeout parameters "
                                 "conflict, please setup "
                                 "timeout.connect")
            self._timeout = timeout
        else:
            self._timeout = DEFAULT_TIMEOUT
            if read_timeout is not sentinel:
                self._timeout = attr.evolve(self._timeout,
                                            total=read_timeout)
            if conn_timeout is not None:
                self._timeout = attr.evolve(self._timeout,
                                            connect=conn_timeout)
        self._raise_for_status = raise_for_status
        self._auto_decompress = auto_decompress
        self._trust_env = trust_env

        # Convert to list of tuples
        if headers:
            headers = CIMultiDict(headers)
        else:
            headers = CIMultiDict()
        self._default_headers = headers
        if skip_auto_headers is not None:
            self._skip_auto_headers = frozenset(
                [istr(i) for i in skip_auto_headers])
        else:
            self._skip_auto_headers = frozenset()

        self._request_class = request_class
        self._response_class = response_class
        self._ws_response_class = ws_response_class

        self._trace_configs = trace_configs or []
        for trace_config in self._trace_configs:
            trace_config.freeze()
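The read_timeout/conn_timeout handling above uses attr.evolve to override individual fields of a frozen defaults object. A minimal sketch of that idea, with an invented Timeout class standing in for aiohttp's ClientTimeout:

import attr

@attr.s(frozen=True)
class Timeout:
    total = attr.ib(default=300)
    connect = attr.ib(default=None)

DEFAULT_TIMEOUT = Timeout()

# Override only what the caller supplied; all other defaults stay intact.
timeout = attr.evolve(DEFAULT_TIMEOUT, total=60)
assert timeout.total == 60 and timeout.connect is None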
Example 52
@pytest.fixture(autouse=True)
def disable_attr_validators():
    # NOTE: the decorator and fixture name are reconstructed; the original
    # def line was truncated in this excerpt.
    attr.set_run_validators(False)
    yield
    attr.set_run_validators(True)


def popid(d):
    d.pop("id")
    return d


testdata = [
    pytest.param(
        "content",
        "content_add",
        list(TEST_OBJECTS["content"]),
        attr.evolve(model.Content.from_data(data=b"too big"), length=1000),
        attr.evolve(model.Content.from_data(data=b"to fail"), length=1000),
        id="content",
    ),
    pytest.param(
        "content",
        "content_add_metadata",
        [attr.evolve(cnt, ctime=now()) for cnt in TEST_OBJECTS["content"]],
        attr.evolve(model.Content.from_data(data=b"too big"), length=1000, ctime=now()),
        attr.evolve(model.Content.from_data(data=b"to fail"), length=1000, ctime=now()),
        id="content_metadata",
    ),
    pytest.param(
        "skipped_content",
        "skipped_content_add",
        list(TEST_OBJECTS["skipped_content"]),
Example 53
 def previous_line(self) -> "Position":
     return attr.evolve(self, line=self.line - 1)
Example 54
 def set_batting_event_probs(self, **batting_event_probs):
     self.batting_event_probabilities = attr.evolve(
         self.batting_event_probabilities, **batting_event_probs)
Example 55
 def async_update_user(self, user_id: str, changes: dict) -> UserEntry:
     """Update existing UserEntry."""
     old = self.users[user_id]
     new = self.users[user_id] = attr.evolve(old, **changes)
     self.async_schedule_save()
     return new
Example 56
 def next_line(self) -> "Position":
     return attr.evolve(self, line=self.line + 1)
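Examples 53 and 56 are two halves of the same immutable-navigation idiom: each call evolves a frozen Position into a neighbouring one, carrying every other field along. A runnable sketch, assuming a simple line/column shape:

import attr

@attr.s(frozen=True)
class Position:
    line = attr.ib(type=int)
    column = attr.ib(type=int, default=0)

    def next_line(self) -> "Position":
        return attr.evolve(self, line=self.line + 1)

    def previous_line(self) -> "Position":
        return attr.evolve(self, line=self.line - 1)

p = Position(line=10, column=4)
assert p.next_line().line == 11
assert p.previous_line().column == 4  # other fields are carried over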
Example 57
    async def async_load(self) -> None:
        """Load the registry of schedule entries."""
        data = await self._store.async_load()
        config: Config = Config()
        areas: "OrderedDict[str, AreaEntry]" = OrderedDict()
        sensors: "OrderedDict[str, SensorEntry]" = OrderedDict()
        users: "OrderedDict[str, UserEntry]" = OrderedDict()
        automations: "OrderedDict[str, AutomationEntry]" = OrderedDict()

        if data is not None:
            config = Config(
                code_arm_required=data["config"]["code_arm_required"],
                code_disarm_required=data["config"]["code_disarm_required"],
                code_format=data["config"]["code_format"],
                disarm_after_trigger=data["config"]["disarm_after_trigger"])

            if "mqtt" in data["config"]:
                config = attr.evolve(
                    config, **{
                        "mqtt": MqttConfig(**data["config"]["mqtt"]),
                    })

            if "master" in data["config"]:
                config = attr.evolve(
                    config, **{
                        "master": MasterConfig(**data["config"]["master"]),
                    })

            if "areas" in data:
                for area in data["areas"]:
                    modes = {
                        mode: ModeEntry(enabled=mode_config["enabled"],
                                        exit_time=mode_config["exit_time"],
                                        entry_time=mode_config["entry_time"],
                                        trigger_time=mode_config["trigger_time"])
                        for (mode, mode_config) in area["modes"].items()
                    }
                    areas[area["area_id"]] = AreaEntry(area_id=area["area_id"],
                                                       name=area["name"],
                                                       modes=modes)

            if "sensors" in data:
                for sensor in data["sensors"]:
                    sensors[sensor["entity_id"]] = SensorEntry(**sensor)

            if "users" in data:
                for user in data["users"]:
                    users[user["user_id"]] = UserEntry(**user)

            if "automations" in data:
                for automation in data["automations"]:
                    automations[automation["automation_id"]] = AutomationEntry(
                        **automation)

        self.config = config
        self.areas = areas
        self.sensors = sensors
        self.automations = automations
        self.users = users

        if not areas:
            self.async_create_area({
                "name": "Alarmo",
                "modes": {
                    STATE_ALARM_ARMED_AWAY:
                    attr.asdict(
                        ModeEntry(enabled=True,
                                  exit_time=60,
                                  entry_time=60,
                                  trigger_time=1800)),
                    STATE_ALARM_ARMED_HOME:
                    attr.asdict(ModeEntry(enabled=True, trigger_time=1800)),
                    STATE_ALARM_ARMED_NIGHT:
                    attr.asdict(ModeEntry()),
                    STATE_ALARM_ARMED_CUSTOM_BYPASS:
                    attr.asdict(ModeEntry())
                }
            })
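The mqtt/master handling in async_load layers optional config sections onto a default Config with attr.evolve, leaving the defaults in place when a section is absent from storage. A condensed, runnable sketch of that pattern (class shapes invented for illustration):

import attr

@attr.s(frozen=True)
class MqttConfig:
    enabled = attr.ib(default=False)

@attr.s(frozen=True)
class Config:
    code_arm_required = attr.ib(default=False)
    mqtt = attr.ib(factory=MqttConfig)

data = {"config": {"mqtt": {"enabled": True}}}

config = Config()
# Apply each optional section only when it is present in the stored data.
if "mqtt" in data["config"]:
    config = attr.evolve(config, mqtt=MqttConfig(**data["config"]["mqtt"]))

assert config.mqtt.enabled and not config.code_arm_required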
Example 58
 def copy_and_replace(self, key, new_value) -> "StreamToken":
     return attr.evolve(self, **{key: new_value})
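copy_and_replace passes a runtime-chosen field name to attr.evolve via **{key: new_value}; this works because evolve forwards its keyword arguments to the generated __init__. A self-contained sketch (the field names are assumptions, not the real StreamToken):

import attr

@attr.s(frozen=True)
class StreamToken:
    room_key = attr.ib(default=0)
    presence_key = attr.ib(default=0)

    def copy_and_replace(self, key, new_value) -> "StreamToken":
        return attr.evolve(self, **{key: new_value})

token = StreamToken(room_key=5)
assert token.copy_and_replace("presence_key", 7).presence_key == 7
assert token.presence_key == 0  # original token is unchanged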
Example 59
    def _async_update_device(
        self,
        device_id: str,
        *,
        add_config_entry_id: str | UndefinedType = UNDEFINED,
        remove_config_entry_id: str | UndefinedType = UNDEFINED,
        merge_connections: set[tuple[str, str]] | UndefinedType = UNDEFINED,
        merge_identifiers: set[tuple[str, str]] | UndefinedType = UNDEFINED,
        new_identifiers: set[tuple[str, str]] | UndefinedType = UNDEFINED,
        manufacturer: str | None | UndefinedType = UNDEFINED,
        model: str | None | UndefinedType = UNDEFINED,
        name: str | None | UndefinedType = UNDEFINED,
        sw_version: str | None | UndefinedType = UNDEFINED,
        entry_type: str | None | UndefinedType = UNDEFINED,
        via_device_id: str | None | UndefinedType = UNDEFINED,
        area_id: str | None | UndefinedType = UNDEFINED,
        name_by_user: str | None | UndefinedType = UNDEFINED,
        disabled_by: str | None | UndefinedType = UNDEFINED,
        suggested_area: str | None | UndefinedType = UNDEFINED,
    ) -> DeviceEntry | None:
        """Update device attributes."""
        old = self.devices[device_id]

        changes: dict[str, Any] = {}

        config_entries = old.config_entries

        if (suggested_area not in (UNDEFINED, None, "")
                and area_id is UNDEFINED and old.area_id is None):
            area = self.hass.helpers.area_registry.async_get(
                self.hass).async_get_or_create(suggested_area)
            area_id = area.id

        if (add_config_entry_id is not UNDEFINED
                and add_config_entry_id not in old.config_entries):
            config_entries = old.config_entries | {add_config_entry_id}

        if (remove_config_entry_id is not UNDEFINED
                and remove_config_entry_id in config_entries):
            if config_entries == {remove_config_entry_id}:
                self.async_remove_device(device_id)
                return None

            config_entries = config_entries - {remove_config_entry_id}

        if config_entries != old.config_entries:
            changes["config_entries"] = config_entries

        for attr_name, setvalue in (
            ("connections", merge_connections),
            ("identifiers", merge_identifiers),
        ):
            old_value = getattr(old, attr_name)
            # If not undefined, check if `setvalue` contains new items.
            if setvalue is not UNDEFINED and not setvalue.issubset(old_value):
                changes[attr_name] = old_value | setvalue

        if new_identifiers is not UNDEFINED:
            changes["identifiers"] = new_identifiers

        for attr_name, value in (
            ("manufacturer", manufacturer),
            ("model", model),
            ("name", name),
            ("sw_version", sw_version),
            ("entry_type", entry_type),
            ("via_device_id", via_device_id),
            ("disabled_by", disabled_by),
            ("suggested_area", suggested_area),
        ):
            if value is not UNDEFINED and value != getattr(old, attr_name):
                changes[attr_name] = value

        if area_id is not UNDEFINED and area_id != old.area_id:
            changes["area_id"] = area_id

        if name_by_user is not UNDEFINED and name_by_user != old.name_by_user:
            changes["name_by_user"] = name_by_user

        if old.is_new:
            changes["is_new"] = False

        if not changes:
            return old

        new = attr.evolve(old, **changes)
        self._update_device(old, new)
        self.async_schedule_save()

        self.hass.bus.async_fire(
            EVENT_DEVICE_REGISTRY_UPDATED,
            {
                "action": "create" if "is_new" in changes else "update",
                "device_id": new.id,
            },
        )

        return new
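_async_update_device collects every modification in a changes dict and calls attr.evolve exactly once at the end, returning the old entry unchanged when nothing differs, so callers can detect no-ops by identity. A reduced sketch of the same pattern (fields invented):

import attr

@attr.s(frozen=True)
class DeviceEntry:
    name = attr.ib(default=None)
    model = attr.ib(default=None)

def update_device(old, name=None, model=None):
    changes = {}
    # Record only the fields whose value actually differs.
    if name is not None and name != old.name:
        changes["name"] = name
    if model is not None and model != old.model:
        changes["model"] = model
    if not changes:
        return old  # identity preserved, so "is" detects no-ops
    return attr.evolve(old, **changes)

d = DeviceEntry(name="lamp")
assert update_device(d) is d
assert update_device(d, model="v2").model == "v2"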
Example 60
 def evolve(self, **kwargs):
     return attr.evolve(self, **kwargs)
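Example 60 lifts attr.evolve into a method so call sites need not import attr themselves. One way to reuse it is as a small mixin (names invented for this sketch):

import attr

class Evolvable:
    def evolve(self, **kwargs):
        return attr.evolve(self, **kwargs)

@attr.s(frozen=True)
class Point(Evolvable):
    x = attr.ib(default=0)
    y = attr.ib(default=0)

assert Point().evolve(x=3) == Point(x=3)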