def test_multiple_empty(self):
    """
    Empty list/tuple for validator is the same as None.
    """
    klass_empty = make_class("C", {"x": attr.ib(validator=[])})
    klass_none = make_class("C", {"x": attr.ib(validator=None)})

    # Both variants must generate byte-identical __init__ source.
    assert (
        inspect.getsource(klass_empty.__init__)
        == inspect.getsource(klass_none.__init__)
    )
def test_hash_attribute(self, slots):
    """
    If `hash` is False on an attribute, ignore that attribute.
    """
    cls = make_class(
        "C",
        {"a": attr.ib(hash=False), "b": attr.ib()},
        slots=slots,
        hash=True,
    )

    # Instances differing only in the un-hashed attribute hash equally.
    assert hash(cls(1, 2)) == hash(cls(2, 2))
def test_success(self):
    """
    If the validator succeeds, nothing gets raised.
    """
    attrs = {
        "x": attr.ib(validator=lambda *a: None),
        "y": attr.ib(),
    }
    cls = make_class("C", attrs)

    # Must complete without raising.
    validate(cls(1, 2))
def test_no_init_order(self, slots, frozen):
    """
    If an attribute is `init=False`, it's legal to come after a mandatory
    attribute.
    """
    attrs = {
        "a": attr.ib(default=Factory(list)),
        "b": attr.ib(init=False),
    }

    # Must not raise the "no mandatory after default" ordering error.
    make_class("C", attrs, slots=slots, frozen=frozen)
def test_cmp(self, slots):
    """
    If `cmp` is False, ignore that attribute.
    """
    cls = make_class(
        "C",
        {"a": attr.ib(cmp=False), "b": attr.ib()},
        slots=slots,
    )

    # Instances differing only in the non-compared attribute are equal.
    assert cls(1, 2) == cls(2, 2)
def test_repr(self, slots):
    """
    If `repr` is False, ignore that attribute.
    """
    cls = make_class(
        "C",
        {"a": attr.ib(repr=False), "b": attr.ib()},
        slots=slots,
    )

    # Only `b` shows up in the repr.
    assert "C(b=2)" == repr(cls(1, 2))
def test_make_class_ordered(self):
    """
    If `make_class()` is passed ordered attrs, their order is respected
    instead of the counter.
    """
    # Deliberately create the counting attrs in reverse order.
    second = attr.ib(default=2)
    first = attr.ib(default=1)

    cls = attr.make_class("C", ordered_dict([("a", first), ("b", second)]))

    # The mapping's order wins over creation order.
    assert "C(a=1, b=2)" == repr(cls())
def test_convert(self):
    """
    Return value of converter is used as the attribute's value.
    """
    increment = lambda v: v + 1
    cls = make_class(
        "C",
        {"x": attr.ib(converter=increment), "y": attr.ib()},
    )

    inst = cls(1, 2)

    # x was converted, y passed through untouched.
    assert inst.x == 2
    assert inst.y == 2
def test_convert_property(self, val, init):
    """
    Property tests for attributes with convert.
    """
    attrs = {
        "y": attr.ib(),
        "x": attr.ib(init=init, default=val, converter=lambda v: v + 1),
    }
    inst = make_class("C", attrs)(2)

    # The converter runs on the default as well.
    assert inst.x == val + 1
    assert inst.y == 2
def test_these_ordered(self):
    """
    If `these` is passed ordered attrs, their order is respected instead
    of the counter.
    """
    # Counting attrs created in reverse of the desired order.
    second = attr.ib(default=2)
    first = attr.ib(default=1)

    @attr.s(these=ordered_dict([("a", first), ("b", second)]))
    class C(object):
        pass

    assert "C(a=1, b=2)" == repr(C())
def test_convert_before_validate(self):
    """
    Validation happens after conversion.
    """
    def validator(inst, attr, val):
        raise RuntimeError("foo")

    cls = make_class(
        "C",
        {
            # The converter raises first; the validator never runs.
            "x": attr.ib(validator=validator, converter=lambda v: 1 / 0),
            "y": attr.ib(),
        },
    )

    with pytest.raises(ZeroDivisionError):
        cls(1, 2)
def test_init(self, slots, frozen):
    """
    If `init` is False, ignore that attribute.
    """
    cls = make_class(
        "C",
        {"a": attr.ib(init=False), "b": attr.ib()},
        slots=slots,
        frozen=frozen,
    )

    # Passing the init=False attribute by keyword is rejected.
    with pytest.raises(TypeError) as e:
        cls(a=1, b=2)

    assert (
        "__init__() got an unexpected keyword argument 'a'"
        == e.value.args[0]
    )
def make_attrs_class(typename, d):
    """
    Build an attrs class named *typename* from the spec *d* and return an
    instance constructed with all defaults.

    :param str typename: name of the generated class.
    :param dict d: maps attribute names to tuples where element 0 is the
        default value and element 2 is the declared type.  Float-typed
        attributes additionally get an ``instance_of(float)`` validator.
    :returns: an instance of the generated class.
    """
    import attr

    vals = {}
    for name, spec in d.items():
        # Unpack the magic tuple indices once for readability.
        default, typ = spec[0], spec[2]
        if typ is float:  # identity check is the idiomatic type comparison
            vals[name] = attr.ib(
                default=default,
                validator=attr.validators.instance_of(typ),
            )
        else:
            vals[name] = attr.ib(default=default)

    C = attr.make_class(typename, vals)
    return C()
def __new__(cls, *args, **kwargs) -> Any:
    """
    Re-decorate subclasses with ``attr.s`` so every item of the Binary
    Object schema becomes an optional ``__init__()`` argument.
    """
    # allow all items in Binary Object schema to be populated as optional
    # arguments to `__init__()` with sensible defaults.
    if cls is not GenericObjectMeta:
        attributes = {
            k: attr.ib(
                # `pythonic` presumably carries the Python-side type of the
                # schema entry -- TODO confirm; falls back to NoneType.
                type=getattr(v, 'pythonic', type(None)),
                default=getattr(v, 'default', None),
            ) for k, v in cls.schema.items()
        }
        # Every object also carries a schema `version`, defaulting to 1.
        attributes.update({'version': attr.ib(type=int, default=1)})
        # attr.s replaces the class; `these` supplies the attributes built
        # above instead of reading them from the class body.
        cls = attr.s(cls, these=attributes)
        # skip parameters
        return super().__new__(cls)
    # NOTE(review): when cls IS GenericObjectMeta this method falls through
    # and implicitly returns None -- confirm that is intended.
def test_nonslots_these():
    """
    Enhancing a non-slots class using 'these' works.

    This will actually *replace* the class with another one, using slots.
    """
    class SimpleOrdinaryClass(object):
        def __init__(self, x, y, z):
            self.x = x
            self.y = y
            self.z = z

        def method(self):
            return self.x

        @classmethod
        def classmethod(cls):
            return "clsmethod"

        @staticmethod
        def staticmethod():
            return "staticmethod"

    # init=False keeps the original __init__; slots=True forces attrs to
    # build a brand-new class object.
    C2Slots = attr.s(these={"x": attr.ib(), "y": attr.ib(), "z": attr.ib()},
                     init=False, slots=True)(SimpleOrdinaryClass)
    c2 = C2Slots(x=1, y=2, z="test")
    assert 1 == c2.x
    assert 2 == c2.y
    assert "test" == c2.z
    # Setting an undeclared attribute must fail on a slotted class.
    with pytest.raises(AttributeError):
        c2.t = "test"  # We have slots now.
    # Methods, classmethods and staticmethods survive the class swap.
    assert 1 == c2.method()
    assert "clsmethod" == c2.classmethod()
    assert "staticmethod" == c2.staticmethod()
    assert set(["x", "y", "z"]) == set(C2Slots.__slots__)
    # Ordering, equality, repr and hashing all work on the new class.
    c3 = C2Slots(x=1, y=3, z="test")
    assert c3 > c2
    c2_ = C2Slots(x=1, y=2, z="test")
    assert c2 == c2_
    assert "SimpleOrdinaryClass(x=1, y=2, z='test')" == repr(c2)
    hash(c2)  # Just to assert it doesn't raise.
    assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_dict(self):
    """
    Passing a dict of name: _CountingAttr creates an equivalent class.
    """
    from_dict = make_class(
        "C1",
        {"a": attr.ib(default=42), "b": attr.ib(default=None)},
    )

    @attr.s
    class C2(object):
        a = attr.ib(default=42)
        b = attr.ib(default=None)

    # Both construction paths yield identical attribute definitions.
    assert from_dict.__attrs_attrs__ == C2.__attrs_attrs__
def test_convert_factory_property(self, val, init):
    """
    Property tests for attributes with a converter and a factory default.
    """
    C = make_class("C", {
        "y": attr.ib(),
        # `converter=` replaces the deprecated `convert=` keyword
        # (removed in attrs 21.1); matches sibling tests in this file.
        "x": attr.ib(
            init=init,
            default=Factory(lambda: val),
            converter=lambda v: v + 1,
        ),
    })
    c = C(2)

    # The converter runs on the factory-produced default too.
    assert c.x == val + 1
    assert c.y == 2
def test_returns_Attr(self):
    """
    Returns an instance of _CountingAttr.
    """
    counting_attr = attr.ib()

    assert isinstance(counting_attr, _CountingAttr)
def test_convert_factory_property(self, val, init):
    """
    Property tests for attributes with convert, and a factory default.
    """
    attrs = ordered_dict([
        ("y", attr.ib()),
        (
            "x",
            attr.ib(
                init=init,
                default=Factory(lambda: val),
                converter=lambda v: v + 1,
            ),
        ),
    ])
    inst = make_class("C", attrs)(2)

    # Converter applies to the factory default as well.
    assert inst.x == val + 1
    assert inst.y == 2
def test_multiple_validators(self):
    """
    If a list is passed as a validator, all of its items are treated as one
    and must pass.
    """
    def reject_23(_, __, value):
        if value == 23:
            raise TypeError("omg")

    def reject_42(_, __, value):
        if value == 42:
            raise ValueError("omg")

    cls = make_class("C", {"x": attr.ib(validator=[reject_23, reject_42])})

    # A value neither validator rejects passes cleanly.
    validate(cls(1))

    # Each validator in the list fires for its own bad value.
    with pytest.raises(TypeError) as e:
        cls(23)
    assert "omg" == e.value.args[0]

    with pytest.raises(ValueError) as e:
        cls(42)
    assert "omg" == e.value.args[0]
def test_frozen(self):
    """
    Converters circumvent immutability.
    """
    cls = make_class(
        "C",
        {"x": attr.ib(converter=lambda v: int(v))},
        frozen=True,
    )

    # Conversion inside __init__ must not trip the frozen guard.
    cls("1")
def test_no_init_default(self, slots, frozen):
    """
    If `init` is False but a Factory is specified, don't allow passing that
    argument but initialize it anyway.
    """
    cls = make_class(
        "C",
        {
            "_a": attr.ib(init=False, default=42),
            "_b": attr.ib(init=False, default=Factory(list)),
            "c": attr.ib(),
        },
        slots=slots,
        frozen=frozen,
    )

    # Neither private attribute may be supplied by keyword.
    with pytest.raises(TypeError):
        cls(a=1, c=2)
    with pytest.raises(TypeError):
        cls(b=1, c=2)

    inst = cls(23)

    # Defaults were applied despite init=False.
    assert (42, [], 23) == (inst._a, inst._b, inst.c)
def test_repr_uninitialized_member(self):
    """
    repr signals unset attributes
    """
    cls = make_class("C", {"a": attr.ib(init=False)})

    # An attribute with no default and no init shows as NOTHING.
    assert "C(a=NOTHING)" == repr(cls())
def array(eltype=None):
    """
    Build an ``attr.ib`` describing a homogeneous sequence of *eltype*
    values.

    :param eltype: element type; items are validated via
        ``eltype.validate`` unless they already are *eltype* instances,
        and the whole value is inflated element-wise via
        ``eltype.inflate``.
    """
    def validate(inst, attr, value):
        for item in value:
            # BUG FIX: the original tested `isinstance(value, eltype)`,
            # i.e. the container instead of the element, so per-item
            # validation was effectively keyed off the wrong object.
            if not isinstance(item, eltype):
                eltype.validate(inst, attr, item)

    def inflate(value):
        # Lazily inflate each element.
        return map(eltype.inflate, value)

    return attr.ib(validator=validate, convert=inflate)
def test_these_leave_body(self):
    """
    If these is passed, no attributes are removed from the body.
    """
    @attr.s(init=False, these={"x": attr.ib()})
    class C(object):
        x = 5

    inst = C()

    # The class-body value survives and drives the repr.
    assert 5 == inst.x
    assert "C(x=5)" == repr(inst)
def test_default_decorator_already_set(self):
    """
    Raise DefaultAlreadySetError if the decorator is used after a default
    has been set.
    """
    counting_attr = attr.ib(default=42)

    # A second default via the decorator is an error.
    with pytest.raises(DefaultAlreadySetError):
        @counting_attr.default
        def f(self):
            pass
def test_metadata(self):
    """
    If metadata that is not None is passed, it is used.

    This is necessary for coverage because the previous test is
    hypothesis-based.
    """
    metadata = {}

    # Identity, not mere equality: the very same mapping is stored.
    assert metadata is attr.ib(metadata=metadata).metadata
def simple_attrs_with_metadata(draw):
    """
    Create a simple attribute with arbitrary metadata.
    """
    c_attr = draw(simple_attrs)
    # Metadata keys and values are drawn from the same scalar strategies.
    keys = st.booleans() | st.binary() | st.integers() | st.text()
    vals = st.booleans() | st.binary() | st.integers() | st.text()
    metadata = draw(st.dictionaries(keys=keys, values=vals))
    # NOTE(review): relies on attr.ib's positional parameter order
    # (default, validator, repr, cmp, hash, init, convert, metadata);
    # fragile against attrs signature changes -- verify on upgrade.
    return attr.ib(c_attr._default, c_attr._validator, c_attr.repr,
                   c_attr.cmp, c_attr.hash, c_attr.init, c_attr.convert,
                   metadata)
def test_default_decorator_sets(self):
    """
    Decorator wraps the method in a Factory with pass_self=True and sets
    the default.
    """
    counting_attr = attr.ib()

    @counting_attr.default
    def f(self):
        pass

    # The decorated method becomes a self-taking Factory default.
    assert Factory(f, True) == counting_attr._default
def test_validator_decorator_single(self):
    """
    If _CountingAttr.validator is used as a decorator and there is no
    decorator set, the decorated method is used as the validator.
    """
    counting_attr = attr.ib()

    @counting_attr.validator
    def v():
        pass

    # The decorated function is stored verbatim.
    assert v == counting_attr._validator
class Integration:
    """Represent an integration in our validator."""

    @classmethod
    def load_dir(cls, path: pathlib.Path):
        """Load all integrations in a directory."""
        assert path.is_dir()
        integrations = {}
        for fil in path.iterdir():
            # Skip stray files and Python bytecode caches.
            if fil.is_file() or fil.name == "__pycache__":
                continue

            # A directory without __init__.py is not a package; warn and
            # skip rather than fail.
            init = fil / "__init__.py"
            if not init.exists():
                print(
                    f"Warning: {init} missing, skipping directory. "
                    "If this is your development environment, "
                    "you can safely delete this folder."
                )
                continue

            integration = cls(fil)
            integration.load_manifest()
            integrations[integration.domain] = integration

        return integrations

    # Path of the integration's directory.
    path: pathlib.Path = attr.ib()
    # Parsed manifest.json, or None until load_manifest() has run.
    manifest: dict[str, Any] | None = attr.ib(default=None)
    # Accumulated validation errors / warnings.
    errors: list[Error] = attr.ib(factory=list)
    warnings: list[Error] = attr.ib(factory=list)

    @property
    def domain(self) -> str:
        """Integration domain."""
        return self.path.name

    @property
    def core(self) -> bool:
        """Core integration."""
        return self.path.as_posix().startswith("homeassistant/components")

    @property
    def disabled(self) -> str | None:
        """Return if integration is disabled."""
        return self.manifest.get("disabled")

    @property
    def name(self) -> str:
        """Return name of the integration."""
        return self.manifest["name"]

    @property
    def quality_scale(self) -> str:
        """Return quality scale of the integration."""
        return self.manifest.get("quality_scale")

    @property
    def config_flow(self) -> str:
        """Return if the integration has a config flow."""
        return self.manifest.get("config_flow")

    @property
    def requirements(self) -> list[str]:
        """List of requirements."""
        return self.manifest.get("requirements", [])

    @property
    def dependencies(self) -> list[str]:
        """List of dependencies."""
        return self.manifest.get("dependencies", [])

    def add_error(self, *args: Any, **kwargs: Any) -> None:
        """Add an error."""
        self.errors.append(Error(*args, **kwargs))

    def add_warning(self, *args: Any, **kwargs: Any) -> None:
        """Add an warning."""
        self.warnings.append(Error(*args, **kwargs))

    def load_manifest(self) -> None:
        """Load manifest."""
        manifest_path = self.path / "manifest.json"
        # Record a model error instead of raising when the manifest is
        # missing or unparseable; callers inspect self.errors.
        if not manifest_path.is_file():
            self.add_error("model", f"Manifest file {manifest_path} not found")
            return

        try:
            manifest = json.loads(manifest_path.read_text())
        except ValueError as err:
            self.add_error("model", f"Manifest contains invalid JSON: {err}")
            return

        self.manifest = manifest

    def import_pkg(self, platform=None):
        """Import the Python file."""
        pkg = f"homeassistant.components.{self.domain}"
        if platform is not None:
            pkg += f".{platform}"
        return importlib.import_module(pkg)
def make_attr_class(class_name, fields, **class_kwargs):
    """Create an attrs class from (name, spec) pairs.

    A dict spec is expanded into ``attr.ib(**spec)``; anything else is
    assumed to already be a counting attribute and is used verbatim.
    """
    prepared = OrderedDict()
    for name, spec in fields:
        prepared[name] = attr.ib(**spec) if isinstance(spec, dict) else spec
    return attr.make_class(class_name, prepared, **class_kwargs)
class MessageUpdateEvent(BaseEvent):
    """Event payload bundling a message with its guild and channel."""

    # ID wrapper for the guild the event belongs to.
    guild: GuildIDWrapper = attr.ib()
    # ID wrapper for the channel the event belongs to.
    channel: ChannelIDWrapper = attr.ib()
    # The message carried by this event.
    message: Message = attr.ib()
class OrchestrationTemplate(BaseEntity, Updateable, Pretty, Taggable):
    """UI entity for an orchestration template.

    All methods drive the web UI via `navigate_to` views and assert on
    flash messages.
    """

    template_group = attr.ib()
    template_name = attr.ib()
    content = attr.ib()
    description = attr.ib(default=None)
    draft = attr.ib(default=None)

    def update(self, updates):
        """Fill the Edit view from *updates* and save.

        Missing keys fill as None, leaving those fields untouched.
        """
        view = navigate_to(self, 'Edit')
        view.fill({'description': updates.get('description'),
                   'name': updates.get('template_name'),
                   'draft': updates.get('draft'),
                   'content': updates.get('content')})
        view.save_button.click()
        view.flash.wait_displayed("10s")
        # The success message lost its quotes in appliance version 5.11.
        if self.appliance.version < "5.11":
            view.flash.assert_success_message(
                f'Orchestration Template "{self.template_name}" was saved')
        else:
            view.flash.assert_success_message(
                f'Orchestration Template {self.template_name} was saved')

    def delete(self):
        """Delete this template via the Details view toolbar."""
        view = navigate_to(self, 'Details')
        msg = "Remove this Orchestration Template from Inventory"
        view.toolbar.configuration.item_select(msg, handle_alert=True)
        view.flash.assert_success_message('Orchestration Template "{}" was deleted.'.format(
            self.template_name))

    def delete_all_templates(self):
        """Select every template of this type and remove them all."""
        view = navigate_to(self, 'TemplateType')
        view.paginator.check_all()
        view.configuration.item_select("Remove selected Orchestration Templates",
                                       handle_alert=True)

    def copy_template(self, template_name, content, draft=None, description=None):
        """Copy this template under a new name and return the new entity."""
        view = navigate_to(self, 'CopyTemplate')
        view.fill({'name': template_name,
                   'content': content,
                   'draft': draft,
                   'description': description
                   })
        view.add_button.click()
        view.flash.wait_displayed("10s")
        view.flash.assert_no_error()  # TODO - Move assertions to tests
        return self.parent.instantiate(template_group=self.template_group,
                                       description=description,
                                       template_name=template_name,
                                       content=content,
                                       draft=draft)

    def create_service_dialog_from_template(self, dialog_name):
        """Create a service dialog from this template and return it."""
        view = navigate_to(self, 'AddDialog')
        view.fill({'name': dialog_name})
        # The Add button enables asynchronously after the fill.
        wait_for(lambda: view.add_button.is_enabled, num_sec=5)
        view.add_button.click()
        view.flash.assert_no_error()
        service_dialog = self.parent.parent.collections.service_dialogs.instantiate(
            label=dialog_name)
        return service_dialog
class PanCompleter(Completer):
    """Completer for panctl commands."""

    # Known top-level commands.
    commands = attr.ib(type=List[str])
    # D-Bus control / devices interfaces used to look up live data.
    ctl = attr.ib()
    devices = attr.ib()
    # pan_user -> set of room ids collected from unverified-device events.
    rooms = attr.ib(init=False, default=attr.Factory(lambda: defaultdict(set)))
    path_completer = PathCompleter(expanduser=True)

    def complete_commands(self, last_word):
        """Complete the available commands."""
        compl_words = self.filter_words(self.commands, last_word)
        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

    def complete_users(self, last_word, pan_user):
        """Complete user ids known to the given pan user."""
        devices = self.devices.List(pan_user)
        users = set(device["user_id"] for device in devices)
        compl_words = self.filter_words(users, last_word)
        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

        return ""

    def complete_devices(self, last_word, pan_user, user_id):
        """Complete device ids of the given user."""
        devices = self.devices.ListUserDevices(pan_user, user_id)
        device_ids = [device["device_id"] for device in devices]
        compl_words = self.filter_words(device_ids, last_word)
        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

        return ""

    def filter_words(self, words, last_word):
        """Return the words containing *last_word* as a substring."""
        compl_words = []

        for word in words:
            if last_word in word:
                compl_words.append(word)

        return compl_words

    def complete_pan_users(self, last_word):
        """Complete pan-user ids across all configured servers."""
        servers = self.ctl.ListServers()
        users = [item[0] for sublist in servers.values() for item in sublist]
        compl_words = self.filter_words(users, last_word)

        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

    def complete_verification(self, command, last_word, words):
        """Complete the pan-user / user / device argument positions."""
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            pan_user = words[1]
            return self.complete_users(last_word, pan_user)
        elif len(words) == 4:
            pan_user = words[1]
            user_id = words[2]
            return self.complete_devices(last_word, pan_user, user_id)

        return ""

    def complete_key_file_cmds(
        self, document, complete_event, command, last_word, words
    ):
        """Complete pan user, then a filesystem path."""
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            return self.path_completer.get_completions(
                Document(last_word), complete_event
            )

        return ""

    def complete_rooms(self, pan_user, last_word, words):
        """Complete rooms previously seen for the given pan user."""
        rooms = self.rooms[pan_user]
        compl_words = self.filter_words(list(rooms), last_word)

        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

        return ""

    def complete_send_cmds(self, last_word, words):
        """Complete pan user, then room id."""
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            pan_user = words[1]
            return self.complete_rooms(pan_user, last_word, words)

        return ""

    def complete_list_devices(self, last_word, words):
        """Complete pan user, then user id."""
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            pan_user = words[1]
            return self.complete_users(last_word, pan_user)

        return ""

    def get_completions(self, document, complete_event):
        """Build the completions."""
        text_before_cursor = document.text_before_cursor
        text_before_cursor = str(text_before_cursor)
        words = text_before_cursor.split(" ")
        last_word = words[-1]

        # First word: complete the command itself.
        if len(words) == 1:
            return self.complete_commands(last_word)

        # Later words: dispatch on the command.
        if len(words) > 1:
            command = words[0]

            if command in [
                "start-verification",
                "accept-verification",
                "confirm-verification",
                "cancel-verification",
                "verify-device",
                "unverify-device",
                "blacklist-device",
                "unblacklist-device",
            ]:
                return self.complete_verification(command, last_word, words)
            elif command in ["export-keys", "import-keys"]:
                return self.complete_key_file_cmds(
                    document, complete_event, command, last_word, words
                )
            elif command in ["send-anyways", "cancel-sending"]:
                return self.complete_send_cmds(last_word, words)
            elif command == "list-devices":
                return self.complete_list_devices(last_word, words)
            elif command == "help":
                if len(words) == 2:
                    return self.complete_commands(last_word)
                else:
                    return ""
            elif command in ["cancel-keyshare", "continue-keyshare"]:
                return self.complete_verification(command, last_word, words)

        return ""
class AudioChunk(object):
    """Container pairing a serial number with a buffer of audio samples."""

    serial = attr.ib()
    # Custom repr shows only shape and dtype; assumes `samples` is a
    # numpy-like array exposing those attributes -- TODO confirm.
    samples = attr.ib(repr=lambda w: '{} {}'.format(w.shape, w.dtype))
class PanCtl:
    """Interactive panctl front-end talking to pantalaimon over D-Bus."""

    # All D-Bus plumbing is populated in __attrs_post_init__, hence
    # every field is init=False.
    bus = attr.ib(init=False)
    pan_bus = attr.ib(init=False)
    ctl = attr.ib(init=False)
    devices = attr.ib(init=False)
    completer = attr.ib(init=False)
    # Ids of requests we issued; used to match responses back to us.
    own_message_ids = attr.ib(init=False)

    command_help = {
        "help": "Display help about commands.",
        "list-servers": (
            "List the configured homeservers and pan users on each homeserver."
        ),
        "list-devices": ("List the devices of a user that are known to the pan-user."),
        "start-verification": (
            "Start an interactive key verification between "
            "the given pan-user and user."
        ),
        "accept-verification": (
            "Accept an interactive key verification that "
            "the given user has started with our given "
            "pan-user."
        ),
        "cancel-verification": (
            "Cancel an interactive key verification "
            "between the given pan-user and user."
        ),
        "confirm-verification": (
            "Confirm that the short authentication "
            "string of the interactive key verification "
            "with the given pan-user and user is "
            "matching."
        ),
        "verify-device": ("Manually mark the given device as verified."),
        "unverify-device": (
            "Mark a previously verified device of the given user as unverified."
        ),
        "blacklist-device": (
            "Manually mark the given device of the given user as blacklisted."
        ),
        "unblacklist-device": (
            "Mark a previously blacklisted device of the "
            "given user as unblacklisted."
        ),
        "send-anyways": (
            "Send a room message despite having unverified "
            "devices in the room and mark the devices as "
            "ignored."
        ),
        "cancel-sending": (
            "Cancel the send of a room message in a room that "
            "contains unverified devices"
        ),
        "import-keys": (
            "Import end-to-end encryption keys from the given "
            "file for the given pan-user."
        ),
        "export-keys": (
            "Export end-to-end encryption keys to the given file "
            "for the given pan-user."
        ),
        # NOTE(review): the two keyshare descriptions below appear to be
        # copy-pasted from export-keys -- they describe key export, not
        # continuing/cancelling a key share. Fix the strings.
        "continue-keyshare": (
            "Export end-to-end encryption keys to the given file "
            "for the given pan-user."
        ),
        "cancel-keyshare": (
            "Export end-to-end encryption keys to the given file "
            "for the given pan-user."
        ),
    }

    commands = list(command_help.keys())

    def __attrs_post_init__(self):
        # Connect to the pantalaimon daemon and wire up signal handlers.
        self.bus = SessionBus()
        self.pan_bus = self.bus.get("org.pantalaimon1")

        self.ctl = self.pan_bus["org.pantalaimon1.control"]
        self.devices = self.pan_bus["org.pantalaimon1.devices"]

        self.own_message_ids = []

        self.ctl.Response.connect(self.show_response)
        self.ctl.UnverifiedDevices.connect(self.unverified_devices)

        self.completer = PanCompleter(self.commands, self.ctl, self.devices)

        self.devices.VerificationInvite.connect(self.show_sas_invite)
        self.devices.VerificationString.connect(self.show_sas)
        self.devices.VerificationDone.connect(self.sas_done)

        self.devices.KeyRequest.connect(self.show_key_request)
        self.devices.KeyRequestCancel.connect(self.show_key_request_cancel)

    def show_help(self, command):
        """Print the help string for the given command."""
        print(self.command_help[command])

    def unverified_devices(self, pan_user, room_id, display_name):
        """Signal handler: a send failed due to unverified devices."""
        # Remember the room so the completer can offer it later.
        self.completer.rooms[pan_user].add(room_id)
        print(
            f"Error sending message for user {pan_user}, "
            f"there are unverified devices in the room {display_name} "
            f"({room_id}).\nUse the send-anyways or cancel-sending commands "
            f"to ignore the devices or cancel the sending."
        )

    def show_response(self, response_id, pan_user, message):
        """Signal handler: print responses to requests we issued."""
        if response_id not in self.own_message_ids:
            return

        self.own_message_ids.remove(response_id)

        print(message["message"])

    def show_key_request(self, pan_user, user_id, device_id, request_id):
        """Signal handler: an unverified device requested room keys."""
        print(
            f"{user_id} has requested room keys from our pan "
            f"user {pan_user}, but the requesting device "
            f"{device_id} is unverified\n"
            f"After verifying the device accept the key share request with "
            f"the continue-keyshare, alternatively cancel the "
            f"request with the cancel-keyshare command."
        )

    def show_key_request_cancel(self, pan_user, user_id, device_id, request_id):
        """Signal handler: a pending key request was cancelled."""
        print(
            f"{user_id} via {device_id} has "
            f"canceled the room key request from our pan user "
            f"{pan_user}."
        )

    def sas_done(self, pan_user, user_id, device_id, _):
        """Signal handler: an interactive verification finished."""
        print(
            f"Device {device_id} of user {user_id}"
            f" succesfully verified for pan user {pan_user}."
        )

    def show_sas_invite(self, pan_user, user_id, device_id, _):
        """Signal handler: someone started an interactive verification."""
        print(
            f"{user_id} has started an interactive device "
            f"verification for their device {device_id} with pan user "
            f"{pan_user}\n"
            f"Accept the invitation with the accept-verification command."
        )

    # The emoji printing logic was taken from weechat-matrix and was written by
    # dkasak.
    def show_sas(self, pan_user, user_id, device_id, _, emoji):
        """Signal handler: print the short-auth-string emoji table."""
        emojis = [x[0] for x in emoji]
        descriptions = [x[1] for x in emoji]

        centered_width = 12

        def center_emoji(emoji, width):
            # Assume each emoji has width 2
            emoji_width = 2

            # These are emojis that need VARIATION-SELECTOR-16 (U+FE0F) so
            # that they are rendered with coloured glyphs. For these, we
            # need to add an extra space after them so that they are
            # rendered properly in weechat.
            variation_selector_emojis = ["☁️", "❤️", "☂️", "✏️", "✂️", "☎️", "✈️"]

            if emoji in variation_selector_emojis:
                emoji += " "

            # This is a trick to account for the fact that emojis are wider
            # than other monospace characters.
            placeholder = "." * emoji_width

            return placeholder.center(width).replace(placeholder, emoji)

        emoji_str = "".join(center_emoji(e, centered_width) for e in emojis)
        desc = "".join(d.center(centered_width) for d in descriptions)
        short_string = "\n".join([emoji_str, desc])

        print(
            f"Short authentication string for pan "
            f"user {pan_user} from {user_id} via "
            f"{device_id}:\n{short_string}"
        )

    def list_servers(self):
        """List the daemons users."""
        servers = self.ctl.ListServers()

        print("pantalaimon servers:")

        for server, server_users in servers.items():
            server_c = get_color(server)

            print_formatted_text(HTML(f" - Name: <{server_c}>{server}</{server_c}>"))

            user_list = []

            for user, device in server_users:
                user_c = get_color(user)
                device_c = get_color(device)

                user_list.append(
                    f"   - <{user_c}>{user}</{user_c}> "
                    f"<{device_c}>{device}</{device_c}>"
                )

            if user_list:
                print(" - Pan users:")
                user_string = "\n".join(user_list)
                print_formatted_text(HTML(user_string))

    def list_devices(self, args):
        """Print the known devices of a user with their trust state."""
        devices = self.devices.ListUserDevices(args.pan_user, args.user_id)

        print_formatted_text(HTML(f"Devices for user <b>{args.user_id}</b>:"))

        for device in devices:
            if device["trust_state"] == "verified":
                trust_state = "<ansigreen>Verified</ansigreen>"
            elif device["trust_state"] == "blacklisted":
                trust_state = "<ansired>Blacklisted</ansired>"
            elif device["trust_state"] == "ignored":
                trust_state = "Ignored"
            else:
                trust_state = "Unset"

            key = partition_key(device["ed25519"])
            color = get_color(device["device_id"])
            print_formatted_text(
                HTML(
                    f" - Display name:  "
                    f"{device['device_display_name']}\n"
                    f"   - Device id:   "
                    f"<{color}>{device['device_id']}</{color}>\n"
                    f"   - Device key:  "
                    f"<ansiyellow>{key}</ansiyellow>\n"
                    f"   - Trust state: "
                    f"{trust_state}"
                )
            )

    async def loop(self):
        """Event loop for panctl."""
        promptsession = PromptSession("panctl> ", completer=self.completer)

        while True:
            with patch_stdout():
                try:
                    # PTK2 flags the prompt_toolkit 2.x vs 3.x API split.
                    if PTK2:
                        result = await promptsession.prompt(async_=True)
                    else:
                        result = await promptsession.prompt_async()
                except EOFError:
                    break

            if not result:
                continue

            parser = PanctlParser(self.commands)

            try:
                args = parser.parse_args(result.split())
            except ParseError:
                continue

            command = args.subcommand

            if command == "list-servers":
                self.list_servers()

            # NOTE(review): plain `if` here while every following branch is
            # `elif`; behavior is unaffected but the style is inconsistent.
            if command == "help":
                self.show_help(args.command)

            elif command == "import-keys":
                self.own_message_ids.append(
                    self.ctl.ImportKeys(args.pan_user, args.path, args.passphrase)
                )

            elif command == "export-keys":
                self.own_message_ids.append(
                    self.ctl.ExportKeys(args.pan_user, args.path, args.passphrase)
                )

            elif command == "send-anyways":
                self.own_message_ids.append(
                    self.ctl.SendAnyways(args.pan_user, args.room_id)
                )

            elif command == "cancel-sending":
                self.own_message_ids.append(
                    self.ctl.CancelSending(args.pan_user, args.room_id)
                )

            elif command == "list-devices":
                self.list_devices(args)

            elif command == "verify-device":
                self.own_message_ids.append(
                    self.devices.Verify(args.pan_user, args.user_id, args.device_id)
                )

            elif command == "unverify-device":
                self.own_message_ids.append(
                    self.devices.Unverify(args.pan_user, args.user_id, args.device_id)
                )

            elif command == "blacklist-device":
                self.own_message_ids.append(
                    self.devices.Blacklist(args.pan_user, args.user_id, args.device_id)
                )

            elif command == "unblacklist-device":
                self.own_message_ids.append(
                    self.devices.Unblacklist(
                        args.pan_user, args.user_id, args.device_id
                    )
                )

            elif command == "start-verification":
                self.own_message_ids.append(
                    self.devices.StartKeyVerification(
                        args.pan_user, args.user_id, args.device_id
                    )
                )

            elif command == "cancel-verification":
                self.own_message_ids.append(
                    self.devices.CancelKeyVerification(
                        args.pan_user, args.user_id, args.device_id
                    )
                )

            elif command == "accept-verification":
                self.own_message_ids.append(
                    self.devices.AcceptKeyVerification(
                        args.pan_user, args.user_id, args.device_id
                    )
                )

            elif command == "confirm-verification":
                self.own_message_ids.append(
                    self.devices.ConfirmKeyVerification(
                        args.pan_user, args.user_id, args.device_id
                    )
                )

            elif command == "continue-keyshare":
                self.own_message_ids.append(
                    self.devices.ContinueKeyShare(
                        args.pan_user, args.user_id, args.device_id
                    )
                )

            elif command == "cancel-keyshare":
                self.own_message_ids.append(
                    self.devices.CancelKeyShare(
                        args.pan_user, args.user_id, args.device_id
                    )
                )
class IOHandler(object):
    """Common handler for all IO operations. Holds common configuration values used for all operations.

    :param metadata_writer: File-like to which metadata should be written
    :type metadata_writer: aws_encryption_sdk_cli.internal.metadata.MetadataWriter
    :param bool interactive: Should prompt before overwriting existing files
    :param bool no_overwrite: Should never overwrite existing files
    :param bool decode_input: Should input be base64 decoded before operation
    :param bool encode_output: Should output be base64 encoded after operation
    :param dict required_encryption_context: Encryption context key-value pairs to require
    :param list required_encryption_context_keys: Encryption context keys to require
    """

    metadata_writer = attr.ib(
        validator=attr.validators.instance_of(MetadataWriter))
    interactive = attr.ib(validator=attr.validators.instance_of(bool))
    no_overwrite = attr.ib(validator=attr.validators.instance_of(bool))
    decode_input = attr.ib(validator=attr.validators.instance_of(bool))
    encode_output = attr.ib(validator=attr.validators.instance_of(bool))
    required_encryption_context = attr.ib(
        validator=attr.validators.instance_of(dict))
    required_encryption_context_keys = attr.ib(
        validator=attr.validators.instance_of(list))  # noqa pylint: disable=invalid-name

    def __init__(
        self,
        metadata_writer,  # type: MetadataWriter
        interactive,  # type: bool
        no_overwrite,  # type: bool
        decode_input,  # type: bool
        encode_output,  # type: bool
        required_encryption_context,  # type: Dict[str, str]
        required_encryption_context_keys,  # type: List[str]
    ):
        # type: (...) -> None
        """Workaround pending resolution of attrs/mypy interaction.

        https://github.com/python/mypy/issues/2088
        https://github.com/python-attrs/attrs/issues/215
        """
        self.metadata_writer = metadata_writer
        self.interactive = interactive
        self.no_overwrite = no_overwrite
        self.decode_input = decode_input
        self.encode_output = encode_output
        self.required_encryption_context = required_encryption_context
        self.required_encryption_context_keys = required_encryption_context_keys  # pylint: disable=invalid-name
        # Run the attrs validators manually because this hand-written __init__
        # bypasses the attrs-generated one.
        attr.validate(self)

    def _single_io_write(self, stream_args, source, destination_writer):
        # type: (STREAM_KWARGS, IO, IO) -> OperationResult
        """Performs the actual write operations for a single operation.

        :param dict stream_args: kwargs to pass to `aws_encryption_sdk.stream`
        :param source: source to write
        :type source: file-like object
        :param destination_writer: destination object to which to write
        :type destination_writer: file-like object
        :returns: OperationResult stating whether the file was written
        :rtype: aws_encryption_sdk_cli.internal.identifiers.OperationResult
        """
        # Optionally wrap the raw streams in base64 decode (input) / encode
        # (output) adapters, then stream the crypto operation chunk-by-chunk.
        with _encoder(source, self.decode_input) as _source, _encoder(
            destination_writer, self.encode_output
        ) as _destination:  # noqa pylint: disable=line-too-long
            with aws_encryption_sdk.stream(
                    source=_source, **stream_args) as handler, self.metadata_writer as metadata:
                metadata_kwargs = dict(
                    mode=stream_args["mode"],
                    input=source.name,
                    output=destination_writer.name,
                    header=json_ready_header(handler.header),
                )
                try:
                    header_auth = handler.header_auth
                except AttributeError:
                    # EncryptStream doesn't expose the header auth at this time
                    pass
                else:
                    metadata_kwargs["header_auth"] = json_ready_header_auth(
                        header_auth)
                if stream_args["mode"] == "decrypt":
                    # Enforce required encryption context keys/pairs before any
                    # plaintext is written to the destination.
                    discovered_ec = handler.header.encryption_context
                    missing_keys = set(
                        self.required_encryption_context_keys).difference(
                            set(discovered_ec.keys()))
                    missing_pairs = set(
                        self.required_encryption_context.items()).difference(
                            set(discovered_ec.items()))
                    if missing_keys or missing_pairs:
                        _LOGGER.warning(
                            "Skipping decrypt because discovered encryption context did not match required elements."
                        )
                        metadata_kwargs.update(
                            dict(
                                skipped=True,
                                reason="Missing encryption context key or value",
                                missing_encryption_context_keys=list(
                                    missing_keys),
                                missing_encryption_context_pairs=list(
                                    missing_pairs),
                            ))
                        metadata.write_metadata(**metadata_kwargs)
                        return OperationResult.FAILED_VALIDATION
                metadata.write_metadata(**metadata_kwargs)
                for chunk in handler:
                    _destination.write(chunk)
                    _destination.flush()
        return OperationResult.SUCCESS

    def process_single_operation(self, stream_args, source, destination):
        # type: (STREAM_KWARGS, SOURCE, str) -> OperationResult
        """Processes a single encrypt/decrypt operation given a pre-loaded source.

        :param dict stream_args: kwargs to pass to `aws_encryption_sdk.stream`
        :param source: source to write
        :type source: str or file-like object
        :param str destination: destination identifier
        :returns: OperationResult stating whether the file was written
        :rtype: aws_encryption_sdk_cli.internal.identifiers.OperationResult
        """
        # "-" means stdout/stdin rather than a file path.
        if destination == "-":
            destination_writer = _stdout()
        else:
            if not self._should_write_file(destination):
                return OperationResult.SKIPPED
            _ensure_dir_exists(destination)
            destination_writer = open(os.path.abspath(destination), "wb")
        if source == "-":
            source = _stdin()
        try:
            return self._single_io_write(stream_args=stream_args,
                                         source=cast(IO, source),
                                         destination_writer=destination_writer)
        finally:
            # Always close the destination, even on error (closes stdout
            # wrapper too when destination was "-").
            destination_writer.close()

    def _should_write_file(self, filepath):
        # type: (str) -> bool
        """Determines whether a specific file should be written.

        :param str filepath: Full file path to file in question
        :rtype: bool
        """
        if not os.path.isfile(filepath):
            # The file does not exist, nothing to overwrite
            return True
        if self.no_overwrite:
            # The file exists and the caller specifically asked us not to overwrite anything
            _LOGGER.warning(
                'Skipping existing output file because of "no overwrite" option: %s',
                filepath)
            return False
        if self.interactive:
            # The file exists and the caller asked us to be consulted on action before overwriting
            decision = six.moves.input(  # type: ignore # six.moves confuses mypy
                'Overwrite existing output file "{}" with new contents? [y/N]:'
                .format(filepath))
            try:
                if decision.lower()[0] == "y":
                    _LOGGER.warning(
                        "Overwriting existing output file based on interactive user decision: %s",
                        filepath)
                    return True
                return False
            except IndexError:
                # No input is interpreted as 'do not overwrite'
                _LOGGER.warning(
                    "Skipping existing output file based on interactive user decision: %s",
                    filepath)
                return False
        # If we get to this point, the file exists and we should overwrite it
        _LOGGER.warning(
            "Overwriting existing output file because no action was specified otherwise: %s",
            filepath)
        return True

    def process_single_file(self, stream_args, source, destination):
        # type: (STREAM_KWARGS, str, str) -> None
        """Processes a single encrypt/decrypt operation on a source file.

        :param dict stream_args: kwargs to pass to `aws_encryption_sdk.stream`
        :param str source: Full file path to source file
        :param str destination: Full file path to destination file
        """
        if os.path.realpath(source) == os.path.realpath(destination):
            # File source, directory destination, empty suffix:
            _LOGGER.warning(
                "Skipping because the source (%s) and destination (%s) are the same",
                source, destination)
            return

        _LOGGER.info("%sing file %s to %s", stream_args["mode"], source,
                     destination)

        _stream_args = copy.copy(stream_args)
        # Because we can actually know size for files and Base64IO does not support seeking,
        # set the source length manually for files. This enables data key caching when
        # Base64-decoding a source file.
        source_file_size = os.path.getsize(source)
        if self.decode_input and not self.encode_output:
            # Base64 decoding shrinks the payload to 3/4 of the encoded size.
            _stream_args["source_length"] = int(source_file_size * (3 / 4))
        else:
            _stream_args["source_length"] = source_file_size

        try:
            with open(os.path.abspath(source), "rb") as source_reader:
                operation_result = self.process_single_operation(
                    stream_args=_stream_args,
                    source=source_reader,
                    destination=destination)
        except Exception:  # pylint: disable=broad-except
            operation_result = OperationResult.FAILED
            raise
        finally:
            # NOTE(review): if a BaseException (e.g. KeyboardInterrupt) escapes
            # the try body, operation_result is unbound here — presumably
            # acceptable for this CLI; confirm upstream.
            if operation_result.needs_cleanup and destination != "-":
                _LOGGER.warning("Operation failed: deleting output file: %s",
                                destination)
                try:
                    os.remove(destination)
                except OSError:
                    # if the file doesn't exist that's ok too
                    pass

    def process_dir(self, stream_args, source, destination, suffix):
        # type: (STREAM_KWARGS, str, str, str) -> None
        """Processes encrypt/decrypt operations on all files in a directory tree.

        :param dict stream_args: kwargs to pass to `aws_encryption_sdk.stream`
        :param str source: Full file path to source directory root
        :param str destination: Full file path to destination directory root
        :param str suffix: Suffix to append to output filename
        """
        _LOGGER.debug("%sing directory %s to %s", stream_args["mode"], source,
                      destination)
        # Mirror the source tree structure under the destination root.
        for base_dir, _dirs, files in os.walk(source):
            for filename in files:
                source_filename = os.path.join(base_dir, filename)
                destination_dir = _output_dir(source_root=source,
                                              destination_root=destination,
                                              source_dir=base_dir)
                destination_filename = output_filename(
                    source_filename=source_filename,
                    destination_dir=destination_dir,
                    mode=str(stream_args["mode"]),
                    suffix=suffix,
                )
                self.process_single_file(stream_args=stream_args,
                                         source=source_filename,
                                         destination=destination_filename)
class DiscoveredMachineInterface(AttrHelperMixin):
    """Discovered machine interface."""

    # MAC address, normalised to a string.
    mac_address = attr.ib(converter=str)
    # VLAN id; defaults to -1 (presumably "no VLAN" — confirm with callers).
    vid = attr.ib(converter=int, default=-1)
    # Arbitrary string tags attached to the interface.
    tags = attr.ib(converter=converter_list(str), default=attr.Factory(list))
    # Whether this interface is a boot interface.
    boot = attr.ib(converter=bool, default=False)
class Volume:
    """Volume information.

    Wraps either a songpal "audio" service (setAudioMute/setAudioVolume) or a
    UPnP RenderingControl fallback, whichever the device exposes.
    """

    make = classmethod(make)

    services = attr.ib(repr=False)
    maxVolume = attr.ib()
    minVolume = attr.ib()
    mute = attr.ib()
    output = attr.ib()
    step = attr.ib()
    volume = attr.ib()
    renderingControl = attr.ib(default=None)

    @property
    def is_muted(self):
        """Return True if volume is muted."""
        return self.mute == "on"

    def __str__(self):
        # Multi-zone devices report the zone in `output` as "...=<zone>".
        if self.output and self.output.rfind("=") > 0:
            s = "Zone %s Volume: %s/%s" % (
                self.output[self.output.rfind("=") + 1:],
                self.volume,
                self.maxVolume,
            )
        else:
            s = "Volume: %s/%s" % (self.volume, self.maxVolume)
        if self.is_muted:
            s += " (muted)"
        return s

    async def set_mute(self, activate: bool):
        """Set mute on/off.

        :param activate: True to mute, False to unmute.
        """
        enabled = "off"
        if activate:
            enabled = "on"

        if self.services and self.services["audio"].has_method("setAudioMute"):
            return await self.services["audio"]["setAudioMute"](
                mute=enabled, output=self.output)
        else:
            # UPnP fallback for devices without the songpal audio service.
            return await self.renderingControl.action("SetMute").async_call(
                InstanceID=0, Channel="Master", DesiredMute=activate)

    async def toggle_mute(self):
        """Toggle mute."""
        if self.services and self.services["audio"].has_method("setAudioMute"):
            return await self.services["audio"]["setAudioMute"](
                mute="toggle", output=self.output)
        else:
            mute_result = await self.renderingControl.action(
                "GetMute").async_call(InstanceID=0, Channel="Master")
            # FIX: set_mute is a coroutine function; previously it was returned
            # without `await`, so the caller received an unexecuted coroutine
            # and the mute state never actually changed.
            return await self.set_mute(not mute_result["CurrentMute"])

    async def set_volume(self, volume: int):
        """Set volume level.

        :param volume: Absolute volume level to set.
        """
        if self.services and self.services["audio"].has_method(
                "setAudioVolume"):
            return await self.services["audio"]["setAudioVolume"](
                volume=str(volume), output=self.output)
        else:
            return await self.renderingControl.action("SetVolume").async_call(
                InstanceID=0, Channel="Master", DesiredVolume=volume)
class PlayInfo:
    """Information about played content.

    This is only tested on music files, the outs for the method call is
    much, much larger
    """

    make = classmethod(make)

    def _make(x) -> StateInfo:
        # Converter for `stateInfo`: builds a StateInfo from the raw dict.
        # Deliberately has no `self` — it is consumed by attr.ib(converter=...)
        # while the class body executes, so it is called as a plain function.
        return StateInfo.make(**x)  # type: ignore

    stateInfo = attr.ib(converter=_make)
    contentKind = attr.ib()
    uri = attr.ib()
    output = attr.ib()  # only available when being played
    service = attr.ib()
    artist = attr.ib()
    albumName = attr.ib()
    title = attr.ib()
    durationMsec = attr.ib()
    mediaType = attr.ib()
    parentUri = attr.ib()
    positionMsec = attr.ib()
    repeatType = attr.ib()
    source = attr.ib()

    @property
    def is_idle(self):
        """Return if content is being played."""
        # A missing title is treated as "nothing playing".
        return self.title is None

    @property
    def state(self):
        """Return playback state."""
        return self.stateInfo.state

    @property
    def duration(self):
        """Return total media duration."""
        # Implicitly returns None when the device reported no duration.
        if self.durationMsec is not None:
            return timedelta(milliseconds=self.durationMsec)

    @property
    def position(self):
        """Return current media position."""
        # Implicitly returns None when the device reported no position.
        if self.positionMsec is not None:
            return timedelta(milliseconds=self.positionMsec)

    def __str__(self):
        return "%s (%s/%s), state %s" % (
            self.title,
            self.position,
            self.duration,
            self.state,
        )
class Content:
    """Content information."""

    make = classmethod(make)

    isBrowsable = attr.ib()
    uri = attr.ib()
    contentKind = attr.ib()
    isPlayable = attr.ib()
    index = attr.ib()
    title = attr.ib()  # no idea why this would be missing..
    folderNo = attr.ib()  # files do not have this
    fileNo = attr.ib()  # folders do not have this
    parentUri = attr.ib()  # toplevel has no parents
    fileSizeByte = attr.ib()  # dirs do not have this
    createdTime = attr.ib()
    broadcastFreqBand = attr.ib()
    broadcastFreq = attr.ib()

    def __str__(self):
        return "%s (%s, kind: %s)" % (self.title, self.uri, self.contentKind)
class TestFoo(object):
    """Attrs-based class with two required attributes (presumably a test fixture)."""

    x = attr.ib()
    y = attr.ib()
class PyiVisitor(ast.NodeVisitor):
    """AST visitor that collects flake8-pyi (Y0xx) errors for a stub file."""

    # Path of the stub being checked; used to special-case typing.pyi.
    filename = attr.ib(default=Path("(none)"))
    # Accumulated Error tuples; consumed by run().
    errors = attr.ib(default=attr.Factory(list))
    # Class-body nesting depth; >0 while inside a class definition.
    _in_class = attr.ib(default=0)

    def visit_Assign(self, node: ast.Assign) -> None:
        self.generic_visit(node)
        # Attempt to find assignments to type helpers (typevars and aliases), which should be
        # private.
        if (
            isinstance(node.value, ast.Call)
            and isinstance(node.value.func, ast.Name)
            and node.value.func.id == "TypeVar"
        ):
            for target in node.targets:
                if isinstance(target, ast.Name) and not target.id.startswith("_"):
                    # avoid catching AnyStr in typing (the only library TypeVar so far)
                    if not self.filename.name == "typing.pyi":
                        self.error(target, Y001)
        # Bare numeric/string assignments should use annotations instead.
        if len(node.targets) == 1 and isinstance(node.targets[0], ast.Name):
            if isinstance(node.value, (ast.Num, ast.Str)):
                self.error(node.value, Y015)

    def visit_AnnAssign(self, node: ast.AnnAssign) -> None:
        if isinstance(node.target, ast.Name):
            # Annotated assignments should have no value (or "...") in a stub.
            if node.value and not isinstance(node.value, ast.Ellipsis):
                self.error(node.value, Y015)
            elif node.value and not self._in_class:
                self.error(node.value, Y092)

    def visit_If(self, node: ast.If) -> None:
        self.generic_visit(node)
        test = node.test
        # `and`/`or` chains: check each operand as its own comparison.
        if isinstance(test, ast.BoolOp):
            for expr in test.values:
                self._check_if_expr(expr)
        else:
            self._check_if_expr(test)

    def _check_if_expr(self, node: ast.expr) -> None:
        # Only sys.version_info / sys.platform comparisons are allowed in
        # stub-file conditionals; everything else is Y002.
        if not isinstance(node, ast.Compare):
            self.error(node, Y002)
            return
        if len(node.comparators) != 1:
            # mypy doesn't support chained comparisons
            self.error(node, Y002)
            return
        if isinstance(node.left, ast.Subscript):
            self._check_subscript_version_check(node)
        elif isinstance(node.left, ast.Attribute):
            if isinstance(node.left.value, ast.Name) and node.left.value.id == "sys":
                if node.left.attr == "platform":
                    self._check_platform_check(node)
                elif node.left.attr == "version_info":
                    self._check_version_check(node)
                else:
                    self.error(node, Y002)
            else:
                self.error(node, Y002)
        else:
            self.error(node, Y002)

    def _check_subscript_version_check(self, node: ast.Compare) -> None:
        # Validates `sys.version_info[...]` comparisons.
        # unless this is on, comparisons against a single integer aren't allowed
        must_be_single = False
        # if strict equality is allowed, it must be against a tuple of this length
        can_have_strict_equals: Optional[int] = None
        version_info = node.left
        if isinstance(version_info, ast.Subscript):
            slc = version_info.slice
            if isinstance(slc, (ast.Index, ast.Num)):
                # Python 3.9 flattens the AST and removes Index, so simulate that here
                slice_num = slc if isinstance(slc, ast.Num) else slc.value
                # anything other than the integer 0 doesn't make much sense
                # (things that are in 2.7 and 3.7 but not 3.6?)
                if isinstance(slice_num, ast.Num) and slice_num.n == 0:
                    must_be_single = True
                else:
                    self.error(node, Y003)
            elif isinstance(slc, ast.Slice):
                # allow only [:1] and [:2]
                if slc.lower is not None or slc.step is not None:
                    self.error(node, Y003)
                elif isinstance(slc.upper, ast.Num) and slc.upper.n in (1, 2):
                    can_have_strict_equals = slc.upper.n
                else:
                    self.error(node, Y003)
            else:
                # extended slicing
                self.error(node, Y003)
        self._check_version_check(
            node,
            must_be_single=must_be_single,
            can_have_strict_equals=can_have_strict_equals,
        )

    def _check_version_check(
        self,
        node: ast.Compare,
        *,
        must_be_single: bool = False,
        can_have_strict_equals: Optional[int] = None
    ) -> None:
        # Validates the right-hand side and operator of a version comparison.
        comparator = node.comparators[0]
        if must_be_single:
            if not isinstance(comparator, ast.Num) or not isinstance(comparator.n, int):
                self.error(node, Y003)
        else:
            if not isinstance(comparator, ast.Tuple):
                self.error(node, Y003)
            elif not all(isinstance(elt, ast.Num) for elt in comparator.elts):
                self.error(node, Y003)
            elif len(comparator.elts) > 2:
                # mypy only supports major and minor version checks
                self.error(node, Y004)

        cmpop = node.ops[0]
        if isinstance(cmpop, (ast.Lt, ast.GtE)):
            pass
        elif isinstance(cmpop, (ast.Eq, ast.NotEq)):
            if can_have_strict_equals is not None:
                if len(comparator.elts) != can_have_strict_equals:
                    self.error(node, Y005.format(n=can_have_strict_equals))
            else:
                self.error(node, Y006)
        else:
            self.error(node, Y006)

    def _check_platform_check(self, node: ast.Compare) -> None:
        cmpop = node.ops[0]
        # "in" might also make sense but we don't currently have one
        if not isinstance(cmpop, (ast.Eq, ast.NotEq)):
            self.error(node, Y007)

        comparator = node.comparators[0]
        if isinstance(comparator, ast.Str):
            # other values are possible but we don't need them right now
            # this protects against typos
            if comparator.s not in ("linux", "win32", "cygwin", "darwin"):
                self.error(node, Y008.format(platform=comparator.s))
        else:
            self.error(node, Y007)

    def visit_ClassDef(self, node: ast.ClassDef) -> None:
        self._in_class += 1
        self.generic_visit(node)
        self._in_class -= 1

        # empty class body should contain "..." not "pass"
        if len(node.body) == 1:
            statement = node.body[0]
            if isinstance(statement, ast.Expr) and isinstance(
                statement.value, ast.Ellipsis
            ):
                return
            elif isinstance(statement, ast.Pass):
                self.error(statement, Y009)
                return

        for i, statement in enumerate(node.body):
            # "pass" should not used in class body
            if isinstance(statement, ast.Pass):
                self.error(statement, Y012)
            # "..." should not be used in non-empty class body
            elif isinstance(statement, ast.Expr) and isinstance(
                statement.value, ast.Ellipsis
            ):
                self.error(statement, Y013)

    def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
        self.generic_visit(node)
        for i, statement in enumerate(node.body):
            if i == 0:
                # normally, should just be "..."
                if isinstance(statement, ast.Pass):
                    self.error(statement, Y009)
                    continue
                elif isinstance(statement, ast.Expr) and isinstance(
                    statement.value, ast.Ellipsis
                ):
                    continue
            # special-case raise for backwards compatibility
            if isinstance(statement, ast.Raise):
                self.error(statement, Y091)
                continue
            # allow assignments in constructor for now
            # (though these should probably be changed)
            if node.name == "__init__":
                self.error(statement, Y090)
                continue
            self.error(statement, Y010)

    def visit_arguments(self, node: ast.arguments) -> None:
        self.generic_visit(node)
        # Positional args with defaults are the last len(defaults) entries.
        args = node.args[-len(node.defaults) :]
        for arg, default in chain(
            zip(args, node.defaults), zip(node.kwonlyargs, node.kw_defaults)
        ):
            if default is None:
                continue  # keyword-only arg without a default
            if not isinstance(default, ast.Ellipsis):
                if arg.annotation is None:
                    self.error(default, Y014)
                else:
                    self.error(default, Y011)

    def error(self, node: ast.AST, message: str) -> None:
        # Record a flake8-style error tuple at the node's location.
        self.errors.append(Error(node.lineno, node.col_offset, message, PyiTreeChecker))

    def run(self, tree: ast.AST) -> Iterable[Error]:
        # Reset state so the visitor can be reused across files.
        self.errors.clear()
        self.visit(tree)
        yield from self.errors
class Token:
    """A single markdown-it token (block- or inline-level)."""

    # Type of the token (string, e.g. "paragraph_open")
    type: str = attr.ib()
    # html tag name, e.g. "p"
    tag: str = attr.ib()
    # Level change (number in {-1, 0, 1} set), where:
    # - `1` means the tag is opening
    # - `0` means the tag is self-closing
    # - `-1` means the tag is closing
    nesting: int = attr.ib()
    # Html attributes. Note this differs from the upstream "list of lists" format
    attrs: Dict[str, Union[str, int, float]] = attr.ib(factory=dict, converter=convert_attrs)
    # Source map info. Format: `[ line_begin, line_end ]`
    map: Optional[List[int]] = attr.ib(default=None)
    # nesting level, the same as `state.level`
    level: int = attr.ib(default=0)
    # An array of child nodes (inline and img tokens)
    children: Optional[List["Token"]] = attr.ib(default=None)
    # In a case of self-closing tag (code, html, fence, etc.),
    # it has contents of this tag.
    content: str = attr.ib(default="")
    # '*' or '_' for emphasis, fence string for fence, etc.
    markup: str = attr.ib(default="")
    # Additional information:
    # - Info string for "fence" tokens
    # - The value "auto" for autolink "link_open" and "link_close" tokens
    info: str = attr.ib(default="")
    # A place for plugins to store any arbitrary data
    meta: dict = attr.ib(factory=dict)
    # True for block-level tokens, false for inline tokens.
    # Used in renderer to calculate line breaks
    block: bool = attr.ib(default=False)
    # If it's true, ignore this element when rendering.
    # Used for tight lists to hide paragraphs.
    hidden: bool = attr.ib(default=False)

    def attrIndex(self, name: str) -> int:
        """Return the positional index of attribute `name`, or -1 if absent.

        Deprecated: only kept for upstream API compatibility.
        """
        warnings.warn(
            "Token.attrIndex should not be used, since Token.attrs is a dictionary",
            UserWarning,
        )
        if name not in self.attrs:
            return -1
        return list(self.attrs.keys()).index(name)

    def attrItems(self) -> List[Tuple[str, Union[str, int, float]]]:
        """Get (key, value) list of attrs."""
        return list(self.attrs.items())

    def attrPush(self, attrData: Tuple[str, Union[str, int, float]]) -> None:
        """Add `[ name, value ]` attribute to list. Init attrs if necessary."""
        name, value = attrData
        self.attrSet(name, value)

    def attrSet(self, name: str, value: Union[str, int, float]) -> None:
        """Set `name` attribute to `value`. Override old value if exists."""
        self.attrs[name] = value

    def attrGet(self, name: str) -> Union[None, str, int, float]:
        """Get the value of attribute `name`, or null if it does not exist."""
        return self.attrs.get(name, None)

    def attrJoin(self, name: str, value: str) -> None:
        """Join value to existing attribute via space.
        Or create new attribute if not exists.
        Useful to operate with token classes.

        :raises TypeError: if the existing attribute value is not a string.
        """
        if name in self.attrs:
            current = self.attrs[name]
            if not isinstance(current, str):
                # FIX: interpolate the attribute name into the message; the
                # original f-string quoted the literal word 'name'.
                raise TypeError(
                    f"existing attr {name!r} is not a str: {self.attrs[name]}")
            self.attrs[name] = f"{current} {value}"
        else:
            self.attrs[name] = value

    def copy(self) -> "Token":
        """Return a shallow copy of the instance."""
        return attr.evolve(self)

    def as_dict(
        self,
        *,
        children: bool = True,
        as_upstream: bool = True,
        meta_serializer: Optional[Callable[[dict], Any]] = None,
        filter: Optional[Callable[[attr.Attribute, Any], bool]] = None,
        dict_factory: Type[MutableMapping[str, Any]] = dict,
    ) -> MutableMapping[str, Any]:
        """Return the token as a dictionary.

        :param children: Also convert children to dicts
        :param as_upstream: Ensure the output dictionary is equal to that created by markdown-it
            For example, attrs are converted to null or lists
        :param meta_serializer: hook for serializing ``Token.meta``
        :param filter: A callable whose return code determines whether an
            attribute or element is included (``True``) or dropped (``False``).
            Is called with the `attr.Attribute` as the first argument and the
            value as the second argument.
        :param dict_factory: A callable to produce dictionaries from.
            For example, to produce ordered dictionaries instead of normal Python
            dictionaries, pass in ``collections.OrderedDict``.
        """
        mapping = attr.asdict(self, recurse=False, filter=filter,
                              dict_factory=dict_factory)
        if as_upstream and "attrs" in mapping:
            # upstream represents attrs as None or a list of [name, value] pairs
            mapping["attrs"] = (None if not mapping["attrs"] else
                                [[k, v] for k, v in mapping["attrs"].items()])
        if meta_serializer and "meta" in mapping:
            mapping["meta"] = meta_serializer(mapping["meta"])
        if children and mapping.get("children", None):
            mapping["children"] = [
                child.as_dict(
                    children=children,
                    filter=filter,
                    dict_factory=dict_factory,
                    as_upstream=as_upstream,
                    meta_serializer=meta_serializer,
                ) for child in mapping["children"]
            ]
        return mapping

    @classmethod
    def from_dict(cls, dct: MutableMapping[str, Any]) -> "Token":
        """Convert a dict to a Token."""
        token = cls(**dct)
        if token.children:
            token.children = [cls.from_dict(c) for c in token.children
                              ]  # type: ignore[arg-type]
        return token
class Coord:
    """Coordinate expressed as a direction index and a segment index.

    NOTE(review): semantics inferred from attribute names only — confirm
    against callers.
    """

    dir_index = attr.ib()
    seg_index = attr.ib()
class MagicFolderEnabledNode(object):
    """
    Keep track of a Tahoe-LAFS node child process and an associated
    magic-folder child process.

    :ivar IProcessTransport tahoe: The Tahoe-LAFS node child process.

    :ivar IProcessTransport magic_folder: The magic-folder child process.
    """

    reactor = attr.ib()
    request = attr.ib()
    temp_dir = attr.ib()
    name = attr.ib()
    tahoe = attr.ib()
    magic_folder = attr.ib()
    magic_folder_web_port = attr.ib()

    @property
    def node_directory(self):
        # Tahoe-LAFS node state lives directly under temp_dir/<name>.
        return join(self.temp_dir, self.name)

    @property
    def magic_config_directory(self):
        # magic-folder daemon configuration directory.
        return join(self.temp_dir, "magic-daemon-{}".format(self.name))

    @property
    def magic_directory(self):
        # The directory whose contents are synchronised.
        return join(self.temp_dir, "magic-{}".format(self.name))

    @classmethod
    @inlineCallbacks
    def create(
        cls,
        reactor,
        request,
        temp_dir,
        introducer_furl,
        flog_gatherer,
        name,
        tahoe_web_port,
        magic_folder_web_port,
        storage,
    ):
        """
        Launch the two processes and return a new ``MagicFolderEnabledNode``
        referencing them.

        Note this depends on pytest/Twisted integration for magical blocking.

        :param reactor: The reactor to use to launch the processes.

        :param request: The pytest request object to use for cleanup.

        :param bytes temp_dir: A directory beneath which to place the
            Tahoe-LAFS node.

        :param bytes introducer_furl: The introducer fURL to configure the new
            Tahoe-LAFS node with.

        :param bytes flog_gatherer: The flog gatherer fURL to configure the
            new Tahoe-LAFS node with.

        :param bytes name: A nickname to assign the new Tahoe-LAFS node.

        :param bytes tahoe_web_port: An endpoint description of the web port
            for the new Tahoe-LAFS node to listen on.

        :param bytes magic_folder_web_port: An endpoint description of the web
            port for the new magic-folder process to listen on.

        :param bool storage: True if the node should offer storage, False
            otherwise.
        """
        # Make the Tahoe-LAFS node process
        tahoe = yield _create_node(
            reactor,
            request,
            temp_dir,
            introducer_furl,
            flog_gatherer,
            name,
            tahoe_web_port,
            storage,
            needed=1,
            happy=1,
            total=1,
        )
        yield await_client_ready(reactor, tahoe)

        # Create the magic-folder daemon config
        yield _init_magic_folder(
            reactor,
            request,
            temp_dir,
            name,
            magic_folder_web_port,
        )

        # Run the magic-folder daemon
        magic_folder = yield _run_magic_folder(
            reactor,
            request,
            temp_dir,
            name,
        )
        returnValue(
            cls(
                reactor,
                request,
                temp_dir,
                name,
                tahoe,
                magic_folder,
                magic_folder_web_port,
            ))

    @inlineCallbacks
    def stop_magic_folder(self):
        # Ask the daemon to terminate and wait for it to exit; a process that
        # already exited is fine.
        self.magic_folder.signalProcess('TERM')
        try:
            yield self.magic_folder.proto.exited
        except ProcessExitedAlready:
            pass

    @inlineCallbacks
    def restart_magic_folder(self):
        # Full stop/start cycle; replaces self.magic_folder.
        yield self.stop_magic_folder()
        yield self.start_magic_folder()

    @inlineCallbacks
    def start_magic_folder(self):
        # Eliot action wraps the start for test-log correlation.
        with start_action(
                action_type=u"integration:alice:magic_folder:magic-text"):
            self.magic_folder = yield _run_magic_folder(
                self.reactor,
                self.request,
                self.temp_dir,
                self.name,
            )
class RequestedMachineBlockDevice(AttrHelperMixin):
    """Requested machine block device information."""

    # Requested device size, coerced to int (units not visible here —
    # presumably bytes; confirm with callers).
    size = attr.ib(converter=int)
    # String tags the device must match.
    tags = attr.ib(converter=converter_list(str), default=attr.Factory(list))
class Image:
    """Represent an image."""

    # MIME content type (e.g. "image/jpeg" — presumably; confirm with callers).
    content_type: str = attr.ib()
    # Raw image bytes.
    content: bytes = attr.ib()
class RegistryState:
    """Registry state container.

    NOTE(review): the meaning of "s1"/"s2" is not visible here — confirm
    against callers.
    """

    # Mapping of instances; must be a dict.
    s1_instances = attr.ib(validator=instance_of(dict), default=attr.Factory(dict))
    # Optional single S1Instance, or None.
    s2_instance = attr.ib(validator=optional(instance_of(S1Instance)), default=None)
class ImageData:
    """Image Data class."""

    # Pixel data, shape (count, height, width) — enforced by _validate_data.
    data: numpy.ndarray = attr.ib()
    # Validity mask, shape (height, width); 0 = masked, 255 = valid.
    mask: numpy.ndarray = attr.ib()
    assets: Optional[List[str]] = attr.ib(default=None)
    bounds: Optional[BoundingBox] = attr.ib(default=None, converter=to_coordsbbox)
    crs: Optional[CRS] = attr.ib(default=None)
    metadata: Optional[Dict] = attr.ib(factory=dict)

    @data.validator
    def _validate_data(self, attribute, value):
        """ImageData data has to be a 3d array in form of (count, height, width)"""
        if not len(value.shape) == 3:
            raise ValueError(
                "ImageData data has to be an array in form of (count, height, width)"
            )

    @mask.default
    def _default_mask(self):
        # Default mask marks every pixel valid (255).
        return numpy.zeros((self.height, self.width), dtype="uint8") + 255

    def __iter__(self):
        """Allow for variable expansion (``arr, mask = ImageData``)"""
        for i in (self.data, self.mask):
            yield i

    @classmethod
    def create_from_list(cls, data: Sequence["ImageData"]):
        """Create ImageData from a sequence of ImageData objects."""
        arr = numpy.concatenate([img.data for img in data])
        # A pixel is valid only where every input image marks it valid.
        mask = numpy.all([img.mask for img in data], axis=0).astype(numpy.uint8) * 255
        assets = [img.assets[0] for img in data if img.assets]

        # bounds/crs are taken from the first image that has them.
        bounds_values = [img.bounds for img in data if img.bounds]
        bounds = bounds_values[0] if bounds_values else None

        crs_values = [img.crs for img in data if img.crs]
        crs = crs_values[0] if crs_values else None

        return cls(arr, mask, assets=assets, crs=crs, bounds=bounds)

    def as_masked(self) -> numpy.ma.MaskedArray:
        """return a numpy masked array."""
        data = numpy.ma.array(self.data)
        data.mask = self.mask == 0
        return data

    def data_as_image(self) -> numpy.ndarray:
        """Return the data array reshaped into an image processing/visualization software friendly order.

        (bands, rows, columns) -> (rows, columns, bands).
        """
        return reshape_as_image(self.data)

    @property
    def width(self) -> int:
        """Width of the data array."""
        return self.data.shape[2]

    @property
    def height(self) -> int:
        """Height of the data array."""
        return self.data.shape[1]

    @property
    def count(self) -> int:
        """Number of band."""
        return self.data.shape[0]

    @property
    def transform(self):
        """Returns the affine transform."""
        # Without bounds, fall back to a pixel-space transform.
        return (
            from_bounds(*self.bounds, self.width, self.height)
            if self.bounds
            else Affine.scale(self.width, -self.height)
        )

    def post_process(
        self,
        in_range: Optional[Tuple[NumType, NumType]] = None,
        out_dtype: Union[str, numpy.number] = "uint8",
        color_formula: Optional[str] = None,
        **kwargs: Any,
    ) -> "ImageData":
        """Post-process image data.

        Args:
            in_range (tuple): input min/max bounds value to rescale from.
            out_dtype (str): output datatype after rescaling (default is 'uint8')
            color_formula (str): rio-color formula (see: https://github.com/mapbox/rio-color)
            kwargs (any): keyword arguments to forward to `rio_tiler.utils.linear_rescale`

        Returns:
            ImageData: new ImageData object with the updated data.

        Examples:
            >>> img.post_process(in_range=(0, 16000))

            >>> img.post_process(color_formula="Gamma RGB 4.1")

        """
        data = self.data.copy()
        mask = self.mask.copy()

        if in_range:
            rescale_arr = tuple(_chunks(in_range, 2))
            # If fewer ranges than bands were given, reuse the first range.
            if len(rescale_arr) != self.count:
                rescale_arr = ((rescale_arr[0]),) * self.count

            for bdx in range(self.count):
                # Rescale only valid pixels; masked pixels become 0.
                data[bdx] = numpy.where(
                    self.mask,
                    linear_rescale(data[bdx], in_range=rescale_arr[bdx], **kwargs,),
                    0,
                )
            data = data.astype(out_dtype)

        if color_formula:
            # rio-color operates on non-negative uint8 data.
            data[data < 0] = 0
            for ops in parse_operations(color_formula):
                data = scale_dtype(ops(to_math_type(data)), numpy.uint8)

        return ImageData(
            data, mask, crs=self.crs, bounds=self.bounds, assets=self.assets
        )

    def render(self, add_mask: bool = True, img_format: str = "PNG", **kwargs) -> bytes:
        """Render data to image blob."""
        if img_format.lower() == "gtiff":
            # GeoTIFF output needs georeferencing; supply defaults if absent.
            if "transform" not in kwargs:
                kwargs.update({"transform": self.transform})
            if "crs" not in kwargs and self.crs:
                kwargs.update({"crs": self.crs})

        if add_mask:
            return render(self.data, self.mask, img_format=img_format, **kwargs)

        return render(self.data, img_format=img_format, **kwargs)
class PyiTreeChecker:
    """flake8 plugin entry point: runs PyiVisitor over ``.pyi`` stub files."""

    name = "flake8-pyi"
    version = __version__

    # Filled in by flake8 when it instantiates the plugin.
    tree = attr.ib(default=None)
    filename = attr.ib(default="(none)")
    options = attr.ib(default=None)

    def run(self):
        """Yield errors for the current file; non-stub files are skipped."""
        path = Path(self.filename)
        if path.suffix == ".pyi":
            visitor = PyiVisitor(filename=path)
            for error in visitor.run(self.tree):
                # error.message starts with the 4-character error code
                # (e.g. "Y093"); filter optional warnings through should_warn.
                if self.should_warn(error.message[:4]):
                    yield error

    @classmethod
    def add_options(cls, parser):
        """This is brittle, there's multiple levels of caching of defaults."""
        # Make flake8 also collect .pyi files by default.
        if isinstance(parser.parser, argparse.ArgumentParser):
            parser.parser.set_defaults(filename="*.py,*.pyi")
        else:
            # Older (optparse-based) flake8: patch the --filename default in
            # every place it is cached.
            for option in parser.options:
                if option.long_option_name == "--filename":
                    option.default = "*.py,*.pyi"
                    option.option_kwargs["default"] = option.default
                    option.to_optparse().default = option.default
                    parser.parser.defaults[option.dest] = option.default

        try:
            parser.add_option(
                "--no-pyi-aware-file-checker",
                default=False,
                action="store_true",
                parse_from_config=True,
                help="don't patch flake8 with .pyi-aware file checker",
            )
        except optparse.OptionConflictError:
            # In tests, sometimes this option gets called twice for some reason.
            pass

        parser.extend_default_ignore(DISABLED_BY_DEFAULT)

    @classmethod
    def parse_options(cls, optmanager, options, extra_args):
        """This is also brittle, only checked with flake8 3.2.1 and master."""
        if not options.no_pyi_aware_file_checker:
            # Monkeypatch flake8's file checker with the .pyi-aware variant.
            checker.FileChecker = PyiAwareFileChecker

    # Functionality to ignore some warnings. Adapted from flake8-bugbear.
    def should_warn(self, code):
        """Returns `True` if flake8-pyi should emit a particular warning.

        flake8 overrides default ignores when the user specifies `ignore = `
        in configuration.  This is problematic because it means specifying
        anything in `ignore = ` implicitly enables all optional warnings.
        This function is a workaround for this behavior.

        Users should explicitly enable these warnings.
        """
        if code[:3] != "Y09":
            # Normal warnings are safe for emission.
            return True

        if self.options is None:
            # No parsed options available: be permissive.
            return True

        # Optional (Y09x) warnings are emitted only when the user explicitly
        # selected the code or one of its prefixes ("Y0", "Y09", "Y09x").
        for i in range(2, len(code) + 1):
            if code[:i] in self.options.select:
                return True

        return False
class OpenstackInfraProvider(InfraProvider):
    """OpenStack Platform Director (undercloud) infrastructure provider."""

    STATS_TO_MATCH = ['num_template', 'num_host']
    type_name = "openstack_infra"
    mgmt_class = OpenstackInfraSystem
    db_types = ["Openstack::InfraManager"]
    endpoints_form = OpenStackInfraEndpointForm
    hosts_menu_item = "Nodes"
    # NOTE(review): the trailing comma makes this a 2-tuple of strings, not a
    # single concatenated message -- confirm consumers expect a tuple.
    bad_credentials_error_msg = (
        'Credential validation was not successful: ',
        'Login failed due to a bad username or password.'
    )

    api_version = attr.ib(default=None)
    keystone_v3_domain_id = attr.ib(default=None)

    _collections = {'nodes': OpenstackNodeCollection}

    @property
    def view_value_mapping(self):
        """Values used to fill the provider add/edit form."""
        return {
            'name': self.name,
            'prov_type': 'OpenStack Platform Director',
            'api_version': self.api_version,
            'keystone_v3_domain_id': self.keystone_v3_domain_id
        }

    @property
    def nodes(self):
        """Collection of this provider's nodes."""
        return self.collections.nodes

    def has_nodes(self):
        """Return True when the provider has at least one node."""
        return bool(self.nodes.all())

    @classmethod
    def from_config(cls, prov_config, prov_key):
        """Instantiate a provider object from a YAML configuration mapping."""
        endpoints = {}
        for endp in prov_config['endpoints']:
            for expected_endpoint in (RHOSEndpoint, EventsEndpoint, SSHEndpoint):
                if expected_endpoint.name == endp:
                    endpoints[endp] = expected_endpoint(**prov_config['endpoints'][endp])

        if prov_config.get('discovery_range'):
            start_ip = prov_config['discovery_range']['start']
            end_ip = prov_config['discovery_range']['end']
        else:
            start_ip = end_ip = prov_config.get('ipaddress')
        return cls.appliance.collections.infra_providers.instantiate(
            prov_class=cls,
            name=prov_config['name'],
            endpoints=endpoints,
            key=prov_key,
            start_ip=start_ip,
            end_ip=end_ip,
            api_version=prov_config.get('api_version', 'Keystone v2'),
            keystone_v3_domain_id=prov_config.get('domain_id'))

    def register(self, file_path):
        """Register new nodes (Openstack)
        Fill a form for new host with json file format
        This function is valid only for RHOS10 and above

        Args:
            file_path: file path of json file with new node details,
                navigation MUST be from a specific self
        """
        view = navigate_to(self, 'RegisterNodes')
        view.fill({'file': file_path})
        view.register.click()
        exp_msg = 'Nodes were added successfully. Refresh queued.'
        self.create_view(ProviderNodesView).flash.assert_success_message(exp_msg)

    def scale_down(self):
        """Scales down provider"""
        view = navigate_to(self, 'ScaleDown')
        view.checkbox.click()
        view.scale_down.click()
        self.create_view(ProviderNodesView).flash.assert_no_error()

    def scale_out(self, increase_by=1):
        """Scale out Openstack Infra provider

        Args:
            increase_by: count of nodes to be added to infra provider
        """
        view = navigate_to(self, 'ScaleOut')
        curr_compute_count = int(view.compute_count.value)
        view.compute_count.fill(curr_compute_count + increase_by)
        view.scale.click()
        self.create_view(ProviderNodesView).flash.assert_no_error()

    def node_exist(self, name='my_node'):
        """Check whether a registered/imported node exists in the CFME DB.

        This function is valid only for RHOS10 and above.  Looks the node up
        in the Ironic client listing and compares it against the hosts table
        in the CFME database.

        Args:
            name: node name to look up (default 'my_node')

        Returns:
            bool: True if a host row matches the node's UUID.
        """
        nodes = self.mgmt.list_node()
        nodes_dict = {i.name: i for i in nodes}
        # BUG FIX: the original computed `node_uuid = str(nodes_dict[name])`
        # and then accessed `node_uuid.uuid`, which always raises
        # AttributeError (str has no `.uuid`); it also returned after looking
        # at only the first DB row.  Take the UUID from the node object and
        # scan all rows.
        node_uuid = str(nodes_dict[name].uuid)
        query = self.appliance.db.client.session.query(
            self.appliance.db.client['hosts'], 'guid')
        return any(db_node.hosts.name == node_uuid for db_node in query.all())
class MessageBulkDeleteEvent(BaseEvent): guild: GuildIDWrapper = attr.ib() channel: ChannelIDWrapper = attr.ib() messages: typing.List[Message] = attr.ib()
class Input: """Input information.""" make = classmethod(make) meta = attr.ib() connection = attr.ib() title = attr.ib(converter=convert_title) uri = attr.ib() services = attr.ib(repr=False) active = attr.ib(converter=convert_is_active) label = attr.ib() iconUrl = attr.ib() outputs = attr.ib(default=attr.Factory(list)) avTransport = attr.ib(default=None) uriMetadata = attr.ib(default=None) def __str__(self): s = "%s (uri: %s)" % (self.title, self.uri) if self.active: s += " (active)" return s async def activate(self, output: Zone = None): """Activate this input.""" output_uri = output.uri if output else "" if self.services and "avContent" in self.services: return await self.services["avContent"]["setPlayContent"]( uri=self.uri, output=output_uri) if self.avTransport: result = await self.avTransport.action( "SetAVTransportURI").async_call( InstanceID=0, CurrentURI=self.uri, CurrentURIMetaData=self.uriMetadata) try: # Attempt to play as the songpal app is doing after changing input, # sometimes needed so that input emits sound await self.avTransport.action("Play").async_call(InstanceID=0, Speed="1") except Exception: # Play action can cause 500 error in certain cases pass return result
class DiscoveredPod(AttrHelperMixin): """Discovered pod information.""" architectures = attr.ib(converter=converter_list(str)) cores = attr.ib(converter=int) cpu_speed = attr.ib(converter=int) memory = attr.ib(converter=int) local_storage = attr.ib(converter=int) hints = attr.ib(converter=converter_obj(DiscoveredPodHints)) local_disks = attr.ib(converter=int, default=-1) iscsi_storage = attr.ib(converter=int, default=-1) capabilities = attr.ib( converter=converter_list(str), default=attr.Factory( lambda: [Capabilities.FIXED_LOCAL_STORAGE])) machines = attr.ib( converter=converter_list(DiscoveredMachine), default=attr.Factory(list)) tags = attr.ib(converter=converter_list(str), default=attr.Factory(list)) storage_pools = attr.ib( converter=converter_list(DiscoveredPodStoragePool), default=attr.Factory(list))
class Storage: """Storage information.""" make = classmethod(make) def _make(x) -> bool: return True if x == "mounted" else False deviceName = attr.ib() uri = attr.ib() volumeLabel = attr.ib() freeCapacityMB = attr.ib() systemAreaCapacityMB = attr.ib() wholeCapacityMB = attr.ib() formattable = attr.ib() formatting = attr.ib() isAvailable = attr.ib(converter=convert_to_bool) mounted = attr.ib(converter=_make) permission = attr.ib() position = attr.ib() def __str__(self): return "%s (%s) in %s (%s/%s free), available: %s, mounted: %s" % ( self.deviceName, self.uri, self.position, self.freeCapacityMB, self.wholeCapacityMB, self.isAvailable, self.mounted, )
class Scheme: """Input scheme container.""" make = classmethod(make) scheme = attr.ib() # type: str
class Point: ttype = attr.ib() value = attr.ib() def __dict__(self): return {"value": self.value, "ttype": self.ttype, "class": "Point"}
class MessageDeleteEvent(BaseEvent): guild: GuildIDWrapper = attr.ib() channel: ChannelIDWrapper = attr.ib() message: typing.Optional[Message] = attr.ib()