def test_list_pools_default_one(self):
    """
    Verify the JSON response from listing all load balancer pools.

    By default, all tenants have one load balancer pool.
    """
    response, response_json = self.successResultOf(
        self.json_request(b"GET", "/load_balancer_pools"))
    self.assertEqual(200, response.code)
    self.assertEqual([b'application/json'],
                     response.headers.getRawHeaders(b'content-type'))
    # Exactly one pool is provisioned by default.
    self.assertEqual(1, len(response_json))
    pool_json = response_json[0]
    # has the right JSON: every LoadBalancerPool attribute except "nodes"
    # must appear as a key in the serialized pool.
    self.assertTrue(all(
        aa.name in pool_json
        for aa in attr.fields(LoadBalancerPool)
        if aa.name != "nodes"))
    # Generated values: every field except "nodes" and "status_detail"
    # must be truthy, i.e. actually populated by the server.
    self.assertTrue(all(
        pool_json.get(aa.name)
        for aa in attr.fields(LoadBalancerPool)
        if aa.name not in ("nodes", "status_detail")))
    self.assertEqual(
        {
            "cloud_servers": 0,
            "external": 0,
            "total": 0
        },
        pool_json['node_counts'],
        "Pool should start off with no members.")
def test_slots_being_used():
    """
    The class is really using __slots__.
    """
    non_slot_instance = C1(x=1, y="test")
    slot_instance = C1Slots(x=1, y="test")

    # A slotted class must expose __slots__ and must not have a per-instance
    # __dict__; the plain class is the mirror image.
    assert "__dict__" not in dir(slot_instance)
    assert "__slots__" in dir(slot_instance)

    assert "__dict__" in dir(non_slot_instance)
    assert "__slots__" not in dir(non_slot_instance)

    assert set(["x", "y"]) == set(slot_instance.__slots__)

    # Size comparison only runs when pympler is available.
    if has_pympler:
        assert asizeof(slot_instance) < asizeof(non_slot_instance)

    # Arbitrary attributes can be added to the dict-backed instance only;
    # the slotted instance rejects unknown attributes.
    non_slot_instance.t = "test"
    with pytest.raises(AttributeError):
        slot_instance.t = "test"

    # Apart from slots, both classes behave identically.
    assert 1 == non_slot_instance.method()
    assert 1 == slot_instance.method()

    assert attr.fields(C1Slots) == attr.fields(C1)
    assert attr.asdict(slot_instance) == attr.asdict(non_slot_instance)
def stats_to_list(stats, include_lists=None):
    """
    Renders stats entity to a list.
    If include_lists is specified it will skip not included fields.
    Also it always skips any nested lists and dictionaries
    because they brings dynamically changing columns list **.

    Args:
      stats: An instance of stats entity.
      include_lists: An instance of IncludeLists.
    Returns:
      A list representing stats.
    """
    # Determine which attrs fields should be rendered.
    if include_lists:
        included = include_lists.get_included_attrs(stats.__class__)
    else:
        included = attr.fields(stats.__class__)

    result = []
    for att in included:
        if not att.metadata:
            # Plain scalar field: render its value directly.
            value = getattr(stats, att.name)
            result.append(value)
        elif Meta.ENTITY in att.metadata:
            value = getattr(stats, att.name)
            if value is not MISSED:
                # Render nested stats entity
                result += stats_to_list(value, include_lists)
            else:
                # Render needed number of MISSED values — one placeholder per
                # column the nested entity would have produced, so column
                # alignment with get_stats_header is preserved.
                stats_class = att.metadata[Meta.ENTITY]
                if include_lists:
                    values_number = len(include_lists.get_included_attrs(stats_class))
                else:
                    values_number = len(attr.fields(stats_class))
                result += [MISSED] * values_number
        # Fields with other metadata (nested lists/dicts) are skipped.
    return result
# IDE-inspection fixture: `p` is typed as a union of INSTANCES, so the
# class-level helpers (fields/fields_dict) must be flagged, while the
# instance-level helpers must be accepted.  NOTE(review): the embedded
# <warning> markup is the expected-inspection annotation, not Python.
def union1(p: Union[A, B]):
    attr.fields(<warning descr="'attr.fields' method should be called on attrs types">p</warning>)
    attr.fields_dict(<warning descr="'attr.fields_dict' method should be called on attrs types">p</warning>)
    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
# IDE-inspection fixture: `p` is typed as a union of TYPES, so the
# class-level helpers must be accepted, while the instance-level helpers
# (asdict/astuple/assoc/evolve) must be flagged.
def union2(p: Union[Type[A], Type[B]]):
    attr.fields(p)
    attr.fields_dict(p)
    attr.asdict(<warning descr="'attr.asdict' method should be called on attrs instances">p</warning>)
    attr.astuple(<warning descr="'attr.astuple' method should be called on attrs instances">p</warning>)
    attr.assoc(<warning descr="'attr.assoc' method should be called on attrs instances">p</warning>)
    attr.evolve(<warning descr="'attr.evolve' method should be called on attrs instances">p</warning>)
def test_splits(self):
    """
    Splits correctly.
    """
    # Types go into the first bucket, Attribute objects into the second.
    expected = (
        frozenset([int, str]),
        frozenset([fields(C).a]),
    )
    actual = _split_what((str, fields(C).a, int,))
    assert expected == actual
# IDE-inspection fixture: `p` is unannotated (structurally typed), so none
# of the attrs helper calls are expected to be flagged.
def structural(p):
    print(len(p))
    attr.fields(p)
    attr.fields_dict(p)
    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
def insert(self, connection):
    """
    Commit a connection to the database

    :param sakia.data.entities.Connection connection: the connection to commit
    """
    # Serialize every field except password and salt, which are never
    # written to the connections table.
    secrets_filter = attr.filters.exclude(attr.fields(Connection).password,
                                          attr.fields(Connection).salt)
    row = attr.astuple(connection, filter=secrets_filter)
    placeholders = ",".join(['?'] * len(row))
    self._conn.execute("INSERT INTO connections VALUES ({0})".format(placeholders),
                       row)
def test_typing_annotations(self):
    """
    Sets the `Attribute.type` attr from typing annotations.
    """
    @attr.s
    class C:
        x: typing.List[int] = attr.ib()
        y = attr.ib(type=typing.Optional[str])

    # Both the annotation form (x) and the explicit type= form (y) must end
    # up stored, identically (`is`), on Attribute.type.
    assert typing.List[int] is attr.fields(C).x.type
    assert typing.Optional[str] is attr.fields(C).y.type
def test_basic_annotations(self):
    """
    Sets the `Attribute.type` attr from basic type annotations.
    """
    @attr.s
    class C:
        x: int = attr.ib()
        y = attr.ib(type=str)
        z = attr.ib()

    # Annotation form, explicit type= form, and no type at all.
    assert int is attr.fields(C).x.type
    assert str is attr.fields(C).y.type
    assert None is attr.fields(C).z.type
async def _async_save(self) -> None:
    """Save data."""
    # Nothing to persist before settings have been loaded.
    if self._user_settings is None:
        return

    await self._user_store.async_save({STORAGE_USERS: {
        user_id: attr.asdict(
            notify_setting,
            # secret and counter are excluded from the persisted dict
            # (NOTE(review): presumably sensitive/ephemeral — confirm).
            filter=attr.filters.exclude(
                attr.fields(NotifySetting).secret,
                attr.fields(NotifySetting).counter,
            ))
        for user_id, notify_setting in self._user_settings.items()
    }})
def test_change(self, C, data): """ Changes work. """ # Take the first attribute, and change it. assume(fields(C)) # Skip classes with no attributes. field_names = [a.name for a in fields(C)] original = C() chosen_names = data.draw(st.sets(st.sampled_from(field_names))) # We pay special attention to private attributes, they should behave # like in `__init__`. change_dict = {name.replace('_', ''): data.draw(st.integers()) for name in chosen_names} changed = evolve(original, **change_dict) for name in chosen_names: assert getattr(changed, name) == change_dict[name.replace('_', '')]
def insert(self, contact):
    """
    Commit a contact to the database

    :param sakia.data.entities.Contact contact: the contact to commit
    """
    contacts_list = attr.astuple(contact, tuple_factory=list)
    # Index 3 holds a list; flatten it to a newline-joined string so it fits
    # a single TEXT column.
    contacts_list[3] = "\n".join([str(n) for n in contacts_list[3]])
    if contacts_list[-1] == -1:
        # Last field appears to be the unsaved-id sentinel (-1): drop the id
        # column so SQLite assigns a fresh rowid.
        col_names = ",".join([a.name for a in attr.fields(Contact)[:-1]])
        contacts_list = contacts_list[:-1]
    else:
        col_names = ",".join([a.name for a in attr.fields(Contact)])
    values = ",".join(['?'] * len(contacts_list))
    cursor = self._conn.cursor()
    cursor.execute("INSERT INTO contacts ({:}) VALUES ({:})".format(col_names, values),
                   contacts_list)
    # Propagate the database-assigned id back onto the entity.
    contact.contact_id = cursor.lastrowid
def create_widgets(self): """Creates all widgets in main window""" # Entry widgets self.labels = [] self.entry_widgets = [] for setting in attr.fields(PwmSettings): self.labels.append(tk.Label(self, justify="left", text=setting.metadata["guitext"])) widget = self.type2widget[setting.type](self) widget.set(self.settings[setting.name]) self.entry_widgets.append(widget) # Buttons self.generate_button = tk.Button(self, text="Generate", command=self.generate) self.load_button = tk.Button(self, text="Load", command=self.load) self.save_button = tk.Button(self, text="Save", command=self.save) self.passwd_label = tk.Label(self, justify="left", text="Password") self.listbox_label = tk.Label(self, justify="left", text="Settings") self.listbox = tk.Listbox(self) self.listbox .bind('<<ListboxSelect>>', self.on_listbox) self.listbox.insert("end", "default") self.listbox.select_set(0) self.new_setting_button = tk.Button(self, text="+", command=self.new_setting) self.delete_setting_button = tk.Button(self, text="-", command=self.del_setting) self.passwd_text = tk.Entry(self, fg="blue")
def update(self, blockchain):
    """
    Update an existing blockchain in the database

    :param sakia.data.entities.Blockchain blockchain: the blockchain to update
    """
    # SET values: every field except the nested parameters entity and the
    # primary keys.
    updated_fields = attr.astuple(blockchain, filter=attr.filters.exclude(
        attr.fields(Blockchain).parameters,
        *BlockchainsRepo._primary_keys))
    # WHERE values: the primary keys only.
    where_fields = attr.astuple(blockchain,
                                filter=attr.filters.include(*BlockchainsRepo._primary_keys))
    # Placeholder order must match updated_fields followed by where_fields.
    self._conn.execute("""UPDATE blockchains SET
                          current_buid=?,
                          current_members_count=?,
                          current_mass=?,
                          median_time=?,
                          last_mass=?,
                          last_members_count=?,
                          last_ud=?,
                          last_ud_base=?,
                          last_ud_time=?,
                          previous_mass=?,
                          previous_members_count=?,
                          previous_ud=?,
                          previous_ud_base=?,
                          previous_ud_time=?
                          WHERE
                          currency=?""",
                       updated_fields + where_fields)
def generate_image_url(self):
    """
    A URL to a clear image that can be embedded in HTML documents to track
    email open events.

    The query string of this URL is used to capture data about the email
    and visitor.
    """
    # NOTE: this code is Python 2 (uses `unicode` and u"" literals).
    parameters = {}
    fields = attr.fields(self.__class__)
    # Fields marked with 'param_name' metadata become query parameters.
    for attribute in fields:
        value = getattr(self, attribute.name, None)
        if value is not None and 'param_name' in attribute.metadata:
            parameter_name = attribute.metadata['param_name']
            parameters[parameter_name] = str(value)

    # Without a tracking id no useful URL can be produced.
    tracking_id = self._get_tracking_id()
    if tracking_id is None:
        return None
    parameters['tid'] = tracking_id

    # Optional custom-dimension parameter carrying the user id.
    user_id_dimension = get_config_value_from_site_or_settings(
        "GOOGLE_ANALYTICS_USER_ID_CUSTOM_DIMENSION",
        site=self.site,
    )
    if user_id_dimension is not None and self.user_id is not None:
        parameter_name = 'cd{0}'.format(user_id_dimension)
        parameters[parameter_name] = self.user_id

    # Fall back to the course id as the event label when none was set.
    if self.course_id is not None and self.event_label is None:
        param_name = fields.event_label.metadata['param_name']
        parameters[param_name] = unicode(self.course_id)

    return u"https://www.google-analytics.com/collect?{params}".format(params=urlencode(parameters))
def test_typing_annotations(self):
    """
    Sets the `Attribute.type` attr from typing annotations.
    """
    @attr.s
    class C:
        x: typing.List[int] = attr.ib()
        y = attr.ib(type=typing.Optional[str])

    assert typing.List[int] is attr.fields(C).x.type
    assert typing.Optional[str] is attr.fields(C).y.type
    # The generated __init__ must carry matching annotations.
    assert C.__init__.__annotations__ == {
        'x': typing.List[int],
        'y': typing.Optional[str],
        'return': None,
    }
def get_stats_header(stats_class, include_lists=None, prefix=''):
    """
    Renders a list containing names of fields.
    If include_lists is specified it will skip not included fields.
    Also it always skips any nested lists and dictionaries
    because they bring dynamically changing columns list **.
    Order of names in this header corresponds to values order in
    a list generated by stats_to_list.

    Args:
      stats_class: An @attr.s decorated class representing stats model.
      include_lists: An instance of IncludeLists.
      prefix: A string prefix to be prepended to column names.
    Returns:
      A list representing names of stats fields.
    """
    # Determine which attrs fields should be rendered.
    if include_lists:
        included = include_lists.get_included_attrs(stats_class)
    else:
        included = attr.fields(stats_class)

    result = []
    for att in included:
        if not att.metadata:
            # Plain scalar field: a single column.
            result.append('{}{}'.format(prefix, att.name))
        else:
            # Nested entity: recurse, extending the dotted column prefix.
            # Fields with other metadata (lists/dicts) are skipped.
            nested_entity_class = att.metadata.get(Meta.ENTITY)
            if nested_entity_class:
                result += get_stats_header(nested_entity_class, include_lists,
                                           '{}{}.'.format(prefix, att.name))
    return result
def assert_proper_tuple_class(obj, obj_tuple):
    """Recursively assert that *obj_tuple* (a dump of *obj*) uses tuple_class
    at every nesting level that corresponds to an attrs instance."""
    assert isinstance(obj_tuple, tuple_class)
    for idx, attribute in enumerate(fields(obj.__class__)):
        attr_value = getattr(obj, attribute.name)
        if has(attr_value.__class__):
            # Nested attrs instance: check its dumped counterpart too.
            assert_proper_tuple_class(attr_value, obj_tuple[idx])
def assert_proper_col_class(obj, obj_tuple):
    """Recursively assert that collection types in *obj* survive dumping
    into *obj_tuple* unchanged (lists stay lists, dicts stay dicts, etc.)."""
    # Iterate over all attributes, and if they are lists or mappings
    # in the original, assert they are the same class in the dumped.
    for index, field in enumerate(fields(obj.__class__)):
        field_val = getattr(obj, field.name)
        if has(field_val.__class__):
            # This field holds a class, recurse the assertions.
            assert_proper_col_class(field_val, obj_tuple[index])
        elif isinstance(field_val, (list, tuple)):
            # This field holds a sequence of something.
            expected_type = type(obj_tuple[index])
            assert type(field_val) is expected_type  # noqa: E721
            # Recurse into any attrs instances contained in the sequence.
            for obj_e, obj_tuple_e in zip(field_val, obj_tuple[index]):
                if has(obj_e.__class__):
                    assert_proper_col_class(obj_e, obj_tuple_e)
        elif isinstance(field_val, dict):
            orig = field_val
            tupled = obj_tuple[index]
            assert type(orig) is type(tupled)  # noqa: E721
            # Keys and values may each be attrs instances; check both.
            for obj_e, obj_tuple_e in zip(orig.items(), tupled.items()):
                if has(obj_e[0].__class__):  # Dict key
                    assert_proper_col_class(obj_e[0], obj_tuple_e[0])
                if has(obj_e[1].__class__):  # Dict value
                    assert_proper_col_class(obj_e[1], obj_tuple_e[1])
def update_widgets(self):
    """Updates widgets from current self.settings"""
    self.settings = self.settings_list.get_pwm_settings()
    # Each entry widget is paired positionally with a PwmSettings field.
    for field, entry in zip(attr.fields(PwmSettings), self.entry_widgets):
        entry.set(self.settings[field.name])
def __str__(self):
    """One ``name=repr(value)`` line per attrs field, skipping None values."""
    lines = [
        '{}={!r}'.format(a.name, getattr(self, a.name))
        for a in attr.fields(self.__class__)
        if getattr(self, a.name) is not None
    ]
    return '\n'.join(lines)
def test_asdict_preserve_order(self, cls):
    """
    Field order should be preserved when dumping to OrderedDicts.
    """
    dumped = asdict(cls(), dict_factory=OrderedDict)
    expected_order = [a.name for a in fields(cls)]
    assert expected_order == list(dumped.keys())
def test_change(self, C, data): """ Changes work. """ # Take the first attribute, and change it. assume(fields(C)) # Skip classes with no attributes. field_names = [a.name for a in fields(C)] original = C() chosen_names = data.draw(st.sets(st.sampled_from(field_names))) change_dict = {name: data.draw(st.integers()) for name in chosen_names} with pytest.deprecated_call(): changed = assoc(original, **change_dict) for k, v in change_dict.items(): assert getattr(changed, k) == v
def test_pickle_object(self, cls, protocol):
    """
    Pickle object serialization works on all kinds of attrs classes.
    """
    # Fixture classes take either one or two init arguments.
    init_args = (123, 456) if len(attr.fields(cls)) == 2 else (123,)
    obj = cls(*init_args)
    roundtripped = pickle.loads(pickle.dumps(obj, protocol))
    assert repr(obj) == repr(roundtripped)
def is_information_complete(self) -> bool:
    """Return if all information is filled out."""
    # is_dynamic_group is only meaningful for audio groups, so it is
    # checked separately from the truthiness test of the other fields.
    want_dynamic_group = self.is_audio_group
    have_dynamic_group = self.is_dynamic_group is not None
    # All remaining fields must be truthy (non-empty / non-zero / not None).
    have_all_except_dynamic_group = all(
        attr.astuple(self, filter=attr.filters.exclude(
            attr.fields(ChromecastInfo).is_dynamic_group)))
    return (have_all_except_dynamic_group
            and (not want_dynamic_group or have_dynamic_group))
def insert(self, blockchain):
    """
    Commit a blockchain to the database

    :param sakia.data.entities.Blockchain blockchain: the blockchain to commit
    """
    # The row is the flattened parameters entity followed by every other
    # blockchain field (parameters itself is excluded from the second part).
    parameters_part = attr.astuple(blockchain.parameters)
    blockchain_part = attr.astuple(
        blockchain,
        filter=attr.filters.exclude(attr.fields(Blockchain).parameters))
    row = parameters_part + blockchain_part
    placeholders = ",".join(['?'] * len(row))
    self._conn.execute("INSERT INTO blockchains VALUES ({0})".format(placeholders),
                       row)
def test_basic_annotations(self):
    """
    Sets the `Attribute.type` attr from basic type annotations.
    """
    @attr.s
    class C:
        x: int = attr.ib()
        y = attr.ib(type=str)
        z = attr.ib()

    # Annotation form, explicit type= form, and untyped.
    assert int is attr.fields(C).x.type
    assert str is attr.fields(C).y.type
    assert None is attr.fields(C).z.type
    # Only annotated attributes appear in the generated __init__ signature
    # annotations; z has no type and is omitted.
    assert C.__init__.__annotations__ == {
        'x': int,
        'y': str,
        'return': None,
    }
def WriteStocksToCSV(fn, ticker_to_stocks):
    """Write every StockInfo record to a CSV file.

    The header row is derived from the attrs fields of StockInfo, so the
    column order matches the field declaration order.

    :param fn: path of the CSV file to (over)write.
    :param ticker_to_stocks: mapping of ticker -> StockInfo instance.
    """
    import csv
    fieldnames = [field.name for field in attr.fields(StockInfo)]
    # newline='' is required by the csv module: without it the writer emits
    # \r\r\n line endings (blank rows) on Windows.
    with open(fn, 'w', newline='') as csvfile:
        csv_writer = csv.DictWriter(csvfile, fieldnames=fieldnames,
                                    delimiter=',')
        csv_writer.writeheader()
        # Keys (tickers) are not used; iterate the values directly.
        for stock in ticker_to_stocks.values():
            csv_writer.writerow(attr.asdict(stock))
def test_factory_sugar(self):
    """
    Passing factory=f is syntactic sugar for passing default=Factory(f).
    """
    @attr.s
    class C(object):
        x = attr.ib(factory=list)

    stored_default = attr.fields(C).x.default
    assert Factory(list) == stored_default
def __init__(
    self,
    name: str,
    audit_flags: AuditFlag = AuditFlag.NONE,
    cache_dir=None,
    cache_locations=None,
    inputs: ty.Union[ty.Text, File, ty.Dict, None] = None,
    messenger_args=None,
    messengers=None,
):
    """
    Initialize a task.

    Tasks allow for caching (retrieving a previous result of the same
    task definition and inputs), and concurrent execution.
    Running tasks follows a decision flow:

        1.  Check whether prior cache exists --
            if ``True``, return cached result
        2.  Check whether other process is running this task --
            wait if ``True``:
            a. Finishes (with or without exception) -> return result
            b. Gets killed -> restart
        3.  No cache or other process -> start
        4.  Two or more concurrent new processes get to start

    Parameters
    ----------
    name : :obj:`str`
        Unique name of this node
    audit_flags : :class:`AuditFlag`, optional
        Configure provenance tracking. Default is no provenance tracking.
        See available flags at :class:`~pydra.utils.messenger.AuditFlag`.
    cache_dir : :obj:`os.pathlike`
        Set a custom directory of previously computed nodes.
    cache_locations : TODO
    inputs : :obj:`typing.Text`, or :class:`File`, or :obj:`dict`, or `None`.
        Set particular inputs to this node.
    messenger_args : TODO
    messengers : TODO

    """
    from .. import check_latest_version

    # Etelemetry version check runs once per process (class-level cache).
    if TaskBase._etelemetry_version_data is None:
        TaskBase._etelemetry_version_data = check_latest_version()

    self.name = name
    if not self.input_spec:
        raise Exception("No input_spec in class: %s" % self.__class__.__name__)
    # Build an attrs class from the input spec and instantiate it with each
    # field's declared default.
    klass = make_klass(self.input_spec)
    # todo should be used to input_check in spec??
    self.inputs = klass(
        **{
            # Leading-underscore field names are exposed without the
            # underscore as init keywords.
            (f.name[1:] if f.name.startswith("_") else f.name): f.default
            for f in attr.fields(klass)
        }
    )
    # Public input names exclude the internal bookkeeping fields.
    self.input_names = [
        field.name
        for field in attr.fields(klass)
        if field.name not in ["_func", "_graph_checksums"]
    ]
    # dictionary to save the connections with lazy fields
    self.inp_lf = {}
    self.state = None
    self._output = {}
    self._result = {}
    # flag that says if node finished all jobs
    self._done = False
    if self._input_sets is None:
        self._input_sets = {}
    # `inputs` may be a dict of overrides, a path to a JSON file, or the
    # name of a predefined input set.
    if inputs:
        if isinstance(inputs, dict):
            inputs = {k: v for k, v in inputs.items() if k in self.input_names}
        elif Path(inputs).is_file():
            inputs = json.loads(Path(inputs).read_text())
        elif isinstance(inputs, str):
            if self._input_sets is None or inputs not in self._input_sets:
                raise ValueError("Unknown input set {!r}".format(inputs))
            inputs = self._input_sets[inputs]
        self.inputs = attr.evolve(self.inputs, **inputs)
        self.inputs.check_metadata()
        self.state_inputs = inputs

    self.audit = Audit(
        audit_flags=audit_flags,
        messengers=messengers,
        messenger_args=messenger_args,
        develop=develop,
    )
    self.cache_dir = cache_dir
    self.cache_locations = cache_locations
    self.allow_cache_override = True
    self._checksum = None
    self.plugin = None
    self.hooks = TaskHook()
def __iter__(cls):
    """Yield the name of each attrs field declared on the class."""
    for field in attr.fields(cls):
        yield field.name
def fields(cls):
    """Return the names of all attrs fields except the last one."""
    # Slice the Attribute tuple first, then extract names.
    return [field.name for field in attr.fields(cls)[:-1]]
def test_allow(self, incl, value):
    """
    Return True if a class or attribute is included.
    """
    include_filter = include(*incl)
    result = include_filter(fields(C).a, value)
    assert result is True
def _get_attr_fields(obj: Any) -> Sequence["_attr_module.Attribute[Any]"]:
    """Get fields for an attrs object."""
    # Without the attrs package there are no fields to report.
    if not _has_attrs:
        return []
    return _attr_module.fields(type(obj))
import math

import attr
import openpyxl

import epyqlib.pm.parametermodel
import epyqlib.utils.general

import epcpm.c
import epcpm.sunspecmodel

# Type-dispatch registries populated elsewhere in the package.
builders = epyqlib.utils.general.TypeMap()
enumeration_builders = epyqlib.utils.general.TypeMap()
enumerator_builders = epyqlib.utils.general.TypeMap()

# Cached attrs field tuples for the model classes used below.
data_point_fields = attr.fields(epcpm.sunspecmodel.DataPoint)
epc_enumerator_fields = attr.fields(
    epyqlib.pm.parametermodel.SunSpecEnumerator,
)
bitfield_fields = attr.fields(epcpm.sunspecmodel.DataPointBitfield)


def attr_fill(cls, value):
    # Instantiate cls with every init-able attrs field set to `value`.
    return cls(
        **{field.name: value for field in attr.fields(cls) if field.init})


@attr.s
class Fields:
    # Per-column flags/values; defaults of None mean "not applicable".
    field_type = attr.ib(default=None)
    applicable_point = attr.ib(default=None)
def create_schema(cls, tag, options, version, done):
    """Build a marshmallow Schema class for the attrs class *cls*.

    The schema embeds a constant type tag (and optional version), derives a
    field for each attrs attribute from its metadata, and installs a
    post-load hook that reconstructs a *cls* instance.
    """
    include = collections.OrderedDict()
    # Constant type tag, validated on load.
    include[type_attribute_name] = marshmallow.fields.String(
        default=tag,
        required=True,
        validate=validator(tag),
    )
    if version is not None:
        include[version_attribute_name] = marshmallow.fields.String(
            default=version,
            required=True,
            validate=validator(version),
        )

    for attribute in attr.fields(cls):
        metadata = attribute.metadata.get(metadata_key)
        if metadata is None:
            # Fields without schema metadata are allowed only when they have
            # a default (they simply won't be serialized).
            if attribute.default is not attr.NOTHING:
                continue
            else:
                raise MissingMetadata(
                    'Metadata required for defaultless attribute `{}`'.format(
                        attribute.name,
                    ),
                )
        include[attribute.name] = metadata.field

    meta_dict = {
        'include': include,
    }
    meta_dict.update(options)

    class Schema(marshmallow.Schema):
        Meta = type(
            'Meta',
            (),
            meta_dict,
        )
        data_class = cls

        # TODO: seems like this ought to be a static method
        @marshmallow.post_load
        def deserialize(self, data):
            # Strip the bookkeeping tag/version keys before constructing.
            del data[type_attribute_name]
            if cls.__graham_graham__.version is not None:
                del data[version_attribute_name]
            o = cls(**data)
            # Optional post-construction callback named by `done`.
            if done is not None:
                m = getattr(o, done, None)
                if m is not None:
                    m()
            return o

    Schema.__name__ = cls.__name__ + 'Schema'
    setattr(
        Schema,
        type_attribute_name,
        marshmallow.fields.Constant(constant=tag),
    )
    setattr(
        Schema,
        version_attribute_name,
        marshmallow.fields.Constant(constant=version),
    )

    return Schema
class ContactsRepo:
    """
    The repository for Contacts entities.
    """
    _conn = attr.ib()  # :type sqlite3.Contact
    # Primary key attribute(s) used to build WHERE clauses below.
    _primary_keys = (attr.fields(Contact).contact_id, )

    def insert(self, contact):
        """
        Commit a contact to the database

        :param sakia.data.entities.Contact contact: the contact to commit
        """
        contacts_list = attr.astuple(contact, tuple_factory=list)
        # Index 3 holds a list; flatten to a newline-joined string for a
        # single TEXT column.
        contacts_list[3] = "\n".join([str(n) for n in contacts_list[3]])
        if contacts_list[-1] == -1:
            # contact_id of -1 appears to mean "not yet persisted": drop the
            # id column so SQLite assigns a fresh rowid.
            col_names = ",".join([a.name for a in attr.fields(Contact)[:-1]])
            contacts_list = contacts_list[:-1]
        else:
            col_names = ",".join([a.name for a in attr.fields(Contact)])
        values = ",".join(['?'] * len(contacts_list))
        cursor = self._conn.cursor()
        cursor.execute(
            "INSERT INTO contacts ({:}) VALUES ({:})".format(
                col_names, values),
            contacts_list)
        # Propagate the database-assigned id back onto the entity.
        contact.contact_id = cursor.lastrowid

    def update(self, contact):
        """
        Update an existing contact in the database

        :param sakia.data.entities.Contact contact: the certification to update
        """
        # SET values: all non-primary-key fields, with the nested list at
        # index 3 flattened the same way as in insert().
        updated_fields = attr.astuple(
            contact,
            tuple_factory=list,
            filter=attr.filters.exclude(*ContactsRepo._primary_keys))
        updated_fields[3] = "\n".join([str(n) for n in updated_fields[3]])
        # WHERE values: the primary key(s) only.
        where_fields = attr.astuple(
            contact,
            tuple_factory=list,
            filter=attr.filters.include(*ContactsRepo._primary_keys))
        self._conn.execute(
            """UPDATE contacts SET
            currency=?,
            name=?,
            pubkey=?,
            fields=?
            WHERE
            contact_id=?
            """, updated_fields + where_fields)

    def get_one(self, **search):
        """
        Get an existing contact in the database

        :param dict search: the criterions of the lookup
        :rtype: sakia.data.entities.Contact
        """
        filters = []
        values = []
        for k, v in search.items():
            filters.append("{k}=?".format(k=k))
            values.append(v)

        request = "SELECT * FROM contacts WHERE {filters}".format(
            filters=" AND ".join(filters))

        c = self._conn.execute(request, tuple(values))
        data = c.fetchone()
        # Returns None implicitly when no row matched.
        if data:
            return Contact(*data)

    def get_all(self, **search):
        """
        Get all existing contact in the database corresponding to the search

        :param dict search: the criterions of the lookup
        :rtype: sakia.data.entities.Contact
        """
        filters = []
        values = []
        for k, v in search.items():
            value = v
            filters.append("{contact} = ?".format(contact=k))
            values.append(value)

        request = "SELECT * FROM contacts"
        # Only append a WHERE clause when criteria were supplied.
        if filters:
            request += " WHERE {filters}".format(filters=" AND ".join(filters))

        c = self._conn.execute(request, tuple(values))
        datas = c.fetchall()
        if datas:
            return [Contact(*data) for data in datas]
        return []

    def drop(self, contact):
        """
        Drop an existing contact from the database

        :param sakia.data.entities.Contact contact: the contact to update
        """
        where_fields = attr.astuple(
            contact,
            filter=attr.filters.include(*ContactsRepo._primary_keys))
        self._conn.execute(
            """DELETE FROM contacts
            WHERE
            contact_id=?""",
            where_fields)
from attr import attrs, attrib, fields


@attrs
class Point(object):
    # attrs generates __init__, __repr__, __eq__, etc. from these attributes.
    x = attrib()
    y = attrib()


# NOTE: runs at import time, printing Point's Attribute definitions.
print(fields(Point))

if __name__ == '__main__':
    p = Point(x=1, y=2)
    print(p)
def test_pickle_attributes(self, cls, protocol):
    """
    Pickling/un-pickling of Attribute instances works.
    """
    for field in attr.fields(cls):
        roundtripped = pickle.loads(pickle.dumps(field, protocol))
        assert field == roundtripped
# Validators reject bad construction; instance_of raises TypeError.
try:
    fr = Country(code="FR", population="75M")
except TypeError as e:
    print(e)
    # population must be <class 'int'>

# A custom validator raises ValueError for too-large customers.
try:
    fr = Country(code="FR", population=75, customers=100)
except ValueError as e:
    print(e)
    # ...<function less_than_population at ...

# Sometimes attrs feels a little less slick than the others...
# NOTE(review): both constructions above raise, so `fr` relies on an
# assignment earlier in the original document — confirm.
print(
    attr.asdict(fr,
                filter=attr.filters.exclude(attr.fields(Country).customers)))
# > {'code': 'FR', 'population': 100}

# Converters

@attr.s
class Country:
    code = attr.ib(validator=attr.validators.instance_of(str))
    # The converter runs before validation, so "75" -> 75 passes.
    population = attr.ib(converter=int,
                         validator=attr.validators.instance_of(int))

fr = Country(code="FR", population="75")
print(fr.population)
# 75
def test_drop_class(self, excl, value):
    """
    Return True on non-excluded classes and attributes.
    """
    exclude_filter = exclude(*excl)
    result = exclude_filter(fields(C).a, value)
    assert result is False
def test_allow(self, excl, value):
    """
    Return True if class or attribute is not excluded.
    """
    exclude_filter = exclude(*excl)
    result = exclude_filter(fields(C).a, value)
    assert result is True
def to_settings(self) -> Dict:
    """Serialize to a settings dict, omitting the ``path`` field."""
    path_filter = attr.filters.exclude(attr.fields(SwaggerUiSettings).path)
    return attr.asdict(self, filter=path_filter)
def _actions_from_config(self, config, blockdevs):
    """Convert curtin storage config into action instances.

    curtin represents storage "actions" as defined in
    https://curtin.readthedocs.io/en/latest/topics/storage.html. We
    convert each action (that we know about) into an instance of
    Disk, Partition, RAID, etc (unknown actions, e.g. bcache, are
    just ignored).

    We also filter out anything that can be reached from a currently
    mounted device. The motivation here is only to exclude the media
    subiquity is mounted from, so this might be a bit excessive but
    hey it works.

    Perhaps surprisingly the order of the returned actions matters.
    The devices are presented in the filesystem view in the reverse
    of the order they appear in _actions, which means that e.g. a
    RAID appears higher up the list than the disks is is composed
    of. This is quite important as it makes "unpeeling" existing
    compound structures easy, you just delete the top device until
    you only have disks left.
    """
    byid = {}
    objs = []
    exclusions = set()
    for action in config:
        if action['type'] == 'mount':
            # Anything mounted (and, transitively, everything it depends
            # on — see the closure loop below) is excluded.
            exclusions.add(byid[action['device']])
            continue
        c = _type_to_cls.get(action['type'], None)
        if c is None:
            # Ignore any action we do not know how to process yet
            # (e.g. bcache)
            continue
        # Build constructor kwargs from the attrs fields of the target
        # class, resolving id references via byid.
        kw = {}
        for f in attr.fields(c):
            n = f.name
            if n not in action:
                continue
            v = action[n]
            try:
                if f.metadata.get('ref', False):
                    kw[n] = byid[v]
                elif f.metadata.get('reflist', False):
                    kw[n] = [byid[id] for id in v]
                else:
                    kw[n] = v
            except KeyError:
                # If a dependency of the current action has been
                # ignored, we need to ignore the current action too
                # (e.g. a bcache's filesystem).
                # NOTE(review): this `continue` only skips the current
                # FIELD of the inner loop, not the whole action — confirm
                # whether the missing-kwarg construction below is the
                # intended way the action ends up dropped.
                continue
        if kw['type'] == 'disk':
            path = kw['path']
            kw['info'] = StorageInfo({path: blockdevs[path]})
            kw['preserve'] = True
        obj = byid[action['id']] = c(m=self, **kw)
        if action['type'] == "format":
            obj.volume._original_fs = obj
        objs.append(obj)

    # Expand exclusions to a fixed point over both dependency directions.
    while True:
        next_exclusions = exclusions.copy()
        for e in exclusions:
            next_exclusions.update(itertools.chain(
                dependencies(e), reverse_dependencies(e)))
        if len(exclusions) == len(next_exclusions):
            break
        exclusions = next_exclusions
    log.debug("exclusions %s", {e.id for e in exclusions})
    objs = [o for o in objs if o not in exclusions]

    # Swap partitions without a filesystem get a preserved swap Filesystem.
    for o in objs:
        if o.type == "partition" and o.flag == "swap" and o._fs is None:
            fs = Filesystem(m=self, fstype="swap", volume=o, preserve=True)
            o._original_fs = fs
            objs.append(fs)

    return objs
def get_required_fields(cls):
    """Return the mandatory fields for a resource class.
    """
    required = set()
    # A field with no default (attr.NOTHING) must be supplied by callers.
    for field in attr.fields(cls):
        if field.default is attr.NOTHING:
            required.add(field.name)
    return required
def test_drop_class(self, incl, value):
    """
    Return False on non-included classes and attributes.
    """
    include_filter = include(*incl)
    result = include_filter(fields(C).a, value)
    assert result is False
def fields(cls):
    """Returns a list of object attributes."""
    return [field.name for field in attr.fields(cls)]
def get_resource_backends(cls):
    """Return name to documentation mapping of `cls`s backends.
    """
    # Only fields carrying a "doc" metadata entry count as backends.
    return {
        backend.name: backend.metadata["doc"]
        for backend in attr.fields(cls)
        if "doc" in backend.metadata
    }
def _get_attr_fields(obj: Any) -> Iterable["_attr_module.Attribute[Any]"]:
    """Get fields for an attrs object."""
    # Without the attrs module there are no fields to report.
    if _attr_module is None:
        return []
    return _attr_module.fields(type(obj))
def fields(cls):
    """
    Return a list of field names defined on this model.
    """
    return [attribute.name for attribute in attr.fields(cls)]
def ascsv(self):
    """Render each field as CSV text, using the field's 'ascsv' metadata
    converter when present and value_ascsv otherwise."""
    pairs = zip(attr.fields(self.__class__), attr.astuple(self))
    return [(f.metadata.get('ascsv') or value_ascsv)(v) for f, v in pairs]
def _data_attributes(cls):
    """Return the tuple of attrs Attribute objects declared on *cls*."""
    attributes = attr.fields(cls)
    return attributes
def attr_fill(cls, value):
    """Instantiate *cls* with every init-able attrs field set to *value*."""
    kwargs = {field.name: value for field in attr.fields(cls) if field.init}
    return cls(**kwargs)
def __setitem__(self, item, value):
    """Set a field by name (str key) or by positional index (int key)."""
    if isinstance(item, str):
        setattr(self, item, value)
    else:
        # Integer index: map position -> attrs field name.
        field_name = attr.fields(type(self))[item].name
        setattr(self, field_name, value)
def all_off(cls):
    """
    Instantiate the flags with all options set to off.
    """
    kwargs = {field.name: False for field in attr.fields(cls)}
    return cls(**kwargs)
def test_plugin_loader_annotated_type(loader_module):
    """A loaded plugin produces Content whose content_type matches the
    annotated type recorded on the schema.Content attrs field."""
    assert loader_module.name == 'my_module'
    loaded = loader_module.load()
    assert isinstance(loaded, schema.Content)
    expected_type = attr.fields(schema.Content).content_type.type
    assert isinstance(loaded.content_type, expected_type)
def fields_dict(cls):
    """Map each attrs field name on *cls* to its Attribute object."""
    return {field.name: field for field in attr.fields(cls)}
def _fields_from_attr(cls):
    """Names of attrs fields whose metadata marks them for JSON inclusion."""
    return [
        field.name
        for field in attr.fields(cls)
        # membership in the mapping itself is equivalent to .keys()
        if JsonInclude.THIS in field.metadata
    ]
        # (continuation of a classmethod whose definition starts above this
        # chunk: builds an SOA record from the parsed `parts` mapping)
        return cls(
            Name(parts["mname"]),
            Name(parts["rname"]),
            int(parts["serial"]),
            int(parts["refresh"]),
            int(parts["retry"]),
            int(parts["expire"]),
            int(parts["minimum"]),
        )

    def to_text(self):
        # Render the seven SOA fields space-separated, in RFC order.
        return u"{mname} {rname} {serial} {refresh} {retry} {expire} {minimum}".format(
            **attr.asdict(self, recurse=False))


# Field-name order of the SOA attrs class, used elsewhere for parsing.
_SOA_FIELDS = list(field.name for field in attr.fields(SOA))


@provider(IResourceRecordLoader)
@implementer(IBasicResourceRecord)
@attr.s(frozen=True)
class UnknownRecordType(object):
    # Fallback record type: preserves the raw text value untouched.
    value = attr.ib(validator=validators.instance_of(unicode))

    @classmethod
    def basic_from_element(cls, e):
        # Build from the <Value> child of the XML element.
        return cls(maybe_bytes_to_unicode(e.find("Value").text))

    def to_text(self):
        return unicode(self.value)