def _create_header(self, columns: FieldLike) -> None:
    """(Re)build the table's row dataclass from field-like column descriptors.

    Accepts each column as a bare name, a ``(name, type)`` pair, or a
    ``(name, type, Field-or-dict)`` triple, normalizes them into the field
    specs that ``dataclasses.make_dataclass()`` accepts, then rebuilds
    ``self._Record`` and resets the table's storage containers.
    """
    # Check types of fieldlike column descriptors and convert them to field
    # descriptors, that are accepted by dataclasses.make_dataclass()
    fields: list = []
    for each in columns:
        # Bare string -> untyped field, passed through unchanged
        if isinstance(each, str):
            fields.append(each)
            continue
        check.has_type(f"field {each}", each, tuple)
        check.has_size(f"field {each}", each, min_size=2, max_size=3)
        check.has_type("first arg", each[0], str)
        check.has_type("second arg", each[1], type)
        # (name, type) pair -> already a valid field spec
        if len(each) == 2:
            fields.append(each)
            continue
        check.has_type("third arg", each[2], (Field, dict))
        if isinstance(each[2], Field):
            fields.append(each)
            continue
        # A dict third element is treated as keyword arguments for
        # dataclasses.field()
        field = dataclasses.field(**each[2])
        fields.append(each[:2] + (field,))

    # Create record namespace with table hooks
    namespace = {
        '_create_row_id': self._create_row_id,
        '_delete_hook': self._remove_row_id,
        '_restore_hook': self._append_row_id,
        '_update_hook': self._update_row_diff,
        '_revoke_hook': self._remove_row_diff}

    # Create Record dataclass and constructor
    self._Record = dataclasses.make_dataclass(
        'Row', fields, bases=(Record, ), namespace=namespace)

    # Create slots
    self._Record.__slots__ = ['id', 'state'] + [
        field.name for field in dataclasses.fields(self._Record)]

    # Reset store, diff and index
    self._store = []
    self._diff = []
    self._index = []
def object_type_to_python_type(
        objtype: s_objtypes.ObjectType,
        schema: s_schema.Schema, *,
        base_class: Optional[type] = None,
        _memo: Optional[Dict[s_types.Type, type]] = None) -> type:
    """Recursively map an EdgeDB object type to a generated frozen dataclass.

    Pointers become dataclass fields; object-typed targets recurse (with
    memoization via ``_memo`` to handle shared/recursive references), and
    subclasses of object-typed targets are materialized into a
    ``_subclasses`` list stored on the generated class's namespace.
    """
    if _memo is None:
        _memo = {}
    default: Any
    fields = []
    subclasses = []
    for pn, p in objtype.get_pointers(schema).items(schema):
        str_pn = str(pn)
        # 'id' and '__type__' are implicit pointers; skip them
        if str_pn in ('id', '__type__'):
            continue

        ptype = p.get_target(schema)
        assert ptype is not None

        if isinstance(ptype, s_objtypes.ObjectType):
            pytype = _memo.get(ptype)
            if pytype is None:
                pytype = object_type_to_python_type(
                    ptype, schema, base_class=base_class, _memo=_memo)
                _memo[ptype] = pytype

                for subtype in ptype.children(schema):
                    subclasses.append(
                        object_type_to_python_type(
                            subtype, schema,
                            base_class=pytype, _memo=_memo))
        else:
            pytype = scalar_type_to_python_type(ptype, schema)

        ptr_card = p.get_cardinality(schema)
        is_multi = ptr_card.is_multi()
        if is_multi:
            # Multi pointers are modeled as immutable (hashable) sets
            pytype = FrozenSet[pytype]  # type: ignore

        default = p.get_default(schema)
        if default is None:
            if p.get_required(schema):
                default = dataclasses.MISSING
        else:
            default = qlcompiler.evaluate_to_python_val(
                default.text, schema=schema)
            if is_multi and not isinstance(default, frozenset):
                default = frozenset((default, ))

        constraints = p.get_constraints(schema).objects(schema)
        exclusive = schema.get('std::exclusive', type=s_constr.Constraint)
        # Only exclusive scalar pointers participate in equality/hashing
        unique = (
            not ptype.is_object_type()
            and any(c.issubclass(schema, exclusive) for c in constraints)
        )
        field = dataclasses.field(
            compare=unique,
            hash=unique,
            repr=True,
            default=default,
        )
        fields.append((str_pn, pytype, field))

    bases: Tuple[type, ...]
    if base_class is not None:
        bases = (base_class, )
    else:
        bases = ()

    ptype_dataclass = dataclasses.make_dataclass(
        objtype.get_name(schema).name,
        fields=fields,
        bases=bases,
        frozen=True,
        namespace={'_subclasses': subclasses},
    )
    assert isinstance(ptype_dataclass, type)
    return ptype_dataclass
# NOTE(review): this method belongs to a class whose definition starts above
# this view; it relies on self.unit_price and self.quantity_on_hand.
def total_cost(self, **kwargs: Any) -> float:
    return self.unit_price * self.quantity_on_hand


@dataclass
class DataClassWithFields:
    """ Class for testing @dataclass with fields. """
    myint_plain: int
    myint_field: int = field(repr=False)
    myint_field_default: int = field(repr=False, default=10)
    mylist: list[int] = field(default_factory=list)


""" Test creating a dynamic data class. """
DataClassDynamic = make_dataclass(
    'DataClassDynamic',
    # Mixed field specs: (name, type), bare name, and (name, type, field())
    [("x", int), "y", ("z", int, field(default=5))],
    namespace={'add_one': lambda self: self.x + 1})


@dataclass
class DataClassWithKeywordOnly:
    """ Class for testing @dataclass with KW_ONLY. """
    x: float
    _: KW_ONLY
    y: float
    z: float


@dataclass
class DataClassWithPostInit:
    """ Class for testing @dataclass with __post_init__ """
    # NOTE(review): this class's body continues past this view.
# NOTE(review): this method is the tail of a DummyCollection class whose
# header is above this view.
def __dask_graph__(self):
    return self.dask


x = delayed(1) + 2
assert is_dask_collection(x)
assert not is_dask_collection(2)
assert is_dask_collection(DummyCollection({}))
assert not is_dask_collection(DummyCollection())
# The class object itself is not a collection, only instances are
assert not is_dask_collection(DummyCollection)


try:
    import dataclasses

    # Avoid @dataclass decorator as Python < 3.7 fail to interpret the type hints
    ADataClass = dataclasses.make_dataclass('ADataClass', [('a', int)])
except ImportError:
    dataclasses = None


def test_unpack_collections():
    a = delayed(1) + 5
    b = a + 1
    c = a + 2

    def build(a, b, c, iterator):
        t = (a, b,  # Top-level collections
             {'a': a,  # dict
              a: b,  # collections as keys
              'b': [1, 2, [b]],  # list
              'c': 10,  # other builtins pass through unchanged
              # NOTE(review): this dict literal (and the function) continues
              # past this view.
def wrap(klass):
    """Rebuild *klass* as a dataclass with JSON (de)serialization mixed in.

    Annotated class attributes become dataclass fields; plain defaults are
    wrapped via ``make_field``.  NOTE(review): ``args``/``kwargs`` used at the
    bottom come from an enclosing decorator factory above this view.
    """
    class _DecimalAwareJSONEncoder(JSONEncoder):
        # Serialize Decimal as its string form; everything else falls back
        # to the standard encoder.
        def default(self, o):
            if isinstance(o, Decimal):
                return str(o)
            return super().default(o)

    class _JsonEncodable:
        """Mixin providing dict/JSON round-tripping for the generated class."""

        def as_dict(self):
            return asdict(self)

        def as_json(self):
            return _DecimalAwareJSONEncoder().encode(self.as_dict())

        @classmethod
        def from_json(cls, json_str):
            return cls.from_dict(loads(json_str))

        @classmethod
        def _constructor_from_field_type(cls, field_type):
            # Prefer a from_dict hook, then dataclass kwargs expansion,
            # otherwise call the type directly (e.g. int, str, Decimal).
            if hasattr(field_type, 'from_dict'):
                return field_type.from_dict
            elif is_dataclass(field_type):
                return lambda data: field_type(**data)
            else:
                return field_type

        @classmethod
        def from_dict(cls, dict_data):
            data = {}
            for field in fields(cls):
                datum = dict_data.get(field.name)
                # NOTE(review): truthiness test skips falsy-but-present values
                # (0, "", [], False) and falls through to the defaults below —
                # confirm this is intended.
                if datum:
                    # NOTE(review): issubclass() against typing.List raises
                    # TypeError for parameterized types on modern Pythons —
                    # verify the Python versions this must support.
                    if issubclass(field.type, List):
                        sub_type = field.type.__args__[0]
                        constructor = cls._constructor_from_field_type(
                            sub_type)
                        data[field.name] = [
                            constructor(sub_data) for sub_data in datum
                        ]
                    else:
                        constructor = cls._constructor_from_field_type(
                            field.type)
                        data[field.name] = constructor(datum)
                elif field.default_factory is not MISSING:
                    data[field.name] = field.default_factory()
                elif field.default is not MISSING:
                    data[field.name] = field.default
            return cls(**data)

    # Convert the wrapped class's annotations into make_dataclass field specs
    new_fields = []
    for name, field_type in klass.__annotations__.items():
        try:
            field = getattr(klass, name)
        except AttributeError:
            # Annotation without a value -> field with no default
            new_fields.append((name, field_type))
        else:
            if isinstance(field, Field):
                new_fields.append((name, field_type, field))
            else:
                new_fields.append(
                    (name, field_type, make_field(default=field)))

    return make_dataclass(klass.__name__,
                          fields=new_fields,
                          bases=(_JsonEncodable, klass),
                          namespace=klass.__dict__,
                          *args, **kwargs)
def dataclass_from_schema(self, class_name):
    """Build a dataclass named *class_name* whose fields mirror the
    Postgresql table schema previously captured on this instance."""
    headers = self.__table_headers
    return make_dataclass(class_name, headers)
#is the info toggle box bad? I want it to be in every mode #style and numbers? #the f*****g sand dropping, ew #use pixels #class attribute at 1 #make sure sand never gets stuck #use dx/dy to calculate the "shower effct" of the sand particles ''' import math, copy, random from cmu_112_graphics import * from dataclasses import make_dataclass #testing with radius 10 circles instead of small circles or pixels Sand = make_dataclass('Sand',['tag','x','y','color']) def appStarted(app): app.tag = 0 app.sand = [] def mousePressed(app,event): if inBox(app, event.x, event.y) and notOverlappingAnyOthers(app, event.x, event.y): newSand = Sand(tag = app.tag, x = event.x, y = event.y, color = "black") app.sand.append(newSand) app.tag += 1 def mouseDragged(app,event): if inBox(app, event.x, event.y) and notOverlappingAnyOthers(app, event.x, event.y): newSand = Sand(tag = app.tag, x = event.x, y = event.y, color = "black") app.sand.append(newSand) app.tag += 1
# NOTE(review): this method belongs to a class (with __slots__) whose header
# is above this view.
def __eq__(self, other):
    # Field-wise equality over the slotted attributes.
    return all(
        getattr(self, a) == getattr(other, a) for a in self.__slots__)


# make_dataclass is None-guarded: it is only importable on Python versions
# that ship dataclasses.
if make_dataclass:
    # This decorator-style syntax with typed class attributes
    # prevents the module from loading in pypy3.5:
    #
    # @dataclass
    # class MyDataClass:
    #     a: int
    #     b: str
    #     c: Decimal
    #
    # So I needed to do this instead:
    MyDataClass = make_dataclass("MyDataClass", ["a", "b", "c"])
    MyDataClass.__module__ = __name__


class Colour(Enum):
    RED = 1
    GREEN = 2
    BLUE = 3


class MyDeque(deque):
    pass
    # NOTE(review): tail of a font-discovery function whose definition starts
    # above this view.
    for path in fontpaths:
        fontfiles.update(map(os.path.abspath, list_fonts(path, fontexts)))
    return [fname for fname in fontfiles if os.path.exists(fname)]


FontEntry = dataclasses.make_dataclass('FontEntry', [
    ('fname', str, dataclasses.field(default='')),
    ('name', str, dataclasses.field(default='')),
    ('style', str, dataclasses.field(default='normal')),
    ('variant', str, dataclasses.field(default='normal')),
    ('weight', str, dataclasses.field(default='normal')),
    ('stretch', str, dataclasses.field(default='normal')),
    ('size', str, dataclasses.field(default='medium')),
],
    namespace={
        '__doc__': """
    A class for storing Font properties.

    It is used when populating the font lookup dictionary.
    """
    })


def ttfFontProperty(font):
    """
    Extract information from a TrueType font file.

    Parameters
""" Low-level text helper utilities. """ import dataclasses from . import _api from .ft2font import KERNING_DEFAULT, LOAD_NO_HINTING LayoutItem = dataclasses.make_dataclass( "LayoutItem", ["char", "glyph_idx", "x", "prev_kern"]) def warn_on_missing_glyph(codepoint): _api.warn_external( "Glyph {} ({}) missing from current font.".format( codepoint, chr(codepoint).encode("ascii", "namereplace").decode("ascii"))) block = ("Hebrew" if 0x0590 <= codepoint <= 0x05ff else "Arabic" if 0x0600 <= codepoint <= 0x06ff else "Devanagari" if 0x0900 <= codepoint <= 0x097f else "Bengali" if 0x0980 <= codepoint <= 0x09ff else "Gurmukhi" if 0x0a00 <= codepoint <= 0x0a7f else "Gujarati" if 0x0a80 <= codepoint <= 0x0aff else "Oriya" if 0x0b00 <= codepoint <= 0x0b7f else "Tamil" if 0x0b80 <= codepoint <= 0x0bff else "Telugu" if 0x0c00 <= codepoint <= 0x0c7f else "Kannada" if 0x0c80 <= codepoint <= 0x0cff else "Malayalam" if 0x0d00 <= codepoint <= 0x0d7f else "Sinhala" if 0x0d80 <= codepoint <= 0x0dff else
class creativeSandbox(SandboxingTime):
    #automatically go into this one first
    # Per-grain record used by both the moving and settled sand lists.
    Sand = make_dataclass('Sand',
                          ['tag', 'x', 'y', 'lasty', 'color', 'falling'])
    #the "falling" doesn't do anything, f**k

    def appStarted(mode):
        super().appStarted()
        # Grain radius, derived from the window width.
        mode.grad = mode.width // 75  #~15
        #sand dataclass part
        mode.sand = []
        mode.nonMovingSand = []
        mode.everything = []
        mode.tag = 0
        mode.timerDelay = 0  #ewwww NOTE: ???????
        mode.width10th = mode.width // 10
        mode.height10th = mode.height // 10
        mode.titleSize = mode.width // 25
        # Sandbox rectangle corners (top-left and bottom-right).
        mode.topx = mode.width10th * .5
        mode.topy = mode.height10th * 2
        mode.botx = mode.width10th * 7.5
        mode.boty = mode.height10th * 9.5

    def mousePressed(mode, event):
        super().mousePressed(event)
        x, y = event.x, event.y
        #TODO: THIS SHIT
        # if (super().topx <= x <= super().botx) and (super().topy <= y <= super().boty):
        # if ( (topx + mode.grad <= x <= botx - mode.grad) and (topy + mode.grad <= y <= boty - mode.grad) and creativeSandbox.notOverlappingAnyOthers(mode, event.x, event.y) ):
        if (creativeSandbox.inBox(mode, event.x, event.y)
                and creativeSandbox.notOverlappingAnyOthers(
                    mode, event.x, event.y)):
            newSand = creativeSandbox.Sand(tag=mode.tag,
                                           x=x,
                                           y=y,
                                           lasty=y,
                                           color=mode.sandColor,
                                           falling=True)
            mode.sand.append(newSand)
            mode.tag += 1

    def mouseDragged(mode, event):
        super().mouseDragged(event)
        x, y = event.x, event.y
        # NOTE(review): these locals shadow the values stored on `mode` in
        # appStarted and are unused below.
        width10th = mode.width // 10
        height10th = mode.height // 10
        titleSize = mode.width // 25
        topx = width10th * .5
        topy = height10th * 2
        botx = width10th * 7.5
        boty = height10th * 9.5
        #TODO: THIS SHIT
        # if (super().topx <= x <= super().botx) and (super().topy <= y <= super().boty):
        if (creativeSandbox.inBox(mode, event.x, event.y)
                and creativeSandbox.notOverlappingAnyOthers(
                    mode, event.x, event.y)):
            newSand = creativeSandbox.Sand(tag=mode.tag,
                                           x=x,
                                           y=y,
                                           lasty=y,
                                           color=mode.sandColor,
                                           falling=True)
            mode.sand.append(newSand)
            mode.tag += 1

    def inBox(mode, x, y):
        # True when (x, y) lies inside the sandbox, inset by one grain radius.
        # NOTE(review): implicitly returns None (falsy) when outside.
        if (mode.topx + mode.grad <= x <= mode.botx - mode.grad) and (
                mode.topy + mode.grad <= y <= mode.boty - mode.grad):
            return True

    def notOverlappingAnyOthers(mode, x, y):
        for grains in mode.nonMovingSand:
            # A grain exactly on top of a settled one is treated as "free".
            if grains.x == x and grains.y == y:
                return True  #Jank?
            if creativeSandbox.overlap(mode, grains.x, grains.y, x, y):
                return False
        return True

    def overlap(mode, gx, gy, x, y):
        #TODO: sort this shit out
        # Euclidean distance check against two grain radii.
        tooClose = ((gx - x)**2 + (gy - y)**2)**.5 <= (mode.grad * 2)
        return tooClose

    def moveParticle(mode):
        # Advance falling grains one pixel; grains that can no longer move
        # migrate from mode.sand into mode.nonMovingSand.
        mode.everything = mode.sand + mode.nonMovingSand
        for g in range(len(mode.sand)):
            grains = mode.sand[g]
            if creativeSandbox.inBox(
                    mode, grains.x,
                    grains.y) and creativeSandbox.notOverlappingAnyOthers(
                        mode, grains.x, grains.y):
                grains.y += 1
            else:
                mode.nonMovingSand.append(grains)
        for g in mode.nonMovingSand:
            if g in mode.sand:
                sandIndex = mode.sand.index(g)
                mode.sand.pop(sandIndex)

    def timerFired(mode):
        creativeSandbox.moveParticle(mode)

    def redrawAll(mode, canvas):
        super().redrawAll(canvas)
        for grains in mode.everything:
            canvas.create_oval(grains.x - mode.grad,
                               grains.y - mode.grad,
                               grains.x + mode.grad,
                               grains.y + mode.grad,
                               fill=grains.color,
                               outline=grains.color)
# Field specs appended to the shared BASE_FIELDS/TRANSACTION_FIELDS lists
# (defined above this view) when building the event dataclasses below.
EXPENSE_FIELDS = [
    ('amount', str),
    ('currency', str),
    ('event_type', str, field(default=EventType.EXPENSE.name)),
    ('event_detail', str, field(default=EventDetail.EXPENSE_OTHER.name)),
]

INCOME_FIELDS = [
    ('amount', str),
    ('currency', str),
    ('event_type', str, field(default=EventType.INCOME.name)),
    ('event_detail', str, field(default=EventDetail.INCOME_OTHER.name)),
]

Transaction = make_dataclass("Transaction", BASE_FIELDS + TRANSACTION_FIELDS)
Transfer = make_dataclass("Transfer",
                          BASE_FIELDS + TRANSFER_FIELDS + TRANSACTION_FIELDS)
Expense = make_dataclass("Expense",
                         BASE_FIELDS + EXPENSE_FIELDS + TRANSACTION_FIELDS)
Income = make_dataclass("Income",
                        BASE_FIELDS + INCOME_FIELDS + TRANSACTION_FIELDS)


def add_trade(self: "Trade", quantity: Decimal, price: Decimal,
              fees: Decimal) -> None:
    """Modifies an instance in place, adding a trade in such a way that
    price*quantity = stays correct even for executions at different prices.
    IE, the weighted average price of the execution.
    """
    self.fees += fees
    # NOTE(review): this function appears to continue past this view.
    # NOTE(review): tail of an assert begun above this view.
    datetime.timedelta(days=1))
assert tokenize(datetime.timedelta(days=1)) != tokenize(
    datetime.timedelta(days=2))


@pytest.mark.parametrize("enum_type", [Enum, IntEnum, IntFlag, Flag])
def test_tokenize_enum(enum_type):
    # Tokens must be stable for the same member and differ across members.
    class Color(enum_type):
        RED = 1
        BLUE = 2

    assert tokenize(Color.RED) == tokenize(Color.RED)
    assert tokenize(Color.RED) != tokenize(Color.BLUE)


ADataClass = dataclasses.make_dataclass("ADataClass", [("a", int)])
BDataClass = dataclasses.make_dataclass(
    "BDataClass", [("a", Union[int, float])])  # type: ignore


def test_tokenize_dataclass():
    a1 = ADataClass(1)
    a2 = ADataClass(2)
    assert tokenize(a1) == tokenize(a1)
    assert tokenize(a1) != tokenize(a2)

    # Same field names and values, but dataclass types are different
    b1 = BDataClass(1)
    assert tokenize(a1) != tokenize(b1)


class SubA(ADataClass):
    # NOTE(review): this class's body continues past this view.
def dataclass(self, validators=None, widgets=None):
    """Generate a schema dataclass from this entity's attributes and
    relationships.

    ``validators``/``widgets`` are optional per-field-name mappings merged
    into each field's metadata.  Primary-key fields are collected into
    ``__unique_constraint__`` on the generated class.
    """
    validators = validators or {}
    widgets = widgets or {}
    attrs = []
    primary_key = []
    # UUIDs of relationships that are referenced back from other entities;
    # such attributes get an index.
    brels = [
        b["reference_relationship_uuid"]
        for b in self.backrelationships().values()
    ]
    for k, attr in self.attributes().items():
        metadata = attr.field_metadata()
        if attr.uuid in brels:
            metadata["index"] = True
        name = attr["name"]
        if validators.get(name, []):
            metadata.setdefault("validators", [])
            metadata["validators"] += validators[name]
        # NOTE(review): lookup key is `k` but the widget fetched is
        # `widgets[name]` — unlike the relationship loop below which uses
        # `name` for both. Confirm this asymmetry is intended.
        if widgets.get(k, None):
            metadata["deform.widget"] = widgets[name]
        attrs.append((attr["name"], attr.datatype(),
                      field(default=None, metadata=metadata)))
        if attr["primary_key"]:
            primary_key.append(attr["name"])
    for r, rel in self.relationships().items():
        refsearch = rel.reference_search_attribute()
        ref = rel.reference_attribute()
        dm = ref.entity()
        if refsearch:
            # refsearch field and ref field must come from the same entity
            assert dm["uuid"] == refsearch.entity()["uuid"]
        metadata = {
            "required": rel["required"],
            "title": rel["title"],
            "description": rel["description"],
            "validators": [],
            "index": True,
        }
        name = rel["name"]
        if validators.get(name, []):
            metadata.setdefault("validators", [])
            metadata["validators"] += validators[name]
        if widgets.get(name, None):
            metadata["deform.widget"] = widgets[name]
        attrs.append((rel["name"], rel.datatype(),
                      field(default=None, metadata=metadata)))
        if rel["primary_key"]:
            primary_key.append(rel["name"])
    name = self["name"] or "Model"
    # Behavior schemas come first in the MRO, then the morpfw base schema.
    bases = []
    for behavior in self.behaviors():
        bases.append(behavior.schema)
    bases.append(morpfw.Schema)
    dc = make_dataclass(name, fields=attrs, bases=tuple(bases))
    if primary_key:
        dc.__unique_constraint__ = tuple(primary_key)
    if not self["allow_invalid"]:
        dc.__validators__ = [
            ev.schema_validator() for ev in self.entity_validators()
        ]
    return dc
class Meta:
    """Configuration holder exposing a dynamically built Chat model.

    NOTE(review): the enclosing factory/serializer class that consumes this
    ``Meta`` is outside this view.
    """
    model = dataclasses.make_dataclass(
        'Chat', ['id', 'message_id', 'message', 'message_date', 'username'])
def spawn_class(class_name: str,
                fields: List[Tuple[Any, Any, Optional[Any]]]) -> type:
    """Create a dataclass named *class_name* from *fields*, register it in
    this module's globals, and return it.

    Fix: the original annotated the return type as ``None`` even though the
    function returns the newly created class; the annotation now matches the
    actual behavior (callers are unaffected).

    :param class_name: name for the generated dataclass.
    :param fields: field specs accepted by ``dataclasses.make_dataclass``
        (bare names, ``(name, type)`` pairs, or ``(name, type, field)``).
    :return: the newly created dataclass.
    """
    the_class = make_dataclass(class_name, fields)
    # Make the class importable from this module by name.
    globals()[class_name] = the_class
    return the_class
def as_dataclass(solution):
    """Copy *solution*'s instance attributes into a freshly generated
    ``Solution`` dataclass instance and return it."""
    attrs = vars(solution)
    solution_cls = make_dataclass("Solution", list(attrs))
    return solution_cls(**attrs)
def test_user_id_specification():
    """A UserIdSpecification is satisfied by records whose user_id matches."""
    spec = UserIdSpecification("abc")
    record_cls = make_dataclass("DC", [("user_id", int)])
    record = record_cls(user_id="abc")
    assert spec.is_satisfied_by(record)
    # NOTE(review): tail of a direction-scattering function whose definition
    # starts above this view.
    rnd1 = np.random.normal(0, np.pi / 20)
    rnd2 = np.random.normal(0, np.pi / 20)
    pos.azimuth = pos.azimuth + rnd1
    pos.zenith = pos.zenith + rnd2
    return pp.Cartesian3D(pos)


# Record describing one energy loss: energy, 3D position, time, and type.
Loss = make_dataclass(
    "Loss",
    [
        ("E", np.float32),
        ("x", np.float32),
        ("y", np.float32),
        ("z", np.float32),
        ("time", np.float32),
        ("type", int),
    ],
)


def get_initial_energy():
    # Sample an initial energy from an exponential distribution.
    return expon.rvs(loc=1e5, scale=1.5e5)


def produce_losses():
    # Set up a MuMinus particle state for propagation.
    state = pp.particle.ParticleState()
    state.type = pp.particle.Particle_Type.MuMinus
    state.energy = get_initial_energy()
    # NOTE(review): this function continues past this view.
def complex_serialization(use_pickle):
    """Round-trip a large battery of Python objects through Ray serialization
    and assert they come back structurally equal."""

    def assert_equal(obj1, obj2):
        # Deep structural equality with special handling for numpy arrays,
        # plain objects (__dict__), dicts, lists, tuples and namedtuples.
        module_numpy = (type(obj1).__module__ == np.__name__
                        or type(obj2).__module__ == np.__name__)
        if module_numpy:
            empty_shape = ((hasattr(obj1, "shape") and obj1.shape == ())
                           or (hasattr(obj2, "shape") and obj2.shape == ()))
            if empty_shape:
                # This is a special case because currently
                # np.testing.assert_equal fails because we do not properly
                # handle different numerical types.
                assert obj1 == obj2, ("Objects {} and {} are "
                                      "different.".format(obj1, obj2))
            else:
                np.testing.assert_equal(obj1, obj2)
        elif hasattr(obj1, "__dict__") and hasattr(obj2, "__dict__"):
            special_keys = ["_pytype_"]
            assert (set(list(obj1.__dict__.keys()) + special_keys) == set(
                list(obj2.__dict__.keys()) + special_keys)), (
                    "Objects {} and {} are different.".format(obj1, obj2))
            for key in obj1.__dict__.keys():
                if key not in special_keys:
                    assert_equal(obj1.__dict__[key], obj2.__dict__[key])
        elif type(obj1) is dict or type(obj2) is dict:
            assert_equal(obj1.keys(), obj2.keys())
            for key in obj1.keys():
                assert_equal(obj1[key], obj2[key])
        elif type(obj1) is list or type(obj2) is list:
            assert len(obj1) == len(obj2), ("Objects {} and {} are lists with "
                                            "different lengths.".format(
                                                obj1, obj2))
            for i in range(len(obj1)):
                assert_equal(obj1[i], obj2[i])
        elif type(obj1) is tuple or type(obj2) is tuple:
            assert len(obj1) == len(obj2), ("Objects {} and {} are tuples "
                                            "with different lengths.".format(
                                                obj1, obj2))
            for i in range(len(obj1)):
                assert_equal(obj1[i], obj2[i])
        elif (ray.serialization.is_named_tuple(type(obj1))
              or ray.serialization.is_named_tuple(type(obj2))):
            assert len(obj1) == len(obj2), (
                "Objects {} and {} are named "
                "tuples with different lengths.".format(obj1, obj2))
            for i in range(len(obj1)):
                assert_equal(obj1[i], obj2[i])
        else:
            assert obj1 == obj2, "Objects {} and {} are different.".format(
                obj1, obj2)

    long_extras = [0, np.array([["hi", u"hi"], [1.3, 1]])]

    PRIMITIVE_OBJECTS = [
        0, 0.0, 0.9, 1 << 62, 1 << 100, 1 << 999,
        [1 << 100, [1 << 100]], "a", string.printable, "\u262F",
        u"hello world", u"\xff\xfe\x9c\x001\x000\x00", None, True, False, [],
        (), {},
        np.int8(3),
        np.int32(4),
        np.int64(5),
        np.uint8(3),
        np.uint32(4),
        np.uint64(5),
        np.float32(1.9),
        np.float64(1.9),
        np.zeros([100, 100]),
        np.random.normal(size=[100, 100]),
        np.array(["hi", 3]),
        np.array(["hi", 3], dtype=object)
    ] + long_extras

    COMPLEX_OBJECTS = [
        [[[[[[[[[[[[]]]]]]]]]]]],
        {
            "obj{}".format(i): np.random.normal(size=[100, 100])
            for i in range(10)
        },
        # {(): {(): {(): {(): {(): {(): {(): {(): {(): {(): {
        #      (): {(): {}}}}}}}}}}}}},
        (
            (((((((((), ), ), ), ), ), ), ), ), ),
        {
            "a": {
                "b": {
                    "c": {
                        "d": {}
                    }
                }
            }
        },
    ]

    class Foo:
        def __init__(self, value=0):
            self.value = value

        def __hash__(self):
            return hash(self.value)

        def __eq__(self, other):
            return other.value == self.value

    class Bar:
        def __init__(self):
            # One attribute per primitive/complex object.
            for i, val in enumerate(PRIMITIVE_OBJECTS + COMPLEX_OBJECTS):
                setattr(self, "field{}".format(i), val)

    class Baz:
        def __init__(self):
            self.foo = Foo()
            self.bar = Bar()

        def method(self, arg):
            pass

    class Qux:
        def __init__(self):
            self.objs = [Foo(), Bar(), Baz()]

    class SubQux(Qux):
        def __init__(self):
            Qux.__init__(self)

    class CustomError(Exception):
        pass

    Point = collections.namedtuple("Point", ["x", "y"])
    NamedTupleExample = collections.namedtuple(
        "Example", "field1, field2, field3, field4, field5")

    CUSTOM_OBJECTS = [
        Exception("Test object."),
        CustomError(),
        Point(11, y=22),
        Foo(),
        Bar(),
        Baz(),  # Qux(), SubQux(),
        NamedTupleExample(1, 1.0, "hi", np.zeros([3, 5]), [1, 2, 3]),
    ]

    # Test dataclasses in Python 3.7.
    if sys.version_info >= (3, 7):
        from dataclasses import make_dataclass

        DataClass0 = make_dataclass("DataClass0", [("number", int)])

        CUSTOM_OBJECTS.append(DataClass0(number=3))

        class CustomClass:
            def __init__(self, value):
                self.value = value

        DataClass1 = make_dataclass("DataClass1", [("custom", CustomClass)])

        class DataClass2(DataClass1):
            @classmethod
            def from_custom(cls, data):
                custom = CustomClass(data)
                return cls(custom)

            def __reduce__(self):
                # Custom pickling path through the from_custom constructor.
                return (self.from_custom, (self.custom.value, ))

        CUSTOM_OBJECTS.append(DataClass2(custom=CustomClass(43)))

    BASE_OBJECTS = PRIMITIVE_OBJECTS + COMPLEX_OBJECTS + CUSTOM_OBJECTS

    LIST_OBJECTS = [[obj] for obj in BASE_OBJECTS]
    TUPLE_OBJECTS = [(obj, ) for obj in BASE_OBJECTS]

    # The check that type(obj).__module__ != "numpy" should be unnecessary, but
    # otherwise this seems to fail on Mac OS X on Travis.
    DICT_OBJECTS = ([{
        obj: obj
    } for obj in PRIMITIVE_OBJECTS if (
        obj.__hash__ is not None and type(obj).__module__ != "numpy")] + [{
            0: obj
        } for obj in BASE_OBJECTS] + [{
            Foo(123): Foo(456)
        }])

    RAY_TEST_OBJECTS = (
        BASE_OBJECTS + LIST_OBJECTS + TUPLE_OBJECTS + DICT_OBJECTS)

    @ray.remote
    def f(x):
        return x

    # Check that we can pass arguments by value to remote functions and
    # that they are uncorrupted.
    for obj in RAY_TEST_OBJECTS:
        assert_equal(obj, ray.get(f.remote(obj)))
        assert_equal(obj, ray.get(ray.put(obj)))

    # Test StringIO serialization
    s = io.StringIO(u"Hello, world!\n")
    s.seek(0)
    line = s.readline()
    s.seek(0)
    assert ray.get(ray.put(s)).readline() == line
def test_id_specification():
    """An IdSpecification is satisfied by records whose id matches."""
    spec = IdSpecification(1)
    record_cls = make_dataclass("DC", [("id", int)])
    record = record_cls(id=1)
    assert spec.is_satisfied_by(record)
from dataclasses import dataclass, make_dataclass
from typing import Iterator, NamedTuple, Tuple, Union


class InitialValue(NamedTuple):
    """A starting value for a state variable, paired with its declared type."""
    value: object
    value_type: type


# Type aliases describing simulation configuration shapes.
InitialState = dict[str, InitialValue]
Param = InitialValue
ParamSweep = Tuple[list[object], type]
Parameters = dict[str, Union[Param, ParamSweep]]

initial_state = {
    'prey_count': InitialValue(100.0, float),
    'predator_count': InitialValue(10.0, float)
}


def process_initial_state(
        initial_state: dict[str, InitialValue]
) -> Iterator[tuple[str, type]]:
    """Yield ``(name, type)`` field specs for each state variable.

    Fix: this is a generator, but the original annotated its return type as
    ``tuple[str, object]``; the annotation now reflects the iterator of
    ``(name, type)`` pairs it actually produces.
    """
    for key, initial_value in initial_state.items():
        yield (key, initial_value.value_type)


# Equivalent inline generator used to feed make_dataclass below.
gen_expr = ((k, v[1]) for k, v in initial_state.items())

# Dataclass with one typed field per state variable.
State = make_dataclass('State', gen_expr)
# NOTE(review): this method is the tail of a DummyCollection class whose
# header is above this view.
def __dask_graph__(self):
    return self.dask


x = delayed(1) + 2
assert is_dask_collection(x)
assert not is_dask_collection(2)
assert is_dask_collection(DummyCollection({}))
assert not is_dask_collection(DummyCollection())
# The class object itself is not a collection, only instances are
assert not is_dask_collection(DummyCollection)


try:
    import dataclasses

    # Avoid @dataclass decorator as Python < 3.7 fail to interpret the type hints
    ADataClass = dataclasses.make_dataclass("ADataClass", [("a", int)])
except ImportError:
    dataclasses = None


def test_unpack_collections():
    a = delayed(1) + 5
    b = a + 1
    c = a + 2

    def build(a, b, c, iterator):
        t = (
            a,
            b,  # Top-level collections
            {
                "a": a,  # dict
                # NOTE(review): this dict literal (and the function) continues
                # past this view.
def mock_TriggerConfig():
    """Yield a patched stand-in for the SHFQA sweeper's TriggerConfig.

    NOTE(review): the ``@pytest.fixture`` decorator presumably sits above
    this view — confirm against the surrounding file.
    """
    with patch(
        "zhinst.toolkit.driver.modules.shfqa_sweeper.TriggerConfig",
        # make_dataclass 3-tuples normally carry a dataclasses.field() as
        # the third item; a plain value (0 here) lands in the class
        # namespace and acts as the default for "s".
        make_dataclass("Y", fields=[("s", str, 0)]),
    ) as trigger_config:
        yield trigger_config
def object_type_to_python_type(
        objtype: s_types.Type,
        schema: s_schema.Schema, *,
        base_class: typing.Optional[type] = None,
        _memo: typing.Optional[typing.Mapping[s_types.Type, type]] = None
) -> type:
    """Recursively map a schema object type to a generated frozen dataclass.

    Pointers become dataclass fields; object-typed targets recurse with
    memoization via ``_memo``, and subclasses of object-typed targets are
    collected into the generated class's ``_subclasses`` namespace entry.
    """
    if _memo is None:
        _memo = {}

    fields = []
    subclasses = []

    for pn, p in objtype.get_pointers(schema).items(schema):
        # 'id' and '__type__' are implicit pointers; skip them
        if pn in ('id', '__type__'):
            continue

        ptype = p.get_target(schema)

        if ptype.is_object_type():
            pytype = _memo.get(ptype)
            if pytype is None:
                pytype = object_type_to_python_type(
                    ptype, schema, base_class=base_class, _memo=_memo)
                _memo[ptype] = pytype

                for subtype in ptype.children(schema):
                    subclasses.append(
                        object_type_to_python_type(
                            subtype, schema, base_class=pytype, _memo=_memo))
        else:
            pytype = scalar_type_to_python_type(ptype, schema)

        is_multi = p.get_cardinality(schema) is qltypes.Cardinality.MANY
        if is_multi:
            # Multi pointers are modeled as immutable (hashable) sets
            pytype = typing.FrozenSet[pytype]

        default = p.get_default(schema)
        if default is None:
            if p.get_required(schema):
                default = dataclasses.MISSING
        else:
            default = ql_compiler.evaluate_to_python_val(
                default.text, schema=schema)
            if is_multi and not isinstance(default, frozenset):
                default = frozenset((default, ))

        constraints = p.get_constraints(schema).objects(schema)
        exclusive = schema.get('std::exclusive')
        # Only exclusive scalar pointers participate in equality/hashing
        unique = (not ptype.is_object_type() and
                  any(c.issubclass(schema, exclusive) for c in constraints))
        field = dataclasses.field(
            compare=unique,
            hash=unique,
            repr=True,
            default=default,
        )
        fields.append((pn, pytype, field))

    return dataclasses.make_dataclass(
        objtype.get_name(schema).name,
        fields=fields,
        bases=(base_class, ) if base_class is not None else (),
        frozen=True,
        namespace={'_subclasses': subclasses},
    )
def test_create_update(self):
    """Serializer create()/update() must rebuild dataclass instances from
    their dict representations, across all nesting shapes."""

    def check(dataclass, representation, instance):
        # update() is exercised against a copy with every field nulled out,
        # so it must restore the full instance from the representation.
        empty_instance = copy.deepcopy(instance)
        for field in dataclasses.fields(empty_instance):
            setattr(empty_instance, field.name, None)

        serializer = self.create_serializer(dataclass)
        self.assertEqual(serializer.create(representation), instance)
        self.assertEqual(serializer.update(empty_instance, representation),
                         instance)

    # simple dataclass with a single field
    simple = dataclasses.make_dataclass('child', [('value', str)])
    check(simple, {'value': 'A'}, simple('A'))

    # nested dataclass
    parent = dataclasses.make_dataclass('parent', [('field', simple)])
    check(parent, {'field': {'value': 'A'}}, parent(simple('A')))

    # a nested dataclass that's optional
    optional = dataclasses.make_dataclass(
        'optional', [('field', typing.Optional[simple])])
    check(optional, {'field': {'value': 'A'}}, optional(simple('A')))
    check(optional, {'field': None}, optional(None))

    # we check all possible kinds of (optionally) nested dataclasses in lists
    # and dictionaries here, as historically this code has given us quite
    # some problems (#13, #15).

    # a list of nested dataclasses
    listvalue = dataclasses.make_dataclass(
        'listvalue', [('field', typing.Iterable[simple])])
    check(listvalue, {'field': []}, listvalue([]))
    check(listvalue, {'field': [{'value': 'A'}]}, listvalue([simple('A')]))

    # an optional list of nested dataclasses
    optionallist = dataclasses.make_dataclass(
        'optionallist', [('field', typing.Optional[typing.List[simple]])])
    check(optionallist, {'field': None}, optionallist(None))
    check(optionallist, {'field': []}, optionallist([]))
    check(optionallist, {'field': [{
        'value': 'A'
    }]}, optionallist([simple('A')]))

    # a list of optional nested dataclasses
    listoptional = dataclasses.make_dataclass(
        'listoptional', [('field', typing.List[typing.Optional[simple]])])
    check(listoptional, {'field': []}, listoptional([]))
    check(listoptional, {'field': [None]}, listoptional([None]))
    check(listoptional, {'field': [{
        'value': 'A'
    }]}, listoptional([simple('A')]))

    # a dictionary of nested dataclasses
    dictvalue = dataclasses.make_dataclass(
        'dictvalue', [('field', typing.Mapping[str, simple])])
    check(dictvalue, {'field': {}}, dictvalue({}))
    check(dictvalue, {'field': {
        'K': {
            'value': 'A'
        }
    }}, dictvalue({'K': simple('A')}))

    # an optional dictionary of nested dataclasses
    optionaldict = dataclasses.make_dataclass(
        'optionaldict',
        [('field', typing.Optional[typing.Dict[str, simple]])])
    check(optionaldict, {'field': None}, optionaldict(None))
    check(optionaldict, {'field': {}}, optionaldict({}))
    check(optionaldict, {'field': {
        'K': {
            'value': 'A'
        }
    }}, optionaldict({'K': simple('A')}))

    # a dictionary of optional nested dataclasses
    dictoptional = dataclasses.make_dataclass(
        'dictoptional',
        [('field', typing.Dict[str, typing.Optional[simple]])])
    check(dictoptional, {'field': {}}, dictoptional({}))
    check(dictoptional, {'field': {'K': None}}, dictoptional({'K': None}))
    check(dictoptional, {'field': {
        'K': {
            'value': 'A'
        }
    }}, dictoptional({'K': simple('A')}))
    # NOTE(review): trailing fields of a dataclass (PlayingCard, presumably)
    # whose header is above this view.
    rank: str
    suit: str


@dataclass
class Desk:
    cards: List[PlayingCard]


@dataclass
class WithoutExplicitTypes:
    name: Any
    value: Any = 42


# Dynamically created variant of the Position class.
# NOTE(review): immediately shadowed by the decorator-based definition below —
# apparently intentional in this tutorial-style code, but confirm.
Position = make_dataclass('Position', ['name', 'lat', 'lon'])


@dataclass
class Position:
    name: str
    lon: float = 0.0
    lat: float = 0.0

    def distance_to(self, other):
        # Haversine great-circle distance; r is the Earth radius in km.
        r = 6371
        lam_1, lam_2 = radians(self.lon), radians(other.lon)
        phi_1, phi_2 = radians(self.lat), radians(other.lat)
        h = (sin((phi_2 - phi_1) / 2)**2 +
             cos(phi_1) * cos(phi_2) * sin((lam_2 - lam_1) / 2)**2)
        return 2 * r * asin(sqrt(h))
"pS3", "ppS2", "ppS3", "ppS2_ppS2_S4", "ppS2_ppS2_ppS3", "ppS2_ppS3_ppS3", "ppS3_ppS3_S4", "ppS2_ppS3_S4", "ppS3_S4_S4", "ppS2_S4_S4", "gene", ] NUM: int = len(NAMES) Species = make_dataclass( cls_name="Species", fields=[(name, int) for name in NAMES], namespace={ "NAMES": NAMES, "NUM": NUM }, frozen=True, ) name2idx: Dict[str, int] = {k: v for v, k in enumerate(NAMES)} V = Species(**name2idx) del name2idx