def AllSubclassesOfClass(cls: typing.Type) -> typing.Set[typing.Type]:
  """Return the set of subclasses of a base class.

  This recursively returns all nested subclasses of a base class.

  Example:
    >>> class A(object): pass
    >>> class B(A): pass
    >>> class C(B): pass
    >>> AllSubclassesOfClass(A)
    {B, C}

  Args:
    cls: The class to return subclasses of.

  Returns:
    A set of class types.
  """
  # The original text contained unresolved merge-conflict markers
  # (<<<<<<< / ======= / >>>>>>>) inside this expression, which is a
  # syntax error; this is the intended, conflict-free implementation.
  return set(cls.__subclasses__()).union(
    [s for c in cls.__subclasses__() for s in AllSubclassesOfClass(c)],
  )
def find_all_subclasses(cls: tp.Type) -> tp.List[tp.Type]:
    """Recursively collect every subclass of *cls*.

    Direct subclasses come first, followed by their own subclasses in
    depth-first order.

    :param cls: class to find the subclasses of
    :return: list of subclasses of the given class
    """
    direct = list(cls.__subclasses__())
    nested = []
    for child in direct:
        nested.extend(find_all_subclasses(child))
    return direct + nested
def SetProtos(
    self,
    input_proto: pbutil.ProtocolBuffer,
    output_proto_class: typing.Type,
) -> None:
  """Set the input protocol buffer, and decode the output protocol buffer.

  This is performed by the SetProtos() method (rather than during Run()) so
  that when pickled, this class contains only basic types, not protocol
  buffer instances.

  Args:
    input_proto: The input protocol buffer message.
    output_proto_class: The protocol buffer class of the output message.
  """
  # Guard against double invocation: the decode below consumes state.
  assert not self._done
  self._done = True
  self._input_proto = input_proto
  # Only parse the output if the worker completed successfully.
  if not self._returncode:
    # Be careful that errors during protocol buffer decoding (e.g.
    # unrecognized fields, conflicting field type/tags) are silently ignored
    # here.
    self._output_proto = output_proto_class.FromString(
      self._output_proto_string,
    )
    # No need to hand onto the string message any more.
    del self._output_proto_string
def jitclass(cls: typing.Type) -> typing.Type:
    """Wrap *cls* as a numba jitclass, deriving the member spec from the
    class annotations and folding class-level default values into a
    generated ``__init__``.

    Returns the class unchanged when numba's JIT is disabled.
    """
    # With JIT disabled, act as a no-op decorator.
    if numba.config.DISABLE_JIT:
        return cls
    # print()
    # print(cls)
    # The jitclass spec comes from the class annotations, if any exist.
    try:
        annos = cls.__annotations__
    except AttributeError:
        spec = {}
    else:
        spec = dict(annos)
        # The annotations are consumed here; drop them from the class.
        del cls.__annotations__
    # Record the spec in the module-level registry, keyed by qualname.
    _annotations[cls.__qualname__] = spec
    # Collect class-level default values for annotated fields. They must be
    # removed from the class (numba forbids them) and re-assigned inside the
    # generated __init__ instead.
    values = {}
    for name, typ in spec.items():
        try:
            value = getattr(cls, name)
        except AttributeError:
            pass
        else:
            values[name] = value
            delattr(cls, name)
        # Nested jitclass members must be declared via their instance type.
        if isinstance(typ, JitClassType):
            spec[name] = typ.class_type.instance_type
    if values:
        # Generate source for a replacement __init__ that assigns the
        # collected defaults and then, if one exists, calls the original
        # (njit-compiled) __init__ with the original parameters.
        sep = "\n "
        body = ""
        init_locals = {}
        for name, value in values.items():
            newname = f"__init__local__{name}"
            body += sep + f"self.{name} = {newname}"
            init_locals[newname] = value
        if isinstance(cls.__init__, type(object.__init__)):
            # No user-defined __init__ (it is object's slot wrapper):
            # the generated __init__ only assigns defaults.
            cls__init__sig = "(self)"
        else:
            init_locals["__cls__init__"] = njit(cls.__init__)
            signature = inspect.signature(cls.__init__)
            param_names = ", ".join(signature.parameters)
            cls__init__sig = f"({param_names})"
            body += sep + f"__cls__init__{cls__init__sig}"
        code = f"def __jitcls__init__{cls__init__sig}:{body}"
        # print(code)
        # print()
        exec(code, init_locals)
        cls.__init__ = init_locals["__jitcls__init__"]
    return _jitclass(spec)(cls)
def spelunk(c: typ.Type):
    """Yield every not-yet-seen subclass of *c*, depth-first.

    Relies on the surrounding scope's ``unique_seen`` set to avoid
    yielding the same class twice across calls.
    """
    for child in c.__subclasses__():
        yield from spelunk(child)
        if child in unique_seen:
            continue
        yield child
        unique_seen.add(child)
def delete(self, object_id: str, dependent_id: Optional[str] = None) -> ResponseType:
    """Soft-delete the request's object by stamping ``deleted_at``."""
    target = getattr(request, Type().type)
    target.deleted_at = datetime.datetime.utcnow()
    request.session.commit()
    return {"message": "success"}, 200
def _get_element_class(vector_class: typing.Type) -> typing.Type:
    """Extract the element type from a subscripted vector type.

    Reads the second entry of the typing substitution tree; a tuple entry
    is itself a parameterised generic, so it is re-subscripted from its
    origin and arguments.  # NOTE(review): relies on the legacy
    # `_subs_tree()` typing internal — verify against the Python version in use.
    """
    subs = vector_class._subs_tree()
    element_class = subs[1]
    if isinstance(element_class, tuple):
        origin, args = element_class[0], element_class[1:]
        element_class = origin[args]
    return element_class
def _prepare_path( cls: typing.Type, resource_name: str, resource_id: typing.Optional[int] = None, nested_resource_name: typing.Optional[str] = None, query_params: typing.Optional[dict] = None, ) -> str: """Prepare the url path. Args: resource_name: The resource being accessed resource_id: The id of the resource. Defaults to None. query_params: the key value pairs for the query params. Returns: The request path. """ query_string = cls._get_query_string(query_params=query_params) path = f"/{resource_name}" if resource_id is not None: path = f"{path}/{resource_id}" if nested_resource_name is not None: path = f"{path}/{nested_resource_name}" path = f"{path}{query_string}" return path
def testAlgorithm(algorithm: Algorithm,
                  test_set_generator: TestSetGenerator,
                  test_set_result_handler_class: Type[TestSetResultHandler] = BasicTestResultHandler,
                  output_handlers: List[OutputHandler] = None):
    """
    Uses the framework to test the given algorithm against the test set
    generated by the provided generator

    params:
        algorithm: An Algorithm that takes an input and responds with an output
        test_set_generator: Instance of TestSetGenerator that creates the test cases
        test_set_result_handler_class: subclass of TestSetResultHandler, specifies how
            the results of the test runs will be compiled
        output_handlers: List of Instances of Subclasses of OutputHandler, each one
            will be passed the full results for storage / output.
            Defaults to a single LogHandler.
    """
    # Fix 1: the annotation previously read Type(TestSetResultHandler), which
    # *calls* the typing special form and raises TypeError at import time;
    # subscription Type[...] is the correct spelling.
    # Fix 2: the default was the mutable list [LogHandler()], shared across
    # every call; build a fresh default per call instead.
    if output_handlers is None:
        output_handlers = [LogHandler()]
    test_set = test_set_generator.generate()
    test_set.algorithm = algorithm
    test_set_result_handler = test_set_result_handler_class(test_set)
    for test in test_set.tests:
        test_output = algorithm.run(test)
        test_set_result_handler.addOutput(test_output)
    test_set_result_handler.calculateResults()
    for output_handler in output_handlers:
        output_handler.handle(test_set_result_handler.test_set_result)
def _deserialize_record(record_class: typing.Type, data: bytes, data_offset: int) -> typing.Tuple[typing.Any, int]: if record_class is type(None): record = None elif hasattr(record_class, "deserialize"): record, data_offset = record_class.deserialize(data, data_offset) else: field_values = [] for field_name, field_class in record_class._field_types.items(): field_value, data_offset = _deserialize_value( field_class, data, data_offset) field_values.append(field_value) record = record_class._make(field_values) return record, data_offset
def AllSubclassesOfClass(cls: typing.Type) -> typing.Set[typing.Type]:
  """Return the set of subclasses of a base class.

  This recursively returns all nested subclasses of a base class.

  Example:
    >>> class A(object): pass
    >>> class B(A): pass
    >>> class C(B): pass
    >>> AllSubclassesOfClass(A)
    {B, C}

  Args:
    cls: The class to return subclasses of.

  Returns:
    A set of class types.
  """
  # Iterative breadth of the subclass tree via an explicit work list.
  found = set()
  pending = list(cls.__subclasses__())
  while pending:
    subclass = pending.pop()
    found.add(subclass)
    pending.extend(subclass.__subclasses__())
  return found
def get_local_subclasses(module, parent_class: typing.Type) -> typing.Sequence:
    """Collect the concrete direct subclasses of *parent_class* found in *module*.

    Abstract classes are skipped. Returns an empty list when *parent_class*
    does not expose ``__subclasses__`` (i.e. is not a class).
    """
    if not hasattr(parent_class, "__subclasses__"):
        return []
    direct_subclasses = parent_class.__subclasses__()
    found = []
    for _name, member in inspect.getmembers(module):
        if not inspect.isclass(member):
            continue
        if inspect.isabstract(member):
            continue
        if member in direct_subclasses:
            found.append(member)
    return found
def patch(self, object_id: str, dependent_id: Optional[str] = None) -> ResponseType:
    """Partially update the request's object with the validated form fields."""
    form = Form(request.get_json() or {}, is_update=True)
    if not form.validate():
        return {"message": "invalid data", "errors": form.errors}, 400
    target = getattr(request, Type().type)
    for field_name, new_value in form.valid_data.items():
        setattr(target, field_name, new_value)
    request.session.commit()
    return target.export(), 200
def find(cls: typing.Type, name: str) -> "MarketingList":
    """Get the list with the given name.

    Args:
        name: The name of the list to find

    Returns:
        The list with the given name.

    Raises:
        Http404: If no list with the given name exists.
    """
    matches = cls.filter({"filters[name]": name})
    for match in matches:
        return match
    raise Http404
def find(cls: typing.Type, tag_name: str) -> "Tag":
    """Get the first tag with the given name.

    Args:
        tag_name: The name of the tag to find

    Returns:
        The tag with the given name.

    Raises:
        Http404: If no matching tag exists.
    """
    matches = cls.filter({"search": tag_name})
    for match in matches:
        return match
    raise Http404
def filter(self, get_type: Type[Base], expr: str) -> List[Base]:
    """Retrieves the elements of the given type from the database with the
    given filter.

    :param get_type: The type of the elements to be found (subclass of Base)
    :type get_type: Type[Base]
    :param expr: The SQL expression to filter the results
    :type expr: str
    :return: The list of the elements that match the given expression
    :rtype: List[Base]
    """
    # Fix: the annotation previously read Type(Base), which *calls* the
    # typing special form and raises TypeError when the module is imported;
    # subscription Type[Base] is the correct spelling.
    # SECURITY NOTE: `expr` is passed to text() as raw SQL — callers must
    # never forward untrusted input here.
    return self.session.query(get_type).filter(text(expr)).all()
def filter(
    cls: typing.Type,
    filters: dict,
    parent_resource_id: typing.Optional[int] = None,
    parent_resource_name: typing.Optional[str] = None,
) -> typing.Generator:  # noqa: A003
    """Yield the resources of this class that match *filters*.

    Args:
        filters: dict key value pairs to filter by
        parent_resource_id: typing.Optional[int] The id of the parent resource
        parent_resource_name: typing.Optional[str] The name of the parent resource

    Yields:
        One resource at a time matching the filters.
    """
    if parent_resource_id and parent_resource_name:
        # Listing a nested resource underneath its parent.
        resource_name = parent_resource_name
        nested_resource_name = cls.resource_name()
    else:
        # Default and most common usage.
        resource_name = cls.resource_name()
        nested_resource_name = None
    data_list = ActiveCampaignAPI().list_resources(
        resource_name=resource_name,
        resource_id=parent_resource_id,
        nested_resource_name=nested_resource_name,
        query_params=filters,
    )
    for raw in data_list:
        attributes = cls._to_attribute_dict(raw)
        instance = cls(**attributes)
        instance._created = True
        yield instance
def log_events(klass: typing.Type) -> typing.Type:
    """Class decorator to log Qt events."""
    original_event = klass.event

    @functools.wraps(original_event)
    def logged_event(self: typing.Any, e: QEvent) -> bool:
        """Wrapper for event() which logs events."""
        log.misc.debug("Event in {}: {}".format(utils.qualname(klass),
                                                qenum_key(QEvent, e.type())))
        return original_event(self, e)

    klass.event = logged_event
    return klass
def create_schema(Type: Any, definitions: Definitions) -> Thing:
    """Build a JSON-schema Thing describing *Type*.

    Branch order matters: proxies win over everything, generic aliases and
    Django fields are handled before any issubclass() check (annotated
    generics are not valid issubclass arguments), and bool precedes int
    because bool subclasses int.

    Args:
        Type: The annotation, class, or field to describe.
        definitions: Shared schema definitions threaded through recursion.

    Returns:
        A Thing pairing the schema fragment with its definitions.

    Raises:
        AssertionError: If no branch (including the optional settings hook)
            can handle *Type*.
    """
    # Registered proxies take priority over all built-in handling.
    type_class = Type if isinstance(Type, type) else Type.__class__
    try:
        return PROXIES[type_class].get_json_schema(Type, definitions)  # type: ignore
    except KeyError:
        pass
    if isinstance(Type, stubs._GenericAlias):
        return generic_alias_schema(Type, definitions)
    elif isinstance(Type, models.Field):
        return field_descriptor_schema(Type, definitions)
    elif Type == Any:
        # Bare Any: an unconstrained schema.
        return Thing(schema={}, definitions=definitions)
    elif callable(getattr(Type, "get_json_schema", None)):
        # The type knows how to describe itself.
        return Type.get_json_schema(definitions)  # type: ignore[no-any-return]
    elif issubclass(Type, tuple) and callable(getattr(Type, "_asdict", None)):
        # NamedTuple: a tuple subclass exposing _asdict.
        return named_tuple_schema(Type, definitions)
    elif type(Type) == stubs._TypedDictMeta:
        # TypedDict reuses the named-tuple handling.
        return named_tuple_schema(Type, definitions)
    elif issubclass(Type, datetime.datetime):
        # datetime before date: datetime subclasses date.
        return Thing(schema={"type": "string"}, definitions={})
    elif issubclass(Type, datetime.date):
        return Thing(schema={"type": "string"}, definitions={})
    elif issubclass(Type, bool):
        # bool before int: bool subclasses int.
        return Thing(schema={"type": "boolean"}, definitions={})
    elif issubclass(Type, int):
        return Thing(schema={"type": "number"}, definitions={})
    elif issubclass(Type, float):
        return Thing(schema={"type": "number"}, definitions={})
    elif issubclass(Type, str):
        return Thing(schema={"type": "string"}, definitions={})
    elif Type is type(None):  # noqa: E721
        return Thing(schema={"type": "null"}, definitions={})
    elif issubclass(Type, enum.Enum):
        return enum_schema(Type, definitions)
    # Escape hatch: a project-configured hook may handle additional types.
    additional_schema_module: Optional[str] = getattr(
        settings, "REACTIVATED_SERIALIZATION", None)
    if additional_schema_module is not None:
        additional_schema: Callable[[Any, Definitions], Optional[Thing]] = import_string(
            additional_schema_module)
        schema = additional_schema(Type, definitions)
        if schema is not None:
            return schema
    assert False, f"Unsupported type {Type}"
def resolve_object_properties(
    self,
    cls: t.Type,
    *,
    history: t.List["Member"],
) -> t.Dict[str, t.Any]:
    """Resolve the schema for every public annotated attribute of *cls*.

    Walks the MRO so that the most-derived annotation wins; ignored and
    underscore-prefixed names are skipped.
    """
    resolved: t.Dict[str, t.Dict] = {}
    for klass in cls.mro():
        for name, annotation in t.get_type_hints(klass).items():
            already_seen = name in resolved
            ignored = name in self._schema_ignore_props_set
            if already_seen or ignored or name.startswith("_"):
                continue
            resolved[name] = self.resolve_field(annotation, history=history)
    return resolved
def __class_getitem__(cls, item: typing.Type) -> typing.Type[FlyteFile]:
    """Return a FlyteFile subclass specialised to the given format string.

    ``None`` or an (after normalisation) empty format falls back to the
    unspecialised class.
    """
    if item is None:
        return cls
    normalized = str(item).strip().lstrip("~").lstrip(".")
    if not normalized:
        return cls

    class _SpecificFormatClass(FlyteFile):
        # Get the type engine to see this as kind of a generic
        __origin__ = FlyteFile

        @classmethod
        def extension(cls) -> str:
            return normalized

    return _SpecificFormatClass
def get(self, object_id: Optional[str] = None) -> ResponseType:
    """
    Return all objects that match the given criteria and that the user is
    allowed to see.
    """
    with settings.session() as session:
        if DependentTypes:
            # Dependent listing: scope the query to the parent object taken
            # from the request, either through the join table (JoinBy) or a
            # direct foreign-key attribute on Type.
            dependent_type = DependentTypes[0]().type
            dependent_obj = getattr(request, dependent_type)
            filters = [Type.deleted_at == None]
            joins = []
            if JoinBy:
                joins.append(JoinBy)
                filters.extend(
                    [getattr(JoinBy, dependent_type) == dependent_obj])
            else:
                filters.append(
                    getattr(Type, dependent_type) == dependent_obj)
            query = session.query(Type).filter(*filters).join(*joins)
        else:
            # Top-level listing: restrict to objects visible through an
            # explicit ObjectRole for this user.
            visible_objs = ObjectRole.select_for(
                session, request.user, Type().type)
            # we add objects visible via the users organizations
            org_ids = admin_orgs_id_query(session, request.user)
            query = session.query(Type).filter(
                or_(Type.id.in_(visible_objs), Type.organization_id.in_(org_ids)),
                Type.deleted_at == None,
            )
        # If requested, we join dependent objects for faster response times...
        if Joins:
            for j in Joins:
                # Chain each path of joined loads; the first entry starts
                # the chain, subsequent entries extend it.
                joinedloads = None
                for Join in j:
                    if joinedloads is None:
                        joinedloads = joinedload(Join, innerjoin=True)
                    else:
                        joinedloads = joinedloads.joinedload(
                            Join, innerjoin=True)
                query = query.options(joinedloads)
        objs = query.all()
        return {"data": [obj.export() for obj in objs]}, 200
def get(self, get_type: Type[Base], get_id: int = None) -> List[Base]:
    """Retrieves the elements of the given type from the database.

    :param get_type: The type of the elements to be found (subclass of Base)
    :type get_type: Type[Base]
    :param get_id: The ID of the element to retrieve, defaults to None
    :param get_id: int, optional
    :raises DatabaseError: If the requested element can not be found
    :return: The entire collection if get_id is not given, or a list
        composed of the element with the given ID
    :rtype: List[Base]
    """
    # Fix: the annotation previously read Type(Base), which *calls* the
    # typing special form and raises TypeError when the module is imported;
    # subscription Type[Base] is the correct spelling.
    if get_id is None:
        ret_obj = self.session.query(get_type).all()
    else:
        ret_obj = [self.session.query(get_type).get(get_id)]
        # Query.get() returns None for a missing primary key.
        if ret_obj[0] is None:
            raise DatabaseError("The requested object could not be found.")
    return ret_obj
def register_type(cls: typing.Type,
                  graphql_type: GraphQLType,
                  *,
                  store_type_information: bool = True) -> None:
    """Register a Strawberry type alongside its GraphQL type"""
    # Record the annotation so the graphql type can be looked up when
    # converting annotations in type_converter.
    ANNOTATION_REGISTRY[cls] = graphql_type
    GRAPHQL_TYPE_TO_STRAWBERRY_TYPE[graphql_type] = cls
    # Map the class name so it can be fed to get_type_hints.
    TYPE_HINTS_REGISTRY[cls.__name__] = cls
    # The "graphql core" type is stored on the class itself, but builtins
    # cannot take new attributes — hence the opt-out flag.
    if store_type_information:
        cls.graphql_type = graphql_type
def post(self, object_id: Optional[str] = None,
         organization_id: Optional[str] = None) -> ResponseType:
    """Create an object from the request body, attaching it to an
    organization and/or parent object when configured; if an equivalent
    object already exists it is updated instead.
    """
    form = Form(request.get_json() or {})
    if not form.validate():
        return {"message": "invalid data", "errors": form.errors}, 400
    dependent_obj: Optional[Base] = None
    org: Optional[Organization] = None
    join_by: Optional[Base] = None
    if DependentTypes:
        # The new object hangs off a parent object taken from the request.
        dependent_type = DependentTypes[0]().type
        dependent_obj = getattr(request, dependent_type)
    with settings.session() as session:
        obj = Type(**form.valid_data)
        if organization_id is not None:
            org = Organization.get_or_create(session, request.organization)
            obj.organization = org
        if dependent_obj:
            if JoinBy:
                # if this object has a M2M table, we create a row in
                # the table for the newly created object
                join_by = JoinBy()
                setattr(join_by, dependent_type, dependent_obj)
                setattr(join_by, obj.type, obj)
            else:
                setattr(obj, dependent_type, dependent_obj)
        # we expunge the object from the session, as it might have been added
        # when we associated the dependent properties with it...
        session.expunge(obj)
        existing_obj = obj.get_existing(session)
        if existing_obj:
            # we update the existing object instead
            update_form = Form(request.get_json() or {}, is_update=True)
            if not update_form.validate():
                # NOTE(review): this returns `form.errors`, not
                # `update_form.errors` — looks unintentional; confirm.
                return {
                    "message": "invalid data",
                    "errors": form.errors
                }, 400
            for k, v in update_form.valid_data.items():
                setattr(existing_obj, k, v)
            session.commit()
            return existing_obj.export(), 201
        if join_by:
            session.add(join_by)
        session.add(obj)
        if not DependentTypes:
            # we create an object role for the newly created object
            # only if it does not depends on another object
            assert isinstance(org, Organization)
            for org_role in ["admin", "superuser"]:
                ObjectRole.get_or_create(session, obj, org, "admin", org_role)
        session.commit()
        return obj.export(), 201
def find_graphene_type(
    type_: T.Type,
    field: ModelField,
    registry: Registry,
    parent_type: T.Type = None,
    model: T.Type[BaseModel] = None,
) -> BaseType:  # noqa: C901
    """
    Map a native Python type to a Graphene-supported Field type, where
    possible, throwing an error if we don't know what to map it to.
    """
    if type_ == uuid.UUID:
        return UUID
    elif type_ in (str, bytes):
        return String
    elif type_ == datetime.datetime:
        return DateTime
    elif type_ == datetime.date:
        return Date
    elif type_ == datetime.time:
        return Time
    elif type_ == bool:
        return Boolean
    elif type_ == float:
        return Float
    elif type_ == decimal.Decimal:
        return GrapheneDecimal if DECIMAL_SUPPORTED else Float
    elif type_ == int:
        return Int
    elif type_ in (tuple, list, set):
        # TODO: do Sets really belong here?
        return List
    elif registry and registry.get_type_for_model(type_):
        return registry.get_type_for_model(type_)
    elif registry and isinstance(type_, BaseModel):
        # If it's a Pydantic model that hasn't yet been wrapped with a ObjectType,
        # we can put a placeholder in and request that `resolve_placeholders()`
        # be called to update it.
        # NOTE(review): no return here — the caller receives None until the
        # placeholder is resolved; confirm this is intentional.
        registry.add_placeholder_for_model(type_)
    # NOTE: this has to come before any `issubclass()` checks, because annotated
    # generic types aren't valid arguments to `issubclass`
    elif hasattr(type_, "__origin__"):
        return convert_generic_python_type(
            type_, field, registry, parent_type=parent_type, model=model)
    elif isinstance(type_, T.ForwardRef):
        # A special case! We have to do a little hackery to try and resolve
        # the type that this points to, by trying to reference a "sibling" type
        # to where this was defined so we can get access to that namespace...
        sibling = model or parent_type
        if not sibling:
            # Fix: the source contained a raw (unescaped) newline inside this
            # string literal, which is a SyntaxError; it is restored here as
            # an explicit "\n" escape within the implicit concatenation.
            raise ConversionError(
                "Don't know how to convert the Pydantic field "
                f"{field!r} ({field.type_}), could not resolve "
                "the forward reference. Did you call `resolve_placeholders()`?\n"
                "See the README for more on forward references.")
        module_ns = sys.modules[sibling.__module__].__dict__
        resolved = type_._evaluate(module_ns, None)
        # TODO: make this behavior optional. maybe this is a place for the TypeOptions to play a role?
        if registry:
            registry.add_placeholder_for_model(resolved)
        return find_graphene_type(
            resolved, field, registry, parent_type=parent_type, model=model)
    elif issubclass(type_, enum.Enum):
        return Enum.from_enum(type_)
    else:
        raise ConversionError(
            f"Don't know how to convert the Pydantic field {field!r} ({field.type_})"
        )
def has_read_permission(self, qs) -> "Type[db.Model]":
    """Return the query set unchanged — no extra read filtering is applied.

    Fix: the annotation previously read Type(db.Model), which *calls* the
    typing special form and raises TypeError at class-creation time; a
    forward-reference string keeps the intent without evaluating it.
    """
    return qs
def get(self, object_id: str, dependent_id: Optional[str] = None) -> ResponseType:
    """Export the request's object."""
    resource = getattr(request, Type().type)
    return resource.export(), 200
def construct_query_set(self) -> "Optional[Type[db.Model]]":
    """Stub hook for building a query set; currently returns None.

    Fix: the annotation previously read Type(db.Model), which *calls* the
    typing special form and raises TypeError at class-creation time; a
    forward-reference string (marked Optional, since the body returns
    None) keeps the intent without evaluating it.
    """
    return None
def get_export_headers(self) -> List:
    """Return the headers configured on this instance.

    Fix: the annotation previously read Type(List), which *calls* the
    typing special form and raises TypeError at class-creation time —
    and the method returns a list instance, not a type, so plain ``List``
    is the correct annotation.
    """
    return self.headers