def __init__(__pydantic_self__, **data: Any) -> None:
    """Validate the supplied resource type, then delegate to ``BaseModel``.

    Accepts the type under ``resource_type`` or — when the model declares no
    ``resourceType`` field — under the FHIR-style ``resourceType`` key, and
    rejects construction when it does not match the model's declared default.

    Raises:
        ValidationError: if the supplied resource type differs from the
            default declared on the ``resource_type`` field.
    """
    supplied_type = data.pop("resource_type", None)
    model_fields = __pydantic_self__.__fields__
    if "resourceType" in data and "resourceType" not in model_fields:
        supplied_type = data.pop("resourceType", None)

    validation_errors = []
    if supplied_type is not None:
        expected_resource_type = model_fields["resource_type"].default
        if supplied_type != expected_resource_type:
            owner = __pydantic_self__.__class__
            error = (
                f"``{owner.__module__}."
                f"{owner.__name__}`` "
                f"expects resource type ``{expected_resource_type}``, "
                f"but got ``{supplied_type}``. "
                "Make sure resource type name is correct and right "
                "ModelClass has been chosen."
            )
            validation_errors.append(
                ErrorWrapper(WrongResourceType(error=error), loc="resource_type")
            )

    if validation_errors:
        raise ValidationError(validation_errors, __pydantic_self__.__class__)
    BaseModel.__init__(__pydantic_self__, **data)
def __init__(self, samplers, dataset):
    """Combine several *samplers* over one *dataset*.

    Total length is ``len(dataset) * len(samplers)``; the samplers are merged
    with equal weight via ``MultiSampler.merge_samplers``.
    """
    equal_weights = [1] * len(samplers)
    merged = MultiSampler.merge_samplers(samplers, equal_weights)
    BaseModel.__init__(
        self,
        samplers=samplers,
        dataset=dataset,
        length=len(dataset) * len(samplers),
        merged_samplers=merged,
    )
def __init__(self, length, proportion=1.0, replacement=False):
    """Randomly sample ``int(length * proportion)`` indices with uniform weights.

    Args:
        length: number of candidate indices.
        proportion: fraction of *length* to draw per epoch.
        replacement: whether sampling is done with replacement.
    """
    uniform_weights = torch.ones(length).double()
    draw_count = int(length * proportion)
    random_sampler = torch.utils.data.WeightedRandomSampler(
        uniform_weights,
        num_samples=draw_count,
        replacement=replacement,
    )
    BaseModel.__init__(
        self,
        proportion=proportion,
        replacement=replacement,
        sampler=random_sampler,
    )
def __init__(self, samplers, datasets):
    """Zip *samplers* across *datasets*; length follows the longest sampler."""
    combine_mapping = Dataset.create_from_combine_mapping(datasets)
    zipped = ZipSampler.zip_samplers(samplers, datasets)
    longest = max(len(sampler) for sampler in samplers)
    BaseModel.__init__(
        self,
        samplers=samplers,
        datasets=datasets,
        length=longest,
        from_mapping=combine_mapping,
        zipped_samplers=zipped,
    )
def __init__(self, sampler, length, epoch_bound=False):
    """Wrap *sampler* so sampling repeats and is limited to *length*.

    Per the original note, the limit is based on epoch length and batch
    size; *epoch_bound* ties the limit to the epoch. An iterator over the
    wrapped sampler is kept as the working queue.
    """
    sample_queue = iter(sampler)
    BaseModel.__init__(
        self,
        sampler=sampler,
        length=length,
        epoch_bound=epoch_bound,
        queue=sample_queue,
    )
def __init__(self, dictionary):
    """Populate the model from *dictionary*, copying only the keys present.

    Args:
        dictionary: mapping that may contain any of ``user_id``,
            ``accepted``, ``waiting`` and ``turn``; keys that are absent
            leave the corresponding attribute untouched.
    """
    BaseModel.__init__(self)
    # One loop instead of four copy-pasted `in dictionary.keys()` checks;
    # `in dictionary` is the idiomatic membership test.
    for field in ("user_id", "accepted", "waiting", "turn"):
        if field in dictionary:
            setattr(self, field, dictionary[field])
def __init__(self, samplers, datasets, ns):
    """Merge *samplers* over concatenated *datasets* with per-sampler counts *ns*."""
    total_length = MergeSampler.merged_samplers_length(samplers)
    concat_mapping = Dataset.create_from_concat_mapping(datasets)
    merged = MergeSampler.merge_samplers(samplers, datasets, ns)
    BaseModel.__init__(
        self,
        samplers=samplers,
        datasets=datasets,
        ns=ns,
        length=total_length,
        from_mapping=concat_mapping,
        merged_samplers=merged,
    )
def add_init_forgiveness(self, **kwargs):
    """Initialise via ``BaseModel.__init__`` while waiving missing required fields.

    Every required field absent from *kwargs* is temporarily flipped to
    optional for the duration of the init, then restored.

    Args:
        **kwargs: field values forwarded to ``BaseModel.__init__``.
    """
    fields: Dict[str, ModelField] = self.__class__.__fields__
    model_fields_changed: list = []
    for model_field in fields.values():
        if model_field.name not in kwargs and getattr(model_field, 'required', False):
            model_fields_changed.append(model_field)
            model_field.required = False
    # The `required` flags live on class-level (shared) field objects:
    # restore them even if validation raises, otherwise one failed init
    # would leave those fields permanently optional for every instance.
    try:
        BaseModel.__init__(self, **kwargs)
    finally:
        for model_field in model_fields_changed:
            model_field.required = True
def __init__(self, dictionary):
    """Populate the model from *dictionary*, copying only the keys present.

    Args:
        dictionary: mapping that may contain any of ``user_id``,
            ``accepted``, ``locations``, ``eggs``, ``splashes`` and
            ``last_check``; absent keys leave the attribute untouched.
    """
    BaseModel.__init__(self)
    # One loop instead of six copy-pasted `in dictionary.keys()` checks;
    # `in dictionary` is the idiomatic membership test.
    for field in ("user_id", "accepted", "locations", "eggs", "splashes", "last_check"):
        if field in dictionary:
            setattr(self, field, dictionary[field])
def __init__(self, from_db=False, forgiveness=None, **kwargs):
    """Initialise the model, optionally forgiving missing required fields.

    Args:
        from_db: whether the values come from the database; also used as
            the default for *forgiveness* when that is not given.
        forgiveness: when truthy, initialise through
            ``add_init_forgiveness``; otherwise call ``BaseModel.__init__``.
        **kwargs: field values for the model.
    """
    if forgiveness is None:
        forgiveness = from_db
    self.set_db_fields(kwargs, from_db)
    # Plain if/else instead of a conditional expression: both branches are
    # executed only for their side effects, so an expression form hid the
    # control flow and discarded both results.
    if forgiveness:
        self.add_init_forgiveness(**kwargs)
    else:
        BaseModel.__init__(self, **kwargs)
def __init__(self):
    """Create the store with empty registries, load stored data, start observing.

    Initialises ``students``/``missions``/``checkers`` as empty mappings and a
    fresh ``Observer``, then calls ``read_data`` and ``__start_observer``.
    """
    initial_state = {
        "students": {},
        "missions": {},
        "checkers": {},
        "observer": Observer(),
    }
    BaseModel.__init__(self, **initial_state)
    self.read_data()
    self.__start_observer()
def __init__(self, **data):
    """Initialise the model, normalising ``values`` and (re)building the hash index.

    The index is rebuilt when ``hash_index`` is absent from *data* or when a
    truthy ``build_index`` flag is passed.
    """
    build_index = False
    # NOTE(review): because of short-circuiting, "build_index" is popped
    # from data only when "hash_index" IS present; when it is absent the
    # "build_index" key stays in data and is forwarded to
    # BaseModel.__init__ below — confirm that is intended.
    if ("hash_index" not in data) or data.pop("build_index", False):
        build_index = True
        # Placeholder keeps the field populated until the real index
        # is computed at the end of this method.
        data["hash_index"] = "placeholder"
    BaseModel.__init__(self, **data)
    # Overwrite options with massaged values
    kwargs = {"lowercase": self.lowercase}
    if self.exact_floats:
        # exact_floats disables digit rounding in the normalizer.
        kwargs["digits"] = False
    # Writes go through __values__ directly — presumably to bypass
    # validation on assignment; verify against the model config.
    self.__values__["values"] = recursive_normalizer(self.values, **kwargs)
    # Build a hash index if we need it
    if build_index:
        self.__values__["hash_index"] = self.get_hash_index()
def __init__(self, length):
    """Build a sequential sampler over *length* dummy entries."""
    placeholder_data = torch.ones(length)
    sequential = torch.utils.data.SequentialSampler(placeholder_data)
    BaseModel.__init__(self, sampler=sequential)
def validate_py(self):
    """Validates but also adds None for the removed fields.

    The first pass validates all current info and adds None to fields that
    were deleted; the second pass validates those None values to make sure
    they are allowed to be None.
    """
    for _ in range(2):
        BaseModel.__init__(self, **self.dict())
def __init__(self, firestore_client: FirestoreClient, **kwargs) -> None:
    """Attach *firestore_client* to the ``__firestore__`` handle, then init the model.

    Args:
        firestore_client: client stored on the shared ``__firestore__`` object.
        **kwargs: field values forwarded to ``BaseModel.__init__``.
    """
    firestore_handle = self.__firestore__
    firestore_handle.client = firestore_client
    BaseModel.__init__(self, **kwargs)
def __init__(self, **kwargs):
    """Initialise the model, generating a fresh UUID ``id`` when none is given."""
    # setdefault only stores the new UUID when "id" is absent.
    kwargs.setdefault("id", uuid4())
    BaseModel.__init__(self, **kwargs)