class RepeatCopyTaskParams(object):
    name = attrib(default="repeat-copy-task")
    controller_size = attrib(default=100, convert=int)
    controller_layers = attrib(default=1, convert=int)
    controller_type = attrib(default='NTM-LSTM', convert=str,
                             validator=validators.in_(['NTM-LSTM', 'NTM-FFW', 'LSTM']))
    num_heads = attrib(default=1, convert=int)
    head_activation_type = attrib(default='softplus', convert=str,
                                  validator=validators.in_(['softplus', 'relu']))
    sequence_width = attrib(default=8, convert=int)
    sequence_min_len = attrib(default=1, convert=int)
    sequence_max_len = attrib(default=10, convert=int)
    repeat_min = attrib(default=1, convert=int)
    repeat_max = attrib(default=10, convert=int)
    memory_n = attrib(default=128, convert=int)
    memory_m = attrib(default=20, convert=int)
    num_batches = attrib(default=250000, convert=int)
    batch_size = attrib(default=1, convert=int)
    rmsprop_lr = attrib(default=1e-4, convert=float)
    rmsprop_momentum = attrib(default=0.9, convert=float)
    rmsprop_alpha = attrib(default=0.95, convert=float)
class TodoAttrs(CalendarEntryAttrs):
    percent: Optional[int] = attr.ib(
        default=None, validator=v_optional(in_(range(0, MAX_PERCENT + 1))))
    priority: Optional[int] = attr.ib(
        default=None, validator=v_optional(in_(range(0, MAX_PRIORITY + 1))))
    completed: Optional[datetime] = attr.ib(
        default=None, converter=ensure_datetime)  # type: ignore
class Request(object):
    request_type = attr.ib(validator=in_(('Create', 'Update', 'Delete')))
    request_id = attr.ib(validator=instance_of(str))
    response_url = attr.ib(validator=instance_of(str))
    resource_type = attr.ib(validator=instance_of(str))
    logical_resource_id = attr.ib(validator=instance_of(str))
    stack_id = attr.ib(validator=instance_of(str))
    resource_properties = attr.ib(validator=instance_of(dict))

    @classmethod
    def from_dict(cls, obj):
        request_cls = globals().get(obj.get('RequestType', cls.__name__), cls)
        request_cls_attrs = set(field.name for field in attr.fields(request_cls))
        kwargs = {
            re.sub('(?!^)([A-Z]+)', r'_\1', k).lower(): v
            for k, v in obj.items()
        }
        request_cls_kwargs = {
            k: v for k, v in kwargs.items() if k in request_cls_attrs
        }
        try:
            return request_cls(**request_cls_kwargs)
        except ValueError as ex:
            raise EventSerializationException(
                "Couldn't instantiate request object: {}; ".format(
                    get_reason_from_exception(ex)) +
                "Source object: {}".format(json.dumps(obj, sort_keys=True)),
            )
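As a sanity check on the key conversion that from_dict applies above, here is a minimal standalone sketch; the helper name camel_to_snake is hypothetical and used only for illustration.

import re

def camel_to_snake(key: str) -> str:
    # Insert "_" before each run of uppercase letters (except at the start), then lowercase.
    return re.sub('(?!^)([A-Z]+)', r'_\1', key).lower()

assert camel_to_snake('RequestType') == 'request_type'
assert camel_to_snake('ResponseURL') == 'response_url'
assert camel_to_snake('LogicalResourceId') == 'logical_resource_id'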
class Result(object):
    """Base result class."""

    time = attr.ib(convert=arrow.get)
    check = attr.ib()
    type = attr.ib(validator=in_(ResultType))
    meta = attr.ib(default=attr.Factory(dict))
class JobDescription:
    # The job driver language, this field determines how to start the
    # driver. The value is one of the names of enum Language defined in
    # common.proto, e.g. PYTHON
    language = attr.ib(type=str, validator=in_(common_pb2.Language.keys()))
    # The runtime_env (RuntimeEnvDict) for the job config.
    runtime_env = attr.ib(type=RuntimeEnv, converter=lambda kw: RuntimeEnv(**kw))
    # The entry to start the driver.
    # PYTHON:
    #   - The basename of driver filename without extension in the job
    #     package archive.
    # JAVA:
    #   - The driver class full name in the job package archive.
    driver_entry = attr.ib(type=str, validator=instance_of(str))
    # The driver arguments in list.
    # PYTHON:
    #   - The arguments to pass to the main() function in driver entry.
    #     e.g. [1, False, 3.14, "abc"]
    # JAVA:
    #   - The arguments to pass to the driver command line.
    #     e.g. ["-custom-arg", "abc"]
    driver_args = attr.ib(type=list, validator=instance_of(list), default=[])
    # The environment vars to pass to job config, type of keys should be str.
    env = attr.ib(type=dict,
                  validator=deep_mapping(key_validator=instance_of(str),
                                         value_validator=any_(),
                                         mapping_validator=instance_of(dict)),
                  default={})
class Fillna(BaseProcessor):
    source_type = Series
    result_type = Series

    METHODS = ('min', 'max', 'mean', 'median', 'mode')

    value = attrib(default=None)
    how = attrib(default=None, validator=optional(in_(METHODS)))

    @how.validator
    def validate_exclusive(self, attribute, value):
        if self.value is not None and value is not None:
            raise ValueError("Only one of 'value' or 'how' can be specified")
        if self.value is None and value is None:
            raise ValueError("Either 'value' or 'how' must be specified")

    def process(self, series):
        if self.value is not None:
            series = series.fillna(self.value)
        elif self.how is not None:
            series = self._fill_by_method(series)
        return series

    def _fill_by_method(self, series):
        if self.how == 'min':
            val = series.min()
        elif self.how == 'max':
            val = series.max()
        elif self.how == 'mean':
            val = series.mean()
        elif self.how == 'median':
            val = series.median()
        elif self.how == 'mode':
            val = series.mode()[0]
        return series.fillna(val)
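A minimal usage sketch for Fillna, assuming the class above is decorated with @attr.s (the decorator is not shown in the snippet) and that Series refers to pandas.Series.

import pandas as pd

series = pd.Series([1.0, None, 3.0])

Fillna(value=0).process(series)     # NaN -> 0.0
Fillna(how='mean').process(series)  # NaN -> 2.0 (mean of 1.0 and 3.0)

# Supplying both arguments, or neither, is rejected by validate_exclusive:
# Fillna(value=0, how='mean') raises ValueError.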
class HorizontalAxisOfObject(Generic[_ObjectT], AxisFunction[_ObjectT]):
    _object: _ObjectT = attrib()
    _index: int = attrib(validator=in_(Range.closed(0, 1)))

    def to_concrete_axis(
        self, axes_info: Optional[AxesInfo[_ObjectT]]  # pylint:disable=unused-argument
    ) -> GeonAxis:
        if not isinstance(self._object, HasAxes):
            raise RuntimeError(
                "Can only instantiate an axis function if the object is of a "
                "concrete type (e.g. perception or situation object)"
            )
        horizontal_axes = tuple(
            axis
            for axis in self._object.axes.all_axes
            if not axis.aligned_to_gravitational
        )
        return horizontal_axes[self._index]  # pylint:disable=invalid-sequence-index

    def copy_remapping_objects(
        self, object_map: Mapping[_ObjectT, _ObjectToT]
    ) -> "HorizontalAxisOfObject[_ObjectToT]":
        return HorizontalAxisOfObject(object_map[self._object], index=self._index)

    def __repr__(self, object_map: Optional[Mapping[_ObjectT, str]] = None) -> str:
        handle = object_map[self._object] if object_map else self._object
        return f"HorizontalAxisOfObject(_object={handle}, _index={str(self._index)})"
def attrib_enum(type_: Optional[EnumMeta] = None, default: Any = attr.NOTHING) -> attr._make._CountingAttr:
    """
    Create a new attr attribute with a validator for enums.

    When a default value is provided, type_ is computed automatically.

    Ex.:

        class Foo(Enum):
            A = 'first'

    Valid calls:

        attrib_enum(type_=Foo)                 # Default is attr.NOTHING; a value must be supplied when instantiating
        attrib_enum(default=Foo.A)             # Default is Foo.A and type_ is Foo
        attrib_enum(type_=Foo, default=Foo.A)
    """
    if default is attr.NOTHING and not type_:
        raise RuntimeError("Default or type_ parameter must be provided")

    if default is not attr.NOTHING:
        type_ = type(default)

    if isinstance(default, EnumMeta):
        raise ValueError(
            f"Default must be a member of Enum and not the Enum class itself, got {default} while expecting"
            f" some of the following members {', '.join([str(i) for i in default])}"
        )

    metadata = {"type": "enum", "enum_class": type_}
    return attr.ib(default=default, validator=in_(type_), type=type_, metadata=metadata)
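A hypothetical usage sketch for attrib_enum; Color and Brush are illustration-only names and assume the helper above is in scope.

import attr
from enum import Enum

class Color(Enum):
    RED = 'red'
    BLUE = 'blue'

@attr.s
class Brush:
    color = attrib_enum(default=Color.RED)  # type_ is inferred as Color

Brush()                  # color defaults to Color.RED
Brush(color=Color.BLUE)  # accepted by the in_(Color) validator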
class AlertList(object):
    """Generates the AlertList Panel."""

    description = attr.ib(default="")
    id = attr.ib(default=None)
    limit = attr.ib(default=DEFAULT_LIMIT)
    links = attr.ib(default=attr.Factory(list))
    onlyAlertsOnDashboard = attr.ib(default=True, validator=instance_of(bool))
    show = attr.ib(default=ALERTLIST_SHOW_CURRENT)
    sortOrder = attr.ib(default=SORT_ASC, validator=in_([1, 2, 3]))
    stateFilter = attr.ib(default=attr.Factory(list))
    title = attr.ib(default="")
    transparent = attr.ib(default=False, validator=instance_of(bool))

    def to_json_data(self):
        return {
            'description': self.description,
            'id': self.id,
            'limit': self.limit,
            'links': self.links,
            'onlyAlertsOnDashboard': self.onlyAlertsOnDashboard,
            'show': self.show,
            'sortOrder': self.sortOrder,
            'stateFilter': self.stateFilter,
            'title': self.title,
            'transparent': self.transparent,
            'type': ALERTLIST_TYPE,
        }
def test_success_with_value(self):
    """
    If the value is in our options, nothing happens.
    """
    v = in_([1, 2, 3])
    a = simple_attr("test")
    v(1, a, 3)
class Region(Generic[ReferenceObjectT]):
    """
    A region of space perceived by the learner.

    We largely follow

    Barbara Landau and Ray Jackendoff. "'What' and 'where' in spatial language
    and spatial cognition." Behavioral and Brain Sciences (1993) 16:2.

    who analyze spatial relations in terms of a `Distance` and `Direction`
    with respect to some *reference_object*.

    At least one of *distance* and *direction* must be specified.
    """

    reference_object: ReferenceObjectT = attrib()
    distance: Optional[Distance] = attrib(
        validator=optional(in_(LANDAU_AND_JACKENDOFF_DISTANCES)), default=None)
    direction: Optional[Direction[ReferenceObjectT]] = attrib(
        validator=optional(instance_of(Direction)), default=None)

    def copy_remapping_objects(
        self,
        object_map: Mapping[ReferenceObjectT, NewObjectT],
        *,
        axis_mapping: Mapping[GeonAxis, GeonAxis] = immutabledict(),
    ) -> "Region[NewObjectT]":
        return Region(
            reference_object=object_map[self.reference_object],
            distance=self.distance,
            direction=self.direction.copy_remapping_objects(
                object_map, axis_mapping=axis_mapping)
            if self.direction
            else None,
        )

    def accumulate_referenced_objects(
            self, object_accumulator: List[ReferenceObjectT]) -> None:
        r"""
        Adds all objects referenced by this `Region` to *object_accumulator*.
        """
        object_accumulator.append(self.reference_object)
        if self.direction:
            if isinstance(self.direction.relative_to_axis, AxisFunction):
                self.direction.relative_to_axis.accumulate_referenced_objects(
                    object_accumulator)

    def __attrs_post_init__(self) -> None:
        check_arg(
            self.distance or self.direction,
            "A region must have either a distance or direction specified.",
        )

    def __repr__(self) -> str:
        parts = [str(self.reference_object)]
        if self.distance:
            parts.append(f"distance={self.distance}")
        if self.direction:
            parts.append(f"direction={self.direction}")
        return f"Region({','.join(parts)})"
class GetTaskRequest(Base):
    id = attrib(
        convert=strconv,
        validator=instance_of(str)
    )
    view = attrib(
        default=None,
        validator=optional(in_(["MINIMAL", "BASIC", "FULL"]))
    )
class Hypothesis:
    pattern_template: PerceptionGraphTemplate = attrib(
        validator=instance_of(PerceptionGraphTemplate))
    association_score: float = attrib(validator=instance_of(float), default=0)
    probability: float = attrib(validator=in_(Range.open(0, 1)), default=0)
    observation_count: int = attrib(default=1)
class ParserNode(MindmapTreeNode):
    """
    When we parse mindmap files, we extract nodes as a flat list and should
    also store some metadata.
    """

    level: int = attrib(validator=instance_of(int))
    side: Optional[str] = attrib(validator=optional(in_(VALID_SIDES)))

    @level.validator
    def is_positive(self, attribute, value):
        if value < 1:
            raise ValueError('level must be a positive integer (1 or greater)')

    @property
    def is_root(self) -> bool:
        return self.level == 1

    def to_node_dict(self):
        return asdict(
            self,
            filter=lambda attr, value: attr.name not in ['level', 'side'])

    def to_root(self) -> RootNode:
        return RootNode(**self.to_node_dict())

    def to_child(self) -> ChildNode:
        return ChildNode(**self.to_node_dict())
class ConfigfileVersion1(object):
    version: str = attr.ib(validator=v.in_(["1", "1.0"]))
    schema_template: str = attr.ib(validator=v.instance_of(str))
    output: str = attr.ib(validator=attr.validators.instance_of(str))
    template_directories: typing.List[TemplateDir] = attr.ib(
        validator=v.instance_of(list)
    )
    template_packages: typing.List[str] = attr.ib(default=list())
    transaction: bool = attr.ib(
        validator=v.instance_of(bool), default=False
    )
    variables: typing.Mapping[str, str] = attr.ib(default=dict())

    def extract(self) -> Config:
        target_config = TargetConfig(
            transaction=self.transaction,
            schema_template=self.schema_template,
            variables={}
        )
        config = Config(
            template_directories=self.template_directories,
            variables=self.variables,
            targets={self.output: target_config}
        )
        return config
def test_repr(self):
    """
    Returned validator has a useful `__repr__`.
    """
    v = in_([3, 4, 5])
    assert (
        "<in_ validator with options [3, 4, 5]>"
    ) == repr(v)
def test_fail(self):
    """
    Raise ValueError if the value is outside our options.
    """
    v = in_([1, 2, 3])
    a = simple_attr("test")
    with pytest.raises(ValueError) as e:
        v(None, a, None)
    assert ("'test' must be in [1, 2, 3] (got None)",) == e.value.args
def test_fail_with_string(self):
    """
    Raise ValueError if the value is outside our options when the options
    are specified as a string and the value is not a string.
    """
    v = in_("abc")
    a = simple_attr("test")
    with pytest.raises(ValueError) as e:
        v(None, a, None)
    assert ("'test' must be in 'abc' (got None)",) == e.value.args
class HydrodynamicModelInfo:
    """
    HydrodynamicModelInfo provides information about which layers, fields,
    and phases the current hydrodynamic model is using.
    """

    selected_base_type = attr.attrib(validator=in_(HydrodynamicModelType))
    phases = attr.attrib(validator=list_of_strings)
    fields = attr.attrib(validator=list_of_strings)
    layers = attr.attrib(validator=list_of_strings)
    has_water_phase = attr.attrib(type=bool, validator=instance_of(bool))
class MdParserConfig:
    """Configuration options for the Markdown Parser.

    Note in the sphinx configuration these option names are prepended with ``myst_``
    """

    renderer: str = attr.ib(
        default="sphinx", validator=in_(["sphinx", "html", "docutils"])
    )
    commonmark_only: bool = attr.ib(default=False, validator=instance_of(bool))

    dmath_enable: bool = attr.ib(default=True, validator=instance_of(bool))
    dmath_allow_labels: bool = attr.ib(default=True, validator=instance_of(bool))
    dmath_allow_space: bool = attr.ib(default=True, validator=instance_of(bool))
    dmath_allow_digits: bool = attr.ib(default=True, validator=instance_of(bool))
    amsmath_enable: bool = attr.ib(default=False, validator=instance_of(bool))
    deflist_enable: bool = attr.ib(default=False, validator=instance_of(bool))
    update_mathjax: bool = attr.ib(default=True, validator=instance_of(bool))
    admonition_enable: bool = attr.ib(default=False, validator=instance_of(bool))
    figure_enable: bool = attr.ib(default=False, validator=instance_of(bool))

    disable_syntax: List[str] = attr.ib(
        factory=list,
        validator=deep_iterable(instance_of(str), instance_of((list, tuple))),
    )

    html_img_enable: bool = attr.ib(default=False, validator=instance_of(bool))

    # see https://en.wikipedia.org/wiki/List_of_URI_schemes
    url_schemes: Optional[List[str]] = attr.ib(
        default=None,
        validator=optional(deep_iterable(instance_of(str), instance_of((list, tuple)))),
    )

    heading_anchors: Optional[int] = attr.ib(
        default=None, validator=optional(in_([1, 2, 3, 4, 5, 6, 7]))
    )

    def as_dict(self, dict_factory=dict) -> dict:
        return attr.asdict(self, dict_factory=dict_factory)
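A brief usage sketch, assuming MdParserConfig is an attrs class with auto_attribs enabled (as the annotated attr.ib() fields suggest); the in_ validators reject unknown choices at construction time.

config = MdParserConfig(renderer="html", heading_anchors=2)
assert config.as_dict()["renderer"] == "html"

try:
    MdParserConfig(renderer="latex")  # not in ["sphinx", "html", "docutils"]
except ValueError as exc:
    print(exc)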
class PhysicsOptionsInfo:
    """
    PhysicsOptionsInfo provides information about the ``Physics Options``
    available at ``ALFAsim``.

    The following options can be accessed:

    Emulsion Model:
        Informs which emulsion model the application is currently using.
        For more information about all options available check
        ``alfasim_sdk.context.EmulsionModelType``

    Solids Model:
        Informs the current solids model being used by the application.
        For more information about all options available check
        ``alfasim_sdk.context.SolidsModelType``

    Hydrodynamic Model:
        Provides a ``alfasim_sdk.context.HydrodynamicModelInfo`` informing
        which layers, fields and phases the application is currently using.
        For more information about all options available check
        ``alfasim_sdk.context.HydrodynamicModelInfo``
    """

    emulsion_model = attr.attrib(validator=in_(EmulsionModelType))
    solids_model = attr.attrib(validator=in_(SolidsModelType))
    hydrodynamic_model = attr.attrib(
        validator=instance_of(HydrodynamicModelInfo))
class PhysicsOptionsInfo:
    """
    ``PhysicsOptionsInfo`` provides information about the physics options
    available at ``ALFAsim``.

    The following options can be accessed:

    Emulsion Model:
        Informs which emulsion model the application is currently using.
        For more information about all options available check
        :py:class:`~alfasim_sdk._internal.constants.EmulsionModelType`

    Solids Model:
        Informs the current solids model being used by the application.
        For more information about all options available check
        :py:class:`~alfasim_sdk._internal.constants.SolidsModelType`

    Hydrodynamic Model:
        Provides a :class:`HydrodynamicModelInfo` informing which layers,
        fields and phases the application is currently using.
    """

    emulsion_model: EmulsionModelType = attr.attrib(
        validator=in_(EmulsionModelType))
    solids_model: SolidsModelType = attr.attrib(validator=in_(SolidsModelType))
    hydrodynamic_model: HydrodynamicModelInfo = attr.attrib(
        validator=instance_of(HydrodynamicModelInfo))
class ListTasksRequest(Base):
    project = attrib(default=None, converter=strconv,
                     validator=optional(instance_of(str)))
    name_prefix = attrib(default=None, converter=strconv,
                         validator=optional(instance_of(str)))
    page_size = attrib(default=None, validator=optional(instance_of(int)))
    page_token = attrib(default=None, converter=strconv,
                        validator=optional(instance_of(str)))
    view = attrib(default=None,
                  validator=optional(in_(["MINIMAL", "BASIC", "FULL"])))
def _decorate(cls: StateMirror) -> StateMirror:
    cls = task_type(cls)

    cls.Pattern = RHODES_ATTRIB(default=options[0], validator=in_(options))
    cls.__doc__ = docstring_with_param(
        cls,
        "Pattern",
        IntegrationPattern,
        description="Step Functions integration pattern",
        default=options[0],
    )

    def to_dict(instance) -> Dict:
        """Serialize state as a dictionary."""
        for required in instance._required_fields:
            require_field(instance=instance, required_value=required)

        task = instance._build_task()
        return task.to_dict()

    cls.to_dict = to_dict

    def _build_task(instance) -> Task:
        task_fields = [field.name for field in attr.fields(Task)]
        field_name_blacklist = ("Pattern",)
        resource_name = instance._resource_name.value + instance.Pattern.value

        task_kwargs = {}
        parameters_kwargs = {}

        for field in attr.fields(type(instance)):
            if field.name in field_name_blacklist or field.name.startswith("_"):
                continue

            value = getattr(instance, field.name)
            if value is None:
                continue

            if field.name in task_fields and field.name != "Parameters":
                task_kwargs[field.name] = value
            else:
                parameters_kwargs[field.name] = value

        params = Parameters(**parameters_kwargs)
        return Task(Parameters=params, Resource=resource_name, **task_kwargs)

    cls._build_task = _build_task

    return cls
class Output(Base):
    url = attrib(default=None, converter=strconv,
                 validator=optional(instance_of(str)))
    path = attrib(default=None, converter=strconv,
                  validator=optional(instance_of(str)))
    type = attrib(default="FILE", validator=in_(["FILE", "DIRECTORY"]))
    name = attrib(default=None, converter=strconv,
                  validator=optional(instance_of(str)))
    description = attrib(default=None, converter=strconv,
                         validator=optional(instance_of(str)))
class RgbColorPerception:
    """
    A perceived color.
    """

    red: int = attrib(validator=in_(Range.closed(0, 255)))
    green: int = attrib(validator=in_(Range.closed(0, 255)))
    blue: int = attrib(validator=in_(Range.closed(0, 255)))

    def inverse(self) -> "RgbColorPerception":
        return RgbColorPerception(255 - self.red, 255 - self.green, 255 - self.blue)

    @property
    def hex(self) -> str:
        return f"#{self.red:02x}{self.green:02x}{self.blue:02x}"

    def __repr__(self) -> str:
        """
        We represent colors by hex strings because these are easy to visualize
        using web tools.
        """
        return self.hex
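A small usage sketch, assuming RgbColorPerception is attrs-decorated with auto_attribs so positional construction works.

navy = RgbColorPerception(0, 0, 128)
assert navy.hex == "#000080"
assert navy.inverse().hex == "#ffff7f"
# Values outside 0..255 are rejected at construction by in_(Range.closed(0, 255)).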
class NumericalSeriesPreprocessor(BaseSeriesPreprocessor):
    kind = 'numerical'

    FILLMETHODS = ('min', 'max', 'mean', 'median', 'mode')

    fillval = attrib(default=None, validator=optional(instance_of(Number)))
    fillmethod = attrib(default=None, validator=optional(in_(FILLMETHODS)))
    minval = attrib(default=None, validator=optional(instance_of(Number)))
    maxval = attrib(default=None, validator=optional(instance_of(Number)))
    normalize = attrib(default=True, validator=instance_of(bool))

    def process(self, series):
        if self.fillval is not None:
            series = series.fillna(self.fillval)
        if self.fillmethod is not None:
            series = self._fill_by_method(series)
        if self.minval is not None:
            series = self._min(series)
        if self.maxval is not None:
            series = self._max(series)
        if self.normalize:
            series = self._normalize(series)
        return series.to_frame('VALUE')

    def _fill_by_method(self, series):
        method = self.fillmethod
        if method == 'min':
            fillv = series.min()
        elif method == 'max':
            fillv = series.max()
        elif method == 'mean':
            fillv = series.mean()
        elif method == 'median':
            fillv = series.median()
        elif method == 'mode':
            fillv = series.mode()[0]
        return series.fillna(fillv)

    def _min(self, series):
        return series.map(lambda v: max(self.minval, v), na_action='ignore')

    def _max(self, series):
        return series.map(lambda v: min(self.maxval, v), na_action='ignore')

    def _normalize(self, series):
        smin = series.min()
        smax = series.max()
        return (series - smin) / (smax - smin)
class Configuration:  # pylint: disable=R0903
    """Root configuration."""

    @environ.config
    class FlaskConfig:  # pylint: disable=R0903
        """Flask specific configuration."""

        SECRET_KEY = environ.var(_gen_secret_key())
        DEBUG = environ.bool_var(False)
        TESTING = environ.bool_var(False)
        SQLALCHEMY_DATABASE_URI = environ.var("sqlite://")
        SQLALCHEMY_TRACK_MODIFICATIONS = environ.bool_var(False)

    flask = environ.group(FlaskConfig)

    skip_db_setup = environ.bool_var(False)
    verbosity = environ.var(
        "WARNING",
        validator=in_(["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]))
class Response(object):
    physical_resource_id = attr.ib(validator=instance_of(str))
    stack_id = attr.ib(validator=instance_of(str))
    request_id = attr.ib(validator=instance_of(str))
    logical_resource_id = attr.ib(validator=instance_of(str))
    status = attr.ib(
        converter=lambda x: ('FAILED', 'SUCCESS')[x] if isinstance(x, bool) else x,
        validator=in_(('FAILED', 'SUCCESS')),
    )
    reason = attr.ib(
        validator=instance_of(str),
        default='',
    )
    data = attr.ib(
        validator=instance_of(dict),
        default=attr.Factory(dict),
    )
    no_echo = attr.ib(
        validator=instance_of(bool),
        default=False,
    )

    @classmethod
    def from_request(cls, request, **kwargs):
        kwargs.update({
            'stack_id': request.stack_id,
            'request_id': request.request_id,
            'logical_resource_id': request.logical_resource_id,
        })
        if 'physical_resource_id' not in kwargs and hasattr(
                request, 'physical_resource_id'):
            kwargs['physical_resource_id'] = request.physical_resource_id
        return cls(**kwargs)

    def to_dict(self):
        return {
            k.title().replace('_', ''): v
            for k, v in attr.asdict(self).items()
        }

    @physical_resource_id.validator
    def _physical_resource_id_validator(self, attribute, value):
        if len(value) > 1024:
            raise EventSerializationException(
                'Physical resource ID can be up to 1KB in size')
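A standalone check of the key transform used by Response.to_dict(), which turns snake_case attribute names back into the CamelCase keys used in the response payload.

for snake in ('physical_resource_id', 'logical_resource_id', 'no_echo'):
    print(snake.title().replace('_', ''))
# PhysicalResourceId
# LogicalResourceId
# NoEcho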
class CopyTaskParams(object):
    name = attrib(default="copy-task")
    controller_size = attrib(default=100, convert=int)
    controller_layers = attrib(default=1, convert=int)
    controller_type = attrib(default='lstm-ntm', convert=str,
                             validator=validators.in_(['lstm-ntm', 'ffw-ntm']))
    num_heads = attrib(default=1, convert=int)
    sequence_width = attrib(default=8, convert=int)
    sequence_min_len = attrib(default=1, convert=int)
    sequence_max_len = attrib(default=20, convert=int)
    memory_n = attrib(default=128, convert=int)
    memory_m = attrib(default=20, convert=int)
    num_batches = attrib(default=50000, convert=int)
    batch_size = attrib(default=1, convert=int)
    rmsprop_lr = attrib(default=1e-4, convert=float)
    rmsprop_momentum = attrib(default=0.9, convert=float)
    rmsprop_alpha = attrib(default=0.95, convert=float)
class RequestFactory(UserList):
    url = attr.ib(validator=instance_of(Url))
    method = attr.ib(default='GET', validator=in_(HttpAcceptedTypes.ACCEPTED_METHODS))
    param = attr.ib(default=Param(), validator=instance_of(Param))
    header = attr.ib(default=Header(), validator=instance_of(Header))
    auth = attr.ib(default=Auth(), validator=instance_of(Auth))
    req_data = attr.ib(default=Data(), validator=instance_of(Data))
    cookie = attr.ib(default=Cookie(), validator=instance_of(Cookie))
    zip_type = attr.ib(default=zip_longest_ffill, validator=instance_of(types.FunctionType))
    file_pattern = attr.ib(default=FilePattern(), validator=instance_of(FilePattern))
    mod_response = attr.ib(default=lambda x: x, validator=instance_of(types.FunctionType))
    storage = attr.ib(default=None, validator=instance_of((StorageBase, type(None))))
    verbose = attr.ib(default=False, validator=instance_of(bool))

    @property
    def data(self):
        zipped_request = self.zip_type(self.url, self.param, self.header,
                                       self.auth, self.req_data, self.cookie)
        requests = []
        for url, param, header, auth, data, cookie in zipped_request:
            r = APIRequest(
                method=self.method,
                url=url,
                param=param,
                header=header,
                auth=auth if any(auth) else None,  # None if there isn't auth
                data=data,
                cookie=cookie,
                file_pattern=self.file_pattern,
                mod_response=self.mod_response,
                storage=self.storage,
                verbose=self.verbose,
            )
            requests.append(r)
        return requests