class FewSamplesInLabel(DatasetValidationError):
    """Validation warning: a label has too few samples in the dataset."""

    label_name = field()
    count = field()

    def __str__(self):
        # Parenthesized implicit concatenation instead of backslash continuation.
        message = (
            f"The number of samples in the label '{self.label_name}'"
            f" might be too low. Found '{self.count}' samples."
        )
        return message
class UndefinedAttribute(DatasetItemValidationError):
    """Validation error: an item carries an attribute not declared in metadata."""

    label_name = field()
    attr_name = field()

    def __str__(self):
        return (
            f"Item has the attribute '{self.attr_name}' for the "
            f"label '{self.label_name}' which is not defined in metadata."
        )
class TrackedWebsocket:
    """The state that is tracked for a websocket within the state manager."""

    # The underlying websocket connection being tracked.
    websocket: WebSocketServerProtocol
    # When the websocket registered; defaulted via the @registration_date.default hook below.
    registration_date: DateTime = field()
    # Last time the websocket showed activity; refreshed by mark_active().
    last_active_date: DateTime = field()
    # Current activity classification; starts out ACTIVE.
    activity_state: ActivityState = ActivityState.ACTIVE
    # Ids of players associated with this websocket (insertion-ordered, no duplicates).
    player_ids: OrderedSet[str] = field(factory=OrderedSet)

    # noinspection PyUnresolvedReferences
    @registration_date.default
    def _default_registration_date(self) -> DateTime:
        return pendulum.now()  # not using field(factory=pendulum.now) to support mocking in unit tests

    # noinspection PyUnresolvedReferences
    @last_active_date.default
    def _default_last_active_date(self) -> DateTime:
        return pendulum.now()  # not using field(factory=pendulum.now) to support mocking in unit tests

    def mark_active(self) -> None:
        """Mark the websocket as active."""
        # Activity also refreshes the last-active timestamp; idle/inactive do not.
        self.last_active_date = pendulum.now()
        self.activity_state = ActivityState.ACTIVE

    def mark_idle(self) -> None:
        """Mark the websocket as idle."""
        self.activity_state = ActivityState.IDLE

    def mark_inactive(self) -> None:
        """Mark the websocket as inactive."""
        self.activity_state = ActivityState.INACTIVE
class Action:
    """Action of Attachment"""

    name: str
    text: str
    # Raw strings are coerced into the corresponding enum by the converters.
    type: str | ActionType = field(converter=ActionType)
    style: str | ActionStyle | None = field(
        converter=call_or_none(ActionStyle),  # type: ignore
        default=None,
    )
    data_source: str | ActionDataSource | None = field(
        converter=call_or_none(ActionDataSource),  # type: ignore
        default=None,
    )
    id: str | None = None
    confirm: Confirmation | None = None
    min_query_length: int | None = None
    options: list[OptionField] | None = None
    option_groups: list[OptionFieldGroup] | None = None
    selected_options: list[OptionField] | None = None
    value: str | None = None
    url: str | None = None

    def __attrs_post_init__(self):
        # min_query_length is only meaningful for external data sources.
        if self.data_source != ActionDataSource.external:
            self.min_query_length = None
        # options and option_groups are mutually exclusive; when both are
        # supplied, option_groups wins and options is dropped.
        if self.options is not None and self.option_groups is not None:
            self.options = None
class ImbalancedAttribute(DatasetValidationError):
    """Validation warning: an attribute's value distribution is skewed."""

    label_name = field()
    attr_name = field()

    def __str__(self):
        return (
            "There is an imbalance in the distribution of attribute"
            f" '{self.attr_name}' for the label '{self.label_name}'."
        )
class ImbalancedDistInLabel(DatasetValidationError):
    """Validation warning: a property's values are unevenly distributed for a label."""

    label_name = field()
    prop = field()

    def __str__(self):
        return (
            f"Values of '{self.prop}' are not evenly "
            f"distributed for '{self.label_name}' label."
        )
class MissingAttribute(DatasetItemValidationError):
    """Validation error: an item lacks an attribute required by its label."""

    label_name = field()
    attr_name = field()

    def __str__(self):
        return (
            f"Item needs the attribute '{self.attr_name}' "
            f"for the label '{self.label_name}'."
        )
class InvalidValue(DatasetItemValidationError):
    """Validation error: an annotation property holds an inf or NaN value."""

    ann_id = field()
    prop = field()

    def __str__(self):
        return (
            f"Annotation '{self.ann_id}' in "
            "the item has an inf or a NaN value of "
            f"'{self.prop}'."
        )
class AttributeDefinedButNotFound(DatasetValidationError):
    """Validation warning: a metadata-declared attribute never occurs in the data."""

    label_name = field()
    attr_name = field()

    def __str__(self):
        return (
            f"The attribute '{self.attr_name}' for the label "
            f"'{self.label_name}' is defined in metadata, but not "
            "found in the dataset."
        )
class ValidatedSetter2:
    # Demonstrates the attrs ``on_setattr`` hook variants.
    a: int
    b: str = attrs.field(on_setattr=attrs.setters.NO_OP)  # plain assignment, no hooks run
    c: bool = attrs.field(on_setattr=attrs.setters.frozen)  # assignment after init raises
    d: int = attrs.field(
        on_setattr=[attrs.setters.convert, attrs.setters.validate])  # list form is piped in order
    e: bool = attrs.field(on_setattr=attrs.setters.pipe(
        attrs.setters.convert, attrs.setters.validate))  # explicit pipe, same effect as d
# NOTE(review): this looks like an IDE inspection fixture — the inline
# <error descr="..."> markers encode expected highlighting and are not valid
# Python; the code must stay byte-identical for the fixture to keep working.
class E2:
    a: int = attrs.field(default=1)
    b: int = attrs.field(default=attrs.Factory(int))
    c: int = attrs.field(factory=int)
    d: int = attrs.field<error descr="Cannot specify both 'default' and 'factory'">(default=1, factory=int)</error>
    e: int = attrs.field<error descr="Cannot specify both 'default' and 'factory'">(default=attrs.Factory(int), factory=int)</error>
    f: int = attrs.field(default=attrs.NOTHING, factory=int)
    g: int = attrs.field(default=1, factory=None)
class NoMatchingAnnError(DatasetMergeError):
    """Merge error: an annotation has no counterpart in the other sources."""

    item_id = field()
    ann = field()

    def __str__(self):
        # f-string form of the original %-format; output is identical.
        return (
            f"Item {self.item_id}: can't find matching annotation "
            f"in sources {self.sources}, annotation is {self.ann}"
        )
class FailedLabelVotingError(DatasetMergeError):
    """Merge error: the sources could not agree on a label during voting.

    ``ann`` is optional; when present it identifies the annotation whose
    label vote failed.
    """

    item_id = field()
    votes = field()
    ann = field(default=None)

    def __str__(self):
        # Bug fix: the optional fragment needs a leading space, otherwise the
        # message reads "label voting failedfor ann ...".
        ann_part = ' for ann %s' % self.ann if self.ann else ''
        return "Item %s: label voting failed%s, votes %s, sources %s" % \
            (self.item_id, ann_part, self.votes, self.sources)
class AnnotationsTooCloseError(DatasetQualityError):
    """Quality error: two annotations on an item are suspiciously close."""

    item_id = field()
    a = field()
    b = field()
    distance = field()

    def __str__(self):
        # f-string form of the original %-format; output is identical.
        return (
            f"Item {self.item_id}: annotations are too close: "
            f"{self.a}, {self.b}, distance = {self.distance}"
        )
class ExportContext:
    # Progress reporter for the export; passing None substitutes a silent
    # NullProgressReporter (default_if_none invokes the factory at init time).
    progress_reporter: ProgressReporter = field(
        default=None,
        converter=attr.converters.default_if_none(
            factory=NullProgressReporter))
    # Policy deciding how export errors are handled; passing None substitutes
    # the fail-fast FailingExportErrorPolicy.
    error_policy: ExportErrorPolicy = field(
        default=None,
        converter=attr.converters.default_if_none(
            factory=FailingExportErrorPolicy))
class NegativeLength(DatasetItemValidationError):
    """Validation error: a length-like annotation property is not positive."""

    ann_id = field()
    prop = field()
    val = field()

    def __str__(self):
        return (
            f"Annotation '{self.ann_id}' in "
            "the item should have a positive value of "
            f"'{self.prop}' but got '{self.val}'."
        )
class OnlyOneAttributeValue(DatasetValidationError):
    """Validation warning: an attribute takes a single value across the dataset."""

    label_name = field()
    attr_name = field()
    value = field()

    def __str__(self):
        return (
            "The dataset has the only attribute value "
            f"'{self.value}' for the attribute '{self.attr_name}' for the "
            f"label '{self.label_name}'."
        )
class DatasetItemValidationError(DatasetValidationError):
    """Base class for validation errors tied to a specific dataset item."""

    item_id = field()
    subset = field()

    def to_dict(self):
        """Serialize the error, adding the item identification on top of the base fields."""
        serialized = super().to_dict()
        serialized.update(item_id=self.item_id, subset=self.subset)
        return serialized
class WrongGroupError(DatasetQualityError):
    """Quality error: an annotation group's labels differ from the expected set."""

    item_id = field()
    found = field(converter=set)
    expected = field(converter=set)
    group = field(converter=list)

    def __str__(self):
        # f-string form of the original %-format; output is identical.
        return (
            f"Item {self.item_id}: annotation group has wrong labels: "
            f"found {self.found}, expected {self.expected}, group {self.group}"
        )
class FailedAttrVotingError(DatasetMergeError):
    """Merge error: the sources could not agree on an attribute value during voting."""

    item_id = field()
    attr = field()
    votes = field()
    ann = field()

    def __str__(self):
        # f-string form of the original %-format; output is identical.
        return (
            f"Item {self.item_id}: attribute voting failed "
            f"for ann {self.ann}, votes {self.votes}, sources {self.sources}"
        )
class FewSamplesInAttribute(DatasetValidationError):
    """Validation warning: an attribute/value pair has too few samples."""

    label_name = field()
    attr_name = field()
    attr_value = field()
    count = field()

    def __str__(self):
        return (
            "The number of samples for attribute = value "
            f"'{self.attr_name} = {self.attr_value}' for the label "
            f"'{self.label_name}' might be too low. "
            f"Found '{self.count}' samples."
        )
class TaskQueue:
    """A queue of asynchronous tasks to be executed."""

    # Pending (serialized message, destination websocket) pairs.
    messages: List[Tuple[str, WebSocketServerProtocol]] = field(factory=list)
    # Websockets scheduled to be disconnected after messages are sent.
    disconnects: Set[WebSocketServerProtocol] = field(factory=OrderedSet)

    def is_empty(self) -> bool:
        """Whether there are no pending messages and no pending disconnects."""
        return len(self.messages) == 0 and len(self.disconnects) == 0

    def clear(self) -> None:
        """Discard all pending messages and disconnects without executing them."""
        del self.messages[:]  # pylint: disable=unsupported-delete-operation:
        self.disconnects.clear()

    def message(
        self,
        message: Message,
        websockets: Optional[List[WebSocketServerProtocol]] = None,
        players: Optional[List[TrackedPlayer]] = None,
    ) -> None:
        """Enqueue a task to send a message to one or more destination websockets."""
        # Destinations come from the explicit websocket list plus each
        # player's websocket (players without one are skipped); the ordered
        # set removes duplicates while preserving insertion order.
        destinations = OrderedSet(websockets) if websockets else OrderedSet()
        destinations.update(
            [player.websocket for player in players if player.websocket]
            if players else [])
        # The message is serialized once and fanned out to every destination.
        self.messages.extend([(message.to_json(), destination)
                              for destination in destinations])

    def disconnect(self, websocket: Optional[WebSocketServerProtocol]) -> None:
        """Enqueue a task to disconnect a websocket."""
        if websocket:
            self.disconnects.add(websocket)

    async def execute(self) -> None:
        """Execute all tasks in the queue, sending messages first and then disconnecting websockets."""
        # It seems like there could be a race condition here. The messages need to be sent in order,
        # and that does seem to work. However, if things happen really fast (i.e. a client receives
        # a message and triggers another event before we finish sending all of these messages) then it
        # seems possible that the messages for that second event could be intermingled with these. I
        # guess it's unlikely? But I'm leaving this note here in case I ever have to debug some strange
        # behavior with messages being received out-of-order.
        tasks = [
            send(websocket, message) for message, websocket in self.messages
        ]
        if tasks:
            # Awaiting each send individually preserves ordering deliberately.
            for task in tasks:
                await asyncio.wait([
                    task
                ])  # if we do them all at once, they can get sent out of order
        tasks = [close(websocket) for websocket in self.disconnects]
        if tasks:
            await asyncio.wait(
                tasks
            )  # TODO: not entirely sure how we handle errors that happen here
class FarFromLabelMean(DatasetItemValidationError):
    """Validation warning: a property value is an outlier versus the label mean."""

    label_name = field()
    ann_id = field()
    prop = field()
    mean = field()
    val = field()

    def __str__(self):
        return (
            f"Annotation '{self.ann_id}' in "
            f"the item has a value of '{self.prop}' that "
            "is too far from the label average. (mean of "
            f"'{self.label_name}' label: {self.mean}, got '{self.val}')."
        )
class AttrsDemoClass(FromDictMixin):
    """Demo attrs class exercising converters, validators, and defaults."""

    w: int
    x: int = field(converter=int)
    y: float = field(converter=float, default=2.1)
    z: str = field(converter=str, default="z")
    # Bug fix: a plain list default is a single object shared by every
    # instance (mutating one instance's list mutates them all); a factory
    # builds a fresh list per instance with the same default contents.
    liststr: List[str] = field(factory=lambda: ["qwerty", "asdf"],
                               validator=iter_validator(list, str))
    # The converter produces a new array from the default at each init, so
    # the default list itself is never exposed to callers.
    array: np.ndarray = field(
        default=[1.0, 2.0],
        converter=floris_array_converter,
        # validator=iter_validator(np.ndarray, floris_float_type)
    )
def _make_anno_func(self, id_field=False, mixin=()):
    """Build an attrs class for this annotation type.

    Required properties are declared first (mandatory fields), then the
    optional ones with a ``None`` default, since attrs forbids mandatory
    fields after defaulted ones.
    """
    attributes = {}
    if id_field:
        attributes["id"] = attrs.field()
    # Single ordered pass: required props first, then optional ones.
    for want_required in (True, False):
        for prop, prop_name in zip(self._props, self._prop_names):
            if (prop in self._required_props) == want_required:
                attributes[prop_name] = (
                    attrs.field() if want_required else attrs.field(default=None)
                )
    return attrs.make_class(self.name, attributes, bases=mixin)
class _AddObjectsFromFeature:
    """Converts one TFDS example feature into Bbox annotations on a dataset item."""

    # Name of the TFDS feature holding the objects.
    feature_name: str
    # Member of that feature holding normalized bounding boxes.
    bbox_member: str
    # Optional member holding per-object labels.
    label_member: Optional[str] = field(default=None, kw_only=True)
    # Extra members mapped onto annotation attributes; bare names are wrapped
    # into _AttributeMemberMapping by the converter.
    attribute_members: Tuple[_AttributeMemberMapping, ...] = field(
        default=(), kw_only=True,
        converter=lambda values: tuple(value if isinstance(
            value, _AttributeMemberMapping) else _AttributeMemberMapping(value)
            for value in values))

    def __call__(
        self,
        tfds_example: Any,
        item: DatasetItem,
        state: namespace,
    ) -> None:
        tfds_objects = tfds_example[self.feature_name]
        tfds_bboxes = tfds_objects[self.bbox_member]
        num_objects = tfds_bboxes.shape[0]

        tfds_labels = None
        if self.label_member is not None:
            tfds_labels = tfds_objects[self.label_member]
            # Labels and attribute members must be parallel to the bbox tensor.
            assert tfds_labels.shape[0] == num_objects

        for am_mapping in self.attribute_members:
            assert tfds_objects[am_mapping.member_name].shape[0] == num_objects

        for i in range(num_objects):
            # TFDS boxes are normalized (ymin, xmin, ymax, xmax).
            norm_ymin, norm_xmin, norm_ymax, norm_xmax = tfds_bboxes[i].numpy()

            # Scale to pixels; image.size[0]/[1] appear to be (height, width)
            # given y is scaled by size[0] — TODO confirm against Image docs.
            new_bbox = Bbox(
                x=norm_xmin * item.image.size[1],
                y=norm_ymin * item.image.size[0],
                w=(norm_xmax - norm_xmin) * item.image.size[1],
                h=(norm_ymax - norm_ymin) * item.image.size[0],
            )

            if tfds_labels is not None:
                new_bbox.label = tfds_labels[i].numpy()

            for am_mapping in self.attribute_members:
                attr_value = tfds_objects[am_mapping.member_name][i].numpy()
                # Optional per-attribute conversion hook gets the shared state.
                if am_mapping.value_converter:
                    attr_value = am_mapping.value_converter(attr_value, state)
                new_bbox.attributes[am_mapping.attribute_name] = attr_value

            item.annotations.append(new_bbox)
# NOTE(review): looks like an inspection/demo fixture exercising attrs
# default/factory combinations (note the deliberate attr/attrs mix on `b`
# and the odd factory=None on `i`); code kept byte-identical.
class B:
    a: int = attrs.field(default=1)
    b: int = attrs.field(default=attr.Factory(int))
    c: int = attrs.field()
    d: int = attrs.field(init=False)
    e: int = attrs.field(init=False)
    f: int = attrs.field(init=False)
    g: int = attrs.field(default=attrs.NOTHING)
    h: int = attrs.field(factory=int)
    i: int = attrs.field(factory=None)
class Portal(object):
    """A map portal: a point on the map linking to a destination."""

    _id: int = field(default=0)
    name: str = ""
    _type: int = field(default=0)
    destination: int = 0
    destination_label: str = ""
    x: int = 0
    y: int = 0

    def __post_init__(self):
        # Cache the portal location as a point object built from x/y.
        self.point = TagPoint(self.x, self.y)

    def __str__(self):
        # Bug fix: the original f-string interpolated the builtin ``id``
        # function (printing "<built-in function id>") instead of self._id.
        return f"{self._id} @ {self.point} -> {self.destination}"
class TestWriteVectors:
    """Test and write vectors for a specific share."""

    test_vectors: Sequence[TestVector] = field(factory=list)
    write_vectors: Sequence[WriteVector] = field(factory=list)
    new_length: Optional[int] = None

    def asdict(self) -> dict:
        """Return dictionary suitable for sending over CBOR."""
        serialized = asdict(self)
        # Rename the attribute keys to their wire names, preserving order.
        for wire_key, attr_key in (
            ("test", "test_vectors"),
            ("write", "write_vectors"),
            ("new-length", "new_length"),
        ):
            serialized[wire_key] = serialized.pop(attr_key)
        return serialized
class MemoryWormholeServer(object):
    """
    A factory for in-memory wormholes.

    :ivar _apps: Wormhole state arranged by the application id and relay URL
        it belongs to.

    :ivar _waiters: Observers waiting for a wormhole to be created for a
        specific application id and relay URL combination.
    """

    _apps: dict[ApplicationKey, _WormholeApp] = field(default=Factory(dict))
    _waiters: dict[ApplicationKey, Deferred] = field(default=Factory(dict))

    def create(
        self,
        appid,
        relay_url,
        reactor,
        # NOTE(review): mutable default ``versions={}`` — it is never mutated
        # in this method, but confirm callers before relying on that.
        versions={},
        delegate=None,
        journal=None,
        tor=None,
        timing=None,
        stderr=stderr,
        _eventual_queue=None,
        _enable_dilate=False,
    ):
        """
        Create a wormhole.  It will be able to connect to other wormholes created
        by this instance (and constrained by the normal appid/relay_url rules).

        :raises ValueError: if Tor or dilation is requested — neither is
            supported by the in-memory implementation.
        """
        if tor is not None:
            raise ValueError("Cannot deal with Tor right now.")
        if _enable_dilate:
            raise ValueError("Cannot deal with dilation right now.")

        # Key order is (relay_url, appid) — must match the keys used by
        # whoever populates _waiters.
        key = (relay_url, appid)
        wormhole = _MemoryWormhole(self._view(key))
        # Wake up any observer that was waiting for a wormhole on this key.
        if key in self._waiters:
            self._waiters.pop(key).callback(wormhole)
        return wormhole

    def _view(self, key: ApplicationKey) -> _WormholeServerView:
        """
        Create a view onto this server's state that is limited by a certain
        appid/relay_url pair.
        """
        return _WormholeServerView(self, key)