def get(acquisition_id: str = None, dataset_id: str = None, tags: List[str] = None,
        metadata: Dict[str, str] = None) -> Union[Acquisition, List[Acquisition]]:
    """Get an acquisition identified by `acquisition_id` or a list of all the acquisitions

    Keyword Arguments:
        acquisition_id {str} -- id of the acquisition (default: {None})
        dataset_id {str} -- id of the dataset to which the acquisitions belong to (default: {None})
        tags {List[str]} -- tags of the acquisitions (default: {None}, meaning no tag filter)
        metadata {Dict[str, str]} -- metadata of the acquisitions (default: {None}, meaning no metadata filter)

    Returns:
        Union[Acquisition, List[Acquisition]] -- acquisition or list of acquisitions
    """
    # Mutable defaults ([] / {}) are shared across calls in Python; use a
    # None sentinel and normalize here instead.
    tags = [] if tags is None else tags
    metadata = {} if metadata is None else metadata

    if acquisition_id is None:
        # Metadata filters are sent to the API as "metadata.<key>" query params.
        processed_metadata = {f"metadata.{k}": v for k, v in metadata.items()}
        acquisitions = _api_calls.get(
            _ENDPOINT,
            params={"datasetId": dataset_id, "tags": tags, **processed_metadata}
        ).json(object_hook=Acquisition.from_json)
        # Refresh the local cache with every acquisition returned.
        for acquisition in acquisitions:
            cache._cache_data("acquisitions", acquisition.id, acquisition,
                              Acquisition.to_protobuf)
        return acquisitions
    else:
        # Single acquisition: prefer the cache, fall back to the API on a miss.
        try:
            acquisition = cache._get_cached_data("acquisitions", acquisition_id,
                                                 Acquisition.from_protobuf)
        except FileNotFoundError:
            acquisition = _api_calls.get(_ENDPOINT + acquisition_id).json(
                object_hook=Acquisition.from_json)
            cache._cache_data("acquisitions", acquisition_id, acquisition,
                              Acquisition.to_protobuf)
        return acquisition
def get(organization_id: str = None) -> Union[Organization, List[Organization]]:
    """Get an organization identified by `organization_id` or a list of all organizations

    Keyword Arguments:
        organization_id {str} -- id of organization (default: {None})

    Returns:
        Union[Organization, List[Organization]] -- organization or list of organizations
    """
    if organization_id is not None:
        # Single organization: try the local cache first, fall back to the API.
        try:
            return cache._get_cached_data("organizations", organization_id,
                                          Organization.from_protobuf)
        except FileNotFoundError:
            organization = _api_calls.get(_ENDPOINT + organization_id).json(
                object_hook=Organization.from_json)
            cache._cache_data("organizations", organization_id, organization,
                              Organization.to_protobuf)
            return organization

    # No id supplied: fetch every organization and refresh the cache.
    organizations = _api_calls.get(_ENDPOINT).json(
        object_hook=Organization.from_json)
    for org in organizations:
        cache._cache_data("organizations", org.id, org, Organization.to_protobuf)
    return organizations
def get(tags: List[str] = None, metadata: Dict[str, str] = None) -> SampleList:
    """Get all the timeseries samples that belong to this acquisition

    Keyword Arguments:
        tags {List[str]} -- tags of the timeseries samples (default: {None}, meaning no tag filter)
        metadata {Dict[str, str]} -- metadata of the timeseries samples (default: {None}, meaning no metadata filter)

    Returns:
        SampleList -- list of timeseries samples
    """
    # Mutable defaults ([] / {}) are shared across calls in Python; use a
    # None sentinel and normalize here instead.
    tags = [] if tags is None else tags
    metadata = {} if metadata is None else metadata

    if len(tags) > 0 or len(metadata) > 0:
        # Filtered queries go straight to the API and bypass the cache.
        processed_metadata = {f"metadata.{k}": v for k, v in metadata.items()}
        samples = _api_calls.get(
            Inner._TIMESERIES_SAMPLE_ENDPOINT,
            params={"tags": tags, **processed_metadata}
        ).json(object_hook=TimeseriesSample.from_json)
        samples.sort(key=lambda sample: sample.timestamp)
        return SampleList(samples)

    # Unfiltered: use the cached sample list when present.
    try:
        samples = cache._get_cached_data(
            "samples/{}/".format(self.id), "timeseries", SampleList.from_protobuf)
    except FileNotFoundError:
        samples = _api_calls.get(Inner._TIMESERIES_SAMPLE_ENDPOINT).json(
            object_hook=TimeseriesSample.from_json)
        # Samples are kept in chronological order.
        samples.sort(key=lambda sample: sample.timestamp)
        samples = SampleList(samples)
        cache._cache_data("samples/{}/".format(self.id), "timeseries",
                          samples, SampleList.to_protobuf)
    return samples
def count() -> int:
    """Get the number of datasets

    Returns:
        int -- number of datasets
    """
    # Use the dedicated count endpoint instead of fetching all datasets.
    response = _api_calls.get(_ENDPOINT + "count")
    return response.json()
def count() -> int:
    """Get the number of acquisitions on this dataset

    Returns:
        int -- number of acquisitions
    """
    # Use the dedicated count endpoint instead of fetching all acquisitions.
    response = _api_calls.get(Inner._ACQUISITIONS_ENDPOINT + "count")
    return response.json()
def count() -> int:
    """Get number of acquisitions

    Returns:
        int -- number of acquisitions
    """
    # Use the dedicated count endpoint instead of fetching all acquisitions.
    response = _api_calls.get(_ENDPOINT + "count")
    return response.json()
def count() -> int:
    """Get the number of organizations

    Returns:
        int -- number of organizations
    """
    # Use the dedicated count endpoint instead of fetching all organizations.
    response = _api_calls.get(_ENDPOINT + "count")
    return response.json()
def count() -> int:
    """Get the number of video samples on this acquisition

    Returns:
        int -- number of video samples
    """
    # Use the dedicated count endpoint instead of fetching all samples.
    response = _api_calls.get(Inner._VIDEO_SAMPLE_ENDPOINT + "count")
    return response.json()
def get(sample_id: str = None, tags: List[str] = None,
        metadata: Dict[str, str] = None) -> Union[VideoSample, SampleList]:
    """Get all the video samples that belong to this acquisition

    Keyword Arguments:
        sample_id {str} -- id of the sample (default: {None})
        tags {List[str]} -- tags of image samples (default: {None}, meaning no tag filter)
        metadata {Dict[str, str]} -- metadata of the image samples (default: {None}, meaning no metadata filter)

    Returns:
        Union[VideoSample, SampleList] -- video sample or list of video samples
    """
    # Mutable defaults ([] / {}) are shared across calls in Python; use a
    # None sentinel and normalize here instead.
    tags = [] if tags is None else tags
    metadata = {} if metadata is None else metadata

    if sample_id is None:
        if len(tags) > 0 or len(metadata) > 0:
            # Filtered queries go straight to the API and are not cached.
            processed_metadata = {f"metadata.{k}": v for k, v in metadata.items()}
            samples = _api_calls.get(
                Inner._VIDEO_SAMPLE_ENDPOINT,
                params={"tags": tags, **processed_metadata}
            ).json(object_hook=VideoSample.from_json)
            return SampleList(samples)

        # Unfiltered list: fetch everything and refresh the per-sample cache.
        samples = _api_calls.get(Inner._VIDEO_SAMPLE_ENDPOINT).json(
            object_hook=VideoSample.from_json)
        for sample in samples:
            cache._cache_data("samples/{}/videos/".format(self.id), sample.id,
                              sample, VideoSample.to_protobuf)
        return SampleList(samples)
    else:
        # Single sample: prefer the cache, fall back to the API on a miss.
        try:
            sample = cache._get_cached_data(
                "samples/{}/videos/".format(self.id), sample_id,
                VideoSample.from_protobuf)
        except FileNotFoundError:
            sample = _api_calls.get(Inner._VIDEO_SAMPLE_ENDPOINT + sample_id).json(
                object_hook=VideoSample.from_json)
            cache._cache_data("samples/{}/videos/".format(self.id), sample_id,
                              sample, VideoSample.to_protobuf)
        return sample
def count() -> int:
    """Get the number of timeseries samples on this acquisition

    Returns:
        int -- number of timeseries samples
    """
    # Use the dedicated count endpoint instead of fetching all samples.
    response = _api_calls.get(Inner._TIMESERIES_SAMPLE_ENDPOINT + "count")
    return response.json()
def count() -> int:
    """Get the number of annotations on this acquisition

    Returns:
        int -- number of annotations
    """
    # Use the dedicated count endpoint instead of fetching all annotations.
    response = _api_calls.get(Inner._ANNOTATIONS_ENDPOINT + "count")
    return response.json()
def usage() -> Dict[str, List[str]]:
    """Get a map identifying which device(s) and sensor(s) were used to
    acquire time series samples

    Returns:
        Dict[str, List[str]] -- map of (key, value) pairs, with key being id of
        device and the value a list of sensor ids which were used to capture samples
    """
    # Delegates entirely to the "usage" sub-endpoint; no caching involved.
    response = _api_calls.get(Inner._DEVICES_ENDPOINT + "usage")
    return response.json()
def get(annotation_id: str = None, tags: List[str] = None,
        metadata: Dict[str, str] = None) -> AnnotationList:
    """Get all the annotations that belong to this acquisition

    Keyword Arguments:
        annotation_id {str} -- id of the annotation (default: {None})
        tags {List[str]} -- tags of the annotation (default: {None}, meaning no tag filter)
        metadata {Dict[str, str]} -- metadata of the annotation (default: {None}, meaning no metadata filter)

    Returns:
        AnnotationList -- annotation or list of annotations
    """
    # Mutable defaults ([] / {}) are shared across calls in Python; use a
    # None sentinel and normalize here instead.
    tags = [] if tags is None else tags
    metadata = {} if metadata is None else metadata

    if annotation_id is None:
        if len(tags) > 0 or len(metadata) > 0:
            # Filtered queries go straight to the API and are not cached.
            processed_metadata = {f"metadata.{k}": v for k, v in metadata.items()}
            annotations = _api_calls.get(
                Inner._ANNOTATIONS_ENDPOINT,
                params={"tags": tags, **processed_metadata}
            ).json(object_hook=Annotation.from_json)
            return AnnotationList(annotations)

        # Unfiltered list: fetch everything and refresh the per-annotation cache.
        annotations = _api_calls.get(Inner._ANNOTATIONS_ENDPOINT).json(
            object_hook=Annotation.from_json)
        for annotation in annotations:
            cache._cache_data("annotations", annotation.id, annotation,
                              Annotation.to_protobuf)
        return AnnotationList(annotations)
    else:
        # Single annotation: prefer the cache, fall back to the API on a miss.
        try:
            annotation = cache._get_cached_data(
                "annotations", annotation_id, Annotation.from_protobuf)
        except FileNotFoundError:
            annotation = _api_calls.get(
                Inner._ANNOTATIONS_ENDPOINT + annotation_id).json(
                object_hook=Annotation.from_json)
            cache._cache_data("annotations", annotation_id, annotation,
                              Annotation.to_protobuf)
        return annotation
def get(dataset_id: str = None, tags: List[str] = None) -> Union[Dataset, List[Dataset]]:
    """Get a dataset identified by `dataset_id` or a list of all the datasets

    Keyword Arguments:
        dataset_id {str} -- id of the dataset (default: {None})
        tags {List[str]} -- tags of the datasets (default: {None}, meaning no tag filter)

    Returns:
        Union[Dataset, List[Dataset]] -- dataset or list of datasets
    """
    # A mutable default ([]) is shared across calls in Python; use a None
    # sentinel and normalize here instead.
    tags = [] if tags is None else tags

    if dataset_id is None:
        datasets = _api_calls.get(_ENDPOINT, params={"tags": tags}).json(
            object_hook=Dataset.from_json)
        # Refresh the local cache with every dataset returned.
        for dataset in datasets:
            cache._cache_data("datasets", dataset.id, dataset, Dataset.to_protobuf)
        return datasets
    else:
        # Single dataset: prefer the cache, fall back to the API on a miss.
        try:
            dataset = cache._get_cached_data("datasets", dataset_id,
                                             Dataset.from_protobuf)
        except FileNotFoundError:
            dataset = _api_calls.get(_ENDPOINT + dataset_id).json(
                object_hook=Dataset.from_json)
            cache._cache_data("datasets", dataset_id, dataset, Dataset.to_protobuf)
        return dataset
def get(user_id: str = None) -> Union[User, List[User]]:
    """Get an user identified by `user_id` or a list of all users

    Keyword Arguments:
        user_id {str} -- id of the user (default: {None})

    Returns:
        Union[User, List[User]] -- user or list of users
    """
    if user_id is not None:
        # Single user: try the local cache first, fall back to the API.
        try:
            return cache._get_cached_data("users", user_id, User.from_protobuf)
        except FileNotFoundError:
            user = _api_calls.get(_ENDPOINT + user_id).json(
                object_hook=User.from_json)
            cache._cache_data("users", user_id, user, User.to_protobuf)
            return user

    # No id supplied: fetch every user and refresh the cache.
    users = _api_calls.get(_ENDPOINT).json(object_hook=User.from_json)
    for user in users:
        cache._cache_data("users", user.id, user, User.to_protobuf)
    return users
def raw(sample_id: str) -> ByteString:
    """Get actual image from image sample identified by `sample_id` on this acquisition

    Arguments:
        sample_id {str} -- id of the sample

    Returns:
        ByteString -- bytes of the image
    """
    try:
        # Cache hit: return the previously stored raw data for this sample.
        image = cache._get_cached_data(
            "samples/{}/images/raw/".format(self.id), sample_id)
    except FileNotFoundError:
        # Cache miss: download the raw image from the API.
        image = _api_calls.get(Inner._IMAGE_SAMPLE_ENDPOINT + sample_id + "/raw")
        # Derive a file extension from the MIME type (e.g. "image/png" -> ".png").
        file_ext = "." + image.headers["Content-Type"].split(
            "/")[-1]
        # NOTE(review): the data is cached under `sample_id + file_ext` but looked
        # up above under plain `sample_id` — unless the cache layer matches keys by
        # prefix, this entry will never be found again; confirm against
        # cache._get_cached_data.
        # NOTE(review): on a cache miss this returns the full response object (not
        # `image.content`), while a cache hit returns the cached data — callers may
        # observe two different types despite the ByteString annotation; verify.
        cache._cache_data("samples/{}/images/raw/".format(self.id),
                          sample_id + file_ext, image.content)
    return image
def raw(sample_id: str) -> ByteString:
    """Get actual video from video sample identified by `sample_id` on this acquisition

    Arguments:
        sample_id {str} -- id of the sample

    Returns:
        ByteString -- bytes of the video
    """
    try:
        # Cache hit: return the previously stored raw data for this sample.
        video = cache._get_cached_data(
            "samples/{}/videos/raw/".format(self.id), sample_id)
    except FileNotFoundError:
        # Cache miss: download the raw video from the API.
        video = _api_calls.get(Inner._VIDEO_SAMPLE_ENDPOINT + sample_id + "/raw")
        # Derive a file extension from the MIME type (e.g. "video/mp4" -> ".mp4").
        file_ext = "." + video.headers["Content-Type"].split(
            "/")[-1]
        # NOTE(review): the data is cached under `sample_id + file_ext` but looked
        # up above under plain `sample_id` — unless the cache layer matches keys by
        # prefix, this entry will never be found again; confirm against
        # cache._get_cached_data.
        # NOTE(review): on a cache miss this returns the full response object (not
        # `video.content`), while a cache hit returns the cached data — callers may
        # observe two different types despite the ByteString annotation; verify.
        cache._cache_data("samples/{}/videos/raw/".format(self.id),
                          sample_id + file_ext, video.content)
    return video