def process_measure_for_metric(self, metric):
    """Yield the unprocessed measures stored for one metric, then delete them.

    Downloads every measure object for *metric* from the measures bucket,
    deserializes them into a single measures array and yields it once.
    When the consumer resumes the generator, the source objects are bulk
    deleted — so deletion only happens after the caller has processed the
    yielded data.

    :param metric: the metric object (only ``metric.id`` is used).
    """
    sack = self.sack_for_metric(metric.id)
    files = self._list_measure_files_for_metric_id(sack, metric.id)
    # Accumulate the deserialized chunks and concatenate once at the end:
    # the original numpy.append() per file copies the whole growing array
    # on every iteration, which is O(n^2) in total data size.
    chunks = [self._make_measures_array()]
    for f in files:
        response = self.s3.get_object(Bucket=self._bucket_name_measures,
                                      Key=f)
        chunks.append(
            self._unserialize_measures(f, response['Body'].read()))
    yield numpy.concatenate(chunks)
    # Now clean objects
    s3.bulk_delete(self.s3, self._bucket_name_measures, files)
def process_measure_for_metrics(self, metric_ids):
    """Yield the unprocessed measures for several metrics, then delete them.

    Builds a mapping of metric id -> measures array covering every measure
    object found for the given metric ids, yields that mapping once, and —
    after the consumer resumes the generator — bulk deletes the source
    objects from the measures bucket.

    :param metric_ids: iterable of metric ids to process.
    """
    per_metric = defaultdict(self._make_measures_array)
    handled_keys = []
    for mid in metric_ids:
        sack = self.sack_for_metric(mid)
        keys = self._list_measure_files_for_metric(sack, mid)
        handled_keys += keys
        for key in keys:
            payload = self.s3.get_object(
                Bucket=self._bucket_name_measures, Key=key)['Body'].read()
            per_metric[mid] = numpy.concatenate(
                (per_metric[mid], self._unserialize_measures(key, payload)))
    yield per_metric
    # Now clean objects
    s3.bulk_delete(self.s3, self._bucket_name_measures, handled_keys)
def _delete_metric(self, metric):
    """Delete every stored object belonging to *metric* from the bucket.

    Pages through the bucket with list_objects_v2 using continuation
    tokens, bulk-deleting each page of keys under the metric's prefix.

    :param metric: the metric whose objects should be removed.
    """
    bucket = self._bucket_name
    # Seed with an empty dict: get('IsTruncated', True) defaults to True
    # so the loop always runs at least once before any listing was made.
    response = {}
    while response.get('IsTruncated', True):
        if 'NextContinuationToken' in response:
            # Resume the listing where the previous page stopped.
            kwargs = {
                'ContinuationToken': response['NextContinuationToken']
            }
        else:
            kwargs = {}
        try:
            response = self.s3.list_objects_v2(
                Bucket=bucket, Prefix=self._prefix(metric), **kwargs)
        except botocore.exceptions.ClientError as e:
            if e.response['Error'].get('Code') == "NoSuchKey":
                # Maybe it never has been created (no measure)
                return
            raise
        # Delete this page's keys; 'Contents' may be absent on an
        # empty listing, hence the default empty tuple.
        s3.bulk_delete(self.s3, bucket,
                       [c['Key'] for c in response.get('Contents', ())])
def process_measure_for_metrics(self, metric_ids):
    """Yield unprocessed measures grouped by metric id, then delete them.

    For each metric id, every pending measure object is downloaded and
    appended to that metric's measures array. The resulting mapping is
    yielded once; the backing objects are bulk deleted only after the
    consumer resumes the generator.

    :param metric_ids: iterable of metric ids to process.
    """
    measures = defaultdict(self._make_measures_array)
    all_files = []

    def _fetch(key):
        # Download one measure object and deserialize its payload.
        resp = self.s3.get_object(Bucket=self._bucket_name_measures,
                                  Key=key)
        return self._unserialize_measures(key, resp['Body'].read())

    for metric_id in metric_ids:
        sack = self.sack_for_metric(metric_id)
        files = self._list_measure_files_for_metric(sack, metric_id)
        all_files.extend(files)
        for f in files:
            measures[metric_id] = numpy.concatenate(
                (measures[metric_id], _fetch(f)))

    yield measures

    # Now clean objects
    s3.bulk_delete(self.s3, self._bucket_name_measures, all_files)
def process_measures_for_sack(self, sack):
    """Yield every unprocessed measure in *sack*, grouped by metric id.

    Lists all measure objects under the sack prefix, parses each key as
    ``sack/metric_id/measure_id`` (skipping unparsable keys with a
    warning), downloads and groups the measures per metric id, yields
    the mapping once, and bulk deletes the listed objects after the
    consumer resumes the generator.

    :param sack: the sack to drain.
    """
    grouped = defaultdict(self._make_measures_array)
    keys = self._list_measure_files((str(sack),))
    for key in keys:
        try:
            # Keys are expected to look like sack/metric_id/measure_id;
            # both a bad split and a non-UUID metric id raise ValueError.
            _, raw_metric_id, _ = key.split("/")
            metric_id = uuid.UUID(raw_metric_id)
        except ValueError:
            LOG.warning("Unable to parse measure file name %s", key)
            continue
        payload = self.s3.get_object(
            Bucket=self._bucket_name_measures, Key=key)['Body'].read()
        grouped[metric_id] = numpy.concatenate(
            (grouped[metric_id], self._unserialize_measures(key, payload)))
    yield grouped
    # Now clean objects
    s3.bulk_delete(self.s3, self._bucket_name_measures, keys)
def process_measures_for_sack(self, sack):
    """Yield all pending measures stored in *sack*, keyed by metric id.

    Every object under the sack's prefix is parsed as
    ``sack/metric_id/measure_id``; keys that do not match are logged and
    skipped. The grouped measures are yielded once, and the source
    objects are bulk deleted after the consumer resumes the generator.

    :param sack: the sack to drain.
    """
    measures = defaultdict(self._make_measures_array)
    files = self._list_measure_files((str(sack),))
    for f in files:
        try:
            sack_name, metric_str, _measure_id = f.split("/")
            mid = uuid.UUID(metric_str)
        except ValueError:
            # Either the key has the wrong number of components or the
            # metric id portion is not a valid UUID.
            LOG.warning("Unable to parse measure file name %s", f)
            continue
        response = self.s3.get_object(Bucket=self._bucket_name_measures,
                                      Key=f)
        data = self._unserialize_measures(f, response['Body'].read())
        measures[mid] = numpy.concatenate((measures[mid], data))
    yield measures
    # Now clean objects
    s3.bulk_delete(self.s3, self._bucket_name_measures, files)
def delete_unprocessed_measures_for_metric_id(self, metric_id):
    """Delete every pending measure object for *metric_id*.

    :param metric_id: the metric id whose unprocessed measures are
        removed from the measures bucket.
    """
    sack = self.sack_for_metric(metric_id)
    s3.bulk_delete(
        self.s3, self._bucket_name_measures,
        self._list_measure_files_for_metric_id(sack, metric_id))
def delete_unprocessed_measures_for_metric(self, metric_id):
    """Drop all not-yet-processed measure objects belonging to *metric_id*.

    :param metric_id: the metric id whose pending measures are deleted
        from the measures bucket.
    """
    pending = self._list_measure_files_for_metric(
        self.sack_for_metric(metric_id), metric_id)
    s3.bulk_delete(self.s3, self._bucket_name_measures, pending)