def download_images(self):
    print('BatchScorer, download_images(), use_url is {}, metadata_available is {}'.format(
        self.use_url, self.metadata_available))

    if not self.use_url:
        blob_service = BlockBlobService(
            account_name=BatchScorer.get_account_from_uri(self.input_container_sas),
            sas_token=BatchScorer.get_sas_key_from_uri(self.input_container_sas))
        print('blob_service created')
        container_name = BatchScorer.get_container_from_uri(self.input_container_sas)

    for i in self.image_ids_to_score:
        if self.metadata_available:
            image_id = i[0]
            image_meta = i[1]
        else:
            image_id = i
            image_meta = None

        try:
            if self.use_url:
                # local_filename will be a tempfile with a generated name
                local_filename, headers = request.urlretrieve(image_id)  # TODO do not save to disk
                image = TFDetector.open_image(local_filename)
                image = TFDetector.resize_image(image)
            else:
                stream = io.BytesIO()
                _ = blob_service.get_blob_to_stream(container_name, image_id, stream)
                image = TFDetector.open_image(stream)
                image = TFDetector.resize_image(image)

            # image loaded here
            self.images.append(image)
            self.image_ids.append(image_id)
            self.image_metas.append(image_meta)
        except Exception as e:
            print('score.py, failed to download or open image {}: {}'.format(image_id, str(e)))
            self.failed_images.append(image_id)
            self.failed_metas.append(image_meta)
            continue
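# The TODO above notes that request.urlretrieve() writes the download to a temp
# file on disk. A hedged sketch of one way to keep the download in memory
# instead, assuming the image can be opened with PIL (which TFDetector.open_image
# presumably wraps); open_image_from_url is an illustrative helper, not part of
# the project.
import io
from urllib import request as urllib_request

from PIL import Image


def open_image_from_url(url, timeout_seconds=60):
    # Read the HTTP response body fully into memory...
    with urllib_request.urlopen(url, timeout=timeout_seconds) as response:
        data = response.read()
    # ...and hand PIL an in-memory stream, so nothing touches the filesystem.
    return Image.open(io.BytesIO(data))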
def download_images(self):
    print('BatchScorer, download_images()')

    blob_service = BlockBlobService(
        account_name=BatchScorer.get_account_from_uri(self.input_container_sas),
        sas_token=BatchScorer.get_sas_key_from_uri(self.input_container_sas))
    container_name = BatchScorer.get_container_from_uri(self.input_container_sas)

    for image_id in self.image_ids_to_score:
        try:
            stream = io.BytesIO()
            _ = blob_service.get_blob_to_stream(container_name, image_id, stream)
            image = TFDetector.open_image(stream)
            image = TFDetector.resize_image(image)

            # image loaded here
            self.images.append(image)
            self.image_ids.append(image_id)
        except Exception as e:
            print('score.py, failed to download or open image {}, exception: {}'.format(
                image_id, str(e)))
            self.failed_images.append(image_id)
            continue
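# The BatchScorer.get_account_from_uri / get_sas_key_from_uri /
# get_container_from_uri helpers used above are not shown in this excerpt.
# A minimal sketch of what such helpers typically do for a container SAS URI
# of the form https://<account>.blob.core.windows.net/<container>?<sas-token>;
# parse_container_sas_uri is an illustrative name, not the project's actual helper.
from urllib.parse import urlsplit


def parse_container_sas_uri(sas_uri):
    parts = urlsplit(sas_uri)
    account_name = parts.netloc.split('.')[0]              # '<account>.blob.core.windows.net'
    container_name = parts.path.lstrip('/').split('/')[0]  # first path segment
    sas_token = parts.query                                # everything after '?'
    return account_name, container_name, sas_token


# Example:
# parse_container_sas_uri('https://acct.blob.core.windows.net/images?sv=...&sig=...')
# -> ('acct', 'images', 'sv=...&sig=...')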
def _detect_process_request_data(request):
    files = request.files
    params = request.args

    # check that the content uploaded is not too big
    # request.content_length is the length of the total payload
    content_length = request.content_length if request.content_length \
        else api_config.MAX_CONTENT_LENGTH_IN_MB * 1024 * 1024 + 1
    if content_length > api_config.MAX_CONTENT_LENGTH_IN_MB * 1024 * 1024:
        abort(413, ('Payload size {:.2f} MB exceeds the maximum allowed of {} MB, or payload content length'
                    ' cannot be determined. Please upload fewer or more compressed images.').format(
            content_length / (1024 * 1024), api_config.MAX_CONTENT_LENGTH_IN_MB))

    render_boxes = True if params.get('render', '') in ['True', 'true'] else False

    # validate detection confidence value
    if 'confidence' in params:
        detection_confidence = float(params['confidence'])
        print('runserver, post_detect_sync, detection confidence: ', detection_confidence)
        if detection_confidence < 0.0 or detection_confidence > 1.0:
            abort(400, 'Detection confidence {} is invalid. Needs to be between 0.0 and 1.0.'.format(
                detection_confidence))
    else:
        detection_confidence = api_config.DEFAULT_DETECTION_CONFIDENCE

    # check that the number of images is acceptable for this synchronous API
    num_images = sum([1 if file.content_type in api_config.IMAGE_CONTENT_TYPES else 0
                      for file in files.values()])
    print('runserver, post_detect_sync, number of images received: ', num_images)
    if num_images > api_config.MAX_IMAGES_ACCEPTED:
        abort(413, 'Too many images. Maximum number of images that can be processed in one call is {}.'.format(
            str(api_config.MAX_IMAGES_ACCEPTED)))
    elif num_images == 0:
        abort(400, 'No image(s) of accepted types (image/jpeg, image/png, application/octet-stream) received.')

    # read input images
    try:
        print('runserver, post_detect_sync, reading input images...')
        images, image_names = [], []
        for k, file in files.items():
            # file of type SpooledTemporaryFile has attributes content_type and a read() method
            if file.content_type in api_config.IMAGE_CONTENT_TYPES:
                images.append(TFDetector.open_image(file))
                image_names.append(k)
    except Exception as e:
        log.log_exception('Error reading the images: ' + str(e))
        abort(500, 'Error reading the images: ' + str(e))

    return {
        'render_boxes': render_boxes,
        'detection_confidence': detection_confidence,
        'images': images,
        'image_names': image_names
    }
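# _detect_process_request_data() reads several limits from api_config, which is
# not shown in this excerpt. A hedged sketch of the kind of module it expects;
# the concrete values below are placeholders for illustration, not the service's
# real configuration.
# api_config.py (illustrative)
MAX_CONTENT_LENGTH_IN_MB = 40             # maximum total payload size accepted
MAX_IMAGES_ACCEPTED = 8                   # per-call image cap for the synchronous API
DEFAULT_DETECTION_CONFIDENCE = 0.9        # used when the caller omits ?confidence=
IMAGE_CONTENT_TYPES = ['image/jpeg', 'image/png', 'application/octet-stream']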
def download_images(self):
    print('BatchScorer, download_images(), use_url is {}, metadata_available is {}'.format(
        self.use_url, self.metadata_available))

    if not self.use_url:
        blob_service = SasBlob.get_service_from_uri(self.input_container_sas)
        print('blob_service created')
        container_name = SasBlob.get_container_from_uri(self.input_container_sas)

    for i in self.image_ids_to_score:
        if self.metadata_available:
            image_id = i[0]
            image_meta = i[1]
        else:
            image_id = i
            image_meta = None

        try:
            if self.use_url:
                # im_to_open will be a tempfile with a generated name
                im_to_open, headers = request.urlretrieve(image_id)  # TODO do not save to disk
            else:
                im_to_open = io.BytesIO()
                _ = blob_service.get_blob_to_stream(container_name, image_id, im_to_open)

            # open is lazy; load() loads the image so we know it can be read successfully
            image = TFDetector.open_image(im_to_open)
            image.load()

            self.images.append(image)
            self.image_ids.append(image_id)
            self.image_metas.append(image_meta)
        except Exception as e:
            print('score.py, failed to download or open image {}: {}'.format(image_id, str(e)))
            self.failed_images.append(image_id)
            self.failed_metas.append(image_meta)
            continue
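# The comment in the method above relies on PIL's lazy open: Image.open() only
# parses the header, so a truncated or corrupt blob may not fail until the
# pixels are actually decoded. A minimal sketch of that open-then-load pattern,
# assuming TFDetector.open_image wraps PIL.Image.open; open_and_verify is an
# illustrative helper, not part of the project.
from PIL import Image


def open_and_verify(stream):
    image = Image.open(stream)  # lazy: reads only the header
    image.load()                # force a full decode so unreadable files fail inside the caller's try block
    return image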