Example #1
	def _depr_upload_dataset(self, dataset_name, image_data):
		'''
		DEPRECATED
		
		Uploads multiple images and groups them into a dataset

		@dataset_name: name of the dataset
		@image_data: list of pairs of (filename, metadata) input
		'''
		
		ds_list = []
		for filename, metadata in image_data:
			image = self.upload_image(filename=filename, metadata=metadata)
			if image is not None:
				ds_list.append((image.name, image.uri))
		self._debug_print('Dataset ' + dataset_name + ' contains ' + str(len(ds_list)) + ' element(s)')
		
		dataset = BQDataset(name=dataset_name)
		dataset.value = [(uri, 'object') for image_name, uri in ds_list]
		self.session = self._authenticate(self.bqs)
		if self.session.save(dataset):
			self.logger.info('Dataset ' + dataset_name + ' uploaded successfully!')
			self._debug_print('Dataset ' + dataset_name + ' uploaded successfully!')
		else:
			self.logger.error('Dataset ' + dataset_name + ' failed to upload')
			self._debug_print('Dataset ' + dataset_name + ' failed to upload')
Example #2
    def _depr_upload_dataset(self, dataset_name, image_data):
        '''
        DEPRECATED

        Uploads multiple images and groups them into a dataset

        @dataset_name: name of the dataset
        @image_data: list of pairs of (filename, metadata) input
        '''

        ds_list = []
        for filename, metadata in image_data:
            image = self.upload_image(filename=filename, metadata=metadata)
            if image is not None:
                ds_list.append((image.name, image.uri))
        self._debug_print('Dataset ' + dataset_name + ' contains ' +
                          str(len(ds_list)) + ' element(s)')

        dataset = BQDataset(name=dataset_name)
        dataset.value = [(uri, 'object') for image_name, uri in ds_list]
        self.session = self._authenticate(self.bqs)
        if self.session.save(dataset):
            self.logger.info('Dataset ' + dataset_name +
                             ' uploaded successfully!')
            self._debug_print('Dataset ' + dataset_name +
                              ' uploaded successfully!')
        else:
            self.logger.error('Dataset ' + dataset_name + ' failed to upload')
            self._debug_print('Dataset ' + dataset_name + ' failed to upload')
Example #3
def upload_dataset(session, dataset_name, uri_list):
    """Save a dataset
    Create a data setup from the uploaded list
    @param session: a bqsession
    @param dataset_name : name the new dataset
    @param url_list : array of uris (strings)
    @return: None
    """

    dataset = BQDataset(name="%s-%s" % (dataset_name, str(uuid.uuid4())[:4]))
    # force dataset values to be interpreted as object references
    dataset.value = [(uri, 'object') for uri in uri_list]

    if session.args.verbose:
        six.print_("DATASET ", BQFactory.to_string(dataset))

    if not session.args.dryrun:
        session.save(dataset)
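
A minimal usage sketch for the function above (the server URL, credentials, and URIs are placeholders; it assumes bqapi's BQSession and attaches the args namespace whose verbose/dryrun flags the function reads):

import argparse

from bqapi import BQSession

session = BQSession().init_local('user', 'password',
                                 bisque_root='https://bisque.example.org')
# the function above reads verbose/dryrun flags off session.args
session.args = argparse.Namespace(verbose=True, dryrun=False)

uris = [
    'https://bisque.example.org/data_service/00-AAAA',
    'https://bisque.example.org/data_service/00-BBBB',
]
upload_dataset(session, 'demo-dataset', uris)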
Example #4
    def upload_dataset(self, dataset_name, image_data, num_threads=4):
        '''
        Uploads multiple images and groups them into a dataset via a multiprocessing worker pool

        @dataset_name: name of the dataset
        @image_data: list of pairs of (filename, metadata) input
        '''

        pool = multiprocessing.Pool(num_threads)
        result = pool.map_async(self._upload_image_helper,
                                [(image[0], image[1]) for image in image_data])

        # OLD Method of async
        #async_results = [ pool.apply_async(self.upload_image, (idata[0],idata[1])) for idata in image_data ]
        #print async_results
        #map(ApplyResult.wait, async_results)
        #ds_list = [r.get() for r in async_results]

        while not result.ready():
            # _number_left is a private AsyncResult attribute; used here only
            # for coarse progress logging
            self._debug_print("Chunks Remaining: >={}".format(
                result._number_left))
            time.sleep(30)

        # Build dataset
        ds_list = result.get()
        pool.close()
        pool.join()

        self._debug_print('Dataset ' + dataset_name + ' contains ' +
                          str(len(ds_list)) + ' element(s)')
        dataset = BQDataset(name=dataset_name)
        dataset.value = [(uri, 'object') for image_name, uri in ds_list]

        # Refresh authentication in cases of long uploads
        local_session = self._authenticate()
        if local_session.save(dataset):
            self._debug_print('Dataset ' + dataset_name +
                              ' uploaded successfully!')
        else:
            self._debug_print('Dataset ' + dataset_name + ' failed to upload',
                              log="error")

        return ds_list
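
Examples #4 and #5 hand each (filename, metadata) pair to a private _upload_image_helper that is not shown. A plausible sketch, inferred only from how the pool calls it and from the (image_name, uri) pairs the dataset builder expects:

    def _upload_image_helper(self, args):
        # hypothetical reconstruction: pool.map_async passes one
        # (filename, metadata) tuple per work item
        filename, metadata = args
        image = self.upload_image(filename=filename, metadata=metadata)
        # the dataset builder unpacks (image_name, uri) pairs
        return (image.name, image.uri)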
Example #5
	def upload_dataset(self, dataset_name, image_data, num_threads=4):
		'''
		Uploads multiple images and groups them into a dataset via a multiprocessing worker pool

		@dataset_name: name of the dataset
		@image_data: list of pairs of (filename, metadata) input
		'''

		pool = multiprocessing.Pool(num_threads)
		result = pool.map_async(self._upload_image_helper, [(image[0], image[1]) for image in image_data])
		
		# OLD Method of async
		#async_results = [ pool.apply_async(self.upload_image, (idata[0],idata[1])) for idata in image_data ]
		#print async_results
		#map(ApplyResult.wait, async_results)
		#ds_list = [r.get() for r in async_results]
		
		while not result.ready():
			# _number_left is a private AsyncResult attribute; used here only for coarse progress logging
			self._debug_print("Chunks Remaining: >={}".format(result._number_left))
			time.sleep(30)
		
		# Build dataset
		ds_list = result.get()
		pool.close()
		pool.join()

		self._debug_print('Dataset ' + dataset_name + ' contains ' + str(len(ds_list)) + ' element(s)')
		dataset = BQDataset(name=dataset_name)
		dataset.value = [(uri, 'object') for image_name, uri in ds_list]
		
		# Refresh authentication in cases of long uploads
		local_session = self._authenticate()
		if local_session.save(dataset):
			self._debug_print('Dataset ' + dataset_name + ' uploaded successfully!')
		else:
			self._debug_print('Dataset ' + dataset_name + ' failed to upload', log="error")
			
		return ds_list
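
A minimal usage sketch for the pooled variant (the client class name, constructor, file names, and metadata are placeholders; it assumes an instance of the class defining upload_dataset above):

# hypothetical client class wrapping the upload_dataset method above
client = BisqueUploader(host='https://bisque.example.org',
                        user='user', password='password')
image_data = [
    ('cells_001.tif', {'experiment': 'exp-42'}),
    ('cells_002.tif', {'experiment': 'exp-42'}),
]
ds_list = client.upload_dataset('demo-dataset', image_data, num_threads=4)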