def sub_adapter_job(self, tag: str, sub_adapter: ModuleServiceAdapterABC, job: JobDescriptor):
    """Build a new JobDescriptor that forwards each item of *job* to *sub_adapter*.

    Inputs are passed through unchanged; each item's output URL is rewritten
    via ``self.transform_output_url`` so every sub-adapter writes to its own
    file, allowing the parts to be reassembled later from those URLs.

    :param tag: label identifying this sub-adapter, used in the rewritten URL.
    :param sub_adapter: adapter whose ``service`` seeds the new ServiceDescriptor.
    :param job: source job whose items are copied and re-targeted.
    :return: a new JobDescriptor aimed at the sub-adapter's service.
    :raises RuntimeError: if a job item's output type is not 'file_url_put'.
    """
    new_service_descriptor = ServiceDescriptor(sub_adapter.service)
    new_job = JobDescriptor(new_service_descriptor)

    for item_count, job_item in enumerate(job):
        # Just pass the inputs on directly to the subtasks.
        new_job_item = {
            'input_type': job_item['input_type'],
            'input_url': job_item['input_url'],
        }
        output_type = job_item['output_type']
        new_job_item['output_type'] = output_type

        # Transform the output URL so we can get separate output files for each
        # sub-adapter. That way we can assemble the various parts at the end
        # using these separate URLs.
        if output_type == 'file_url_put':
            output_url = job_item['output_url']
            sub_adapter_output_url = self.transform_output_url(tag, item_count, output_url)
        else:
            # Bug fix: the message says "for job" but previously formatted
            # `self` into the placeholder; report the offending job instead.
            raise RuntimeError("Bad output type %s for job %s" % (output_type, job))

        new_job_item['output_url'] = sub_adapter_output_url
        new_job.append_job_item(new_job_item)

    return new_job
def test_jobs():
    """Exercise JobDescriptor construction, iteration, item access, and equality.

    Dead-code fix: the original initialized ``service_id = 0``, conditionally
    set it when ``str(job) != "NO_JOB"``, and then unconditionally overwrote it
    with ``ontology.DOCUMENT_SUMMARIZER_ID`` anyway — the conditional's result
    was always discarded, so it has been removed.
    """
    print()
    setup_logging()
    init_test_jobs()
    test_jobs = JobDescriptor.get_test_jobs(ontology.DOCUMENT_SUMMARIZER_ID)

    for job in test_jobs:
        job_parameters = {
            'input_type': 'file',
            'input_url': 'http://test.com/inputs/test_input.txt',
            'output_type': 'file_url_put',
            'output_url': 'test_output.txt'
        }
        # Second, identical parameter set used to exercise item assignment.
        job_parameters_2 = {
            'input_type': 'file',
            'input_url': 'http://test.com/inputs/test_input.txt',
            'output_type': 'file_url_put',
            'output_url': 'test_output.txt'
        }
        service_id = ontology.DOCUMENT_SUMMARIZER_ID

        # Create a new job descriptor with four sets of parameters.
        job_list = [
            job_parameters,
            job_parameters,
            job_parameters,
            job_parameters
        ]
        new_job = JobDescriptor(ServiceDescriptor(service_id), job_list)

        # Cover and test iteration: all four items should be file inputs.
        file_count = 0
        for job_item in new_job:
            if job_item['input_type'] == 'file':
                file_count += 1
            else:
                file_count = 0
        assert (file_count == 4)

        # Cover and test list item retrieval, assignment, deletion and length.
        new_job[0] = job_parameters_2
        assert (new_job[0] == job_parameters_2)
        job_count = len(new_job)
        del new_job[1]
        assert (len(new_job) == job_count - 1)

        # Test equality and string conversion functions.
        last_job = new_job
        assert (last_job == new_job)
        assert (str(last_job) == str(new_job))

        # Append then delete the same element: net length of test_jobs is
        # unchanged, so iterating while mutating stays safe here.
        test_jobs.append(new_job)
        total_jobs = len(test_jobs)
        test_jobs[0] = new_job
        del test_jobs[total_jobs - 1]

    # Check the string conversion with no ServiceDescriptor...
    new_job = JobDescriptor(None, [job_parameters])
    assert (str(new_job) != "")
def perform_one_service(app, service_manager, service_id):
    """Run every test job registered for *service_id*, remapping output URLs first.

    File-put output URLs are rewritten via ``remap_file_url`` so test output
    lands in the output directory; the adapter is then asked to perform each
    job and the test fails if any job raises.

    :param app: application context (unused directly; kept for call symmetry).
    :param service_manager: source of the service adapter for *service_id*.
    :param service_id: ontology node id of the service under test.
    :raises RuntimeError: if a job item has an output type other than 'file_url_put'.
    """
    log.debug("    test_one_service")
    service_adapter = service_manager.get_service_adapter_for_id(service_id)
    test_jobs = JobDescriptor.get_test_jobs(service_id)

    # Remap the job item output URLs to the output directory.
    for job in test_jobs:
        for job_item in job:
            output_type = job_item['output_type']
            if output_type == 'file_url_put':
                job_item['output_url'] = remap_file_url(job_item['output_url'])
            else:
                # Bug fix: the original formatted `self` here, which is not
                # defined in this standalone function and would have raised
                # NameError instead of the intended error.
                raise RuntimeError("Bad output type %s for job %s" % (output_type, job))

    log.debug("    Testing jobs")
    job = None
    exception_caught = False
    try:
        for job in test_jobs:
            log.debug("      testing job %s", job)
            service_adapter.perform(job)
    except RuntimeError as exception:
        exception_caught = True
        log.error("  Exception caught %s", exception)
        log.debug("  Error performing %s %s", job, service_adapter)

    assert not exception_caught
async def perform(service_node_id=None, job_params=None, context=None):
    """Build a job for *service_node_id* from *job_params* and execute it.

    *context* is the application object handed through to ``perform_job``.
    Returns whatever ``perform_job`` produces for the constructed job.
    """
    descriptor = ServiceDescriptor(service_node_id)
    pending_job = JobDescriptor(descriptor, job_params)
    outcome = await perform_job(context, pending_job)
    logging.debug('Result of perform was %s', outcome)
    return outcome
def test_tensorflow_mnist_adapter(app):
    """End-to-end test of the Tensorflow MNIST adapter on a known image of a 7.

    Typo fix: two log messages previously said "NNIST"; corrected to "MNIST".
    ``exception_caught`` is now initialized before the ``try`` for clarity.
    """
    setup_logging()
    log.debug("Testing Tensorflow MNIST Adapter")

    # Setup a test job for classifying a test mnist image. The test is a
    # 28 x 28 image of a 7 which has been flattened into a single float 784
    # element vector format as required by the tensorflow example (see
    # mnist_seven_image definition above).
    job_parameters = {
        'input_type': 'attached',
        'input_data': {
            'images': [mnist_seven_image],
        },
        'output_type': 'attached',
    }

    # Get the service for an MNIST classifier. A service identifies a unique
    # service provided by SingularityNET and is part of the ontology.
    ontology = app['ontology']
    mnist_service = ontology.get_service(MNIST_CLASSIFIER_ID)

    # Create the Tensorflow MNIST service adapter.
    mnist_service_adapter = TensorflowMNIST(app, mnist_service)

    # Create a service descriptor. These are post-contract negotiated
    # descriptors that may include other parameters like quality of service,
    # input and output formats, etc.
    mnist_service_descriptor = ServiceDescriptor(MNIST_CLASSIFIER_ID)

    # Create a new job descriptor with a single set of parameters for the test
    # image of a 7 in the format defined above for the python variable:
    # mnist_seven_image.
    job_list = [job_parameters]
    job = JobDescriptor(mnist_service_descriptor, job_list)

    # Setup the service manager. NOTE: This will add services that are
    # (optionally) passed in so you can manually create services in addition
    # to those that are loaded from the config file. After all the services
    # are added, it will call post_load_initialize on all the services.
    setup_service_manager(app, [mnist_service_adapter])

    # Test perform for the mnist service adapter.
    exception_caught = False
    try:
        results = mnist_service_adapter.perform(job)
    except RuntimeError as exception:
        exception_caught = True
        log.error("  Exception caught %s", exception)
        log.debug("  Error performing %s %s", job, mnist_service_adapter)

    assert not exception_caught

    # Check our results for format and content.
    assert len(results) == 1
    assert results[0]['predictions'] == [7]
    assert results[0]['confidences'][0] > 0.9900

    if results[0]['predictions'] == [7]:
        log.debug("Tensorflow MNIST Adapter - CORRECT evaluation of image as 7")
def perform_one_service(app, service_manager, service_id):
    """Run every registered test job for *service_id* through its adapter.

    Fails the test (via assert) if any job raises RuntimeError while being
    performed.
    """
    log.debug("    test_one_service")
    adapter = service_manager.get_service_adapter_for_id(service_id)
    jobs = JobDescriptor.get_test_jobs(service_id)

    log.debug("    Testing jobs")
    current_job = None
    exception_caught = False
    try:
        for current_job in jobs:
            log.debug("      testing job %s", current_job)
            adapter.perform(current_job)
    except RuntimeError as exception:
        exception_caught = True
        log.error("  Exception caught %s", exception)
        log.debug("  Error performing %s %s", current_job, adapter)

    assert not exception_caught
async def perform_job(app, job_descriptor: JobDescriptor):
    """Execute *job_descriptor* via its service adapter, if it is contracted.

    Returns the adapter's results, or a single-item error list when the job
    has no valid contract. Raises if the service's adapter is unavailable.
    """
    logger.debug("perform_job: %s", job_descriptor)
    service_manager = app['service_manager']
    accounting = app['accounting']

    # Uncontracted jobs are rejected with an error payload, not an exception.
    if not accounting.job_is_contracted(job_descriptor):
        return [{
            'error': "Job {0} has no valid contract".format(job_descriptor.error_description())
        }]

    # Get the adapter for this job's service.
    node_id = job_descriptor.service.ontology_node_id
    adapter = service_manager.get_service_adapter_for_id(node_id)
    if adapter is None:
        raise Exception('Service not available')
    return adapter.perform(job_descriptor)
def test_tensorflow_imagenet_adapter(app):
    """End-to-end test of the Tensorflow ImageNet adapter on three sample images.

    Fixes: the image files were opened and never closed (handle leak) — now
    read inside a ``with`` block; the extension is taken with ``rsplit`` so
    file names containing extra dots still yield the final extension.
    """
    setup_logging()
    log.debug("Testing Tensorflow ImageNet Adapter")

    # Images to be tested.
    images = ["bucket.jpg", "cup.jpg", "bowtie.png"]
    encoded_images = []
    image_types = []
    for image in images:
        # Load each image and encode it base 64.
        image_path = os.path.join(TEST_DIRECTORY, "data", "imagenet", image)
        with open(image_path, 'rb') as image_file:
            image_bytes = image_file.read()
        encoded_images.append(base64.b64encode(image_bytes))
        image_types.append(image.rsplit('.', 1)[-1])

    # Setup a test job for classifying the test images.
    job_parameters = {
        'input_type': 'attached',
        'input_data': {
            'images': encoded_images,
            'image_types': image_types
        },
        'output_type': 'attached',
    }

    # Get the service for an ImageNet classifier. A service identifies a
    # unique service provided by SingularityNET and is part of the ontology.
    ontology = app['ontology']
    imagenet_service = ontology.get_service(IMAGENET_CLASSIFIER_ID)

    # Create the Tensorflow ImageNet service adapter.
    imagenet_service_adapter = TensorflowImageNet(app, imagenet_service)

    # Create a service descriptor. These are post-contract negotiated
    # descriptors that may include other parameters like quality of service,
    # input and output formats, etc.
    imagenet_service_descriptor = ServiceDescriptor(IMAGENET_CLASSIFIER_ID)

    # Create a new job descriptor with a single set of parameters covering all
    # three test images.
    job_list = [job_parameters]
    job = JobDescriptor(imagenet_service_descriptor, job_list)

    # Setup the service manager. NOTE: This will add services that are
    # (optionally) passed in so you can manually create services in addition
    # to those that are loaded from the config file. After all the services
    # are added, it will call post_load_initialize on all the services.
    setup_service_manager(app, [imagenet_service_adapter])

    # Test perform for the ImageNet service adapter.
    exception_caught = False
    try:
        results = imagenet_service_adapter.perform(job)
    except RuntimeError as exception:
        exception_caught = True
        log.error("  Exception caught %s", exception)
        log.debug("  Error performing %s %s", job, imagenet_service_adapter)

    assert not exception_caught

    print(results)

    # Check our results for format and content.
    assert len(results) == 1
    assert results[0]['predictions'] == [['bucket, pail'], ['cup', 'coffee mug'], ['bow tie, bow-tie, bowtie']]
    # NOTE(review): this assertion mixes indices [0][0] and [2][0]; it looks
    # like a copy-paste slip from the line below — confirm intended bounds.
    assert results[0]['confidences'][0][0] > 0.9600 and results[0][
        'confidences'][2][0] < 1.0
    assert results[0]['confidences'][1][0] > 0.4000 and results[0][
        'confidences'][1][1] < 0.4200
    assert results[0]['confidences'][1][1] > 0.4000 and results[0][
        'confidences'][1][1] < 0.4100
    assert results[0]['confidences'][2][0] > 0.9990 and results[0][
        'confidences'][2][0] < 1.0
async def internal_perform_job(app, service_node_id, job_params):
    """Convenience wrapper: wrap *job_params* in a job for *service_node_id*
    and hand it to ``perform_job``, returning its result."""
    job = JobDescriptor(ServiceDescriptor(service_node_id), job_params)
    return await perform_job(app, job)