def test_post_delete_batch_images(channel):
    """Upload two duplicate truck images in one batch, then delete them in one batch."""
    stub = service_pb2_grpc.V2Stub(channel)

    def _truck_input():
        # allow_duplicate_url lets the same URL be uploaded twice.
        return resources_pb2.Input(
            data=resources_pb2.Data(
                image=resources_pb2.Image(url=TRUCK_IMAGE_URL, allow_duplicate_url=True)
            )
        )

    post_response = stub.PostInputs(
        service_pb2.PostInputsRequest(inputs=[_truck_input() for _ in range(2)]),
        metadata=metadata(),
    )
    raise_on_failure(post_response)

    uploaded_ids = [inp.id for inp in post_response.inputs]
    wait_for_inputs_upload(stub, metadata(), uploaded_ids)

    delete_response = stub.DeleteInputs(
        service_pb2.DeleteInputsRequest(ids=uploaded_ids), metadata=metadata()
    )
    raise_on_failure(delete_response)
def test_image_with_bytes(channel):
    """Upload a single image from raw file bytes, wait for processing, then delete it."""
    stub = service_pb2_grpc.V2Stub(channel)

    with open(RED_TRUCK_IMAGE_FILE_PATH, "rb") as image_file:
        raw_bytes = image_file.read()

    upload_response = stub.PostInputs(
        service_pb2.PostInputsRequest(
            inputs=[
                resources_pb2.Input(
                    data=resources_pb2.Data(image=resources_pb2.Image(base64=raw_bytes))
                )
            ]
        ),
        metadata=metadata(),
    )
    raise_on_failure(upload_response)

    uploaded_id = upload_response.inputs[0].id
    wait_for_inputs_upload(stub, metadata(), [uploaded_id])

    raise_on_failure(
        stub.DeleteInputs(
            service_pb2.DeleteInputsRequest(ids=[uploaded_id]), metadata=metadata()
        )
    )
def test_post_list_patch_get_delete_image(channel):
    """Exercise the full input lifecycle: create, list, patch concepts, get, delete."""
    stub = service_pb2_grpc.V2Stub(channel)

    create_response = stub.PostInputs(
        service_pb2.PostInputsRequest(
            inputs=[
                resources_pb2.Input(
                    data=resources_pb2.Data(
                        image=resources_pb2.Image(
                            url=TRUCK_IMAGE_URL, allow_duplicate_url=True
                        ),
                        concepts=[resources_pb2.Concept(id="some-concept")],
                    )
                )
            ]
        ),
        metadata=metadata(),
    )
    raise_on_failure(create_response)
    input_id = create_response.inputs[0].id

    try:
        wait_for_inputs_upload(stub, metadata(), [input_id])

        first_page = stub.ListInputs(
            service_pb2.ListInputsRequest(per_page=1), metadata=metadata()
        )
        raise_on_failure(first_page)
        assert len(first_page.inputs) == 1

        # A page this deep should be past all existing inputs and come back empty.
        far_page = stub.ListInputs(
            service_pb2.ListInputsRequest(per_page=500, page=1000), metadata=metadata()
        )
        raise_on_failure(far_page)
        assert len(far_page.inputs) == 0

        # Replace the original concept with a new one.
        patch_response = stub.PatchInputs(
            service_pb2.PatchInputsRequest(
                action="overwrite",
                inputs=[
                    resources_pb2.Input(
                        id=input_id,
                        data=resources_pb2.Data(
                            concepts=[resources_pb2.Concept(id="some-new-concept")]
                        ),
                    )
                ],
            ),
            metadata=metadata(),
        )
        raise_on_failure(patch_response)

        get_response = stub.GetInput(
            service_pb2.GetInputRequest(input_id=input_id), metadata=metadata()
        )
        raise_on_failure(get_response)
        assert get_response.input.data.concepts[0].name == "some-new-concept"
    finally:
        # Always clean up the input, even if an assertion above failed.
        raise_on_failure(
            stub.DeleteInput(
                service_pb2.DeleteInputRequest(input_id=input_id), metadata=metadata()
            )
        )
def __enter__(self) -> resources_pb2.Input:
    """Create a dog-image input with a concept, metadata and a geo point; return
    the input once it has finished uploading."""
    concept_id = "my-concept-id-" + uuid.uuid4().hex
    concept_name = "my concept name " + uuid.uuid4().hex

    image_meta = struct_pb2.Struct()
    image_meta.update(
        {
            "some-key": "some-value",
            "another-key": {"inner-key": "inner-value"},
        }
    )

    response = self._stub.PostInputs(
        service_pb2.PostInputsRequest(
            inputs=[
                resources_pb2.Input(
                    data=resources_pb2.Data(
                        image=resources_pb2.Image(
                            url=DOG_IMAGE_URL, allow_duplicate_url=True
                        ),
                        concepts=[
                            resources_pb2.Concept(
                                id=concept_id, name=concept_name, value=1
                            )
                        ],
                        metadata=image_meta,
                        geo=resources_pb2.Geo(
                            geo_point=resources_pb2.GeoPoint(longitude=44, latitude=55)
                        ),
                    ),
                )
            ]
        ),
        metadata=metadata(),
    )
    raise_on_failure(response)

    self._input = response.inputs[0]
    wait_for_inputs_upload(self._stub, metadata(), [self._input.id])
    return self._input
def __enter__(self) -> resources_pb2.Input:
    """Upload a travel-image input and return it once fully processed."""
    response = self._stub.PostInputs(
        service_pb2.PostInputsRequest(
            inputs=[
                resources_pb2.Input(
                    data=resources_pb2.Data(
                        image=resources_pb2.Image(
                            url=TRAVEL_IMAGE_URL, allow_duplicate_url=True
                        ),
                    ),
                )
            ]
        ),
        metadata=metadata(),
    )
    raise_on_failure(response)

    self._input = response.inputs[0]
    wait_for_inputs_upload(self._stub, metadata(), [self._input.id])
    return self._input
def test_post_patch_get_train_evaluate_delete_model(channel):
    """Create a model, patch its name/concepts, train and evaluate a version,
    then delete the model and its training inputs."""
    stub = service_pb2_grpc.V2Stub(channel)

    # Upload one input per concept so the model has data to train on.
    def _image_input(url, concept_id):
        return resources_pb2.Input(
            data=resources_pb2.Data(
                image=resources_pb2.Image(url=url, allow_duplicate_url=True),
                concepts=[resources_pb2.Concept(id=concept_id)],
            )
        )

    post_inputs_response = stub.PostInputs(
        service_pb2.PostInputsRequest(
            inputs=[
                _image_input(TRUCK_IMAGE_URL, "some-initial-concept"),
                _image_input(DOG_IMAGE_URL, "some-new-concept"),
            ]
        ),
        metadata=metadata(),
    )
    raise_on_failure(post_inputs_response)
    input_id_1 = post_inputs_response.inputs[0].id
    input_id_2 = post_inputs_response.inputs[1].id
    wait_for_inputs_upload(stub, metadata(), [input_id_1, input_id_2])

    model_id = u"我的新模型-" + uuid.uuid4().hex
    post_response = stub.PostModels(
        service_pb2.PostModelsRequest(
            models=[
                resources_pb2.Model(
                    id=model_id,
                    output_info=resources_pb2.OutputInfo(
                        data=resources_pb2.Data(
                            concepts=[resources_pb2.Concept(id="some-initial-concept")],
                        ),
                    ),
                )
            ]
        ),
        metadata=metadata(),
    )
    raise_on_failure(post_response)

    try:
        # Overwrite the model's name and its concept list.
        patch_response = stub.PatchModels(
            service_pb2.PatchModelsRequest(
                action="overwrite",
                models=[
                    resources_pb2.Model(
                        id=model_id,
                        name="some new name",
                        output_info=resources_pb2.OutputInfo(
                            data=resources_pb2.Data(
                                concepts=[
                                    resources_pb2.Concept(
                                        id="some-new-concept", value=1
                                    )
                                ]
                            ),
                        ),
                    )
                ],
            ),
            metadata=metadata(),
        )
        raise_on_failure(patch_response)

        get_response = stub.GetModelOutputInfo(
            service_pb2.GetModelRequest(model_id=model_id), metadata=metadata()
        )
        raise_on_failure(get_response)
        patched_model = get_response.model
        assert patched_model.id == model_id
        assert patched_model.name == "some new name"
        assert len(patched_model.output_info.data.concepts) == 1
        assert patched_model.output_info.data.concepts[0].id == "some-new-concept"

        # Train a new version, then evaluate it.
        post_model_versions_response = stub.PostModelVersions(
            service_pb2.PostModelVersionsRequest(model_id=model_id),
            metadata=metadata(),
        )
        raise_on_failure(post_model_versions_response)
        model_version_id = post_model_versions_response.model.model_version.id
        wait_for_model_trained(stub, metadata(), model_id, model_version_id)

        post_model_version_metrics_response = stub.PostModelVersionMetrics(
            service_pb2.PostModelVersionMetricsRequest(
                model_id=model_id, version_id=model_version_id
            ),
            metadata=metadata(),
        )
        raise_on_failure(post_model_version_metrics_response)
        wait_for_model_evaluated(stub, metadata(), model_id, model_version_id)
    finally:
        # Remove both the model and its training inputs.
        raise_on_failure(
            stub.DeleteModel(
                service_pb2.DeleteModelRequest(model_id=model_id), metadata=metadata()
            )
        )
        raise_on_failure(
            stub.DeleteInputs(
                service_pb2.DeleteInputsRequest(ids=[input_id_1, input_id_2]),
                metadata=metadata(),
            )
        )
def test_model_creation_training_and_evaluation(channel):
    """Create a two-concept model, upload one labeled input per concept, train
    and evaluate a model version, read back its full evaluation metrics, and
    clean everything up afterwards.
    """
    model_id = str(uuid.uuid4())

    stub = service_pb2_grpc.V2Stub(channel)

    raise_on_failure(
        stub.PostModels(
            service_pb2.PostModelsRequest(
                models=[
                    resources_pb2.Model(
                        id=model_id,
                        output_info=resources_pb2.OutputInfo(
                            data=resources_pb2.Data(
                                concepts=[
                                    resources_pb2.Concept(id="dog"),
                                    resources_pb2.Concept(id="toddler"),
                                ]
                            )
                        ),
                    )
                ]
            ),
            metadata=metadata(),
        )
    )

    input_ids = []
    try:
        post_inputs_response = stub.PostInputs(
            service_pb2.PostInputsRequest(
                inputs=[
                    resources_pb2.Input(
                        data=resources_pb2.Data(
                            image=resources_pb2.Image(
                                url="https://samples.clarifai.com/dog2.jpeg",
                                allow_duplicate_url=True,
                            ),
                            concepts=[resources_pb2.Concept(id="dog")],
                        )
                    ),
                    resources_pb2.Input(
                        data=resources_pb2.Data(
                            image=resources_pb2.Image(
                                url="https://samples.clarifai.com/toddler-flowers.jpeg",
                                allow_duplicate_url=True,
                            ),
                            concepts=[resources_pb2.Concept(id="toddler")],
                        )
                    ),
                ]
            ),
            metadata=metadata(),
        )
        raise_on_failure(post_inputs_response)
        input_ids = [i.id for i in post_inputs_response.inputs]

        # Fix: pass the request-metadata tuple metadata(), not the metadata
        # function object itself — matching every other wait_* call in this file.
        wait_for_inputs_upload(stub, metadata(), input_ids)

        response = stub.PostModelVersions(
            service_pb2.PostModelVersionsRequest(model_id=model_id),
            metadata=metadata(),
        )
        raise_on_failure(response)
        model_version_id = response.model.model_version.id
        wait_for_model_trained(stub, metadata(), model_id, model_version_id)

        raise_on_failure(
            stub.PostModelVersionMetrics(
                service_pb2.PostModelVersionMetricsRequest(
                    model_id=model_id,
                    version_id=model_version_id,
                ),
                metadata=metadata(),
            )
        )
        wait_for_model_evaluated(stub, metadata(), model_id, model_version_id)

        # Request every metrics field to make sure they can all be computed.
        response = stub.GetModelVersionMetrics(
            service_pb2.GetModelVersionMetricsRequest(
                model_id=model_id,
                version_id=model_version_id,
                fields=resources_pb2.FieldsValue(
                    confusion_matrix=True,
                    cooccurrence_matrix=True,
                    label_counts=True,
                    binary_metrics=True,
                    test_set=True,
                ),
            ),
            metadata=metadata(),
        )
        raise_on_failure(response)
    finally:
        # Robustness fix: clean up the model and inputs even if a wait or
        # request above failed, so failed runs don't leak resources.
        raise_on_failure(
            stub.DeleteModel(
                service_pb2.DeleteModelRequest(model_id=model_id), metadata=metadata()
            )
        )
        if input_ids:
            raise_on_failure(
                stub.DeleteInputs(
                    service_pb2.DeleteInputsRequest(ids=input_ids),
                    metadata=metadata(),
                )
            )
def test_post_patch_get_image_with_id_concepts_geo_and_metadata(channel):
    """Round-trip an input with an explicit id, concepts, geo point and metadata,
    then merge-patch it and verify the original values survive the merge."""
    stub = service_pb2_grpc.V2Stub(channel)
    input_id = uuid.uuid4().hex

    initial_metadata = Struct()
    initial_metadata.update(
        {
            "key1": 123,
            "key2": {
                "inner-key1": "inner-val1",
                "inner-key2": "inner-val2",
            },
        }
    )

    raise_on_failure(
        stub.PostInputs(
            service_pb2.PostInputsRequest(
                inputs=[
                    resources_pb2.Input(
                        id=input_id,
                        data=resources_pb2.Data(
                            image=resources_pb2.Image(
                                url=TRUCK_IMAGE_URL, allow_duplicate_url=True
                            ),
                            concepts=[
                                resources_pb2.Concept(
                                    id="some-positive-concept", value=1
                                ),
                                resources_pb2.Concept(
                                    id="some-negative-concept", value=0
                                ),
                            ],
                            geo=resources_pb2.Geo(
                                geo_point=resources_pb2.GeoPoint(
                                    longitude=55.0, latitude=66
                                ),
                            ),
                            metadata=initial_metadata,
                        ),
                    )
                ]
            ),
            metadata=metadata(),
        )
    )
    wait_for_inputs_upload(stub, metadata(), [input_id])

    get_response = stub.GetInput(
        service_pb2.GetInputRequest(input_id=input_id), metadata=metadata()
    )
    raise_on_failure(get_response)

    fetched = get_response.input
    assert fetched.id == input_id
    assert fetched.data.concepts[0].id == "some-positive-concept"
    assert fetched.data.concepts[0].value == 1.0
    assert fetched.data.concepts[1].id == "some-negative-concept"
    assert fetched.data.concepts[1].value == 0.0
    assert fetched.data.metadata["key1"] == 123
    assert fetched.data.metadata["key2"]["inner-key1"] == "inner-val1"
    assert fetched.data.metadata["key2"]["inner-key2"] == "inner-val2"
    assert fetched.data.geo.geo_point.longitude == 55.0
    assert fetched.data.geo.geo_point.latitude == 66.0

    extra_metadata = Struct()
    extra_metadata.update({"new-key": "new-value"})

    patch_response = stub.PatchInputs(
        service_pb2.PatchInputsRequest(
            action="merge",
            inputs=[
                resources_pb2.Input(
                    id=input_id,
                    data=resources_pb2.Data(
                        concepts=[
                            resources_pb2.Concept(
                                id="another-positive-concept", value=1
                            )
                        ],
                        geo=resources_pb2.Geo(
                            geo_point=resources_pb2.GeoPoint(
                                longitude=77.0, latitude=88.0
                            )
                        ),
                        metadata=extra_metadata,
                    ),
                )
            ],
        ),
        metadata=metadata(),
    )
    raise_on_failure(patch_response)

    patched = patch_response.inputs[0]
    assert patched.data.concepts[2].id == "another-positive-concept"
    assert patched.data.geo.geo_point.longitude == 77.0
    assert patched.data.geo.geo_point.latitude == 88.0
    assert patched.data.metadata["new-key"] == "new-value"
    # merge (not overwrite): the original metadata keys must still be present.
    assert patched.data.metadata["key1"] == 123

    raise_on_failure(
        stub.DeleteInputs(
            service_pb2.DeleteInputsRequest(ids=[input_id]), metadata=metadata()
        )
    )
def test_deep_classification_training_with_queries():
    """End-to-end deep-training test: create a throwaway app and API key, train
    a deep classification model from a template using query-based train/test
    splits, run a prediction with it, and finally delete the app.
    """
    stub = service_pb2_grpc.V2Stub(ClarifaiChannel.get_grpc_channel())

    app_id = "my-app-" + uuid.uuid4().hex[:20]
    post_apps_response = stub.PostApps(
        service_pb2.PostAppsRequest(
            apps=[
                resources_pb2.App(
                    id=app_id,
                    default_workflow_id="General",
                )
            ]
        ),
        metadata=pat_key_metadata(),
    )
    raise_on_failure(post_apps_response)

    # Create an all-scope key bound to the new app; all later calls use it.
    post_keys_response = stub.PostKeys(
        service_pb2.PostKeysRequest(
            keys=[
                resources_pb2.Key(
                    description="All scopes",
                    scopes=["All"],
                    apps=[resources_pb2.App(id=app_id, user_id="me")],
                )
            ],
        ),
        metadata=pat_key_metadata(),
    )
    raise_on_failure(post_keys_response)
    api_key = post_keys_response.keys[0].id

    template_name = "classification_cifar10_v1"
    model_id = "my-deep-classification-" + uuid.uuid4().hex
    model_type = _get_model_type_for_template(stub, api_key, template_name)

    # Keep the training run small: 2 epochs, CPU only.
    train_info_params = struct_pb2.Struct()
    train_info_params.update(
        {
            "template": template_name,
            "num_epochs": 2,
            "num_gpus": 0,
        }
    )
    post_models_response = stub.PostModels(
        service_pb2.PostModelsRequest(
            models=[
                resources_pb2.Model(
                    id=model_id,
                    model_type_id=model_type.id,
                    train_info=resources_pb2.TrainInfo(params=train_info_params),
                    output_info=resources_pb2.OutputInfo(
                        data=resources_pb2.Data(
                            concepts=[
                                resources_pb2.Concept(id="train-concept"),
                                resources_pb2.Concept(id="test-only-concept"),
                            ]
                        ),
                    ),
                )
            ]
        ),
        metadata=api_key_metadata(api_key),
    )
    raise_on_failure(post_models_response)

    # Alternate inputs between the "train" and "test" splits via annotation_info.
    train_and_test = ["train", "test"]
    inputs = []
    annotations = []
    for i, url in enumerate(URLS):
        input_id = str(i)
        inputs.append(
            resources_pb2.Input(
                id=input_id, data=resources_pb2.Data(image=resources_pb2.Image(url=url))
            )
        )
        train_annotation_info = struct_pb2.Struct()
        train_annotation_info.update({"split": train_and_test[i % 2]})
        ann = resources_pb2.Annotation(
            input_id=input_id,
            annotation_info=train_annotation_info,
            data=resources_pb2.Data(concepts=[resources_pb2.Concept(id="train-concept", value=1)]),
        )
        # Add an extra concept to the test set which should show up in evals,
        # but have a bad score since there is no instance of it in the train set.
        if i % 2 == 1:
            ann.data.concepts.append(resources_pb2.Concept(id="test-only-concept", value=1))
        annotations.append(ann)

    post_inputs_response = stub.PostInputs(
        service_pb2.PostInputsRequest(inputs=inputs),
        metadata=api_key_metadata(api_key),
    )
    raise_on_failure(post_inputs_response)

    wait_for_inputs_upload(stub, api_key_metadata(api_key), [str(i) for i in range(len(URLS))])

    post_annotations_response = stub.PostAnnotations(
        service_pb2.PostAnnotationsRequest(annotations=annotations),
        metadata=api_key_metadata(api_key),
    )
    raise_on_failure(post_annotations_response)

    # Train search: annotations whose split is "train".
    train_annotation_info = struct_pb2.Struct()
    train_annotation_info.update({"split": "train"})
    train_query = resources_pb2.Query(
        ands=[
            resources_pb2.And(
                annotation=resources_pb2.Annotation(annotation_info=train_annotation_info)
            ),
        ]
    )
    # Test search: negated, i.e. everything whose split is NOT "train".
    test_annotation_info = struct_pb2.Struct()
    test_annotation_info.update({"split": "train"})
    test_query = resources_pb2.Query(
        ands=[
            resources_pb2.And(
                negate=True,
                annotation=resources_pb2.Annotation(annotation_info=test_annotation_info),
            ),
        ]
    )
    post_model_versions_response = stub.PostModelVersions(
        service_pb2.PostModelVersionsRequest(
            model_id=model_id,
            train_search=resources_pb2.Search(query=train_query),
            test_search=resources_pb2.Search(query=test_query),
        ),
        metadata=api_key_metadata(api_key),
    )
    raise_on_failure(post_model_versions_response)
    model_version_id = post_model_versions_response.model.model_version.id
    wait_for_model_trained(stub, api_key_metadata(api_key), model_id, model_version_id)

    # Predict on one of the training URLs with the freshly trained version.
    post_model_outputs_response = stub.PostModelOutputs(
        service_pb2.PostModelOutputsRequest(
            model_id=model_id,
            version_id=model_version_id,
            inputs=[
                resources_pb2.Input(
                    data=resources_pb2.Data(image=resources_pb2.Image(url=URLS[0]))
                )
            ],
        ),
        metadata=api_key_metadata(api_key),
    )
    raise_on_failure(post_model_outputs_response)
    concepts = post_model_outputs_response.outputs[0].data.concepts
    assert len(concepts) == 2
    assert concepts[0].id == "train-concept"
    assert concepts[1].id == "test-only-concept"
    # The test-only concept never appears in the train split, so its predicted
    # value should be essentially zero.
    assert concepts[1].value <= 0.0001

    delete_app_response = stub.DeleteApp(
        service_pb2.DeleteAppRequest(
            user_app_id=resources_pb2.UserAppIDSet(user_id="me", app_id=app_id)
        ),
        metadata=pat_key_metadata(),
    )
    raise_on_failure(delete_app_response)