def test_search_by_geo_box_and_annotated_name_and_predicted_name(channel):
    """ANDing a geo-box filter, an annotated-concept filter, and a predicted-concept
    filter still returns the set-up input among the hits."""
    stub = service_pb2_grpc.V2Stub(channel)
    with SetupImage(stub) as input_:
        annotated_name = input_.data.concepts[0].name

        geo_box = resources_pb2.Geo(geo_box=[
            resources_pb2.GeoBoxedPoint(
                geo_point=resources_pb2.GeoPoint(longitude=43, latitude=54)),
            resources_pb2.GeoBoxedPoint(
                geo_point=resources_pb2.GeoPoint(longitude=45, latitude=56)),
        ])
        geo_filter = resources_pb2.And(
            input=resources_pb2.Input(data=resources_pb2.Data(geo=geo_box)))
        annotated_filter = resources_pb2.And(input=resources_pb2.Input(
            data=resources_pb2.Data(concepts=[
                resources_pb2.Concept(name=annotated_name, value=1)
            ])))
        predicted_filter = resources_pb2.And(output=resources_pb2.Output(
            data=resources_pb2.Data(concepts=[
                resources_pb2.Concept(name="dog", value=1)
            ])))

        search_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(
                query=resources_pb2.Query(
                    ands=[geo_filter, annotated_filter, predicted_filter]),
                pagination=service_pb2.Pagination(page=1, per_page=1000),
            ),
            metadata=metadata(),
        )
        raise_on_failure(search_response)

        hit_input_ids = [hit.input.id for hit in search_response.hits]
        assert len(hit_input_ids) > 0
        assert input_.id in hit_input_ids
def test_search_by_image_url_and_geo_box(channel):
    """Visual similarity to an image URL combined with a geo-box filter returns
    the set-up input among the hits."""
    stub = service_pb2_grpc.V2Stub(channel)
    with SetupImage(stub) as input_:
        visual_filter = resources_pb2.And(output=resources_pb2.Output(
            input=resources_pb2.Input(data=resources_pb2.Data(
                image=resources_pb2.Image(url=DOG_IMAGE_URL)))))
        geo_filter = resources_pb2.And(input=resources_pb2.Input(
            data=resources_pb2.Data(geo=resources_pb2.Geo(geo_box=[
                resources_pb2.GeoBoxedPoint(
                    geo_point=resources_pb2.GeoPoint(longitude=43, latitude=54)),
                resources_pb2.GeoBoxedPoint(
                    geo_point=resources_pb2.GeoPoint(longitude=45, latitude=56)),
            ]))))

        search_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(
                query=resources_pb2.Query(ands=[visual_filter, geo_filter]),
                pagination=service_pb2.Pagination(page=1, per_page=1000),
            ),
            metadata=metadata(),
        )
        raise_on_failure(search_response)

        hit_input_ids = [hit.input.id for hit in search_response.hits]
        assert len(hit_input_ids) > 0
        assert input_.id in hit_input_ids
def test_save_and_execute_search_by_id(channel):
    """Save a search under an explicit ID, then execute it by that ID."""
    stub = service_pb2_grpc.V2Stub(channel)
    search_id = "my-search-id-" + uuid.uuid4().hex[:15]
    with SetupImage(stub) as input_:
        concept_id = input_.data.concepts[0].id
        saved_query = resources_pb2.Query(ands=[
            resources_pb2.And(input=resources_pb2.Input(
                data=resources_pb2.Data(concepts=[
                    resources_pb2.Concept(id=concept_id, value=1)
                ])))
        ])

        # Saving only stores the search under the ID; it does not execute it
        # or return any results.
        save_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(searches=[
                resources_pb2.Search(id=search_id, save=True, query=saved_query)
            ]),
            metadata=metadata(),
        )
        raise_on_failure(save_response)

        # Executing the saved search by ID returns the single matching input.
        execute_response = stub.PostSearchesByID(
            service_pb2.PostSearchesByIDRequest(id=search_id),
            metadata=metadata(),
        )
        raise_on_failure(execute_response)
        assert len(execute_response.hits) == 1
        assert execute_response.hits[0].input.id == input_.id
def test_search_by_annotated_concept_name(channel):
    """Searching by a concept name annotated on the input returns exactly that input."""
    stub = service_pb2_grpc.V2Stub(channel)
    with SetupImage(stub) as input_:
        concept_name = input_.data.concepts[0].name
        concept_filter = resources_pb2.And(input=resources_pb2.Input(
            data=resources_pb2.Data(concepts=[
                resources_pb2.Concept(name=concept_name, value=1)
            ])))

        search_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(
                query=resources_pb2.Query(ands=[concept_filter])),
            metadata=metadata(),
        )
        raise_on_failure(search_response)

        assert len(search_response.hits) == 1
        assert search_response.hits[0].input.id == input_.id
def test_search_by_image_url(channel):
    """Visual search against an image URL returns the set-up input among the hits."""
    stub = service_pb2_grpc.V2Stub(channel)
    with SetupImage(stub) as input_:
        visual_filter = resources_pb2.And(output=resources_pb2.Output(
            input=resources_pb2.Input(data=resources_pb2.Data(
                image=resources_pb2.Image(url=DOG_IMAGE_URL)))))

        search_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(
                query=resources_pb2.Query(ands=[visual_filter]),
                pagination=service_pb2.Pagination(page=1, per_page=1000),
            ),
            metadata=metadata(),
        )
        raise_on_failure(search_response)

        hit_input_ids = [hit.input.id for hit in search_response.hits]
        assert len(hit_input_ids) > 0
        assert input_.id in hit_input_ids
def test_search_by_predicted_concept_name(channel):
    """Searching by a model-predicted concept name ("dog") returns the set-up
    input among the hits."""
    stub = service_pb2_grpc.V2Stub(channel)
    with SetupImage(stub) as input_:
        predicted_filter = resources_pb2.And(output=resources_pb2.Output(
            data=resources_pb2.Data(concepts=[
                resources_pb2.Concept(name="dog", value=1)
            ])))

        search_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(
                query=resources_pb2.Query(ands=[predicted_filter]),
                pagination=service_pb2.Pagination(page=1, per_page=1000),
            ),
            metadata=metadata(),
        )
        raise_on_failure(search_response)

        hit_input_ids = [hit.input.id for hit in search_response.hits]
        assert len(hit_input_ids) > 0
        assert input_.id in hit_input_ids
def test_search_by_metadata(channel):
    """Searching by a nested metadata struct returns the set-up input among
    the hits (SetupImage presumably attaches matching metadata — the filter
    mirrors what the fixture sets)."""
    stub = service_pb2_grpc.V2Stub(channel)
    with SetupImage(stub) as input_:
        search_metadata = struct_pb2.Struct()
        search_metadata.update({"another-key": {"inner-key": "inner-value"}})
        metadata_filter = resources_pb2.And(input=resources_pb2.Input(
            data=resources_pb2.Data(metadata=search_metadata)))

        search_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(
                query=resources_pb2.Query(ands=[metadata_filter]),
                pagination=service_pb2.Pagination(page=1, per_page=1000),
            ),
            metadata=metadata(),
        )
        raise_on_failure(search_response)

        hit_input_ids = [hit.input.id for hit in search_response.hits]
        assert len(hit_input_ids) > 0
        assert input_.id in hit_input_ids
def test_search_by_geo_point_and_limit(channel):
    """Searching within 1000 km of a geo point returns the set-up input among
    the hits."""
    stub = service_pb2_grpc.V2Stub(channel)
    with SetupImage(stub) as input_:
        geo_filter = resources_pb2.And(input=resources_pb2.Input(
            data=resources_pb2.Data(geo=resources_pb2.Geo(
                geo_point=resources_pb2.GeoPoint(longitude=43, latitude=56),
                geo_limit=resources_pb2.GeoLimit(
                    value=1000, type="withinKilometers"),
            ))))

        search_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(
                query=resources_pb2.Query(ands=[geo_filter]),
                pagination=service_pb2.Pagination(page=1, per_page=1000),
            ),
            metadata=metadata(),
        )
        raise_on_failure(search_response)

        hit_input_ids = [hit.input.id for hit in search_response.hits]
        assert len(hit_input_ids) > 0
        assert input_.id in hit_input_ids
def test_save_and_execute_annotations_search_by_id(channel):
    """Annotate two inputs with the same concept, save a search for that
    concept, then execute the saved search by ID and expect both inputs."""
    stub = service_pb2_grpc.V2Stub(channel)
    my_search_id = "my-search-id-" + uuid.uuid4().hex[:15]
    my_concept_id = "my-anno-conc-" + uuid.uuid4().hex[:15]
    with SetupImage(stub) as input1, SetupImage(stub) as input2:
        list_annotations_response = stub.ListAnnotations(
            service_pb2.ListAnnotationsRequest(input_ids=[input1.id, input2.id]),
            metadata=metadata(),
        )
        raise_on_failure(list_annotations_response)
        annotation_id_by_input = {
            an.input_id: an.id for an in list_annotations_response.annotations
        }

        def _concept_annotation(inp):
            # Merge the shared concept onto this input's existing annotation.
            return resources_pb2.Annotation(
                id=annotation_id_by_input[inp.id],
                input_id=inp.id,
                data=resources_pb2.Data(concepts=[
                    resources_pb2.Concept(id=my_concept_id, value=1)
                ]),
            )

        patch_response = stub.PatchAnnotations(
            service_pb2.PatchAnnotationsRequest(
                action="merge",
                annotations=[_concept_annotation(input1),
                             _concept_annotation(input2)],
            ),
            metadata=metadata(),
        )
        raise_on_failure(patch_response)

        # as_of is set 5 seconds ahead — presumably so the search covers the
        # just-patched annotations once they are indexed.
        as_of = timestamp_pb2.Timestamp()
        as_of.FromSeconds(int(time.time() + 5))
        save_response = stub.PostSearches(
            service_pb2.PostSearchesRequest(searches=[
                resources_pb2.Search(
                    id=my_search_id,
                    save=True,
                    as_of=as_of,
                    query=resources_pb2.Query(ands=[
                        resources_pb2.And(input=resources_pb2.Input(
                            data=resources_pb2.Data(concepts=[
                                resources_pb2.Concept(id=my_concept_id, value=1)
                            ])))
                    ]),
                )
            ]),
            metadata=metadata(),
        )
        raise_on_failure(save_response)

        # Executing the saved search returns both annotated inputs.
        execute_response = stub.PostSearchesByID(
            service_pb2.PostSearchesByIDRequest(id=my_search_id),
            metadata=metadata(),
        )
        raise_on_failure(execute_response)

        hits = execute_response.hits
        assert len(hits) == 2
        hit_input_ids = [hit.input.id for hit in hits]
        assert input1.id in hit_input_ids
        assert input2.id in hit_input_ids
        assert all(hit.score == 1 for hit in hits)
def test_deep_classification_training_with_queries():
    """Train a deep classification model whose train/test sets are defined by
    search queries over an annotation-info "split" field, then verify that a
    concept present only in the test split predicts with a near-zero score.

    Fixes vs. original: the app is now deleted in a ``finally`` block so it is
    not leaked when any intermediate step fails, and the garbled mid-statement
    line breaks / comment wording from the original source are repaired.
    """
    stub = service_pb2_grpc.V2Stub(ClarifaiChannel.get_grpc_channel())

    app_id = "my-app-" + uuid.uuid4().hex[:20]
    post_apps_response = stub.PostApps(
        service_pb2.PostAppsRequest(apps=[
            resources_pb2.App(
                id=app_id,
                default_workflow_id="General",
            )
        ]),
        metadata=pat_key_metadata(),
    )
    raise_on_failure(post_apps_response)

    try:
        post_keys_response = stub.PostKeys(
            service_pb2.PostKeysRequest(keys=[
                resources_pb2.Key(
                    description="All scopes",
                    scopes=["All"],
                    apps=[resources_pb2.App(id=app_id, user_id="me")],
                )
            ]),
            metadata=pat_key_metadata(),
        )
        raise_on_failure(post_keys_response)
        api_key = post_keys_response.keys[0].id

        template_name = "classification_cifar10_v1"
        model_id = "my-deep-classification-" + uuid.uuid4().hex
        model_type = _get_model_type_for_template(stub, api_key, template_name)

        train_info_params = struct_pb2.Struct()
        train_info_params.update({
            "template": template_name,
            "num_epochs": 2,
            "num_gpus": 0,
        })
        post_models_response = stub.PostModels(
            service_pb2.PostModelsRequest(models=[
                resources_pb2.Model(
                    id=model_id,
                    model_type_id=model_type.id,
                    train_info=resources_pb2.TrainInfo(params=train_info_params),
                    output_info=resources_pb2.OutputInfo(
                        data=resources_pb2.Data(concepts=[
                            resources_pb2.Concept(id="train-concept"),
                            resources_pb2.Concept(id="test-only-concept"),
                        ]),
                    ),
                )
            ]),
            metadata=api_key_metadata(api_key),
        )
        raise_on_failure(post_models_response)

        # Alternate inputs between the "train" and "test" splits via the
        # annotation_info "split" field.
        train_and_test = ["train", "test"]
        inputs = []
        annotations = []
        for i, url in enumerate(URLS):
            input_id = str(i)
            inputs.append(
                resources_pb2.Input(
                    id=input_id,
                    data=resources_pb2.Data(image=resources_pb2.Image(url=url)),
                ))
            split_info = struct_pb2.Struct()
            split_info.update({"split": train_and_test[i % 2]})
            ann = resources_pb2.Annotation(
                input_id=input_id,
                annotation_info=split_info,
                data=resources_pb2.Data(concepts=[
                    resources_pb2.Concept(id="train-concept", value=1)
                ]),
            )
            # Add an extra concept to the test set which should show up in
            # evals, but have a bad score since there is no instance of it in
            # the train set.
            if i % 2 == 1:
                ann.data.concepts.append(
                    resources_pb2.Concept(id="test-only-concept", value=1))
            annotations.append(ann)

        post_inputs_response = stub.PostInputs(
            service_pb2.PostInputsRequest(inputs=inputs),
            metadata=api_key_metadata(api_key),
        )
        raise_on_failure(post_inputs_response)
        wait_for_inputs_upload(
            stub, api_key_metadata(api_key), [str(i) for i in range(len(URLS))])

        post_annotations_response = stub.PostAnnotations(
            service_pb2.PostAnnotationsRequest(annotations=annotations),
            metadata=api_key_metadata(api_key),
        )
        raise_on_failure(post_annotations_response)

        # Train set: annotations whose annotation_info has split == "train".
        train_split_info = struct_pb2.Struct()
        train_split_info.update({"split": "train"})
        train_query = resources_pb2.Query(ands=[
            resources_pb2.And(
                annotation=resources_pb2.Annotation(
                    annotation_info=train_split_info)),
        ])
        # Test set: the negation of the train query, i.e. everything NOT
        # annotated with split == "train".
        test_split_info = struct_pb2.Struct()
        test_split_info.update({"split": "train"})
        test_query = resources_pb2.Query(ands=[
            resources_pb2.And(
                negate=True,
                annotation=resources_pb2.Annotation(
                    annotation_info=test_split_info)),
        ])

        post_model_versions_response = stub.PostModelVersions(
            service_pb2.PostModelVersionsRequest(
                model_id=model_id,
                train_search=resources_pb2.Search(query=train_query),
                test_search=resources_pb2.Search(query=test_query),
            ),
            metadata=api_key_metadata(api_key),
        )
        raise_on_failure(post_model_versions_response)

        model_version_id = post_model_versions_response.model.model_version.id
        wait_for_model_trained(
            stub, api_key_metadata(api_key), model_id, model_version_id)

        post_model_outputs_response = stub.PostModelOutputs(
            service_pb2.PostModelOutputsRequest(
                model_id=model_id,
                version_id=model_version_id,
                inputs=[
                    resources_pb2.Input(
                        data=resources_pb2.Data(
                            image=resources_pb2.Image(url=URLS[0])))
                ],
            ),
            metadata=api_key_metadata(api_key),
        )
        raise_on_failure(post_model_outputs_response)

        concepts = post_model_outputs_response.outputs[0].data.concepts
        assert len(concepts) == 2
        assert concepts[0].id == "train-concept"
        assert concepts[1].id == "test-only-concept"
        # The test-only concept never appears in the train set, so its
        # predicted score must be effectively zero.
        assert concepts[1].value <= 0.0001
    finally:
        # Always clean up the app, even when a step above failed.
        delete_app_response = stub.DeleteApp(
            service_pb2.DeleteAppRequest(
                user_app_id=resources_pb2.UserAppIDSet(
                    user_id="me", app_id=app_id)),
            metadata=pat_key_metadata(),
        )
        raise_on_failure(delete_app_response)