def test_index_documents(self, mock_index):
    """Verify index_documents forwards the batched actions (and extra
    kwargs) to the generated-layer index call as a single IndexBatch."""
    search_client = SearchIndexClient("endpoint", "index name", CREDENTIAL)

    # Build a batch containing one action of each kind.
    documents_batch = IndexDocumentsBatch()
    documents_batch.add_upload_documents("upload1")
    documents_batch.add_delete_documents("delete1", "delete2")
    documents_batch.add_merge_documents(["merge1", "merge2", "merge3"])
    documents_batch.add_merge_or_upload_documents("merge_or_upload1")

    search_client.index_documents(documents_batch, extra="foo")

    assert mock_index.called
    positional, keyword = mock_index.call_args
    # Everything must be passed by keyword: the batch plus the extra kwarg.
    assert positional == ()
    assert len(keyword) == 2
    assert keyword["extra"] == "foo"

    forwarded = keyword["batch"]
    assert isinstance(forwarded, IndexBatch)
    assert forwarded.actions == documents_batch.actions
def create_resource(self, name, **kwargs):
    """Provision an Azure Search service (and optionally an index with data)
    for a live test run, or return canned connection info in playback mode.

    :param name: resource name supplied by the preparer framework (unused here;
        a random service name is generated instead).
    :param kwargs: forwarded preparer context; must allow resolving the
        resource group via ``self._get_resource_group``.
    :return: dict with ``api_key``, ``index_name`` and ``endpoint`` keys.
    :raises AzureTestError: if the service, index, or document upload fails.
    """
    # Hoisted from the bottom of the method: time.sleep is used in the retry
    # loop below, so the import must be in scope *before* the first use.
    import time

    if self.schema:
        schema = json.loads(self.schema)
    else:
        schema = None
    self.service_name = self.create_random_name()
    self.endpoint = "https://{}.search.windows.net".format(self.service_name)

    # In playback (recorded) mode there is nothing to provision; return
    # placeholder credentials that match the recordings.
    if not self.is_live:
        return {
            "api_key": "api-key",
            "index_name": schema["name"] if schema else None,
            "endpoint": self.endpoint,
        }

    group_name = self._get_resource_group(**kwargs).name
    from azure.mgmt.search import SearchManagementClient
    from azure.mgmt.search.models import ProvisioningState
    self.mgmt_client = self.create_mgmt_client(SearchManagementClient)

    # create the search service
    from azure.mgmt.search.models import SearchService, Sku
    service_config = SearchService(location="West US", sku=Sku(name="free"))
    resource = self.mgmt_client.services.create_or_update(
        group_name, self.service_name, service_config)

    retries = 4
    for i in range(retries):
        try:
            result = resource.result()
            if result.provisioning_state == ProvisioningState.succeeded:
                break
        except Exception:
            # Re-raise only once the retry budget is exhausted.
            if i == retries - 1:
                raise
            time.sleep(TIME_TO_SLEEP)
        time.sleep(TIME_TO_SLEEP)
    # note the for/else here: will raise an error if we *don't* break
    # above i.e. if result.provisioning_state was never "Succeeded"
    else:
        raise AzureTestError("Could not create a search service")

    api_key = self.mgmt_client.admin_keys.get(
        group_name, self.service_name).primary_key

    if self.schema:
        # Create the index by POSTing the schema straight to the REST API.
        response = requests.post(
            SERVICE_URL_FMT.format(self.service_name),
            headers={
                "Content-Type": "application/json",
                "api-key": api_key
            },
            data=self.schema,
        )
        if response.status_code != 201:
            raise AzureTestError(
                "Could not create a search index {}".format(
                    response.status_code))
        self.index_name = schema["name"]

    # optionally load data into the index
    if self.index_batch and self.schema:
        from azure.core.credentials import AzureKeyCredential
        from azure.search.documents import SearchIndexClient
        from azure.search.documents._index._generated.models import IndexBatch
        batch = IndexBatch.deserialize(self.index_batch)
        index_client = SearchIndexClient(self.endpoint, self.index_name,
                                         AzureKeyCredential(api_key))
        results = index_client.index_documents(batch)
        if not all(result.succeeded for result in results):
            raise AzureTestError("Document upload to search index failed")

        # Indexing is asynchronous, so if you get a 200 from the REST API, that only means that the documents are
        # persisted, not that they're searchable yet. The only way to check for searchability is to run queries,
        # and even then things are eventually consistent due to replication. In the Track 1 SDK tests, we "solved"
        # this by using a constant delay between indexing and querying.
        time.sleep(TIME_TO_SLEEP)

    return {
        "api_key": api_key,
        "index_name": self.index_name,
        "endpoint": self.endpoint,
    }