def _set_up_index(service_name, endpoint, api_key, schema, index_batch):
    """Create a search index on the given service and, optionally, seed it with documents.

    :param service_name: name of the search service (used to build the REST URL).
    :param endpoint: full endpoint URL passed to the SearchClient for uploads.
    :param api_key: admin key sent as the ``api-key`` header / credential.
    :param schema: index schema source, resolved via ``_load_schema``; JSON whose
        ``name`` field is the index name.
    :param index_batch: optional document batch source, resolved via ``_load_batch``.
    :raises AzureTestError: if index creation does not return 201, or any
        document in the batch fails to upload.
    """
    from azure.core.credentials import AzureKeyCredential
    from azure.search.documents import SearchClient
    from azure.search.documents._generated.models import IndexBatch

    schema = _load_schema(schema)
    index_batch = _load_batch(index_batch)

    if schema:
        index_name = json.loads(schema)["name"]
        create_response = requests.post(
            SERVICE_URL_FMT.format(service_name, SEARCH_ENDPOINT_SUFFIX),
            headers={"Content-Type": "application/json", "api-key": api_key},
            data=schema,
        )
        if create_response.status_code != 201:
            raise AzureTestError(
                "Could not create a search index {}".format(create_response.status_code)
            )

    # optionally load data into the index
    if index_batch and schema:
        documents = IndexBatch.deserialize(index_batch)
        client = SearchClient(endpoint, index_name, AzureKeyCredential(api_key))
        upload_results = client.index_documents(documents)
        if any(not outcome.succeeded for outcome in upload_results):
            raise AzureTestError("Document upload to search index failed")
        # Indexing is asynchronous, so if you get a 200 from the REST API, that only means that the documents are
        # persisted, not that they're searchable yet. The only way to check for searchability is to run queries,
        # and even then things are eventually consistent due to replication. In the Track 1 SDK tests, we "solved"
        # this by using a constant delay between indexing and querying.
        import time

        time.sleep(TIME_TO_SLEEP)
def create_resource(self, name, **kwargs):
    """Provision a live Azure Search service (plus optional index and data) for a test.

    In playback (non-live) mode, returns canned values without provisioning
    anything. In live mode, creates the search service via the management
    client, waits for provisioning to succeed, then optionally creates the
    index from ``self.schema`` and uploads ``self.index_batch``.

    :param name: resource name (unused directly; required by the preparer contract).
    :param kwargs: forwarded to ``self._get_resource_group``.
    :returns: dict with ``api_key``, ``index_name`` and ``endpoint`` keys.
    :raises AzureTestError: if the service never reaches "Succeeded", index
        creation does not return 201, or a document upload fails.
    """
    # BUG FIX: `import time` used to appear only near the end of this method,
    # which made `time` a function-local name and caused the retry loop below
    # to raise UnboundLocalError before the import executed. Import it first.
    import time

    if self.schema:
        schema = json.loads(self.schema)
    else:
        schema = None
    self.service_name = self.create_random_name()
    self.endpoint = "https://{}.{}".format(self.service_name, SEARCH_ENDPOINT_SUFFIX)

    if not self.is_live:
        # Playback mode: no real resources are created.
        return {
            "api_key": "api-key",
            "index_name": schema["name"] if schema else None,
            "endpoint": self.endpoint,
        }

    group_name = self._get_resource_group(**kwargs).name

    from azure.mgmt.search import SearchManagementClient
    from azure.mgmt.search.models import ProvisioningState
    self.mgmt_client = self.create_mgmt_client(SearchManagementClient)

    # create the search service
    from azure.mgmt.search.models import SearchService, Sku
    service_config = SearchService(location="West US", sku=Sku(name="basic"))
    resource = self.mgmt_client.services.begin_create_or_update(
        group_name, self.service_name, service_config)

    retries = 4
    for i in range(retries):
        try:
            result = resource.result()
            if result.provisioning_state == ProvisioningState.succeeded:
                break
        except Exception:
            # Re-raise only on the final attempt; otherwise back off and retry.
            if i == retries - 1:
                raise
            time.sleep(TIME_TO_SLEEP)
        time.sleep(TIME_TO_SLEEP)
    # note the for/else here: will raise an error if we *don't* break
    # above i.e. if result.provisioning_state was never "Succeeded"
    else:
        raise AzureTestError("Could not create a search service")

    api_key = self.mgmt_client.admin_keys.get(
        group_name, self.service_name).primary_key

    if self.schema:
        response = requests.post(
            SERVICE_URL_FMT.format(self.service_name, SEARCH_ENDPOINT_SUFFIX),
            headers={
                "Content-Type": "application/json",
                "api-key": api_key
            },
            data=self.schema,
        )
        if response.status_code != 201:
            raise AzureTestError(
                "Could not create a search index {}".format(
                    response.status_code))
        self.index_name = schema["name"]

    # optionally load data into the index
    if self.index_batch and self.schema:
        from azure.core.credentials import AzureKeyCredential
        from azure.search.documents import SearchClient
        from azure.search.documents._generated.models import IndexBatch
        batch = IndexBatch.deserialize(self.index_batch)
        index_client = SearchClient(self.endpoint, self.index_name,
                                    AzureKeyCredential(api_key))
        results = index_client.index_documents(batch)
        if not all(result.succeeded for result in results):
            raise AzureTestError("Document upload to search index failed")
        # Indexing is asynchronous, so if you get a 200 from the REST API, that only means that the documents are
        # persisted, not that they're searchable yet. The only way to check for searchability is to run queries,
        # and even then things are eventually consistent due to replication. In the Track 1 SDK tests, we "solved"
        # this by using a constant delay between indexing and querying.
        time.sleep(TIME_TO_SLEEP)

    return {
        "api_key": api_key,
        "index_name": self.index_name,
        "endpoint": self.endpoint,
    }