def test_delete_indexes_if_unchanged(self, api_key, endpoint, index_name, **kwargs):
    """A conditional delete (If-Not-Modified) with a stale etag must raise HttpResponseError."""
    client = SearchIndexClient(endpoint, AzureKeyCredential(api_key))

    # Create an index to work against.
    hotel_fields = [
        {"name": "hotelId", "type": "Edm.String", "key": True, "searchable": False},
        {"name": "baseRate", "type": "Edm.Double"},
    ]
    cors = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60)
    index = SearchIndex(
        name="hotels",
        fields=hotel_fields,
        scoring_profiles=[ScoringProfile(name="MyProfile")],
        cors_options=cors,
    )
    created = client.create_index(index)

    # Remember the etag of the freshly created index, then update the index
    # server-side so that the remembered etag becomes stale.
    stale_etag = created.e_tag
    index.scoring_profiles = []
    client.create_or_update_index(index)

    # Deleting with the stale etag and If-Not-Modified must be rejected.
    index.e_tag = stale_etag
    with pytest.raises(HttpResponseError):
        client.delete_index(index, match_condition=MatchConditions.IfNotModified)
def test_delete_indexes(self, api_key, endpoint, index_name, **kwargs):
    """After deleting the only index, listing indexes must yield nothing."""
    client = SearchIndexClient(endpoint, AzureKeyCredential(api_key))
    client.delete_index(index_name)

    import time
    if self.is_live:
        # Give the live service time to propagate the deletion.
        time.sleep(TIME_TO_SLEEP)

    remaining = client.list_indexes()
    with pytest.raises(StopIteration):
        next(remaining)
def _clean_up_indexes(endpoint, api_key):
    """Delete every synonym map and every index on the given search service.

    Test-suite cleanup helper so one run's leftovers don't affect the next.

    :param endpoint: URL of the Azure Cognitive Search service.
    :param api_key: admin API key for the service.
    """
    from azure.search.documents.indexes import SearchIndexClient
    client = SearchIndexClient(endpoint, AzureKeyCredential(api_key))
    # Wipe the synonym maps first, which seem to survive index deletion.
    # (Loop variable renamed: `map` shadowed the builtin.)
    for synonym_map in client.get_synonym_maps():
        client.delete_synonym_map(synonym_map.name)
    # Wipe any existing indexes.
    for index in client.list_indexes():
        client.delete_index(index)
def dispmsg(self):
    """Run the query/classification pipeline and show a status label.

    Reads the Tk variables set elsewhere on the window (assumed, based on
    the attribute reads visible here — confirm against the UI setup code):
      - ``name_var``: root directory of the input files
      - ``name_var1``: the query string (empty => classification mode)
      - ``vertical``: vertical/index name ('default' => temporary index)
      - ``classes``: class name for passive-learning classification
    """
    # Hoist the repeated Tk variable reads — the original re-read each
    # variable several times per branch.
    root_dir = str(self.name_var.get())
    query = str(self.name_var1.get())
    vertical = str(self.vertical.get()).lower()

    name_label2 = ttk.Label(
        self.window,
        text="File with the queried intents is downloaded at " + root_dir,
        font=('Times New Roman', 10, 'normal'),
    )
    name_label2.grid(row=10, column=1, padx=5, pady=10)

    if query != '':
        # Active learning: read files, tag entities, extract key phrases,
        # then run the query against an Azure Cognitive Search index.
        learning = 'active'
        Data, UserFnames = Read_Files(root_dir, learning=learning, vertical=vertical)
        Data_Frame = pd.DataFrame(Data, columns=['FileName', 'FilePath', 'Text'])
        Data_Frame = NER(Data_Frame)

        # Per-document key-phrase extraction via TextRank (iterate the
        # column directly instead of indexing by row label).
        kf = []
        for text in Data_Frame['Text']:
            tr4w = TextRank4Keyword()
            tr4w.analyze(text, candidate_pos=['NOUN', 'PROPN'], window_size=4, lower=False)
            kf.append(tr4w.get_keywords(100))
        Data_Frame['KeyPhrases'] = kf

        # NOTE(review): endpoint and key are hard-coded placeholders; they
        # should be loaded from configuration or environment variables
        # rather than committed to source.
        name = vertical
        endpoint = "https://<EndPoint>.search.windows.net"
        key = "<Cognitive search key>"
        if name == 'default':
            create_index(name, endpoint, key)
        upload_docs(Data_Frame=Data_Frame, index_name=name, endpoint=endpoint, key=key)
        result = search(rootdir=root_dir, Query=query, index_name=name,
                        endpoint=endpoint, key=key, fnames=UserFnames,
                        vertical=vertical)
        if name == 'default':
            # The 'default' index is temporary: drop it after searching.
            from azure.search.documents.indexes import SearchIndexClient
            from azure.core.credentials import AzureKeyCredential
            client = SearchIndexClient(endpoint, AzureKeyCredential(key))
            client.delete_index(name)
    elif query == '' and str(self.classes.get()) != 'None':
        # Passive learning: classify the documents instead of querying.
        learning = 'passive'
        Data, UserFnames = Read_Files(root_dir, learning=learning, vertical=None)
        Data_Frame = pd.DataFrame(Data, columns=['FileName', 'FilePath', 'Text'])
        result = classifier(dataframe=Data_Frame, classs=str(self.classes.get()),
                            rootdir=root_dir)
    else:
        # No query and no class selected: nothing to do.
        pass