def __init__(self, url, key, engine):
    self.url = url
    self.key = key
    self.engine = engine
    try:
        self.app_search = AppSearch(url, http_auth=self.key)
    except Exception:
        LOGGER.critical("Couldn't create AppSearch client")
        raise
import csv
import json

from elastic_enterprise_search import AppSearch


def make_json(csvFilePath, engine, url, private_key, batch_size):
    # Initialize the App Search client
    app_search = AppSearch(url, http_auth=private_key)

    # Read the CSV rows as dictionaries and index them in batches
    with open(csvFilePath) as csvf:
        csvReader = csv.DictReader(csvf)
        batch = []
        count = 0
        for row in csvReader:
            if count >= batch_size:
                json_data = json.dumps(batch)
                ingest_data(engine, json_data, app_search)
                batch = []
                count = 0
            batch.append(row)
            count += 1
        # Flush any remaining rows that didn't fill a full batch
        if batch:
            json_data = json.dumps(batch)
            ingest_data(engine, json_data, app_search)
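# make_json above delegates indexing to an ingest_data helper that is not shown
# in this snippet. A minimal sketch of what such a helper might look like,
# assuming it simply decodes the JSON batch and indexes it into the given engine
# (the name and signature are inferred from the call site, not from the original source):
def ingest_data(engine, json_data, app_search):
    # Decode the serialized batch back into a list of documents
    documents = json.loads(json_data)
    # App Search accepts at most 100 documents per indexing request,
    # so batch_size should be kept at or below that limit
    return app_search.index_documents(engine_name=engine, documents=documents)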
import logging

from elastic_enterprise_search import AppSearch

LOGGER = logging.getLogger(__name__)


class AppSearchConnector:
    def __init__(self, url, key, engine):
        self.url = url
        self.key = key
        self.engine = engine
        try:
            self.app_search = AppSearch(url, http_auth=self.key)
        except Exception:
            LOGGER.critical("Couldn't create AppSearch client")
            raise

    def insert_new_document(self, body):
        try:
            self.app_search.index_documents(engine_name=self.engine, documents=body)
            # ToDo: error handling for the response
        except Exception:
            LOGGER.critical("Something went wrong with elastic for domain: %s", str(body))

    def update_existing_document(self, body):
        try:
            self.app_search.put_documents(engine_name=self.engine, documents=body)
            LOGGER.debug("Updated document: %s", str(body))
            # ToDo: error handling for the response
        except Exception:
            LOGGER.critical("Something went wrong with elastic for domain: %s", str(body))

    def search_document(self, body):
        try:
            return self.app_search.search(engine_name=self.engine, body=body)
        except Exception:
            LOGGER.critical("Something went wrong with elastic for domain: %s", str(body))
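# A brief usage sketch for AppSearchConnector, assuming a locally running
# App Search instance; the endpoint, API key, engine name, and documents below
# are placeholders, not values from the original code.
connector = AppSearchConnector(
    url="http://localhost:3002",
    key="private-xxxxxxxxxxxxxxxxxxxxxxxx",
    engine="my-engine",
)
connector.insert_new_document([{"id": "doc-1", "title": "Hello, App Search"}])
results = connector.search_document({"query": "hello"})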
import jwt

from elastic_enterprise_search import AppSearch


def test_create_signed_search_key():
    private_key = "private-"
    signed_key = AppSearch.create_signed_search_key(
        api_key=private_key,
        api_key_name="api-key-name",
        search_fields={"first_name": {}},
        filters={"status": "available"},
        facets=None,
    )
    assert isinstance(signed_key, str)
    assert jwt.decode(signed_key, private_key, algorithms=["HS256"]) == {
        "api_key_name": "api-key-name",
        "facets": None,
        "filters": {"status": "available"},
        "search_fields": {"first_name": {}},
    }
def app_search(): yield AppSearch("http://localhost:3002", http_auth="private-k3ra4bqu12vgnhe3wibdw69f")
def app_search(ent_search_url, app_search_bearer_auth):
    with AppSearch(ent_search_url, bearer_auth=app_search_bearer_auth) as client:
        yield client
import glob
import json
import os

from elastic_enterprise_search import AppSearch

app_search = AppSearch("app_search_api_endpoint", http_auth="api_private_key")

response = []
print("Uploading movies to App Search...")
os.chdir("movies_directory")
for file in glob.glob("*.json"):
    with open(file, "r") as json_file:
        try:
            response = app_search.index_documents(engine_name="movies", documents=json.load(json_file))
            print(".", end="", flush=True)
        except Exception:
            print("Fail!")
            print(response)
            break
def app_search(): yield AppSearch( "https://my-deployment-c6095a.ent.us-central1.gcp.cloud.es.io:443", bearer_auth="private-ybzoyx7cok65hpxyxkwaarnn", )
import os

from elastic_enterprise_search import AppSearch

app_search = AppSearch(
    os.environ.get("CLOUD_URI"),
    http_auth=os.environ.get("PRIVATE_AUTH"),
)
engine_name = os.environ.get("ENGINE_NAME")
from elastic_enterprise_search import AppSearch

from . import settings

client = AppSearch(settings.APP_SEARCH_URL, http_auth=settings.APP_SEARCH_PRIVATE_KEY)

# Ensure the engine is created and has the proper schema
client.create_engine(engine_name=settings.APP_SEARCH_ENGINE_NAME, ignore_status=400)
client.put_schema(
    engine_name=settings.APP_SEARCH_ENGINE_NAME,
    schema=settings.APP_SEARCH_ENGINE_SCHEMA,
)
def init_app_search_client(cls, crawler_settings):
    app_search = AppSearch(
        crawler_settings.get('APPSEARCH_URL'),
        http_auth=crawler_settings.get('APPSEARCH_KEY'),
    )
    return app_search