def _check_es_health(product, env):
    """Check Elasticsearch cluster health for *env*.

    When *product* is given, only the env's default host is checked;
    otherwise every distinct host in ``product_host`` is checked.
    Exits the process on a red status and warns on yellow.
    """
    if product is None:
        logging.info(
            "No product specified ; Checking health of all Elasticsearch hosts for env '%s'\n" % env)
        # De-duplicate hosts before building their URLs.
        hosts = [_url_for_host(env, h) for h in set(product_host.values())]
    else:
        hosts = [_url_for_host(env)]
    es = ElasticSearch(hosts, port=port)
    # Add check on elasticsearch health
    status = es.health()['status']
    if status == 'red':
        logging.error("Elasticsearch status is red. Search will hang. Exiting\n")
        sys.exit(-1)
    elif status == 'yellow':
        logging.warning('Elasticsearch status is yellow. Search quality will be degraded\n')
def _check_es_health(product, env):
    """Check Elasticsearch health for *env*.

    Exits the process if the cluster status is red; logs a warning
    when it is yellow. *product* is accepted but not used here.
    """
    es = ElasticSearch([_url_for_host(env)], port=port)
    # Add check on elasticsearch health
    status = es.health()['status']
    if status == 'red':
        logging.error("Elasticsearch status is red. Search will hang. Exiting\n")
        sys.exit(-1)
    elif status == 'yellow':
        logging.warning('Elasticsearch status is yellow. Search quality will be degraded\n')
def test_cluster_size_3(self):
    """Start a 3-node cluster and verify shards are spread over >1 node."""
    cluster = self._make_one(size=3)
    cluster.start()
    # Every view of the cluster should report exactly three members.
    for measured in (cluster, cluster.hosts,
                     os.listdir(cluster.working_path), cluster.urls):
        self.assertEqual(len(measured), 3)
    client = ElasticSearch(cluster.urls, max_retries=2)
    self.assertEqual(client.health()['number_of_nodes'], 3)
    # test if routing works and data is actually distributed across nodes
    client.create_index('test_shards', settings={
        'number_of_shards': 1,
        'number_of_replicas': 2,
    })
    client.index('test_shards', 'spam', {'eggs': 'bacon'})
    client.refresh('test_shards')
    shard_info = client.status()['indices']['test_shards']['shards']['0']
    nodes = {shard['routing']['node'] for shard in shard_info}
    self.assertTrue(len(nodes) > 1)
def _check_es_health(product, env):
    """Verify Elasticsearch health for *env*, exiting on red status.

    With a *product*, only the env's default host is probed; without one,
    every distinct host listed in ``product_host`` is probed.
    """
    if product is not None:
        hosts = [_url_for_host(env)]
    else:
        logging.info("No product specified ; Checking health of all Elasticsearch hosts for env '%s'\n" % env)
        # Collapse duplicate hosts, then resolve each to its URL.
        hosts = [_url_for_host(env, host) for host in set(product_host.values())]
    client = ElasticSearch(hosts, port=port)
    # Add check on elasticsearch health
    health = client.health()
    if health['status'] == 'red':
        logging.error("Elasticsearch status is red. Search will hang. Exiting\n")
        sys.exit(-1)
    elif health['status'] == 'yellow':
        logging.warning('Elasticsearch status is yellow. Search quality will be degraded\n')
'channelName': {'type': 'string'}, 'adId': {'type': 'integer'}, 'adUrl': {'type': 'string'}, 'adType': {'type': 'string'}, 'adSize': {'type': 'string'}, 'dateCreated': {'type': 'date', 'format' : 'YYYY-MM-dd HH:mm:ss'}, 'websiteId': {'type': 'integer'}, 'website': {'type': 'string', 'analyzer': 'simple'}, 'category': {'type': 'string'}, 'subCategory': {'type': 'string'} } } } es.health(wait_for_status='yellow') es.delete_index('write-ads') es.create_index('write-ads', settings={'mappings': ad_mapping}) dateYMD = args["date"] prepareDataFromDB(dateYMD) dir = DATA_FILES_JSON + '/' + dateYMD for filename in os.listdir(dir): if filename.endswith('.json'): with open(dir + '/' + filename) as open_file: json_docs = json.load(open_file) es.bulk((es.index_op(doc) for doc in json_docs), index='write-ads', doc_type='ad')