def _get_pods_and_versions(project_name, case_name, installer):
    """Collect the set of versions seen per pod for one test case.

    Queries elasticsearch for every result matching the exact
    (installer, project_name, case_name) triple and groups the
    'version' field by 'pod_name'.  A synthetic 'all' key aggregates
    the versions seen across every pod.

    :param project_name: project to filter on (phrase match)
    :param case_name: test case to filter on (phrase match)
    :param installer: installer to filter on (phrase match)
    :return: dict mapping pod name -> set of versions, plus an 'all'
             key; empty dict when elasticsearch returns no hits
    """
    query_json = json.JSONEncoder().encode({
        "query": {
            "bool": {
                "must": [{"match_all": {}}],
                "filter": [
                    {"match": {"installer": {"query": installer,
                                             "type": "phrase"}}},
                    {"match": {"project_name": {"query": project_name,
                                                "type": "phrase"}}},
                    {"match": {"case_name": {"query": case_name,
                                             "type": "phrase"}}}
                ]
            }
        }
    })
    elastic_data = shared_utils.get_elastic_data(
        urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
        es_user, es_passwd, query_json)
    pods_and_versions = {}
    for data in elastic_data:
        version = data['version']
        # setdefault replaces the duplicated "if key present, add; else
        # create singleton set" branches for both the per-pod bucket and
        # the aggregate 'all' bucket.
        pods_and_versions.setdefault(data['pod_name'], set()).add(version)
        pods_and_versions.setdefault('all', set()).add(version)
    return pods_and_versions
def _get_pods_and_versions(project_name, case_name, installer):
    """Return the versions observed per pod for the given test case.

    Runs a filtered elasticsearch query (phrase matches on installer,
    project_name and case_name) and buckets the 'version' of every hit
    under its 'pod_name', plus a cumulative 'all' bucket.

    :return: dict of pod name -> set of version strings (with 'all')
    """
    phrase_filters = [
        {"match": {"installer": {"query": installer, "type": "phrase"}}},
        {"match": {"project_name": {"query": project_name,
                                    "type": "phrase"}}},
        {"match": {"case_name": {"query": case_name, "type": "phrase"}}},
    ]
    query_json = json.JSONEncoder().encode({
        "query": {
            "bool": {
                "must": [{"match_all": {}}],
                "filter": phrase_filters
            }
        }
    })
    search_url = urlparse.urljoin(base_elastic_url,
                                  '/test_results/mongo2elastic')
    hits = shared_utils.get_elastic_data(search_url, es_user, es_passwd,
                                         query_json)
    pods_and_versions = {}
    for hit in hits:
        # Each hit contributes its version to its own pod's bucket and
        # to the aggregate 'all' bucket.
        for bucket in (hit['pod_name'], 'all'):
            versions = pods_and_versions.get(bucket)
            if versions is None:
                pods_and_versions[bucket] = {hit['version']}
            else:
                versions.add(hit['version'])
    return pods_and_versions
args = parser.parse_args()

# Resolve connection settings and the publishing target from the CLI.
base_elastic_url = urlparse.urljoin(args.elasticsearch_url,
                                    '/test_results/mongo2elastic')
es_user = args.elasticsearch_username
es_passwd = args.elasticsearch_password
days = args.merge_latest
output_destination = args.output_destination
if output_destination == 'elasticsearch':
    # parsed_test_results will be printed/sent to elasticsearch
    output_destination = base_elastic_url

if days < 0:
    raise Exception('Update must be non-negative')
elif days == 0:
    # TODO get everything from mongo
    publish_mongo_data(output_destination)
else:
    # Only reconcile documents created within the last <days> days.
    body = '''{{ "query" : {{ "range" : {{ "creation_date" : {{ "gte" : "now-{}d" }} }} }} }}'''.format(days)
    elastic_data = shared_utils.get_elastic_data(base_elastic_url,
                                                 es_user, es_passwd, body)
    logger.info('number of hits in elasticsearch for now-{}d: {}'.format(
        days, len(elastic_data)))
    mongo_data = get_mongo_data(days)
    publish_difference(mongo_data, elastic_data, output_destination,
                       es_user, es_passwd)
days = args.merge_latest
es_user = args.elasticsearch_username
es_passwd = args.elasticsearch_password

# parsed_test_results will be printed/sent to elasticsearch
if output_destination == 'elasticsearch':
    output_destination = base_elastic_url

if days == 0:
    # TODO get everything from mongo
    publish_mongo_data(output_destination)
elif days > 0:
    # Restrict the elasticsearch side to documents created in the
    # last <days> days, then publish whatever mongo has on top of it.
    body = '''{{ "query" : {{ "range" : {{ "creation_date" : {{ "gte" : "now-{}d" }} }} }} }}'''.format(days)
    elastic_data = shared_utils.get_elastic_data(
        base_elastic_url, es_user, es_passwd, body)
    hits = len(elastic_data)
    logger.info(
        'number of hits in elasticsearch for now-{}d: {}'.format(days, hits))
    publish_difference(get_mongo_data(days), elastic_data,
                       output_destination, es_user, es_passwd)
else:
    raise Exception('Update must be non-negative')
def delete_all(url, es_user, es_passwd):
    """Delete every document under the given elasticsearch index URL.

    Fetches all document ids at *url*, then issues one DELETE request
    per document.

    :param url: base elasticsearch index URL
    :param es_user: elasticsearch username
    :param es_passwd: elasticsearch password
    """
    ids = shared_utils.get_elastic_data(url, es_user, es_passwd,
                                        body=None, field='_id')
    # 'doc_id' instead of 'id': don't shadow the builtin.
    for doc_id in ids:
        del_url = '/'.join([url, doc_id])
        shared_utils.delete_request(del_url, es_user, es_passwd)