def build_dashboard(params):
    """Feed and enrich a data source through RQ queues, then build its dashboard.

    The feed and enrich phases are submitted as asynchronous RQ jobs (enrich
    depends on feed); the dashboard creation itself is quick, so it is done
    synchronously and its URL returned.

    :param params: dict with 'p2o_params' (feed/enrich CLI argument string)
                   and 'e2k_params' (dashboard-creation CLI argument string)
    :returns: URL of the created Kibana dashboard
    """
    parser = get_params_parser()
    parser_create_dash = get_params_parser_create_dash()

    args = parser.parse_args(params['p2o_params'].split())
    config_logging(args.debug)

    url = args.elastic_url
    clean = False
    async_ = True

    # FIX: 'async' became a reserved keyword in Python 3.7, so the original
    # Queue(..., async=async_) is a SyntaxError; rq renamed the parameter to
    # 'is_async' (rq >= 0.12).
    q = Queue('create', connection=Redis(args.redis), is_async=async_)
    task_feed = q.enqueue(feed_backend, url, clean, args.fetch_cache,
                          args.backend, args.backend_args)

    q = Queue('enrich', connection=Redis(args.redis), is_async=async_)
    if async_:
        # Enrich only after the feed job has finished.
        q.enqueue(enrich_backend, url, clean,
                  args.backend, args.backend_args,
                  depends_on=task_feed)
    else:
        q.enqueue(enrich_backend, url, clean,
                  args.backend, args.backend_args)
    # FIX: removed a duplicated unconditional enqueue of enrich_backend that
    # re-submitted the same enrich job a second time after the if/else above.

    # The creation of the dashboard is quick. Do it sync and return the URL.
    enrich_index = args.backend + "_"
    enrich_index += get_backend_id(args.backend, args.backend_args) + "_enrich"

    args = parser_create_dash.parse_args(params['e2k_params'].split())
    # NOTE(review): Kibana host is hard-coded — TODO confirm whether it should
    # come from the parsed e2k args instead.
    kibana_host = "http://localhost:5601"
    dash_url = create_dashboard(args.elastic_url, args.dashboard, enrich_index,
                                kibana_host)
    return dash_url
# NOTE(review): this is a byte-identical re-definition of build_dashboard that
# shadows the one defined earlier in this file — the duplicate should be
# removed during deduplication of this file.
def build_dashboard(params):
    """Feed and enrich a data source through RQ queues, then build its dashboard.

    The feed and enrich phases are submitted as asynchronous RQ jobs (enrich
    depends on feed); the dashboard creation itself is quick, so it is done
    synchronously and its URL returned.

    :param params: dict with 'p2o_params' (feed/enrich CLI argument string)
                   and 'e2k_params' (dashboard-creation CLI argument string)
    :returns: URL of the created Kibana dashboard
    """
    parser = get_params_parser()
    parser_create_dash = get_params_parser_create_dash()

    args = parser.parse_args(params['p2o_params'].split())
    config_logging(args.debug)

    url = args.elastic_url
    clean = False
    async_ = True

    # FIX: 'async' became a reserved keyword in Python 3.7, so the original
    # Queue(..., async=async_) is a SyntaxError; rq renamed the parameter to
    # 'is_async' (rq >= 0.12).
    q = Queue('create', connection=Redis(args.redis), is_async=async_)
    task_feed = q.enqueue(feed_backend, url, clean, args.fetch_cache,
                          args.backend, args.backend_args)

    q = Queue('enrich', connection=Redis(args.redis), is_async=async_)
    if async_:
        # Enrich only after the feed job has finished.
        q.enqueue(enrich_backend, url, clean,
                  args.backend, args.backend_args,
                  depends_on=task_feed)
    else:
        q.enqueue(enrich_backend, url, clean,
                  args.backend, args.backend_args)
    # FIX: removed a duplicated unconditional enqueue of enrich_backend that
    # re-submitted the same enrich job a second time after the if/else above.

    # The creation of the dashboard is quick. Do it sync and return the URL.
    enrich_index = args.backend + "_"
    enrich_index += get_backend_id(args.backend, args.backend_args) + "_enrich"

    args = parser_create_dash.parse_args(params['e2k_params'].split())
    # NOTE(review): Kibana host is hard-coded — TODO confirm whether it should
    # come from the parsed e2k args instead.
    kibana_host = "http://localhost:5601"
    dash_url = create_dashboard(args.elastic_url, args.dashboard, enrich_index,
                                kibana_host)
    return dash_url
from datetime import datetime from os import sys from grimoire_elk.elk import feed_backend, enrich_backend from grimoire_elk.elastic import ElasticSearch from grimoire_elk.elastic_items import ElasticItems from grimoire_elk.utils import get_params, config_logging if __name__ == '__main__': """Perceval2Ocean tool""" app_init = datetime.now() args = get_params() config_logging(args.debug) url = args.elastic_url clean = args.no_incremental if args.fetch_cache: clean = True try: if args.backend: # Configure elastic bulk size and scrolling if args.bulk_size: ElasticSearch.max_items_bulk = args.bulk_size if args.scroll_size: ElasticItems.scroll_size = args.scroll_size if not args.enrich_only:
    # Tail of the parser-builder function whose `def` line is outside this
    # view: hand the fully configured argparse parser back to the caller.
    return parser


def get_params():
    """Parse and validate the command-line arguments for this tool.

    Requires at least one of --export, --import or --list; --export
    additionally requires --dashboard. Invalid combinations abort via
    parser.error() (which prints the message and exits).

    :returns: the parsed argparse namespace
    """
    parser = get_params_parser_create_dash()
    args = parser.parse_args()

    if not (args.export_file or args.import_file or args.list):
        parser.error("--export or --import or --list needed")
    else:
        if args.export_file and not args.dashboard:
            parser.error("--export needs --dashboard")
    return args


if __name__ == '__main__':
    ARGS = get_params()
    config_logging(ARGS.debug)

    if ARGS.import_file:
        # Import a dashboard definition into the given ElasticSearch/Kibana.
        import_dashboard(ARGS.elastic_url, ARGS.import_file, ARGS.kibana_index,
                         ARGS.data_sources)
    elif ARGS.export_file:
        # Refuse to overwrite an existing export file; exit without error.
        if os.path.isfile(ARGS.export_file):
            logging.info("%s exists. Remove it before running.",
                         ARGS.export_file)
            sys.exit(0)
        export_dashboard(ARGS.elastic_url, ARGS.dashboard, ARGS.export_file,
                         ARGS.kibana_index)
    elif ARGS.list:
        # List the dashboards available in the Kibana index.
        list_dashboards(ARGS.elastic_url, ARGS.kibana_index)
''' Get params definition from ElasticOcean and from all the backends ''' parser = get_params_parser() args = parser.parse_args() return args if __name__ == '__main__': app_init = datetime.now() args = get_params() config_logging(args.debug) url = args.elastic_url clean = args.no_incremental if args.fetch_cache: clean = True try: if args.backend: # Configure elastic bulk size and scrolling if args.bulk_size: ElasticSearch.max_items_bulk = args.bulk_size if args.scroll_size: ElasticItems.scroll_size = args.scroll_size if not args.enrich_only: