Example #1
from redis import Redis
from rq import Queue

# Other helpers used below (get_params_parser, get_params_parser_create_dash,
# config_logging, feed_backend, enrich_backend, get_backend_id, create_dashboard)
# come from the surrounding module, not shown in this example.
def build_dashboard(params):
    parser = get_params_parser()
    parser_create_dash = get_params_parser_create_dash()
    args = parser.parse_args(params['p2o_params'].split())

    config_logging(args.debug)

    url = args.elastic_url
    clean = False
    async_ = True

    # 'async' became a reserved word in Python 3.7; current RQ uses 'is_async'
    q = Queue('create', connection=Redis(args.redis), is_async=async_)
    task_feed = q.enqueue(feed_backend, url, clean, args.fetch_cache,
                          args.backend, args.backend_args)
    q = Queue('enrich', connection=Redis(args.redis), is_async=async_)
    if async_:
        # Run the enrich task only after the feed task has finished
        result = q.enqueue(enrich_backend, url, clean,
                           args.backend, args.backend_args,
                           depends_on=task_feed)
    else:
        result = q.enqueue(enrich_backend, url, clean,
                           args.backend, args.backend_args)

    # Creating the dashboard is quick: do it synchronously and return the URL.
    enrich_index = args.backend + "_"
    enrich_index += get_backend_id(args.backend, args.backend_args) + "_enrich"
    args = parser_create_dash.parse_args(params['e2k_params'].split())
    kibana_host = "http://localhost:5601"
    dash_url = create_dashboard(args.elastic_url, args.dashboard, enrich_index, kibana_host)

    return dash_url
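A minimal usage sketch for the function above. Everything here is hypothetical: the flag names are guessed from how args is read (elastic_url, redis, backend, backend_args, dashboard come from get_params_parser() and get_params_parser_create_dash(), whose definitions are not shown), and the repository URL is a placeholder.

params = {
    'p2o_params': ('-e http://localhost:9200 --redis localhost '
                   '--backend git --backend-args https://github.com/org/repo.git'),
    'e2k_params': '-e http://localhost:9200 --dashboard git'
}
dash_url = build_dashboard(params)
print(dash_url)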
Example #2
            search_id = get_search_from_vis(elastic, vis_id)
            if search_id and search_id not in search_ids_done:
                search_ids_done.append(search_id)
                kibana["searches"].append({"id":search_id,
                                           "value":get_search_json(elastic, search_id)})
            index_pattern_id = get_index_pattern_from_vis(elastic, vis_id)
            if index_pattern_id and index_pattern_id not in index_ids_done:
                index_ids_done.append(index_pattern_id)
                kibana["index_patterns"].append({"id":index_pattern_id,
                                                 "value":get_index_pattern_json(elastic, index_pattern_id)})
    logging.debug("Done")

    with open(export_file, 'w') as f:
        f.write(json.dumps(kibana))

if __name__ == '__main__':

    args = get_params()

    config_logging(args.debug)

    if args.import_file:
        import_dashboard(args.elastic_url, args.import_file, args.kibana_index)
    elif args.export_file:
        if os.path.isfile(args.export_file):
            logging.info("%s exists. Remove it before running.", args.export_file)
            sys.exit(0)
        export_dashboard(args.elastic_url, args.dashboard, args.export_file, args.kibana_index)
    elif args.list:
        list_dashboards(args.elastic_url, args.kibana_index)
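For reference, the export loop above collects objects into a kibana dict that is then serialized to export_file. A minimal sketch of the resulting shape, assuming the dict (and any other keys it carries) is initialized earlier in the original function, which this fragment truncates; the ids and values below are placeholders:

kibana = {
    "searches": [
        {"id": "<search-id>", "value": {}}              # from get_search_json()
    ],
    "index_patterns": [
        {"id": "<index-pattern-id>", "value": {}}       # from get_index_pattern_json()
    ]
}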
Example #3
        repo_url = repo['clone_url']
        q = "INSERT INTO project_repositories (project_id, data_source, repository_name) VALUES (%s, %s, %s)"
        cursor.execute(q, (project_id, PROJECTS_DS, repo_url))

    db.close()


if __name__ == '__main__':

    task_init = datetime.now()

    arthur_repos = {"repositories": []}

    args = get_params()

    config_logging(args.debug)

    total_repos = 0

    # enrich ocean
    index_enrich = OCEAN_INDEX + "_" + PERCEVAL_BACKEND + "_enrich"
    es_enrich = None
    try:
        es_enrich = ElasticSearch(args.elastic_url, index_enrich)
    except ElasticConnectException:
        logging.error("Can't connect to Elastic Search. Is it running?")

    # The owner could be an org or a user.
    for org in args.org:
        owner_url = get_owner_repos_url(org, args.token)
        try: