예제 #1
0
def get_perceval_params(url, index):
    """Fetch the stored perceval backend params for one Ocean repo.

    :param url: Elasticsearch URL used to build the connection.
    :param index: name of the repo document under ``/repos/`` in the
        Ocean config index.
    :returns: the ``params`` list stored in the repo document.
    """
    # Lazy %-args: the message is only formatted if INFO is enabled.
    logging.info("Get perceval params for index: %s", index)
    elastic = get_elastic(url, ConfOcean.get_index())
    ConfOcean.set_elastic(elastic)

    r = requests.get(elastic.index_url + "/repos/" + index)
    # Fail loudly on HTTP errors instead of raising an opaque JSON
    # decode error from an HTML error page.
    r.raise_for_status()

    params = r.json()['_source']['params']

    return params
예제 #2
0
파일: p2o.py 프로젝트: dpose/GrimoireELK
def feed_backends(url, clean, debug=False, redis=None):
    """Queue an Ocean update job for every configured backend repo.

    :param url: Elasticsearch URL for the Ocean indexes.
    :param clean: if True, recreate the index from scratch.
    :param debug: unused here; kept for interface compatibility.
    :param redis: connection parameter forwarded to ``Redis()``.
    """
    logging.info("Updating all Ocean")
    elastic = get_elastic(url, ConfOcean.get_index(), clean)
    ConfOcean.set_elastic(elastic)
    # Updates always do a full fetch; never read from the perceval cache.
    fetch_cache = False

    # `async` became a reserved keyword in Python 3.7; rq renamed the
    # Queue argument to `is_async` (rq >= 0.13).
    q = Queue('update', connection=Redis(redis), is_async=async_)

    for repo in ConfOcean.get_repos():
        task_feed = q.enqueue(feed_backend, url, clean, fetch_cache,
                              repo['backend_name'], repo['backend_params'],
                              repo['index'], repo['index_enrich'], repo['project'])
        logging.info("Queued job")
        logging.info(task_feed)
예제 #3
0
def feed_backends(url, clean, debug=False, redis=None):
    """Enqueue a feed job for each repository registered in Ocean.

    :param url: Elasticsearch URL for the Ocean indexes.
    :param clean: if True, recreate the index from scratch.
    :param debug: unused here; kept for interface compatibility.
    :param redis: connection parameter forwarded to ``Redis()``.
    """
    logging.info("Updating all Ocean")
    elastic = get_elastic(url, ConfOcean.get_index(), clean)
    ConfOcean.set_elastic(elastic)
    # Full fetch on every update; the perceval cache is not used.
    fetch_cache = False

    # Fix: `async` is a reserved keyword since Python 3.7, so the old
    # `async=async_` spelling is a SyntaxError; rq uses `is_async` now.
    q = Queue('update', connection=Redis(redis), is_async=async_)

    for repo in ConfOcean.get_repos():
        task_feed = q.enqueue(feed_backend, url, clean, fetch_cache,
                              repo['backend_name'], repo['backend_params'],
                              repo['index'], repo['index_enrich'], repo['project'])
        logging.info("Queued job")
        logging.info(task_feed)
예제 #4
0
def get_elastic(elastic_url=None):
    """Return an ElasticSearch handle for the Ocean config index.

    :param elastic_url: Elasticsearch URL; when omitted, falls back to
        the global ``args.elastic_url`` (backward compatible with the
        original zero-argument call).
    :returns: an ``ElasticSearch`` instance bound to the Ocean index.

    Exits the process (status 1) when the server is unreachable or
    refuses writes, matching the original CLI-oriented behavior.
    """
    if elastic_url is None:
        # Preserve the original reliance on the parsed-CLI-args global.
        elastic_url = args.elastic_url

    try:
        ocean_index = ConfOcean.get_index()
        elastic_ocean = ElasticSearch(elastic_url, ocean_index)

    except ElasticConnectException:
        logging.error("Can't connect to Elastic Search. Is it running?")
        sys.exit(1)

    except ElasticWriteException:
        logging.error("Can't write to Elastic Search.")
        sys.exit(1)

    return elastic_ocean
예제 #5
0
파일: p2o.py 프로젝트: dpose/GrimoireELK
def enrich_backends(url, clean, debug=False, redis=None,
                    db_projects_map=None, db_sortinghat=None):
    """Queue an enrichment job for every repository in Ocean.

    :param url: Elasticsearch URL for the Ocean indexes.
    :param clean: if True, recreate the enriched index from scratch.
    :param debug: unused here; kept for interface compatibility.
    :param redis: connection parameter forwarded to ``Redis()``.
    :param db_projects_map: projects-map database name, passed through.
    :param db_sortinghat: SortingHat database name, passed through.
    """
    logging.info("Enriching repositories")

    elastic = get_elastic(url, ConfOcean.get_index(), clean)
    ConfOcean.set_elastic(elastic)
    # Assigned but not forwarded to enrich_backend in this snippet.
    fetch_cache = False

    # `async` became a reserved keyword in Python 3.7; rq renamed the
    # Queue argument to `is_async` (rq >= 0.13).
    q = Queue('update', connection=Redis(redis), is_async=async_)

    for repo in ConfOcean.get_repos():
        enrich_task = q.enqueue(enrich_backend,
                                url, clean,
                                repo['backend_name'], repo['backend_params'],
                                repo['index'], repo['index_enrich'], db_projects_map, db_sortinghat)
        logging.info("Queued job")
        logging.info(enrich_task)
예제 #6
0
def enrich_backends(url, clean, debug=False, redis=None,
                    db_projects_map=None, db_sortinghat=None):
    """Enqueue enrichment of all existing Ocean indexes.

    :param url: Elasticsearch URL for the Ocean indexes.
    :param clean: if True, recreate the enriched index from scratch.
    :param debug: unused here; kept for interface compatibility.
    :param redis: connection parameter forwarded to ``Redis()``.
    :param db_projects_map: projects-map database name, passed through.
    :param db_sortinghat: SortingHat database name, passed through.
    """
    logging.info("Enriching repositories")

    elastic = get_elastic(url, ConfOcean.get_index(), clean)
    ConfOcean.set_elastic(elastic)
    # Assigned but never used by the enqueue call below.
    fetch_cache = False

    # Fix: `async` is a reserved keyword since Python 3.7, making
    # `async=async_` a SyntaxError; rq's Queue takes `is_async` now.
    q = Queue('update', connection=Redis(redis), is_async=async_)

    for repo in ConfOcean.get_repos():
        enrich_task = q.enqueue(enrich_backend,
                                url, clean,
                                repo['backend_name'], repo['backend_params'],
                                repo['index'], repo['index_enrich'], db_projects_map, db_sortinghat)
        logging.info("Queued job")
        logging.info(enrich_task)