Ejemplo n.º 1
0
def list_repos_ids():
    """Print the id of every repository registered in the Ocean config index."""
    logging.debug("Listing repos ids")
    # Point ConfOcean at the configuration index before querying it.
    ConfOcean.set_elastic(get_elastic())

    for repository_id in ConfOcean.get_repos_ids():
        print(repository_id)
Ejemplo n.º 2
0
def get_perceval_params(url, index):
    """Return the stored perceval params for *index*.

    Looks the repository document up in the Ocean config index served at
    *url* and extracts its ``params`` field.
    """
    logging.info("Get perceval params for index: %s" % (index))
    elastic = get_elastic(url, ConfOcean.get_index())
    ConfOcean.set_elastic(elastic)

    repo_url = elastic.index_url + "/repos/" + index
    response = requests.get(repo_url)

    return response.json()['_source']['params']
Ejemplo n.º 3
0
def get_perceval_params(url, index):
    """Fetch the perceval params recorded for repository *index*.

    The params are read from the ``_source`` of the repo document stored
    in the Ocean configuration index at *url*.
    """
    logging.info("Get perceval params for index: %s" % (index))
    elastic = get_elastic(url, ConfOcean.get_index())
    ConfOcean.set_elastic(elastic)

    document = requests.get(elastic.index_url + "/repos/" + index).json()

    return document['_source']['params']
Ejemplo n.º 4
0
def list_repos():
    """Print id, last-update timestamp and success flag for every repo
    registered in the Ocean config index."""
    logging.debug("Listing repos")
    elastic = get_elastic()
    ConfOcean.set_elastic(elastic)

    for repo_id in ConfOcean.get_repos_ids():
        # Reuse the connection obtained above: the original re-called
        # get_elastic() on every iteration, rebuilding the client per repo
        # for no benefit.
        url = elastic.index_url + "/repos/" + repo_id
        r = requests.get(url)
        repo = r.json()['_source']
        print("%s %s %s" % (repo_id, repo['repo_update'], repo['success']))
Ejemplo n.º 5
0
def feed_backends(url, clean, debug=False, redis=None):
    ''' Update Ocean for all existing backends.

    Enqueues one `feed_backend` RQ job per repository found in the Ocean
    configuration index.

    :param url: Elasticsearch URL hosting the Ocean config index
    :param clean: whether to recreate the index from scratch
    :param debug: unused here; kept for interface compatibility
    :param redis: Redis connection parameters for the job queue
    '''

    logging.info("Updating all Ocean")
    elastic = get_elastic(url, ConfOcean.get_index(), clean)
    ConfOcean.set_elastic(elastic)
    fetch_cache = False

    # `async` became a reserved keyword in Python 3.7, so `async=async_`
    # is a SyntaxError on modern interpreters; rq renamed the flag to
    # `is_async` in rq >= 1.0.
    q = Queue('update', connection=Redis(redis), is_async=async_)

    for repo in ConfOcean.get_repos():
        task_feed = q.enqueue(feed_backend, url, clean, fetch_cache,
                              repo['backend_name'], repo['backend_params'],
                              repo['index'], repo['index_enrich'],
                              repo['project'])
        logging.info("Queued job")
        logging.info(task_feed)
Ejemplo n.º 6
0
def enrich_backends(url,
                    clean,
                    debug=False,
                    redis=None,
                    db_projects_map=None,
                    db_sortinghat=None):
    ''' Enrich all existing indexes.

    Enqueues one `enrich_backend` RQ job per repository found in the
    Ocean configuration index.

    :param url: Elasticsearch URL hosting the Ocean config index
    :param clean: whether to recreate the enriched indexes
    :param debug: unused here; kept for interface compatibility
    :param redis: Redis connection parameters for the job queue
    :param db_projects_map: projects mapping database (optional)
    :param db_sortinghat: SortingHat identities database (optional)
    '''

    logging.info("Enriching repositories")

    elastic = get_elastic(url, ConfOcean.get_index(), clean)
    ConfOcean.set_elastic(elastic)
    fetch_cache = False

    # `async` became a reserved keyword in Python 3.7, so `async=async_`
    # is a SyntaxError on modern interpreters; rq renamed the flag to
    # `is_async` in rq >= 1.0.
    q = Queue('update', connection=Redis(redis), is_async=async_)

    for repo in ConfOcean.get_repos():
        enrich_task = q.enqueue(enrich_backend, url, clean,
                                repo['backend_name'], repo['backend_params'],
                                repo['index'], repo['index_enrich'],
                                db_projects_map, db_sortinghat)
        logging.info("Queued job")
        logging.info(enrich_task)
Ejemplo n.º 7
0
def get_elastic():
    """Connect to the Ocean configuration index on Elasticsearch.

    Uses the global ``args.elastic_url`` for the server location and
    exits the process with status 1 on connection or write failure.
    """
    try:
        ocean_index = ConfOcean.get_index()
        client = ElasticSearch(args.elastic_url, ocean_index)
    except ElasticConnectException:
        logging.error("Can't connect to Elastic Search. Is it running?")
        sys.exit(1)
    except ElasticWriteException:
        logging.error("Can't write to Elastic Search.")
        sys.exit(1)

    return client