Example #1
def export_dashboard(elastic_url, dash_id, export_file, es_index=None):

    # Kibana dashboard fields
    kibana = {
        "dashboard": None,
        "visualizations": [],
        "index_patterns": [],
        "searches": []
    }

    # Used to avoid having duplicates
    search_ids_done = []
    index_ids_done = []

    logging.debug("Exporting dashboard %s to %s" %
                  (dash_id, export_file))
    if not es_index:
        es_index = ".kibana"

    elastic = ElasticSearch(elastic_url, es_index)

    kibana["dashboard"] = {
        "id": dash_id,
        "value": get_dashboard_json(elastic, dash_id)
    }

    if "panelsJSON" not in kibana["dashboard"]["value"]:
        # The dashboard is empty. No visualizations included.
        return kibana

    # Export all visualizations and the index patterns and searches in them
    for panel in json.loads(kibana["dashboard"]["value"]["panelsJSON"]):
        if panel['type'] in ['visualization']:
            vis_id = panel['id']
            vis_json = get_vis_json(elastic, vis_id)
            kibana["visualizations"].append({"id": vis_id, "value": vis_json})
            search_id = get_search_from_vis(elastic, vis_id)
            if search_id and search_id not in search_ids_done:
                search_ids_done.append(search_id)
                kibana["searches"].append({
                    "id": search_id,
                    "value": get_search_json(elastic, search_id)
                })
            index_pattern_id = get_index_pattern_from_vis(elastic, vis_id)
            if index_pattern_id and index_pattern_id not in index_ids_done:
                index_ids_done.append(index_pattern_id)
                kibana["index_patterns"].append({
                    "id": index_pattern_id,
                    "value": get_index_pattern_json(elastic, index_pattern_id)
                })
    logging.debug("Done")

    with open(export_file, 'w') as f:
        f.write(json.dumps(kibana))
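A minimal usage sketch for export_dashboard above, assuming the helpers from this example are importable and an Elasticsearch instance holding the .kibana index is reachable; the URL, dashboard id and output path below are placeholder values, not taken from the original code:

import logging

logging.basicConfig(level=logging.DEBUG)
# Placeholder values; point these at your own Elasticsearch and dashboard id
export_dashboard("http://localhost:9200", "git-dashboard", "git-dashboard.json")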
Example #2
def create_search(elastic_url, dashboard, index_pattern, es_index=None):
    """ Create the base search for the new visualizations, if the template uses one

        :param elastic_url: URL for ElasticSearch (ES) server
        :param dashboard: kibana dashboard to be used as template
        :param index_pattern: index pattern to be used by the new search

    """

    search_id = None
    if not es_index:
        es_index = ".kibana"
    elastic = ElasticSearch(elastic_url, es_index)

    dash_data = get_dashboard_json(elastic, dashboard)

    # First vis
    if "panelsJSON" not in dash_data:
        logging.error("Cannot find vis in dashboard: %s" % dashboard)
        raise RuntimeError("Cannot find vis in dashboard: %s" % dashboard)

    # Get the search from the first vis in the panel
    for panel in json.loads(dash_data["panelsJSON"]):
        panel_id = panel["id"]
        logging.debug("Checking search in %s vis" % (panel_id))

        search_id = get_search_from_vis(elastic, panel_id)
        if search_id:
            break

    # And now time to create the search found
    if not search_id:
        logging.info("Can't find a search in dashboard %s" % dashboard)
        return

    logging.debug("Found template search %s" % (search_id))

    search_json = get_search_json(elastic, search_id)
    search_source = search_json['kibanaSavedObjectMeta']['searchSourceJSON']
    new_search_source = json.loads(search_source)
    new_search_source['index'] = index_pattern
    new_search_source = json.dumps(new_search_source)
    search_json['kibanaSavedObjectMeta']['searchSourceJSON'] = new_search_source

    search_json['title'] += " " + index_pattern
    new_search_id = search_id + "__" + index_pattern

    url = elastic.index_url + "/search/" + new_search_id
    requests.post(url, data=json.dumps(search_json), verify=False)

    logging.debug("New search created: %s" % (url))

    return new_search_id
Example #3
def create_index_pattern(elastic_url, dashboard, enrich_index, es_index=None):
    """ Create an index pattern using as template the index pattern
        found in the template dashboard's vis

        :param elastic_url: URL for ElasticSearch (ES) server
        :param dashboard: kibana dashboard to be used as template
        :param enrich_index: ES index with enriched items used in the new dashboard

    """

    index_pattern = None
    if not es_index:
        es_index = ".kibana"
    elastic = ElasticSearch(elastic_url, es_index)

    dash_data = get_dashboard_json(elastic, dashboard)

    # First vis
    if "panelsJSON" not in dash_data:
        logging.error("Cannot find vis in dashboard: %s" % dashboard)
        raise RuntimeError("Cannot find vis in dashboard: %s" % dashboard)

    # Get the index pattern from the first vis in the panel
    # that has index pattern data
    for panel in json.loads(dash_data["panelsJSON"]):
        panel_id = panel["id"]
        logging.debug("Checking index pattern in %s vis" % (panel_id))

        index_pattern = get_index_pattern_from_vis(elastic, panel_id)
        if index_pattern:
            break

    # And now time to create the index pattern found
    if not index_pattern:
        logging.error("Can't find index pattern for %s" % dashboard)
        raise RuntimeError("Can't find index pattern for %s" % dashboard)

    logging.debug("Found %s template index pattern" % (index_pattern))

    new_index_pattern_json = get_index_pattern_json(elastic, index_pattern)

    new_index_pattern_json['title'] = enrich_index
    url = elastic.index_url + "/index-pattern/" + enrich_index
    requests.post(url, data=json.dumps(new_index_pattern_json))

    logging.debug("New index pattern created: %s" % (url))

    return enrich_index
Example #4
def get_elastic(url, es_index, clean=None, ocean_backend=None):

    mapping = None

    if ocean_backend:
        mapping = ocean_backend.get_elastic_mappings()

    try:
        ocean_index = es_index
        elastic_ocean = ElasticSearch(url, ocean_index, mapping, clean)

    except ElasticConnectException:
        logging.error("Can't connect to Elastic Search. Is it running?")
        sys.exit(1)

    return elastic_ocean
Example #5
def get_elastic():

    try:
        ocean_index = ConfOcean.get_index()
        elastic_ocean = ElasticSearch(args.elastic_url, ocean_index)

    except ElasticConnectException:
        logging.error("Can't connect to Elastic Search. Is it running?")
        sys.exit(1)

    except ElasticWriteException:
        logging.error("Can't write to Elastic Search.")
        sys.exit(1)

    return elastic_ocean
Example #6
def import_dashboard(elastic_url, import_file, es_index=None):
    logging.debug("Reading from %s the JSON for the dashboard to be imported" %
                  import_file)

    with open(import_file, 'r') as f:
        try:
            kibana = json.loads(f.read())
        except ValueError:
            logging.error("Wrong file format")
            sys.exit(1)

        if 'dashboard' not in kibana:
            logging.error("Wrong file format. Can't find 'dashboard' field.")
            sys.exit(1)

        if not es_index:
            es_index = ".kibana"
        elastic = ElasticSearch(elastic_url, es_index)

        url = elastic.index_url + "/dashboard/" + kibana['dashboard']['id']
        requests.post(url,
                      data=json.dumps(kibana['dashboard']['value']),
                      verify=False)

        if 'searches' in kibana:
            for search in kibana['searches']:
                url = elastic.index_url + "/search/" + search['id']
                requests.post(url,
                              data=json.dumps(search['value']),
                              verify=False)

        if 'index_patterns' in kibana:
            for index in kibana['index_patterns']:
                url = elastic.index_url + "/index-pattern/" + index['id']
                requests.post(url,
                              data=json.dumps(index['value']),
                              verify=False)

        if 'visualizations' in kibana:
            for vis in kibana['visualizations']:
                url = elastic.index_url + "/visualization/" + vis['id']
                requests.post(url, data=json.dumps(vis['value']), verify=False)

        logging.debug("Done")
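A hedged round-trip sketch combining export_dashboard (Example #1) with import_dashboard above, for instance to copy a dashboard between two Elasticsearch instances; both hosts and the dashboard id are placeholder values:

# Placeholder hosts and dashboard id; both instances are assumed to expose a .kibana index
export_dashboard("http://source-es:9200", "git-dashboard", "/tmp/git-dashboard.json")
import_dashboard("http://target-es:9200", "/tmp/git-dashboard.json")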
Example #7
def list_dashboards(elastic_url, es_index=None):
    if not es_index:
        es_index = ".kibana"

    elastic = ElasticSearch(elastic_url, es_index)

    dash_json_url = elastic.index_url + "/dashboard/_search?size=10000"

    logging.debug(dash_json_url)

    r = requests.get(dash_json_url)

    res_json = r.json()

    if "hits" not in res_json:
        logging.error("Can't find dashboards")
        raise RuntimeError("Can't find dashboards")

    for dash in res_json["hits"]["hits"]:
        print(dash["_id"])
Example #8
    task_init = datetime.now()

    arthur_repos = {"repositories": []}

    args = get_params()

    config_logging(args.debug)

    total_repos = 0

    # enrich ocean
    index_enrich = OCEAN_INDEX + "_" + PERCEVAL_BACKEND + "_enrich"
    es_enrich = None
    try:
        es_enrich = ElasticSearch(args.elastic_url, index_enrich)
    except ElasticConnectException:
        logging.error("Can't connect to Elastic Search. Is it running?")

    # The owner could be an org or a user.
    for org in args.org:
        owner_url = get_owner_repos_url(org, args.token)
        try:
            repos = get_repositores(owner_url, args.token, args.nrepos)
        except requests.exceptions.HTTPError:
            logging.error("Can't get repos for %s" % (owner_url))
            continue
        if args.db_projects_map:
            insert_projects_mapping(args.db_projects_map, org, repos)

        for repo in repos:
Example #9
def create_dashboard(elastic_url, dashboard, enrich_index, kibana_host, es_index=None):
    """ Create a new dashboard using dashboard as template
        and reading the data from enrich_index """

    def new_panels(elastic, panels, search_id):
        """ Create the new panels and their vis for the dashboard from the
            panels in the template dashboard """

        dash_vis_ids = []
        new_panels = []
        for panel in panels:
            if panel['type'] in ['visualization', 'search']:
                if panel['type'] == 'visualization':
                    dash_vis_ids.append(panel['id'])
                panel['id'] += "__" + enrich_index
                if panel['type'] == 'search':
                    panel['id'] = search_id
            new_panels.append(panel)

        create_vis(elastic, dash_vis_ids, search_id)

        return new_panels

    def create_vis(elastic, dash_vis_ids, search_id):
        """ Create new visualizations for the dashboard """

        # Create visualizations for the new dashboard
        item_template_url = elastic.index_url + "/visualization"
        # Hack: get all vis, assuming there are fewer than 10000; the scroll API
        # would be needed to fetch them all.
        # Better: use mget to fetch only the vis in dash_vis_ids
        item_template_url_search = item_template_url + "/_search?size=10000"
        r = requests.get(item_template_url_search)
        all_visualizations = r.json()['hits']['hits']

        visualizations = []
        for vis in all_visualizations:
            if vis['_id'] in dash_vis_ids:
                visualizations.append(vis)

        logging.info("Total template vis found: %i" % (len(visualizations)))

        for vis in visualizations:
            vis_data = vis['_source']
            vis_name = vis['_id'].split("_")[-1]
            vis_id = vis_name + "__" + enrich_index
            vis_data['title'] = vis_id
            vis_meta = json.loads(vis_data['kibanaSavedObjectMeta']['searchSourceJSON'])
            vis_meta['index'] = enrich_index
            vis_data['kibanaSavedObjectMeta']['searchSourceJSON'] = json.dumps(vis_meta)
            if "savedSearchId" in vis_data:
                vis_data["savedSearchId"] = search_id

            url = item_template_url + "/" + vis_id

            r = requests.post(url, data=json.dumps(vis_data))
            logging.debug("Created new vis %s" % (url))

    if not es_index:
        es_index = ".kibana"

    # First create always the index pattern as data source
    index_pattern = create_index_pattern(elastic_url, dashboard, enrich_index, es_index)
    # If a search is used, create a new search with the new index_pattern
    search_id = create_search(elastic_url, dashboard, index_pattern, es_index)

    elastic = ElasticSearch(elastic_url, es_index)

    # Create the new dashboard from the template
    dash_data = get_dashboard_json(elastic, dashboard)
    dash_data['title'] = enrich_index
    # Load template panels to create the new ones with their new vis
    panels = json.loads(dash_data['panelsJSON'])
    dash_data['panelsJSON'] = json.dumps(new_panels(elastic, panels, search_id))
    dash_path = "/dashboard/" + dashboard + "__" + enrich_index
    url = elastic.index_url + dash_path
    requests.post(url, data=json.dumps(dash_data))

    dash_url = kibana_host + "/app/kibana#" + dash_path
    return dash_url
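A minimal usage sketch for create_dashboard above, assuming a "git" template dashboard already exists in the .kibana index and that "git_enrich" is the enriched index to plug in; the Elasticsearch URL, template name, index name and Kibana host are all placeholder values:

# Placeholder values for the Elasticsearch URL, template dashboard, enriched index and Kibana host
dash_url = create_dashboard("http://localhost:9200", "git",
                            "git_enrich", "http://localhost:5601")
print("New dashboard available at", dash_url)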
Example #10
    connector = get_connector_from_name(backend_name, connectors)
    backend = connector[0](**vars(args))
    ocean_backend = connector[1](backend, **vars(args))
    enrich_backend = connector[2](backend, **vars(args))

    es_index = backend.get_name() + "_" + backend.get_id()

    clean = args.no_incremental

    if args.cache:
        clean = True

    try:
        # Ocean
        elastic_state = ElasticSearch(args.elastic_url, es_index,
                                      ocean_backend.get_elastic_mappings(),
                                      clean)

        # Enriched ocean
        enrich_index = es_index + "_enrich"
        elastic = ElasticSearch(args.elastic_url, enrich_index,
                                enrich_backend.get_elastic_mappings(), clean)

    except ElasticConnectException:
        logging.error("Can't connect to Elastic Search. Is it running?")
        sys.exit(1)

    ocean_backend.set_elastic(elastic_state)
    enrich_backend.set_elastic(elastic)

    try:
Example #11
                tweet.update(logstash_fields(tweet))
                yield tweet

if __name__ == '__main__':

    args = get_params()

    if args.debug:
        logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')
        logging.debug("Debug mode activated")
    else:
        logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')

    logging.info("Importing tweets from %s to %s/%s", args.json_dir, args.elastic_url, args.index)

    elastic = ElasticSearch(args.elastic_url, args.index)

    total = 0

    first_date = None
    last_date = None

    ids = []
    tweets = []

    for tweet in fetch_tweets(args.json_dir):
        # Check first and last dates
        tweet_date = parser.parse(tweet['created_at'])
        if not first_date or tweet_date <= first_date:
            first_date = tweet_date
        if not last_date or tweet_date >= last_date: