def find_search_files(self, search_path):
    """Find the search-related files (index patterns) saved on disk.

    This method locates the files containing the objects referenced
    in `search_path` by looking into `index_patterns_folder`.

    :param search_path: path of the search to import

    :returns the list of files containing the objects that compose a search
    """
    search_files = []
    search_content = load_json(search_path)

    index_pattern_id = self.find_index_pattern(search_content)
    if not index_pattern_id:
        # No referenced index pattern: the search file stands alone
        logger.info("No index pattern declared for %s", search_path)
        search_files.append(search_path)
        return search_files

    ip_path = self.find_file_by_name(
        self.index_patterns_folder,
        self.build_file_name(INDEX_PATTERN, index_pattern_id))
    # `search_files` is still empty at this point, so the index pattern
    # path can be appended unconditionally (the previous membership
    # check against an empty list was redundant)
    search_files.append(ip_path)
    search_files.append(search_path)

    return search_files
def find_file_by_content_title(folder_path, content_title):
    """Find a file on disk by its content title.

    This method locates a file whose content title equals
    `content_title` within `folder_path`. A `NotFoundError` is
    raised when no such file exists.

    :param folder_path: the folder where to look for the file
    :param content_title: the content title of the target file

    :returns: the file path of the file
    """
    if not Manager.folder_exists(folder_path):
        cause = "Folder %s not found" % folder_path
        logger.error(cause)
        raise NotFoundError(cause=cause)

    # Return as soon as a file with a matching title is found
    for file_name in Manager.get_files(folder_path):
        candidate_path = os.path.join(folder_path, file_name)
        candidate = load_json(candidate_path)
        if candidate['attributes']['title'] == content_title:
            return candidate_path

    cause = "File with content title %s not found in %s" % (content_title, folder_path)
    logger.error(cause)
    raise NotFoundError(cause=cause)
def __init__(self, root_path):
    """Registry class.

    This class handles the meta objects associated to the Kibana objects
    via aliases, so the user does not have to deal with their IDs and
    titles. The registry is stored in the root folder of Archimedes
    under the name .registry, with the following layout:

    [
        "1": {
            'id': 'Search:_pull_request:false',
            'title': 'Search:_pull_request:false',
            'type': 'search',
            'version': 1
        },
        "2": {
            'id': '8539ada0-9960-11e8-8771-a349686d998a',
            'title': 'dockerhub',
            'type': 'index-pattern',
            'version': 1
        },
        ...
    ]

    :param root_path: path where the registry will be stored
    """
    registry_path = os.path.join(root_path, REGISTRY_NAME)
    self.path = registry_path

    # Bootstrap an empty registry file on first use
    if not os.path.exists(registry_path):
        self.__create_registry()

    self.content = load_json(registry_path)
def __import_objects(self, obj_paths, force=False):
    """Import Kibana object to the Kibana instance.

    This method imports dashboard, index pattern, visualization and
    search objects from a list of JSON files to Kibana. Each JSON file
    can be either a list of objects or a dict having a key 'objects'
    with a list of objects as value (e.g,, {'objects': [...]}.

    The method can overwrite previous versions of existing objects by
    setting the parameter `force` to True.

    :param obj_paths: target object paths
    :param force: overwrite any existing objects on ID conflict
    """
    logger.info("Importing %s objects", len(obj_paths))

    for obj_path in obj_paths:
        content = load_json(obj_path)

        if not content:
            logger.warning("No objects in %s", obj_path)
            continue

        # Normalize to the {'objects': [...]} envelope expected by Kibana
        payload = content if 'objects' in content else {'objects': [content]}

        logger.info("Importing %s", obj_path)
        self.kibana.import_objects(payload, force)
def test_load_json(self):
    """Test whether the content of JSON is correctly loaded"""
    target_file = 'data/object_visualization'
    target_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), target_file)

    expected = json.loads(read_file(target_file))

    self.assertDictEqual(load_json(target_path), expected)
def find_dashboard_files(self, dashboard_path):
    """Find the dashboard-related files (visualizations, searches,
    index patterns) saved on disk.

    This method locates the files containing the objects referenced
    in `dashboard_path` by looking into `visualizations_folder`,
    `searches_folder` and `index_patterns_folder`.

    :param dashboard_path: path of the dashboard to import

    :returns the list of files containing the objects that compose a dashboard
    """
    dashboard_files = []
    dash_content = load_json(dashboard_path)

    if not self.folder_exists(self.visualizations_folder):
        logger.info(
            "Visualizations not loaded for %s, visualizations folder doesn't exist",
            dashboard_path)
        dashboard_files.append(dashboard_path)
        return dashboard_files

    panels = json.loads(dash_content['attributes']['panelsJSON'])
    for panel in panels:
        if panel['type'] == VISUALIZATION:
            panel_path = self.find_file_by_name(
                self.visualizations_folder,
                self.build_file_name(VISUALIZATION, panel['id']))
            panel_files = self.find_visualization_files(panel_path)
        elif panel['type'] == SEARCH:
            panel_path = self.find_file_by_name(
                self.searches_folder,
                self.build_file_name(SEARCH, panel['id']))
            panel_files = self.find_search_files(panel_path)
        else:
            cause = "Panel type %s not handled" % (panel['type'])
            logger.error(cause)
            raise ObjectTypeError(cause=cause)

        # Plain loop instead of a side-effect list comprehension:
        # preserves insertion order while skipping duplicates
        for panel_file in panel_files:
            if panel_file not in dashboard_files:
                dashboard_files.append(panel_file)

    dashboard_files.append(dashboard_path)
    return dashboard_files
def find_visualization_files(self, visualization_path):
    """Find the visualization-related files (searches, index patterns)
    saved on disk.

    This method locates the files containing the objects referenced
    in `visualization_path` by looking into `searches_folder` and
    `index_patterns_folder`.

    :param visualization_path: path of the visualization to import

    :returns the list of files containing the objects that compose a visualization
    """
    visualization_files = []
    vis_content = load_json(visualization_path)

    if not self.folder_exists(self.searches_folder):
        logger.info(
            "Searches won't be loaded for %s, searches folder doesn't exist",
            visualization_path)

    if not self.folder_exists(self.index_patterns_folder):
        logger.info(
            "Index patterns won't be loaded for %s, index patterns folder doesn't exist",
            visualization_path)

    if 'savedSearchId' in vis_content['attributes'] and self.folder_exists(
            self.searches_folder):
        search_id = vis_content['attributes']['savedSearchId']
        search_path = self.find_file_by_name(
            self.searches_folder, self.build_file_name(SEARCH, search_id))

        if search_path not in visualization_files:
            visualization_files.append(search_path)

        # Plain loops instead of side-effect list comprehensions:
        # keep order, skip duplicates, build no throwaway lists
        for search_file in self.find_search_files(search_path):
            if search_file not in visualization_files:
                visualization_files.append(search_file)

    # The visualization itself may reference an index pattern directly
    for search_file in self.find_search_files(visualization_path):
        if search_file not in visualization_files:
            visualization_files.append(search_file)

    return visualization_files
def import_from_disk(self, obj_type=None, obj_id=None, obj_title=None,
                     obj_alias=None, find=False, force=False):
    """Import Kibana objects stored on disk.

    Locate an object on disk based on its type and ID, title or alias
    and import it to Kibana. If `find` is set to True, it also loads
    the related objects (i.e., visualizations, search and index
    pattern) using the `manager`.

    The method can overwrite previous versions of existing objects by
    setting the parameter `force` to True.

    :param obj_type: type of the target object
    :param obj_id: ID of the target object
    :param obj_title: title of the target object
    :param obj_alias: alias of the target object
    :param find: find the objects referenced in the file
    :param force: overwrite any existing objects on ID conflict
    """
    # An alias, when given, resolves type and ID through the registry
    if obj_alias:
        _, meta = self.registry.find(obj_alias)
        target_obj_type = meta.type
        target_obj_id = meta.id
        target_obj_title = None
    else:
        target_obj_type = obj_type
        target_obj_id = obj_id
        target_obj_title = obj_title

    folder_path = self.manager.build_folder_path(target_obj_type)

    # Locate the file on disk by ID first, then by title
    if target_obj_id:
        file_name = self.manager.build_file_name(target_obj_type, target_obj_id)
        file_path = self.manager.find_file_by_name(folder_path, file_name)
    elif target_obj_title:
        file_path = self.manager.find_file_by_content_title(folder_path, target_obj_title)
    else:
        cause = "Object id, title or alias cannot be None"
        logger.error(cause)
        raise DataImportError(cause=cause)

    json_content = load_json(file_path)
    if not json_content:
        logger.warning("File %s is empty", file_path)
        return

    if not find:
        logger.info("Do not find related files")
        self.__import_objects([file_path], force)
        return

    # Dispatch table from object type to its related-files finder
    finders = {
        DASHBOARD: self.manager.find_dashboard_files,
        VISUALIZATION: self.manager.find_visualization_files,
        SEARCH: self.manager.find_search_files,
    }

    if target_obj_type in finders:
        files = finders[target_obj_type](file_path)
    elif target_obj_type == INDEX_PATTERN:
        cause = "Find not supported for %s" % target_obj_type
        logger.error(cause)
        raise DataImportError(cause=cause)
    else:
        cause = "Object type %s not known" % target_obj_type
        logger.error(cause)
        raise ObjectTypeError(cause=cause)

    self.__import_objects(files, force=force)