Example #1
    def _get_all_file_paths(self, cinfo_path):
        files = []

        if os.path.isfile(cinfo_path):
            if not self._is_compressed_file(cinfo_path):
                files.append(cinfo_path)
            else:
                files += logutil.get_all_files(self.COLLECTINFO_DIR)

        elif os.path.isdir(cinfo_path):
            files += logutil.get_all_files(cinfo_path)

            if os.path.exists(self.COLLECTINFO_DIR):
                # TODO: Before adding files from COLLECTINFO_DIR, check whether each
                # file is already in the input file list: collectinfo_parser fails if
                # the same file appears twice. This can happen when the input holds a
                # zip file that the user unzipped without deleting the archive, so
                # collectinfo-analyser extracts its own copy and the same file ends
                # up in the list twice (the user's copy and the analyser's copy).

                if not self._get_files_by_type(JSON_FILE, cinfo_path):
                    for collectinfo_json_file in self._get_files_by_type(
                            JSON_FILE, self.COLLECTINFO_DIR):
                        files.append(collectinfo_json_file)

                if not self._get_files_by_type(CLUSTER_FILE, cinfo_path):
                    for old_collectinfo_file in self._get_files_by_type(
                            CLUSTER_FILE, self.COLLECTINFO_DIR):
                        files.append(old_collectinfo_file)

                if not self._get_files_by_type(SYSTEM_FILE, cinfo_path):
                    for sysinfo_file in self._get_files_by_type(
                            SYSTEM_FILE, self.COLLECTINFO_DIR):
                        files.append(sysinfo_file)

        return files
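The TODO above is easy to address for exact duplicates in the returned list. A minimal sketch (not part of the original code; dedupe_paths is a hypothetical helper) that drops repeated entries while keeping order:

import os

def dedupe_paths(paths):
    """Drop duplicate file paths from a list, keeping order.

    Resolving symlinks and relative segments makes two spellings of the
    same path count once. This does not cover the TODO's harder case,
    where the same file exists at two different locations (one unzipped
    by the user, one by collectinfo-analyser); that needs a content- or
    basename-based comparison.
    """
    seen = set()
    unique = []
    for path in paths:
        key = os.path.realpath(path)
        if key not in seen:
            seen.add(key)
            unique.append(path)
    return unique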
Example #2
    def _get_all_file_paths(self, cinfo_path):
        files = []

        if os.path.isfile(cinfo_path):
            if not self._is_compressed_file(cinfo_path):
                files.append(cinfo_path)
            else:
                files += logutil.get_all_files(self.collectinfo_dir)

        elif os.path.isdir(cinfo_path):
            files += logutil.get_all_files(cinfo_path)

            if os.path.exists(self.collectinfo_dir):
                # TODO: Before adding files from collectinfo_dir, check whether each
                # file is already in the input file list: collectinfo_parser fails if
                # the same file appears twice. This can happen when the input holds a
                # zip file that the user unzipped without deleting the archive, so
                # collectinfo-analyser extracts its own copy and the same file ends
                # up in the list twice (the user's copy and the analyser's copy).

                if not self._get_files_by_type(JSON_FILE, cinfo_path):
                    for collectinfo_json_file in self._get_files_by_type(JSON_FILE, self.collectinfo_dir):
                        files.append(collectinfo_json_file)

                if not self._get_files_by_type(CLUSTER_FILE, cinfo_path):
                    for old_collectinfo_file in self._get_files_by_type(CLUSTER_FILE, self.collectinfo_dir):
                        files.append(old_collectinfo_file)

                if not self._get_files_by_type(SYSTEM_FILE, cinfo_path):
                    for sysinfo_file in self._get_files_by_type(SYSTEM_FILE, self.collectinfo_dir):
                        files.append(sysinfo_file)

        return files
Example #3
    def _validate_and_extract_compressed_files(self, cinfo_path, dest_dir=None):
        if not cinfo_path or not os.path.exists(cinfo_path):
            return

        if not dest_dir:
            dest_dir = self.collectinfo_dir

        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        if os.path.isfile(cinfo_path):
            if not self._is_compressed_file(cinfo_path):
                return

            if self._extract_to(cinfo_path, dest_dir):
                self._validate_and_extract_compressed_files(dest_dir, dest_dir=os.path.join(dest_dir, COLLECTINFO_INTERNAL_DIR))
                return

        files = logutil.get_all_files(cinfo_path)
        if not files:
            return

        file_extracted = False
        for file in files:
            if not self._is_compressed_file(file):
                continue

            if self._extract_to(file, dest_dir):
                file_extracted = True

        if file_extracted:
            self._validate_and_extract_compressed_files(dest_dir, dest_dir=os.path.join(dest_dir, COLLECTINFO_INTERNAL_DIR))
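The helpers _is_compressed_file and _extract_to are methods on the same class and are not shown in these examples. Purely as an illustration, a compressed-file check can be built from the standard library; this is a hypothetical sketch of what such a helper might look like, not the actual asadm implementation:

import os
import tarfile
import zipfile

def is_compressed_file(path):
    """Hypothetical stand-in for self._is_compressed_file (not shown above)."""
    if not os.path.isfile(path):
        return False
    # zip and tar archives are recognized directly by the stdlib
    if zipfile.is_zipfile(path) or tarfile.is_tarfile(path):
        return True
    # gzip streams start with the magic bytes 0x1f 0x8b
    with open(path, "rb") as f:
        return f.read(2) == b"\x1f\x8b"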
Example #4
    def _get_all_file_paths(self, cinfo_path):
        files = []

        if os.path.isfile(cinfo_path):
            if not self._is_compressed_file(cinfo_path):
                files.append(cinfo_path)
            else:
                files += logutil.get_all_files(self.collectinfo_dir)

        elif os.path.isdir(cinfo_path):
            files += logutil.get_all_files(cinfo_path)
            if os.path.exists(self.collectinfo_dir):
                # TODO: Before adding files from collectinfo_dir, check whether each
                # file is already in the input file list: collectinfo_parser fails if
                # the same file appears twice. This can happen when the input holds a
                # zip file that the user unzipped without deleting the archive, so
                # collectinfo-analyser extracts its own copy and the same file ends
                # up in the list twice (the user's copy and the analyser's copy).

                files += self._get_valid_files(self.collectinfo_dir)

        return files
Example #5
    def _get_valid_log_files(self, log_path=""):

        if not log_path:
            log_path = self.log_path

        try:
            server_log_files = []
            log_files = logutil.get_all_files(log_path)
            for log_file in log_files:
                try:
                    if self.reader.is_server_log_file(log_file):
                        server_log_files.append(log_file)
                except Exception:
                    pass
            return server_log_files

        except Exception:
            return []
Example #6
    def _get_files_by_type(self, file_type, cinfo_path=""):
        try:
            if not cinfo_path:
                cinfo_path = self.cinfo_path

            log_files = logutil.get_all_files(cinfo_path)
            if file_type == CLUSTER_FILE:
                cinfo_files = []
                for log_file in log_files:
                    try:
                        if self.reader.is_cinfo_log_file(log_file):
                            cinfo_files.append(log_file)
                    except Exception:
                        pass

                return cinfo_files

            if file_type == JSON_FILE:
                json_files = []
                for log_file in log_files:
                    try:
                        # TODO: Replace the extension test with a proper
                        # check for an asadm collectinfo JSON file.
                        if os.path.splitext(log_file)[1] == ".json":
                            json_files.append(log_file)
                    except Exception:
                        pass

                return json_files

            if file_type == SYSTEM_FILE:
                system_files = []
                for log_file in log_files:
                    try:
                        if self.reader.is_system_log_file(log_file):
                            system_files.append(log_file)
                    except Exception:
                        pass

                return system_files

            return []
        except Exception:
            return []
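The TODO notes that testing the .json extension is a weak stand-in for real validation. A minimal sketch of a stronger check, assuming only that a collectinfo JSON file must at least parse as JSON (schema validation is left out):

import json

def looks_like_collectinfo_json(path):
    """Hypothetical check hinted at by the TODO: verify the file actually
    parses as JSON instead of trusting its extension."""
    try:
        with open(path) as f:
            json.load(f)
        return True
    except (OSError, ValueError):
        return False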
Example #7
    def _get_valid_files(self, cinfo_path=""):
        try:
            if not cinfo_path:
                cinfo_path = self.cinfo_path

            log_files = logutil.get_all_files(cinfo_path)
            valid_files = []
            for log_file in log_files:
                try:
                    if self.reader.is_cinfo_log_file(log_file):
                        valid_files.append(log_file)
                        continue
                except Exception:
                    pass

                try:
                    # TODO: Replace the extension test with a proper
                    # check for an asadm collectinfo JSON file.
                    if os.path.splitext(log_file)[1] == ".json":
                        valid_files.append(log_file)
                        continue
                except Exception:
                    pass

                try:
                    if self.reader.is_system_log_file(log_file):
                        valid_files.append(log_file)
                        continue
                except Exception:
                    pass

                try:
                    # TODO: Replace the extension test with a proper
                    # check for an asadm conf file.
                    if os.path.splitext(log_file)[1] == ".conf":
                        valid_files.append(log_file)
                except Exception:
                    pass

            return valid_files

        except Exception:
            return []
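The four near-identical try/except blocks in this example could be table-driven. A sketch of the same behavior with a list of predicates (a refactoring suggestion, not code from the project):

def get_valid_files(log_files, predicates):
    """Run each file through a list of predicate callables (for example
    reader.is_cinfo_log_file, or a lambda testing the extension) and keep
    files matching any of them, preserving the original order."""
    valid_files = []
    for log_file in log_files:
        for predicate in predicates:
            try:
                if predicate(log_file):
                    valid_files.append(log_file)
                    break
            except Exception:
                pass
    return valid_files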
Example #8
    def _validate_and_extract_compressed_files(self,
                                               cinfo_path,
                                               dest_dir=None):
        if not cinfo_path or not os.path.exists(cinfo_path):
            return

        if not dest_dir:
            dest_dir = self.COLLECTINFO_DIR

        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        if os.path.isfile(cinfo_path):
            if not self._is_compressed_file(cinfo_path):
                return

            if self._extract_to(cinfo_path, dest_dir):
                self._validate_and_extract_compressed_files(
                    dest_dir,
                    dest_dir=os.path.join(dest_dir,
                                          self.COLLECTINFO_INTERNAL_DIR))
                return

        files = logutil.get_all_files(cinfo_path)
        if not files:
            return

        file_extracted = False
        for file in files:
            if not self._is_compressed_file(file):
                continue

            if self._extract_to(file, dest_dir):
                file_extracted = True

        if file_extracted:
            self._validate_and_extract_compressed_files(
                dest_dir,
                dest_dir=os.path.join(dest_dir, self.COLLECTINFO_INTERNAL_DIR))
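The _extract_to helper is also not shown in these examples. For illustration, an extraction helper covering the same archive types as the compressed-file check sketched earlier; again a hypothetical sketch, not the asadm implementation:

import gzip
import os
import shutil
import tarfile
import zipfile

def extract_to(src, dest_dir):
    """Hypothetical stand-in for self._extract_to (not shown above).
    Returns True if anything was extracted. Note that extractall on an
    untrusted archive should be paired with a path-traversal check."""
    try:
        if zipfile.is_zipfile(src):
            with zipfile.ZipFile(src) as zf:
                zf.extractall(dest_dir)
            return True
        if tarfile.is_tarfile(src):
            with tarfile.open(src) as tf:
                tf.extractall(dest_dir)
            return True
        # fall back to gzip: decompress into dest_dir, naming the output
        # after the source file minus its .gz suffix
        name = os.path.basename(src)
        if name.endswith(".gz"):
            name = name[:-3]
        with gzip.open(src, "rb") as fin, \
                open(os.path.join(dest_dir, name), "wb") as fout:
            shutil.copyfileobj(fin, fout)
        return True
    except (OSError, zipfile.BadZipFile, tarfile.TarError):
        return False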