def merge_glock_dumps(self, path_to_output_dir):
    """
    Merges the individual glocks dump files for each GFS2 filesystem on this
    host into a single file per filesystem under path_to_output_dir. Any
    pre-existing merged file is removed first so the output is rebuilt fresh.

    @return: A list of paths to the merged files that were created.
    @rtype: Array

    @param path_to_output_dir: The directory where the merged files will be
    written.
    @type path_to_output_dir: String
    """
    path_to_merged_files = []
    # NOTE(review): "gsds" is not defined in this method's scope; presumably
    # a module-level collection of glocks dumps -- confirm, or this should
    # likely be "self".
    for gfs2_name in gsds.get_gfs2_names():
        list_of_gsd = gsds.get_glocks_dumps(gfs2_name)
        message = "Merging %d glocks dumps for the host \"%s\" for the filesystem \"%s\"." %(len(list_of_gsd),
                                                                                             self.get_hostname(),
                                                                                             gfs2_name)
        logging.getLogger(glocktop_analyze.MAIN_LOGGER_NAME).info(message)
        path_to_merged_file = os.path.join(path_to_output_dir,
                                           "glock_dump-%s-%s.txt" %(self.get_hostname(),
                                                                    gfs2_name))
        if (os.path.exists(path_to_merged_file)):
            try:
                os.remove(path_to_merged_file)
            # os.remove() raises OSError on failure (the original caught only
            # IOError, which never fires for os.remove on Python 2). IOError
            # is kept for safety on Python 3, where it aliases OSError.
            except (OSError, IOError):
                message = "There was an error removing the file: %s." %(path_to_merged_file)
                logging.getLogger(glocktop_analyze.MAIN_LOGGER_NAME).error(message)
        for gsd in list_of_gsd:
            # Build the dump text with a single join instead of quadratic
            # string += concatenation.
            dump_lines = get_data_from_file(gsd.get_path_to_glockfile(),
                                            strip_leading_character=False)
            glocks_dump_data = "".join("%s\n" %(line) for line in dump_lines)
            if (glocks_dump_data):
                # Prepend the per-dump header before appending to the file.
                glocks_dump_data = "%s\n%s\n" %(gsd.get_header(), glocks_dump_data)
                if (not write_to_file(path_to_merged_file, glocks_dump_data, append_to_file=True, create_file=True)):
                    message = "There was an error writing to the file: %s" %(path_to_merged_file)
                    logging.getLogger(glocktop_analyze.MAIN_LOGGER_NAME).error(message)
        # Only report the merged file if something was actually written.
        if (os.path.exists(path_to_merged_file)):
            path_to_merged_files.append(path_to_merged_file)
    return path_to_merged_files
def find_glocks_dumps(path_to_dir):
    """
    Searches path_to_dir for glocks dump files and groups them by hostname.

    For each "hostinformation.txt" found, the host metadata (hostname,
    nodename, snapshot timestamp) is parsed, then every valid "glocks" dump
    file in the same directory tree is recorded. The parent directory of a
    glocks file is expected to be named "<cluster_name>:<gfs2_name>".

    @return: A dictionary mapping hostname to a GlocksDumps container of all
    the glocks dumps found for that host. Empty if path_to_dir is not a
    directory or no dumps were found.
    @rtype: Dict

    @param path_to_dir: The directory that will be searched.
    @type path_to_dir: String
    """
    gsds = []
    if (os.path.isdir(path_to_dir)):
        hostinformation_files = find_files(path_to_dir, "hostinformation.txt")
        if (hostinformation_files):
            for hi_file in hostinformation_files:
                # Parse the hostinformation.txt file first to get the host
                # metadata and the time the snapshot of the GFS2 filesystem
                # was taken.
                hostname = ""
                nodename = ""
                date_time = ""
                for line in get_data_from_file(hi_file):
                    if (line.startswith("HOSTNAME")):
                        hostname = line.split("=")[1].strip()
                    elif (line.startswith("NODE_NAME")):
                        nodename = line.split("=")[1].strip()
                    elif (line.startswith("TIMESTAMP")):
                        # Split the line once and parse that value (the
                        # original split the same line twice and left an
                        # unused variable behind).
                        timestamp = line.split("=")[1].strip()
                        date_time = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
                for path_to_glockfile in find_files(os.path.split(hi_file)[0], "glocks"):
                    # The grandparent directory name encodes
                    # "<cluster_name>:<gfs2_name>"; split it once.
                    gparent_dir = os.path.split(os.path.split(path_to_glockfile)[0])[1]
                    gparent_dir_parts = gparent_dir.split(":")
                    cluster_name = gparent_dir_parts[0]
                    gfs2_name = gparent_dir_parts[1]
                    message = "Checking to see if the following files is a valid glock dump: %s" %(path_to_glockfile)
                    logging.getLogger(glocktop_analyze.MAIN_LOGGER_NAME).debug(message)
                    if (is_valid_lockdump_file(path_to_glockfile)):
                        gsds.append(GlocksDump(hostname, nodename, date_time, cluster_name, gfs2_name, path_to_glockfile))
    # Group the dumps into one GlocksDumps container per hostname.
    glocks_dumps = {}
    for glocks_dump in gsds:
        dump_hostname = glocks_dump.get_hostname()
        # Membership test instead of dict.has_key() (removed in Python 3,
        # works identically on Python 2).
        if (not dump_hostname in glocks_dumps):
            glocks_dumps[dump_hostname] = GlocksDumps(dump_hostname)
        glocks_dumps[dump_hostname].add_glocks_dump(glocks_dump)
    return glocks_dumps
def __analyze_file(path_to_output_file, gfs2_filesystem_names, show_ended_process_and_tlocks):
    """
    Parses a glocktop output file into GFS2 snapshots grouped by filesystem.

    A line starting with "@" (or an empty line) marks the start of a new
    snapshot; all following lines (glocks, holders/waiters, etc.) belong to
    that snapshot until the next marker. Each completed snapshot is processed
    and filed under its filesystem name.

    @return: A dictionary mapping filesystem name to the list of parsed
    snapshots for that filesystem.
    @rtype: Dict

    @param path_to_output_file: The path to the glocktop output file that
    will be parsed.
    @type path_to_output_file: String
    @param gfs2_filesystem_names: If non-empty, only snapshots whose
    filesystem name is in this list are processed.
    @type gfs2_filesystem_names: Array
    @param show_ended_process_and_tlocks: Passed through to parse_snapshot().
    @type show_ended_process_and_tlocks: Boolean
    """
    def __flush_snapshot(gfs2_snapshot, snapshot_lines, snapshots_by_filesystem):
        # Process a completed snapshot's lines and file the snapshot under
        # its filesystem name, honoring the filesystem-name filter. (The
        # original duplicated this logic inline in two places.)
        if ((not gfs2_filesystem_names) or
            (gfs2_snapshot.get_filesystem_name().strip() in gfs2_filesystem_names)):
            process_snapshot(gfs2_snapshot, snapshot_lines)
            filesystem_name = gfs2_snapshot.get_filesystem_name()
            if (not filesystem_name in snapshots_by_filesystem):
                snapshots_by_filesystem[filesystem_name] = []
            snapshots_by_filesystem[filesystem_name].append(gfs2_snapshot)

    # All the snapshots for all the filesystems.
    snapshots_by_filesystem = {}
    # The current snapshot that lines are being collected for.
    gfs2_snapshot = None
    # The lines that are related to this snapshot of the filesystem.
    # Including glocks, waiters, etc.
    snapshot_lines = []
    # Fixed: the original read from an undefined name "path_to_filename";
    # the function's path parameter is the intended source.
    lines = get_data_from_file(path_to_output_file)
    for line in lines:
        # @, G, H, I, R, B, U, C, S
        if ((line.startswith("@")) or (not len(line) > 0)):
            if (gfs2_snapshot is not None):
                # Process any previous snapshot lines before starting a
                # new one. All the glocks, holder/waiters, etc.
                __flush_snapshot(gfs2_snapshot, snapshot_lines, snapshots_by_filesystem)
            # Start collecting the new snapshot.
            gfs2_snapshot = parse_snapshot(line, show_ended_process_and_tlocks)
            snapshot_lines = []
        else:
            snapshot_lines.append(line)
    # Process any remaining items.
    if (gfs2_snapshot is not None):
        __flush_snapshot(gfs2_snapshot, snapshot_lines, snapshots_by_filesystem)
    return snapshots_by_filesystem