def compress(self, clustering, workspace_handler, data_handler, matrix_handler, options):
    """
    Reduces the clustering to a set of representative elements and writes them
    to a pdb file inside the workspace "results" folder.

    The compression strategy is read from self.parameters["type"]:
      - "RANDOM"   -> self.__naive_compression
      - "KMEDOIDS" -> self.__kmedoids_compression (default)
    Any other value prints an error and terminates the process.

    @param clustering: The clustering to be compressed.
    @param workspace_handler: Mapping of workspace folders; "results" is used here.
    @param data_handler: Handler forwarded to save_cluster_elements.
    @param matrix_handler: Distance-matrix handler forwarded to the compression helpers.
    @param options: Options forwarded to save_cluster_elements.

    @return: The base name (without extension) of the written pdb file.
    """
    representatives = []
    compression_type = self.parameters.get_value("type", default_value="KMEDOIDS")
    pdb_name = self.parameters.get_value("filename", default_value="compressed")
    pdb_path = os.path.join(workspace_handler["results"], "%s.pdb" % pdb_name)

    if compression_type == "RANDOM":
        representatives = self.__naive_compression(clustering, matrix_handler)
    elif compression_type == "KMEDOIDS":
        representatives = self.__kmedoids_compression(clustering, matrix_handler)
    else:
        # BUG FIX: the original formatted the message with self.type, an attribute
        # that is never defined (the requested type lives in the local variable),
        # so this branch raised AttributeError instead of reporting the error.
        print("[ERROR Compressor::compress] The compression type does not exist (%s)" % compression_type)
        exit()

    save_cluster_elements(representatives, pdb_path, data_handler, options)
    return pdb_name
def run(self, clustering, postprocessing_parameters, data_handler, workspaceHandler, matrixHandler, generatedFiles):
    """
    Writes one pdb file per cluster into the workspace "clusters" folder,
    bundles all of them into a gzipped tar archive in the "results" folder,
    and registers that archive in generatedFiles.

    @param clustering: Clustering whose clusters are written out.
    @param postprocessing_parameters: Parameters forwarded to save_cluster_elements.
    @param data_handler: Handler forwarded to save_cluster_elements.
    @param workspaceHandler: Mapping of workspace folders ("results", "clusters").
    @param matrixHandler: Unused here; kept for the common run() signature.
    @param generatedFiles: List to which the produced archive entry is appended.
    """
    clusters_place = workspaceHandler["clusters"]
    results_place = workspaceHandler["results"]

    # One pdb per cluster; every element of a cluster is tagged with that cluster's id.
    written_pdbs = []
    for cluster in clustering.clusters:
        pdb_file = os.path.join(clusters_place, "%s.pdb"%(cluster.id))
        cluster_ids = [cluster.id]*len(cluster.all_elements) # all share same cluster id
        save_cluster_elements(cluster.all_elements,
                              cluster_ids,
                              pdb_file,
                              data_handler,
                              postprocessing_parameters)
        written_pdbs.append(pdb_file)

    # Bundle every per-cluster pdb into a single gzipped tar archive.
    tar_path = os.path.join(results_place, "clusters.tar.gz")
    archive = tarfile.open(tar_path, "w:gz")
    for pdb_file in written_pdbs:
        archive.add(pdb_file, os.path.basename(pdb_file))
    archive.close()

    generatedFiles.append({
        "description":"Clusters",
        "path":os.path.abspath(tar_path),
        "type":"compressed_pdb"
    })
def run(self, clustering, postprocessing_parameters, data_handler, workspaceHandler, matrixHandler, generatedFiles):
    """
    Dumps each cluster to its own pdb file (under the "clusters" workspace
    folder), packs the whole set into results/clusters.tar.gz, and records
    the archive in generatedFiles.

    @param clustering: Clustering whose clusters are written out.
    @param postprocessing_parameters: Parameters forwarded to save_cluster_elements.
    @param data_handler: Handler forwarded to save_cluster_elements.
    @param workspaceHandler: Mapping of workspace folders ("results", "clusters").
    @param matrixHandler: Unused here; kept for the common run() signature.
    @param generatedFiles: List to which the produced archive entry is appended.
    """
    cluster_pdb_paths = []
    for cluster in clustering.clusters:
        path = os.path.join(workspaceHandler["clusters"], "%s.pdb" % (cluster.id))
        save_cluster_elements(
            cluster.all_elements,
            [cluster.id] * len(cluster.all_elements),  # all share same cluster id
            path,
            data_handler,
            postprocessing_parameters,
        )
        cluster_pdb_paths.append(path)

    # Pack every per-cluster pdb (stored by base name) into one gzipped tar.
    tar_path = os.path.join(workspaceHandler["results"], "clusters.tar.gz")
    tar = tarfile.open(tar_path, "w:gz")
    for pdb_path in cluster_pdb_paths:
        tar.add(pdb_path, os.path.basename(pdb_path))
    tar.close()

    generatedFiles.append({"description": "Clusters",
                           "path": os.path.abspath(tar_path),
                           "type": "compressed_pdb"})
def compress(self, clustering, workspace_handler, data_handler, matrix_handler, options):
    """
    Compresses the clustering down to a list of representatives and saves
    them as a pdb file in the workspace "results" folder.

    self.parameters["type"] selects the strategy ("RANDOM" or the default
    "KMEDOIDS"); an unknown type prints an error and terminates the process.

    @param clustering: The clustering to be compressed.
    @param workspace_handler: Mapping of workspace folders; "results" is used here.
    @param data_handler: Handler forwarded to save_cluster_elements.
    @param matrix_handler: Distance-matrix handler forwarded to the compression helpers.
    @param options: Options forwarded to save_cluster_elements.

    @return: The base name (without extension) of the written pdb file.
    """
    representatives = []
    compression_type = self.parameters.get_value("type", default_value="KMEDOIDS")
    pdb_name = self.parameters.get_value("filename", default_value="compressed")
    pdb_path = os.path.join(workspace_handler["results"], "%s.pdb" % pdb_name)

    if compression_type == "RANDOM":
        representatives = self.__naive_compression(clustering, matrix_handler)
    elif compression_type == "KMEDOIDS":
        representatives = self.__kmedoids_compression(clustering, matrix_handler)
    else:
        # BUG FIX: formerly interpolated self.type, an attribute that does not
        # exist, which made the error path itself crash with AttributeError;
        # report the actual requested compression type instead.
        print("[ERROR Compressor::compress] The compression type does not exist (%s)" % compression_type)
        exit()

    save_cluster_elements(representatives, pdb_path, data_handler, options)
    return pdb_name