def launch(self, data_file, region_volume=None):
    datatype = self._base_before_launch(data_file, region_volume)

    tract_start_indices = [0]
    tract_region = []

    with TvbZip(data_file) as zipf:
        for tractf in sorted(zipf.namelist()):  # one track per file
            if not tractf.endswith('.txt'):  # omit directories and other non track files
                continue
            vertices_file = zipf.open(tractf)
            tract_vertices = numpy.loadtxt(vertices_file, dtype=numpy.float32)

            tract_start_indices.append(tract_start_indices[-1] + len(tract_vertices))
            datatype.store_data_chunk("vertices", tract_vertices, grow_dimension=0, close_file=False)

            if region_volume is not None:
                tract_region.append(self._get_tract_region(tract_vertices[0]))
            vertices_file.close()

    datatype.tract_start_idx = tract_start_indices
    datatype.tract_region = numpy.array(tract_region, dtype=numpy.int16)
    return datatype

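# Sketch (illustrative assumption, not part of the importer above): how the cumulative
# tract_start_idx list built by that loop can delimit individual tracts inside the
# concatenated "vertices" array, assuming tracts were appended along dimension 0.
import numpy

vertices = numpy.arange(18, dtype=numpy.float32).reshape(6, 3)  # e.g. two tracts of 4 and 2 vertices
tract_start_idx = [0, 4, 6]                                     # cumulative lengths, as the loop accumulates them

def get_tract(i):
    # slice the i-th tract out of the concatenated vertex array
    return vertices[tract_start_idx[i]:tract_start_idx[i + 1]]

assert get_tract(0).shape == (4, 3)
assert get_tract(1).shape == (2, 3)
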
def export_simulator_configuration(self, burst_id):
    burst = dao.get_burst_by_id(burst_id)
    if burst is None:
        raise InvalidExportDataException("Could not find burst with ID " + str(burst_id))

    simulator_from_burst = dao.get_generic_entity(SimulatorIndex, burst.id, 'fk_parent_burst')[0]
    simulator_h5 = h5.path_for_stored_index(simulator_from_burst)
    simulator_config_folder = os.path.dirname(simulator_h5)

    now = datetime.now()
    date_str = now.strftime("%Y-%m-%d_%H-%M")
    zip_file_name = "%s_%s.%s" % (date_str, str(burst_id), self.ZIP_FILE_EXTENSION)

    tmp_export_folder = self._build_data_export_folder(simulator_from_burst)
    result_path = os.path.join(tmp_export_folder, zip_file_name)

    with TvbZip(result_path, "w") as zip_file:
        for filename in os.listdir(simulator_config_folder):
            zip_file.write(os.path.join(simulator_config_folder, filename), filename)

    return result_path

def launch(self, view_model):
    # type: (TrackImporterModel) -> [TractsIndex]
    datatype = self._base_before_launch(view_model.data_file, view_model.region_volume)

    tracts_h5 = TractsH5(path_for(self.storage_path, TractsH5, datatype.gid))

    tract_start_indices = [0]
    tract_region = []

    with TvbZip(view_model.data_file) as zipf:
        for tractf in sorted(zipf.namelist()):  # one track per file
            if not tractf.endswith('.txt'):  # omit directories and other non track files
                continue
            vertices_file = zipf.open(tractf)
            datatype.tract_vertices = numpy.loadtxt(vertices_file, dtype=numpy.float32)

            tract_start_indices.append(tract_start_indices[-1] + len(datatype.tract_vertices))
            tracts_h5.write_vertices_slice(datatype.tract_vertices)

            if view_model.region_volume is not None:
                tract_region.append(self._get_tract_region(datatype.tract_vertices[0]))
            vertices_file.close()

    tracts_h5.close()
    self.region_volume_h5.close()

    datatype.tract_start_idx = tract_start_indices
    datatype.tract_region = numpy.array(tract_region, dtype=numpy.int16)
    return datatype

def launch(self, data_file):
    if data_file is None:
        raise LaunchException("Please select ZIP file which contains data to import")

    # todo warn: all in memory, this is memory hungry; at least twice the tractography
    tracts = []
    max_tract_count = 0

    with TvbZip(data_file) as zipf:
        # todo: sort tract8 before tract74 (parse ints out of the file names)
        for tractf in zipf.namelist():
            vertices_file = zipf.open(tractf)
            tract_vertices = numpy.loadtxt(vertices_file, dtype=numpy.float32)
            tracts.append(tract_vertices)
            max_tract_count = max(max_tract_count, len(tract_vertices))
            vertices_file.close()

    vertices_arr = numpy.zeros((len(tracts), max_tract_count, 3))
    counts_arr = numpy.zeros((len(tracts),))

    for i, tr in enumerate(tracts):
        vertices_arr[i, :len(tr)] = tr
        counts_arr[i] = len(tr)

    datatype = Tracts()
    datatype.storage_path = self.storage_path
    datatype.vertices = vertices_arr
    datatype.vertex_counts = counts_arr
    return datatype

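# Sketch (illustrative assumption, standalone numpy only): the padding scheme above stores
# ragged tracts in a dense (n_tracts, max_len, 3) array, and vertex_counts records how many
# rows of each slice are real, so the zero padding can be stripped again on read-back.
import numpy

tracts = [numpy.ones((4, 3), dtype=numpy.float32), numpy.ones((2, 3), dtype=numpy.float32)]
max_len = max(len(t) for t in tracts)

vertices_arr = numpy.zeros((len(tracts), max_len, 3))
counts_arr = numpy.zeros((len(tracts),))
for i, tr in enumerate(tracts):
    vertices_arr[i, :len(tr)] = tr
    counts_arr[i] = len(tr)

# recover the second tract without its zero padding
recovered = vertices_arr[1, :int(counts_arr[1])]
assert recovered.shape == (2, 3)
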
def __init__(self, path):
    self.bi_hemispheric = False
    self.vertices, self.normals, self.triangles = [], [], []
    self.hemisphere_mask = []
    self._read_vertices = 0

    with TvbZip(path) as self._zipf:
        self._read()

def export_data_with_references(self, export_data_zip_path, data_export_folder):
    with TvbZip(export_data_zip_path, "w") as zip_file:
        for filename in os.listdir(data_export_folder):
            zip_file.write(os.path.join(data_export_folder, filename), filename)

    return None, export_data_zip_path, True

def export_project(self, project, optimize_size=False):
    """
    Given a project root and the TVB storage_path, create a ZIP ready for export.
    :param project: project object which identifies project to be exported
    :param optimize_size: when True, export only the DataTypes with the visibility flag set ON
    """
    if project is None:
        raise ExportException("Please provide project to be exported")

    files_helper = FilesHelper()
    project_folder = files_helper.get_project_folder(project)
    project_datatypes = self._gather_project_datatypes(project, optimize_size)
    to_be_exported_folders = []
    considered_op_ids = []

    if optimize_size:
        ## take only the DataType with visibility flag set ON
        for dt in project_datatypes:
            if dt[KEY_OPERATION_ID] not in considered_op_ids:
                to_be_exported_folders.append({'folder': files_helper.get_project_folder(project, str(dt[KEY_OPERATION_ID])),
                                               'archive_path_prefix': str(dt[KEY_OPERATION_ID]) + os.sep})
                considered_op_ids.append(dt[KEY_OPERATION_ID])
    else:
        to_be_exported_folders.append({'folder': project_folder,
                                       'archive_path_prefix': '',
                                       'exclude': ["TEMP"]})

    # Compute path and name of the zip file
    now = datetime.now()
    date_str = now.strftime("%Y-%m-%d_%H-%M")
    zip_file_name = "%s_%s.%s" % (date_str, project.name, self.ZIP_FILE_EXTENSION)

    export_folder = self._build_data_export_folder(project)
    result_path = os.path.join(export_folder, zip_file_name)

    with TvbZip(result_path, "w") as zip_file:
        # Pack project [filtered] content into a ZIP file:
        LOG.debug("Done preparing, now we will write folders " + str(len(to_be_exported_folders)))
        LOG.debug(str(to_be_exported_folders))
        for pack in to_be_exported_folders:
            zip_file.write_folder(**pack)

        LOG.debug("Done exporting files, now we will write the burst configurations...")
        self._export_bursts(project, project_datatypes, zip_file)

        LOG.debug("Done exporting burst configurations, now we will export linked DTs")
        self._export_linked_datatypes(project, zip_file)

        ## Make sure the Project.xml file gets copied:
        if optimize_size:
            LOG.debug("Done linked, now we write the project xml")
            zip_file.write(files_helper.get_project_meta_file_path(project.name), files_helper.TVB_PROJECT_FILE)
        LOG.debug("Done, closing")

    return result_path

def export_simulator_configuration(self, burst_id):
    burst = dao.get_burst_by_id(burst_id)
    if burst is None:
        raise InvalidExportDataException("Could not find burst with ID " + str(burst_id))

    op_folder = self.files_helper.get_project_folder(burst.project, str(burst.fk_simulation))
    tmp_export_folder = self._build_data_export_folder(burst)
    tmp_sim_folder = os.path.join(tmp_export_folder, self.EXPORTED_SIMULATION_NAME)

    if not os.path.exists(tmp_sim_folder):
        os.makedirs(tmp_sim_folder)

    all_view_model_paths, all_datatype_paths = h5.gather_references_of_view_model(burst.simulator_gid, op_folder)

    burst_path = h5.determine_filepath(burst.gid, op_folder)
    all_view_model_paths.append(burst_path)

    for vm_path in all_view_model_paths:
        dest = os.path.join(tmp_sim_folder, os.path.basename(vm_path))
        self.files_helper.copy_file(vm_path, dest)

    for dt_path in all_datatype_paths:
        dest = os.path.join(tmp_sim_folder, self.EXPORTED_SIMULATION_DTS_DIR, os.path.basename(dt_path))
        self.files_helper.copy_file(dt_path, dest)

    main_vm_path = h5.determine_filepath(burst.simulator_gid, tmp_sim_folder)
    H5File.remove_metadata_param(main_vm_path, 'history_gid')

    now = datetime.now()
    date_str = now.strftime("%Y-%m-%d_%H-%M")
    zip_file_name = "%s_%s.%s" % (date_str, str(burst_id), self.ZIP_FILE_EXTENSION)

    result_path = os.path.join(tmp_export_folder, zip_file_name)
    with TvbZip(result_path, "w") as zip_file:
        zip_file.write_folder(tmp_sim_folder)

    self.files_helper.remove_folder(tmp_sim_folder)
    return result_path

def export_simulator_configuration(self, burst_id):
    burst = dao.get_burst_by_id(burst_id)
    if burst is None:
        raise InvalidExportDataException("Could not find burst with ID " + str(burst_id))

    op_folder = FilesHelper().get_project_folder(burst.project, str(burst.fk_simulation))

    now = datetime.now()
    date_str = now.strftime("%Y-%m-%d_%H-%M")
    zip_file_name = "%s_%s.%s" % (date_str, str(burst_id), self.ZIP_FILE_EXTENSION)

    tmp_export_folder = self._build_data_export_folder(burst)
    result_path = os.path.join(tmp_export_folder, zip_file_name)

    with TvbZip(result_path, "w") as zip_file:
        for filename in os.listdir(op_folder):
            zip_file.write(os.path.join(op_folder, filename), filename)

    return result_path

def test_export(self, initialize_linked_projects):
    export_file = self.export_mng.export_project(self.dest_project)
    with TvbZip(export_file) as z:
        assert 'links-to-external-projects/Operation.xml' in z.namelist()

def export_project(self, project, optimize_size=False):
    """
    Given a project root and the TVB storage_path, create a ZIP ready for export.
    :param project: project object which identifies project to be exported
    :param optimize_size: when True, export only the DataTypes with the visibility flag set ON
    """
    if project is None:
        raise ExportException("Please provide project to be exported")

    project_folder = self.files_helper.get_project_folder(project)
    project_datatypes = dao.get_datatypes_in_project(project.id, only_visible=optimize_size)
    to_be_exported_folders = []
    considered_op_ids = []
    folders_to_exclude = self._get_op_with_errors(project.id)

    if optimize_size:
        # take only the DataType with visibility flag set ON
        for dt in project_datatypes:
            op_id = dt.fk_from_operation
            if op_id not in considered_op_ids:
                to_be_exported_folders.append({
                    'folder': self.files_helper.get_project_folder(project, str(op_id)),
                    'archive_path_prefix': str(op_id) + os.sep,
                    'exclude': folders_to_exclude
                })
                considered_op_ids.append(op_id)
    else:
        folders_to_exclude.append("TEMP")
        to_be_exported_folders.append({
            'folder': project_folder,
            'archive_path_prefix': '',
            'exclude': folders_to_exclude
        })

    # Compute path and name of the zip file
    now = datetime.now()
    date_str = now.strftime("%Y-%m-%d_%H-%M")
    zip_file_name = "%s_%s.%s" % (date_str, project.name, self.ZIP_FILE_EXTENSION)

    export_folder = self._build_data_export_folder(project)
    result_path = os.path.join(export_folder, zip_file_name)

    with TvbZip(result_path, "w") as zip_file:
        # Pack project [filtered] content into a ZIP file:
        self.logger.debug("Done preparing, now we will write folders " + str(len(to_be_exported_folders)))
        self.logger.debug(str(to_be_exported_folders))
        for pack in to_be_exported_folders:
            zip_file.write_folder(**pack)

        self.logger.debug("Done exporting files, now we will export linked DTs")
        self._export_linked_datatypes(project, zip_file)

        # Make sure the Project.xml file gets copied:
        if optimize_size:
            self.logger.debug("Done linked, now we write the project xml")
            zip_file.write(self.files_helper.get_project_meta_file_path(project.name),
                           self.files_helper.TVB_PROJECT_FILE)
        self.logger.debug("Done, closing")

    return result_path

def test_export(self):
    export_file = self.export_mng.export_project(self.dest_project)
    with TvbZip(export_file) as z:
        self.assertTrue('links-to-external-projects/Operation.xml' in z.namelist())

def test_export(self, initialize_linked_projects):
    export_file = self.export_mng.export_project(self.dest_project)
    with TvbZip(export_file) as z:
        assert sum('links-to-external-projects' in s for s in z.namelist()) == 2, \
            "Two linked datatypes should have been created!"

def test_export(self, transactional_setup_fixture):
    export_file = self.export_mng.export_project(self.dest_project)
    with TvbZip(export_file) as z:
        assert 'links-to-external-projects/Operation.xml' in z.namelist()