def test_multi_write():
    # Makes a random dict of random paths and variables (random number
    # of randomized paths with random numpy arrays as values).
    data = dict()
    for i in range(0, random.randint(min_dict_keys, max_dict_keys)):
        name = random_name()
        data[name] = random_numpy(
            random_numpy_shape(dict_value_subarray_dimensions,
                               max_dict_value_subarray_axis_length),
            dtype=random.choice(dtypes))

    # Write it and then read it back item by item.
    fld = None
    try:
        fld = tempfile.mkstemp()
        os.close(fld[0])
        filename = fld[1]
        hdf5storage.writes(mdict=data, filename=filename)
        out = dict()
        for p in data:
            out[p] = hdf5storage.read(path=p, filename=filename)
    finally:
        if fld is not None:
            os.remove(fld[1])

    # Compare data and out.
    assert_equal(out, data)
def write_mat(mat_fn, vertices, faces, save_areas=True):
    mat_data = {}
    mat_data[u'vertices'] = vertices
    mat_data[u'faces'] = faces
    if save_areas:
        mat_data[u'areas'] = cal_areas(vertices, faces)
    hs.writes(mat_data, mat_fn, options=hs.Options(matlab_compatible=True))
def test_multi_write():
    # Makes a random dict of random paths and variables (random number
    # of randomized paths with random numpy arrays as values).
    data = dict()
    for i in range(0, random.randint(min_dict_keys, max_dict_keys)):
        name = random_name()
        data[name] = random_numpy(
            random_numpy_shape(dict_value_subarray_dimensions,
                               max_dict_value_subarray_axis_length),
            dtype=random.choice(dtypes))

    # Write it and then read it back item by item.
    if os.path.exists(filename):
        os.remove(filename)
    try:
        hdf5storage.writes(mdict=data, filename=filename)
        out = dict()
        for p in data:
            out[p] = hdf5storage.read(path=p, filename=filename)
    finally:
        if os.path.exists(filename):
            os.remove(filename)

    # Compare data and out.
    assert_equal(out, data)
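# The two test variants above exercise the same write/read round trip. A
# minimal sketch of that pattern, assuming hdf5storage and numpy are
# installed (the file name and keys here are illustrative):
import numpy as np
import hdf5storage

data = {'a': np.arange(4), 'b': np.eye(2)}
hdf5storage.writes(mdict=data, filename='example.h5')
recovered = {p: hdf5storage.read(path=p, filename='example.h5') for p in data}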
def saveData():
    if f.rank == 0:
        timing('start', 'saveTime')
        dataSave = {'netParams': replaceFuncObj(f.net.params), 'simConfig': f.cfg,
                    'simData': f.allSimData, 'netCells': f.net.allCells}

        if 'timestampFilename' in f.cfg:  # add timestamp to filename
            if f.cfg['timestampFilename']:
                timestamp = time()
                timestampStr = datetime.fromtimestamp(timestamp).strftime('%Y%m%d_%H%M%S')
                f.cfg['filename'] = f.cfg['filename'] + '-' + timestampStr

        # Save to pickle file
        if f.cfg['savePickle']:
            import pickle
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.pkl'))
            with open(f.cfg['filename'] + '.pkl', 'wb') as fileObj:
                pickle.dump(dataSave, fileObj)
            print('Finished saving!')

        # Save to dpk file
        if f.cfg['saveDpk']:
            import os, gzip
            import pickle as pk
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.dpk'))
            fn = f.cfg['filename'].split('.')
            fn = '{}{:d}.{}'.format(fn[0], int(round(h.t)), fn[1])  # insert integer time into the middle of the file name
            gzip.open(fn, 'wb').write(pk.dumps(f.allSimData))  # write compressed string
            print('Wrote file {}/{} of size {:.3f} MB'.format(os.getcwd(), fn, os.path.getsize(fn) / 1e6))

        # Save to json file
        if f.cfg['saveJson']:
            import json
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.json'))
            with open(f.cfg['filename'] + '.json', 'w') as fileObj:
                json.dump(dataSave, fileObj)
            print('Finished saving!')

        # Save to mat file
        if f.cfg['saveMat']:
            from scipy.io import savemat
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.mat'))
            savemat(f.cfg['filename'] + '.mat', replaceNoneObj(dataSave))  # replace None and {} with [] so it can be saved in .mat format
            print('Finished saving!')

        # Save to HDF5 file
        if f.cfg['saveHDF5']:
            dataSaveUTF8 = dict2utf8(replaceNoneObj(dataSave))  # replace None and {} with [], and convert to utf
            import hdf5storage
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.hdf5'))
            hdf5storage.writes(dataSaveUTF8, filename=f.cfg['filename'] + '.hdf5')
            print('Finished saving!')

        # Save timing
        timing('stop', 'saveTime')
        if f.cfg['timing'] and f.cfg['saveTiming']:
            import pickle
            with open('timing.pkl', 'wb') as file:
                pickle.dump(f.timing, file)
def write_pdir(pdir_fn, vertices, faces, save_normal_dir=True, mat_fn=''):
    vert_nei_vert, vert_nei_face = construct_vert_nei(vertices, faces)
    H, K, k1, k2, shape_index, curvedness_index, pdir1, pdir2 = get_curvature(
        vertices, vert_nei_vert, nei_k=5)
    f = open(pdir_fn, 'w')
    for i in range(pdir1.shape[0]):
        f.write('%f %f %f\n' % (pdir1[i][0], pdir1[i][1], pdir1[i][2]))
        # f.write('%f %f %f\n' % (pdir2[i][0], pdir2[i][1], pdir2[i][2]))
        # if save_normal_dir:
        #     normal_dir = np.cross(pdir1[i], pdir2[i])
        #     f.write('%f %f %f\n' % (normal_dir[0], normal_dir[1], normal_dir[2]))
    f.close()

    mat_data = {}
    mat_data[u'shape_index'] = shape_index
    hs.writes(mat_data, mat_fn, options=hs.Options(matlab_compatible=True))
def savemat(fname, data):
    ### Description
    # MATLAB: the default MAT format holds < 2 GB; files larger than that
    # require the v7.3 (HDF5-based) format. scipy.io.savemat can save at
    # most 4 GB, so hdf5storage is used here instead.
    ### Input:
    # fname: file name (its directory is created if missing)
    # data: dictionary of variables to save
    check_make_dir(os.path.dirname(fname))

    # debugging
    # sio.savemat(os.path.join(dirname, fname), mdict=data)
    hdf5storage.writes(mdict=data, filename=fname, truncate_existing=True,
                       matlab_compatible=True, compress=True,
                       compression_algorithm='gzip')
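# A usage sketch for the wrapper above; the path and variable name are
# illustrative, and keys must be valid MATLAB variable names:
import numpy as np

savemat('results/output.mat', {'signal': np.random.rand(1000)})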
def mat_file_write(log_mat, mat_file_name, mat_file_dir, mat_num_dig,
                   mat_var, mat_cmds, index, sim):
    """
    Writes new data to the specified mat-file

    :parameters:
        log_mat: (bool) : variables are saved to mat-file if true
            = self.config.p_loop.log_mat
        mat_file_name: (string) : file name of the mat-file
            = self.config.p_loop.mat_file_name
        mat_file_dir: (string) : directory of the mat-file
            = self.config.p_loop.mat_file_dir
        mat_num_dig: (int) : number of digits used in the mat-file variables
            = self.config.p_loop.mat_num_dig
        mat_var: (list of strings) : list of variable names used in mat-file
            = self.config.p_loop.mat_var
        mat_cmds: (list of strings) : list of COMPASS commands used to retrieve desired data
            = self.config.p_loop.mat_cmds
        index: (int) : iteration index of optical calculation
        sim (class sim) : simulation object evaluated by the commands in mat_cmds
    """
    # Creating a list of strings:
    # ["Alpha001", "Bravo001", etc.] (mat_var with appended index)
    num_setting = '{:0%d}' % mat_num_dig
    list_names = [None] * len(mat_var)
    for k in range(len(mat_var)):
        list_names[k] = mat_var[k] + num_setting.format(index + 1)

    # Creating a list of variables to be saved in mat-file
    list_vars = [None] * len(mat_cmds)
    for k in range(len(mat_cmds)):
        cmd = "list_vars[%i]" % k
        cmd = cmd + " = " + mat_cmds[k]
        exec(cmd)

    # Create dictionary with variables to be saved
    mat_dict = dict(zip(list_names, list_vars))

    # Save desired variables to mat-file
    hdf5storage.writes(mat_dict, filename=mat_file_dir + mat_file_name,
                       matlab_compatible=True)
def make_mat(self, save_path=None):
    """
    Makes spatial footprints .mat for the CellReg Matlab package.

    :parameter
    ---
    save_path: str, path to save .mat file. Defaults to a folder called
        SpatialFootprints inside the session_id folder.
    """
    if save_path is None:
        save_path = os.path.join(self.mouse_path, "SpatialFootprints")
    cellreg_path = os.path.join(save_path, "CellRegResults")

    try:
        os.mkdir(save_path)
    except FileExistsError:
        print("Directory already exists. Proceeding.")

    try:
        os.mkdir(cellreg_path)
    except FileExistsError:
        print("Directory already exists. Proceeding.")

    for session, session_number in zip(self.session_paths, self.session_numbers):
        # File name.
        fname = os.path.join(save_path, session_number + ".mat")

        # Load data.
        data = open_minian(session)

        # Reshape matrix. CellReg reads (neuron, x, y) arrays.
        footprints = np.asarray(data.A)
        if not self.minian_perf:
            footprints = np.rollaxis(footprints, 2)

        # Save.
        matfiledata = {}
        matfiledata[u'footprints'] = footprints
        hdf5storage.writes(mdict=matfiledata, filename=Path(fname))
        print(f"Saved {fname}")
def write_normal(norm_fn, vertices, faces):
    _, vert_nei_face = construct_vert_nei(vertices, faces)
    v0 = vertices[faces[:, 0]]
    v1 = vertices[faces[:, 1]]
    v2 = vertices[faces[:, 2]]
    e1 = v1 - v0
    e2 = v2 - v0
    face_normal_vec = np.cross(e1, e2)
    face_normal_vec /= np.linalg.norm(face_normal_vec, axis=1, keepdims=True)

    vert_normal_vec = []
    for i in range(len(vert_nei_face)):
        nv = np.zeros(3)
        for vnf in vert_nei_face[i]:
            nv += face_normal_vec[vnf, :]
        nv /= len(vert_nei_face[i])
        vert_normal_vec.append(nv)
    vert_normal_vec = np.array(vert_normal_vec)

    mat_data = {}
    mat_data[u'normals'] = vert_normal_vec
    hs.writes(mat_data, norm_fn, options=hs.Options(matlab_compatible=True))
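# The face normals above come from the cross product of two triangle edges;
# a minimal worked example on a single right triangle in the z = 0 plane:
import numpy as np

v0, v1, v2 = np.array([0., 0., 0.]), np.array([1., 0., 0.]), np.array([0., 1., 0.])
n = np.cross(v1 - v0, v2 - v0)  # -> [0, 0, 1]
n /= np.linalg.norm(n)          # unit normal of the face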
def mat_file_write(matlog_object, index, sim):
    """
    Writes new data to the specified mat-file

    :parameters:
        matlog_object (Parameter class) : instance of Param_mat_logger
        index: (int) : iteration index of optical calculation
        sim (class sim) : simulation object
    """
    # assignments
    mat_file_name = matlog_object.mat_file_name
    mat_file_dir = matlog_object.mat_file_dir
    mat_num_dig = matlog_object.mat_num_dig
    mat_var = matlog_object.mat_var
    mat_cmds = matlog_object.mat_cmds

    # Creating a list of strings:
    # ["Alpha001", "Bravo001", etc.] (mat_var with appended index)
    num_setting = '{:0%d}' % mat_num_dig
    list_names = [None] * len(mat_var)
    for k in range(len(mat_var)):
        list_names[k] = mat_var[k] + num_setting.format(index + 1)

    # Creating a list of variables to be saved in mat-file
    list_vars = [None] * len(mat_cmds)
    for k in range(len(mat_cmds)):
        cmd = "list_vars[%i]" % k
        cmd = cmd + " = " + mat_cmds[k]
        exec(cmd)

    # Create dictionary with variables to be saved
    mat_dict = dict(zip(list_names, list_vars))

    # Save desired variables to mat-file
    hdf5storage.writes(mat_dict, filename=mat_file_dir + mat_file_name,
                       matlab_compatible=True)
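# Both mat_file_write variants build list_vars by assembling assignment
# statements for exec. A sketch of the same idea using eval on each command
# string directly, which avoids composing statements (the names below are
# hypothetical stand-ins for sim and the mat_cmds entries):
import numpy as np

sim = type("Sim", (), {"iterations": np.arange(3)})()  # hypothetical stand-in
mat_cmds = ["sim.iterations", "sim.iterations * 2"]    # expression strings
list_vars = [eval(cmd) for cmd in mat_cmds]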
def create_buffer(param, array, buffer_pixel_amount, GeoRef, Outraster):
    kernel = np.zeros((2 * buffer_pixel_amount + 1, 2 * buffer_pixel_amount + 1))
    y, x = np.ogrid[-buffer_pixel_amount:buffer_pixel_amount + 1,
                    -buffer_pixel_amount:buffer_pixel_amount + 1]
    mask = x**2 + y**2 <= buffer_pixel_amount**2
    kernel[mask] = 1
    # kernel = np.tri(2 * buffer_pixel_amount + 1, 2 * buffer_pixel_amount + 1, buffer_pixel_amount).astype(int)
    # kernel = kernel * kernel.T * np.flipud(kernel) * np.fliplr(kernel)

    A_array = scipy.ndimage.maximum_filter(array, footprint=kernel, mode="constant", cval=0)
    A_NotArray = (~A_array).astype(int)

    # saving file
    hdf5storage.writes({"BUFFER": A_NotArray}, Outraster,
                       store_python_metadata=True, matlab_compatible=True)
    if param["savetiff_inputmaps"]:
        array2raster(ul.changeExt2tif(Outraster), GeoRef["RasterOrigin"],
                     GeoRef["pixelWidth"], GeoRef["pixelHeight"], A_NotArray)
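# The kernel above is a disc-shaped footprint, so maximum_filter dilates the
# input mask by buffer_pixel_amount pixels. A minimal self-contained sketch
# with an illustrative 7x7 mask and radius 2:
import numpy as np
import scipy.ndimage

mask = np.zeros((7, 7), dtype=int)
mask[3, 3] = 1
y, x = np.ogrid[-2:3, -2:3]
disc = x**2 + y**2 <= 4
dilated = scipy.ndimage.maximum_filter(mask, footprint=disc, mode="constant", cval=0)
# dilated now holds a disc of ones of radius 2 centred on (3, 3).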
def saveData(include=None, filename=None, saveLFP=True):
    """
    Function to save simulation data to file

    Parameters
    ----------
    include : list
        What data to save
        **Default:** ``sim.cfg.saveDataInclude``
        **Options:** The list may include any combination of the following:
        ``'simData'``, ``'simConfig'``, ``'netParams'``, ``'net'``.

    filename : str
        Path and file name to save data to
        **Default:** ``None``
    """

    from .. import sim

    if sim.cfg.validateDataSaveOptions() is False:
        return []

    if sim.rank == 0 and not getattr(sim.net, 'allCells', None) and not getattr(sim, 'allSimData', None):
        needGather = True
    else:
        needGather = False
    if needGather:
        gather.gatherData()

    if filename:
        sim.cfg.filename = filename

    if sim.rank == 0:
        sim.timing('start', 'saveTime')
        import os

        # copy source files
        if isinstance(sim.cfg.backupCfgFile, list) and len(sim.cfg.backupCfgFile) == 2:
            simName = sim.cfg.simLabel if sim.cfg.simLabel else os.path.basename(sim.cfg.filename)
            print(('Copying cfg file %s ... ' % simName))
            source = sim.cfg.backupCfgFile[0]
            targetFolder = sim.cfg.backupCfgFile[1]
            # make dir
            try:
                os.mkdir(targetFolder)
            except OSError:
                if not os.path.exists(targetFolder):
                    print(' Could not create target folder: %s' % (targetFolder))
            # copy file
            targetFile = targetFolder + '/' + simName + '_cfg.py'
            if os.path.exists(targetFile):
                print(' Removing prior cfg file', targetFile)
                os.system('rm ' + targetFile)
            os.system('cp ' + source + ' ' + targetFile)

        # create folder if missing
        targetFolder = os.path.dirname(sim.cfg.filename)
        if targetFolder and not os.path.exists(targetFolder):
            try:
                os.mkdir(targetFolder)
            except OSError:
                print(' Could not create target folder: %s' % (targetFolder))

        # saving data
        if not include:
            include = sim.cfg.saveDataInclude
        dataSave = {}
        net = {}

        dataSave['netpyne_version'] = sim.version(show=False)
        dataSave['netpyne_changeset'] = sim.gitChangeset(show=False)

        if getattr(sim.net.params, 'version', None):
            dataSave['netParams_version'] = sim.net.params.version
        if 'netParams' in include:
            sim.net.params.__dict__.pop('_labelid', None)
            net['params'] = utils.replaceFuncObj(sim.net.params.__dict__)
        if 'net' in include:
            include.extend(['netPops', 'netCells'])
        if 'netCells' in include and hasattr(sim.net, 'allCells'):
            net['cells'] = sim.net.allCells
        if 'netPops' in include and hasattr(sim.net, 'allPops'):
            net['pops'] = sim.net.allPops
        if net:
            dataSave['net'] = net
        if 'simConfig' in include:
            dataSave['simConfig'] = sim.cfg.__dict__
        if 'simData' in include:
            if saveLFP:
                if 'LFP' in sim.allSimData:
                    sim.allSimData['LFP'] = sim.allSimData['LFP'].tolist()
                    if hasattr(sim.net, 'recXElectrode'):
                        dataSave['net']['recXElectrode'] = sim.net.recXElectrode
            dataSave['simData'] = sim.allSimData

        savedFiles = []
        if dataSave:
            if sim.cfg.timestampFilename:
                timestamp = time()
                timestampStr = '-' + datetime.fromtimestamp(timestamp).strftime('%Y%m%d_%H%M%S')
            else:
                timestampStr = ''

            filePath = sim.cfg.filename + '_data' + timestampStr
            if hasattr(sim.cfg, 'saveFolder') and sim.cfg.saveFolder:
                filePath = os.path.join(sim.cfg.saveFolder, sim.cfg.filename + '_data' + timestampStr)
                if hasattr(sim.cfg, 'simLabel') and sim.cfg.simLabel:
                    filePath = os.path.join(sim.cfg.saveFolder, sim.cfg.simLabel + '_data' + timestampStr)

            # create folder if missing
            targetFolder = os.path.dirname(filePath)
            if targetFolder and not os.path.exists(targetFolder):
                try:
                    os.mkdir(targetFolder)
                except OSError:
                    print(' Could not create target folder: %s' % (targetFolder))

            # Save to pickle file
            if sim.cfg.savePickle:
                import pickle
                path = filePath + '.pkl'
                dataSave = utils.replaceDictODict(dataSave)
                print(f'Saving output as {path} ... ')
                with open(path, 'wb') as fileObj:
                    pickle.dump(dataSave, fileObj)
                savedFiles.append(path)
                print('Finished saving!')

            # Save to dpk file
            if sim.cfg.saveDpk:
                import gzip
                path = filePath + '.dpk'
                print(f'Saving output as {path} ... ')
                # fn = filePath  #.split('.')
                gzip.open(path, 'wb').write(pk.dumps(dataSave))  # write compressed string
                savedFiles.append(path)
                print('Finished saving!')

            # Save to json file
            if sim.cfg.saveJson:
                path = filePath + '.json'
                # Make it work for Python 2+3 and with Unicode
                print(f'Saving output as {path} ... ')
                # dataSave = utils.replaceDictODict(dataSave)  # not required since json saves as dict
                sim.saveJSON(path, dataSave, checkFileTimeout=5)
                savedFiles.append(path)
                print('Finished saving!')

            # Save to mat file
            if sim.cfg.saveMat:
                from scipy.io import savemat
                path = filePath + '.mat'
                print(f'Saving output as {path} ... ')
                savemat(path, utils.tupleToList(utils.replaceNoneObj(dataSave)))  # replace None and {} with [] so it can be saved in .mat format
                savedFiles.append(path)
                print('Finished saving!')

            # Save to HDF5 file (uses the very inefficient hdf5storage module, which supports dicts)
            if sim.cfg.saveHDF5:
                dataSaveUTF8 = utils._dict2utf8(utils.replaceNoneObj(dataSave))  # replace None and {} with [], and convert to utf
                import hdf5storage
                path = filePath + '.hdf5'
                print(f'Saving output as {path} ... ')
                hdf5storage.writes(dataSaveUTF8, filename=path)
                savedFiles.append(path)
                print('Finished saving!')

            # Save to CSV file (currently only saves spikes)
            if sim.cfg.saveCSV:
                if 'simData' in dataSave:
                    import csv
                    path = filePath + '.csv'
                    print(f'Saving output as {path} ... ')
                    writer = csv.writer(open(path, 'w', newline=''))
                    for dic in dataSave['simData']:
                        for values in dic:
                            writer.writerow(values)
                    savedFiles.append(path)
                    print('Finished saving!')

            # Save to Dat file(s)
            if sim.cfg.saveDat:
                traces = sim.cfg.recordTraces
                for ref in list(traces.keys()):
                    for cellid in list(sim.allSimData[ref].keys()):
                        dat_file_name = '%s_%s.dat' % (ref, cellid)
                        dat_file = open(dat_file_name, 'w')
                        trace = sim.allSimData[ref][cellid]
                        print(("Saving %i points of data on: %s:%s to %s" % (len(trace), ref, cellid, dat_file_name)))
                        for i in range(len(trace)):
                            dat_file.write('%s\t%s\n' % ((i * sim.cfg.dt / 1000), trace[i] / 1000))
                        dat_file.close()
                        savedFiles.append(dat_file_name)
                print('Finished saving!')

            # Save timing
            if sim.cfg.timing:
                sim.timing('stop', 'saveTime')
                print(('  Done; saving time = %0.2f s.' % sim.timingData['saveTime']))
            if sim.cfg.timing and sim.cfg.saveTiming:
                import pickle
                with open('timing.pkl', 'wb') as file:
                    pickle.dump(sim.timing, file)

            # clean to avoid mem leaks
            for key in list(dataSave.keys()):
                del dataSave[key]
            del dataSave
        else:
            print('Nothing to save')

        return savedFiles
def saveInNode(gatherLFP=True, include=None, filename=None):
    from .. import sim
    from ..specs import Dict, ODict

    # This first part should be split to a separate function in gather.py
    # flag to avoid saving sections data for each cell (saves gather time and space; cannot inspect cell secs or re-simulate)
    if not sim.cfg.saveCellSecs:
        for cell in sim.net.cells:
            cell.secs = None
            cell.secLists = None

    # flag to avoid saving conns data for each cell (saves gather time and space; cannot inspect cell conns or re-simulate)
    if not sim.cfg.saveCellConns:
        for cell in sim.net.cells:
            cell.conns = []
    # Store conns in a compact list format instead of a long dict format (cfg.compactConnFormat contains list of keys to include)
    elif sim.cfg.compactConnFormat:
        sim.compactConnFormat()

    # remove data structures used to calculate LFP
    if gatherLFP and sim.cfg.recordLFP and hasattr(sim.net, 'compartCells') and sim.cfg.createNEURONObj:
        for cell in sim.net.compartCells:
            try:
                del cell.imembVec
                del cell.imembPtr
                del cell._segCoords
            except:
                pass
        for pop in list(sim.net.pops.values()):
            try:
                del pop._morphSegCoords
            except:
                pass

    simDataVecs = ['spkt', 'spkid', 'stims'] + list(sim.cfg.recordTraces.keys())
    singleNodeVecs = ['t']

    if sim.cfg.createNEURONObj:
        sim.net.allCells = [Dict(c.__getstate__()) for c in sim.net.cells]
    else:
        sim.net.allCells = [c.__dict__ for c in sim.net.cells]

    sim.net.allPops = ODict()
    for popLabel, pop in sim.net.pops.items():
        sim.net.allPops[popLabel] = pop.__getstate__()  # can't use dict comprehension for OrderedDict

    saveData = Dict()
    for k in list(sim.simData.keys()):  # initialize all keys of allSimData dict
        saveData[k] = Dict()

    for key, val in sim.simData.items():  # update simData dicts of dicts of h.Vector
        if key in simDataVecs + singleNodeVecs:  # simData dicts that contain Vectors
            if isinstance(val, dict):
                for cell, val2 in val.items():
                    if isinstance(val2, dict):
                        saveData[key].update(Dict({cell: Dict()}))
                        for stim, val3 in val2.items():
                            saveData[key][cell].update({stim: list(val3)})  # update simData dicts which are dicts of dicts of Vectors (e.g. ['stim']['cell_1']['background'] = h.Vector)
                    else:
                        saveData[key].update({cell: list(val2)})  # update simData dicts which are dicts of Vectors (e.g. ['v']['cell_1'] = h.Vector)
            else:
                saveData[key] = list(saveData[key]) + list(val)  # update simData dicts which are Vectors
        else:
            saveData[key] = val  # update simData dicts which are not Vectors

    if filename:
        sim.cfg.filename = filename

    sim.timing('start', 'saveTime')
    import os

    # copy source files
    if isinstance(sim.cfg.backupCfgFile, list) and len(sim.cfg.backupCfgFile) == 2:
        simName = sim.cfg.simLabel if sim.cfg.simLabel else os.path.basename(sim.cfg.filename)
        print(('Copying cfg file %s ... ' % simName))
        source = sim.cfg.backupCfgFile[0]
        targetFolder = sim.cfg.backupCfgFile[1]
        # make dir
        try:
            os.mkdir(targetFolder)
        except OSError:
            if not os.path.exists(targetFolder):
                print(' Could not create target folder: %s' % (targetFolder))
        # copy file
        targetFile = targetFolder + '/' + simName + '_cfg.py'
        if os.path.exists(targetFile):
            print(' Removing prior cfg file', targetFile)
            os.system('rm ' + targetFile)
        os.system('cp ' + source + ' ' + targetFile)

    # create folder if missing
    targetFolder = os.path.dirname(sim.cfg.filename)
    if targetFolder and not os.path.exists(targetFolder):
        try:
            os.mkdir(targetFolder)
        except OSError:
            print(' Could not create target folder: %s' % (targetFolder))

    # saving data
    if not include:
        include = sim.cfg.saveDataInclude
    dataSave = {}
    net = {}

    dataSave['netpyne_version'] = sim.version(show=False)
    dataSave['netpyne_changeset'] = sim.gitChangeset(show=False)

    if getattr(sim.net.params, 'version', None):
        dataSave['netParams_version'] = sim.net.params.version
    if 'netParams' in include:
        sim.net.params.__dict__.pop('_labelid', None)
        net['params'] = utils.replaceFuncObj(sim.net.params.__dict__)
    if 'net' in include:
        include.extend(['netPops', 'netCells'])
    if 'netCells' in include:
        net['cells'] = sim.net.allCells
    if 'netPops' in include:
        net['pops'] = sim.net.allPops
    if net:
        dataSave['net'] = net
    if 'simConfig' in include:
        dataSave['simConfig'] = sim.cfg.__dict__
    if 'simData' in include:
        if 'LFP' in saveData:
            saveData['LFP'] = saveData['LFP'].tolist()
        dataSave['simData'] = saveData

    if dataSave:
        if sim.cfg.timestampFilename:
            timestamp = time()
            timestampStr = '-' + datetime.fromtimestamp(timestamp).strftime('%Y%m%d_%H%M%S')
        else:
            timestampStr = ''
        filePath = sim.cfg.filename + timestampStr

        # Save to pickle file
        if sim.cfg.savePickle:
            import pickle
            dataSave = utils.replaceDictODict(dataSave)
            print(('Saving output as %s ... ' % (filePath + '.pkl')))
            with open(filePath + '.pkl', 'wb') as fileObj:
                pickle.dump(dataSave, fileObj)
            print('Finished saving!')

        # Save to dpk file
        if sim.cfg.saveDpk:
            import gzip
            print(('Saving output as %s ... ' % (filePath + '.dpk')))
            # fn = filePath  #.split('.')
            gzip.open(filePath + '.dpk', 'wb').write(pk.dumps(dataSave))  # write compressed string
            print('Finished saving!')

        # Save to json file
        if sim.cfg.saveJson:
            # Make it work for Python 2+3 and with Unicode
            print(('Saving output as %s ... ' % (filePath + str(sim.rank) + '.json')))
            # dataSave = utils.replaceDictODict(dataSave)  # not required since json saves as dict
            sim.saveJSON(filePath + str(sim.rank) + '.json', dataSave)
            print('Finished saving!')

        # Save to mat file
        if sim.cfg.saveMat:
            from scipy.io import savemat
            print(('Saving output as %s ... ' % (filePath + '.mat')))
            savemat(filePath + '.mat', utils.tupleToList(utils.replaceNoneObj(dataSave)))  # replace None and {} with [] so it can be saved in .mat format
            print('Finished saving!')

        # Save to HDF5 file (uses the very inefficient hdf5storage module, which supports dicts)
        if sim.cfg.saveHDF5:
            dataSaveUTF8 = utils._dict2utf8(utils.replaceNoneObj(dataSave))  # replace None and {} with [], and convert to utf
            import hdf5storage
            print(('Saving output as %s ... ' % (filePath + '.hdf5')))
            hdf5storage.writes(dataSaveUTF8, filename=filePath + '.hdf5')
            print('Finished saving!')

        # Save to CSV file (currently only saves spikes)
        if sim.cfg.saveCSV:
            if 'simData' in dataSave:
                import csv
                print(('Saving output as %s ... ' % (filePath + '.csv')))
                writer = csv.writer(open(filePath + '.csv', 'w', newline=''))
                for dic in dataSave['simData']:
                    for values in dic:
                        writer.writerow(values)
                print('Finished saving!')

        # Save to Dat file(s)
        if sim.cfg.saveDat:
            traces = sim.cfg.recordTraces
            for ref in list(traces.keys()):
                for cellid in list(saveData[ref].keys()):
                    dat_file_name = '%s_%s.dat' % (ref, cellid)
                    dat_file = open(dat_file_name, 'w')
                    trace = saveData[ref][cellid]
                    print(("Saving %i points of data on: %s:%s to %s" % (len(trace), ref, cellid, dat_file_name)))
                    for i in range(len(trace)):
                        dat_file.write('%s\t%s\n' % ((i * sim.cfg.dt / 1000), trace[i] / 1000))
                    dat_file.close()
            print('Finished saving!')

        # Save timing
        if sim.rank == 0:
            if sim.cfg.timing:
                sim.timing('stop', 'saveTime')
                print(('  Done; saving time = %0.2f s.' % sim.timingData['saveTime']))
            if sim.cfg.timing and sim.cfg.saveTiming:
                import pickle
                with open('timing.pkl', 'wb') as file:
                    pickle.dump(sim.timing, file)

        # clean to avoid mem leaks
        for key in list(dataSave.keys()):
            del dataSave[key]
        del dataSave

    # return full path
    return os.getcwd() + '/' + filePath
def saveData():
    if f.rank == 0:
        timing('start', 'saveTime')
        dataSave = {'netParams': replaceFuncObj(f.net.params), 'simConfig': f.cfg,
                    'simData': f.allSimData, 'netCells': f.net.allCells}
        # dataSave = {'netParams': replaceFuncObj(f.net.params), 'simConfig': f.cfg, 'netCells': f.net.allCells}

        if 'timestampFilename' in f.cfg:  # add timestamp to filename
            if f.cfg['timestampFilename']:
                timestamp = time()
                timestampStr = datetime.fromtimestamp(timestamp).strftime('%Y%m%d_%H%M%S')
                f.cfg['filename'] = f.cfg['filename'] + '-' + timestampStr

        # Save to pickle file
        if f.cfg['savePickle']:
            import pickle
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.pkl'))
            with open(f.cfg['filename'] + '.pkl', 'wb') as fileObj:
                pickle.dump(dataSave, fileObj)
            print('Finished saving!')

        # Save to dpk file
        if f.cfg['saveDpk']:
            import gzip
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.dpk'))
            fn = f.cfg['filename']  # .split('.')
            gzip.open(fn, 'wb').write(pk.dumps(dataSave))  # write compressed string
            print('Finished saving!')

        # Save to json file
        if f.cfg['saveJson']:
            import json
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.json'))
            with open(f.cfg['filename'] + '.json', 'w') as fileObj:
                json.dump(dataSave, fileObj)
            print('Finished saving!')

        # Save to mat file
        if f.cfg['saveMat']:
            from scipy.io import savemat
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.mat'))
            savemat(f.cfg['filename'] + '.mat', replaceNoneObj(dataSave))  # replace None and {} with [] so it can be saved in .mat format
            print('Finished saving!')

        # Save to HDF5 file (uses the very inefficient hdf5storage module, which supports dicts)
        if f.cfg['saveHDF5']:
            dataSaveUTF8 = dict2utf8(replaceNoneObj(dataSave))  # replace None and {} with [], and convert to utf
            import hdf5storage
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.hdf5'))
            hdf5storage.writes(dataSaveUTF8, filename=f.cfg['filename'] + '.hdf5')
            print('Finished saving!')

        # Save to CSV file (currently only saves spikes)
        if f.cfg['saveCSV']:
            import csv
            print('Saving output as %s ... ' % (f.cfg['filename'] + '.csv'))
            writer = csv.writer(open(f.cfg['filename'] + '.csv', 'w', newline=''))
            for dic in dataSave['simData']:
                for values in dic:
                    writer.writerow(values)
            print('Finished saving!')

        # Save to Dat file(s)
        if f.cfg['saveDat']:
            traces = f.cfg['recordTraces']
            for ref in traces.keys():
                for cellid in f.allSimData[ref].keys():
                    dat_file_name = '%s_%s.dat' % (ref, cellid)
                    dat_file = open(dat_file_name, 'w')
                    trace = f.allSimData[ref][cellid]
                    print("Saving %i points of data on: %s:%s to %s" % (len(trace), ref, cellid, dat_file_name))
                    for i in range(len(trace)):
                        dat_file.write('%s\t%s\n' % ((i * f.cfg['dt'] / 1000), trace[i] / 1000))
                    dat_file.close()
            print('Finished saving!')

        # Save timing
        timing('stop', 'saveTime')
        if f.cfg['timing'] and f.cfg['saveTiming']:
            import pickle
            with open('timing.pkl', 'wb') as file:
                pickle.dump(f.timing, file)
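# The HDF5 branch shared by the three saveData variants above reduces to a
# single hdf5storage.writes call on a cleaned dict. A minimal standalone
# version with an illustrative payload and file name:
import hdf5storage

dataSave = {'simConfig': {'dt': 0.025}, 'simData': {'spkt': [1.0, 2.5]}}
hdf5storage.writes(dataSave, filename='sim_output.hdf5')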
def find_representative_locations(paths, param, tech):
    """
    This function reads the masked FLH raster and finds the coordinates and indices of the pixels
    for the user-defined quantiles for each region. It creates a shapefile containing the position
    of those points for each region, and two MAT files with their coordinates and indices.

    :param paths: Dictionary of dictionaries containing path values for FLH MAT files, region statistics, and output paths.
    :type paths: dict
    :param param: Dictionary of dictionaries containing the user-defined quantiles, FLH resolution, and spatial scope.
    :type param: dict
    :param tech: Technology under study.
    :type tech: str

    :return: The shapefile with the locations and the two MAT files for the coordinates and the indices
        are saved directly in the given paths, along with their corresponding metadata in JSON files.
    :rtype: None
    """
    ul.timecheck("Start")
    FLH_mask = hdf5storage.read("FLH_mask", paths[tech]["FLH_mask"])
    quantiles = param["quantiles"]
    res_desired = param["res_desired"]
    filter = pd.read_csv(paths[tech]["Region_Stats"], sep=";", decimal=",", index_col=0).index

    if tech == "WindOff":
        Crd = param["Crd_offshore"]
        GeoRef = param["GeoRef_offshore"]
        # Select only indices in the report
        regions_shp = param["regions_sea"].loc[filter]
        nRegions = len(regions_shp)
        Crd_regions = param["Crd_regions_sea"]
    else:
        Crd = param["Crd_all"]
        GeoRef = param["GeoRef"]
        # Select only indices in the report
        regions_shp = param["regions_land"].loc[filter]
        nRegions = len(regions_shp)
        Crd_regions = param["Crd_regions_land"]

    Ind = ind_merra(Crd_regions, Crd, res_desired)
    reg_ind = np.zeros((nRegions, len(quantiles), 2))
    k = 0
    list_names = []
    list_quantiles = []
    for reg in filter:
        # A_region
        A_region = calc_region(regions_shp.loc[reg], Crd_regions[reg, :], res_desired, GeoRef)

        FLH_reg = A_region * FLH_mask[Ind[reg, 2] - 1:Ind[reg, 0], Ind[reg, 3] - 1:Ind[reg, 1]]
        FLH_reg[FLH_reg == 0] = np.nan
        X = FLH_reg.flatten(order="F")
        I_old = np.argsort(X)

        # Escape the loop if the intersection only yields NaN
        if np.isnan(X).all():
            # do nothing
            continue

        q_rank = 0
        for q in quantiles:
            if tech == "WindOff":
                list_names.append(regions_shp["ISO_Ter1"].loc[reg])
            else:
                # list_names.append(regions_shp["NAME_SHORT"].loc[reg])
                list_names.append(regions_shp["GID_0"].loc[reg])
            list_quantiles.append("q" + str(q))
            if q == 100:
                I = I_old[(len(X) - 1) - sum(np.isnan(X).astype(int))]
            elif q == 0:
                I = I_old[0]
            else:
                I = I_old[int(np.round(q / 100 * (len(X) - 1 - sum(np.isnan(X).astype(int)))))]

            # Convert the flat index to row-column indices
            I, J = ul.ind2sub(FLH_reg.shape, I)
            reg_ind[k, q_rank, :] = np.array([I + Ind[reg, 2], J + Ind[reg, 3]]).astype(int)
            q_rank = q_rank + 1
        k = k + 1

    reg_ind = np.reshape(reg_ind, (-1, 2), "C").astype(int)
    reg_ind = (reg_ind[:, 0] - 1, reg_ind[:, 1] - 1)

    param[tech]["Ind_points"] = reg_ind
    param[tech]["Crd_points"] = ind2crd(reg_ind, Crd, res_desired)
    param[tech]["Crd_points"] = (param[tech]["Crd_points"][0], param[tech]["Crd_points"][1],
                                 list_names, list_quantiles)

    # Format point locations
    points = [(param[tech]["Crd_points"][1][i], param[tech]["Crd_points"][0][i])
              for i in range(0, len(param[tech]["Crd_points"][0]))]

    # Create shapefile
    schema = {"geometry": "Point", "properties": {"NAME_SHORT": "str", "quantile": "str"}}
    with fiona.open(paths[tech]["Locations"], "w", "ESRI Shapefile", schema) as c:
        c.writerecords([{
            "geometry": mapping(Point(points[i])),
            "properties": {"NAME_SHORT": list_names[i], "quantile": list_quantiles[i]}
        } for i in range(0, len(points))])

    hdf5storage.writes({"Ind_points": param[tech]["Ind_points"]},
                       paths[tech]["Locations"][:-4] + "_Ind.mat",
                       store_python_metadata=True, matlab_compatible=True)
    hdf5storage.writes({"Crd_points": param[tech]["Crd_points"]},
                       paths[tech]["Locations"][:-4] + "_Crd.mat",
                       store_python_metadata=True, matlab_compatible=True)
    ul.create_json(
        paths[tech]["Locations"],
        param,
        ["author", "comment", tech, "region_name", "subregions_name", "quantiles"],
        paths,
        ["subregions"],
    )
    print("files saved: " + paths[tech]["Locations"])
    ul.timecheck("End")