def post_process( out_physical, metadata ):
    """
    application: how to handle/save results of minimizer
    input: PhysicalData (solution), list (user-defined output of minim)
    output: None
    """
    out_physical.archive( 'final_solution.ncf' )
    with open( os.path.join( archive.get_archive_path(), 'ans_details.pickle' ), 'w' ) as f:
        pickle.dump( metadata, f )
    return None
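# Illustrative usage sketch, not part of the original source: how post_process
# might be called once the minimizer returns. `min_vector` and `min_metadata`
# are placeholder names for the minimizer's solution vector and its
# user-defined output list.
def _example_post_process_usage(min_vector, min_metadata):
    from fourdvar._transform import transform
    import fourdvar.datadef as d
    # map the optimal UnknownData-space vector back into physical space
    out_physical = transform(d.UnknownData(min_vector), d.PhysicalData)
    # archive the solution netCDF and pickle the minimizer details
    post_process(out_physical, min_metadata)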
def archive(self, dirname=None):
    """
    extension: save copy of files to archive/experiment directory
    input: string or None
    output: None

    notes: this will overwrite any clash of namespace.
    if input is None file will write to experiment directory
    else it will create dirname in experiment directory and save there.
    """
    save_path = get_archive_path()
    if dirname is not None:
        save_path = os.path.join(save_path, dirname)
    ensure_path(save_path, inc_file=False)
    for record in self.file_data.values():
        source = record['actual']
        dest = os.path.join(save_path, record['archive'])
        ncf.copy_compress(source, dest)
    return None
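# Usage sketch (illustrative only, not from the original source): archiving the
# file-backed data of an instance into a per-iteration sub-directory of the
# experiment archive. `model_data` and the directory name are placeholders.
def _example_archive_to_subdir(model_data, iteration):
    # files are copied (compressed) into <archive_path>/iter_NNNN/,
    # which ensure_path creates if it does not already exist.
    model_data.archive(dirname='iter_{:04d}'.format(iteration))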
def archive(self, name=None, force_lite=False):
    """
    extension: save a copy of data to archive/experiment directory
    input: string or None, boolean
    output: None

    notes: this will overwrite any clash in namespace.
    if input is None file will use default name.
    output file is in acceptable format for from_file method.
    force_lite will archive obs-lite file (no weight_grid).
    """
    save_path = get_archive_path()
    if name is None:
        name = self.archive_name
    save_path = os.path.join(save_path, name)

    domain = deepcopy(self.grid_attr)
    domain['SDATE'] = np.int32(dt.replace_date('<YYYYMMDD>', dt.start_date))
    domain['EDATE'] = np.int32(dt.replace_date('<YYYYMMDD>', dt.end_date))
    if force_lite is True:
        domain['is_lite'] = True
    else:
        domain['is_lite'] = self.is_lite

    obs_list = []
    for i in range(self.length):
        odict = deepcopy(self.misc_meta[i])
        odict['value'] = self.value[i]
        odict['uncertainty'] = self.uncertainty[i]
        odict['offset_term'] = self.offset_term[i]
        odict['lite_coord'] = self.lite_coord[i]
        if domain['is_lite'] is False:
            odict['weight_grid'] = self.weight_grid[i]
        obs_list.append(odict)

    archive_list = [domain] + obs_list
    fh.save_list(archive_list, save_path)
    return None
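# Usage sketch (illustrative only): saving both a full and an obs-lite copy of
# an ObservationData instance. `obs` is a placeholder for a constructed
# instance; the file names are arbitrary.
def _example_obs_archive(obs):
    # full archive, keeping weight_grid when the instance is not already lite
    obs.archive('observed_full.pickle')
    # obs-lite archive: weight_grid is dropped to keep the file small
    obs.archive('observed_lite.pickle', force_lite=True)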
o_unc = np.array(d.ObservationData.uncertainty)
obs_pert = d.ObservationData(np.random.normal(o_val, o_unc))

unk = transform(phys_true, d.UnknownData)
unk_pert = d.UnknownData(np.random.normal(unk.get_vector(), 1.0))
phys_pert = transform(unk_pert, d.PhysicalData)

phys_true.archive(prior_true_archive)
phys_pert.archive(prior_pert_archive)
obs_true.archive(obs_true_archive)
obs_pert.archive(obs_pert_archive)

cmaq.wipeout_fwd()

# Output the target cost value for this test
bg_path = os.path.join(archive.get_archive_path(), prior_true_archive)
user.background = d.PhysicalData.from_file(bg_path)
obs_path = os.path.join(archive.get_archive_path(), obs_pert_archive)
user.observed = d.ObservationData.from_file(obs_path)
init_vec = transform(user.background, d.UnknownData).get_vector()
cost = main.cost_func(init_vec)
logger.info('No. obs = {:}'.format(o_val.size))
logger.info('Target cost = {:}'.format(cost))

#replace current background/prior and observations with perturbed versions.
bg_path = os.path.join(archive.get_archive_path(), prior_pert_archive)
user.background = d.PhysicalData.from_file(bg_path)
obs_path = os.path.join(archive.get_archive_path(), obs_pert_archive)
user.observed = d.ObservationData.from_file(obs_path)

#run minimizer
import time

import numpy as np
import cPickle as pickle

import context
import fourdvar.user_driver as user
import fourdvar.datadef as d
from fourdvar._transform import transform
import fourdvar.util.archive_handle as archive
import fourdvar.params.archive_defn as archive_defn
import fourdvar.util.cmaq_handle as cmaq

archive_defn.experiment = 'tmp_grad_verbose'
archive_defn.desc_name = ''

archive_path = archive.get_archive_path()
print 'saving results in:\n{}'.format(archive_path)

print 'get observations in ObservationData format'
st = time.time()
observed = user.get_observed()
print 'completed in {}s'.format(int(time.time() - st))
observed.archive('observed.pickle')
print 'archived.'

print 'get prior in PhysicalData format'
st = time.time()
prior_phys = user.get_background()
print 'completed in {}s'.format(int(time.time() - st))
prior_phys.archive('prior.ncf')
print 'archived.'
def archive(self, path=None):
    """
    extension: save a copy of data to archive/experiment directory
    input: string or None
    output: None

    notes: this will overwrite any clash in namespace.
    if input is None file will write default archive_name.
    output is a netCDF file compatible with from_file method.
    """
    unc = lambda spc: spc + '_UNC'

    save_path = get_archive_path()
    if path is None:
        path = self.archive_name
    save_path = os.path.join(save_path, path)
    if os.path.isfile(save_path):
        os.remove(save_path)

    #construct netCDF file
    attr_dict = {'SDATE': np.int32(dt.replace_date('<YYYYDDD>', dt.start_date)),
                 'EDATE': np.int32(dt.replace_date('<YYYYDDD>', dt.end_date))}
    minute, second = divmod(self.tsec, 60)
    hour, minute = divmod(minute, 60)
    day, hour = divmod(hour, 24)
    hms = int('{:02}{:02}{:02}'.format(hour, minute, second))
    attr_dict['TSTEP'] = np.array([np.int32(day), np.int32(hms)])
    var_list = ''.join(['{:<16}'.format(s) for s in self.spcs])
    attr_dict['VAR-LIST'] = var_list
    dim_dict = {'ROW': self.nrows, 'COL': self.ncols}
    root = ncf.create(path=save_path, attr=attr_dict, dim=dim_dict, is_root=True)

    if inc_icon is True:
        icon_dim = {'LAY': self.nlays_icon}
        icon_var = {}
    emis_dim = {'LAY': self.nlays_emis, 'TSTEP': None}
    emis_var = {}

    for spc in self.spcs:
        if inc_icon is True:
            icon_var[spc] = ('f4', ('LAY', 'ROW', 'COL'), self.icon[spc])
            icon_var[unc(spc)] = ('f4', ('LAY', 'ROW', 'COL'), self.icon_unc[spc])
        emis_var[spc] = ('f4', ('TSTEP', 'LAY', 'ROW', 'COL'), self.emis[spc])
        emis_var[unc(spc)] = ('f4', ('TSTEP', 'LAY', 'ROW', 'COL'), self.emis_unc[spc])

    if inc_icon is True:
        ncf.create(parent=root, name='icon', dim=icon_dim, var=icon_var, is_root=False)
    ncf.create(parent=root, name='emis', dim=emis_dim, var=emis_var, is_root=False)
    root.close()
    return None
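# Usage sketch (illustrative only): archiving a PhysicalData instance and
# re-loading it with the from_file method mentioned in the docstring above.
# `phys` is a placeholder for a constructed PhysicalData instance.
def _example_physical_archive_roundtrip(phys):
    import os
    import fourdvar.datadef as d
    import fourdvar.util.archive_handle as archive
    phys.archive('prior.ncf')
    saved_path = os.path.join(archive.get_archive_path(), 'prior.ncf')
    return d.PhysicalData.from_file(saved_path)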