def call_reformat(args, log_path, exe, out_file, dependency=None):
    """Reformat outputs using the script provided.

    Args:
    :obj args: Arguments of the script (argparse-like namespace).
    :str log_path: Directory in which batch log/error files are written.
    :str exe: Path of the reformatting executable to run.
    :str out_file: File passed to the executable as its first argument.
    :obj dependency: Optional batch job ID this job should wait upon.

    Returns -1 for a dry run, the queued job ID in batch mode, or None
    after a successful interactive call. Raises OracError on failure."""
    import pyorac.local_defaults as defaults

    from pyorac.colour_print import colour_print
    from pyorac.definitions import OracError, COLOURING
    from subprocess import check_call, check_output, CalledProcessError

    # Optionally print command and driver file contents to StdOut
    if args.verbose or args.script_verbose or args.dry_run:
        colour_print('{} {} 1 <<<'.format(exe, out_file), COLOURING['header'])

    if args.dry_run:
        return -1

    job_name = args.File.job_name(args.revision, 'format')

    if not args.batch:
        try:
            check_call([exe, out_file, "1"])
        except CalledProcessError as err:
            # Chain the original exception so the root cause isn't lost
            raise OracError('{:s} failed with error code {:d}. {}'.format(
                ' '.join(err.cmd), err.returncode, err.output)) from err

    else:
        try:
            # Collect batch settings from defaults, command line, and script
            batch_params = defaults.batch_values.copy()
            batch_params['job_name'] = job_name
            batch_params['log_file'] = os.path.join(log_path, job_name + '.log')
            batch_params['err_file'] = os.path.join(log_path, job_name + '.err')
            batch_params['duration'] = '01:00'
            batch_params['ram'] = 5000
            batch_params['procs'] = 1
            if dependency is not None:
                batch_params['depend'] = dependency

            # Command-line settings override the values set above
            batch_params.update(dict(args.batch_settings))

            # Form batch queue command and call batch queuing system
            cmd = defaults.batch.list_batch(batch_params,
                                            exe=[exe, out_file, "1"])

            if args.verbose or args.script_verbose:
                colour_print(' '.join(cmd), COLOURING['header'])
            out = check_output(cmd, universal_newlines=True)

            # Parse job ID # and return it to the caller
            jid = defaults.batch.parse_out(out, 'ID')
            return jid
        except CalledProcessError as err:
            # Report the queuing system's output instead of discarding it
            raise OracError('Failed to queue job {}. {}'.format(
                exe, err.output)) from err
        except SyntaxError as err:
            raise OracError(str(err)) from err
def build_preproc_driver(args) -> str:
    """Prepare a driver file for the preprocessor.

    Locates the ancillary inputs (NISE snow, surface reflectance/BRDF,
    emissivity, NWP meteorology, Ocean Colour CCI), probes the local
    NetCDF, RTTOV, and git installations for version strings, and
    assembles the multi-line driver text read by the ORAC preprocessor.

    Args:
    :obj args: Parsed script arguments; must carry an ``args.File``
        object with sensor/time/duration attributes (presumably a
        pyorac FileName — confirm against the caller).

    Returns the driver file contents as one string. Raises FileMissing
    when a required ancillary file cannot be found, BadValue for an
    unrecognised ``nwp_flag``, and OracError when ncdump is unusable."""
    from itertools import product
    from re import search
    from subprocess import CalledProcessError, check_output, STDOUT
    from uuid import uuid4

    from pyorac.definitions import BadValue
    from pyorac.util import (build_orac_library_path, extract_orac_libraries,
                             read_orac_library_file)

    # Locate the L1B radiance and geolocation inputs
    l1b = _glob_dirs(args.in_dir, args.File.l1b, 'L1B file')
    geo = _glob_dirs(args.in_dir, args.File.geo, 'geolocation file')

    # Select NISE file
    if args.use_ecmwf_snow or args.no_snow_corr:
        nise = ''
    else:
        # There are usually too many files in this directory to glob quickly.
        # Instead, guess where it is. If your search is failing here, but the
        # appropriate file is present, you need to add a format to one of
        # these loops that finds your file.
        nise_locations = (
            'NISE.005/%Y.%m.%d',
            'NISE.004/%Y.%m.%d',
            'NISE.002/%Y.%m.%d',
            'NISE.001/%Y.%m.%d',
            '%Y',
            '%Y.%m.%d',
            # NOTE(review): '%Y_%m_d' lacks a '%' before 'd' — looks like a
            # typo for '%Y_%m_%d'; confirm before changing.
            '%Y_%m_d',
            '%Y-%m-%d',
            ''
        )
        nise_formats = (
            'NISE_SSMISF18_%Y%m%d.HDFEOS',
            'NISE_SSMISF17_%Y%m%d.HDFEOS',
            'NISE_SSMIF13_%Y%m%d.HDFEOS',
        )
        # Try every location/filename combination until one exists on disk
        for nise_location, nise_format in product(nise_locations,
                                                  nise_formats):
            nise = args.File.time.strftime(os.path.join(
                args.nise_dir, nise_location, nise_format
            ))
            if os.path.isfile(nise):
                break
        else:
            raise FileMissing('NISE', args.nise_dir)

    # Select previous surface reflectance and emissivity files
    if args.swansea:
        alb = _date_back_search(args.swansea_dir, args.File.time,
                                'SW_SFC_PRMS_%m.nc', 'years')
        brdf = None
    else:
        # Try MODIS collections 061, 006, 005 in order of preference
        for ver in (61, 6, 5):
            try:
                alb = _date_back_search(
                    args.mcd43c3_dir, args.File.time,
                    f'MCD43C3.A%Y%j.{ver:03d}.*.hdf', 'days'
                )
                brdf = None if args.lambertian else _date_back_search(
                    args.mcd43c1_dir, args.File.time,
                    f'MCD43C1.A%Y%j.{ver:03d}.*.hdf', 'days'
                )
                break
            except FileMissing:
                pass
        else:
            raise FileMissing('MODIS albedo', args.mcd43c3_dir)

    if args.use_modis_emis:
        emis = None
    elif args.use_camel_emis:
        emis = _date_back_search(
            args.camel_dir, args.File.time,
            'CAM5K30EM_emis_%Y%m_V???.nc', 'years'
        )
    else:
        emis = _date_back_search(
            args.emis_dir, args.File.time,
            'global_emis_inf10_monthFilled_MYD11C3.A%Y%j.*nc', 'days'
        )

    # Select ECMWF files; bound the granule's central time between
    # forecast/analysis timesteps
    bounds = _bound_time(args.File.time + args.File.dur // 2)
    if args.nwp_flag == 0:
        ecmwf_nlevels = 91
        raise NotImplementedError('Filename syntax for --nwp_flag 0 unknown')
    elif args.nwp_flag == 4:
        ecmwf_nlevels = 60
        ggam = _form_bound_filenames(bounds, args.ggam_dir,
                                     'ggam%Y%m%d%H%M.grb')
        ggas = _form_bound_filenames(bounds, args.ggas_dir,
                                     'ggas%Y%m%d%H%M.nc')
        spam = _form_bound_filenames(bounds, args.spam_dir,
                                     'spam%Y%m%d%H%M.grb')
    elif args.nwp_flag == 3:
        ecmwf_nlevels = 60
        raise NotImplementedError('Filename syntax for --nwp_flag 3 unknown')
    elif args.nwp_flag == 1:
        ecmwf_nlevels = 137
        # Try each known filename convention; remember the last failure so
        # it can be re-raised if none of them match
        for form, ec_hour in (('C3D*%m%d%H*.nc', 3),
                              ('ECMWF_OPER_%Y%m%d_%H+00.nc', 6),
                              ('ECMWF_ERA5_%Y%m%d_%H_0.5.nc', 6),
                              ('ECMWF_ERA_%Y%m%d_%H_0.5.nc', 6),
                              ('ECMWF_ERA_%Y%m%d_%H+00_0.5.nc', 6)):
            try:
                bounds = _bound_time(args.File.time + args.File.dur // 2,
                                     ec_hour)
                ggam = _form_bound_filenames(bounds, args.ecmwf_dir, form)
                break
            except FileMissing as tmp_err:
                err = tmp_err
        else:
            raise err
        ggas = ["", ""]
        spam = ["", ""]
    elif args.nwp_flag == 2:
        ecmwf_nlevels = 137
        # Interpolation is done in the code
        ggam = [args.ecmwf_dir, args.ecmwf_dir]
        ggas = ["", ""]
        spam = ["", ""]
    else:
        raise BadValue('nwp_flag', args.nwp_flag)

    if args.use_oc:
        # Try OC-CCI product versions from newest to oldest
        for oc_version in (5.0, 4.2, 4.1, 4.0, 3.1, 3.0, 2.0, 1.0):
            occci = args.File.time.strftime(os.path.join(
                args.occci_dir, 'ESACCI-OC-L3S-IOP-MERGED-1M_MONTHLY'
                f'_4km_GEO_PML_OCx_QAA-%Y%m-fv{oc_version:.1f}.nc'
            ))
            if os.path.isfile(occci):
                break
        else:
            raise FileMissing('Ocean Colour CCI', occci)
    else:
        occci = ''

    # ------------------------------------------------------------------------

    if args.uuid:
        uid = str(uuid4())
    else:
        uid = 'n/a'

    # Expose the ORAC libraries to the subprocess calls below
    libs = read_orac_library_file(args.orac_lib)
    lib_list = extract_orac_libraries(libs)
    os.environ["LD_LIBRARY_PATH"] = build_orac_library_path(lib_list=lib_list)

    # Determine current time
    production_time = datetime.now().strftime("%Y%m%d%H%M%S")

    # Determine NCDF version from command line by locating an ncdump
    # binary adjacent to one of the linked libraries
    for fdr in lib_list:
        ncdf_exe = os.path.join(fdr, "..", "bin", "ncdump")
        try:
            tmp0 = check_output(ncdf_exe, stderr=STDOUT,
                                universal_newlines=True)
        except FileNotFoundError:
            continue
        except CalledProcessError:
            raise OracError('ncdump is non-functional.')

        mat0 = search(r'netcdf library version (.+?) of', tmp0)
        if mat0:
            ncdf_version = mat0.group(1)
        else:
            ncdf_version = 'n/a'
            warnings.warn('Output formatting of ncdump may have changed.',
                          OracWarning, stacklevel=2)
        break
    else:
        raise OracError('NetCDF lib improperly built as ncdump not present. '
                        'LD_LIBRARY_PATH=' + os.environ["LD_LIBRARY_PATH"])

    # Fetch ECMWF version from header of NCDF file
    if 3 <= args.nwp_flag <= 4:
        try:
            # Prefer whichever of ggam/ggas is a NetCDF file
            ecmwf_check_file = ggam[0] if ggam[0].endswith('nc') else ggas[0]
            tmp1 = check_output([ncdf_exe, "-h", ecmwf_check_file],
                                universal_newlines=True)
        except OSError:
            raise FileMissing('ECMWF ggas file', ggas[0])
        mat1 = search(r':history = "(.+?)" ;', tmp1)
        if mat1:
            ecmwf_version = mat1.group(1)
        else:
            ecmwf_version = 'n/a'
            warnings.warn('Header of ECMWF file may have changed.',
                          OracWarning, stacklevel=2)
    elif args.nwp_flag == 2:
        ecmwf_version = 'ERA5'
    else:
        # TODO: Fetch version information from GFS files
        ecmwf_version = 'n/a'

    # RTTOV version number from small executable
    try:
        rttov_version_exe = os.path.join(args.orac_dir, "common",
                                         "rttov_version")
        # Fall back to the installation root if the common/ copy is absent
        if not os.path.isfile(rttov_version_exe):
            rttov_version_exe = os.path.join(args.orac_dir, "rttov_version")
        rttov_version = check_output(
            rttov_version_exe, universal_newlines=True
        ).strip()
    except CalledProcessError:
        rttov_version = 'n/a'
        warnings.warn('RTTOV library version number unavailable.',
                      OracWarning, stacklevel=2)

    # Fetch GIT version; chdir into the repository and always restore cwd
    cwd = os.getcwd()
    try:
        os.chdir(os.path.join(args.orac_dir, 'pre_processing'))
        tmp3 = check_output(["git", "--version"], universal_newlines=True)
        mat3 = search('git version (.+?)\n', tmp3)
        git_version = mat3.group(1)
    except (FileNotFoundError, CalledProcessError, AttributeError):
        git_version = 'n/a'
        warnings.warn('Unable to call git.', OracWarning, stacklevel=2)
    finally:
        os.chdir(cwd)

    file_version = f'R{args.File.revision}'
    chunk_flag = False  # File chunking no longer required
    assume_full_paths = True  # We pass absolute paths
    cldtype = not args.skip_cloud_type
    include_full_brdf = not args.lambertian

    # ------------------------------------------------------------------------

    # Write driver file (one entry per line, in the order the
    # preprocessor reads them)
    driver = f"""{args.File.sensor}
{l1b}
{geo}
{args.usgs_file}
{ggam[0]}
{args.coef_dir}
{args.atlas_dir}
{nise}
{alb}
{brdf}
{emis}
{args.dellon}
{args.dellat}
{args.out_dir}
{args.limit[0]}
{args.limit[1]}
{args.limit[2]}
{args.limit[3]}
{ncdf_version}
{args.cfconvention}
{args.institute}
{args.processor}
{args.email}
{args.url}
{file_version}
{args.references}
{args.history}
{args.summary}
{args.keywords}
{args.comments}
{args.project}
{args.license}
{uid}
{production_time}
{args.calib_file}
{args.nwp_flag}
{ggas[0]}
{spam[0]}
{chunk_flag}
{args.day_flag}
{args.verbose}
-
{assume_full_paths}
{include_full_brdf}
{rttov_version}
{ecmwf_version}
{git_version}
ECMWF_TIME_INT_METHOD={args.single_ecmwf}
ECMWF_PATH_2={ggam[1]}
ECMWF_PATH2_2={ggas[1]}
ECMWF_PATH3_2={spam[1]}
USE_ECMWF_SNOW_AND_ICE={args.use_ecmwf_snow}
USE_MODIS_EMIS_IN_RTTOV={args.use_modis_emis}
ECMWF_NLEVELS={ecmwf_nlevels}
USE_L1_LAND_MASK={args.l1_land_mask}
USE_OCCCI={args.use_oc}
OCCCI_PATH={occci}
DISABLE_SNOW_ICE_CORR={args.no_snow_corr}
DO_CLOUD_EMIS={args.cloud_emis}
DO_IRONLY={args.ir_only}
DO_CLDTYPE={cldtype}
USE_CAMEL_EMIS={args.use_camel_emis}
USE_SWANSEA_CLIMATOLOGY={args.swansea}"""

    # Optional channel subsetting
    if args.available_channels is not None:
        driver += "\nN_CHANNELS={}".format(len(args.available_channels))
        driver += "\nCHANNEL_IDS={}".format(
            ','.join(str(k) for k in args.available_channels)
        )

    # Append user-supplied extra driver lines for the "pre" stage
    for part, filename in args.extra_lines:
        if part == "pre" and filename != "":
            try:
                with open(filename, "r") as extra:
                    driver += "\n" + extra.read()
            except IOError:
                raise FileMissing('extra_lines_file', filename)

    # Append individual key=value overrides for the "pre" stage
    for sec, key, val in args.additional:
        if sec == "pre":
            driver += f"\n{key}={val}"

    if args.File.predef and not args.no_predef:
        driver += f"""
USE_PREDEF_LSM=False
EXT_LSM_PATH={args.prelsm_file}
USE_PREDEF_GEO=False
EXT_GEO_PATH={args.pregeo_file}"""

    if args.product_name is not None:
        driver += f"\nPRODUCT_NAME={args.product_name}"

    return driver
# Regression-test driver loop. NOTE(review): this chunk is truncated — the
# final `except Regression as err:` has no suite in the visible source, so
# the handler body (and anything after it) is missing here.
if not orig_args.benchmark:
    # Bump the revision so outputs don't collide with the previous run
    orig_args.revision += 1

try:
    for test in orig_args.tests:
        colour_print(test, COLOURING['header'])

        # Work on a copy so per-test mutations don't leak between tests
        args = deepcopy(orig_args)

        # Set filename to be processed and output folder
        args.out_dir = os.path.join(base_out_dir, test)
        try:
            args.target, args.limit, args.preset_settings = \
                REGRESSION_TESTS[test]
        except KeyError:
            raise OracError("Invalid regression test for given phases.")
        args.preset_settings += "_" + args.test_type

        jid, out_file = process_all(args)
        log_path = os.path.join(args.out_dir, log_dir)

        # Check for regressions
        if not args.benchmark and not args.dry_run:
            inst = FileName(args.out_dir, out_file)
            if not args.batch:
                args = check_args_common(args)
                args = check_args_preproc(args)
                try:
                    run_regression(inst)
                except Regression as err:
# Top-level dispatch: determine which ORAC processing stage applies to the
# target file and run it, reporting failures via colour_print.
args = check_args_common(args)
args = check_args_cc4cl(args)
log_path = os.path.join(args.out_dir, log_dir)
try:
    inst = FileName(args.in_dir, args.target)

    if inst.oractype in ('primary', 'secondary'):
        # Target is already an ORAC output: run post-processing
        jid, _ = process_post(args, log_path)
    elif inst.oractype is None:
        # Target is not recognised as an ORAC product: run preprocessing
        jid, _ = process_pre(args, log_path)
    elif inst.oractype in ('alb', 'clf', 'config', 'geo', 'loc', 'lsf',
                           'lwrtm', 'msi', 'prtm', 'swrtm'):
        # Target is a preprocessor output: run the main processor
        jid, _ = process_main(args, log_path)
    else:
        raise OracError("Could not determine processing type. Please pass "
                        "the filename of a valid ORAC input.")

    # In batch mode the process_* calls return a queue job ID
    if args.script_verbose and args.batch:
        print("Job queued with ID {}".format(jid))

except OracError as err:
    colour_print('ERROR) ' + str(err), COLOURING['error'])
except KeyboardInterrupt:
    colour_print('Execution halted by user.', COLOURING['error'])
def call_exe(args, exe, driver, values=None):
    """Call an ORAC executable, managing the necessary driver file.

    Args:
    :list args: Arguments of the script.
    :str exe: Name of the executable.
    :str driver: Contents of the driver file to pass.
    :dict values: Arguments for the batch queueing system.

    Returns True after a successful interactive run, the job ID when
    queued, or None for a dry run. Raises OracError on failure."""
    import pyorac.local_defaults as defaults

    from pyorac.colour_print import colour_print
    from pyorac.definitions import OracError, COLOURING
    from subprocess import check_call, check_output, CalledProcessError
    from tempfile import mkstemp
    from time import time

    # Avoid the mutable-default-argument pitfall; None means "no extras"
    if values is None:
        values = {}

    # Optionally print command and driver file contents to StdOut
    if args.verbose or args.script_verbose or args.dry_run:
        colour_print(exe + ' <<<', COLOURING['header'])
        colour_print(driver, COLOURING['text'])

    if args.dry_run:
        return

    # Write driver file; the context manager closes the descriptor even
    # if the write fails
    (fd, driver_file) = mkstemp('.driver', os.path.basename(exe) + '.',
                                args.out_dir, True)
    with os.fdopen(fd, "w") as f:
        f.write(driver)

    if not args.batch:
        # Form processing environment
        os.environ["LD_LIBRARY_PATH"] = build_orac_library_path()

        # Define a directory for EMOS to put its gridding
        try:
            os.environ["PPDIR"] = args.emos_dir
            os.environ["OPENBLAS_NUM_THREADS"] = "1"
            os.environ["OMP_NUM_THREADS"] = str(args.procs)
        except AttributeError:
            # args.emos_dir is not defined for every configuration
            pass

        # Call program
        try:
            st = time()
            check_call([exe, driver_file])
            if args.timing:
                colour_print(exe + ' took {:f}s'.format(time() - st),
                             COLOURING['timing'])
            return True
        except CalledProcessError as err:
            # Chain the original exception so the root cause isn't lost
            raise OracError('{:s} failed with error code {:d}. {}'.format(
                ' '.join(err.cmd), err.returncode, err.output)) from err
        finally:
            if not args.keep_driver:
                os.remove(driver_file)
            elif args.verbose or args.script_verbose:
                print("Driver file stored at " + driver_file)

    else:
        # Write temporary script to call executable
        (gd, script_file) = mkstemp('.sh', os.path.basename(exe) + '.',
                                    args.out_dir, True)
        with os.fdopen(gd, "w") as g:
            g.write(defaults.batch_script)

            # Define processing environment
            libs = read_orac_libraries(args.orac_lib)
            g.write("export LD_LIBRARY_PATH=" +
                    build_orac_library_path(libs) + "\n")
            g.write("export OPENBLAS_NUM_THREADS=1\n")
            try:
                g.write("export PPDIR=" + args.emos_dir + "\n")
            except AttributeError:
                pass
            defaults.batch.add_openmp_to_script(g)

            # Call executable and give the script permission to execute
            g.write(exe + ' ' + driver_file + "\n")
            if not args.keep_driver:
                g.write("rm -f " + driver_file + "\n")
            g.write("rm -f " + script_file + "\n")
        os.chmod(script_file, 0o700)

        try:
            # Collect batch settings from defaults, command line, and script
            batch_params = defaults.batch_values.copy()
            batch_params.update(values)
            batch_params.update(dict(args.batch_settings))

            batch_params['procs'] = args.procs

            # Form batch queue command and call batch queuing system
            cmd = defaults.batch.ListBatch(batch_params, exe=script_file)

            if args.verbose or args.script_verbose:
                colour_print(' '.join(cmd), COLOURING['header'])
            out = check_output(cmd, universal_newlines=True)

            # Parse job ID # and return it to the caller
            jid = defaults.batch.ParseOut(out, 'ID')
            return jid
        except CalledProcessError as err:
            # Report the queuing system's output instead of discarding it
            raise OracError('Failed to queue job {}. {}'.format(
                exe, err.output)) from err
        except SyntaxError as err:
            raise OracError(str(err)) from err
def build_preproc_driver(args) -> str:
    """Prepare a driver file for the preprocessor.

    Locates ancillary inputs (NISE snow, surface reflectance/BRDF,
    emissivity, ECMWF meteorology, Ocean Colour CCI), probes the local
    NetCDF/RTTOV/git installations for version strings, and assembles
    the multi-line driver text read by the ORAC preprocessor.

    Args:
    :obj args: Parsed script arguments; must carry an ``args.File``
        object with sensor/time/duration attributes.

    Returns the driver contents as one string. Raises FileMissing when a
    required ancillary file is absent, BadValue for an unrecognised
    ``ecmwf_flag``, and OracError when ncdump is unavailable."""
    from pyorac.definitions import FileName, BadValue
    from pyorac.util import build_orac_library_path, read_orac_libraries
    from re import search
    from subprocess import check_output, STDOUT
    from uuid import uuid4

    # Locate the L1B radiance and geolocation inputs
    file = _glob_dirs(args.in_dir, args.File.l1b, 'L1B file')
    geo = _glob_dirs(args.in_dir, args.File.geo, 'geolocation file')

    # Select NISE file
    if args.use_ecmwf_snow or args.no_snow_corr:
        nise = ''
    else:
        # Try each known directory/filename convention until one exists
        for form in ('NISE.004/%Y.%m.%d/NISE_SSMISF17_%Y%m%d.HDFEOS',
                     'NISE.002/%Y.%m.%d/NISE_SSMIF13_%Y%m%d.HDFEOS',
                     '%Y/NISE_SSMIF13_%Y%m%d.HDFEOS',
                     '%Y/NISE_SSMIF17_%Y%m%d.HDFEOS'):
            nise = args.File.time.strftime(os.path.join(args.nise_dir, form))
            if os.path.isfile(nise):
                break
        else:
            raise FileMissing('NISE', nise)

    # Select previous surface reflectance and emissivity files
    if args.swansea:
        alb = _date_back_search(args.swansea_dir, args.File.time,
                                'SW_SFC_PRMS_%m.nc')
        brdf = None
    else:
        alb = _date_back_search(args.mcd43c3_dir, args.File.time,
                                'MCD43C3.A%Y%j.*.hdf')
        brdf = None if args.lambertian else _date_back_search(
            args.mcd43c1_dir, args.File.time, 'MCD43C1.A%Y%j.*.hdf')

    emis = None if args.use_modis_emis else _date_back_search(
        args.emis_dir, args.File.time,
        'global_emis_inf10_monthFilled_MYD11C3.A%Y%j.041.nc')

    # Select ECMWF files; bound the granule's central time between
    # analysis timesteps
    bounds = _bound_time(args.File.time + args.File.dur // 2)
    if args.ecmwf_flag == 0:
        # NOTE(review): this branch sets only ggam — ggas/spam are left
        # unbound and the driver below reads ggas[0]/spam[0], which would
        # raise NameError. Confirm whether flag 0 is actually reachable.
        ggam = _form_bound_filenames(bounds, args.ggam_dir,
                                     'ERA_Interim_an_%Y%m%d_%H+00.nc')
    elif args.ecmwf_flag == 1:
        ggam = _form_bound_filenames(bounds, args.ggam_dir,
                                     'ggam%Y%m%d%H%M.nc')
        ggas = _form_bound_filenames(bounds, args.ggas_dir,
                                     'ggas%Y%m%d%H%M.nc')
        spam = _form_bound_filenames(bounds, args.spam_dir,
                                     'gpam%Y%m%d%H%M.nc')
    elif args.ecmwf_flag == 2:
        ggam = _form_bound_filenames(bounds, args.ggam_dir,
                                     'ggam%Y%m%d%H%M.grb')
        ggas = _form_bound_filenames(bounds, args.ggas_dir,
                                     'ggas%Y%m%d%H%M.nc')
        spam = _form_bound_filenames(bounds, args.spam_dir,
                                     'spam%Y%m%d%H%M.grb')
    elif args.ecmwf_flag == 3:
        raise NotImplementedError('Filename syntax for --ecmwf_flag 3 unknown')
    elif args.ecmwf_flag == 4:
        # Try each filename convention; remember the last failure so it
        # can be re-raised if none match
        for form, hr in (('C3D*%m%d%H*.nc', 3),
                         ('ECMWF_OPER_%Y%m%d_%H+00.nc', 6),
                         ('ECMWF_ERA_%Y%m%d_%H+00_0.5.nc', 6)):
            try:
                bounds = _bound_time(args.File.time + args.File.dur // 2,
                                     timedelta(hours=hr))
                ggam = _form_bound_filenames(bounds, args.ggam_dir, form)
                break
            except FileMissing as e:
                err = e
        else:
            raise err
        # All fields live in the same files for this flag
        ggas = ggam
        spam = ggam
    else:
        raise BadValue('ecmwf_flag', args.ecmwf_flag)

    if not args.skip_ecmwf_hr:
        #hr_ecmwf = _form_bound_filenames(bounds, args.hr_dir,
        #                                 'ERA_Interim_an_%Y%m%d_%H+00_HR.grb')
        # These files don't zero-pad the hour for some reason
        bounds = _bound_time(args.File.time + args.File.dur // 2,
                             timedelta(hours=6))
        hr_ecmwf = [time.strftime(os.path.join(
            args.hr_dir, 'ERA_Interim_an_%Y%m%d_') +
            '{:d}+00_HR.grb'.format(time.hour * 100)) for time in bounds]
        # NOTE(review): this fallback builds a list identical to the one
        # above (same pattern, same time.hour * 100) — it looks like a
        # copy-paste error where an alternative format was intended.
        if not os.path.isfile(hr_ecmwf[0]):
            hr_ecmwf = [time.strftime(os.path.join(
                args.hr_dir, 'ERA_Interim_an_%Y%m%d_') +
                '{:d}+00_HR.grb'.format(time.hour * 100)) for time in bounds]
        for f in hr_ecmwf:
            if not os.path.isfile(f):
                raise FileMissing('HR ECMWF file', f)
    else:
        hr_ecmwf = ['', '']

    occci = args.File.time.strftime(os.path.join(
        args.occci_dir, 'ESACCI-OC-L3S-IOP-MERGED-1M_MONTHLY'
        '_4km_GEO_PML_OCx_QAA-%Y%m-fv3.0.nc'))

    #------------------------------------------------------------------------

    if args.uuid:
        uid = str(uuid4())
    else:
        uid = 'n/a'

    # Add NetCDF library to path so the following calls work
    libs = read_orac_libraries(args.orac_lib)
    try:
        # libs["NCDFLIB"][:-4] strips a trailing '/lib' to reach the
        # installation root — assumes that suffix; confirm against the
        # library file format.
        os.environ["PATH"] = os.path.join(libs["NCDFLIB"][:-4], 'bin:') + \
            os.environ["PATH"]
    except KeyError:
        pass
    os.environ["LD_LIBRARY_PATH"] = build_orac_library_path()

    # Determine current time
    production_time = datetime.now().strftime("%Y%m%d%H%M%S")

    # Determine NCDF version from command line
    try:
        tmp0 = check_output("ncdump", stderr=STDOUT, universal_newlines=True)
    except OSError:
        raise OracError('NetCDF lib improperly built as ncdump not present.')
    m0 = search(r'netcdf library version (.+?) of', tmp0)
    if m0:
        ncdf_version = m0.group(1)
    else:
        ncdf_version = 'n/a'
        warnings.warn('Output formatting of ncdump may have changed.',
                      OracWarning, stacklevel=2)

    # Fetch ECMWF version from header of NCDF file
    try:
        # Prefer whichever of ggam/ggas is a NetCDF file
        ecmwf_check_file = ggam[0] if ggam[0][-2:] == 'nc' else ggas[0]
        tmp1 = check_output(["ncdump", "-h", ecmwf_check_file],
                            universal_newlines=True)
    except OSError:
        raise FileMissing('ECMWF ggas file', ggas[0])
    m1 = search(r':history = "(.+?)" ;', tmp1)
    if m1:
        ecmwf_version = m1.group(1)
    else:
        ecmwf_version = 'n/a'
        warnings.warn('Header of ECMWF file may have changed.',
                      OracWarning, stacklevel=2)

    # Strip RTTOV version from library definition
    try:
        rttov_lib = glob(os.path.join(libs['RTTOVLIB'], 'librttov?*_main.a'))
    except KeyError:
        # Fall back to a conda environment's lib directory
        rttov_lib = glob(os.path.join(libs['CONDA_PREFIX'] + '/lib',
                                      'librttov?*_main.a'))
    for rttov_file in rttov_lib:
        try:
            m2 = search(r'librttov([\d\.]+)_main.a', rttov_file)
            rttov_version = m2.group(1)
            break
        # NOTE(review): bare except — presumably guarding m2 being None
        # (AttributeError); a narrower clause would be safer.
        except:
            pass
    else:
        rttov_version = 'n/a'
        warnings.warn('Naming of RTTOV library directory may have changed.',
                      OracWarning, stacklevel=2)

    # Fetch GIT version; chdir into the repository and always restore cwd
    cwd = os.getcwd()
    try:
        os.chdir(os.path.join(args.orac_dir, 'pre_processing'))
        tmp3 = check_output(["git", "--version"], universal_newlines=True)
        m3 = search('git version (.+?)\n', tmp3)
        git_version = m3.group(1)
    # NOTE(review): bare except — hides unrelated failures; the newer
    # variant of this function catches (FileNotFoundError,
    # CalledProcessError, AttributeError) instead.
    except:
        git_version = 'n/a'
        warnings.warn('Unable to call git.', OracWarning, stacklevel=2)
    finally:
        os.chdir(cwd)

    # Fetch repository commit number
    if not args.revision:
        args.revision = get_repository_revision()
    file_version = 'R{}'.format(args.revision)

    #------------------------------------------------------------------------

    # Write driver file (one entry per line, in the order the
    # preprocessor reads them)
    driver = """{sensor}
{l1b}
{geo}
{usgs}
{ggam[0]}
{coef}
{atlas}
{nise}
{alb}
{brdf}
{emis}
{dellon}
{dellat}
{out_dir}
{limit[0]}
{limit[1]}
{limit[2]}
{limit[3]}
{ncdf_version}
{conventions}
{institution}
{l2_processor}
{creator_email}
{creator_url}
{file_version}
{references}
{history}
{summary}
{keywords}
{comment}
{project}
{license}
{uuid}
{production_time}
{atsr_calib}
{ecmwf_flag}
{ggas[0]}
{spam[0]}
{chunk_flag}
{day_flag}
{verbose}
-
{assume_full_paths}
{include_full_brdf}
{rttov_version}
{ecmwf_version}
{git_version}
ECMWF_TIME_INT_METHOD={ecmwf_int_method}
ECMWF_PATH_2={ggam[1]}
ECMWF_PATH2_2={ggas[1]}
ECMWF_PATH3_2={spam[1]}
USE_HR_ECMWF={use_ecmwf_hr}
ECMWF_PATH_HR={ecmwf_hr[0]}
ECMWF_PATH_HR_2={ecmwf_hr[1]}
USE_ECMWF_SNOW_AND_ICE={ecmwf_nise}
USE_MODIS_EMIS_IN_RTTOV={modis_emis}
ECMWF_NLEVELS={ecmwf_nlevels}
USE_L1_LAND_MASK={l1_land_mask}
USE_OCCCI={use_occci}
OCCCI_PATH={occci_file}
DISABLE_SNOW_ICE_CORR={no_snow}
DO_CLOUD_EMIS={cld_emis}
DO_IRONLY={ir_only}
DO_CLDTYPE={cldtype}
USE_CAMEL_EMIS={camel}
USE_SWANSEA_CLIMATOLOGY={swansea}""".format(
        alb=alb,
        assume_full_paths=True,  # Above file searching returns paths, not dirs
        atlas=args.atlas_dir,
        atsr_calib=args.calib_file,
        brdf=brdf,
        camel=args.camel_emis,
        chunk_flag=False,  # File chunking no longer required
        cldtype=not args.skip_cloud_type,
        cld_emis=args.cloud_emis,
        coef=args.coef_dir,
        comment=args.comments,
        conventions=args.cfconvention,
        creator_email=args.email,
        creator_url=args.url,
        day_flag=args.day_flag,  # 0=1=Day, 2=Night
        dellat=args.dellat,
        dellon=args.dellon,
        ecmwf_flag=args.ecmwf_flag,
        ecmwf_hr=hr_ecmwf,
        ecmwf_int_method=args.single_ecmwf,
        ecmwf_nise=args.use_ecmwf_snow,
        ecmwf_nlevels=args.ecmwf_nlevels,
        ecmwf_version=ecmwf_version,
        emis=emis,
        file_version=file_version,
        geo=geo,
        ggam=ggam,
        ggas=ggas,
        history=args.history,
        include_full_brdf=not args.lambertian,
        institution=args.institute,
        ir_only=args.ir_only,
        keywords=args.keywords,
        l1_land_mask=args.l1_land_mask,
        l1b=file,
        l2_processor=args.processor,
        license=args.license,
        limit=args.limit,
        modis_emis=args.use_modis_emis,
        ncdf_version=ncdf_version,
        nise=nise,
        no_snow=args.no_snow_corr,
        occci_file=occci,
        out_dir=args.out_dir,
        usgs=args.usgs_file,
        production_time=production_time,
        project=args.project,
        references=args.references,
        rttov_version=rttov_version,
        sensor=args.File.sensor,
        spam=spam,
        summary=args.summary,
        swansea=args.swansea,
        git_version=git_version,
        uuid=uid,
        use_ecmwf_hr=not args.skip_ecmwf_hr,
        use_occci=args.use_oc,
        verbose=args.verbose,
    )

    # Optional channel subsetting
    if args.available_channels is not None:
        driver += "\nN_CHANNELS={}".format(len(args.available_channels))
        driver += "\nCHANNEL_IDS={}".format(','.join(
            str(k) for k in args.available_channels))

    # Append user-supplied extra driver lines for the "pre" stage
    for part, f in args.extra_lines:
        if part == "pre" and f != "":
            try:
                with open(f, "r") as e:
                    driver += "\n" + e.read()
            except IOError:
                raise FileMissing('extra_lines_file', f)

    # Append individual key=value overrides for the "pre" stage
    for sec, key, val in args.additional:
        if sec == "pre":
            driver += "\n{}={}".format(key, val)

    if args.File.predef and not args.no_predef:
        driver += """
USE_PREDEF_LSM=True
EXT_LSM_PATH={lsm}
USE_PREDEF_GEO=True
EXT_GEO_PATH={geo}""".format(lsm=args.prelsm_file, geo=args.pregeo_file)

    if args.product_name is not None:
        driver += "\nPRODUCT_NAME={}".format(args.product_name)

    return driver