def __init__(self, base_directory, force_l2i_subfolder=True):
    super(L2iDataHandler, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus(caller_id=self.__class__.__name__)
    self._base_directory = base_directory
    self._force_l2i_subfolder = force_l2i_subfolder
    self._subdirectory_list = self.get_subdirectory_list()
    self._validate_base_directory()

def __init__(self, metadata, time_orbit, period=None):

    # Copy necessary fields from l1b
    self.error = ErrorStatus()
    self._n_records = metadata.n_records
    self.info = metadata
    self.track = time_orbit
    self.period = period
    self._auto_auxvar_num = 0

    # A dictionary similar to the parameter catalog,
    # to be filled during the set auxdata method
    self._auxiliary_catalog = {}

    # Metadata
    self._auxdata_source_dict = {}
    self._source_primary_filename = "unknown"
    self._l2_algorithm_id = "unknown"
    self._l2_version_tag = "unknown"
    self._doi = ""

    # Define time of dataset creation as the time of object initialization
    # to avoid slightly different timestamps for repeated calls of datetime.now()
    self._creation_time = datetime.now()

    # Other class properties
    self._is_evenly_spaced = time_orbit.is_evenly_spaced

    # Create Level-2 data groups
    self._create_l2_data_items()

def __init__(self, output_handler):
    """
    Init the netCDF output parent class.
    NOTE: This class should only be used as a parent class.
    :param output_handler: An output handler class for the different processing levels
    """

    # Init parent
    class_name = self.__class__.__name__
    super(NCDataFile, self).__init__(class_name)
    self.error = ErrorStatus(caller_id=class_name)

    # Output handler property
    self.output_handler = output_handler

    # Class attributes
    self.data = None
    self.filename = None
    self.base_export_path = None
    self.parameter_attributes = None
    self.time_def = NCDateNumDef()
    # TODO: Make this an option?
    self.zlib = True
    self._rootgrp = None
    self._options = None
    self._proc_settings = None
    self.verbose = False

class Level2ProductDefinition(DefaultLoggingClass):
    """ Main configuration class for the Level-2 Processor """

    def __init__(self, run_tag, l2_settings_file):
        super(Level2ProductDefinition, self).__init__(self.__class__.__name__)
        self.error = ErrorStatus(self.__class__.__name__)

        # Mandatory parameter
        self._run_tag = run_tag
        self._l2_settings_file = l2_settings_file
        self._parse_l2_settings()

        # Optional parameters (may be set to default values if not specified)
        self._output_handler = []

    def add_output_definition(self, output_def_file, period="default", overwrite_protection=True):
        # Set given or default output handler
        self._output_handler.append(DefaultLevel2OutputHandler(
            output_def=output_def_file,
            subdirectory=self.run_tag,
            period=period,
            overwrite_protection=overwrite_protection))

    def _parse_l2_settings(self):
        try:
            self._l2def = get_yaml_config(self._l2_settings_file)
        except Exception as msg:
            self.error.add_error("invalid-l2-settings", msg)
            self.error.raise_on_error()

def __init__(self, auxclass_cfg):
    """
    This class should not be called directly, only its subclasses.
    auxclass_cfg needs to be of type AuxClassConfig
    """

    # Error handler
    self.error = ErrorStatus(self.pyclass)

    # Auxiliary class options
    if not isinstance(auxclass_cfg, AuxClassConfig):
        msg = "Invalid config object: %s (needs to be of type pysiral.auxdata.AuxClassConfig)"
        msg = msg % str(auxclass_cfg)
        self.error.add_error("invalid-auxclasscfg-type", msg)
        self.error.raise_on_error()
    self._cfg = auxclass_cfg

    # Main properties
    self._data = None    # Data container for external data
    self._auxvars = []   # List of auxiliary variables generated by the child class

    # General messages
    self.msgs = []

    # --- Class internals ---
    # This is for auxiliary data handlers that need to read external product files for
    # a defined period (daily, monthly, ...). The implementation currently keeps only one
    # external product in memory at a time. The period (date list: yyyy, mm, dd) of the
    # currently loaded product is designated as current_date. This date is compared to the
    # requested date and a new product is loaded upon mismatch of current & requested date.
    # NOTE: This will be bypassed by static auxiliary data classes
    # TODO: Load all auxiliary products for the processing period in memory (allow parallel processing)
    self._current_date = [0, 0, 0]
    self._requested_date = [-1, -1, -1]

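# Hedged subclass sketch for the constructor above. "AuxdataBaseClass" is an assumed
# name for the parent class and "MyAuxdataHandler" is a placeholder; the sketch only
# illustrates the documented contract: subclasses pass an AuxClassConfig instance to
# the parent constructor rather than instantiating the base class directly.
class MyAuxdataHandler(AuxdataBaseClass):

    def __init__(self, auxclass_cfg):
        # Raises via ErrorStatus if auxclass_cfg is not an AuxClassConfig instance
        super(MyAuxdataHandler, self).__init__(auxclass_cfg)
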
def __init__(self, output_def):
    super(OutputHandlerBase, self).__init__(self.__class__.__name__)
    self.pysiral_config = psrlcfg
    self.error = ErrorStatus()
    self._basedir = "n/a"
    self._init_from_output_def(output_def)
    self.output_def_filename = output_def

def __init__(self, mask_dir, mask_name, cfg):
    super(MaskSourceBase, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus()
    self._cfg = cfg
    self._mask_dir = mask_dir
    self._mask_name = mask_name
    self._mask = None
    self._area_def = None
    self._post_flipud = False

def __init__(self):
    """ Read all definition files """
    super(ConfigInfo, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus(self.__class__.__name__)

    # Read the definition files in the config folder
    self._read_config_files()

    # Read the local machine definition file
    self._read_local_machine_file()

def __init__(self):
    class_name = self.__class__.__name__
    super(NCDataFile, self).__init__(class_name)
    self.error = ErrorStatus(caller_id=class_name)
    self.filename = None
    self.time_def = NCDateNumDef()
    self.zlib = True
    self._rootgrp = None
    self._options = None
    self._proc_settings = None
    self.verbose = False

def __init__(self, product_def):
    super(Level2PreProcessor, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus()

    # Sanity check of product definition object
    if not isinstance(product_def, Level2PreProcProductDefinition):
        msg = "Invalid Level-2 PreProcessor product definition: %s" % type(product_def)
        self.error.add_error("invalid-l2preproc-def", msg)
        self.error.raise_on_error()
    self._job = product_def

def __init__(self, cfg):
    cls_name = self.__class__.__name__
    super(Level1POutputHandler, self).__init__(cls_name)
    self.error = ErrorStatus(caller_id=cls_name)
    self.cfg = cfg
    self.pysiral_cfg = psrlcfg

    # Init class properties
    self._path = None
    self._filename = None

def __init__(self, l1p_settings_id_or_file, tcs, tce, exclude_month=None, hemisphere="global",
             platform=None, output_handler_cfg=None, source_repo_id=None):
    """
    The settings for the Level-1 pre-processor job
    :param l1p_settings_id_or_file: An id of a proc/l1 processor config file (filename
        excluding the .yaml extension) or a full filepath to a yaml config file
    :param tcs: [int list] Time coverage start (YYYY MM [DD])
    :param tce: [int list] Time coverage end (YYYY MM [DD])
    :param exclude_month: [int list] A list of months that will be ignored
    :param hemisphere: [str] The target hemisphere (`north`, `south`, `global`: default)
    :param platform: [str] The target platform (pysiral id). Required if the l1p settings
        file is valid for multiple platforms (e.g. ERS-1/2, ...)
    :param output_handler_cfg: [dict] An optional dictionary with options of the output
        handler (`overwrite_protection`: [True, False], `remove_old`: [True, False])
    :param source_repo_id: [str] The tag in local_machine_def.yaml
        (l1b_repository.<platform>.<source_repo_id>) -> Overwrites the default source repo
        in the l1p settings (input_handler.options.local_machine_def_tag &
        output_handler.options.local_machine_def_tag)
    """
    super(Level1PreProcJobDef, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus()

    # Get pysiral configuration
    # TODO: Move to global
    self._cfg = psrlcfg

    # Store command line options
    self._hemisphere = hemisphere
    self._platform = platform
    self._source_repo_id = source_repo_id

    # Parse the l1p settings file
    self.set_l1p_processor_def(l1p_settings_id_or_file)

    # Get full requested time range
    self._time_range = DatePeriod(tcs, tce)
    logger.info("Requested time range is %s" % self.time_range.label)

    # Store the data handler options
    if output_handler_cfg is None:
        output_handler_cfg = {}
    self._output_handler_cfg = output_handler_cfg

    # Measure execution time
    self.stopwatch = StopWatch()

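# Hedged usage sketch for the job definition above. The import path, the settings id
# "cryosat2_pds_ipf1d" and the time coverage values are placeholder assumptions chosen
# to match the documented parameter types; they are not verified against a pysiral install.
from pysiral.l1preproc import Level1PreProcJobDef  # assumed module path (hypothetical)

job_def = Level1PreProcJobDef(
    "cryosat2_pds_ipf1d",            # hypothetical l1p settings id (filename without .yaml)
    tcs=[2019, 3], tce=[2019, 3],    # time coverage start/end as [YYYY, MM]
    hemisphere="north",
    output_handler_cfg={"overwrite_protection": True, "remove_old": False})
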
def __init__(self, run_tag, l2_settings_file):
    super(Level2ProductDefinition, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus(self.__class__.__name__)

    # Mandatory parameter
    self._run_tag = run_tag
    self._l2_settings_file = l2_settings_file
    self._parse_l2_settings()

    # Optional parameters (may be set to default values if not specified)
    self._output_handler = []

def __init__(self, cfg, raise_on_error=False):
    cls_name = self.__class__.__name__
    super(ESACryoSat2PDSBaselineD, self).__init__(cls_name)
    self.error = ErrorStatus(caller_id=cls_name)

    # Store arguments
    self.raise_on_error = raise_on_error
    self.cfg = cfg

    # Init main class variables
    self.nc = None

def __init__(self, cfg, use_daily_scaling):
    """
    Init the class
    :param cfg: A copy of the auxdata class configuration
    :param use_daily_scaling: Flag whether daily scaling shall be applied
    """

    # Properties
    self.cfg = cfg
    self.use_daily_scaling = use_daily_scaling
    self.data = None
    self.filepaths = []
    self.error = ErrorStatus()

def __init__(self, local_repository_path):

    # Init class and error handler
    class_name = self.__class__.__name__
    super(ICESatGLAH13Repository, self).__init__(class_name)
    self.error = ErrorStatus(caller_id=class_name)

    # Sanity check on path to local repository
    if os.path.isdir(str(local_repository_path)):
        self._local_repository_path = local_repository_path
    else:
        msg = "Invalid GLAH13 directory: %s" % str(local_repository_path)
        self.error.add_error("invalid-dir", msg)
        self.error.raise_on_error()

def __init__(self, filename, verbose=False, autoscale=True,
             nan_fill_value=False, global_attrs_only=False):
    self.error = ErrorStatus()
    self.time_def = NCDateNumDef()
    self.parameters = []
    self.attributes = []
    self.verbose = verbose
    self.autoscale = autoscale
    self.global_attrs_only = global_attrs_only
    self.nan_fill_value = nan_fill_value
    self.filename = filename
    self.read_globals()
    self.read_content()

def __init__(self, start_dt, stop_dt, period="monthly", exclude_month=None, raise_if_empty=False):
    super(TimeRangeRequest, self).__init__(self.__class__.__name__)
    self.pysiral_config = ConfigInfo()
    self.error = ErrorStatus()
    self.set_range(start_dt, stop_dt)
    self.set_period(period)
    # Avoid a mutable default argument: treat None as "no months excluded"
    self.set_exclude_month(exclude_month if exclude_month is not None else [])
    if raise_if_empty:
        self.raise_if_empty()

def __init__(self, variable_name, repo_dir=None, ctlg=None, squeeze_empty_dims=True,
             auxiliary_vars=None):
    super(L3ParameterCollection, self).__init__(self.__class__.__name__)

    # Name of the parameter from the netCDF files
    self.variable_name = variable_name
    # Avoid a mutable default argument: treat None as "no auxiliary variables"
    self.auxiliary_vars = auxiliary_vars if auxiliary_vars is not None else []
    self.squeeze_empty_dims = squeeze_empty_dims
    self._product = {}
    self._mask = {}
    self.error = ErrorStatus()

    # Simple consistency check
    if repo_dir is None and ctlg is None:
        msg = "Either repo_dir or ctlg must be specified"
        self.error.add_error("invalid-args", msg)
        self.error.raise_on_error()
    if repo_dir is not None and ctlg is not None:
        msg = "Both repo_dir and ctlg are specified, using ctlg"
        self.error.add_error("invalid-args", msg)

    # Construct L3 product catalog
    if repo_dir is not None:
        self.ctlg = L3CProductCatalog(repo_dir)

    # Use existing catalog
    if ctlg is not None:
        self.ctlg = ctlg

    # Parse the files
    for product in self.ctlg.product_list:
        nc = ReadNC(product.path)
        var = getattr(nc, self.variable_name)
        if self.squeeze_empty_dims:
            var = np.squeeze(var)
        l3par = L3Parameter(self.variable_name, var, product)
        for auxiliary_var_name in self.auxiliary_vars:
            auxvar = getattr(nc, auxiliary_var_name)
            if self.squeeze_empty_dims:
                auxvar = np.squeeze(auxvar)
            l3par.set_auxiliary_var(auxiliary_var_name, auxvar)
        self._product[product.id] = l3par
        # self.log.debug("Add product: %s" % product.id)
    self.log.info("Added %d product(s)" % len(self._product))

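# Hedged usage sketch for the collection class above. The import path, the repository
# location and the variable name "sea_ice_thickness" are placeholder assumptions; the
# constructor only requires that either repo_dir or ctlg is given (see consistency check).
from pysiral.l3proc import L3ParameterCollection  # assumed module path (hypothetical)

sit = L3ParameterCollection("sea_ice_thickness", repo_dir="/path/to/l3c_products")
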
def __init__(self, mask_name, grid_id, flipud=False):
    """
    Mask container for Level3Processor. Arguments are the name (id) of the mask
    (e.g. warren99_is_valid) and the id of the grid (e.g. nh25kmEASE2)
    """
    super(L3Mask, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus()

    # Save input
    self._mask_name = mask_name
    self._grid_id = grid_id
    self._flipud = flipud

    # Read the mask
    self._read_mask_netcdf()

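# Hedged usage sketch based on the example values in the docstring above (mask id
# "warren99_is_valid", grid id "nh25kmEASE2"); the import path is an assumption and
# not verified against an actual pysiral installation.
from pysiral.mask import L3Mask  # assumed module path (hypothetical)

w99_valid_mask = L3Mask("warren99_is_valid", "nh25kmEASE2", flipud=False)
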
def __init__(self, cls_name=None):
    """
    Init the class with a (loguru) logger and an ErrorStatus error handler
    :param cls_name: Name of the calling class (used as caller id of the error handler)
    """
    self.error = ErrorStatus(cls_name)

def __init__(self, log_name, cfg):
    cls_name = self.__class__.__name__
    super(L1PreProcPolarOceanCheck, self).__init__(log_name)
    self.error = ErrorStatus(caller_id=cls_name)

    # Save parameter
    self.cfg = cfg

def __init__(self, cls_name, input_adapter, output_handler, cfg):

    # Make sure the logger/error handler has the name of the parent class
    super(L1PreProcBase, self).__init__(cls_name)
    self.error = ErrorStatus(caller_id=cls_name)

    # The class that translates a given input file into an L1BData object
    self.input_adapter = input_adapter

    # Output data handler that creates l1p netCDF files from l1 data objects
    self.output_handler = output_handler

    # The configuration for the pre-processor
    self.cfg = cfg

    # The stack of Level-1 objects is a simple list
    self.l1_stack = []

class Level2PreProcessor(DefaultLoggingClass):

    def __init__(self, product_def):
        super(Level2PreProcessor, self).__init__(self.__class__.__name__)
        self.error = ErrorStatus()

        # Sanity check of product definition object
        if not isinstance(product_def, Level2PreProcProductDefinition):
            msg = "Invalid Level-2 PreProcessor product definition: %s" % type(product_def)
            self.error.add_error("invalid-l2preproc-def", msg)
            self.error.raise_on_error()
        self._job = product_def

    def process_l2i_files(self, l2i_files, period):
        """ Reads all l2i files and merges the valid data into an l2p summary file """

        # l2p: Container for storing l2i objects
        l2p = Level2PContainer(period)

        # Add all l2i objects to the l2p container.
        # NOTE: Only memory is the limit
        for l2i_file in l2i_files:
            try:
                l2i = L2iNCFileImport(l2i_file)
            except Exception as ex:
                msg = "Error (%s) in l2i file: %s" % (ex, Path(l2i_file).name)
                self.log.error(msg)
                continue
            l2p.append_l2i(l2i)

        # Merge the l2i objects into a single L2Data object
        l2 = l2p.get_merged_l2()
        if l2 is None:
            self.log.warning("- No valid freeboard data found, skipping day")
            return

        # Write output
        output = Level2Output(l2, self.job.output_handler)
        self.log.info("- Wrote %s data file: %s" % (self.job.output_handler.id, output.export_filename))

    @property
    def job(self):
        return self._job

def __init__(self, run_tag: str, l2_settings_file: str, force_l2def_record_type: bool = False) -> None:
    super(Level2ProductDefinition, self).__init__(self.__class__.__name__)
    self.error = ErrorStatus(self.__class__.__name__)

    # Mandatory parameter
    self._l2_settings_file = l2_settings_file
    self._parse_l2_settings()
    self._run_tag = None
    self.force_l2def_record_type = force_l2def_record_type
    self._set_run_tag(run_tag)

    # Optional parameters (may be set to default values if not specified)
    self._output_handler = []

def __init__(self):
    super(L1bPreProcJob, self).__init__(self.__class__.__name__)

    # Save pointer to pysiral configuration
    self.pysiral_config = ConfigInfo()

    # Initialize the time range and set to monthly per default
    self.time_range = None

    # Error status
    self.error = ErrorStatus()

    # Initialize job parameter
    self.options = L1bPreProcJobOptions()

    # List for iterations (currently only month-wise)
    self.iterations = []

def __init__(self):
    self.error = ErrorStatus()
    self.data_level = None
    self.path = None
    self.version = "default"
    self.mission_id = None
    self.year = None
    self.month = None
    self.config = psrlcfg

def __init__(self, cfg):
    cls_name = self.__class__.__name__
    super(BaselineDFileDiscovery, self).__init__(cls_name)
    self.error = ErrorStatus(caller_id=cls_name)

    # Save config
    self.cfg = cfg

    # Init empty file lists
    self._reset_file_list()

def __init__(self, name):

    # Enable logging capability (self.log)
    super(L1bPreProc, self).__init__(name)

    # Error handler
    self.error = ErrorStatus()

    # Job definition (class L1bPreProcJob)
    self._jobdef = None

    # Mission options
    self._mdef = None

    # List of l1b input files
    # Needs to be filled by the mission specific classes
    self._l1b_file_list = []

    # pysiral configuration
    self._pysiral_config = ConfigInfo()

def MaskSourceFile(mask_name, mask_cfg):
    """ Wrapper method for different mask source file classes """

    error = ErrorStatus(caller_id="MaskSourceFile")

    try:
        mask_dir = psrlcfg.local_machine.auxdata_repository.mask[mask_name]
    except KeyError:
        mask_dir = None
        msg = "path to mask %s not in local_machine_def.yaml" % mask_name
        error.add_error("missing-lmd-def", msg)
        error.raise_on_error()

    # Return the Dataset class
    try:
        return globals()[mask_cfg.pyclass_name](mask_dir, mask_name, mask_cfg)
    except KeyError:
        msg = "pysiral.mask.%s not implemented" % str(mask_cfg.pyclass_name)
        error.add_error("missing-mask-class", msg)
        error.raise_on_error()
