Example 1
    def _transfer_metadata(self):

        info = self.l1b.info

        # Processing System info
        info.set_attribute("pysiral_version", PYSIRAL_VERSION)

        # General CryoSat-2 metadata
        info.set_attribute("mission", self._mission)
        info.set_attribute("mission_data_version", self.cs2l1b.baseline)
        info.set_attribute("orbit", self.cs2l1b.sph.abs_orbit_start)
        info.set_attribute("cycle", self.cs2l1b.mph.cycle)
        mission_data_source = filename_from_path(self.cs2l1b.filename)
        info.set_attribute("mission_data_source", mission_data_source)

        # Get the product timeliness from the processor stage code.
        # `proc_stage` in the L1b main product header uses the following
        # conventions:  N=Near-Real Time, T=Test, O=Off Line (Systematic),
        # R=Reprocessing, L=Long Term Archive
        #
        # We want to use the following timeliness codes, adopted from the
        # S3 notation: NRT: Near-Real Time, STC: Short-Time Critical,
        # NTC: Non-Time Critical, REP: Reprocessed
        #
        # We therefore define a dictionary and use NTC as default value
        timeliness_dct = {"N": "NRT", "O": "NTC", "R": "REP", "L": "REP"}
        proc_stage = self.cs2l1b.mph.get_by_fieldname("proc_stage")[0]
        info.set_attribute("timeliness", timeliness_dct.get(proc_stage, "NTC"))

        # Time-Orbit Metadata
        start_time = parse_datetime_str(self.cs2l1b.sph.start_record_tai_time)
        stop_time = parse_datetime_str(self.cs2l1b.sph.stop_record_tai_time)
        info.set_attribute("start_time", start_time)
        info.set_attribute("stop_time", stop_time)
Example 2
 def _read_l1b_file(self, l1b_file):
     """ Read a L1b data file (l1bdata netCDF) """
     filename = filename_from_path(l1b_file)
     self.log.info("- Parsing l1bdata file: %s" % filename)
     l1b = L1bdataNCFile(l1b_file)
     l1b.parse()
     l1b.info.subset_region_name = self.l2def.hemisphere
     return l1b
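
Every example in this listing calls filename_from_path to reduce a full path to the bare filename used for logging and metadata. A minimal sketch, assuming the helper simply wraps os.path.basename (the actual pysiral implementation may differ):

import os


def filename_from_path(filepath):
    """ Return only the filename component of a full file path """
    return os.path.basename(filepath)


# e.g. filename_from_path("/data/l1bdata/l1bdata_north_20150401.nc")
# -> "l1bdata_north_20150401.nc"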
Example 3
 def parse(self):
     self._log("Parsing cs2awi grid file: %s" %
               filename_from_path(self._filename))
     nc = ReadNC(self._filename)
     for parameter in nc.parameters:
         data = np.ma.array(getattr(nc, parameter))
         data.mask = np.isnan(data)
         setattr(self, parameter, data)
         self._register_parameter(parameter)
         self._log("- found parameter: %s dims:%s" %
                   (parameter, str(np.shape(data))))
Example 4
 def _transfer_metadata(self):
     """ Extract essential metadata information from SGDR file """
     info = self.l1b.info
     sgdr = self.sgdr
     info.set_attribute("pysiral_version", PYSIRAL_VERSION)
     info.set_attribute("mission", self._mission)
     info.set_attribute("mission_data_version", sgdr.nc.software_ver)
     info.set_attribute("orbit", sgdr.nc.abs_orbit)
     info.set_attribute("cycle", sgdr.nc.cycle)
     mission_data_source = filename_from_path(sgdr.nc.filename)
     info.set_attribute("mission_data_source", mission_data_source)
     info.set_attribute("timeliness", "REP")
Example 5
    def _set_input_file_metadata(self):
        """ Fill the product info """

        # Short cuts
        metadata = self.nc.attrs
        info = self.l1.info

        # Processing environment metadata
        info.set_attribute("pysiral_version", pysiral_version)

        # General product metadata
        info.set_attribute("mission", "cryosat2")
        info.set_attribute("mission_sensor", "siral")
        info.set_attribute("mission_data_version", "D")
        info.set_attribute("orbit", metadata["abs_orbit_start"])
        info.set_attribute("cycle", metadata["cycle_number"])
        info.set_attribute("mission_data_source",
                           filename_from_path(self.filepath))
        info.set_attribute(
            "timeliness",
            cs2_procstage2timeliness(metadata["processing_stage"]))

        # Time-Orbit Metadata
        lats = [
            float(metadata["first_record_lat"]) * 1e-6,
            float(metadata["last_record_lat"]) * 1e-6
        ]
        lons = [
            float(metadata["first_record_lon"]) * 1e-6,
            float(metadata["last_record_lon"]) * 1e-6
        ]
        info.set_attribute("start_time",
                           parse_datetime_str(
                               metadata["first_record_time"][4:]))  # TAI=....
        info.set_attribute("stop_time",
                           parse_datetime_str(
                               metadata["last_record_time"][4:]))  # TAI=....
        info.set_attribute("lat_min", np.amin(lats))
        info.set_attribute("lat_max", np.amax(lats))
        info.set_attribute("lon_min", np.amin(lons))
        info.set_attribute("lon_max", np.amax(lons))

        # Product Content Metadata
        for mode in ["sar", "sin", "lrm"]:
            percent_value = 0.0
            if metadata["sir_op_mode"].strip().lower() == mode:
                percent_value = 100.
            info.set_attribute("{}_mode_percent".format(mode), percent_value)
        info.set_attribute("open_ocean_percent",
                           float(metadata["open_ocean_percent"]) * 0.01)
Example 6
    def parse(self):
        self._log("Parsing sicci pickled grid file: %s" %
                  filename_from_path(self._filename))
        # Open in binary mode for pickle and close the file handle afterwards
        with open(self._filename, "rb") as f:
            G = pickle.load(f)

        # Extract vector freeboard values
        self.sea_ice_freeboard = np.ma.array(G.AverageFreeboard(False)[0])
        self.sea_ice_freeboard *= 1.0e-3  # convert from mm to m
        self.sea_ice_freeboard.mask = np.isnan(self.sea_ice_freeboard)
        self.latitude = np.array(G.lat)  # in degrees
        self.longitude = np.array(G.lon)  # in degrees

        self._register_parameter("longitude")
        self._register_parameter("latitude")
        self._register_parameter("sea_ice_freeboard")
Example 7
    def write_to_file(self, output_id, directory):
        """ Write a summary file to the defined export directory """

        # Create a simple filename
        filename = os.path.join(directory, "pysiral-l2proc-summary.txt")
        self.log.info("Exporting summary report: %s" % filename)

        lfmt = "  %-16s : %s\n"
        current_time = str(datetime.now()).split(".")[0]
        with open(filename, "w") as fhandle:

            # Write info on settings, host, OS, ...
            fhandle.write("# pysiral Level2Processor Summary\n\n")
            fhandle.write(lfmt % ("created", current_time))

            # Brief statistics of files, errors, warnings
            fhandle.write("\n# Processor Statistics\n\n")
            fhandle.write(lfmt % ("l1b files", str(self.n_files)))
            fhandle.write(lfmt % ("errors", str(self.n_discarded_files)))
            fhandle.write(lfmt % ("warnings", str(self.n_warnings)))

            fhandle.write("\n# Processor & Local Machine Settings\n\n")
            fhandle.write(lfmt % ("pysiral version", PYSIRAL_VERSION))
            fhandle.write(lfmt % ("python version", sys.version))
            fhandle.write(lfmt % ("hostname", HOSTNAME))

            # More info on this specific run
            fhandle.write(lfmt % ("data period", self.data_period_str))
            fhandle.write(lfmt % ("Level-2 settings", self.l2_settings_file))
            fhandle.write(lfmt % ("l1b repository", self.l1b_repository))

            # List discarded files and reason (error code & description)
            fhandle.write("\n# Detailed Error Breakdown\n\n")
            msg = "  No %s output generated for %g l1b files due " + \
                  "to following errors:\n"
            fhandle.write(msg % (output_id, self.n_discarded_files))

            for error_code in PYSIRAL_ERROR_CODES.keys():
                n_discarded_files = len(self.error_counter[error_code])
                if n_discarded_files == 0:
                    continue
                error_description = PYSIRAL_ERROR_CODES[error_code]
                msg = "\n  %g file(s): [error_code:%s] %s\n" % (
                    n_discarded_files, error_code, error_description)
                fhandle.write(msg)
                for discarded_file in self.error_counter[error_code]:
                    fn = filename_from_path(discarded_file)
                    fhandle.write("  * %s\n" % fn)
Example 8
    def parse(self):
        self._log("Parsing cs2awi grid file: %s" %
                  filename_from_path(self._filename))
        nc = ReadNC(self._filename)
        for parameter in nc.parameters:
            data = np.ma.array(getattr(nc, parameter))
            data.mask = np.isnan(data)
            setattr(self, parameter, data)
            self._register_parameter(parameter)
            self._log("- found parameter: %s dims:%s" %
                      (parameter, str(np.shape(data))))

        # XXX: dirty hack
        nan_mask = np.isnan(self.sea_surface_height_anomaly)
        self.radar_freeboard.mask = nan_mask
        self.sea_ice_freeboard = self.radar_freeboard
Example 9
    def merge_and_export_polar_ocean_subsets(self):
        """ loop over remaining files in file list """

        log = self.log
        n = len(self._l1b_file_list)
        if n == 0:
            return

        l1bdata_stack = []
        for i, l1b_file in enumerate(self._l1b_file_list):

            # Parse the current file and split into polar ocean segments
            log.info("+ [ Processing %g of %g ]" % (i+1, n))
            log.info("- Input file: %s" % filename_from_path(l1b_file))
            l1b_segments = self.get_ocean_segments_from_input_file(l1b_file)

            # Skip if no relevant data was found
            if l1b_segments is None:
                log.info("- %s polar ocean data: False -> skip file" % (
                     self._jobdef.hemisphere))
                continue
            else:
                log.info("- %s polar ocean data: True" % (
                     self._jobdef.hemisphere))
            log.info("- %g polar ocean data segments" % len(l1b_segments))

            # XXX: Debug
            # debug_stack_orbit_plot(l1bdata_stack, l1b_segments)
            # Loop over segments and check connectivity
            for l1b_segment in l1b_segments:

                if not self.l1b_is_connected_to_stack(
                        l1b_segment, l1bdata_stack):

                    # => break criterion: save the existing stack and start over
                    log.info("- segment unconnected, exporting current stack")
                    l1b_merged = self.merge_l1b_stack(l1bdata_stack)
                    self.export_l1b_to_netcdf(l1b_merged)

                    # Reset the l1bdata stack
                    l1bdata_stack = [l1b_segment]
                    continue

                # polar ocean data and connected => add to stack
                l1bdata_stack.append(l1b_segment)
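
The connectivity test itself is not part of this example. A plausible sketch, assuming that "connected" means the time gap between the end of the current stack and the start of the new segment stays below a threshold; this is a hypothetical helper, not the actual pysiral implementation:

def l1b_is_connected_to_stack(l1b_segment, l1bdata_stack, max_gap_seconds=10.0):
    """ Return True if the segment starts close enough in time to the end
    of the current stack to belong to the same continuous orbit arc """
    # An empty stack accepts any segment
    if not l1bdata_stack:
        return True
    gap = l1b_segment.info.start_time - l1bdata_stack[-1].info.stop_time
    return gap.total_seconds() <= max_gap_seconds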
Example 10
    def process_l2i_files(self, l2i_files, period):
        """ Reads all l2i files and merges the valid data into a l2p
        summary file """

        # l2p: Container for storing l2i objects
        l2p = Level2PContainer(period)

        # Add all l2i objects to the l2p container.
        # NOTE: Only memory is the limit
        for l2i_file in l2i_files:
            try:
                l2i = L2iNCFileImport(l2i_file)
            except Exception as errmsg:
                msg = "Error (%s) in l2i file: %s"
                msg = msg % (errmsg, filename_from_path(l2i_file))
                self.log.error(msg)
                continue
            l2p.append_l2i(l2i)
Example 11
    def _set_input_file_metadata(self):
        """ Extract essential metadata information from SGDR file """
        info = self.l1.info
        sgdr = self.sgdr
        info.set_attribute("pysiral_version", PYSIRAL_VERSION)
        try:
            info.set_attribute(
                "mission", self.cfg.platform_name_dict[str(sgdr.nc.mission)])
        except KeyError:
            mission_id = self.sgdr.guess_mission_from_filename()
            info.set_attribute("mission",
                               self.cfg.platform_name_dict[str(mission_id)])

        info.set_attribute("mission_data_version", sgdr.nc.software_ver)
        info.set_attribute("orbit", sgdr.nc.abs_orbit)
        info.set_attribute("cycle", sgdr.nc.cycle)
        mission_data_source = filename_from_path(sgdr.nc.filename)
        info.set_attribute("mission_data_source", mission_data_source)
        info.set_attribute("timeliness", self.cfg.timeliness)
Example 12
    def parse_filename(self, fn):
        """ Parse info from pysiral output filename """
        filename = filename_from_path(fn)
        match_found = False
        for data_level in self._registered_parsers.keys():
            parser = parse.compile(self._registered_parsers[data_level])
            match = parser.parse(filename)
            if match:
                match_found = True
                self.data_level = data_level
                for parameter in match.named.keys():
                    value = match[parameter]
                    if parameter in ["start", "stop"]:
                        try:
                            value = dtparser.parse(value)
                        except (ValueError, OverflowError):
                            match_found = False
                            break
                    setattr(self, parameter, value)
                break

        if not match_found:
            print "Unrecognized filename: %s" % filename