Example #1
File: data.py  Project: tommz9/verif
    def get_fields(self):
        """ Get a list of fields that all inputs have

      Returns:
         list(verif.field.Field): A list of fields
      """
        all_fields = set()
        for f in range(self._get_num_inputs_with_clim()):
            input = self._inputs[f]
            if f == 0:
                all_fields = input.get_fields()
            else:
                all_fields = set(all_fields) & set(input.get_fields())
        return list(all_fields)
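The effect of the loop is a plain set intersection across inputs. A standalone sketch of that logic, using hypothetical field names that are not taken from the verif test data:

# Sketch of the intersection logic above (hypothetical field names).
fields_a = ["obs", "fcst", "pit"]       # fields reported by a first input
fields_b = ["obs", "fcst", "ensemble"]  # fields reported by a second input

common = set(fields_a)
common &= set(fields_b)
print(sorted(common))  # ['fcst', 'obs'] -- only fields present in every input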
Example #2
    def test_valid2(self):
        input = verif.input.Netcdf("verif/tests/files/netcdf_valid2.nc")
        locations = input.locations
        self.assertEqual(1, len(locations))
        self.assertEqual(verif.location.Location(18700, 59.9423, 10.72, 94), locations[0])
        np.testing.assert_array_equal(np.array([0, 1, 2]), input.leadtimes)
        np.testing.assert_array_equal(np.array([1388534400, 1388620800]), input.times)
        obs = input.obs
        self.assertEqual(2, obs.shape[0])  # Time
        self.assertEqual(3, obs.shape[1])  # Leadtime
        self.assertEqual(1, obs.shape[2])  # Location
        self.assertAlmostEqual(1, obs[0, 0, 0])
        self.assertAlmostEqual(2, obs[0, 1, 0])
        self.assertAlmostEqual(3, obs[0, 2, 0])
        self.assertAlmostEqual(4, obs[1, 0, 0])
        self.assertAlmostEqual(5, obs[1, 1, 0])
        self.assertAlmostEqual(6, obs[1, 2, 0])
        self.assertEqual(0, len(input.thresholds))
        self.assertEqual(0, len(input.quantiles))
        self.assertTrue(input.ensemble is None)
        self.assertTrue(verif.field.Obs() in input.get_fields())
        self.assertFalse(verif.field.Fcst() in input.get_fields())
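The two values asserted for input.times are Unix timestamps. As a small standard-library aside (not part of the test itself), they can be decoded to confirm the file covers two consecutive days:

import datetime

# 1388534400 -> 2014-01-01 00:00 UTC, 1388620800 -> 2014-01-02 00:00 UTC
for t in [1388534400, 1388620800]:
    print(datetime.datetime.fromtimestamp(t, tz=datetime.timezone.utc))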
Example #3
import argparse
import sys

import netCDF4

import verif.field
import verif.input


def main():
    parser = argparse.ArgumentParser(
        prog="text2verif",
        description="Convert between Verif text and NetCDF files")
    parser.add_argument('ifile', type=str, help="Verif text file (input)")
    parser.add_argument('ofile', type=str, help="Verif NetCDF file (output)")
    parser.add_argument('--debug',
                        help='Print debug information',
                        action="store_true")

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    args = parser.parse_args()

    input = verif.input.get_input(args.ifile)
    times = input.times
    leadtimes = input.leadtimes
    locations = input.locations
    variable = input.variable

    output = netCDF4.Dataset(args.ofile, 'w')
    output.createDimension("time", None)
    output.createDimension("leadtime", len(leadtimes))
    output.createDimension("location", len(locations))
    thresholds = input.thresholds
    quantiles = input.quantiles
    if len(thresholds) > 0:
        if args.debug:
            print("Adding %d thresholds" % len(thresholds))
        output.createDimension("threshold", len(thresholds))
        output.createVariable("threshold", "f4", ["threshold"])
        output["threshold"][:] = thresholds
        output.createVariable("cdf", "f4",
                              ("time", "leadtime", "location", "threshold"))
        output.variables["cdf"][:] = input.threshold_scores
    if len(quantiles) > 0:
        if args.debug:
            print("Adding %d quantiles" % len(quantiles))
        output.createDimension("quantile", len(quantiles))
        output.createVariable("quantile", "f4", ["quantile"])
        output["quantile"][:] = quantiles
        output.createVariable("x", "f4",
                              ("time", "leadtime", "location", "quantile"))
        output.variables["x"][:] = input.quantile_scores

    vTime = output.createVariable("time", "i4", ("time", ))
    vOffset = output.createVariable("leadtime", "f4", ("leadtime", ))
    vLocation = output.createVariable("location", "i4", ("location", ))
    vLat = output.createVariable("lat", "f4", ("location", ))
    vLon = output.createVariable("lon", "f4", ("location", ))
    vElev = output.createVariable("altitude", "f4", ("location", ))
    vfcst = output.createVariable("fcst", "f4",
                                  ("time", "leadtime", "location"))
    vobs = output.createVariable("obs", "f4", ("time", "leadtime", "location"))

    # Create nonstandard fields
    standard = [verif.field.Obs(), verif.field.Fcst()]
    fields = [field for field in input.get_fields() if field not in standard]
    for field in fields:
        name = field.name()
        if field.__class__ == verif.field.Other:
            if args.debug:
                print("Adding non-standard score '%s'" % name)
            output.createVariable(name, "f4", ("time", "leadtime", "location"))
            output.variables[name][:] = input.other_score(field.name())[:]

    output.standard_name = variable.name
    output.units = variable.units.replace("$", "")

    vobs[:] = input.obs
    vfcst[:] = input.fcst
    vTime[:] = input.times
    vOffset[:] = input.leadtimes
    vLocation[:] = [s.id for s in locations]
    vLat[:] = [s.lat for s in locations]
    vLon[:] = [s.lon for s in locations]
    vElev[:] = [s.elev for s in locations]
    output.Conventions = "verif_1.0.0"
    output.close()
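A minimal way to exercise the converter, assuming main() as defined above; the file names are hypothetical placeholders rather than files shipped with verif:

import sys

# Equivalent to running: text2verif obs_fcst.txt obs_fcst.nc --debug
sys.argv = ["text2verif", "obs_fcst.txt", "obs_fcst.nc", "--debug"]
main()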
Example #4
File: data.py  Project: tommz9/verif
    def _get_score(self, field, input_index):
        """ Load the field variable from input, but only include the common data

        Scores loaded will have the same dimensions, regardless of which
        input_index is used.

        field:         A verif.field.Field to load
        input_index:   Which input to load from
        """

        num_inputs = self._get_num_inputs_with_clim()
        if field == verif.field.Obs():
            """
         Treat observation different. Since the observations should be the same
         for all inputs, reuse the observations if one or more inputs are
         missing these
         """
            field = self._obs_field
            if field in self._get_score_cache[input_index]:
                return self._get_score_cache[input_index][field]

            found_obs = False
            for i in range(num_inputs):
                input = self._inputs[i]
                all_fields = input.get_fields()
                temp = input.obs
                if field in all_fields:
                    Itimes = self._get_time_indices(i)
                    Ileadtimes = self._get_leadtime_indices(i)
                    Ilocations = self._get_location_indices(i)
                    temp = temp[Itimes, :, :]
                    temp = temp[:, Ileadtimes, :]
                    temp = temp[:, :, Ilocations]
                    self._get_score_cache[i][field] = temp
                    found_obs = True
            if not found_obs:
                verif.util.error("No files have observations")

            for i in range(num_inputs):
                if field not in self._get_score_cache[i]:
                    for j in range(num_inputs):
                        if field in self._get_score_cache[j]:
                            verif.util.warning(
                                "No observations in %s. Loading from %s" %
                                (self._inputs[i].fullname,
                                 self._inputs[j].fullname))
                            self._get_score_cache[i][
                                field] = self._get_score_cache[j][field]
                            break
        else:
            # Check if data is cached
            if field in self._get_score_cache[input_index]:
                return self._get_score_cache[input_index][field]

            if field == verif.field.Fcst():
                field = self._fcst_field

            for i in range(num_inputs):
                if field not in self._get_score_cache[i]:
                    input = self._inputs[i]
                    all_fields = input.get_fields()
                    if field not in all_fields:
                        verif.util.error("%s does not contain '%s'" %
                                         (self.get_names()[i], field.name()))

                    elif field == verif.field.Obs():
                        temp = input.obs

                    elif field == verif.field.Fcst():
                        temp = input.fcst

                    elif field == verif.field.Pit():
                        temp = input.pit
                        x0 = self.variable.x0
                        x1 = self.variable.x1
                        if x0 is not None or x1 is not None:
                            temp = verif.field.Pit.randomize(
                                input.obs, temp, x0, x1)

                    elif field.__class__ is verif.field.Ensemble:
                        temp = input.ensemble[:, :, :, field.member]

                    elif field.__class__ is verif.field.Threshold:
                        I = np.where(input.thresholds == field.threshold)[0]
                        assert (len(I) == 1)
                        temp = input.threshold_scores[:, :, :, I[0]]

                    elif field.__class__ is verif.field.Quantile:
                        I = np.where(input.quantiles == field.quantile)[0]
                        assert (len(I) == 1)
                        temp = input.quantile_scores[:, :, :, I[0]]

                    else:
                        temp = input.other_score(field.name())
                    Itimes = self._get_time_indices(i)
                    Ileadtimes = self._get_leadtime_indices(i)
                    Ilocations = self._get_location_indices(i)
                    temp = temp[Itimes, :, :]
                    temp = temp[:, Ileadtimes, :]
                    temp = temp[:, :, Ilocations]
                    self._get_score_cache[i][field] = temp
        """
      Remove missing. If one configuration has a missing value, set all
      configurations to missing. This can happen when the times are
      available, but have missing values.
      """
        if self._remove_missing_across_all:
            is_missing = np.isnan(self._get_score_cache[0][field])
            for i in range(1, num_inputs):
                is_missing = is_missing | (np.isnan(
                    self._get_score_cache[i][field]))
            for i in range(num_inputs):
                self._get_score_cache[i][field][is_missing] = np.nan

        return self._get_score_cache[input_index][field]
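The final masking step is easier to see in isolation. A standalone numpy sketch of the remove-missing-across-all behaviour, with hypothetical one-dimensional scores standing in for the (time, leadtime, location) arrays:

import numpy as np

# Any NaN in one configuration is propagated to every configuration.
scores = [np.array([1.0, np.nan, 3.0]),   # configuration 0
          np.array([4.0, 5.0, np.nan])]   # configuration 1

is_missing = np.isnan(scores[0])
for s in scores[1:]:
    is_missing |= np.isnan(s)
for s in scores:
    s[is_missing] = np.nan

print(scores[0])  # [ 1. nan nan]
print(scores[1])  # [ 4. nan nan]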
Example #5
    def _get_score(self, field, input_index):
        """ Load the field variable from input, but only include the common data

        Scores loaded will have the same dimensions, regardless of which
        input_index is used.

        field:         A verif.field.Field to load
        input_index:   Which input to load from
        """

        # Check if data is cached
        if field in self._get_score_cache[input_index]:
            return self._get_score_cache[input_index][field]

        if field == verif.field.Obs():
            field = self._obs_field
        if field == verif.field.Fcst():
            field = self._fcst_field

        # Load all inputs
        for i in range(self._get_num_inputs_with_clim()):
            if field not in self._get_score_cache[i]:
                input = self._inputs[i]
                all_fields = input.get_fields() + [
                    verif.field.ObsWindow(),
                    verif.field.FcstWindow()
                ]
                if field not in all_fields:
                    verif.util.error("%s does not contain '%s'" %
                                     (self.get_names()[i], field.name()))
                if field == verif.field.Obs():
                    temp = input.obs

                elif field == verif.field.Fcst():
                    temp = input.fcst

                elif field == verif.field.Pit():
                    temp = input.pit
                    x0 = self.variable.x0
                    x1 = self.variable.x1
                    if x0 is not None or x1 is not None:
                        # w = ""
                        # if x0 is not None:
                        #    w += " obs=%g" % x0
                        # if x1 is not None:
                        #    w += " obs=%g" % x1
                        # verif.util.warning("Randomizing PIT values where %s" + w)
                        temp = verif.field.Pit.randomize(
                            input.obs, temp, x0, x1)

                elif field.__class__ is verif.field.Ensemble:
                    temp = input.ensemble[:, :, :, field.member]

                elif field.__class__ is verif.field.Threshold:
                    I = np.where(input.thresholds == field.threshold)[0]
                    assert (len(I) == 1)
                    temp = input.threshold_scores[:, :, :, I[0]]

                elif field.__class__ is verif.field.Quantile:
                    I = np.where(input.quantiles == field.quantile)[0]
                    assert (len(I) == 1)
                    temp = input.quantile_scores[:, :, :, I[0]]

                elif field == verif.field.ObsWindow():
                    temp = input.obs[:, :, :]
                    temp = self._calculate_window(temp, input.leadtimes)

                elif field == verif.field.FcstWindow():
                    temp = input.fcst[:, :, :]
                    temp = self._calculate_window(temp, input.leadtimes)

                else:
                    temp = input.other_score(field.name())
                Itimes = self._get_time_indices(i)
                Ileadtimes = self._get_leadtime_indices(i)
                Ilocations = self._get_location_indices(i)
                temp = temp[Itimes, :, :]
                temp = temp[:, Ileadtimes, :]
                temp = temp[:, :, Ilocations]
                self._get_score_cache[i][field] = temp

        # Remove missing. If one configuration has a missing value, set all
        # configurations to missing. This can happen when the times are
        # available, but have missing values.
        if self._remove_missing_across_all:
            is_missing = np.isnan(self._get_score_cache[0][field])
            for i in range(1, self._get_num_inputs_with_clim()):
                is_missing = is_missing | (np.isnan(
                    self._get_score_cache[i][field]))
            for i in range(0, self._get_num_inputs_with_clim()):
                self._get_score_cache[i][field][is_missing] = np.nan

        return self._get_score_cache[input_index][field]
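Both versions subset the loaded array one axis at a time with Itimes, Ileadtimes and Ilocations. A standalone sketch (hypothetical sizes) of why the indexing is chained rather than done in a single expression:

import numpy as np

temp = np.arange(2 * 3 * 4, dtype=float).reshape(2, 3, 4)  # (time, leadtime, location)
Itimes = [0, 1]
Ileadtimes = [0, 2]
Ilocations = [1, 3]

# Indexing one axis at a time keeps the full cross product of the selections;
# temp[Itimes, Ileadtimes, Ilocations] would instead broadcast the three index
# lists together and return a 1-D array of matched triples.
temp = temp[Itimes, :, :]
temp = temp[:, Ileadtimes, :]
temp = temp[:, :, Ilocations]
print(temp.shape)  # (2, 2, 2)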