Example #1
    def _create_ancil_datasets(self, model_run, session):
        """
        Create ancillary datasets if available and add them to the model run datasets
        :param model_run: Model run to update
        :param session: DB Session
        :return:
        """
        soil_props_file = model_run.get_python_parameter_value(constants.JULES_PARAM_SOIL_PROPS_FILE)
        if soil_props_file is not None:
            dataset_type = self.get_dataset_type(constants.DATASET_TYPE_SOIL_PROP, session)
            self._create_dataset(dataset_type, soil_props_file, True, model_run, session)

        frac_file = model_run.get_python_parameter_value(constants.JULES_PARAM_FRAC_FILE)
        user_upload_id = session.query(DrivingDataset) \
            .filter(DrivingDataset.name == constants.USER_UPLOAD_DRIVING_DATASET_NAME) \
            .one().id
        is_user_uploaded = model_run.driving_dataset_id == user_upload_id
        if len(model_run.land_cover_actions) > 0 or (model_run.is_for_single_cell() and not is_user_uploaded):
            frac_file = 'run{model_id}/{user_edited_file}'.format(
                model_id=model_run.id,
                user_edited_file=constants.USER_EDITED_FRACTIONAL_FILENAME)
        if frac_file is not None and frac_file != constants.FRACTIONAL_FILENAME:
            dataset_type = self.get_dataset_type(constants.DATASET_TYPE_LAND_COVER_FRAC, session)
            frac_file_vis = insert_before_file_extension(frac_file, constants.MODIFIED_FOR_VISUALISATION_EXTENSION)
            self._create_dataset(dataset_type, frac_file_vis, True, model_run, session)
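
All of these examples lean on the same insert_before_file_extension helper, whose implementation is not shown on this page. The test in Example #4 below pins down the expected behaviour; here is a minimal sketch consistent with that test, built on os.path.splitext (the real helper may differ):

import os.path

def insert_before_file_extension(path, string_to_insert):
    # splitext splits at the last dot, so "file.name.here.nc" keeps its
    # inner dots and only the trailing ".nc" counts as the extension
    root, extension = os.path.splitext(path)
    return root + string_to_insert + extension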
Example #2
 def generate_output_file_path(self, model_run_id, output_var_name, period, year, is_single_cell):
     """
     Generate a file path for an output dataset
     :param model_run_id: ID of Model Run to download from
     :param output_var_name: Variable name to download
     :param period: Period (e.g. 'daily')
     :param year: Calendar year to download (only required if period is daily or hourly and the run is multicell)
     :param is_single_cell: Is the model run single cell?
     :return: File path (relative to run directory)
     """
     name_template = "run{model_run_id}/{output_dir}/{run_id}.{var_name}_{period}.nc"
     file_name = name_template.format(
         model_run_id=model_run_id,
         output_dir=constants.OUTPUT_DIR,
         run_id=constants.RUN_ID,
         var_name=output_var_name,
         period=period.lower()
     )
     if is_single_cell:
         year = None   # Get all the data
     if year is not None and period.lower() not in ['monthly', 'yearly']:
         year_suffix = ".%s" % year
         file_name = insert_before_file_extension(file_name, year_suffix)
     return file_name
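
To make the naming concrete: with hypothetical stand-ins for the two constants (neither constants.OUTPUT_DIR nor constants.RUN_ID is shown on this page) and an illustrative variable name, a daily multicell download for 2014 would resolve like this:

# Assumed values, for illustration only
OUTPUT_DIR = "output"   # stand-in for constants.OUTPUT_DIR
RUN_ID = "majic"        # stand-in for constants.RUN_ID

file_name = "run{model_run_id}/{output_dir}/{run_id}.{var_name}_{period}.nc".format(
    model_run_id=42, output_dir=OUTPUT_DIR, run_id=RUN_ID,
    var_name="gpp_gb", period="daily")
# "run42/output/majic.gpp_gb_daily.nc"; the year suffix then lands
# before the extension: insert_before_file_extension(file_name, ".2014")
# gives "run42/output/majic.gpp_gb_daily.2014.nc"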
Example #3
    def get_file_locations(self, results):
        """
        Return all the file locations referenced by the results parameters;
        if there is an error, add it to the error dictionary
        :param results: dictionary of results from the driving dataset page
        :return: list of locations
        """
        locations = []

        for key in ['land_frac_file', 'latlon_file', constants.LAND_COVER_FRAC_FILE_INPUT_NAME, 'soil_props_file']:
            self._check_location(key, results.get(key), self._errors, False)

        if constants.LAND_COVER_FRAC_FILE_INPUT_NAME in results:
            converted_filename = insert_before_file_extension(
                results.get(constants.LAND_COVER_FRAC_FILE_INPUT_NAME),
                constants.MODIFIED_FOR_VISUALISATION_EXTENSION)
            self._check_location(constants.LAND_COVER_FRAC_FILE_INPUT_NAME, converted_filename, self._errors, False)

        regions = results.get('region', [])
        region_errors = []
        region_error = False
        for index, region in enumerate(regions):
            region_errors.append({})
            local_error = not self._check_location('path', region['path'], region_errors[index], True)
            region_error = region_error or local_error
        if region_error:
            self._errors['region'] = region_errors

        driving_data_vars = results.get(constants.PREFIX_FOR_DRIVING_VARS, [])
        driving_data_errors = []
        driving_data_error = False
        drive_file = results.get('drive_file')
        start_date = results.get('driving_data_start')
        end_date = results.get('driving_data_end')
        for index, driving_data in enumerate(driving_data_vars):
            driving_data_errors.append({})

            ncml_filename = self._get_ncml_filename(driving_data['templates'], drive_file)

            local_error = not self._check_location_and_add_to(
                'templates',
                locations,
                ncml_filename,
                driving_data_errors[index],
                self._dataset_types[constants.DATASET_TYPE_COVERAGE],
                driving_data['vars'])
            driving_data_error = driving_data_error or local_error

            for filename in self._get_drive_filenames(driving_data['templates'], drive_file, start_date, end_date):
                local_error = not self._check_location(
                    'templates',
                    filename,
                    driving_data_errors[index],
                    True)
                if local_error:
                    driving_data_error = True
                    break

        if driving_data_error:
            self._errors[constants.PREFIX_FOR_DRIVING_VARS] = driving_data_errors

        return locations
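
The per-row error pattern used for both 'region' and the driving variables keeps the error list aligned with the input list, and only attaches it to self._errors when at least one row failed. Assuming _check_location records a message under the key it is given (its implementation is not shown here), three regions with one bad path would produce:

# Illustrative shape only; the real message text comes from _check_location
errors_for_region = [
    {},                              # region 0: path OK
    {'path': 'File not found'},      # region 1: failed check (message assumed)
    {},                              # region 2: path OK
]
# self._errors['region'] = errors_for_region  (set only because one row failed)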
Example #4
 # requires PyHamcrest: from hamcrest import assert_that, is_
 def test_insert_before_file_extension(self):
     path = "/home/user/data/file.name.here.nc"
     string_to_insert = "_INSERTED_STRING"
     modified_path = insert_before_file_extension(path, string_to_insert)
     expected_result = "/home/user/data/file.name.here_INSERTED_STRING.nc"
     assert_that(modified_path, is_(expected_result))
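
The same helper produces the *_vis filenames seen in Examples #1 and #3. Assuming the suffix constant is a plain string such as '_vis' (the real value of constants.MODIFIED_FOR_VISUALISATION_EXTENSION is not shown on this page), the derived name would be:

# '_vis' is an assumed stand-in for constants.MODIFIED_FOR_VISUALISATION_EXTENSION
frac_file_vis = insert_before_file_extension("data/frac.nc", "_vis")
# "data/frac_vis.nc"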