Example #1
    def create_two_driving_datasets(self):
        """
        Create two driving datasets, with locations and parameter values set up
        :return: nothing
        """

        with session_scope(Session) as session:
            self.create_driving_dataset(session)

            model_run_service = ModelRunService()

            driving2 = DrivingDataset()
            driving2.name = "driving2"
            driving2.description = "driving 2 description"
            driving2.geographic_region = 'Global'
            driving2.spatial_resolution = 'Half degree'
            driving2.temporal_resolution = '3 Hours'
            driving2.boundary_lat_north = 85
            driving2.boundary_lat_south = -90
            driving2.boundary_lon_west = -180
            driving2.boundary_lon_east = 180
            driving2.time_start = datetime.datetime(1901, 1, 1, 0, 0, 0)
            driving2.time_end = datetime.datetime(2001, 1, 1, 0, 0, 0)
            driving2.view_order_index = 200
            driving2.usage_order_index = 2
            driving2.is_restricted_to_admins = False

            location3 = DrivingDatasetLocation()
            location3.base_url = "base_url3"
            location3.driving_dataset = driving2

            val = f90_helper.python_to_f90_str(8 * ["i"])
            pv1 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_DRIVE_INTERP, val)
            val = f90_helper.python_to_f90_str(3600)
            pv2 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_DRIVE_DATA_PERIOD, val)
            val = f90_helper.python_to_f90_str("data/driving2/frac.nc")
            pv3 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_FRAC_FILE, val)
            val = f90_helper.python_to_f90_str("frac2")
            pv4 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_FRAC_NAME, val)
            val = f90_helper.python_to_f90_str(['b', 'sathh', 'satcon', 'sm_sat', 'sm_crit',
                                                'sm_wilt', 'hcap', 'hcon', 'albsoil'])
            pv5 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_SOIL_PROPS_VAR, val)
            val = f90_helper.python_to_f90_str(['bexp', 'sathh', 'satcon', 'vsat', 'vcrit',
                                                'vwilt', 'hcap', 'hcon', 'albsoil'])
            pv6 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_SOIL_PROPS_VAR_NAME, val)
            val = f90_helper.python_to_f90_str(9)
            pv7 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_SOIL_PROPS_NVARS, val)
            val = f90_helper.python_to_f90_str("data/WATCH_2D/ancils/soil_igbp_bc_watch_0p5deg_capUM6.6_2D.nc")
            pv8 = DrivingDatasetParameterValue(model_run_service, driving2,
                                               constants.JULES_PARAM_SOIL_PROPS_FILE, val)
            session.add(driving2)
            session.commit()
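
A minimal usage sketch, assuming this helper lives on a test base class alongside create_driving_dataset and a configured Session (the test name and assertions are illustrative, not part of the original):

    def test_fixture_creates_both_datasets(self):
        # Illustrative test exercising the fixture above
        self.create_two_driving_datasets()
        with session_scope(Session) as session:
            names = [ds.name for ds in session.query(DrivingDataset).all()]
            assert "driving1" in names
            assert "driving2" in names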
def _create_watch_driving_data_basic(conf):
    watch_driving_dataset = DrivingDataset()
    watch_driving_dataset.name = "WATCH Forcing Data 20th Century"
    watch_driving_dataset.description = \
        "A sub-diurnal meteorological forcing dataset: based on ERA-40 for the sub-monthly variability, " \
        "tuned to CRU monthly observations. " \
        "See Weedon et al, 2011, Journal of Hydrometeorology, doi: 10.1175/2011JHM1369.1"
    watch_driving_dataset.geographic_region = 'Global'
    watch_driving_dataset.temporal_resolution = '3 Hours'
    watch_driving_dataset.spatial_resolution = '0.5 Degrees'
    watch_driving_dataset.boundary_lat_north = 84
    watch_driving_dataset.boundary_lat_south = -56
    watch_driving_dataset.boundary_lon_west = -180
    watch_driving_dataset.boundary_lon_east = 180
    watch_driving_dataset.time_start = datetime.datetime(1901, 1, 1, 0, 0, 0)
    if conf['full_data_range'].lower() == "true":
        watch_driving_dataset.time_end = datetime.datetime(2001, 12, 31, 21, 0, 0)
    else:
        watch_driving_dataset.time_end = datetime.datetime(1901, 1, 31, 21, 0, 0)
    watch_driving_dataset.view_order_index = 100
    watch_driving_dataset.usage_order_index = 2
    watch_driving_dataset.is_restricted_to_admins = False
    return watch_driving_dataset
def _create_chess_data_basic(conf):
    chess_driving_dataset = DrivingDataset()
    chess_driving_dataset.name = "UK CHESS Forcing Data"
    chess_driving_dataset.description = \
        "A daily meteorological forcing dataset: based on MORECS at 40km and downscaled to 1km using sub-grid scale " \
        "topography. See Robinson et al, 2014. manuscript in prep."
    chess_driving_dataset.geographic_region = 'UK'
    chess_driving_dataset.temporal_resolution = '24 Hours'
    chess_driving_dataset.spatial_resolution = '1 km'
    chess_driving_dataset.boundary_lat_north = 59
    chess_driving_dataset.boundary_lat_south = 50
    chess_driving_dataset.boundary_lon_west = -7.5
    chess_driving_dataset.boundary_lon_east = 1.5
    chess_driving_dataset.time_start = datetime.datetime(1961, 1, 1, 0, 0, 0)
    if conf['full_data_range'].lower() == "true":
        chess_driving_dataset.time_end = datetime.datetime(1999, 1, 1, 0, 0, 0)
    else:
        chess_driving_dataset.time_end = datetime.datetime(1961, 1, 31, 0, 0, 0)
    chess_driving_dataset.view_order_index = 200
    chess_driving_dataset.usage_order_index = 1
    chess_driving_dataset.is_restricted_to_admins = True

    return chess_driving_dataset
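
A short sketch of how the conf flag drives the covered period in both builders (the conf dicts are illustrative):

# Illustrative only: "true" (any casing) selects the full data range,
# anything else falls back to the short one-month test range.
watch_full = _create_watch_driving_data_basic({'full_data_range': 'True'})   # ends 2001-12-31 21:00
chess_short = _create_chess_data_basic({'full_data_range': 'false'})         # ends 1961-01-31 00:00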
Example #4
def setup_app(command, conf, vars):
    """
    Place any commands to set up joj here - currently creates the db tables and seeds the initial data
    :param command:
    :param conf:
    :param vars:
    :return:
    """

    # Don't reload the app if it was loaded under the testing environment
    if not pylons.test.pylonsapp:
        load_environment(conf.global_conf, conf.local_conf)

    # Create the tables
    Base.metadata.drop_all(bind=Session.bind)
    Base.metadata.create_all(bind=Session.bind)

    jules_config_parser = JulesNamelistParser()

    with session_scope(Session) as session:
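        # Seed the initial user accounts (admins plus the core system user)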
        user = User()
        user.name = 'John Holt'
        user.first_name = 'John'
        user.last_name = 'Holt'
        user.username = '******'
        user.email = '*****@*****.**'
        user.access_level = "Admin"
        user.storage_quota_in_gb = conf['storage_quota_admin_GB']

        session.add(user)

        user2 = User()
        user2.name = 'Matt Kendall'
        user2.first_name = 'Matt'
        user2.last_name = 'Kendall'
        user2.username = '******'
        user2.email = '*****@*****.**'
        user2.access_level = "Admin"
        user2.storage_quota_in_gb = conf['storage_quota_admin_GB']

        session.add(user2)

        user3 = User()
        user3.name = 'Matt Fry'
        user3.first_name = 'Matt'
        user3.last_name = 'Fry'
        user3.username = '******'
        user3.email = '*****@*****.**'
        user3.access_level = "Admin"
        user3.storage_quota_in_gb = conf['storage_quota_admin_GB']

        session.add(user3)

        user4 = User()
        user4.name = 'Eleanor Blyth'
        user4.first_name = 'Eleanor'
        user4.last_name = 'Blyth'
        user4.username = '******'
        user4.email = '*****@*****.**'
        user4.access_level = "Admin"
        user4.storage_quota_in_gb = conf['storage_quota_admin_GB']

        session.add(user4)

        user5 = User()
        user5.name = 'Sandeep Kaur'
        user5.first_name = 'Sandeep'
        user5.last_name = 'Kaur'
        user5.username = '******'
        user5.email = '*****@*****.**'
        user5.access_level = "Admin"
        user5.storage_quota_in_gb = conf['storage_quota_admin_GB']

        session.add(user5)

        core_user = User()
        core_user.name = 'System'
        core_user.first_name = 'The'
        core_user.last_name = 'System'
        core_user.username = constants.CORE_USERNAME
        core_user.email = ''
        core_user.storage_quota_in_gb = conf['storage_quota_total_GB']  # Total storage for the group workspace

        session.add(core_user)

        cover_dst = DatasetType(type=constants.DATASET_TYPE_COVERAGE)
        session.add(cover_dst)
        land_cover_frac_dst = DatasetType(type=constants.DATASET_TYPE_LAND_COVER_FRAC)
        session.add(land_cover_frac_dst)
        soil_prop_dst = DatasetType(type=constants.DATASET_TYPE_SOIL_PROP)
        session.add(soil_prop_dst)
        single_cell_dst = DatasetType(type=constants.DATASET_TYPE_SINGLE_CELL)
        session.add(single_cell_dst)
        transects_dst = DatasetType(type=constants.DATASET_TYPE_TRANSECT)
        session.add(transects_dst)

        level = UserLevel()
        level.name = 'Beginner'
        session.add(level)

        stat_created = ModelRunStatus(constants.MODEL_RUN_STATUS_CREATED)
        stat_submitted = ModelRunStatus(constants.MODEL_RUN_STATUS_SUBMITTED)
        stat_pending = ModelRunStatus(constants.MODEL_RUN_STATUS_PENDING)
        stat_running = ModelRunStatus(constants.MODEL_RUN_STATUS_RUNNING)
        stat_completed = ModelRunStatus(constants.MODEL_RUN_STATUS_COMPLETED)
        stat_published = ModelRunStatus(constants.MODEL_RUN_STATUS_PUBLISHED)
        stat_public = ModelRunStatus(constants.MODEL_RUN_STATUS_PUBLIC)
        stat_failed = ModelRunStatus(constants.MODEL_RUN_STATUS_FAILED)
        stat_submit_failed = ModelRunStatus(constants.MODEL_RUN_STATUS_SUBMIT_FAILED)
        stat_unknown = ModelRunStatus(constants.MODEL_RUN_STATUS_UNKNOWN)

        # Add each status explicitly (map() would be evaluated lazily under Python 3)
        for status in [stat_created, stat_submitted, stat_pending, stat_running, stat_completed, stat_published,
                       stat_public, stat_failed, stat_submit_failed, stat_unknown]:
            session.add(status)

        session.add(SystemAlertEmail(
            code=SystemAlertEmail.GROUP_SPACE_FULL_ALERT,
            sent_frequency_in_s=conf['alert_email_frequency_in_s']))

        default_code_version = CodeVersion()
        default_code_version.name = conf.local_conf['default_code_version']
        default_code_version.url_base = 'http://www.jchmr.org/jules/documentation/user_guide/vn3.4/'
        default_code_version.is_default = True
        session.add(default_code_version)

        # Namelists from docs
        jules_parameter_parser = JulesParameterParser()
        namelist_files = jules_parameter_parser.parse_all("docs/Jules/user_guide/html/namelists/", default_code_version)
        for namelist_file in namelist_files:
            session.add(namelist_file)

        # Output variable from docs
        jules_output_variables_parser = JulesOutputVariableParser()
        output_variables = jules_output_variables_parser.parse("docs/Jules/user_guide/html/output-variables.html")
        session.add_all(output_variables)

        # Add WATCH 100 Years run
        watch_model_run = jules_config_parser.parse_namelist_files_to_create_a_model_run(
            default_code_version,
            "Watch data run over the length of the data",
            "configuration/Jules/watch",
            "Watch 100 Years Data",
            namelist_files,
            stat_published,
            core_user,
            None)
        watch_model_run.date_created = datetime.datetime(2014, 8, 26, 16, 00, 00)
        watch_model_run.date_submitted = datetime.datetime(2014, 8, 26, 16, 00, 00)
        watch_model_run.date_started = datetime.datetime(2014, 8, 26, 16, 00, 00)
        watch_model_run.last_status_change = datetime.datetime(2014, 8, 26, 16, 00, 00)
        watch_model_run.storage_in_mb = 1000

        land_cover_file = insert_before_file_extension(WATCH_FRAC_FILE, constants.MODIFIED_FOR_VISUALISATION_EXTENSION)
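        # Ancillary files to register as input datasets: (path, display name, data range min, data range max, dataset type)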
        ancils = [
            [WATCH_SOIL_PROPS_FILE, 'Soil Properties', 0, 10, soil_prop_dst],
            [land_cover_file, 'Land Cover Fractions', 0, 10, land_cover_frac_dst]
        ]

        for path, var, name, min, max in WATCH_DRIVING_DATA:
            ds = Dataset()
            ds.name = name
            ds.wms_url = conf.local_conf['thredds.server_url'] \
                + "wms/model_runs/run1/data/WATCH_2D/driving/" + path + ".ncml"\
                "?service=WMS&version=1.3.0&request=GetCapabilities"
            ds.netcdf_url = conf.local_conf['thredds.server_url'] + "dodsC/model_runs/run1/data/WATCH_2D/driving/" + \
                path + ".ncml"
            ds.data_range_from = min
            ds.data_range_to = max
            ds.is_categorical = 0
            ds.deleted = 0
            ds.dataset_type = cover_dst
            ds.is_input = True
            ds.model_run = watch_model_run

        for path, name, min, max, dataset_type in ancils:
            ds = Dataset()
            ds.name = name
            ds.wms_url = conf.local_conf['thredds.server_url'] \
                + "wms/model_runs/run1/" + path + \
                "?service=WMS&version=1.3.0&request=GetCapabilities"
            ds.netcdf_url = conf.local_conf['thredds.server_url'] + "dodsC/model_runs/run1/" + path
            ds.data_range_from = min
            ds.data_range_to = max
            ds.is_categorical = 0
            ds.deleted = 0
            ds.dataset_type = dataset_type
            ds.is_input = True
            ds.model_run = watch_model_run

        profiles = ["Monthly"]
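        # Output variables for the WATCH run: (output path template, display name template, data range min, data range max)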
        outputs = [
            ['gpp_gb_{}', 'Gridbox gross primary productivity ({})', 0, 1e-7],
            ['rad_net_{}', 'Surface net radiation of land points ({})', -50, 150],
            ['resp_p_gb_{}', 'Gridbox plant respiration ({})', 0, 4e-8],
            ['smc_tot_{}', 'Gridbox total soil moisture in column ({})', 0, 1500],
            ['sub_surf_roff_{}', 'Gridbox sub-surface runoff ({})', 0, 1e-6],
            ['surf_roff_{}', 'Gridbox surface runoff ({})', 0, 5e-5],
            ['swet_liq_tot_{}', 'Gridbox unfrozen soil moisture as fraction of saturation ({})', 0, 1]]

        for profile in profiles:
            for path_template, name_template, min, max in outputs:
                path = path_template.format(profile.lower())
                name = name_template.format(profile)
                ds = Dataset()
                ds.name = name
                ds.wms_url = conf.local_conf['thredds.server_url'] \
                    + "wms/model_runs/run1/output/majic." + path + ".ncml" + \
                    "?service=WMS&version=1.3.0&request=GetCapabilities"
                ds.netcdf_url = conf.local_conf['thredds.server_url'] + "dodsC/model_runs/run1/output/majic." \
                    + path + ".ncml"
                ds.data_range_from = min
                ds.data_range_to = max
                ds.is_categorical = 0
                ds.deleted = 0
                ds.dataset_type = cover_dst
                ds.is_input = False
                ds.model_run = watch_model_run

        session.add(watch_model_run)

        jules_config_parser.parse_all(
            "configuration/Jules/scientific_configurations/",
            namelist_files,
            core_user,
            default_code_version,
            stat_created)

    with session_scope(Session) as session:
        watch_driving_data_name = create_watch_driving_data(conf, cover_dst, land_cover_frac_dst, soil_prop_dst,
                                                            "configuration/Jules/country.csv")
        chess_driving_data_name = create_chess_driving_data(conf, cover_dst, land_cover_frac_dst, soil_prop_dst)

    with session_scope(Session) as session:

        driving_ds_upload = DrivingDataset()
        driving_ds_upload.name = constants.USER_UPLOAD_DRIVING_DATASET_NAME
        driving_ds_upload.description = "Choose this option if you wish to use your own uploaded driving data for a " \
                                        "single cell site"
        driving_ds_upload.view_order_index = 1000
        driving_ds_upload.is_restricted_to_admins = False

        parameters_upload = [
            [constants.JULES_PARAM_POST_PROCESSING_ID, "3"],
            [constants.JULES_PARAM_INPUT_GRID_NX, "1"],
            [constants.JULES_PARAM_INPUT_GRID_NY, "1"],

            # Default soil properties, hopefully overridden in the background when the user sets the land cover.
            [constants.JULES_PARAM_SOIL_PROPS_NVARS, "9"],
            [constants.JULES_PARAM_SOIL_PROPS_VAR,
             "'b'       'sathh'  'satcon'  'sm_sat'  'sm_crit'  'sm_wilt'  'hcap'      'hcon'   'albsoil'"],
            [constants.JULES_PARAM_SOIL_PROPS_USE_FILE, ".false. .false. .false. .false. .false. .false. .false. "
                ".false. .false."],
            [constants.JULES_PARAM_SOIL_PROPS_CONST_VAL,
             "0.9     0.0      0.0         50.0     275.0        278.0    10.0 0.0"],

            [constants.JULES_PARAM_INITIAL_NVARS, "10"],
            [constants.JULES_PARAM_INITIAL_VAR,
                "'sthuf' 'canopy' 'snow_tile' 'rgrain' 'tstar_tile' 't_soil' 'cs' 'gs'  'lai' 'canht'"],
            [constants.JULES_PARAM_INITIAL_USE_FILE,
                ".false.  .false.  .false.  .false.  .false.  .false.  .false.  .false. .false.  .false."],
            [constants.JULES_PARAM_INITIAL_CONST_VAL,
                "0.9     0.0      0.0         50.0     275.0        278.0    10.0   0.0   1.0   2.0"]
        ]

        model_run_service = ModelRunService()
        for constant, value in parameters_upload:
            ddpv = DrivingDatasetParameterValue(model_run_service, driving_ds_upload, constant, value)
            driving_ds_upload.parameter_values.append(ddpv)

        session.add(driving_ds_upload)

        land_cover_types = {1: 'Broad-leaved Tree', 2: 'Needle-leaved Tree', 3: 'C3 Grass', 4: 'C4 Grass', 5: 'Shrub',
                            6: 'Urban', 7: 'Lake', 8: 'Soil', 9: constants.FRACTIONAL_ICE_NAME}
        for index in land_cover_types:
            land_cover_type = LandCoverValue()
            land_cover_type.index = index
            land_cover_type.name = land_cover_types[index]
            session.add(land_cover_type)

    with session_scope(Session) as session:
        watch_model = session.query(ModelRun).filter(ModelRun.name == watch_model_run.name).one()
        science_config = session.query(ModelRun).filter(ModelRun.name == "Full carbon cycle").one()
        driving_dataset = session.query(DrivingDataset).filter(DrivingDataset.name == watch_driving_data_name).one()

        watch_model.science_configuration_id = science_config.id
        watch_model.driving_dataset_id = driving_dataset.id
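
setup_app is the standard Pylons websetup hook, normally run once against the application's config file before first use. A minimal post-setup sanity check, purely illustrative and not part of the original setup:

# Illustrative only: verify the seeded WATCH run was linked to its science
# configuration and driving dataset by the final block of setup_app.
with session_scope(Session) as session:
    run = session.query(ModelRun).filter(ModelRun.name == "Watch 100 Years Data").one()
    assert run.science_configuration_id is not None
    assert run.driving_dataset_id is not None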
Example #5
    def create_driving_dataset(
            self,
            session,
            jules_params=DrivingDatasetJulesParams(dataperiod=3600, var_interps=8 * ["i"]),
            is_restricted_to_admins=False,
            data_range_from=10,
            data_range_to=5):
        """
        Create a driving dataset
        :param session: session to use
        :param jules_params: set of JULES parameters to attach to the dataset
        :param is_restricted_to_admins: whether the dataset is restricted to admin users
        :param data_range_from: lower bound of the dataset's data range
        :param data_range_to: upper bound of the dataset's data range
        :return: the created driving dataset
        """
        model_run_service = ModelRunService()

        driving1 = DrivingDataset()
        driving1.name = "driving1"
        driving1.description = "driving 1 description"
        driving1.geographic_region = 'European'
        driving1.spatial_resolution = '1km'
        driving1.temporal_resolution = '24 hours'
        driving1.boundary_lat_north = 50
        driving1.boundary_lat_south = -10
        driving1.boundary_lon_west = -15
        driving1.boundary_lon_east = 30
        driving1.time_start = datetime.datetime(1979, 1, 1, 0, 0, 0)
        driving1.time_end = datetime.datetime(2010, 1, 1, 0, 0, 0)
        driving1.view_order_index = 100
        driving1.is_restricted_to_admins = is_restricted_to_admins
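        # Two placeholder file locations attached to this dataset (the base URLs are test stubs)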
        location1 = DrivingDatasetLocation()
        location1.base_url = "base_url"
        location1.dataset_type_id = 1
        location1.driving_dataset = driving1
        location2 = DrivingDatasetLocation()
        location2.base_url = "base_url2"
        location2.driving_dataset = driving1
        location2.dataset_type_id = 1
        jules_params.add_to_driving_dataset(model_run_service, driving1, session)

        val = f90_helper.python_to_f90_str(8 * ["i"])
        pv1 = DrivingDatasetParameterValue(model_run_service, driving1,
                                           constants.JULES_PARAM_DRIVE_INTERP, val)
        val = f90_helper.python_to_f90_str(3600)
        pv2 = DrivingDatasetParameterValue(model_run_service, driving1,
                                           constants.JULES_PARAM_DRIVE_DATA_PERIOD, val)
        val = f90_helper.python_to_f90_str("data/driving1/frac.nc")
        pv3 = DrivingDatasetParameterValue(model_run_service, driving1,
                                           constants.JULES_PARAM_FRAC_FILE, val)
        val = f90_helper.python_to_f90_str("frac")
        pv4 = DrivingDatasetParameterValue(model_run_service, driving1,
                                           constants.JULES_PARAM_FRAC_NAME, val)

        session.add(driving1)
        session.commit()

        driving_data_filename_param_val = DrivingDatasetParameterValue(
            model_run_service,
            driving1,
            constants.JULES_PARAM_DRIVE_FILE,
            "'testFileName'")
        session.add(driving_data_filename_param_val)
        session.commit()

        return driving1
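
A brief usage sketch for overriding the defaults (the parameter values and test name below are illustrative):

    def test_create_restricted_driving_dataset(self):
        # Illustrative caller passing custom JULES params and restricting access
        with session_scope(Session) as session:
            driving = self.create_driving_dataset(
                session,
                jules_params=DrivingDatasetJulesParams(dataperiod=10800, var_interps=8 * ["nb"]),
                is_restricted_to_admins=True)
            assert driving.name == "driving1"
            assert driving.is_restricted_to_admins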