def main(args):
    '''
    Entry point: calibrates projection data against baseline and
    historical data for every month of the year.
    '''
    (
        baseline_data_source,
        historical_data_source,
        projection_data_source,
        variable_name,
        baseline_start_date,
        baseline_end_date,
        projection_start_date,
        projection_end_date,
    ) = get_args(args)

    # Work in the project's standard variable name and units.
    measurement = transform.to_standard_variable_name(variable_name)
    units = transform.standard_units_from_measurement(measurement)

    climatedb.connect()

    for month in range(1, climatedb.MONTHS_PER_YEAR + 1):
        print('For month %d' % month)
        calibration.calibrate(
            baseline_data_source,
            historical_data_source,
            projection_data_source,
            measurement,
            units,
            baseline_start_date,
            baseline_end_date,
            projection_start_date,
            projection_end_date,
            month,
        )

    climatedb.close()
def main(args):
    '''
    Entry point: loads a geonames dump file into the climate database.
    '''
    filename = get_args(args)

    climatedb.connect()
    geonames.load_geonames(filename)
    climatedb.close()
def main(args):
    '''
    Entry point: reads a non-temporal dataset from a file, normalizes it,
    and stores it in the climate database.

    Returns 0 on success.
    '''
    input_file, variable_name, data_source, ignore_scale_factor = get_args(args)
    print(variable_name, data_source)

    lat_arr, lon_arr, units, data_arr = get_data_from_file(
        input_file, variable_name, ignore_scale_factor)

    # Normalize longitudes and map the variable to its standard name
    # before saving.
    lon_arr, data_arr = transform.normalize_longitudes(lon_arr, data_arr)
    measurement = transform.to_standard_variable_name(variable_name)

    climatedb.connect()
    climatedb.save_nontemporal_data(
        lat_arr, lon_arr, units, data_arr, measurement, data_source)
    climatedb.close()

    return 0
def main(args):
    '''
    Downloads the dataset files for the specified data source and variable,
    links or merges them into a single file under DATASET_DIR, and records
    the data source's citation in the database.

    Returns 0 on success, 1 when no datasets or files could be fetched.
    '''
    climatedb.connect()

    variable_name, data_source, frequency = get_args(args)
    print(data_source, variable_name)

    config = load_config()

    if data_source in config:
        # The data source is described directly in the config file.
        organisation, author, year, article_url = get_citation_from_config(
            config, data_source)
        file_url = get_file_url_from_config(config, data_source, variable_name)
        file_urls = [file_url]
    else:
        # Otherwise the data source is a "<model>.<scenario>" pair that is
        # resolved through the ESGF search API.
        model, scenario = data_source.split('.')
        search_url = config['esgf']['search']['url']
        project = config['esgf']['search']['project']
        variant = config['esgf']['search']['variant']
        datasets = search_esgf(search_url, project, variable_name, model,
                               scenario, frequency, variant)
        if not datasets:
            # Guard: an empty search result previously raised IndexError
            # on datasets[0] instead of reporting the problem.
            print('No datasets found', file=sys.stderr)
            return 1
        dataset_info = datasets[0]
        organisation = dataset_info['institution_id'][0]
        author, year, article_url = get_citation_from_url(
            dataset_info['citation_url'][0])
        thredds_url, thredds_id = get_thredds_url(dataset_info)
        file_urls = get_file_urls_from_thredds(thredds_url, thredds_id)

    file_paths = fetch_urls(file_urls)

    if not file_paths:
        print('No datasets found', file=sys.stderr)
        return 1

    # Split only on the LAST extension separator: paths routinely contain
    # more than one dot, and an unbounded rsplit() would then yield more
    # than two fields and raise ValueError on this unpacking.
    _file_base, ext = file_paths[0].rsplit(os.path.extsep, 1)

    output_file_path = os.path.join(
        DATASET_DIR, '%s-%s.%s' % (data_source, variable_name, ext))

    if os.path.exists(output_file_path):
        os.remove(output_file_path)

    if len(file_paths) == 1:
        # A single file can simply be symlinked into place.
        file_path = file_paths[0]
        os.symlink(os.path.basename(file_path), output_file_path)
    else:
        if ext == 'nc':
            # Concatenate the NetCDF files into one nc4 output file.
            cdo.Cdo().selall(input=sorted(file_paths),
                             output=output_file_path,
                             options='-f nc4')
        else:
            # Multiple non-NetCDF files cannot be merged; warn instead of
            # silently recording a data source with no output file.
            print('Cannot merge %d files with extension "%s"'
                  % (len(file_paths), ext), file=sys.stderr)

    baseline = (config['baseline'] == data_source)
    create_data_source(data_source, organisation, author, year, article_url,
                       baseline)

    return 0
def main(args):
    '''
    Entry point: generates contour tiles for the yearly mean and for each
    three-month season of the requested dataset.
    '''
    data_source, variable_name, start_date, end_date, calibrated = get_args(args)

    measurement = transform.to_standard_variable_name(variable_name)
    units = transform.standard_units_from_measurement(measurement)

    climatedb.connect()

    unit_id = climatedb.fetch_unit(units)['id']
    measurement_id = climatedb.fetch_measurement(measurement)['id']
    data_source_record = climatedb.fetch_data_source(data_source)
    dataset = climatedb.fetch_dataset(
        data_source_record['id'],
        measurement_id,
        unit_id,
        start_date,
        end_date,
        calibrated=bool(calibrated)
    )

    # Whole-year tiles from the dataset mean.
    print(data_source, measurement, start_date.year, end_date.year, 'year')
    lat_arr, lon_arr, normals = climatedb.fetch_normals_from_dataset_mean(dataset)

    # Project geographic coordinates once; they are reused for every season.
    projected_y_arr = geo.lat2y(lat_arr)
    projected_x_arr = geo.lon2x(lon_arr)

    output_folder = tiling.tile_folder(data_source, variable_name,
                                       start_date, end_date)
    tiling.save_contour_tiles(
        projected_y_arr,
        projected_x_arr,
        measurement,
        units,
        normals,
        output_folder,
        data_source_record['id']
    )

    # Seasonal tiles: each season starts in Dec, Mar, Jun, or Sep and
    # averages three consecutive months.
    for start_month in (12, 3, 6, 9):
        months = start_month, (start_month + 1) % 12, (start_month + 2) % 12
        print(data_source, measurement, start_date.year, end_date.year, months)

        seasonal_sum = None
        for month in months:
            lat_arr, lon_arr, normals = \
                climatedb.fetch_normals_from_dataset(dataset, month)
            if seasonal_sum is None:
                seasonal_sum = normals.copy()
            else:
                seasonal_sum = seasonal_sum + normals

        seasonal_mean = seasonal_sum / len(months)

        output_folder = tiling.tile_folder(data_source, variable_name,
                                           start_date, end_date, months)
        tiling.save_contour_tiles(
            projected_y_arr,
            projected_x_arr,
            measurement,
            units,
            seasonal_mean,
            output_folder,
            data_source_record['id']
        )

    climatedb.close()
def main(args):
    '''
    Extracts monthly climate normals from the input datasets and stores
    them in the database for the specified data source.

    Returns 0 on success.
    '''
    input_files, variable_name, start_year, end_year, data_source = get_args(
        args)

    def get_normals_function(month, start_time, end_time):
        '''
        Returns a function that extracts normals for one input file,
        dispatching on the detected file format.
        '''
        def get_normals(input_file):
            input_fmt = get_input_fmt(input_file)
            if input_fmt == 'nc':
                return transform.normals_from_netcdf4(input_file,
                                                      variable_name,
                                                      start_time,
                                                      end_time,
                                                      month)
            elif input_fmt in ('tif', 'bil'):
                return transform.normals_from_geotiff(input_file, input_fmt)
            elif input_fmt == 'folder':
                return transform.normals_from_folder(input_file,
                                                     variable_name,
                                                     month)
            else:
                raise Exception('Unexpected input format "%s"' % input_fmt)

        return get_normals

    climatedb.connect()

    for month in range(1, climatedb.MONTHS_PER_YEAR + 1):
        print(data_source, variable_name, start_year, end_year, month)

        # NOTE(review): the original guarded on `month > 0` with a
        # whole-year fallback, but month is always >= 1 inside this loop,
        # so the fallback branch was unreachable and has been removed.
        start_time = datetime(start_year, month, 1)

        # This sets the end time to the last second of the last day of the
        # specified month in the specified end year; the div/mod by 12
        # rolls December over into January of the following year.
        # Credit to https://stackoverflow.com/a/4131114
        end_time = datetime(end_year + month // 12,
                            month % 12 + 1,
                            1) - timedelta(seconds=1)

        lat_arr, lon_arr, units, normals = \
            transform.aggregate_normals(
                input_files,
                get_normals_function(month, start_time, end_time))

        units, normals = transform.data_to_standard_units(
            units, normals, month)
        lon_arr, normals = transform.normalize_longitudes(lon_arr, normals)
        normals = pack.pack_array(normals)
        measurement = transform.to_standard_variable_name(variable_name)

        climatedb.save_normals(lat_arr, lon_arr, units, normals, measurement,
                               start_time, end_time, month, data_source)

    climatedb.close()

    return 0
# API for serving climate tiles # # Copyright (c) 2020 Carlos Torchia # from flask import Flask from flask import jsonify import os import tiling import climatedb app = Flask(__name__) climatedb.connect() ALLOWED_MEASUREMENTS = climatedb.fetch_measurements() climatedb.close() ALLOWED_PERIODS = [ '12_01_02', '03_04_05', '06_07_08', '09_10_11', 'year', ] @app.route( '/climate/<string:data_source>/<int:start_year>-<int:end_year>/<string:measurement>-<string:period>/<int:zoom_level>/<int:x>/<int:y>.<string:ext>' )
def before():
    '''
    Opens the climate database connection so it is available to the
    request handler.
    '''
    climatedb.connect()