import os

import mapnik

from ci.config import get_instance


def create_map(dir, roi_file):
    width = 800
    height = 800
    config = get_instance()
    lats = config.bbox["lats"]
    lons = config.bbox["lons"]

    # pad the configured bounding box by 1 degree on each side
    buff = 1
    bbox = mapnik.Box2d(lons[0] - buff, lats[0] - buff, lons[1] + buff, lats[1] + buff)

    map = mapnik.Map(width, height)
    plot_data_dir = os.path.join(os.path.dirname(__file__), "plot_data")
    mapnik.load_map(map, os.path.join(plot_data_dir, "ci_test.xml"))

    # ROI predictions come from a CSV datasource rendered as a separate layer;
    # create_roi_output_style is assumed to be defined elsewhere in this module
    roi_csv_file = os.path.join(os.path.dirname(__file__), dir, roi_file)
    roi_csv_ds = mapnik.CSV(file=roi_csv_file)
    roi_layer = mapnik.Layer("roi_pred")
    roi_layer.datasource = roi_csv_ds
    roi_layer_style = create_roi_output_style("prediction")
    map.append_style("roi_layer_style", roi_layer_style)
    roi_layer.styles.append("roi_layer_style")
    map.layers.append(roi_layer)

    map.zoom_to_box(bbox)
    mapnik.render_to_file(map, os.path.join(dir, '%s.png' % roi_file), 'png')
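# Usage sketch for create_map with hypothetical arguments: "output" and
# "roi_pred.csv" are illustrative names, not taken from the repo; the
# directory and CSV must already exist, and plot_data/ci_test.xml must ship
# alongside this module.
if __name__ == "__main__":
    create_map("output", "roi_pred.csv")  # writes output/roi_pred.csv.png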
import gzip
import os
from datetime import datetime, timedelta

from ci.models.gdal_raster import GDALRaster
from ci.util.nc_file_helper import nc_get_1d_vars_as_list
from ci.ingest import config, base_ingestor, proj_helper
from ci.config import get_instance
from ci.db.pgdbhelper import PGDbHelper
from ci.util.common import TimeMe

# candidate tile block sizes: (20, 20), (40, 40), ..., (980, 980)
block_sizes = []
for i in range(1, 50):
    block_sizes.append((i * 20, i * 20))

conf = get_instance()
pgdb_helper = PGDbHelper(conn_str=conf.pgsql_conn_str(), echo=conf.logsql)


def cb(x):
    # threshold callback: reflectivity below 35 dBZ maps to 0, else 1
    if x < 35:
        return 0
    else:
        return 1


def process_mrms_file(mrms_file):
    provider_name = "MRMS"
    variable_name = "REFL"
    ext_parts = os.path.splitext(mrms_file)
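# Quick sanity check of the cb threshold callback above, with illustrative
# values: reflectivity below 35 maps to 0, everything at or above maps to 1.
assert cb(20) == 0
assert cb(35) == 1
assert cb(50.5) == 1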
import uuid
from datetime import timedelta, datetime

from ci.db.pgdbhelper import PGDbHelper
from ci.config import get_instance
from ci.util.proj_helper import ProjHelper
from ci.models.spatial_reference import SRID_RAP

__author__ = 'ujjwal'

forecast_time = datetime(year=2014, month=8, day=7, hour=20, minute=30, second=0)
forecast_times = [forecast_time]

#get accessor to the old db
config = get_instance()
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=config.logsql)
proj_helper = ProjHelper(config=config)
logger = config.logger

radius = config.ci_roi_radius
mask_name = config.mask_name

#create table
pgdb_helper.submit("drop table if exists forecast_roi_geoms;")
pgdb_helper.submit(
    """
    create table forecast_roi_geoms
    (
        id serial not null,
        roi_name text not null,
def config():
    from ci.config import get_instance

    conf = get_instance()
    # serialize the configured ingest window as JSON; json, Response and
    # new_alchemy_encoder are assumed imported at module level
    data = {"start_date": conf.start_date, "end_date": conf.end_date}
    obj = json.dumps(data, cls=new_alchemy_encoder())
    return Response(obj, mimetype="application/json")
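# Minimal wiring sketch, assuming the surrounding module builds a Flask app:
# the app object and the "/config" route path are illustrative, not taken
# from the repo. Hitting the view with Flask's test client should return the
# configured ingest window as JSON.
from flask import Flask

app = Flask(__name__)
app.add_url_rule("/config", view_func=config)

with app.test_client() as client:
    print(client.get("/config").data)  # {"start_date": "...", "end_date": "..."}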
from datetime import timedelta

from ci.config import get_instance
# Downloader is assumed imported from the project's downloader module


class RAPRUCDownload(Downloader):
    def __init__(self, origin_url, destination_dir):
        super(RAPRUCDownload, self).__init__(origin_url=origin_url, destination_dir=destination_dir)

    def construct_file_urls(self, start_dtime, end_dtime):
        dtime = start_dtime
        # one GRIB2 file per hour, e.g.
        # http://nomads.ncdc.noaa.gov/data/rucanl/201407/20140722/rap_130_20140722_0200_001.grb2
        while dtime <= end_dtime:
            str1 = dtime.strftime("%Y%m")
            str2 = dtime.strftime("%Y%m%d")
            fname = dtime.strftime("rap_130_%Y%m%d_%H00_001.grb2")
            url = "%s/%s/%s/%s" % (base_url, str1, str2, fname)
            self._file_urls.append(url)
            dtime = dtime + timedelta(hours=1)


base_url = 'http://nomads.ncdc.noaa.gov/data/rucanl'

config = get_instance(config_file=None)
destination_dir = config.datadir + "/ruc"

start_dtime = config.start_date  # datetime(year=2014, month=7, day=22, hour=0, minute=0)
end_dtime = config.end_date  # datetime(year=2014, month=7, day=22, hour=5, minute=0)

downloader = RAPRUCDownload(origin_url=base_url, destination_dir=destination_dir)
downloader.construct_file_urls(start_dtime=start_dtime, end_dtime=end_dtime)
downloader.download()
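# Sanity check of the URL pattern for the sample timestamp shown in the
# comment inside construct_file_urls (2014-07-22 02:00 UTC); illustrative only.
from datetime import datetime as _datetime

_dt = _datetime(year=2014, month=7, day=22, hour=2)
print("%s/%s/%s/%s" % (base_url, _dt.strftime("%Y%m"), _dt.strftime("%Y%m%d"),
                       _dt.strftime("rap_130_%Y%m%d_%H00_001.grb2")))
# -> http://nomads.ncdc.noaa.gov/data/rucanl/201407/20140722/rap_130_20140722_0200_001.grb2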
from ci.config import get_instance
from ci.db.pgdbhelper import PGDbHelper
from ci.db.sqa.access import SqaAccess
from ci.db.sqa.mapper import Mapper
from ci.db.sqa.models import DataGranule, Variable, Provider, RasterTile
from sqlalchemy import create_engine

__author__ = 'ujjwal'

#config = get_instance()
config_file = "/home/ujjwal/DPR_SM/python/dpr_sm/ingest/lis_config.yml"
config = get_instance(config_file=config_file)
logger = config.logger

pgdb_helper = PGDbHelper(config.pgsql_conn_str(), echo=config.logsql)

engine = create_engine(config.sqa_connection_string())
mapper = Mapper(engine=engine)
mapper.map_tables()


def get_granules(var_name, start_date, end_date):
    # find RAP granules of the given variable whose time range overlaps
    # [start_date, end_date); values are interpolated directly into the SQL,
    # so this is only safe for trusted, internally generated arguments
    sql = """
        select datagranule.id, datagranule.starttime, datagranule.endtime
        from datagranule
        join provider on provider.id = datagranule.provider_id
        join variable on variable.id = datagranule.variable_id
        where provider.name like 'RAP' and variable.name like '%s'
        and (('%s', '%s') overlaps (datagranule.starttime, datagranule.endtime))
        order by datagranule.starttime asc
    """ % (var_name, start_date, end_date)
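# Usage sketch for get_granules (illustrative variable name and time window;
# the remainder of the function, which executes the query via pgdb_helper, is
# truncated above):
# from datetime import datetime
# granules = get_granules("PRATE", datetime(2014, 8, 7, 18), datetime(2014, 8, 7, 20))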