def updateSimpleDepthTime(self):
    '''
    Read the time series of depth values for this activity, simplify it
    and insert the values in the SimpleDepthTime table that is related
    to the Activity.
    '''
    measurements = m.Measurement.objects.using(self.dbAlias).filter(
        instantpoint__activity=self.activity)
    for meas in measurements:
        ems = 1000 * to_udunits(meas.instantpoint.timevalue,
                                'seconds since 1970-01-01')
        d = float(meas.depth)
        m.SimpleDepthTime.objects.using(self.dbAlias).create(
            activity=self.activity, instantpoint=meas.instantpoint,
            depth=d, epochmilliseconds=ems)
    logger.info('Inserted %d values into SimpleDepthTime', len(measurements))
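# A minimal standalone sketch (stdlib only) of the epoch-milliseconds
# conversion performed above. to_udunits itself comes from the udunits
# helper library this module imports; this is just an equivalent
# illustration for naive UTC datetimes.
from datetime import datetime

_EPOCH = datetime(1970, 1, 1)

def epoch_milliseconds(timevalue):
    # Same result as 1000 * to_udunits(timevalue, 'seconds since 1970-01-01')
    return 1000 * (timevalue - _EPOCH).total_seconds()

print epoch_milliseconds(datetime(2010, 6, 1, 12, 0, 0))  # 1275393600000.0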
def select_profiles(indexurl, begin=None, end=None, urcrnrlat=None,
                    urcrnrlon=None, llcrnrlat=None, llcrnrlon=None):
    index = open_url(indexurl)
    conds = []
    if begin:
        # Only convert and apply the time filter when a begin time was
        # given; converting None with to_udunits would fail.
        begin = to_udunits(begin, index.sequence.time.units)
        end = to_udunits(end, index.sequence.time.units)
        c = (index.sequence.time > begin) & (index.sequence.time < end)
        conds.append(c)
    if urcrnrlat:
        c = ((index.sequence.latitude > llcrnrlat) &
             (index.sequence.latitude < urcrnrlat) &
             (index.sequence.longitude > llcrnrlon) &
             (index.sequence.longitude < urcrnrlon))
        conds.append(c)
    prof_ids = index.sequence[reduce(operator.__and__, conds)]
    return prof_ids
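# Hypothetical usage sketch for select_profiles(): the index URL and the
# bounding box below are made up for illustration. The index is expected to
# be a DAP sequence with time/latitude/longitude columns, as opened above
# with open_url.
from datetime import datetime

prof_ids = select_profiles(
    'http://example.com/argo/index.dods',   # hypothetical index URL
    begin=datetime(2008, 1, 1),
    end=datetime(2008, 2, 1),
    llcrnrlat=30.0, urcrnrlat=40.0,         # lat/lon box, lower-left and
    llcrnrlon=-130.0, urcrnrlon=-120.0)     # upper-right corners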
# read ctls files = glob.glob("*.ctl") n = len(files) bounds = [] data = [] for i, filename in enumerate(files): f = ctler.CTLReader(filename) values = f.variables[variable["name in BMGCS"].upper()][:] data.append(values * adjust_units(variable["units in BMGCS"], variable["desired units"])) bounds.append(f.variables["time"][0]) data = np.concatenate(data, axis=0) dt = bounds[1] - bounds[0] # bounds.insert(0, bounds[0] - dt ) bounds.append(bounds[-1] + dt) bounds = np.array([to_udunits(v, TIME) for v in bounds]) time = (bounds[:-1] + bounds[1:]) / 2.0 levels = f.variables["levels"][:] * 100.0 cell_methods = variable["cell_methods"] or "" if re.search("3hr", variable["table"]): if not re.match("time:\s*mean", cell_methods): bounds = None elif re.search("6hr", variable["table"]): if re.match("time:\s*mean", cell_methods): data = average_data(data, 2) time = average_data(time, 2) bounds = bounds[::2] else: data = data[::2] # sample every 6hr time = time[::2]
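# Toy illustration (made-up numbers) of the bounds/midpoint arithmetic
# above: with per-file start times of 0, 3 and 6 hours, dt = 3, so one
# extra bound is appended and each cell midpoint falls halfway between
# consecutive bounds.
import numpy as np

toy_bounds = [0.0, 3.0, 6.0]
toy_dt = toy_bounds[1] - toy_bounds[0]        # 3.0
toy_bounds.append(toy_bounds[-1] + toy_dt)    # [0.0, 3.0, 6.0, 9.0]
toy_bounds = np.array(toy_bounds)
toy_time = (toy_bounds[:-1] + toy_bounds[1:]) / 2.0
print toy_time                                # [ 1.5  4.5  7.5]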
air_2m_mon_mean_url = monthly_monolevel + "air.2m.mon.mean.nc"
precip_rate_url = dods + "gaussian/monolevel/prate.2008.nc"
precip_month_url = dods + "Monthlies/gaussian/monolevel/prate.mon.mean.nc"

#dataset = open_url(air_2m_mon_mean_url)
dataset = open_url(precip_month_url)
varname = "prate"
missing = 32766
secs_per_month = 2592000

print dataset.keys()

alllats = dataset.lat[:]
alllons = dataset.lon[:]

first = to_udunits(firstday, dataset.time.units)
last = to_udunits(lastday, dataset.time.units)
interval = ((first <= dataset.time) & (dataset.time <= last))

def date_str(time):
    date = from_udunits(time, dataset.time.units.replace('GMT', '+0:00'))
    return '%d-%02d-%02d' % (date.year, date.month, date.day)

def code(n):
    if n < 10:
        return chr(ord('0') + n)
    else:
        return chr(ord('A') + n - 10)
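# Quick check of the code() digit encoding defined above: 0-9 map to
# '0'-'9' and 10 upwards map to 'A', 'B', ... (base-36-style digits).
print ''.join(code(n) for n in range(16))  # 0123456789ABCDEF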
def main():
    form = cgi.FieldStorage()

    tqx = {}
    tqx['reqId'] = 0
    tqx['out'] = 'json'
    try:
        tqxa = form["tqx"].value.split(';')
        for kv in tqxa:
            (k, v) = kv.split(':')
            tqx[k] = v
    except:
        pass

    month_start = 0
    try:
        lat = float(form["lat"].value)
        lng = float(form["lng"].value)
    except:
        warn("lat and lng must be floats.")
        return
    try:
        year_start = int(form["yr0"].value)
    except:
        year_start = default_year_start
    try:
        year_end = int(form["yr1"].value)
    except:
        year_end = date.today().year
    try:
        month_start = int(form["mo"].value)
    except:
        pass
    try:
        month_start = int(form["mo0"].value)
    except:
        pass
    try:
        month_end = int(form["mo1"].value)
    except:
        month_end = 12

    try:
        q = form["q"].value
        config = months20cr[q]
        if month_start == 0:
            month_start = 1
            skip = 1
        else:
            skip = 12
    except:
        try:
            fi = form["fi"].value
            config = {}
            config['url'] = ("http://www.esrl.noaa.gov/psd/thredds/dodsC/"
                             "Datasets20thC_ReanV2/gaussian/monolevel/" +
                             fi + "." + str(year_start) + ".nc")
            config['url2'] = ("http://www.esrl.noaa.gov/psd/thredds/dodsC/"
                              "Datasets20thC_ReanV2/gaussian/monolevel/" +
                              fi + "." + str(year_end) + ".nc")
            config['var'] = fi.split(".")[0]
            config['en'] = fi
            config['convert'] = lambda data: data[:]
            skip = 1
        except:
            k = months20cr.keys()
            warn("q must be one of " + str(k))
            return

    varname = config['var']
    firstday = datetime(year_start, month_start, 1, tzinfo=UTC())
    if month_end == 12:
        lastday = datetime(year_end, 12, 31, tzinfo=UTC())
    else:
        lastday = datetime(year_end, month_end + 1, 1, tzinfo=UTC())

    outdata = []
    # At present this code has 1 or 2 urls, but it could easily be extended
    # to cope with more: just add them to the list 'urls', making sure
    # they're in the correct order, oldest data first.
    urls = [config['url']]
    try:
        urls.append(config['url2'])
    except:
        pass
    for dataurl in urls:
        dataset = open_url(dataurl)
        first = to_udunits(firstday, dataset.time.units)
        last = to_udunits(lastday, dataset.time.units)
        (x, y) = toXY(lat, lng)
        # Note one or other (or both) of the start or end times could be
        # outside the range of a given dataset; that's fine and no problems
        # will result.
        a = dataset[varname][(first <= dataset.time) &
                             (dataset.time <= last), y, x]
        seq = a.array[::skip]
        times = a.time[::skip]
        latitude = dataset['lat'][y]
        longitude = dataset['lon'][x]
        missing = dataset[varname].missing_value
        data = numpy.select([seq == missing], [None], default=seq)
        data = (data * dataset[varname].scale_factor +
                dataset[varname].add_offset)
        # Get the values and loop over them, inserting into the outdata
        # list; csv and json require slightly different treatment.
        values = config['convert'](data).astype('float').tolist()
        if tqx['out'] == 'csv':
            # See comment below: for CSV the date is rendered as a string.
            for t, v in zip(times, values):
                outdata.append({"date": str(dDate(t, dataset)), "value": v})
        else:
            for t, v in zip(times, values):
                outdata.append({"date": dDate(t, dataset), "value": v})

    # Loading it into gviz_api.DataTable.
    # Default output is 'json'. Ideally this is specified as tqx=out:json,
    # but even when it isn't, that's what is produced. For CSV the date and
    # time is presented as a string; otherwise it would be a javascript
    # "new Date()" object.
    if tqx['out'] == 'csv':
        description = {"date": ("string", "Date"),
                       "value": ("number", config['en'])}
    else:
        description = {"date": ("datetime", "Date"),
                       "value": ("number", config['en'])}
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(outdata)

    if tqx['out'] == 'csv':
        print 'Content-type: text/plain\n'
        csv = data_table.ToCsv(columns_order=("date", "value"),
                               order_by="date", separator=",")
        print csv
    else:
        json = data_table.ToJSonResponse(columns_order=("date", "value"),
                                         order_by="date",
                                         req_id=tqx['reqId'])
        print 'Content-type: text/plain\n'
        print json
    return
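# A minimal standalone sketch of the gviz_api pattern used in main(), with
# made-up rows; the description follows the same type scheme (string or
# datetime for "date", number for "value").
import gviz_api

sketch_description = {"date": ("string", "Date"),
                      "value": ("number", "prate")}
sketch_table = gviz_api.DataTable(sketch_description)
sketch_table.LoadData([{"date": "2008-01-01", "value": 1.5},
                       {"date": "2008-02-01", "value": 2.0}])
print sketch_table.ToCsv(columns_order=("date", "value"), order_by="date")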
def main():
    form = cgi.FieldStorage()
    try:
        x = int(form["x"].value)
        y = int(form["y"].value)
    except:
        returnJson({"msg": "x and y must be integers"})
        return
    try:
        q = form["q"].value
        config = months20cr[q]
    except:
        k = months20cr.keys()
        returnJson({"msg": "q must be one of " + str(k)})
        return
    try:
        year_start = int(form["yr0"].value)
    except:
        year_start = default_year_start
    try:
        year_end = int(form["yr1"].value)
    except:
        year_end = date.today().year
    try:
        month = int(form["mo"].value)
    except:
        month = 0  # i.e. default is all months
    try:
        callback = form["callback"].value
    except:
        callback = None

    dataset = open_url(config['url'])
    varname = config['var']
    if month != 0:
        month_start = month
        skip = 12
    else:
        month_start = 1
        skip = 1
    firstday = datetime(year_start, month_start, 1, tzinfo=UTC())
    lastday = datetime(year_end, 12, 31, tzinfo=UTC())
    first = to_udunits(firstday, dataset.time.units)
    last = to_udunits(lastday, dataset.time.units)
    interval = ((first <= dataset.time) & (dataset.time <= last))

    rainrecs = []
    a = dataset[varname][interval, y, x]
    seq = a.array[::skip]
    times = a.time[::skip]
    #latitude = a.lat
    #longitude = a.lon
    latitude = dataset['lat'][y]
    longitude = dataset['lon'][x]
    missing = dataset[varname].missing_value
    data = numpy.select([seq == missing], [None], default=seq)
    data = (data * dataset[varname].scale_factor +
            dataset[varname].add_offset)
    values = config['convert'](data).astype('float').tolist()
    plot = []
    for t, v in zip(times, values):
        plot.append([udDate(t, dataset), v])

    report = {}
    report['attributes'] = dataset.attributes
    report['data-url'] = config['url']
    report['data-en'] = config['en']
    report['lat'] = latitude.astype('float').tolist()
    report['lon'] = longitude.astype('float').tolist()
    report['data'] = plot
    returnJson(report, callback)
    return
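# returnJson is this script's helper and is not shown here; the following
# is a hypothetical sketch of what it plausibly does, assuming JSONP
# wrapping when a callback parameter is supplied.
import json

def returnJson_sketch(obj, callback=None):
    print 'Content-type: text/plain\n'
    body = json.dumps(obj)
    if callback:
        body = callback + '(' + body + ')'
    print body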