def getFormData(self):
    """Returns an ordered list of (choice label, choice value) for display to the user."""
    # Entries that are always displayed.
    rows = [('Dataset Category', self.dataset_category),
            ('Dataset', self.dataset)]
    add = rows.append
    if hasText(self.variable):
        add(('Variable', self.variable))
    if hasText(self.geometry):
        add(('Shape Type', self.geometry))
    if self.geometry_id is not None and len(self.geometry_id) > 0:
        # geometry_id is a comma-separated string; insert a space after commas for display
        add(('Shape Geometry', self.geometry_id.replace(",", ", ")))
    # Optional coordinate fields, shown only when they contain text.
    for label, value in (('Latitude Minimum', self.latmin),
                         ('Latitude Maximum', self.latmax),
                         ('Longitude Minimum', self.lonmin),
                         ('Longitude Maximum', self.lonmax),
                         ('Latitude', self.lat),
                         ('Longitude', self.lon)):
        if hasText(value):
            add((label, value))
    add(('Start Date Time', self.datetime_start))
    add(('Stop Date Time', self.datetime_stop))
    if self.timeregion_month is not None and len(self.timeregion_month) > 0:
        # month selection is stored as a comma-separated string of integers
        add(('Time Region: Months',
             get_month_string(map(int, self.timeregion_month.split(",")))))
    if self.timeregion_year is not None and hasText(self.timeregion_year):
        add(('Time Region: Years', self.timeregion_year))
    # Calculation and its up-to-three keyword parameters, shown when non-empty.
    for label, value in (('Calculation', self.calc),
                         ('Calculation Parameter 1', self.par1),
                         ('Calculation Parameter 2', self.par2),
                         ('Calculation Parameter 3', self.par3)):
        if value is not None and value != '':
            add((label, value))
    if self.calc_group is not None and len(self.calc_group) > 0:
        add(('Calculation Group', self.calc_group.replace(",", ", ")))
    add(('Calculate Raw?', self.calc_raw))
    # Choice fields are mapped from stored keys to their display labels.
    add(('Spatial Operation', ocgisChoices(Config.SPATIAL_OPERATION)[self.spatial_operation]))
    add(('Aggregate', self.aggregate))
    add(('Output Format', ocgisChoices(Config.OUTPUT_FORMAT)[self.output_format]))
    add(('File Output Prefix', self.prefix))
    add(('Include Auxiliary Files', self.with_auxiliary_files))
    return rows
def clean(self):
    """Validate the calculation selection, its keyword parameters and the output prefix.

    Records problems in self._errors keyed by field name and returns the
    (possibly updated) cleaned data, per the Django Form.clean() contract.
    """
    # invoke superclass cleaning method
    super(ClimateTranslatorForm2, self).clean()
    # validate calculation
    if 'calc' in self.cleaned_data and hasText(self.cleaned_data['calc']) and self.cleaned_data['calc'].lower() != 'none':
        # calculation group must be selected
        if len(self.cleaned_data['calc_group']) == 0:
            self._errors["calc_group"] = self.error_class(["Calculation Group(s) not selected."])
        # validate keyword values against the calculation's declared keywords
        func = self.cleaned_data['calc']
        calc = ocgisCalculations.getCalc(func)
        if 'keywords' in calc:
            for i, keyword in enumerate(calc["keywords"]):
                parN = 'par%s' % (i + 1)
                # A field that failed its own validation is absent from
                # cleaned_data, so use .get() to avoid a KeyError here.
                value = self.cleaned_data.get(parN)
                if keyword["type"] == "float":
                    try:
                        float(value)
                    # TypeError covers value=None (missing/invalid field)
                    except (TypeError, ValueError):
                        self._errors[parN] = self.error_class(["Invalid float value for keyword: " + keyword["name"]])
                elif keyword["type"] == "string":
                    if "values" in keyword:
                        # guard against None before calling .lower()
                        if value is None or value.lower() not in keyword["values"]:
                            self._errors[parN] = self.error_class(["Invalid string value for keyword: " + keyword["name"]])
    # if no calculation is selected, there cannot be any temporal grouping
    else:
        self.cleaned_data['calc_group'] = None
    if 'prefix' in self.cleaned_data and re.search(INVALID_CHARS, self.cleaned_data['prefix']):
        self._errors['prefix'] = self.error_class(["The prefix contains invalid characters."])
    if not self.is_valid():
        print('VALIDATION ERRORS: %s' % self.errors)
    # return cleaned data
    return self.cleaned_data
def clean(self):
    """Cross-field validation for the first wizard form.

    Checks, in order: dataset/variable selection, that exactly one geometry
    kind (shape, bounding box or point) is chosen and complete, time range
    consistency, and the year/month "time region" syntax, including that the
    time range contains the time region.  Errors are recorded in self._errors
    keyed by field name; returns self.cleaned_data (Django Form.clean contract).
    """
    # invoke superclass cleaning method
    super(OpenClimateGisForm1, self).clean()
    # validate data selection: a 'datasets'-type entry requires a variable choice
    if 'dataset_category' in self.cleaned_data and hasText(self.cleaned_data['dataset_category']):
        if 'dataset' in self.cleaned_data and hasText(self.cleaned_data['dataset']):
            jsonData = ocgisDatasets.datasets[self.cleaned_data['dataset_category']][self.cleaned_data['dataset']]
            if jsonData['type'] == 'datasets':
                if not 'variable' in self.cleaned_data or not hasText(self.cleaned_data['variable']):
                    self._errors["variable"] = self.error_class(["A variable must be selected when the dataset is not a package."])
    # validate geometry: count how many of the three geometry kinds were supplied
    ngeometries = 0
    geometry = None
    if (hasText(self.cleaned_data['geometry']) or len(self.cleaned_data['geometry_id']) ):
        ngeometries += 1
        geometry = 'shape'
    # NOTE: invalid float values in form result in cleaned_data not being populated for that key
    if ( ('latmin' in self.cleaned_data and hasText(self.cleaned_data['latmin']))
         or ('latmax' in self.cleaned_data and hasText(self.cleaned_data['latmax']))
         or ('lonmin' in self.cleaned_data and hasText(self.cleaned_data['lonmin']))
         or ('lonmax' in self.cleaned_data and hasText(self.cleaned_data['lonmax'])) ):
        ngeometries += 1
        geometry = 'box'
    if ( ('lat' in self.cleaned_data and hasText(self.cleaned_data['lat']))
         or ('lon' in self.cleaned_data and hasText(self.cleaned_data['lon'])) ):
        ngeometries += 1
        geometry = 'point'
    if ngeometries > 1:
        self._errors["geometry"] = self.error_class(["Please choose only one geometry: shape, bounding box or point."])
    elif ngeometries == 1:
        # exactly one geometry kind: verify it is complete
        if geometry == 'shape':
            # NOTE(review): this "shape type" error is attached to the
            # 'geometry_id' key (and may be overwritten by the next check) —
            # possibly it was meant for the 'geometry' key; confirm intent.
            if not hasText(self.cleaned_data['geometry']):
                self._errors["geometry_id"] = self.error_class(["Please select a shape type."])
            if len(self.cleaned_data['geometry_id']) == 0:
                self._errors["geometry_id"] = self.error_class(["Please select a shape geometry."])
        elif geometry == 'box':
            # all four box edges must be present and non-empty
            if ( not 'latmin' in self.cleaned_data or not hasText(self.cleaned_data['latmin'])
                 or not 'latmax' in self.cleaned_data or not hasText(self.cleaned_data['latmax'])
                 or not 'lonmin' in self.cleaned_data or not hasText(self.cleaned_data['lonmin'])
                 or not 'lonmax' in self.cleaned_data or not hasText(self.cleaned_data['lonmax']) ):
                self._errors["latmin"] = self.error_class(["Invalid bounding box latitude or longitude values."])
        elif geometry == 'point':
            if not 'lat' in self.cleaned_data or not hasText(self.cleaned_data['lat']):
                self._errors["lat"] = self.error_class(["Invalid point latitude."])
            if not 'lon' in self.cleaned_data or not hasText(self.cleaned_data['lon']):
                self._errors["lon"] = self.error_class(["Invalid point longitude."])
    # validate time range: start/stop must be supplied together, start <= stop
    datetime_start = None
    datetime_stop = None
    if 'datetime_start' in self.cleaned_data and hasText(self.cleaned_data['datetime_start']):
        datetime_start = self.cleaned_data['datetime_start']
    if 'datetime_stop' in self.cleaned_data and hasText(self.cleaned_data['datetime_stop']):
        datetime_stop = self.cleaned_data['datetime_stop']
    if datetime_start is not None and datetime_stop is None:
        self._errors["datetime_stop"] = self.error_class(["Invalid value for 'Time Range Stop'."])
    if datetime_start is None and datetime_stop is not None:
        self._errors["datetime_start"] = self.error_class(["Invalid value for 'Time Range Start'."])
    if datetime_start is not None and datetime_stop is not None:
        if datetime_start > datetime_stop:
            self._errors["datetime_start"] = self.error_class(["'Time Range Start' must be less than 'Time Range Stop'."])
        time_range = [datetime_start, datetime_stop]
    else:
        time_range = None
    # validate years time region: either "YYYY-YYYY" range or comma-separated years
    time_region = {}
    if 'timeregion_year' in self.cleaned_data and hasText(self.cleaned_data['timeregion_year']):
        years = str(self.cleaned_data['timeregion_year'].replace(" ", ""))  # remove blanks
        # NOTE(review): pattern has no trailing '$', so text after "YYYY-YYYY"
        # is silently ignored — confirm whether that is intended.
        if re.match('^\d{4}-\d{4}', years):
            year1 = int(years[0:4])
            year2 = int(years[5:9])
            if year1 >= year2:
                self._errors["timeregion_year"] = self.error_class(["Invalid year selection: must be year1 < year2"])
            # inclusive range of years
            time_region['year'] = range(year1, year2+1)
        else:
            years = years.split(',')
            time_region['year'] = []
            for year in years:
                if not re.match('^\d{4}$', year):
                    self._errors["timeregion_year"] = self.error_class(["Invalid year selection"])
                    break
                time_region['year'].append( int(year) )
    # validate months time region
    if 'timeregion_month' in self.cleaned_data and len(self.cleaned_data['timeregion_month']) > 0:
        time_region['month'] = map(int, self.cleaned_data['timeregion_month'])
    # validate time range + time region: the range must contain the region
    if time_range is not None and len(time_region) > 0:
        if not validate_time_subset(time_range, time_region):
            self._errors["timeregion_year"] = self.error_class(["Time Range must contain Time Region."])
    if not self.is_valid():
        print 'VALIDATION ERRORS: %s' % self.errors
    # return cleaned data
    return self.cleaned_data
def _validateFields(self, field_names):
    """Flag every listed field as required when its cleaned value has no text."""
    for name in field_names:
        value = self.cleaned_data[name]
        if not hasText(value):
            self._errors[name] = self.error_class(["This field is required"])
def get_context_data(self, form, **kwargs):
    """Add step-specific template context for the climate-translator wizard.

    The first step receives the geometry choices, step "1" the calculation
    choices, and the final (confirmation) step a 'job_data' summary built
    from the cleaned data of all previous steps.
    """
    context = super(ClimateTranslatorWizard, self).get_context_data(form=form, **kwargs)
    # before rendering of first form: send data and geometry choices
    if self.steps.current == self.steps.first:
        #context.update({'datasets': json.dumps(ocgisDatasets.datasets) }) # FIXME ?
        context.update({'geometries': json.dumps(ocgisGeometries.geometries)})
    elif self.steps.current == "1":  # note: string type
        context.update({'calculations': json.dumps(ocgisCalculations.calcs)})
    # before very last view: create summary of user choices
    elif self.steps.current == self.steps.last:
        job_data = {}
        # retrieve form data for all previous views
        for step in self.steps.all:
            cleaned_data = self.get_cleaned_data_for_step(step)
            # steps without validated data (e.g. the current step) yield None
            if cleaned_data is None:
                continue
            # first form
            if step == '0':
                job_data['data_type'] = cleaned_data['data_type']
                if 'long_name' in cleaned_data:
                    job_data['long_name'] = cleaned_data['long_name']
                if 'time_frequency' in cleaned_data:
                    job_data['time_frequency'] = cleaned_data['time_frequency']
                if 'dataset_category' in cleaned_data:
                    job_data['dataset_category'] = cleaned_data['dataset_category']
                if 'dataset' in cleaned_data:
                    job_data['dataset'] = cleaned_data['dataset']
                if 'dataset_category2' in cleaned_data:
                    job_data['dataset_category2'] = cleaned_data['dataset_category2']
                if 'package_name' in cleaned_data:
                    job_data['package_name'] = cleaned_data['package_name']
                if 'geometry_category' in cleaned_data and hasText(cleaned_data['geometry_category']):
                    job_data['geometry_category'] = cleaned_data['geometry_category']
                if 'geometry_subcategory' in cleaned_data and hasText(cleaned_data['geometry_subcategory']):
                    job_data['geometry_subcategory'] = cleaned_data['geometry_subcategory']
                if 'geometry_id' in cleaned_data and len(cleaned_data['geometry_id']) > 0:
                    job_data['geometry_id'] = formatListForDisplay(cleaned_data['geometry_id'])
                # bounding box / point coordinates: coerce to float for display
                for key in ('latmin', 'latmax', 'lonmin', 'lonmax', 'lat', 'lon'):
                    if key in cleaned_data and cleaned_data[key] is not None:
                        job_data[key] = float(cleaned_data[key])
                if 'agg_selection' in cleaned_data:
                    job_data['agg_selection'] = bool(cleaned_data['agg_selection'])
                if 'datetime_start' in cleaned_data and cleaned_data['datetime_start'] is not None:
                    job_data['datetime_start'] = cleaned_data['datetime_start']
                if 'datetime_stop' in cleaned_data and cleaned_data['datetime_stop'] is not None:
                    job_data['datetime_stop'] = cleaned_data['datetime_stop']
                if 'timeregion_month' in cleaned_data and cleaned_data['timeregion_month'] is not None:
                    job_data['timeregion_month'] = get_month_string(cleaned_data['timeregion_month'])
                if 'timeregion_year' in cleaned_data and cleaned_data['timeregion_year'] is not None:
                    job_data['timeregion_year'] = cleaned_data['timeregion_year']
            # second form
            if step == '1':
                if 'calc' in cleaned_data and cleaned_data['calc'] is not None and cleaned_data['calc'] != '':
                    # show the calculation's display name, not its key
                    job_data['calc'] = ocgisCalculations.getCalc(cleaned_data['calc'])["name"]
                if 'par1' in cleaned_data and cleaned_data['par1'] is not None:
                    job_data['par1'] = cleaned_data['par1']
                if 'par2' in cleaned_data and cleaned_data['par2'] is not None:
                    job_data['par2'] = cleaned_data['par2']
                if 'par3' in cleaned_data and cleaned_data['par3'] is not None:
                    job_data['par3'] = cleaned_data['par3']
                if 'calc_group' in cleaned_data:
                    # map stored group keys to their display labels
                    calc_groups = []
                    for group in cleaned_data['calc_group']:
                        calc_groups.append(ocgisChoices(Config.CALCULATION_GROUP)[group])
                    job_data['calc_group'] = formatListForDisplay(calc_groups)
                if 'calc_raw' in cleaned_data:
                    job_data['calc_raw'] = bool(cleaned_data['calc_raw'])
                if 'spatial_operation' in cleaned_data:
                    job_data['spatial_operation'] = ocgisChoices(Config.SPATIAL_OPERATION)[cleaned_data['spatial_operation']]
                if 'aggregate' in cleaned_data:
                    job_data['aggregate'] = bool(cleaned_data['aggregate'])
                if 'output_format' in cleaned_data:
                    job_data['output_format'] = ocgisChoices(Config.OUTPUT_FORMAT)[cleaned_data['output_format']]
                if 'prefix' in cleaned_data:
                    job_data['prefix'] = cleaned_data['prefix']
                if 'with_auxiliary_files' in cleaned_data:
                    job_data['with_auxiliary_files'] = bool(cleaned_data['with_auxiliary_files'])
        context.update({'job_data': job_data})
    return context
def get_context_data(self, form, **kwargs):
    """Add step-specific template context for the OpenClimateGIS wizard.

    The first step receives the dataset and geometry choices, step "1" the
    calculation choices, and the final (confirmation) step a 'job_data'
    summary built from the cleaned data of all previous steps.
    """
    context = super(OpenClimateGisWizard, self).get_context_data(form=form, **kwargs)
    # before rendering of first form: send data and geometry choices
    if self.steps.current == self.steps.first:
        context.update({"datasets": json.dumps(ocgisDatasets.datasets)})
        context.update({"geometries": json.dumps(ocgisGeometries.geometries)})
    elif self.steps.current == "1":  # note: string type
        context.update({"calculations": json.dumps(ocgisCalculations.calcs)})
    # before very last view: create summary of user choices
    elif self.steps.current == self.steps.last:
        job_data = {}
        # retrieve form data for all previous views (skip the current one)
        for step in self.steps.all:
            if step != self.steps.current:
                cleaned_data = self.get_cleaned_data_for_step(step)
                if "dataset_category" in cleaned_data:
                    job_data["dataset_category"] = cleaned_data["dataset_category"]
                if "dataset" in cleaned_data:
                    job_data["dataset"] = cleaned_data["dataset"]
                if "variable" in cleaned_data:
                    job_data["variable"] = cleaned_data["variable"]
                if "geometry" in cleaned_data and hasText(cleaned_data["geometry"]):
                    job_data["geometry"] = cleaned_data["geometry"]
                if "geometry_id" in cleaned_data and len(cleaned_data["geometry_id"]) > 0:
                    job_data["geometry_id"] = formatListForDisplay(cleaned_data["geometry_id"])
                # bounding box / point coordinates: coerce to float for display
                for key in ("latmin", "latmax", "lonmin", "lonmax", "lat", "lon"):
                    if key in cleaned_data and cleaned_data[key] is not None:
                        job_data[key] = float(cleaned_data[key])
                if "datetime_start" in cleaned_data and cleaned_data["datetime_start"] is not None:
                    job_data["datetime_start"] = cleaned_data["datetime_start"]
                if "datetime_stop" in cleaned_data and cleaned_data["datetime_stop"] is not None:
                    job_data["datetime_stop"] = cleaned_data["datetime_stop"]
                if "timeregion_month" in cleaned_data and cleaned_data["timeregion_month"] is not None:
                    job_data["timeregion_month"] = get_month_string(cleaned_data["timeregion_month"])
                if "timeregion_year" in cleaned_data and cleaned_data["timeregion_year"] is not None:
                    job_data["timeregion_year"] = cleaned_data["timeregion_year"]
                if "calc" in cleaned_data and cleaned_data["calc"] is not None and cleaned_data["calc"] != "":
                    # show the calculation's display name, not its key
                    job_data["calc"] = ocgisCalculations.getCalc(cleaned_data["calc"])["name"]
                if "par1" in cleaned_data and cleaned_data["par1"] is not None:
                    job_data["par1"] = cleaned_data["par1"]
                if "par2" in cleaned_data and cleaned_data["par2"] is not None:
                    job_data["par2"] = cleaned_data["par2"]
                if "par3" in cleaned_data and cleaned_data["par3"] is not None:
                    job_data["par3"] = cleaned_data["par3"]
                if "calc_group" in cleaned_data:
                    # map stored group keys to their display labels
                    calc_groups = []
                    for group in cleaned_data["calc_group"]:
                        calc_groups.append(ocgisChoices(Config.CALCULATION_GROUP)[group])
                    job_data["calc_group"] = formatListForDisplay(calc_groups)
                if "calc_raw" in cleaned_data:
                    job_data["calc_raw"] = bool(cleaned_data["calc_raw"])
                if "spatial_operation" in cleaned_data:
                    job_data["spatial_operation"] = ocgisChoices(Config.SPATIAL_OPERATION)[
                        cleaned_data["spatial_operation"]
                    ]
                if "aggregate" in cleaned_data:
                    job_data["aggregate"] = bool(cleaned_data["aggregate"])
                if "output_format" in cleaned_data:
                    job_data["output_format"] = ocgisChoices(Config.OUTPUT_FORMAT)[cleaned_data["output_format"]]
                if "prefix" in cleaned_data:
                    job_data["prefix"] = cleaned_data["prefix"]
                if "with_auxiliary_files" in cleaned_data:
                    job_data["with_auxiliary_files"] = bool(cleaned_data["with_auxiliary_files"])
        context.update({"job_data": job_data})
    return context
def encodeArgs(self, openClimateGisJob):
    """Method to transform the OpenClimateGisJob instance into a dictionary of arguments passed on to the ocgis library."""
    args = {}
    # ocgis.RequestDataset(uri=None, variable=None, alias=None, time_range=None, time_region=None,
    #                      level_range=None, s_proj=None, t_units=None, t_calendar=None, did=None, meta=None)
    # retrieve dataset URIs, variable from configuration JSON data
    data_type = openClimateGisJob.data_type
    # NOTE(review): these list initializations are never filled (the combining
    # loop below is commented out) — presumably superseded by args['datasets'];
    # confirm whether downstream code still reads these keys.
    args['variable'] = []
    args['alias'] = []
    args['uri'] = []
    args['t_calendar'] = []
    args['t_units'] = []
    if data_type == 'variable':
        # Example:
        # {'metadata': {'description': {'long_name': u'<tdk>', 'dataset_category': u'<tdk>', 'dataset': u'<tdk>'},
        #               'time_range': [datetime.datetime(1971, 1, 1, 0, 0), datetime.datetime(2000, 12, 31, 0, 0)]},
        #  'dataset': [{'variable': u'tas', 'alias': u'tas', 't_calendar': u'standard',
        #               'uri': [u'/data/maurer/concatenated/Maurer02new_OBS_tas_daily.1971-2000.nc'],
        #               't_units': u'days since 1940-01-01 00:00:00'}]}
        dictionaries = self.ocgisDatasets.getDatasets(data_type, long_name=openClimateGisJob.long_name,
                                                      time_frequency=openClimateGisJob.time_frequency,
                                                      dataset_category=openClimateGisJob.dataset_category,
                                                      dataset=openClimateGisJob.dataset )
    elif data_type == 'package':
        # same structure as above, but 'dataset' may hold several variables
        dictionaries = self.ocgisDatasets.getDatasets(data_type, package_name=openClimateGisJob.package_name)
    args['datasets'] = dictionaries['dataset']
    # combine all dictionaries into lists
    #for dict in dictionaries:
    #    args['variable'].append( dict['variable'] )
    #    args['alias'].append( dict['alias'] )
    #    args['uri'].append( dict['uri'] )
    #    args['t_calendar'].append( dict['t_calendar'] )
    #    args['t_units'].append( dict['t_units'] )
    # class ocgis.OcgOperations(dataset=None, spatial_operation='intersects', geom=None,
    #                           aggregate=False, calc=None, calc_grouping=None, calc_raw=False,
    #                           abstraction='polygon', snippet=False, backend='ocg', prefix=None,
    #                           output_format='numpy', agg_selection=False, select_ugid=None,
    #                           vector_wrap=True, allow_empty=False, dir_output=None, slice=None,
    #                           file_only=False, headers=None)
    # geometry: exactly one of shape / bounding box / point (validated by the form)
    args['geom'] = None
    args['select_ugid'] = None
    if hasText(openClimateGisJob.geometry_category):
        args['geom'] = self.ocgisGeometries.getCategoryKey( openClimateGisJob.geometry_category )
        args['select_ugid'] = []
        # must transform back from string to list of integers
        for geom in openClimateGisJob.geometry_id.split(","):
            args['select_ugid'].append( self.ocgisGeometries.getGuid(openClimateGisJob.geometry_category,
                                                                     openClimateGisJob.geometry_subcategory,
                                                                     str(geom)))
    elif ( hasText(openClimateGisJob.latmin) and hasText(openClimateGisJob.latmax)
           and hasText(openClimateGisJob.lonmin) and hasText(openClimateGisJob.lonmax)):
        # bounding box: [lonmin, lonmax, latmin, latmax] order
        args['geom'] = [openClimateGisJob.lonmin, openClimateGisJob.lonmax,
                        openClimateGisJob.latmin, openClimateGisJob.latmax]
    elif hasText(openClimateGisJob.lat) and hasText(openClimateGisJob.lon):
        # single point: [lon, lat] order
        args['geom'] = [openClimateGisJob.lon, openClimateGisJob.lat]
    args['agg_selection'] = openClimateGisJob.agg_selection
    # time range: only set when both endpoints are present
    if openClimateGisJob.datetime_start is not None and openClimateGisJob.datetime_stop is not None:
        args['time_range'] = [openClimateGisJob.datetime_start, openClimateGisJob.datetime_stop]
    else:
        args['time_range'] = None
    # time region: optional month and/or year subsets, stored as comma-separated strings
    args['time_region'] = None
    if hasText(openClimateGisJob.timeregion_month) or hasText(openClimateGisJob.timeregion_year):
        args['time_region'] = { 'month':None, 'year':None }
        if hasText(openClimateGisJob.timeregion_month):
            args['time_region']['month'] = []
            for i in map(int, openClimateGisJob.timeregion_month.split(",")):
                args['time_region']['month'].append(i)
        if hasText(openClimateGisJob.timeregion_year):
            args['time_region']['year'] = []
            years = openClimateGisJob.timeregion_year.replace(" ","")
            # "YYYY-YYYY" expands to the inclusive range; otherwise comma-separated years
            # NOTE(review): pattern is not end-anchored, matching the form validation.
            if re.match('^\d{4}-\d{4}', years):
                year1 = int(years[0:4])
                year2 = int(years[5:9])
                args['time_region']['year'] = range(year1, year2+1)
            else:
                for i in map(int, openClimateGisJob.timeregion_year.split(",")):
                    args['time_region']['year'].append(i)
    # calculation: build the ocgis calc spec with typed keyword arguments
    args['calc'] = None
    if hasText(openClimateGisJob.calc) and openClimateGisJob.calc.lower() != 'none':
        calc = self.ocgisCalculations.getCalc(str(openClimateGisJob.calc))
        args['calc'] = [ {'func':str(calc["func"]), 'name':str(calc["name"])} ]
        args['calc'][0]['kwds'] = {}
        # loop over keywords in order
        if "keywords" in calc:
            for i, keyword in enumerate(calc["keywords"]):
                # only par1..par3 exist; NOTE(review): a calculation with more
                # than 3 keywords would silently reuse the previous 'val'
                if i==0: val = openClimateGisJob.par1
                elif i==1: val = openClimateGisJob.par2
                elif i==2: val = openClimateGisJob.par3
                if keyword["type"] == "float":
                    val = float(val)
                elif keyword["type"] == "string":
                    val = str(val).lower()
                args['calc'][0]['kwds'][str(keyword[u'name'])] = val
        args['headers'] = HEADERS_CALC
    else:
        args['headers'] = HEADERS_NOCALC
    args['calc_raw'] = openClimateGisJob.calc_raw
    # calc_grouping: comma-separated string back to a list of plain strings
    if hasText(openClimateGisJob.calc_group):
        args['calc_grouping'] = map(str, openClimateGisJob.calc_group.split(","))
    else:
        args['calc_grouping'] = None
    args['spatial_operation'] = openClimateGisJob.spatial_operation
    args['aggregate'] = openClimateGisJob.aggregate
    args['output_format'] = openClimateGisJob.output_format
    args['prefix'] = openClimateGisJob.prefix
    # per-job output directory named after the job id
    args['dir_output'] = str( openClimateGisJob.id )
    args['with_auxiliary_files'] = openClimateGisJob.with_auxiliary_files
    return args
def encodeArgs(self, openClimateGisJob):
    """Method to transform the OpenClimateGisJob instance into a dictionary of arguments passed on to the ocgis library."""
    args = {}
    # ocgis.RequestDataset(uri=None, variable=None, alias=None, time_range=None, time_region=None,
    #                      level_range=None, s_proj=None, t_units=None, t_calendar=None, did=None, meta=None)
    # retrieve dataset URIs, variable from configuration JSON data
    jsonObject = self.ocgisDatasets.datasets[openClimateGisJob.dataset_category][openClimateGisJob.dataset]
    if jsonObject['type'] == 'datasets':
        # must sub-select data structure by variable
        jsonData =jsonObject[openClimateGisJob.variable]
    elif jsonObject['type'] == 'package':
        jsonData = jsonObject
    args['uri'] = jsonData['uri']
    args['variable'] = jsonData["variable"]
    args['t_calendar'] = jsonData["t_calendar"]
    args['t_units'] = jsonData["t_units"]
    args['alias'] = jsonData["alias"]
    # class ocgis.OcgOperations(dataset=None, spatial_operation='intersects', geom=None,
    #                           aggregate=False, calc=None, calc_grouping=None, calc_raw=False,
    #                           abstraction='polygon', snippet=False, backend='ocg', prefix=None,
    #                           output_format='numpy', agg_selection=False, select_ugid=None,
    #                           vector_wrap=True, allow_empty=False, dir_output=None, slice=None,
    #                           file_only=False, headers=None)
    # geometry: exactly one of shape / bounding box / point (validated by the form)
    args['geom'] = None
    args['select_ugid'] = None
    if hasText(openClimateGisJob.geometry):
        args['geom'] = self.ocgisGeometries.getCategoryKey( openClimateGisJob.geometry )
        args['select_ugid'] = []
        # must transform back from string to list of integers
        for geom in openClimateGisJob.geometry_id.split(","):
            args['select_ugid'].append( self.ocgisGeometries.getGuid(openClimateGisJob.geometry, str(geom)))
    elif ( hasText(openClimateGisJob.latmin) and hasText(openClimateGisJob.latmax)
           and hasText(openClimateGisJob.lonmin) and hasText(openClimateGisJob.lonmax)):
        # bounding box: [lonmin, lonmax, latmin, latmax] order
        args['geom'] = [openClimateGisJob.lonmin, openClimateGisJob.lonmax,
                        openClimateGisJob.latmin, openClimateGisJob.latmax]
    elif hasText(openClimateGisJob.lat) and hasText(openClimateGisJob.lon):
        # single point: [lon, lat] order
        args['geom'] = [openClimateGisJob.lon, openClimateGisJob.lat]
    # time range: only set when both endpoints are present
    if openClimateGisJob.datetime_start is not None and openClimateGisJob.datetime_stop is not None:
        args['time_range'] = [openClimateGisJob.datetime_start, openClimateGisJob.datetime_stop]
    else:
        args['time_range'] = None
    # time region: optional month and/or year subsets, stored as comma-separated strings
    args['time_region'] = None
    if hasText(openClimateGisJob.timeregion_month) or hasText(openClimateGisJob.timeregion_year):
        args['time_region'] = { 'month':None, 'year':None }
        if hasText(openClimateGisJob.timeregion_month):
            args['time_region']['month'] = []
            for i in map(int, openClimateGisJob.timeregion_month.split(",")):
                args['time_region']['month'].append(i)
        if hasText(openClimateGisJob.timeregion_year):
            args['time_region']['year'] = []
            years = openClimateGisJob.timeregion_year.replace(" ","")
            # "YYYY-YYYY" expands to the inclusive range; otherwise comma-separated years
            # NOTE(review): pattern is not end-anchored, matching the form validation.
            if re.match('^\d{4}-\d{4}', years):
                year1 = int(years[0:4])
                year2 = int(years[5:9])
                args['time_region']['year'] = range(year1, year2+1)
            else:
                for i in map(int, openClimateGisJob.timeregion_year.split(",")):
                    args['time_region']['year'].append(i)
    # calculation: build the ocgis calc spec with typed keyword arguments
    args['calc'] = None
    if hasText(openClimateGisJob.calc) and openClimateGisJob.calc.lower() != 'none':
        calc = self.ocgisCalculations.getCalc(str(openClimateGisJob.calc))
        args['calc'] = [ {'func':str(calc["func"]), 'name':str(calc["name"])} ]
        args['calc'][0]['kwds'] = {}
        # loop over keywords in order
        if "keywords" in calc:
            for i, keyword in enumerate(calc["keywords"]):
                # only par1..par3 exist; NOTE(review): a calculation with more
                # than 3 keywords would silently reuse the previous 'val'
                if i==0: val = openClimateGisJob.par1
                elif i==1: val = openClimateGisJob.par2
                elif i==2: val = openClimateGisJob.par3
                if keyword["type"] == "float":
                    val = float(val)
                elif keyword["type"] == "string":
                    val = str(val).lower()
                args['calc'][0]['kwds'][str(keyword[u'name'])] = val
        args['headers'] = HEADERS_CALC
    else:
        args['headers'] = HEADERS_NOCALC
    args['calc_raw'] = openClimateGisJob.calc_raw
    # calc_grouping: comma-separated string back to a list of plain strings
    if hasText(openClimateGisJob.calc_group):
        args['calc_grouping'] = map(str, openClimateGisJob.calc_group.split(","))
    else:
        args['calc_grouping'] = None
    args['spatial_operation'] = openClimateGisJob.spatial_operation
    args['aggregate'] = openClimateGisJob.aggregate
    args['output_format'] = openClimateGisJob.output_format
    args['prefix'] = openClimateGisJob.prefix
    # per-job output directory named after the job id
    args['dir_output'] = str( openClimateGisJob.id )
    args['with_auxiliary_files'] = openClimateGisJob.with_auxiliary_files
    return args