def test_product_data_management(self):

    def row2dict(row):
        d = {}
        for column_name in row.c.keys():
            d[column_name] = str(getattr(row, column_name))
        return d

    # Select product/version
    productcode = 'vgt-ndvi'
    version = 'proba-v2.1'
    product = Product(product_code=productcode, version=version)

    # Does the product have mapsets AND subproducts?
    all_prod_mapsets = product.mapsets
    all_prod_subproducts = product.subproducts
    if len(all_prod_mapsets) > 0 and len(all_prod_subproducts) > 0:
        for mapset in all_prod_mapsets:
            mapset_info = querydb.get_mapset(mapsetcode=mapset, allrecs=False)
            mapset_dict = row2dict(mapset_info)
            mapset_dict['mapsetdatasets'] = []
            all_mapset_datasets = product.get_subproducts(mapset=mapset)
            for subproductcode in all_mapset_datasets:
                dataset_info = querydb.get_subproduct(productcode=productcode,
                                                      version=version,
                                                      subproductcode=subproductcode)
                dataset_dict = row2dict(dataset_info)
                dataset = product.get_dataset(mapset=mapset,
                                              sub_product_code=subproductcode)
                completeness = dataset.get_dataset_normalized_info()
                dataset_dict['datasetcompleteness'] = completeness
                mapset_dict['mapsetdatasets'].append(dataset_dict)
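# A minimal, self-contained sketch of the row2dict pattern used in the test above:
# it assumes a SQLAlchemy-style row exposing .c.keys(). The _Stub* classes are
# hypothetical and only illustrate the interface the helper expects.
def _row2dict_demo():
    class _StubColumns(object):
        def keys(self):
            return ['productcode', 'version']

    class _StubRow(object):
        c = _StubColumns()
        productcode = 'vgt-ndvi'
        version = 'proba-v2.1'

    row = _StubRow()
    d = dict((name, str(getattr(row, name))) for name in row.c.keys())
    return d  # {'productcode': 'vgt-ndvi', 'version': 'proba-v2.1'}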
def test_list_my_subproducts(self):
    productcode = 'vgt-ndvi'
    version = 'sv2-pv2.1'
    # productcode = 'fewsnet-rfe'
    # version = '2.0'
    product = Product(product_code=productcode, version=version)
    my_list = product.list_all_ingested_and_derived_subproducts_mapsets()
    for sprod in my_list:
        print(sprod)
def test_all_products_to_json(self):

    def row2dict(row):
        d = {}
        for column_name in list(row.c.keys()):
            d[column_name] = str(getattr(row, column_name))
        return d

    # Get the full distinct list of products (native only)
    db_products = querydb.get_products()
    try:
        db_products.__len__()
    except AttributeError:
        db_products = querydb.get_product_native(allrecs=True)

    self.assertTrue(len(db_products) > 0)

    products_dict_all = []
    # Loop over the products list
    for product in db_products:
        if python_version == 2:
            prod_dict = row2dict(product)
        if python_version == 3:
            prod_dict = product
        productcode = prod_dict['productcode']
        version = prod_dict['version']
        p = Product(product_code=productcode, version=version)

        # Does the product have mapsets AND subproducts?
        all_prod_mapsets = p.mapsets
        all_prod_subproducts = p.subproducts
        if len(all_prod_mapsets) > 0 and len(all_prod_subproducts) > 0:
            prod_dict['productmapsets'] = []
            for mapset in all_prod_mapsets:
                mapset_info = querydb.get_mapset(mapsetcode=mapset, allrecs=False)
                mapset_dict = row2dict(mapset_info)
                mapset_dict['mapsetdatasets'] = []
                all_mapset_datasets = p.get_subproducts(mapset=mapset)
                for subproductcode in all_mapset_datasets:
                    dataset_info = querydb.get_subproduct(productcode=productcode,
                                                          version=version,
                                                          subproductcode=subproductcode)
                    dataset_dict = row2dict(dataset_info)
                    dataset = p.get_dataset(mapset=mapset,
                                            sub_product_code=subproductcode)
                    completeness = dataset.get_dataset_normalized_info()
                    dataset_dict['datasetcompleteness'] = completeness
                    mapset_dict['mapsetdatasets'].append(dataset_dict)
                prod_dict['productmapsets'].append(mapset_dict)
        products_dict_all.append(prod_dict)

    # See ES2-596
    self.assertEqual(len(db_products), len(products_dict_all))
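# For reference, the nested structure assembled above has this shape (values are
# illustrative only; keys mirror those set in the test):
#
#   products_dict_all = [
#       {
#           'productcode': 'vgt-ndvi',
#           'version': 'proba-v2.1',
#           'productmapsets': [
#               {
#                   'mapsetcode': 'SPOTV-Africa-1km',
#                   'mapsetdatasets': [
#                       {
#                           'subproductcode': 'ndv',
#                           'datasetcompleteness': ...,  # from get_dataset_normalized_info()
#                       },
#                   ],
#               },
#           ],
#       },
#   ]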
def test_get_years(self):
    # productcode = "fewsnet-rfe"
    # subproductcode = "10d"
    # version = "2.0"
    # mapsetcode = "FEWSNET-Africa-8km"
    productcode = "vgt-ndvi"
    subproductcode = "ndv"
    version = "spot-v1"
    mapsetcode = "SPOTV-Africa-1km"

    p = Product(product_code=productcode, version=version)
    dataset = p.get_dataset(mapset=mapsetcode, sub_product_code=subproductcode)
    dataset.get_filenames()
    all_present_product_dates = dataset.get_dates()
    print(all_present_product_dates)

    distinctyears = []
    for product_date in all_present_product_dates:
        if product_date.year not in distinctyears:
            distinctyears.append(product_date.year)
    print(distinctyears)
    self.assertEqual(True, True)
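# The distinct-years loop above could equivalently use a set comprehension
# (a sketch, not part of the original test):
#
#   distinctyears = sorted({product_date.year for product_date in all_present_product_dates})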
def test_missing_product_request_product_1(self):
    # Test the mechanism of creating 'missing' reports on a specific 'product'
    productcode = 'fewsnet-rfe'
    version = '2.0'
    product = Product(product_code=productcode, version=version)
    mapsets = product.mapsets
    subproductcode = '10d'
    type = 'Ingest'
    to_date = datetime.date(2015, 4, 30)

    # Create an initial list
    # missing = product.get_missing_datasets(mapset=mapsets[0], sub_product_code=subproductcode,
    #                                        from_date=None, to_date=to_date)

    # Move away some files
    dates = ['20150101', '20150111', '20150121']
    # self.hide_some_files(productcode, version, subproductcode, type, mapsets[0], dates)

    # Create the 'missing' list
    missing = product.get_missing_datasets(mapset=mapsets[0], sub_product_code=subproductcode,
                                           from_date=None, to_date=to_date)

    # Move back the files (to create the .tar)
    self.move_back_files('/tmp/eStation2/')

    # Create the tar file with the 'missing' files
    [file_tar, result] = product.create_tar(missing, filetar=None, tgz=True)

    # Move away the files again
    self.hide_some_files(productcode, version, subproductcode, type, mapsets[0], dates)

    # Import the files from the archive
    result = product.import_tar(file_tar, tgz=True)

    # Re-create the 'missing' list
    missing = product.get_missing_datasets(mapset=mapsets[0], sub_product_code=subproductcode,
                                           from_date=None, to_date=to_date)
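# The round trip exercised above, in brief (the same calls as in the test; file_tar
# and the '/tmp/eStation2/' staging path come from the test itself):
#
#   missing = product.get_missing_datasets(mapset=..., sub_product_code=..., from_date=None, to_date=...)
#   file_tar, result = product.create_tar(missing, filetar=None, tgz=True)   # pack the missing files
#   result = product.import_tar(file_tar, tgz=True)                          # restore them from the archive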
def convert_driver(output_dir=None):

    # Definitions
    input_dir = es_constants.es2globals['processing_dir']

    # Instance metadata object
    sds_meta = metadata.SdsMetadata()

    # Check base output dir
    if output_dir is None:
        output_dir = es_constants.es2globals['spirits_output_dir']
    functions.check_output_dir(output_dir)

    # Read the spirits table and convert all existing files
    spirits_list = querydb.get_spirits()
    for entry in spirits_list:
        use_range = False
        product_code = entry['productcode']
        sub_product_code = entry['subproductcode']
        version = entry['version']
        out_data_type = entry['out_data_type']
        out_scale_factor = entry['out_scale_factor']
        out_offset = entry['out_offset']
        out_nodata = entry['data_ignore_value']

        # Prepare the naming dict
        naming_spirits = {'sensor_filename_prefix': entry['sensor_filename_prefix'],
                          'frequency_filename_prefix': entry['frequency_filename_prefix'],
                          'pa_filename_prefix': entry['product_anomaly_filename_prefix']}

        metadata_spirits = {'values': entry['prod_values'],
                            'flags': entry['flags'],
                            'data_ignore_value': entry['data_ignore_value'],
                            'days': entry['days'],
                            'sensor_type': entry['sensor_type'],
                            'comment': entry['comment'],
                            'date': ''}

        # Manage mapsets: if defined use it, else read the existing ones from the filesystem
        my_mapsets = []
        if entry['mapsetcode']:
            my_mapsets.append(entry['mapsetcode'])
        else:
            prod = Product(product_code, version=version)
            for mp in prod.mapsets:
                my_mapsets.append(mp)

        # Manage dates
        if entry['start_date']:
            from_date = datetime.datetime.strptime(str(entry['start_date']), '%Y%m%d').date()
            use_range = True
        else:
            from_date = None
        if entry['end_date']:
            to_date = datetime.datetime.strptime(str(entry['end_date']), '%Y%m%d').date()
            use_range = True
        else:
            to_date = None

        for my_mapset in my_mapsets:
            # Manage output dirs
            out_sub_dir = my_mapset + os.path.sep + \
                          product_code + os.path.sep + \
                          entry['product_anomaly_filename_prefix'] + \
                          entry['frequency_filename_prefix'] + \
                          str(entry['days']) + os.path.sep

            logger.info('Working on [%s]/[%s]/[%s]/[%s]'
                        % (product_code, version, my_mapset, sub_product_code))
            ds = Dataset(product_code, sub_product_code, my_mapset,
                         version=version, from_date=from_date, to_date=to_date)
            product_info = ds._db_product
            in_scale_factor = product_info.scale_factor
            in_offset = product_info.scale_offset
            in_nodata = product_info.nodata
            mask_min = product_info.mask_min
            mask_max = product_info.mask_max
            productcode = product_info.productcode
            subproductcode = product_info.subproductcode
            if productcode == 'vgt-ndvi' and subproductcode == 'ndv':
                mask_min = 0

            if use_range:
                available_files = ds.get_filenames_range()
            else:
                available_files = ds.get_filenames()

            # Convert input products
            if len(available_files) > 0:
                for input_file in available_files:
                    # Check it is a .tif file (not .missing)
                    path, ext = os.path.splitext(input_file)
                    if ext == '.tif':
                        functions.check_output_dir(output_dir + out_sub_dir)
                        str_date = functions.get_date_from_path_filename(os.path.basename(input_file))

                        # Check the input file exists
                        if os.path.isfile(input_file):
                            if len(naming_spirits['frequency_filename_prefix']) > 1:
                                my_str_date = naming_spirits['frequency_filename_prefix'][1:5] + str_date
                                metadata_spirits['date'] = my_str_date
                            else:
                                metadata_spirits['date'] = str_date

                            # Read metadata from the file and differentiate chirps prelim and final data
                            sds_meta.read_from_file(input_file)
                            input_file_name = sds_meta.get_item('eStation2_input_files')
                            if productcode == 'chirps-dekad' and input_file_name.endswith(".tif;"):
                                metadata_spirits['comment'] = 'Prelim ' + entry['comment']
                            elif productcode == 'chirps-dekad' and input_file_name.endswith(".gz;"):
                                metadata_spirits['comment'] = 'Final ' + entry['comment']

                            # Convert the file
                            # convert_geotiff_file(input_file, output_dir+out_sub_dir, str_date,
                            #                      naming_spirits, metadata_spirits)
                            convert_geotiff_datatype_rescaled(input_file, output_dir + out_sub_dir,
                                                              str_date, naming_spirits, metadata_spirits,
                                                              in_scale_factor, in_offset, in_nodata,
                                                              out_scale_factor, out_offset, out_nodata,
                                                              out_data_type, mask_min, mask_max)
                        else:
                            logger.debug('Input file does not exist: %s' % input_file)
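# Typical invocation (a sketch; '/tmp/spirits_out' is a hypothetical destination,
# and when output_dir is None the configured spirits_output_dir is used instead):
#
#   convert_driver(output_dir='/tmp/spirits_out')
#   convert_driver()   # falls back to es_constants.es2globals['spirits_output_dir']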
def test_class(self):
    self.assertIsInstance(Product(**self.kwargs), Product)
def get_product(self):
    product = Product(**self.kwargs)
    product._get_full_mapsets = lambda: self.files_mapsets
    product._get_full_subproducts = lambda mapset='*': self.files_subproducts
    return product
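# get_product() above returns a Product whose filesystem lookups are stubbed out:
# assigning lambdas to the instance shadows the private _get_full_mapsets /
# _get_full_subproducts methods, so tests read self.files_mapsets /
# self.files_subproducts instead of the real disk layout. A minimal runnable
# sketch of the same monkey-patching idea on a toy class (hypothetical names):
def _stubbing_demo():
    class _Toy(object):
        def _scan(self):
            return ['real', 'disk', 'items']

    toy = _Toy()
    toy._scan = lambda: ['fake', 'items']   # instance attribute shadows the bound method
    assert toy._scan() == ['fake', 'items']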
def test_data_management(self):
    import json
    db_products = querydb.get_products(activated=True)
    if len(db_products) > 0:
        products_dict_all = []
        # Loop over the products list
        for row in db_products:
            prod_dict = functions.row2dict(row)
            productcode = prod_dict['productcode']
            version = prod_dict['version']
            p = Product(product_code=productcode, version=version)

            # Does the product have mapsets AND subproducts?
            all_prod_mapsets = p.mapsets
            all_prod_subproducts = p.subproducts
            if len(all_prod_mapsets) > 0 and len(all_prod_subproducts) > 0:
                prod_dict['productmapsets'] = []
                for mapset in all_prod_mapsets:
                    mapset_info = querydb.get_mapset(mapsetcode=mapset, allrecs=False)
                    mapset_dict = functions.row2dict(mapset_info)
                    mapset_dict['mapsetdatasets'] = []
                    all_mapset_datasets = p.get_subproducts(mapset=mapset)
                    for subproductcode in all_mapset_datasets:
                        dataset_info = querydb.get_subproduct(productcode=productcode,
                                                              version=version,
                                                              subproductcode=subproductcode)
                        if dataset_info is not None:
                            dataset_dict = functions.row2dict(dataset_info)
                            if hasattr(dataset_info, 'frequency_id'):
                                if dataset_info.frequency_id == 'e15minute' \
                                        or dataset_info.frequency_id == 'e30minute':
                                    # To be implemented in dataset.py
                                    dataset_dict['nodisplay'] = 'no_minutes_display'
                                elif dataset_info.frequency_id == 'e1year':
                                    dataset_dict['nodisplay'] = 'no_minutes_display'
                                else:
                                    dataset = p.get_dataset(mapset=mapset,
                                                            sub_product_code=subproductcode)
                                    completeness = dataset.get_dataset_normalized_info()
                                    dataset_dict['datasetcompleteness'] = completeness
                                    dataset_dict['nodisplay'] = 'false'

                                dataset_dict['mapsetcode'] = mapset_dict['mapsetcode']
                                dataset_dict['mapset_descriptive_name'] = mapset_dict['descriptive_name']
                                mapset_dict['mapsetdatasets'].append(dataset_dict)
                    prod_dict['productmapsets'].append(mapset_dict)
            products_dict_all.append(prod_dict)

        prod_json = json.dumps(products_dict_all,
                               ensure_ascii=False,
                               sort_keys=True,
                               indent=4,
                               separators=(', ', ': '))
        datamanagement_json = '{"success":"true", "total":' \
                              + str(len(db_products)) \
                              + ',"products":' + prod_json + '}'
    else:
        datamanagement_json = '{"success":false, "error":"No data sets defined!"}'
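# The envelope above is assembled by string concatenation; an equivalent and
# quote-safe alternative would be a single json.dumps call (a sketch, not the
# module's actual approach):
#
#   datamanagement_json = json.dumps({'success': 'true',
#                                     'total': len(db_products),
#                                     'products': products_dict_all})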
def getFilesList(productcode, subproductcode, version, mapsetcode, date_format, start_date, end_date):
    # Generate a list of files (possibly with repetitions) for extracting timeseries.
    # It applies to a single dataset (prod/sprod/version/mapset) between 2 dates.

    # Prepare the results
    list_files = []
    dates_list = []

    p = Product(product_code=productcode, version=version)
    dataset = p.get_dataset(mapset=mapsetcode, sub_product_code=subproductcode)
    dataset.get_filenames()

    if date_format == 'YYYYMMDD':
        # Loop over dates
        for date in dataset._frequency.get_dates(start_date, end_date):
            if (date >= start_date) and (date <= end_date):
                filedate = date.strftime("%Y%m%d")
                productfilename = functions.set_path_filename(filedate, productcode, subproductcode,
                                                              mapsetcode, version, '.tif')
                productfilepath = dataset.fullpath + productfilename
                dates_list.append(date)
                if os.path.isfile(productfilepath):
                    list_files.append(productfilepath)
                else:
                    # Keep list_files index-aligned with dates_list
                    list_files.append('')

    if date_format == 'MMDD':
        # Extract MMDD
        mmdd_start = start_date.month * 100 + start_date.day
        mmdd_end = end_date.month * 100 + end_date.day

        # Case 1: same year
        if start_date.year == end_date.year:
            # dataset.get_mmdd() returns the list of existing 'mmdd' - sorted
            for mmdd in dataset.get_mmdd():
                if mmdd_start <= int(mmdd) <= mmdd_end:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(start_date.year, int(mmdd[:2]), int(mmdd[2:4])))

        # Case 2: end_year > start_year
        if start_date.year < end_date.year:
            # list_mmdd contains the list of existing 'mmdd' - sorted
            list_mmdd = dataset.get_mmdd()

            # Put all dates from start_mmdd to the end of the year
            for mmdd in list_mmdd:
                if int(mmdd) >= mmdd_start:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(start_date.year, int(mmdd[:2]), int(mmdd[2:4])))

            # Fill the list with 'full' years
            for n_years in range(end_date.year - start_date.year - 1):
                for mmdd in list_mmdd:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(start_date.year + 1 + n_years,
                                                    int(mmdd[:2]), int(mmdd[2:4])))

            # Put all dates from the beginning of the year to end_mmdd
            for mmdd in list_mmdd:
                if int(mmdd) <= mmdd_end:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(end_date.year, int(mmdd[:2]), int(mmdd[2:4])))

    return [list_files, dates_list]
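# Example call (a sketch; the product/mapset identifiers mirror those used elsewhere
# in this module and the dates are illustrative):
#
#   import datetime
#   files, dates = getFilesList('vgt-ndvi', 'ndv', 'spot-v1', 'SPOTV-Africa-1km',
#                               'YYYYMMDD',
#                               datetime.date(2013, 1, 1), datetime.date(2013, 12, 31))
#   # files[i] is '' when the expected .tif for dates[i] is not on disk,
#   # so the two lists stay index-aligned.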
def test_get_productlayer(self):
    import mapscript

    # Example GetMap parameters (in production these come from web.input())
    getparams = {'STYLES': u'',
                 'productcode': u'vgt-fapar',
                 'legendid': u'99',
                 'SERVICE': u'WMS',
                 'subproductcode': u'fapar',
                 'CRS': u'EPSG:4326',
                 'FORMAT': u'image/png',
                 'REQUEST': u'GetMap',
                 'HEIGHT': u'1010',
                 'WIDTH': u'998',
                 'VERSION': u'1.3.0',
                 'productversion': u'V1.4',
                 'date': u'20130221',
                 'mapsetcode': u'SPOTV-Africa-1km',
                 'TRANSPARENT': u'false',
                 'BBOX': u'15.46875, -17.578125, 16.171875, -16.875'}

    p = Product(product_code=getparams['productcode'], version=getparams['productversion'])
    dataset = p.get_dataset(mapset=getparams['mapsetcode'],
                            sub_product_code=getparams['subproductcode'])

    if 'date' in getparams:
        filedate = getparams['date']
    else:
        # Use the most recent available date
        dataset.get_filenames()
        lastdate = dataset.get_dates()[-1].strftime("%Y%m%d")
        filedate = lastdate

    if dataset.no_year():
        filedate = dataset.strip_year(filedate)

    filename = functions.set_path_filename(filedate,
                                           getparams['productcode'],
                                           getparams['subproductcode'],
                                           getparams['mapsetcode'],
                                           getparams['productversion'],
                                           '.tif')
    productfile = dataset.fullpath + filename

    mapscript.msIO_installStdoutToBuffer()

    projlib = es_constants.proj4_lib_dir
    errorfile = es_constants.log_dir + "/mapserver_error.log"

    owsrequest = mapscript.OWSRequest()
    inputparams = getparams  # in production: web.input()
    for k, v in inputparams.items():
        print(k + ':' + v)
        owsrequest.setParameter(k.upper(), v)

    filenamenoextention = functions.set_path_filename(filedate,
                                                      getparams['productcode'],
                                                      getparams['subproductcode'],
                                                      getparams['mapsetcode'],
                                                      getparams['productversion'],
                                                      '')
    owsrequest.setParameter("LAYERS", filenamenoextention)

    productmap = mapscript.mapObj(es_constants.template_mapfile)
    productmap.setConfigOption("PROJ_LIB", projlib)
    productmap.setConfigOption("MS_ERRORFILE", errorfile)
    productmap.maxsize = 4096

    outputformat_png = mapscript.outputFormatObj('GD/PNG', 'png')
    outputformat_png.setOption("INTERLACE", "OFF")
    productmap.appendOutputFormat(outputformat_png)
    productmap.selectOutputFormat('png')

    productmap.debug = mapscript.MS_TRUE
    productmap.status = mapscript.MS_ON
    productmap.units = mapscript.MS_DD

    coords = list(map(float, inputparams['BBOX'].split(",")))
    llx, lly, urx, ury = coords
    productmap.setExtent(llx, lly, urx, ury)

    # epsg must be in lowercase because on unix/linux systems the proj filenames are lowercase
    epsg = inputparams['CRS'].lower()
    productmap.setProjection(epsg)

    w = int(inputparams['WIDTH'])
    h = int(inputparams['HEIGHT'])
    productmap.setSize(w, h)

    # General web service information
    productmap.setMetaData("WMS_TITLE", "Product description")
    productmap.setMetaData("WMS_SRS", inputparams['CRS'].lower())
    productmap.setMetaData("WMS_ABSTRACT", "A Web Map Service returning eStation2 raster layers.")
    productmap.setMetaData("WMS_ENABLE_REQUEST", "*")   # necessary!

    product_info = querydb.get_product_out_info(productcode=inputparams['productcode'],
                                                subproductcode=inputparams['subproductcode'],
                                                version=inputparams['productversion'])
    if hasattr(product_info, "__len__") and len(product_info) > 0:
        for row in product_info:
            scale_factor = row.scale_factor
            scale_offset = row.scale_offset
            nodata = row.nodata

    legend_info = querydb.get_legend_info(legendid=inputparams['legendid'])
    if hasattr(legend_info, "__len__") and len(legend_info) > 0:
        for row in legend_info:
            # Convert legend (physical) values to raw pixel values
            minstep = int((row.min_value - scale_offset) / scale_factor)
            maxstep = int((row.max_value - scale_offset) / scale_factor)
            realminstep = int((row.realminstep - scale_offset) / scale_factor)
            realmaxstep = int((row.realmaxstep - scale_offset) / scale_factor)
            minstepwidth = int((row.minstepwidth - scale_offset) / scale_factor)
            maxstepwidth = int((row.maxstepwidth - scale_offset) / scale_factor)
            totwidth = int((row.totwidth - scale_offset) / scale_factor)
            totsteps = row.totsteps

        # e.g. 'SCALE=-7000,10000' (min of legend_step.from_step, max of legend_step.to_step)
        processing_scale = 'SCALE=' + str(minstep) + ',' + str(maxstep)

        minbuckets = 256
        maxbuckets = 10000
        num_buckets = maxbuckets
        if minstepwidth > 0:
            num_buckets = round(totwidth / minstepwidth, 0)
            if num_buckets < minbuckets:
                num_buckets = minbuckets
            elif num_buckets > maxbuckets:
                num_buckets = 0

        if num_buckets > 0:
            processing_buckets = 'SCALE_BUCKETS=' + str(num_buckets)

        # nodata: get this value from the table products.product
        processing_novalue = ''
        if nodata is not None and minstep <= nodata < maxstep:
            processing_novalue = 'NODATA=' + str(nodata)

        layer = mapscript.layerObj(productmap)
        layer.name = filenamenoextention
        layer.type = mapscript.MS_LAYER_RASTER
        layer.status = mapscript.MS_ON   # MS_DEFAULT
        layer.data = productfile
        layer.setProjection("epsg:4326")
        layer.dump = mapscript.MS_TRUE

        # scale & buckets
        if num_buckets > 0:
            layer.setProcessing(processing_scale)
            layer.setProcessing(processing_buckets)
        if processing_novalue != '':
            layer.setProcessing(processing_novalue)

        legend_steps = querydb.get_legend_steps(legendid=inputparams['legendid'])
        if hasattr(legend_steps, "__len__") and len(legend_steps) > 0:
            stepcount = 0
            for step in legend_steps:
                stepcount += 1
                min_step = int((step.from_step - scale_offset) / scale_factor)
                max_step = int((step.to_step - scale_offset) / scale_factor)
                colors = [int(color.strip()) for color in step.color_rgb.split(" ") if color.strip()]

                if stepcount == len(legend_steps):
                    # For the last step use <= max_step
                    expression_string = '([pixel] >= ' + str(min_step) + \
                                        ' and [pixel] <= ' + str(max_step) + ')'
                else:
                    expression_string = '([pixel] >= ' + str(min_step) + \
                                        ' and [pixel] < ' + str(max_step) + ')'

                # Define class object and style
                layerclass = mapscript.classObj(layer)
                layerclass.name = layer.name + '_' + str(stepcount)
                layerclass.setExpression(expression_string)
                style = mapscript.styleObj(layerclass)
                style.color.setRGB(colors[0], colors[1], colors[2])

    result_map_file = es_constants.apps_dir + '/analysis/MAP_result.map'
    productmap.save(result_map_file)

    image = productmap.draw()
    image.save(es_constants.apps_dir + '/analysis/' + filenamenoextention + '.png')

    contents = productmap.OWSDispatch(owsrequest)
    content_type = mapscript.msIO_stripStdoutBufferContentType()
    content = mapscript.msIO_getStdoutBufferBytes()

    self.assertEqual(True, True)
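# The legend steps above are expressed in physical values and converted to raw
# pixel values with (value - scale_offset) / scale_factor. A worked example with
# hypothetical NDVI-like calibration (scale_factor=0.004, scale_offset=-0.08):
#
#   min_value = -0.08  ->  minstep = int((-0.08 - (-0.08)) / 0.004) = 0
#   max_value =  0.92  ->  maxstep = int(( 0.92 - (-0.08)) / 0.004) = 250
#
# so the MapServer processing directive becomes 'SCALE=0,250'.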
def getFilesList(productcode, subproductcode, version, mapsetcode, date_format, start_date, end_date):
    # Generate a list of files (possibly with repetitions) for extracting timeseries.
    # It applies to a single dataset (prod/sprod/version/mapset) between 2 dates.

    # Prepare the results
    list_files = []
    dates_list = []

    p = Product(product_code=productcode, version=version)
    dataset = p.get_dataset(mapset=mapsetcode, sub_product_code=subproductcode)
    dataset.get_filenames()

    if date_format == 'YYYYMMDD':
        # Loop over dates
        for date in dataset.get_dates():
            if (date >= start_date) and (date <= end_date):
                filedate = date.strftime("%Y%m%d")
                productfilename = functions.set_path_filename(filedate, productcode, subproductcode,
                                                              mapsetcode, version, '.tif')
                productfilepath = dataset.fullpath + productfilename
                if os.path.isfile(productfilepath):
                    list_files.append(productfilepath)
                    dates_list.append(date)

    if date_format == 'MMDD':
        # Extract MMDD
        mmdd_start = start_date.month * 100 + start_date.day
        mmdd_end = end_date.month * 100 + end_date.day

        # Case 1: same year
        if start_date.year == end_date.year:
            # dataset.get_mmdd() returns the list of existing 'mmdd' - sorted
            for mmdd in dataset.get_mmdd():
                if mmdd_start <= int(mmdd) <= mmdd_end:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(start_date.year, int(mmdd[:2]), int(mmdd[2:4])))

        # Case 2: end_year > start_year
        if start_date.year < end_date.year:
            # list_mmdd contains the list of existing 'mmdd' - sorted
            list_mmdd = dataset.get_mmdd()

            # Put all dates from start_mmdd to the end of the year
            for mmdd in list_mmdd:
                if int(mmdd) >= mmdd_start:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(start_date.year, int(mmdd[:2]), int(mmdd[2:4])))

            # Fill the list with 'full' years
            for n_years in range(end_date.year - start_date.year - 1):
                for mmdd in list_mmdd:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(start_date.year + 1 + n_years,
                                                    int(mmdd[:2]), int(mmdd[2:4])))

            # Put all dates from the beginning of the year to end_mmdd
            for mmdd in list_mmdd:
                if int(mmdd) <= mmdd_end:
                    productfilename = functions.set_path_filename(mmdd, productcode, subproductcode,
                                                                  mapsetcode, version, '.tif')
                    list_files.append(dataset.fullpath + productfilename)
                    dates_list.append(datetime.date(end_date.year, int(mmdd[:2]), int(mmdd[2:4])))

    return [list_files, dates_list]
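# Note: unlike the YYYYMMDD branch in the first getFilesList variant above, this
# variant appends only files that exist on disk (no '' placeholders), so list_files
# and dates_list stay the same length but may be shorter than the requested range.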