def get_index_maps(request, job_id, index_name):
    '''
    Pass the kml data for one index map to the google map.

    The response must contain the key 'kml_links' (fix: the old docstring
    said 'kml_link', which did not match the actual response key).

    Parameters:
        request: the Django/Tethys request object.
        job_id: id of the pending job whose kmls are being displayed.
        index_name: name of the index map whose kml url is requested.

    Returns:
        JsonResponse with 'kml_links' mapped to a one-element list
        containing the url of the requested index map's kml resource.
    '''
    map_name = index_name
    user = str(request.user)

    # Look up the pending job for this user.
    session = jobs_sessionmaker()
    try:
        job, success = gi_lib.get_pending_job(job_id, user, session)

        # NOTE(review): the CKAN engine is retrieved for parity with the
        # other controllers but is not used by this action.
        CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

        # job.current_kmls is a JSON object mapping index-map names to
        # {'url': ..., 'full_name': ...} entries.
        resource_list = json.loads(job.current_kmls)

        # Get the kml url for the requested index map.
        kml_links = resource_list[map_name]['url']
    finally:
        # Fix: the session was previously never closed (connection leak).
        session.close()

    return JsonResponse({'kml_links': [kml_links]})
def get_index_maps(request, job_id, index_name):
    '''
    This action is used to pass the kml data to the google map.
    It must return the key 'kml_link'.
    '''
    username = str(request.user)

    # Fetch the pending job record for this user.
    db_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, username, db_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # current_kmls maps each index-map name to {'url': ..., 'full_name': ...}.
    kml_registry = json.loads(job.current_kmls)
    link = kml_registry[index_name]['url']

    return JsonResponse({'kml_links': [link]})
def extract_existing_gssha(request, job_id):
    '''
    Take the file name and id that were submitted, unzip the GSSHA
    archive into the user's workspace, and redirect to index selection.

    Parameters:
        request: the Django/Tethys request object.
        job_id: id of the pending job whose archive should be extracted.

    Returns:
        Redirect to the 'gsshaindex:select_index' page for this job.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    gsshapy_session = None
    try:
        job, success = gi_lib.get_pending_job(job_id, user, session)
        CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

        # Specify the workspace: <app>/public/<user>/index_maps
        controllerDir = os.path.abspath(os.path.dirname(__file__))
        gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
        publicDir = os.path.join(gsshaindexDir, 'public')
        userDir = os.path.join(publicDir, str(user))
        indexMapDir = os.path.join(userDir, 'index_maps')

        # Clear the workspace
        gi_lib.clear_folder(userDir)
        gi_lib.clear_folder(indexMapDir)

        # Get url for the resource and extract the GSSHA file
        url = job.original_url
        extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)

        # Create GSSHAPY Session
        gsshapy_session = gsshapy_sessionmaker()

        # Find the project file (records the last .prj encountered).
        for root, dirs, files in os.walk(userDir):
            for file_name in files:  # fix: was 'file', shadowing the builtin
                if file_name.endswith(".prj"):
                    project_name = file_name
                    project_path = os.path.join(root, file_name)
                    read_dir = os.path.dirname(project_path)
    finally:
        # Fix: sessions were previously never closed (connection leak).
        session.close()
        if gsshapy_session is not None:
            gsshapy_session.close()

    context['job_id'] = job_id
    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id': job_id}))
def extract_existing_gssha(request, job_id):
    '''
    This takes the file name and id that were submitted and unzips the files.
    '''
    context = {}
    username = str(request.user)

    # Look up the pending job for this user.
    job_db = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, username, job_db)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Build the workspace paths: <app>/public/<user>/index_maps
    controller_dir = os.path.abspath(os.path.dirname(__file__))
    app_dir = os.path.abspath(os.path.dirname(controller_dir))
    public_dir = os.path.join(app_dir, 'public')
    workspace = os.path.join(public_dir, str(username))
    index_map_dir = os.path.join(workspace, 'index_maps')

    # Start from an empty workspace.
    gi_lib.clear_folder(workspace)
    gi_lib.clear_folder(index_map_dir)

    # Pull the archive down and unpack it into the workspace.
    archive_url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(username, archive_url, workspace)

    # GSSHAPY database session.
    gsshapy_session = gsshapy_sessionmaker()

    # Locate the project (.prj) file within the extracted tree.
    for folder, subdirs, filenames in os.walk(workspace):
        for entry in filenames:
            if entry.endswith(".prj"):
                project_name = entry
                project_path = os.path.join(folder, entry)
                read_dir = os.path.dirname(project_path)

    context['job_id'] = job_id
    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id': job_id}))
def replace_values(request, job_id, index_name, mapping_table_number):
    '''
    Replace the values for variables on the index map.

    Reads "var-<id>" fields from the POST data, where <id> is the primary
    key of an MTValue row, and writes each submitted value back to that
    row before redirecting to the mapping-table submission page.

    Parameters:
        request: the Django/Tethys request object.
        job_id: id of the pending job being edited.
        index_name: name of the index map whose values are edited.
        mapping_table_number: index of the mapping table being edited.

    Returns:
        Redirect to 'gsshaindex:submit_mapping_table' for this job.
    '''
    context = {}

    # Get the user id and the project file id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    indices = index_raster.indices
    mapTables = index_raster.mapTables

    # Process the submitted descriptions, if any.
    if request.POST:  # fix: dropped redundant parentheses
        params = request.POST
        for key in params:
            if "var" in key:
                # Field names look like "var-<MTValue primary key>".
                identity = int(key.replace("var-", ""))
                mapTableValue = gsshapy_session.query(MTValue).get(identity)
                mapTableValue.value = params[key]
        gsshapy_session.commit()

    # Fix: sessions were previously never closed (connection leak).
    gsshapy_session.close()
    session.close()

    context['job_id'] = job_id
    context['index_name'] = index_name
    context['mapping_table_number'] = mapping_table_number
    return redirect(reverse('gsshaindex:submit_mapping_table', kwargs={'job_id': job_id, 'index_name': index_name, 'mapping_table_number': mapping_table_number}))
def replace_index_with_shapefile(request, job_id, index_name, shapefile_name):
    """
    Controller to replace the index map with the selected shapefile.

    Fetches the shapefile's GeoJSON from GeoServer, burns each geometry
    into the index-map raster with a fresh id (starting at 200), adds
    matching MTIndex/MTValue rows, prunes index values no longer present
    in the raster, regenerates the kml for the map, uploads it to CKAN,
    and updates job.current_kmls before redirecting to the edit page.
    """
    context = {}
    user = str(request.user)
    geojson = get_geojson_from_geoserver(user, shapefile_name)
    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    # Get project file id
    project_file_id = job.new_model_id
    # Create a session
    gsshapy_session = gsshapy_sessionmaker()
    # Specify the workspace: <app>/public/<user>/index_maps
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')
    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)
    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    mapTables = index_raster.mapTables
    if geojson['success'] != False:
        geojson_result = geojson['geojson']
        # Get existing indices
        index_raster_indices = index_raster.indices
        srid_name = geojson['crs']
        project_file_srid = project_file.srid
        # New index values are assigned sequentially starting at 200.
        id = 200
        # Loop through each geometry
        for object in geojson_result:
            index_present = False
            object_id = object['id']
            # Check to see if the index is present
            for index in index_raster_indices:
                if object_id == index.index:
                    index_present = True
                    break
            # Create new index value if it doesn't exist and add the number of ids
            if index_present == False:
                new_indice = MTIndex(id, object_id, "")
                new_indice.indexMap = index_raster
                # Seed a zero-valued MTValue for every variable of every
                # mapping table that uses this index map.
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapping_table).\
                        order_by(MTValue.variable).\
                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
            # NOTE(review): commit appears to run once per geometry here —
            # placement reconstructed from the original; confirm.
            gsshapy_session.commit()
            geom = object['geometry']
            geom['crs'] = srid_name
            geom_full = json.dumps(geom)
            # Change values in the index map (burn geometry into the raster)
            change_index_values = "SELECT ST_SetValue(raster,1,ST_Transform(ST_GeomFromGeoJSON('{0}'), {1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(str(geom_full), project_file_srid, id, index_raster.id)
            # Run with a 10-second watchdog; None signals a timeout.
            result = gi_lib.timeout(gi_lib.draw_update_index, args=(change_index_values, index_raster.id), kwargs={}, timeout=10, result_can_be_pickled=True, default=None)
            # If there is a timeout
            if result == None:
                messages.error(request, 'The submission timed out. Please try again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(
                    reverse('gsshaindex:shapefile_index',
                            kwargs={
                                'job_id': job_id,
                                'index_name': index_name,
                                'shapefile_name': shapefile_name
                            }))
            id += 1
        # Get the values actually present in the raster now.
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(
            index_raster.id) + ''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)
        # Prune MTIndex rows whose value no longer occurs in the raster.
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapping_table).\
                order_by(MTIndex.index).\
                all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
                    gsshapy_session.commit()
            map_table_count += 1
        # Re-fetch the (now modified) index map before exporting the kml.
        index_raster = gsshapy_session.query(IndexMap).filter(
            IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
        # Create kml file name and path (timestamped per user)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)
        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session,
                                      path=clusterFile,
                                      colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                      alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)
        # Replace this index map's entry in job.current_kmls with the new kml.
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {
                        'url': resource['url'],
                        'full_name': resource['name']
                    }
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()
        # NOTE(review): if the geojson fetch failed, the sessions above are
        # never closed before the redirect below — confirm against callers.
    context['index_name'] = index_name
    context['job_id'] = job_id
    return redirect(
        reverse('gsshaindex:edit_index',
                kwargs={
                    'job_id': job_id,
                    'index_name': index_name
                }))
def shapefile_index(request, job_id, index_name, shapefile_name):
    """
    Controller for the selecting the shapefile to use to define the index map.

    Looks up the named shapefile layer on the user's GeoServer workspace;
    when found, renders a map centered on the layer's bounding box with
    the layer as a WMS overlay, otherwise a default CONUS-centered map.
    """
    context = {}
    user = str(request.user)
    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    # Get project file id
    project_file_id = job.new_model_id
    # Create a session
    gsshapy_session = gsshapy_sessionmaker()
    # Specify the workspace: <app>/public/<user>/index_maps
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')
    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    # Find the contents of GeoServer for the user and display them
    dataset_engine = get_spatial_dataset_engine(name='gsshaindex_geoserver', app_class=GSSHAIndex)
    overlay_result = gi_lib.get_layer_and_resource(dataset_engine, user, shapefile_name)
    if overlay_result['success'] == True:
        url = overlay_result['layer']['wms']['kml']
        # latlon_bbox holds [min_x, max_x, min_y, max_y, srs]; drop the srs.
        coord_list = list(overlay_result['resource']['latlon_bbox'][:-1])
        # Center the view on the midpoint of the bounding box.
        avg_x = int(round((float(coord_list[0]) + float(coord_list[1])) / 2))
        avg_y = int(round((float(coord_list[2]) + float(coord_list[3])) / 2))
        # Map options consumed by the select_shapefile template's map gizmo.
        map_view = {'height': '600px',
                    'width': '100%',
                    'controls': ['ZoomSlider',
                                 'ScaleLine', ],
                    'layers': [{'WMS': {'url': url,
                                        'params': {'LAYERS': overlay_result['layer']['name'], },
                                        'serverType': 'geoserver'}
                                },
                               ],
                    'view': {'projection': 'EPSG:4326',
                             'center': [avg_x, avg_y],
                             'zoom': 6.5,
                             'maxZoom': 18,
                             'minZoom': 3},
                    'base_map': 'OpenStreetMap'
                    }
    else:
        # Fallback: default view over the continental US, no overlay.
        map_view = {'height': '400px',
                    'width': '100%',
                    'controls': ['ZoomSlider',
                                 'ScaleLine', ],
                    'view': {'projection': 'EPSG:4326',
                             'center': [-100, 40],
                             'zoom': 3.5,
                             'maxZoom': 18,
                             'minZoom': 3},
                    'base_map': 'OpenStreetMap'
                    }
    # NOTE(review): job_session and gsshapy_session are not closed before
    # rendering — confirm whether that is intentional.
    context['job_id'] = job_id
    context['index_name'] = index_name
    context['file_name'] = shapefile_name
    context['map_view'] = map_view
    return render(request, 'gsshaindex/select_shapefile.html', context)
def replace_index_with_shapefile(request, job_id, index_name, shapefile_name):
    """
    Controller to replace the index map with the selected shapefile.

    Fetches the shapefile's GeoJSON from GeoServer, burns each geometry
    into the index-map raster with a fresh id (starting at 200), adds
    matching MTIndex/MTValue rows, prunes index values no longer present
    in the raster, regenerates the kml for the map, uploads it to CKAN,
    and updates job.current_kmls before redirecting to the edit page.
    """
    context = {}
    user = str(request.user)
    geojson = get_geojson_from_geoserver(user, shapefile_name)
    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    # Get project file id
    project_file_id = job.new_model_id
    # Create a session
    gsshapy_session = gsshapy_sessionmaker()
    # Specify the workspace: <app>/public/<user>/index_maps
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')
    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)
    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mapTables = index_raster.mapTables
    if geojson['success'] != False:
        geojson_result = geojson['geojson']
        # Get existing indices
        index_raster_indices = index_raster.indices
        srid_name = geojson['crs']
        project_file_srid = project_file.srid
        # New index values are assigned sequentially starting at 200.
        id = 200
        # Loop through each geometry
        for object in geojson_result:
            index_present = False
            object_id = object['id']
            # Check to see if the index is present
            for index in index_raster_indices:
                if object_id == index.index:
                    index_present = True
                    break
            # Create new index value if it doesn't exist and add the number of ids
            if index_present == False:
                new_indice = MTIndex(id, object_id, "")
                new_indice.indexMap = index_raster
                # Seed a zero-valued MTValue for every variable of every
                # mapping table that uses this index map.
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapping_table).\
                        order_by(MTValue.variable).\
                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
            # NOTE(review): commit appears to run once per geometry here —
            # placement reconstructed from the original; confirm.
            gsshapy_session.commit()
            geom = object['geometry']
            geom['crs'] = srid_name
            geom_full = json.dumps(geom)
            # Change values in the index map (burn geometry into the raster)
            change_index_values = "SELECT ST_SetValue(raster,1,ST_Transform(ST_GeomFromGeoJSON('{0}'), {1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(str(geom_full), project_file_srid, id, index_raster.id)
            # Run with a 10-second watchdog; None signals a timeout.
            result = gi_lib.timeout(gi_lib.draw_update_index, args=(change_index_values, index_raster.id), kwargs={}, timeout=10, result_can_be_pickled=True, default=None)
            # If there is a timeout
            if result == None:
                messages.error(request, 'The submission timed out. Please try again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(reverse('gsshaindex:shapefile_index', kwargs={'job_id': job_id, 'index_name': index_name, 'shapefile_name': shapefile_name}))
            id += 1
        # Get the values actually present in the raster now.
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(index_raster.id) + ''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)
        # Prune MTIndex rows whose value no longer occurs in the raster.
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapping_table).\
                order_by(MTIndex.index).\
                all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
                    gsshapy_session.commit()
            map_table_count += 1
        # Re-fetch the (now modified) index map before exporting the kml.
        index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
        # Create kml file name and path (timestamped per user)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)
        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)
        # Replace this index map's entry in job.current_kmls with the new kml.
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {'url': resource['url'], 'full_name': resource['name']}
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()
        # NOTE(review): if the geojson fetch failed, the sessions above are
        # never closed before the redirect below — confirm against callers.
    context['index_name'] = index_name
    context['job_id'] = job_id
    return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id': job_id, 'index_name': index_name}))
def extract_gssha(request, job_id):
    '''
    This takes the file name and id that were submitted and unzips the files,
    finds the index maps, and creates kmls.

    Downloads the job's zip archive, reads the GSSHA project into the
    GSSHAPY database, exports a kml for every index map, uploads each to
    CKAN, and records the resulting urls in job.current_kmls.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    # Specify the workspace: <app>/public/<user>/index_maps
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')
    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)
    # Get url for the resource and extract the GSSHA file
    url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)
    # Create GSSHAPY Session
    gsshapy_session = gsshapy_sessionmaker()
    # Find the project file (the last .prj found in the tree wins).
    for root, dirs, files in os.walk(userDir):
        for file in files:
            if file.endswith(".prj"):
                project_name = file
                project_path = os.path.join(root, file)
                read_dir = os.path.dirname(project_path)
    # Create an empty Project File Object and read the model into the DB.
    # NOTE(review): placement after the walk loop reconstructed from the
    # original — confirm it is not meant to run per .prj file.
    project = ProjectFile()
    project.readInput(directory=read_dir,
                      projectFileName=project_name,
                      session=gsshapy_session,
                      spatial=True)
    # Create empty dictionary to hold the kmls from this session
    current_kmls = {}
    # Store model information on the job record.
    job.new_model_name = project.name
    job.new_model_id = project.id
    job.created = datetime.now()
    # Get index maps belonging to this project's mapping table file.
    index_list = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project.mapTableFile).all()
    # Loop through the index maps, exporting and uploading a kml for each.
    for current_index in index_list:
        # Create kml file name and path (timestamped per user)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = current_index.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)
        # Generate color ramp
        current_index.getAsKmlClusters(session=gsshapy_session,
                                       path=clusterFile,
                                       colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                       alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)
        # If the kml is added correctly, create an entry for the current_kmls with the name as the index name
        if status == True:
            current_kmls[current_index.name] = {
                'url': resource['url'],
                'full_name': resource['name']
            }
    # Add the kmls with their url to the database
    job.current_kmls = json.dumps(current_kmls)
    session.commit()
    session.close()
    gsshapy_session.close()
    context['job_id'] = job_id
    return redirect(
        reverse('gsshaindex:select_index', kwargs={'job_id': job_id}))
def submit_edits(request, job_id, index_name):
    '''
    Controller that handles submissions of edits from the user after they
    manually edit an index map.

    Expects POST field 'geometry' containing JSON with a 'geometries'
    list of {'wkt': ..., 'properties': {'value': ...}} entries. Each
    geometry is burned into the index-map raster with its value; missing
    MTIndex/MTValue rows are created, stale ones pruned, and a new kml is
    generated, uploaded to CKAN, and recorded on the job.
    '''
    context = {}
    user = str(request.user)
    params = request.POST
    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    # Get project file id
    project_file_id = job.new_model_id
    # Create session
    gsshapy_session = gsshapy_sessionmaker()
    # Specify the workspace: <app>/public/<user>/index_maps
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')
    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    # The watershed mask raster for this project (fetched but see note).
    mask_file = gsshapy_session.query(RasterMapFile).filter(
        RasterMapFile.projectFileID == project_file_id).filter(
        RasterMapFile.fileExtension == "msk").one()
    # Get a list of the map tables for the index map
    mapTables = index_raster.mapTables
    # If some geometry is submitted, go and run the necessary steps to change the map
    if params['geometry']:
        jsonGeom = json.loads(params['geometry'])
        geometries = jsonGeom['geometries']
        # Convert from json to WKT
        for geometry in geometries:
            wkt = geometry['wkt']
            # Get the values for the geometry
            value = geometry['properties']['value']
            # Loop through indices and see if they match
            index_raster_indices = index_raster.indices
            index_present = False
            for index in index_raster_indices:
                if int(index.index) == int(value):
                    index_present = True
                    break
            # Create new index value if it doesn't exist and change the number of ids
            if index_present == False:
                new_indice = MTIndex(value, "", "")
                new_indice.indexMap = index_raster
                # Seed a zero-valued MTValue for every variable of every
                # mapping table that uses this index map.
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapping_table).\
                        order_by(MTValue.variable).\
                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
            gsshapy_session.commit()
            # Default to UTM zone 12N (26912) when the project has no srid.
            if project_file.srid == None:
                srid = 26912
            else:
                srid = project_file.srid
            # Change values in the index map (burn the WKT geometry in).
            change_index_values = "SELECT ST_SetValue(raster,1, ST_Transform(ST_GeomFromText('{0}', 4326),{1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(wkt, srid, value, index_raster.id)
            # Run with a 10-second watchdog; None signals a timeout.
            result = gi_lib.timeout(gi_lib.draw_update_index,
                                    args=(change_index_values, index_raster.id),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=True,
                                    default=None)
            if result == None:
                messages.error(
                    request,
                    'The submission timed out. Please try to draw in the changes and submit them again.'
                )
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(
                    reverse('gsshaindex:edit_index',
                            kwargs={
                                'job_id': job_id,
                                'index_name': index_name
                            }))
        # Get the values actually present in the raster now.
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(
            index_raster.id) + ''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)
        # Prune MTIndex rows whose value no longer occurs in the raster.
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(MTIndex.index, MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapping_table).\
                order_by(MTIndex.index).\
                all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
                    gsshapy_session.commit()
            map_table_count += 1
        # Re-fetch the (now modified) index map before exporting the kml.
        index_raster = gsshapy_session.query(IndexMap).filter(
            IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
        # Create kml file name and path (timestamped per user)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)
        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session,
                                      path=clusterFile,
                                      colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                      alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)
        # Replace this index map's entry in job.current_kmls with the new kml.
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {
                        'url': resource['url'],
                        'full_name': resource['name']
                    }
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()
    else:
        # Nothing drawn: tell the user and fall through to the redirect.
        # NOTE(review): sessions are not closed on this path — confirm.
        messages.error(request, "You must make edits to submit")
    context['index_name'] = index_name
    context['job_id'] = job_id
    return redirect(
        reverse('gsshaindex:edit_index',
                kwargs={
                    'job_id': job_id,
                    'index_name': index_name
                }))
def combine_index(request, job_id, index_name):
    """
    Controller for the edit index by manually drawing in edits page.

    Presents two selects of the job's index maps. On POST, either copies
    one selected map over this one (second select = "none") or combines
    two maps via PostGIS map algebra as composite id = first*1000 +
    second. Composite ids are then renumbered sequentially starting
    after ID_OFFSET, MTIndex rows are created/pruned to match, and a new
    kml is generated, uploaded to CKAN, and recorded on the job.
    """
    context = {}
    user = str(request.user)
    # Renumbered ids start counting from this offset.
    ID_OFFSET = 10
    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    # Get project file id and gsshapy_session
    project_file_id = job.new_model_id
    gsshapy_session = gsshapy_sessionmaker()
    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices
    # Get list of index files
    resource_list = json.loads(job.current_kmls)
    # Create blank array for names and urls
    resource_names = []
    resource_url = []
    resource_info = []
    # Get array of names and urls; resource_info is (value, label) pairs
    # for the select gizmos below.
    for key in resource_list:
        resource_names.append(key)
        resource_url.append(resource_list[key]['url'])
        resource_info.append((key, key))
    select_input1 = {'display_text': "Select first index map",
                     'name': 'select1',
                     'multiple': False,
                     'options': resource_info}
    select_input2 = {'display_text': "Select second index map or none",
                     'name': 'select2',
                     'multiple': False,
                     'options': [("None", "none")] + resource_info}
    # if the next button was pressed
    if request.POST:
        params = request.POST
        # Error message if both maps selected are the same
        if params['select1'] == params['select2']:
            result = ""
            messages.error(request, "You must select two different index maps. Or if you'd like to replace this map with a different map, select None for the second option")
        # Process if only one map is selected: copy it over this index map
        # (the *1000 factor matches the two-map composite encoding below).
        elif params['select2'] == "none":
            select1_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select1']).one()
            statement = '''UPDATE idx_index_maps Set raster = ST_MapAlgebra( (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select1_id.id) + '''), 1, (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(new_index.id) + '''), 1, '([rast1]*1000+ [rast2]*0)' ) WHERE id = ''' + unicode(new_index.id) + '''; '''
            # 10-second watchdog; None signals a timeout.
            result = gi_lib.timeout(gsshapy_engine.execute, args=(statement,), kwargs={}, timeout=10, result_can_be_pickled=False, default=None)
        # Process if two maps are selected
        else:
            # Get the ids for the two index maps to be combined
            select1_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select1']).one()
            select2_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select2']).one()
            # Combine the maps: composite id = first*1000 + second.
            statement = '''UPDATE idx_index_maps SET raster =ST_MapAlgebra( (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select1_id.id) + '''), 1, (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select2_id.id) + '''), 1, '(([rast1]*1000) + [rast2])' ) WHERE id = ''' + unicode(new_index.id) + '''; '''
            result = gi_lib.timeout(gsshapy_engine.execute, args=(statement,), kwargs={}, timeout=10, result_can_be_pickled=False, default=None)
        # result == "" only on the same-map error branch above.
        if result != "":
            # Get the composite values now present in the combined raster.
            statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(new_index.id) + ''') AS foo ORDER BY (pvc).value; '''
            new_indice_values = gsshapy_engine.execute(statement3)
            # Get the indices for the index being changed
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()
            # Go through the map tables that use the index map
            map_table_count = 0
            for mapping_table in mapTables:
                # Reset the number of ids to start counting them
                numberIDs = ID_OFFSET
                ids = []
                # Go through each new id value
                for row in new_indice_values:
                    index_present = False
                    numberIDs += 1
                    ids.append(row.value)
                    large_id = int(row[0])
                    for index in new_index.indices:
                        if int(index.index) == int(row[0]):
                            index_present = True
                            break
                    if index_present == False:
                        # Split composite id back into its two source ids.
                        # NOTE(review): split("0")[-1] assumes the second id
                        # has no interior zeros — fragile; confirm intent.
                        if str(large_id).endswith("000") == False:
                            second_id = str(large_id).split("0")[-1]
                            first_id = (large_id - int(second_id)) / 1000
                        else:
                            first_id = (large_id) / 1000
                            second_id = ""
                        description2 = ""
                        # Carry descriptions over from the source maps.
                        pastinfo1 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                            filter(MTIndex.idxMapID == select1_id.id).\
                            filter(MTIndex.index == first_id).\
                            all()
                        description1 = pastinfo1[0].description1 + " " + pastinfo1[0].description2
                        if second_id != "":
                            pastinfo2 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                filter(MTIndex.idxMapID == select2_id.id).\
                                filter(MTIndex.index == second_id).\
                                all()
                            description2 = pastinfo2[0].description1 + " " + pastinfo2[0].description2
                        # Query for the pixel values of row[0] and replace with numberIDs
                        pixel_query = '''SELECT ST_PixelOfValue((SELECT raster FROM idx_index_maps WHERE id = {0}), {1});'''.format(unicode(new_index.id), row[0])
                        pixels = gsshapy_session.execute(pixel_query)
                        for pixel in pixels:
                            # Pixel coords come back as "(x,y)" strings.
                            coord = pixel[0].strip("()")
                            x, y = coord.split(",")
                            update_query = '''UPDATE idx_index_maps SET raster = (SELECT ST_SetValue(raster,{1},{2},{3}) FROM idx_index_maps WHERE id = {0}) WHERE id = {0};'''.format(unicode(new_index.id), int(x), int(y), numberIDs)
                            new_result = gsshapy_session.execute(update_query)
                        # Create new index value
                        new_indice = MTIndex(numberIDs, description1, description2)
                        # new_indice = MTIndex(row[0], description1, description2)
                        new_indice.indexMap = new_index
                        # Seed zero-valued MTValues for every variable of
                        # every mapping table that uses this index map.
                        for mapping_table in mapTables:
                            distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                filter(MTValue.mapTable == mapping_table).\
                                order_by(MTValue.variable).\
                                all()
                            variables = []
                            for var in distinct_vars:
                                variables.append(var[0])
                            for variable in variables:
                                new_value = MTValue(variable, 0)
                                new_value.mapTable = mapping_table
                                new_value.index = new_indice
                # Delete indices that aren't present
                for index in indices:
                    if not int(index[0]) in ids:
                        fetched_index = gsshapy_session.query(MTIndex).get(index.id)
                        for val in fetched_index.values:
                            gsshapy_session.delete(val)
                        gsshapy_session.delete(fetched_index)
                new_index.mapTables[map_table_count].numIDs = numberIDs - ID_OFFSET
                map_table_count += 1
            # Refresh the index list after the renumbering above.
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()
            index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
            # Specify the workspace: <app>/public/<user>/index_maps
            controllerDir = os.path.abspath(os.path.dirname(__file__))
            gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
            publicDir = os.path.join(gsshaindexDir, 'public')
            userDir = os.path.join(publicDir, str(user))
            indexMapDir = os.path.join(userDir, 'index_maps')
            # Create kml file name and path (timestamped per user)
            current_time = time.strftime("%Y%m%dT%H%M%S")
            resource_name = index_raster.name + "_" + str(user) + "_" + current_time
            kml_ext = resource_name + '.kml'
            clusterFile = os.path.join(indexMapDir, kml_ext)
            index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
            # Generate color ramp
            index_raster.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)
            resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)
            # Replace this index map's entry in job.current_kmls.
            temp_list = json.loads(job.current_kmls)
            if status == True:
                for item in temp_list:
                    if item == index_name:
                        del temp_list[item]
                        temp_list[index_name] = {'url': resource['url'], 'full_name': resource['name']}
                        break
            job.current_kmls = json.dumps(temp_list)
            job_session.commit()
            gsshapy_session.commit()
            job_session.close()
            gsshapy_session.close()
            return redirect(reverse('gsshaindex:mapping_table', kwargs={'job_id': job_id, 'index_name': index_name, 'mapping_table_number': '0'}))
    # GET request, or POST that errored out with result == "".
    job_session.commit()
    gsshapy_session.commit()
    job_session.close()
    gsshapy_session.close()
    # Set the first index as the active one
    index_names = str(resource_names[0])
    # Set up map properties for the google-map gizmo in the template.
    editable_map = {'height': '400px',
                    'width': '100%',
                    'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_names,
                    'maps_api_key': maps_api_key,
                    'drawing_types_enabled': []}
    context['replaced_index'] = index_name
    context['index_name'] = index_names
    context['google_map'] = editable_map
    context['select_input1'] = select_input1
    context['select_input2'] = select_input2
    context['job_id'] = job_id
    context['resource_name'] = resource_names
    return render(request, 'gsshaindex/combine_index.html', context)
def zip_file(request, job_id):
    '''
    Zip up the GSSHA project files in preparation for a model run.

    Writes the GSSHAPY project to disk under the user's workspace, zips the
    files, uploads the archive to the public CKAN dataset, and records the
    original/new model URLs on the job before redirecting to the status page.

    Args:
        request: Django HttpRequest; POST must carry 'new_name' and
            'new_description'.
        job_id: Identifier of the pending job being prepared.
    '''
    context = {}

    # Get the job id and user id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    project_file_id = job.new_model_id

    # Get the name and description from the submission
    params = request.POST
    not_clean_name = params['new_name']
    new_description = params['new_description']

    # Reformat the name by removing bad characters.
    # BUG FIX: the original loop did `new_name = not_clean_name.replace(...)`,
    # restarting from the raw name every iteration, so only the final bad
    # character ('*') was ever stripped. Accumulate replacements instead.
    bad_char = "',.<>[]{}=+-/\"|:;\\^?!~`@#$%&*"
    new_name = not_clean_name
    for char in bad_char:
        new_name = new_name.replace(char, "_")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Get project from the database
    projectFileAll = gsshapy_session.query(ProjectFile).get(project_file_id)

    # Create name for files (strip a trailing '.prj' extension if present)
    project_name = projectFileAll.name
    if project_name.endswith('.prj'):
        project_name = project_name[:-4]
    pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

    # Set depth map: ensure the project writes a max-depth grid (.gfl)
    if projectFileAll.getCard("FLOOD_GRID") == None:
        max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(new_name))
        projectFileAll.projectCards.append(max_depth_card)
        gsshapy_session.commit()
        job.original_certification = "Missing gfl"

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    newFileDir = os.path.join(userDir, 'newFile')
    writeFile = os.path.join(newFileDir, new_name)
    zipPath = os.path.join(newFileDir, new_name + "_zip")

    # Clear workspace folders
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(newFileDir)
    gi_lib.clear_folder(writeFile)

    # Write all the project files to disk
    projectFileAll.writeInput(session=gsshapy_session, directory=writeFile, name=new_name)

    # Make a list of the project files
    writeFile_list = os.listdir(writeFile)

    # Add each project file to the zip folder
    with zipfile.ZipFile(zipPath, "w") as gssha_zip:
        for item in writeFile_list:
            abs_path = os.path.join(writeFile, item)
            archive_path = os.path.join(new_name, item)
            gssha_zip.write(abs_path, archive_path)

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Add the zipped GSSHA file to the public ckan
    results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine,
                                            new_name, new_description, pretty_date, user)

    # If the file uploads correctly, store the run URLs on the job
    if success == True:
        new_url = results['url']
        new_name = results['name']
        original_url = job.original_url
        original_name = job.original_name
        model_data = {'original': {'url': original_url, 'name': original_name},
                      'new': {'url': new_url, 'name': new_name}}
        job.run_urls = model_data
        job.new_name = new_name
        job.status = "ready to run"
        session.commit()

    return redirect(reverse('gsshaindex:status'))
def select_index(request, job_id):
    """
    Controller for the index-map selection page.

    On a POST of the 'select_index' form, redirects to the chosen editing
    workflow (draw polygons, upload shapefile, or merge/replace). Otherwise
    renders the list of index-map kmls stored on the job and a Google map
    of the first (alphabetically sorted) index map.

    NOTE(review): this view is defined twice in this module; the later
    definition shadows this one at import time.
    """
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Give options for editing the index map
    if ('select_index' in request.POST):
        params = request.POST
        index_name = params['index_name']
        if (params['method'] == "Create polygons"):
            return redirect(
                reverse('gsshaindex:edit_index',
                        kwargs={
                            'job_id': job_id,
                            'index_name': index_name
                        }))
        elif (params['method'] == "Upload shapefile"):
            # messages.error(request, "Select by polygon is currently in production and hasn't been initialized yet.")
            return redirect(
                reverse('gsshaindex:shapefile_index',
                        kwargs={
                            'job_id': job_id,
                            'index_name': index_name,
                            'shapefile_name': "None"
                        }))
        elif (params['method'] == "Merge index maps or replace with another"):
            # messages.error(request, "Merging index maps is currently in production and hasn't been initialized yet.")
            return redirect(
                reverse('gsshaindex:combine_index',
                        kwargs={
                            'job_id': job_id,
                            'index_name': index_name
                        }))

    # Get list of index files (kml name -> {'url', 'full_name'}) stored on the job
    resource_kmls = json.loads(job.current_kmls)

    # Create arrays of the names and urls
    unsorted_resource_name = []
    resource_url = []
    for key in resource_kmls:
        unsorted_resource_name.append(key)
        resource_url.append(resource_kmls[key]['url'])
    resource_name = sorted(unsorted_resource_name)

    # Set the first index as the active one
    # NOTE(review): raises IndexError if the job has no kmls — assumed
    # extract_gssha always populates current_kmls first; confirm.
    map_name = str(resource_name[0])

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == map_name).one()
    indices = index_raster.indices

    # Set up map properties for the Google map gizmo
    editable_map = {
        'height': '600px',
        'width': '100%',
        'reference_kml_action':
        '/apps/gsshaindex/' + job_id + '/get-index-maps/' + map_name,
        'maps_api_key': maps_api_key,
        'drawing_types_enabled': []
    }

    context['google_map'] = editable_map
    context['project_name'] = job.original_name
    context['resource_name'] = resource_name
    context['map_name'] = map_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/select_index.html', context)
def shapefile_index(request, job_id, index_name, shapefile_name):
    """
    Controller for selecting the shapefile used to define the index map.

    Looks up the user's layer on GeoServer; if found, renders an OpenLayers
    map centered on the layer's bounding box with the WMS overlay, otherwise
    renders a default CONUS-centered map.

    Args:
        request: Django HttpRequest.
        job_id: Identifier of the pending job.
        index_name: Name of the index map being edited.
        shapefile_name: Name of the uploaded shapefile layer, or "None".
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create a session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
    # NOTE(review): mapTables is queried but never used in this view.
    mapTables = new_index.mapTables

    # Find the contents of GeoServer for the user and display them
    dataset_engine = get_spatial_dataset_engine(name='gsshaindex_geoserver',
                                                app_class=GSSHAIndex)
    overlay_result = gi_lib.get_layer_and_resource(dataset_engine, user, shapefile_name)
    if overlay_result['success'] == True:
        url = overlay_result['layer']['wms']['kml']
        # latlon_bbox: assumed [min_x, max_x, min_y, max_y, srs]; the slice
        # drops the trailing srs entry — TODO confirm against gi_lib.
        coord_list = list(overlay_result['resource']['latlon_bbox'][:-1])
        # Center the view on the midpoint of the bounding box
        avg_x = int(round((float(coord_list[0]) + float(coord_list[1])) / 2))
        avg_y = int(round((float(coord_list[2]) + float(coord_list[3])) / 2))
        map_view = {
            'height': '600px',
            'width': '100%',
            'controls': [
                'ZoomSlider',
                'ScaleLine',
            ],
            'layers': [
                {
                    'WMS': {
                        'url': url,
                        'params': {
                            'LAYERS': overlay_result['layer']['name'],
                        },
                        'serverType': 'geoserver'
                    }
                },
            ],
            'view': {
                'projection': 'EPSG:4326',
                'center': [avg_x, avg_y],
                'zoom': 6.5,
                'maxZoom': 18,
                'minZoom': 3
            },
            'base_map': 'OpenStreetMap'
        }
    else:
        # No layer found: show a default map centered on the continental US
        map_view = {
            'height': '400px',
            'width': '100%',
            'controls': [
                'ZoomSlider',
                'ScaleLine',
            ],
            'view': {
                'projection': 'EPSG:4326',
                'center': [-100, 40],
                'zoom': 3.5,
                'maxZoom': 18,
                'minZoom': 3
            },
            'base_map': 'OpenStreetMap'
        }

    context['job_id'] = job_id
    context['index_name'] = index_name
    context['file_name'] = shapefile_name
    context['map_view'] = map_view

    return render(request, 'gsshaindex/select_shapefile.html', context)
def mapping_table(request, job_id, index_name, mapping_table_number):
    '''
    Display (and optionally update) the mapping tables tied to an index map.

    On POST, saves submitted index description fields (keys of the form
    'indice-desc1-<id>' / 'indice-desc2-<id>') back to the database.
    Then cross-tabulates the active mapping table's variables and index
    values for display in the editing template.

    Args:
        request: Django HttpRequest.
        job_id: Identifier of the pending job.
        index_name: Name of the index map whose tables are shown.
        mapping_table_number: Position (string) of the active mapping table.
    '''
    context = {}

    # Get the user id and the project file id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user,session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    indices = index_raster.indices
    mapTables = index_raster.mapTables
    mapping_table_number = int(mapping_table_number)

    # Process for if descriptions are submitted
    if (request.POST):
        params = request.POST
        for key in params:
            if "indice" in key:
                if "desc1" in key:
                    # NOTE(review): desc1 casts the id to int but desc2 does
                    # not — query .get() tolerates both, but the asymmetry
                    # looks accidental.
                    identity = int(key.replace("indice-desc1-",""))
                    mapDesc1 = gsshapy_session.query(MTIndex).get(identity)
                    mapDesc1.description1 = params[key]
                elif "desc2" in key:
                    identity = key.replace("indice-desc2-","")
                    mapDesc2 = gsshapy_session.query(MTIndex).get(identity)
                    mapDesc2.description2 = params[key]
        gsshapy_session.commit()

    # Get list of index files
    resource_kmls = json.loads(job.current_kmls)

    # Create array of kml names and urls
    resource_name = []
    resource_url = []
    for key in resource_kmls:
        resource_name.append(key)
        resource_url.append(resource_kmls[key]['url'])

    # Find the associated map tables and add them to an array of
    # [display name, raw name, position] triples
    arrayMapTables = []
    count = 0
    for table in mapTables:
        name = str(table.name)
        clean = name.replace("_"," ")
        arrayMapTables.append([clean, table.name, count])
        count +=1

    # Find the variables that are related to the active map table
    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                        order_by(MTValue.variable).\
                        all()

    # Create an array of the variables in the active map table
    variables = []
    for var in distinct_vars:
        variables.append(var[0])

    # Cross tabulate manually to populate the mapping table information
    indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                    join(MTValue).\
                    filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                    order_by(MTIndex.index).\
                    all()

    # Get all the values to populate the table, one column per variable
    var_values = []
    for var in variables:
        values = gsshapy_session.query(MTValue.id, MTValue.value).\
                    join(MTIndex).\
                    filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                    filter(MTValue.variable == var).\
                    order_by(MTIndex.index).\
                    all()
        var_values.append(values)

    # Transpose columns into rows for the template (Python 2 zip -> list)
    zipValues = zip(*var_values)

    # Dictionary of properties for the map
    google_map = {'height': '400px',
                  'width': '100%',
                  'kml_service': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
                  'maps_api_key':maps_api_key}

    context['indices'] = indices
    context['job_id'] = job_id
    context['index_name'] = index_name
    context['mapping_table_number'] = mapping_table_number
    context['resource_kmls'] = resource_kmls
    context['resource_name'] = resource_name
    context['resource_url'] = resource_url
    context['mapTables'] = arrayMapTables
    context['variables'] = variables
    context['google_map'] = google_map
    context['values'] = zipValues

    return render(request, 'gsshaindex/mapping_table.html', context)
def extract_gssha(request, job_id):
    '''
    Unzip a submitted GSSHA model, read it into GSSHAPY, and build kmls.

    Downloads and extracts the model zip from the job's original URL, reads
    the project file into the spatial database, generates a clustered kml
    for each index map, uploads the kmls to CKAN, and stores their URLs on
    the job before redirecting to the index-selection page.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user,session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Get url for the resource and extract the GSSHA file
    url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)

    # Create GSSHAPY Session
    gsshapy_session = gsshapy_sessionmaker()

    # Find the project file (last .prj wins if several are present)
    # NOTE(review): if the archive contains no .prj file, project_name /
    # read_dir are never bound and the readInput call below raises
    # NameError — assumed upstream validation guarantees a .prj; confirm.
    for root, dirs, files in os.walk(userDir):
        for file in files:
            if file.endswith(".prj"):
                project_name = file
                project_path = os.path.join(root, file)
                read_dir = os.path.dirname(project_path)

    # Create an empty Project File Object and read the model into the DB
    project = ProjectFile()
    project.readInput(directory=read_dir, projectFileName=project_name, session = gsshapy_session, spatial=True)

    # Create empty dictionary to hold the kmls from this session
    current_kmls = {}

    # Store model information
    job.new_model_name = project.name
    job.new_model_id = project.id
    job.created = datetime.now()

    # Get index maps
    index_list = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project.mapTableFile).all()

    # Loop through the index maps, writing and uploading a kml for each
    for current_index in index_list:
        # Create kml file name and path (timestamped for uniqueness)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = current_index.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        current_index.getAsKmlClusters(session=gsshapy_session, path = clusterFile, colorRamp = ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        # If the kml is added correctly, create an entry for the current_kmls
        # with the name as the index name
        if status == True:
            current_kmls[current_index.name] = {'url':resource['url'], 'full_name':resource['name']}

    # Add the kmls with their url to the database
    job.current_kmls = json.dumps(current_kmls)
    session.commit()
    session.close()
    gsshapy_session.close()

    context['job_id'] = job_id
    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id':job_id}))
def submit_mapping_table(request, job_id, index_name, mapping_table_number):
    '''
    Present the mapping table and its values for final review.

    Read-only counterpart of mapping_table(): cross-tabulates the active
    mapping table's variables and index values and renders the review
    template. No database writes are performed here.

    Args:
        request: Django HttpRequest.
        job_id: Identifier of the pending job.
        index_name: Name of the index map being reviewed.
        mapping_table_number: Position (string) of the active mapping table.
    '''
    context = {}

    # Get the user id and the project file id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user,session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Get list of index files
    resource_kmls = json.loads(job.current_kmls)

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    indices = index_raster.indices
    mapTables = index_raster.mapTables
    mapping_table_number = int(mapping_table_number)

    # Find the associated map tables and add them to an array of
    # [display name, raw name, position] triples
    assocMapTables = []
    count = 0
    for table in mapTables:
        name = str(table.name)
        clean = name.replace("_"," ")
        assocMapTables.append([clean, table.name, count])
        count +=1

    # Find the variables that are related to the active map table
    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                        order_by(MTValue.variable).\
                        all()

    # Create an array of the variables in the active map table
    variables = []
    for var in distinct_vars:
        variables.append(var[0])

    # Cross tabulate manually to populate the mapping table information
    # (unlike mapping_table(), MTIndex.id is not selected here)
    indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.description1, MTIndex.description2).\
                    join(MTValue).\
                    filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                    order_by(MTIndex.index).\
                    all()

    # Get the values for the mapping table, one column per variable
    var_values = []
    for var in variables:
        values = gsshapy_session.query(MTValue.id, MTValue.value).\
                    join(MTIndex).\
                    filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                    filter(MTValue.variable == var).\
                    order_by(MTIndex.index).\
                    all()
        var_values.append(values)

    # Transpose columns into rows for the template (Python 2 zip -> list)
    arrayValues = zip(*var_values)

    # Dictionary of properties for the map
    google_map = {'height': '400px',
                  'width': '100%',
                  'kml_service': '/apps/gsshaindex/' + job_id+ '/get-index-maps/' + index_name,
                  'maps_api_key':maps_api_key}

    context['job_id'] = job_id
    context['index_name'] = index_name
    context['mapping_table_number'] = mapping_table_number
    context['variables'] = variables
    context['values'] = arrayValues
    context['indices'] = indices
    context['mapTables'] = assocMapTables
    context['resource_kmls'] = resource_kmls
    context['google_map'] = google_map

    return render(request, 'gsshaindex/review_mapping_table.html', context)
def select_index(request, job_id):
    """
    Controller for the index-map selection page.

    On a POST of the 'select_index' form, redirects to the chosen editing
    workflow (draw polygons, upload shapefile, or merge/replace). Otherwise
    renders the list of index-map kmls stored on the job and a Google map
    of the first (alphabetically sorted) index map.

    NOTE(review): duplicate definition — an identical select_index appears
    earlier in this module; this later one wins at import time. The earlier
    copy should be removed.
    """
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Give options for editing the index map
    if ('select_index' in request.POST):
        params = request.POST
        index_name = params['index_name']
        if (params['method'] == "Create polygons"):
            return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id':job_id, 'index_name':index_name}))
        elif (params['method'] == "Upload shapefile"):
            # messages.error(request, "Select by polygon is currently in production and hasn't been initialized yet.")
            return redirect(reverse('gsshaindex:shapefile_index', kwargs={'job_id':job_id, 'index_name':index_name, 'shapefile_name':"None"}))
        elif (params['method'] == "Merge index maps or replace with another"):
            # messages.error(request, "Merging index maps is currently in production and hasn't been initialized yet.")
            return redirect(reverse('gsshaindex:combine_index', kwargs={'job_id':job_id, 'index_name':index_name}))

    # Get list of index files (kml name -> {'url', 'full_name'}) stored on the job
    resource_kmls = json.loads(job.current_kmls)

    # Create arrays of the names and urls
    unsorted_resource_name = []
    resource_url = []
    for key in resource_kmls:
        unsorted_resource_name.append(key)
        resource_url.append(resource_kmls[key]['url'])
    resource_name = sorted(unsorted_resource_name)

    # Set the first index as the active one
    # NOTE(review): raises IndexError if the job has no kmls — assumed
    # extract_gssha always populates current_kmls first; confirm.
    map_name = str(resource_name[0])

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == map_name).one()
    indices = index_raster.indices

    # Set up map properties for the Google map gizmo
    editable_map = {'height': '600px',
                    'width': '100%',
                    'reference_kml_action': '/apps/gsshaindex/'+ job_id + '/get-index-maps/' + map_name,
                    'maps_api_key':maps_api_key,
                    'drawing_types_enabled':[]}

    context['google_map'] = editable_map
    context['project_name'] = job.original_name
    context['resource_name'] = resource_name
    context['map_name'] = map_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/select_index.html', context)
def submit_edits(request, job_id, index_name):
    '''
    Apply the polygons a user drew onto an index map.

    For each submitted geometry: ensures an MTIndex exists for the drawn
    value (creating blank-description indices and zeroed MTValues when
    needed), then burns the polygon into the index-map raster via PostGIS
    ST_SetValue, running under a timeout. Afterwards it recounts the raster
    values, deletes indices no longer present, regenerates the kml, uploads
    it to CKAN, and records the new url on the job. Always redirects back
    to the edit page.
    '''
    context = {}
    user = str(request.user)
    params = request.POST

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    # NOTE(review): mask_file is queried but never used below.
    mask_file = gsshapy_session.query(RasterMapFile).filter(RasterMapFile.projectFileID == project_file_id).filter(RasterMapFile.fileExtension == "msk").one()

    # Get a list of the map tables for the index map
    mapTables = index_raster.mapTables

    # If some geometry is submitted, go and run the necessary steps to change the map
    # NOTE(review): raises KeyError if 'geometry' is absent from the POST —
    # assumed the edit page always submits the field; confirm.
    if params['geometry']:
        jsonGeom = json.loads(params['geometry'])
        geometries= jsonGeom['geometries']

        # Convert from json to WKT
        for geometry in geometries:
            wkt = geometry['wkt']

            # Get the values for the geometry
            value = geometry['properties']['value']

            # Loop through indices and see if they match
            index_raster_indices = index_raster.indices
            index_present = False
            for index in index_raster_indices:
                if int(index.index) == int(value):
                    index_present = True
                    break

            # Create new index value if it doesn't exist and change the number of ids
            if index_present == False:
                new_indice = MTIndex(value, "", "")
                new_indice.indexMap = index_raster
                # Seed every mapping table with a zero MTValue for each of
                # its variables so the new index has a complete row
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                        filter(MTValue.mapTable == mapping_table).\
                                        order_by(MTValue.variable).\
                                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
            gsshapy_session.commit()

            # Fall back to UTM zone 12N when the project has no srid
            if project_file.srid == None:
                srid = 26912
            else:
                srid = project_file.srid

            # Change values in the index map.
            # NOTE(review): SQL is built by string formatting from
            # client-supplied wkt/value — SQL-injection risk; should use
            # bound parameters. Flagged, not changed here.
            change_index_values = "SELECT ST_SetValue(raster,1, ST_Transform(ST_GeomFromText('{0}', 4326),{1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(wkt, srid, value, index_raster.id)
            # Run the raster update with a 10 s timeout; None means it timed out
            result = gi_lib.timeout(gi_lib.draw_update_index, args=(change_index_values,index_raster.id), kwargs={}, timeout=10, result_can_be_pickled=True, default=None)
            if result == None:
                messages.error(request, 'The submission timed out. Please try to draw in the changes and submit them again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id':job_id, 'index_name':index_name}))

        # Get the distinct cell values now present in the index map raster
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = '''+ unicode(index_raster.id) +''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs +=1
            ids.append(row.value)

        # Sync each mapping table: update the id count and delete indices
        # whose value no longer appears in the raster
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(MTIndex.index, MTIndex.id, MTIndex.description1, MTIndex.description2).\
                            join(MTValue).\
                            filter(MTValue.mapTable == mapping_table).\
                            order_by(MTIndex.index).\
                            all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
            gsshapy_session.commit()
            map_table_count +=1

        # Re-fetch the index map after the raster edits
        index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()

        # Create kml file name and path (timestamped for uniqueness)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        # Replace the old kml entry for this index with the freshly uploaded one
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {'url':resource['url'], 'full_name':resource['name']}
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()
    else:
        messages.error(request, "You must make edits to submit")

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id':job_id, 'index_name':index_name}))
def zip_file(request, job_id):
    '''
    Zip up the GSSHA project files in preparation for a model run.

    Writes the GSSHAPY project to disk under the user's workspace, zips the
    files, uploads the archive to the public CKAN dataset, and records the
    original/new model URLs on the job before redirecting to the status page.

    NOTE(review): duplicate definition — an identical zip_file appears
    earlier in this module; this later one wins at import time.

    Args:
        request: Django HttpRequest; POST must carry 'new_name' and
            'new_description'.
        job_id: Identifier of the pending job being prepared.
    '''
    context = {}

    # Get the job id and user id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    project_file_id = job.new_model_id

    # Get the name and description from the submission
    params = request.POST
    not_clean_name = params['new_name']
    new_description = params['new_description']

    # Reformat the name by removing bad characters.
    # BUG FIX: the original loop did `new_name = not_clean_name.replace(...)`,
    # restarting from the raw name every iteration, so only the final bad
    # character ('*') was ever stripped. Accumulate replacements instead.
    bad_char = "',.<>[]{}=+-/\"|:;\\^?!~`@#$%&*"
    new_name = not_clean_name
    for char in bad_char:
        new_name = new_name.replace(char, "_")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Get project from the database
    projectFileAll = gsshapy_session.query(ProjectFile).get(project_file_id)

    # Create name for files (strip a trailing '.prj' extension if present)
    project_name = projectFileAll.name
    if project_name.endswith('.prj'):
        project_name = project_name[:-4]
    pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

    # Set depth map: ensure the project writes a max-depth grid (.gfl)
    if projectFileAll.getCard("FLOOD_GRID") == None:
        max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(new_name))
        projectFileAll.projectCards.append(max_depth_card)
        gsshapy_session.commit()
        job.original_certification = "Missing gfl"

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    newFileDir = os.path.join(userDir, 'newFile')
    writeFile = os.path.join(newFileDir, new_name)
    zipPath = os.path.join(newFileDir, new_name + "_zip")

    # Clear workspace folders
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(newFileDir)
    gi_lib.clear_folder(writeFile)

    # Write all the project files to disk
    projectFileAll.writeInput(session=gsshapy_session, directory=writeFile, name=new_name)

    # Make a list of the project files
    writeFile_list = os.listdir(writeFile)

    # Add each project file to the zip folder
    with zipfile.ZipFile(zipPath, "w") as gssha_zip:
        for item in writeFile_list:
            abs_path = os.path.join(writeFile, item)
            archive_path = os.path.join(new_name, item)
            gssha_zip.write(abs_path, archive_path)

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Add the zipped GSSHA file to the public ckan
    results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine,
                                            new_name, new_description, pretty_date, user)

    # If the file uploads correctly, store the run URLs on the job
    if success == True:
        new_url = results['url']
        new_name = results['name']
        original_url = job.original_url
        original_name = job.original_name
        model_data = {'original': {'url': original_url, 'name': original_name},
                      'new': {'url': new_url, 'name': new_name}}
        job.run_urls = model_data
        job.new_name = new_name
        job.status = "ready to run"
        session.commit()

    return redirect(reverse('gsshaindex:status'))
def edit_index(request, job_id, index_name):
    """
    Controller for the edit-index page (manually drawing in edits).

    Loads the index map for the job's project, ensures a kml for it exists
    on CKAN (generating and uploading one if the job has no entry for this
    index yet), and renders the drawing map.

    Args:
        request: Django HttpRequest.
        job_id: Identifier of the pending job.
        index_name: Name of the index map to edit.
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files stored on the job
    resource_list = json.loads(job.current_kmls)
    resource_names = []
    resource_url = []

    # Get array of names
    for key in resource_list:
        resource_names.append(key)

    # Create kml file name and path (timestamped for uniqueness)
    current_time = time.strftime("%Y%m%dT%H%M%S")
    resource_name = new_index.name + "_" + str(user) + "_" + current_time
    kml_ext = resource_name + '.kml'
    clusterFile = os.path.join(indexMapDir, kml_ext)

    # See if a kml for this index is already recorded on the job
    file_present = False
    for key in resource_list:
        if key == index_name:
            file_present = True

    if file_present == False:
        # Generate color ramp and upload the kml to CKAN
        new_index.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)
        # BUG FIX: the original iterated `result['resources']` but `result`
        # was never defined (NameError on this path). Use the (resource,
        # status) pair returned by add_kml_CKAN, matching extract_gssha.
        if status == True:
            resource_list[new_index.name] = {'url': resource['url'], 'full_name': resource['name']}
        job.current_kmls = json.dumps(resource_list)
        job_session.commit()
    job_session.close()

    # Set up map properties for the drawing gizmo
    editable_map = {'height': '600px',
                    'width': '100%',
                    'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
                    'maps_api_key': maps_api_key,
                    'drawing_types_enabled': ['POLYGONS'],
                    'initial_drawing_mode': 'POLYGONS',
                    'output_format': 'WKT'}

    context['google_map'] = editable_map
    context['mapTables'] = mapTables
    context['indices'] = indices
    context['resource_names'] = resource_names
    context['resource_url'] = resource_url
    context['resource_list'] = resource_list
    context['index_name'] = index_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/edit_index.html', context)
def edit_index(request, job_id, index_name):
    """
    Controller for the edit-index page where the user manually draws edits.

    Renders a Google Map showing the KML for ``index_name`` together with the
    mapping tables and indices tied to that index map.  If the job's
    ``current_kmls`` does not yet contain a KML for this index map, one is
    generated, uploaded to CKAN, and recorded on the job.

    Parameters:
        request: Django HttpRequest for the current user.
        job_id: id of the pending job being edited.
        index_name: name of the index map to display.

    Returns:
        HttpResponse rendering 'gsshaindex/edit_index.html'.
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Specify the workspace (public/<user>/index_maps under the app directory)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files previously published for this job
    resource_list = json.loads(job.current_kmls)
    resource_names = []
    resource_url = []

    # Get array of names
    for key in resource_list:
        resource_names.append(key)

    # Create kml file name and path (timestamped so uploads never collide)
    current_time = time.strftime("%Y%m%dT%H%M%S")
    resource_name = new_index.name + "_" + str(user) + "_" + current_time
    kml_ext = resource_name + '.kml'
    clusterFile = os.path.join(indexMapDir, kml_ext)

    # See if a kml for this index map is already recorded for the job
    file_present = False
    for key in resource_list:
        if key == index_name:
            file_present = True

    if file_present == False:
        # Generate color ramp and write the KML to disk
        new_index.getAsKmlClusters(session=gsshapy_session,
                                   path=clusterFile,
                                   colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                   alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)
        # BUG FIX: the original iterated ``for resource in result['resources']``
        # but ``result`` was never defined in this function, raising NameError
        # the first time an index map had no cached KML.  Use the resource and
        # status returned by add_kml_CKAN directly, mirroring the pattern
        # combine_index uses after its own add_kml_CKAN call.
        if status == True:
            resource_list[new_index.name] = {'url': resource['url'],
                                             'full_name': resource['name']}

    job.current_kmls = json.dumps(resource_list)
    job_session.commit()
    job_session.close()

    # Set up map properties
    editable_map = {'height': '600px',
                    'width': '100%',
                    'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
                    'maps_api_key': maps_api_key,
                    'drawing_types_enabled': ['POLYGONS'],
                    'initial_drawing_mode': 'POLYGONS',
                    'output_format': 'WKT'}

    context['google_map'] = editable_map
    context['mapTables'] = mapTables
    context['indices'] = indices
    context['resource_names'] = resource_names
    context['resource_url'] = resource_url
    context['resource_list'] = resource_list
    context['index_name'] = index_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/edit_index.html', context)
def combine_index(request, job_id, index_name):
    """
    Controller for the combine-index page: merge two index maps into one.

    GET renders a form with two selects (first map, second map or "none").
    POST combines the selected maps' rasters with PostGIS ST_MapAlgebra
    (values encoded as first*1000 + second), renumbers the resulting raster
    values to small sequential ids, rebuilds the MTIndex/MTValue rows for
    every mapping table that uses the index, regenerates the KML, uploads it
    to CKAN, updates the job record, and redirects to the mapping-table page.
    """
    context = {}
    user = str(request.user)
    # Renumbered index ids start counting from ID_OFFSET + 1.
    ID_OFFSET = 10

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id and gsshapy_session
    project_file_id = job.new_model_id
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files (name -> {'url', 'full_name'}) stored on the job
    resource_list = json.loads(job.current_kmls)

    # Create blank array for names and urls
    resource_names = []
    resource_url = []
    resource_info = []

    # Get array of names and urls; resource_info holds (value, label) pairs
    # for the select widgets below.
    for key in resource_list:
        resource_names.append(key)
        resource_url.append(resource_list[key]['url'])
        resource_info.append((key, key))

    select_input1 = {
        'display_text': "Select first index map",
        'name': 'select1',
        'multiple': False,
        'options': resource_info
    }
    select_input2 = {
        'display_text': "Select second index map or none",
        'name': 'select2',
        'multiple': False,
        'options': [("None", "none")] + resource_info
    }

    # if the next button was pressed
    if request.POST:
        params = request.POST

        # Error message if both maps selected are the same; result stays ""
        # so the combine/renumber section below is skipped.
        if params['select1'] == params['select2']:
            result = ""
            messages.error(
                request,
                "You must select two different index maps. Or if you'd like to replace this map with a different map, select None for the second option"
            )
        # Process if only one map is selected: replace this index map with
        # select1 (values scaled by 1000, second component zeroed).
        elif params['select2'] == "none":
            select1_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select1']).one()
            # NOTE(review): SQL is built by string concatenation; the
            # interpolated values are integer primary keys from ORM queries,
            # not raw user input, but parameterized execution would be safer.
            statement = '''UPDATE idx_index_maps Set raster = ST_MapAlgebra( (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                select1_id.id) + '''), 1, (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                new_index.id) + '''), 1, '([rast1]*1000+ [rast2]*0)' ) WHERE id = ''' + unicode(new_index.id) + '''; '''
            # Run with a 10 s timeout; default=None means a timed-out combine
            # yields result=None (which still passes the != "" check below).
            result = gi_lib.timeout(gsshapy_engine.execute,
                                    args=(statement, ),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=False,
                                    default=None)
        # Process if two maps are selected
        else:
            # Get the ids for the two index maps to be combined
            select1_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select1']).one()
            select2_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select2']).one()

            # Combine the maps and give a unique id: value = first*1000 + second
            statement = '''UPDATE idx_index_maps SET raster =ST_MapAlgebra( (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                select1_id.id) + '''), 1, (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                select2_id.id) + '''), 1, '(([rast1]*1000) + [rast2])' ) WHERE id = ''' + unicode(new_index.id) + '''; '''
            result = gi_lib.timeout(gsshapy_engine.execute,
                                    args=(statement, ),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=False,
                                    default=None)

        if result != "":
            # Get the distinct values present in the combined index map raster
            statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(
                new_index.id) + ''') AS foo ORDER BY (pvc).value; '''
            new_indice_values = gsshapy_engine.execute(statement3)

            # Get the indices for the index being changed
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()

            # Go through the map tables that use the index map
            # NOTE(review): new_indice_values is a DB-API result cursor; it is
            # exhausted after the first mapTables iteration, so later mapping
            # tables see an empty row loop — confirm whether this is intended.
            map_table_count = 0
            for mapping_table in mapTables:
                # Reset the number of ids to start counting them
                numberIDs = ID_OFFSET
                ids = []

                # Go through each new (combined) raster value
                for row in new_indice_values:
                    index_present = False
                    numberIDs += 1
                    ids.append(row.value)
                    large_id = int(row[0])

                    # Skip values that already exist as indices on this map
                    for index in new_index.indices:
                        if int(index.index) == int(row[0]):
                            index_present = True
                            break

                    if index_present == False:
                        # Split combined value back into its two source ids:
                        # first_id*1000 + second_id.
                        # NOTE(review): the split uses str.split("0")[-1],
                        # which mis-handles second ids containing interior
                        # zeros (e.g. 101) — verify against expected id range.
                        if str(large_id).endswith("000") == False:
                            second_id = str(large_id).split("0")[-1]
                            first_id = (large_id - int(second_id)) / 1000
                        else:
                            first_id = (large_id) / 1000
                            second_id = ""

                        # Build descriptions from the source maps' indices
                        description2 = ""
                        pastinfo1 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                            filter(MTIndex.idxMapID == select1_id.id).\
                            filter(MTIndex.index == first_id).\
                            all()
                        description1 = pastinfo1[0].description1 + " " + pastinfo1[0].description2
                        if second_id != "":
                            pastinfo2 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                filter(MTIndex.idxMapID == select2_id.id).\
                                filter(MTIndex.index == second_id).\
                                all()
                            description2 = pastinfo2[0].description1 + " " + pastinfo2[0].description2

                        # Query for the pixel values of row[0] and replace with numberIDs
                        pixel_query = '''SELECT ST_PixelOfValue((SELECT raster FROM idx_index_maps WHERE id = {0}), {1});'''.format(
                            unicode(new_index.id), row[0])
                        pixels = gsshapy_session.execute(pixel_query)
                        for pixel in pixels:
                            # pixel[0] looks like "(x,y)" — strip parens, split
                            coord = pixel[0].strip("()")
                            x, y = coord.split(",")
                            update_query = '''UPDATE idx_index_maps SET raster = (SELECT ST_SetValue(raster,{1},{2},{3}) FROM idx_index_maps WHERE id = {0}) WHERE id = {0};'''.format(
                                unicode(new_index.id), int(x), int(y), numberIDs)
                            new_result = gsshapy_session.execute(update_query)

                        # Create new index value with the renumbered id
                        new_indice = MTIndex(numberIDs, description1, description2)
                        # new_indice = MTIndex(row[0], description1, description2)
                        new_indice.indexMap = new_index
                        # Seed a zero MTValue for every variable of every
                        # mapping table that uses this index map.
                        # NOTE(review): this inner loop reuses the name
                        # ``mapping_table``, shadowing the outer loop variable.
                        for mapping_table in mapTables:
                            distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                filter(MTValue.mapTable == mapping_table).\
                                order_by(MTValue.variable).\
                                all()
                            variables = []
                            for var in distinct_vars:
                                variables.append(var[0])
                            for variable in variables:
                                new_value = MTValue(variable, 0)
                                new_value.mapTable = mapping_table
                                new_value.index = new_indice

                # Delete indices that aren't present in the combined raster
                for index in indices:
                    if not int(index[0]) in ids:
                        fetched_index = gsshapy_session.query(MTIndex).get(index.id)
                        for val in fetched_index.values:
                            gsshapy_session.delete(val)
                        gsshapy_session.delete(fetched_index)

                # numIDs reflects how many renumbered ids were assigned above
                new_index.mapTables[map_table_count].numIDs = numberIDs - ID_OFFSET
                map_table_count += 1

            # Refresh the index list after the renumbering/deletions
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()

            index_raster = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == index_name).one()

            # Specify the workspace (public/<user>/index_maps)
            controllerDir = os.path.abspath(os.path.dirname(__file__))
            gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
            publicDir = os.path.join(gsshaindexDir, 'public')
            userDir = os.path.join(publicDir, str(user))
            indexMapDir = os.path.join(userDir, 'index_maps')

            # Create kml file name and path (timestamped to avoid collisions)
            current_time = time.strftime("%Y%m%dT%H%M%S")
            resource_name = index_raster.name + "_" + str(user) + "_" + current_time
            kml_ext = resource_name + '.kml'
            clusterFile = os.path.join(indexMapDir, kml_ext)

            index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)

            # Generate color ramp and write the new KML
            index_raster.getAsKmlClusters(
                session=gsshapy_session,
                path=clusterFile,
                colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                alpha=0.6)
            resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                                   clusterFile, resource_name)

            # Replace this index map's entry in the job's KML list with the
            # freshly uploaded resource.
            temp_list = json.loads(job.current_kmls)
            if status == True:
                for item in temp_list:
                    if item == index_name:
                        del temp_list[item]
                        temp_list[index_name] = {
                            'url': resource['url'],
                            'full_name': resource['name']
                        }
                        break

            job.current_kmls = json.dumps(temp_list)
            job_session.commit()
            gsshapy_session.commit()
            job_session.close()
            gsshapy_session.close()
            # Jump straight to the first mapping table of the combined index
            return redirect(
                reverse('gsshaindex:mapping_table',
                        kwargs={
                            'job_id': job_id,
                            'index_name': index_name,
                            'mapping_table_number': '0'
                        }))

    # GET (or same-map error) path: persist sessions and fall through to the
    # selection form.
    job_session.commit()
    gsshapy_session.commit()
    job_session.close()
    gsshapy_session.close()

    # Set the first index as the active one
    index_names = str(resource_names[0])

    # Set up map properties (read-only preview: no drawing types enabled)
    editable_map = {
        'height': '400px',
        'width': '100%',
        'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_names,
        'maps_api_key': maps_api_key,
        'drawing_types_enabled': []
    }

    context['replaced_index'] = index_name
    context['index_name'] = index_names
    context['google_map'] = editable_map
    context['select_input1'] = select_input1
    context['select_input2'] = select_input2
    context['job_id'] = job_id
    context['resource_name'] = resource_names

    return render(request, 'gsshaindex/combine_index.html', context)