def edit_index(request, job_id, index_name):
    """
    Controller for the edit index by manually drawing in edits page.

    Looks up the pending job and the requested index map, generates a KML
    clustering of the index map and uploads it to CKAN if one is not already
    recorded in ``job.current_kmls``, then renders the edit page with an
    editable Google map configuration.

    NOTE(review): this function is defined twice in this file; the later
    definition shadows this one at import time.
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Specify the workspace (public/<user>/index_maps under the app package)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files (name -> {url, full_name}) stored on the job
    resource_list = json.loads(job.current_kmls)
    resource_names = []
    resource_url = []

    # Get array of names and urls
    for key in resource_list:
        resource_names.append(key)

    # Create kml file name and path (timestamped to keep names unique)
    current_time = time.strftime("%Y%m%dT%H%M%S")
    resource_name = new_index.name + "_" + str(user) + "_" + current_time
    kml_ext = resource_name + '.kml'
    clusterFile = os.path.join(indexMapDir, kml_ext)

    # See if a kml for this index is already recorded on the job
    file_present = False
    for key in resource_list:
        if key == index_name:
            file_present = True

    if file_present == False:
        # Generate color ramp
        new_index.getAsKmlClusters(session=gsshapy_session,
                                   path=clusterFile,
                                   colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                   alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        # BUG FIX: the original iterated `result['resources']`, but `result`
        # was never defined in this function (NameError at runtime) and the
        # loop variable shadowed `resource` from add_kml_CKAN. Use the
        # returned resource directly, mirroring extract_gssha/submit_edits.
        if status == True:
            resource_list[new_index.name] = {
                'url': resource['url'],
                'full_name': resource['name']
            }

        job.current_kmls = json.dumps(resource_list)
        job_session.commit()

    job_session.close()

    # Set up map properties for the drawing-enabled Google map gizmo
    editable_map = {
        'height': '600px',
        'width': '100%',
        'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
        'maps_api_key': maps_api_key,
        'drawing_types_enabled': ['POLYGONS'],
        'initial_drawing_mode': 'POLYGONS',
        'output_format': 'WKT'
    }

    context['google_map'] = editable_map
    context['mapTables'] = mapTables
    context['indices'] = indices
    context['resource_names'] = resource_names
    context['resource_url'] = resource_url
    context['resource_list'] = resource_list
    context['index_name'] = index_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/edit_index.html', context)
def fly(request, job_id):
    """
    Controller that runs the original and modified GSSHA models through the
    flyGssha web service and records the result URLs on the job.

    Workflow: clear the user workspace, load the job, then for each of the
    'original' and 'new' model urls either (a) reuse a certified original
    result, (b) repair an original missing a FLOOD_GRID (.gfl) card by
    re-writing and re-uploading the project before running, or (c) run the
    model directly. Results are uploaded to CKAN and their urls saved.
    """
    context = {}

    # Get the user id
    user = str(request.user)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace (scratch dirs under public/<user>)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    resultsPath = os.path.join(userDir, 'results')
    originalFileRunPath = os.path.join(userDir, "preRun")
    writeFile = os.path.join(userDir, "writeFile")
    zipPath = os.path.join(userDir, "zipPath")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Clear the results folder(s) so stale files don't get zipped/uploaded
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(resultsPath)
    gi_lib.clear_folder(originalFileRunPath)
    gi_lib.clear_folder(writeFile)

    # Get the jobs from the database
    session = jobs_sessionmaker()
    job = session.query(Jobs).\
        filter(Jobs.user_id == user).\
        filter(Jobs.original_id == job_id).one()

    # Get the urls and names for the analysis
    run_urls = job.run_urls
    arguments = {'new': {'url': run_urls['new']['url'], 'name': run_urls['new']['name']},
                 'original': {'url': run_urls['original']['url'], 'name': run_urls['original']['name']}}

    # Set up for fly GSSHA
    job.status = "processing"
    session.commit()

    status = 'complete'
    results = []
    # results_urls = []
    results_urls = {}
    count = 0

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Try running the web service
    # try:
    for k in arguments:
        url = str(arguments[k]['url'])
        if k == 'original' and job.original_certification == "Certified":
            # Original already has a certified result; reuse its stored url.
            results_urls['original'] = url
            count += 1
            continue
        elif k == 'original' and job.original_certification == "Missing gfl":
            # Need to download from url, add gfl, zip, send to ckan, run, and save the url
            downloaded_project = gi_lib.extract_zip_from_url(user, url, originalFileRunPath)

            # Find the project file (last .prj found wins if several exist)
            for root, dirs, files in os.walk(originalFileRunPath):
                for file in files:
                    if file.endswith(".prj"):
                        project_name = file
                        project_path = os.path.join(root, file)
                        read_dir = os.path.dirname(project_path)

            # Create an empty Project File Object and read the model in
            project = ProjectFile()
            project.readInput(directory=read_dir, projectFileName=project_name,
                              session=gsshapy_session, spatial=True)

            # Add the max-depth (FLOOD_GRID) card when it is absent;
            # project_name[:-4] strips the ".prj" extension.
            if project.getCard("FLOOD_GRID") == None:
                max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(project_name[:-4]))
                project_cards = project.projectCards.append(max_depth_card)
                gsshapy_session.commit()

            # Need to format so that it will work for the file I just did
            # Get all the project files
            project.writeInput(session=gsshapy_session, directory=writeFile,
                               name=project_name[:-4])

            # Make a list of the project files
            writeFile_list = os.listdir(writeFile)

            # Add each project file to the zip folder
            with zipfile.ZipFile(zipPath, "w") as gssha_zip:
                for item in writeFile_list:
                    abs_path = os.path.join(writeFile, item)
                    archive_path = os.path.join(project_name, item)
                    gssha_zip.write(abs_path, archive_path)

            GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)
            description = job.original_description + "with a gfl added"
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

            # Add the zipped GSSHA file to the public ckan
            results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine,
                                                    project_name[:-4] + " with gfl",
                                                    description, pretty_date, user)
            job.original_url = results['url']
            url = job.original_url

            # Run the repaired original through the web service
            resultsFile = os.path.join(resultsPath, arguments[k]['name'].replace(" ", "_") + datetime.now().strftime('%Y%d%m%H%M%S'))
            gi_lib.flyGssha(str(url), resultsFile)

            # Push file to ckan dataset
            resource_name = ' '.join((arguments[k]['name'], '-Run', datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, resultsFile, CKAN_engine,
                                                   resource_name, "", pretty_date, user,
                                                   certification="Certified")

            # Save the new url as the original_url and run
            job.original_certification = "Certified"

            # Publish link to table
            results_urls['original'] = result['url']
            count += 1
        else:
            # Normal path: run the model url as-is and upload the results
            resultsFile = os.path.join(resultsPath, arguments[k]['name'].replace(" ", "_") + datetime.now().strftime('%Y%d%m%H%M%S'))
            gi_lib.flyGssha(url, resultsFile)

            # Push file to ckan dataset
            resource_name = ' '.join((arguments[k]['name'], '-Run', datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, resultsFile, CKAN_engine,
                                                   resource_name, "", pretty_date, user,
                                                   certification="Certified")

            # Publish link to table
            if k == 'original':
                results_urls['original'] = result['url']
                job.original_certification = "Certified"
            else:
                results_urls['new'] = result['url']
            count += 1

    # Both models must have produced a result url; otherwise mark failed.
    # (Python 2 print statement — this module predates py3.)
    if (count == 2):
        print results_urls
    else:
        status = 'failed'
    # except:
    #     status = 'failed'

    job.status = status
    job.result_urls = results_urls
    session.commit()
    session.close()
    gsshapy_session.commit()
    gsshapy_session.close()

    return redirect(reverse('gsshaindex:status'))
def submit_edits(request, job_id, index_name):
    '''
    Controller that handles submissions of edits from the user after they
    manually edit an index map.

    Applies each drawn WKT polygon to the index-map raster via PostGIS
    ST_SetValue, creates missing MTIndex/MTValue rows for new values,
    prunes index rows whose value no longer occurs in the raster, then
    regenerates and re-uploads the KML for the edited map.
    '''
    context = {}
    user = str(request.user)
    params = request.POST

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    # NOTE(review): mask_file is queried but not used later in this function.
    mask_file = gsshapy_session.query(RasterMapFile).filter(
        RasterMapFile.projectFileID == project_file_id).filter(
        RasterMapFile.fileExtension == "msk").one()

    # Get a list of the map tables for the index map
    mapTables = index_raster.mapTables

    # If some geometry is submitted, go and run the necessary steps to change the map
    if params['geometry']:
        jsonGeom = json.loads(params['geometry'])
        geometries = jsonGeom['geometries']

        # Convert from json to WKT
        for geometry in geometries:
            wkt = geometry['wkt']

            # Get the values for the geometry
            value = geometry['properties']['value']

            # Loop through indices and see if they match
            index_raster_indices = index_raster.indices
            index_present = False
            for index in index_raster_indices:
                if int(index.index) == int(value):
                    index_present = True
                    break

            # Create new index value if it doesn't exist and change the number of ids
            if index_present == False:
                new_indice = MTIndex(value, "", "")
                new_indice.indexMap = index_raster
                # Seed a zero-valued MTValue for every variable of every
                # mapping table so the new index has a complete row.
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapping_table).\
                        order_by(MTValue.variable).\
                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
                gsshapy_session.commit()

            # Fall back to UTM zone 12N (26912) when the project has no srid.
            # TODO(review): confirm this default is appropriate for all models.
            if project_file.srid == None:
                srid = 26912
            else:
                srid = project_file.srid

            # Change values in the index map (drawn geometry arrives in
            # EPSG:4326 and is transformed to the raster's srid)
            change_index_values = "SELECT ST_SetValue(raster,1, ST_Transform(ST_GeomFromText('{0}', 4326),{1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(wkt, srid, value, index_raster.id)
            result = gi_lib.timeout(gi_lib.draw_update_index,
                                    args=(change_index_values, index_raster.id),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=True,
                                    default=None)

            # A None result means the raster update timed out
            if result == None:
                messages.error(
                    request,
                    'The submission timed out. Please try to draw in the changes and submit them again.'
                )
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(
                    reverse('gsshaindex:edit_index',
                            kwargs={
                                'job_id': job_id,
                                'index_name': index_name
                            }))

        # Get the values in the index map (distinct raster values + counts)
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(
            index_raster.id) + ''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)

        # Update each mapping table's id count and delete index rows whose
        # value no longer appears in the raster (with their MTValues).
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(MTIndex.index, MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapping_table).\
                order_by(MTIndex.index).\
                all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
                    gsshapy_session.commit()
            map_table_count += 1

        # Re-fetch the index map so the KML reflects the committed edits
        index_raster = gsshapy_session.query(IndexMap).filter(
            IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()

        # Create kml file name and path
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(
            user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session,
                                      path=clusterFile,
                                      colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                      alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        # Replace the stale kml entry for this index on the job.
        # NOTE(review): deletes from the dict while iterating it; safe only
        # because of the immediate break.
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {
                        'url': resource['url'],
                        'full_name': resource['name']
                    }
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()
    else:
        messages.error(request, "You must make edits to submit")

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(
        reverse('gsshaindex:edit_index',
                kwargs={
                    'job_id': job_id,
                    'index_name': index_name
                }))
def extract_gssha(request, job_id):
    '''
    This takes the file name and id that were submitted and unzips the files,
    finds the index maps, and creates kmls.

    Downloads the job's original model zip, reads the project into the
    gsshapy database, generates a KML per index map, uploads each to CKAN,
    and stores the resulting name->url mapping on the job.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace so only this job's files are present
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Get url for the resource and extract the GSSHA file
    url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)

    # Create GSSHAPY Session
    gsshapy_session = gsshapy_sessionmaker()

    # Find the project file (last .prj found wins if several exist)
    for root, dirs, files in os.walk(userDir):
        for file in files:
            if file.endswith(".prj"):
                project_name = file
                project_path = os.path.join(root, file)
                read_dir = os.path.dirname(project_path)

    # Create an empty Project File Object and read the model into the db
    project = ProjectFile()
    project.readInput(directory=read_dir, projectFileName=project_name,
                      session=gsshapy_session, spatial=True)

    # Create empty dictionary to hold the kmls from this session
    current_kmls = {}

    # Store model information
    job.new_model_name = project.name
    job.new_model_id = project.id
    job.created = datetime.now()

    # Get index maps
    index_list = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project.mapTableFile).all()

    # Loop through the index maps, rendering and uploading a KML for each
    for current_index in index_list:
        # Create kml file name and path (timestamped to keep names unique)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = current_index.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        current_index.getAsKmlClusters(session=gsshapy_session,
                                       path=clusterFile,
                                       colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                       alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        # If the kml is added correctly, create an entry for the current_kmls
        # with the name as the index name
        if status == True:
            current_kmls[current_index.name] = {'url': resource['url'],
                                                'full_name': resource['name']}

    # Add the kmls with their url to the database
    job.current_kmls = json.dumps(current_kmls)
    session.commit()
    session.close()
    gsshapy_session.close()

    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id': job_id}))
def zip_file(request, job_id):
    '''
    This zips up the GSSHA files in preparation of their being run.

    Sanitizes the user-supplied model name, writes the project files out of
    the gsshapy database, zips them, uploads the archive to CKAN, and stores
    the original/new model urls on the job ready for a run.
    '''
    context = {}

    # Get the user id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    project_file_id = job.new_model_id

    # Get the name and description from the submission
    params = request.POST
    not_clean_name = params['new_name']
    new_description = params['new_description']

    # Reformat the name by removing bad characters.
    # BUG FIX: the original loop restarted from `not_clean_name` on every
    # iteration, so only the final bad character was ever replaced; all
    # earlier replacements were discarded. Accumulate on new_name instead.
    # bad_char = "',.<>()[]{}=+-/\"|:;\\^?!~`@#$%&* "
    bad_char = "',.<>[]{}=+-/\"|:;\\^?!~`@#$%&*"
    new_name = not_clean_name
    for char in bad_char:
        new_name = new_name.replace(char, "_")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Get project from the database
    projectFileAll = gsshapy_session.query(ProjectFile).get(project_file_id)

    # Create name for files (strip the .prj extension if present)
    project_name = projectFileAll.name
    if project_name.endswith('.prj'):
        project_name = project_name[:-4]

    pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

    # Set depth map: add a FLOOD_GRID card when the project lacks one and
    # flag the original so `fly` knows it needs the same repair.
    if projectFileAll.getCard("FLOOD_GRID") == None:
        max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(new_name))
        projectFileAll.projectCards.append(max_depth_card)
        gsshapy_session.commit()
        job.original_certification = "Missing gfl"

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    newFileDir = os.path.join(userDir, 'newFile')
    writeFile = os.path.join(newFileDir, new_name)
    zipPath = os.path.join(newFileDir, new_name + "_zip")

    # Clear workspace folders
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(newFileDir)
    gi_lib.clear_folder(writeFile)

    # Get all the project files written to disk
    projectFileAll.writeInput(session=gsshapy_session, directory=writeFile,
                              name=new_name)

    # Make a list of the project files
    writeFile_list = os.listdir(writeFile)

    # Add each project file to the zip folder
    with zipfile.ZipFile(zipPath, "w") as gssha_zip:
        for item in writeFile_list:
            abs_path = os.path.join(writeFile, item)
            archive_path = os.path.join(new_name, item)
            gssha_zip.write(abs_path, archive_path)

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Add the zipped GSSHA file to the public ckan
    results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine,
                                            new_name, new_description,
                                            pretty_date, user)

    # If the file zips correctly, get information and store it in the database
    if success == True:
        new_url = results['url']
        new_name = results['name']
        original_url = job.original_url
        original_name = job.original_name
        model_data = {'original': {'url': original_url, 'name': original_name},
                      'new': {'url': new_url, 'name': new_name}}
        job.run_urls = model_data
        job.new_name = new_name
        job.status = "ready to run"
        session.commit()

    return redirect(reverse('gsshaindex:status'))
def replace_index_with_shapefile(request, job_id, index_name, shapefile_name):
    """
    Controller to replace the index map with the selected shapefile.

    Burns each GeoJSON feature from the GeoServer shapefile into the index
    raster with a fresh id (starting at 200), creates MTIndex/MTValue rows
    for new ids, prunes ids no longer present, then regenerates and
    re-uploads the KML.

    NOTE(review): this function is defined twice in this file; the later
    definition shadows this one at import time. Locals `id` and `object`
    shadow Python builtins.
    """
    context = {}
    user = str(request.user)
    geojson = get_geojson_from_geoserver(user, shapefile_name)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create a session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    mapTables = index_raster.mapTables

    if geojson['success'] != False:
        geojson_result = geojson['geojson']

        # Get existing indices
        index_raster_indices = index_raster.indices
        srid_name = geojson['crs']
        project_file_srid = project_file.srid

        # Raster values for the shapefile features start at 200 and count up
        id = 200

        # Loop through each geometry
        for object in geojson_result:
            index_present = False
            object_id = object['id']

            # Check to see if the index is present
            for index in index_raster_indices:
                if object_id == index.index:
                    index_present = True
                    break

            # Create new index value if it doesn't exist and add the number of ids
            if index_present == False:
                new_indice = MTIndex(id, object_id, "")
                new_indice.indexMap = index_raster
                # Seed a zero-valued MTValue for every variable of every
                # mapping table so the new index has a complete row.
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapping_table).\
                        order_by(MTValue.variable).\
                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
                gsshapy_session.commit()

            # Attach the source CRS so PostGIS can transform the geometry
            geom = object['geometry']
            geom['crs'] = srid_name
            geom_full = json.dumps(geom)

            # Change values in the index map
            change_index_values = "SELECT ST_SetValue(raster,1,ST_Transform(ST_GeomFromGeoJSON('{0}'), {1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(str(geom_full), project_file_srid, id, index_raster.id)
            result = gi_lib.timeout(gi_lib.draw_update_index,
                                    args=(change_index_values, index_raster.id),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=True,
                                    default=None)

            # If there is a timeout
            if result == None:
                messages.error(request, 'The submission timed out. Please try again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(
                    reverse('gsshaindex:shapefile_index',
                            kwargs={
                                'job_id': job_id,
                                'index_name': index_name,
                                'shapefile_name': shapefile_name
                            }))

            id += 1

        # Get the values in the index map (distinct raster values + counts)
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(
            index_raster.id) + ''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)

        # Update each mapping table's id count and delete index rows whose
        # value no longer appears in the raster (with their MTValues).
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapping_table).\
                order_by(MTIndex.index).\
                all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
                    gsshapy_session.commit()
            map_table_count += 1

        # Re-fetch the index map so the KML reflects the committed edits
        index_raster = gsshapy_session.query(IndexMap).filter(
            IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()

        # Create kml file name and path
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(
            user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session,
                                      path=clusterFile,
                                      colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                      alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        # Replace the stale kml entry for this index on the job.
        # NOTE(review): deletes from the dict while iterating it; safe only
        # because of the immediate break.
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {
                        'url': resource['url'],
                        'full_name': resource['name']
                    }
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()

    job_session.close()
    gsshapy_session.close()

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(
        reverse('gsshaindex:edit_index',
                kwargs={
                    'job_id': job_id,
                    'index_name': index_name
                }))
def get_mask_map(request, file_id):
    """
    This action is used to pass the kml data to the google map. It must
    return a JSON response with a Python dictionary that has the key
    'kml_links'.

    If the job already has a kml url it is returned directly; otherwise the
    mask map is extracted from the original model zip, rendered to KML in a
    random solid color, uploaded to CKAN, and the resulting url cached on
    the job.
    """
    kml_links = []
    session = jobs_sessionmaker()
    user = str(request.user)
    job, success = gi_lib.get_new_job(file_id, user, session)

    # Fast path: a kml was already generated and uploaded for this job
    if job.kml_url != None:
        kml_links.append(job.kml_url)
        #TODO Need some way to check and see if the link works or if it's broken
        return JsonResponse({'kml_links': kml_links})
    else:
        # Check that there's a package to store kmls
        CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
        present = gi_lib.check_package('kmls', CKAN_engine)

        # Specify the workspace
        controllerDir = os.path.abspath(os.path.dirname(__file__))
        gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
        publicDir = os.path.join(gsshaindexDir, 'public')
        userDir = os.path.join(publicDir, str(user))

        # Clear the workspace
        gi_lib.clear_folder(userDir)

        url = job.original_url
        maskMapDir = os.path.join(userDir, 'mask_maps')
        extractPath = os.path.join(maskMapDir, file_id)
        mask_file = gi_lib.extract_mask(url, extractPath)

        # "blank" is the sentinel extract_mask returns when no mask exists
        if mask_file == "blank":
            job.kml_url = ''
            session.commit()
            return JsonResponse({'kml_links': ''})
        else:
            projection_file = gi_lib.extract_projection(url, extractPath)

            # Set up kml file name and save location
            name = job.original_name
            norm_name = name.replace(" ", "")
            current_time = time.strftime("%Y%m%dT%H%M%S")
            kml_name = norm_name + "_" + user + "_" + current_time
            kml_ext = kml_name + ".kml"
            kml_file = os.path.join(extractPath, kml_ext)

            # Pick one random color from a fixed RGB palette for the mask
            colors = [(237,9,222),(92,245,61),(61,184,245),(171,61,245),(250,245,105),(245,151,44),(240,37,14),(88,5,232),(5,232,190),(11,26,227)]
            color = [random.choice(colors)]

            # Extract mask map and create kml
            gsshapy_session = gsshapy_sessionmaker()

            # Fall back to srid 4302 when no projection file is provided.
            # TODO(review): confirm this default srid is appropriate.
            if projection_file != "blank":
                srid = ProjectionFile.lookupSpatialReferenceID(extractPath, projection_file)
            else:
                srid = 4302

            mask_map = RasterMapFile()
            mask_map.read(directory=extractPath, filename=mask_file,
                          session=gsshapy_session, spatial=True,
                          spatialReferenceID=srid)
            mask_map.getAsKmlClusters(session=gsshapy_session, path=kml_file,
                                      colorRamp={'colors': color, 'interpolatedPoints': 1})

            mask_map_dataset = gi_lib.check_dataset("mask-maps", CKAN_engine)

            # Add mask kml to CKAN for viewing
            resource, success = gi_lib.add_kml_CKAN(mask_map_dataset, CKAN_engine,
                                                    kml_file, kml_name)

            # Check to ensure the resource was added and save it to database
            # by adding "kml_url"
            if success == True:
                job.kml_url = resource['url']
                session.commit()

            kml_links.append(job.kml_url)
            return JsonResponse({'kml_links': kml_links})
def edit_index(request, job_id, index_name):
    """
    Controller for the edit index by manually drawing in edits page.

    Looks up the pending job and the requested index map, generates a KML
    clustering of the index map and uploads it to CKAN if one is not already
    recorded in ``job.current_kmls``, then renders the edit page with an
    editable Google map configuration.

    NOTE(review): this is a duplicate definition of edit_index; it shadows
    the earlier one at import time. Consider deleting one copy.
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Specify the workspace (public/<user>/index_maps under the app package)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files (name -> {url, full_name}) stored on the job
    resource_list = json.loads(job.current_kmls)
    resource_names = []
    resource_url = []

    # Get array of names and urls
    for key in resource_list:
        resource_names.append(key)

    # Create kml file name and path (timestamped to keep names unique)
    current_time = time.strftime("%Y%m%dT%H%M%S")
    resource_name = new_index.name + "_" + str(user) + "_" + current_time
    kml_ext = resource_name + '.kml'
    clusterFile = os.path.join(indexMapDir, kml_ext)

    # See if a kml for this index is already recorded on the job
    file_present = False
    for key in resource_list:
        if key == index_name:
            file_present = True

    if file_present == False:
        # Generate color ramp
        new_index.getAsKmlClusters(session=gsshapy_session,
                                   path=clusterFile,
                                   colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                   alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        # BUG FIX: the original iterated `result['resources']`, but `result`
        # was never defined in this function (NameError at runtime) and the
        # loop variable shadowed `resource` from add_kml_CKAN. Use the
        # returned resource directly, mirroring extract_gssha/submit_edits.
        if status == True:
            resource_list[new_index.name] = {
                'url': resource['url'],
                'full_name': resource['name']
            }

        job.current_kmls = json.dumps(resource_list)
        job_session.commit()

    job_session.close()

    # Set up map properties for the drawing-enabled Google map gizmo
    editable_map = {
        'height': '600px',
        'width': '100%',
        'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
        'maps_api_key': maps_api_key,
        'drawing_types_enabled': ['POLYGONS'],
        'initial_drawing_mode': 'POLYGONS',
        'output_format': 'WKT'
    }

    context['google_map'] = editable_map
    context['mapTables'] = mapTables
    context['indices'] = indices
    context['resource_names'] = resource_names
    context['resource_url'] = resource_url
    context['resource_list'] = resource_list
    context['index_name'] = index_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/edit_index.html', context)
def replace_index_with_shapefile(request, job_id, index_name, shapefile_name):
    """
    Controller to replace the index map with the selected shapefile.

    Burns each GeoJSON feature from the GeoServer shapefile into the index
    raster with a fresh id (starting at 200), creates MTIndex/MTValue rows
    for new ids, prunes ids no longer present, then regenerates and
    re-uploads the KML.

    NOTE(review): this is a duplicate definition of
    replace_index_with_shapefile; it shadows the earlier one at import time.
    Locals `id` and `object` shadow Python builtins.
    """
    context = {}
    user = str(request.user)
    geojson = get_geojson_from_geoserver(user, shapefile_name)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create a session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
        IndexMap.name == index_name).one()
    mapTables = index_raster.mapTables

    if geojson['success'] != False:
        geojson_result = geojson['geojson']

        # Get existing indices
        index_raster_indices = index_raster.indices
        srid_name = geojson['crs']
        project_file_srid = project_file.srid

        # Raster values for the shapefile features start at 200 and count up
        id = 200

        # Loop through each geometry
        for object in geojson_result:
            index_present = False
            object_id = object['id']

            # Check to see if the index is present
            for index in index_raster_indices:
                if object_id == index.index:
                    index_present = True
                    break

            # Create new index value if it doesn't exist and add the number of ids
            if index_present == False:
                new_indice = MTIndex(id, object_id, "")
                new_indice.indexMap = index_raster
                # Seed a zero-valued MTValue for every variable of every
                # mapping table so the new index has a complete row.
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapping_table).\
                        order_by(MTValue.variable).\
                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
                gsshapy_session.commit()

            # Attach the source CRS so PostGIS can transform the geometry
            geom = object['geometry']
            geom['crs'] = srid_name
            geom_full = json.dumps(geom)

            # Change values in the index map
            change_index_values = "SELECT ST_SetValue(raster,1,ST_Transform(ST_GeomFromGeoJSON('{0}'), {1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(str(geom_full), project_file_srid, id, index_raster.id)
            result = gi_lib.timeout(gi_lib.draw_update_index,
                                    args=(change_index_values, index_raster.id),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=True,
                                    default=None)

            # If there is a timeout
            if result == None:
                messages.error(request, 'The submission timed out. Please try again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(reverse('gsshaindex:shapefile_index',
                                        kwargs={'job_id': job_id,
                                                'index_name': index_name,
                                                'shapefile_name': shapefile_name}))

            id += 1

        # Get the values in the index map (distinct raster values + counts)
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(index_raster.id) + ''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)

        # Update each mapping table's id count and delete index rows whose
        # value no longer appears in the raster (with their MTValues).
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapping_table).\
                order_by(MTIndex.index).\
                all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
                    gsshapy_session.commit()
            map_table_count += 1

        # Re-fetch the index map so the KML reflects the committed edits
        index_raster = gsshapy_session.query(IndexMap).filter(
            IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()

        # Create kml file name and path
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session,
                                      path=clusterFile,
                                      colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                      alpha=0.6)
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        # Replace the stale kml entry for this index on the job.
        # NOTE(review): deletes from the dict while iterating it; safe only
        # because of the immediate break.
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {'url': resource['url'],
                                             'full_name': resource['name']}
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()

    job_session.close()
    gsshapy_session.close()

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:edit_index',
                            kwargs={'job_id': job_id, 'index_name': index_name}))
def combine_index(request, job_id, index_name):
    """
    Controller for the combine-index page.

    On GET, renders a form letting the user pick one index map (replace) or
    two index maps (combine) to write over the index map ``index_name``.
    On POST, performs the raster algebra in PostGIS (combined cell values are
    encoded as first_id*1000 + second_id), renumbers the resulting ids,
    rebuilds the map-table index records, republishes the KML to CKAN, and
    redirects to the mapping-table page.
    """
    context = {}
    user = str(request.user)
    # Offset applied while renumbering combined raster values; subtracted
    # again before numIDs is stored.
    ID_OFFSET = 10

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id and gsshapy_session
    project_file_id = job.new_model_id
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files
    resource_list = json.loads(job.current_kmls)

    # Create blank array for names and urls
    resource_names = []
    resource_url = []
    resource_info = []

    # Get array of names and urls
    for key in resource_list:
        resource_names.append(key)
        resource_url.append(resource_list[key]['url'])
        resource_info.append((key, key))

    select_input1 = {'display_text': "Select first index map",
                     'name': 'select1',
                     'multiple': False,
                     'options': resource_info}

    select_input2 = {'display_text': "Select second index map or none",
                     'name': 'select2',
                     'multiple': False,
                     'options': [("None", "none")] + resource_info}

    # if the next button was pressed
    if request.POST:
        params = request.POST

        # Error message if both maps selected are the same
        if params['select1'] == params['select2']:
            result = ""
            messages.error(request, "You must select two different index maps. Or if you'd like to replace this map with a different map, select None for the second option")

        # Process if only one map is selected
        elif params['select2'] == "none":
            select1_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select1']).one()
            # Overwrite the target raster with select1's values scaled by
            # 1000; '[rast2]*0' keeps the target raster in the algebra.
            statement = '''UPDATE idx_index_maps Set raster = ST_MapAlgebra( (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select1_id.id) + '''), 1, (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(new_index.id) + '''), 1, '([rast1]*1000+ [rast2]*0)' ) WHERE id = ''' + unicode(new_index.id) + '''; '''
            result = gi_lib.timeout(gsshapy_engine.execute, args=(statement,), kwargs={}, timeout=10, result_can_be_pickled=False, default=None)

        # Process if two maps are selected
        else:
            # Get the ids for the two index maps to be combined
            select1_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select1']).one()
            select2_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select2']).one()

            # Combine the maps and give a unique id
            statement = '''UPDATE idx_index_maps SET raster =ST_MapAlgebra( (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select1_id.id) + '''), 1, (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select2_id.id) + '''), 1, '(([rast1]*1000) + [rast2])' ) WHERE id = ''' + unicode(new_index.id) + '''; '''
            result = gi_lib.timeout(gsshapy_engine.execute, args=(statement,), kwargs={}, timeout=10, result_can_be_pickled=False, default=None)

        # NOTE(review): gi_lib.timeout returns the default (None) on a
        # timeout, and None != "" is True, so a timed-out combination still
        # enters this post-processing branch — confirm intended.
        if result != "":
            # Get the values in the index map
            statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(new_index.id) + ''') AS foo ORDER BY (pvc).value; '''
            new_indice_values = gsshapy_engine.execute(statement3)

            # Get the indices for the index being changed
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()

            # Go through the map tables that use the index map
            map_table_count = 0
            for mapping_table in mapTables:
                # Reset the number of ids to start counting them
                numberIDs = ID_OFFSET
                ids = []

                # Go through each new id value
                for row in new_indice_values:
                    index_present = False
                    numberIDs += 1
                    ids.append(row.value)
                    large_id = int(row[0])
                    for index in new_index.indices:
                        if int(index.index) == int(row[0]):
                            index_present = True
                            break
                    if index_present == False:
                        # Decompose the combined value back into its source
                        # ids.
                        # NOTE(review): this string-based split misparses
                        # second ids containing a '0' digit (e.g. 105);
                        # divmod(large_id, 1000) would be exact — confirm the
                        # expected id ranges.
                        if str(large_id).endswith("000") == False:
                            second_id = str(large_id).split("0")[-1]
                            first_id = (large_id - int(second_id)) / 1000
                        else:
                            first_id = (large_id) / 1000
                            second_id = ""
                            description2 = ""
                        # Carry the source maps' descriptions over to the new
                        # combined index entry.
                        pastinfo1 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                            filter(MTIndex.idxMapID == select1_id.id).\
                            filter(MTIndex.index == first_id).\
                            all()
                        description1 = pastinfo1[0].description1 + " " + pastinfo1[0].description2
                        if second_id != "":
                            pastinfo2 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                filter(MTIndex.idxMapID == select2_id.id).\
                                filter(MTIndex.index == second_id).\
                                all()
                            description2 = pastinfo2[0].description1 + " " + pastinfo2[0].description2

                        # Query for the pixel values of row[0] and replace with numberIDs
                        pixel_query = '''SELECT ST_PixelOfValue((SELECT raster FROM idx_index_maps WHERE id = {0}), {1});'''.format(unicode(new_index.id), row[0])
                        pixels = gsshapy_session.execute(pixel_query)
                        for pixel in pixels:
                            # pixel[0] is a "(x,y)" composite string from
                            # PostGIS; strip the parens and split on the comma.
                            coord = pixel[0].strip("()")
                            x, y = coord.split(",")
                            update_query = '''UPDATE idx_index_maps SET raster = (SELECT ST_SetValue(raster,{1},{2},{3}) FROM idx_index_maps WHERE id = {0}) WHERE id = {0};'''.format(unicode(new_index.id), int(x), int(y), numberIDs)
                            new_result = gsshapy_session.execute(update_query)

                        # Create new index value
                        new_indice = MTIndex(numberIDs, description1, description2)
                        # new_indice = MTIndex(row[0], description1, description2)
                        new_indice.indexMap = new_index
                        # NOTE(review): this inner loop shadows the outer
                        # `mapping_table` loop variable — confirm intended.
                        for mapping_table in mapTables:
                            distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                filter(MTValue.mapTable == mapping_table).\
                                order_by(MTValue.variable).\
                                all()
                            variables = []
                            for var in distinct_vars:
                                variables.append(var[0])
                            # Seed a zero-valued MTValue per variable for the
                            # new index entry.
                            for variable in variables:
                                new_value = MTValue(variable, 0)
                                new_value.mapTable = mapping_table
                                new_value.index = new_indice

                # Delete indices that aren't present
                for index in indices:
                    if not int(index[0]) in ids:
                        fetched_index = gsshapy_session.query(MTIndex).get(index.id)
                        for val in fetched_index.values:
                            gsshapy_session.delete(val)
                        gsshapy_session.delete(fetched_index)

                new_index.mapTables[map_table_count].numIDs = numberIDs - ID_OFFSET
                map_table_count += 1

            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()

            index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()

            # Specify the workspace
            controllerDir = os.path.abspath(os.path.dirname(__file__))
            gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
            publicDir = os.path.join(gsshaindexDir, 'public')
            userDir = os.path.join(publicDir, str(user))
            indexMapDir = os.path.join(userDir, 'index_maps')

            # Create kml file name and path
            current_time = time.strftime("%Y%m%dT%H%M%S")
            resource_name = index_raster.name + "_" + str(user) + "_" + current_time
            kml_ext = resource_name + '.kml'
            clusterFile = os.path.join(indexMapDir, kml_ext)

            index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)

            # Generate color ramp
            index_raster.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)
            resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

            # Swap the stale KML entry for this index with the new resource.
            temp_list = json.loads(job.current_kmls)
            if status == True:
                for item in temp_list:
                    if item == index_name:
                        del temp_list[item]
                        temp_list[index_name] = {'url': resource['url'], 'full_name': resource['name']}
                        break
            job.current_kmls = json.dumps(temp_list)
            job_session.commit()
            gsshapy_session.commit()
            job_session.close()
            gsshapy_session.close()
            return redirect(reverse('gsshaindex:mapping_table', kwargs={'job_id': job_id, 'index_name': index_name, 'mapping_table_number': '0'}))

    job_session.commit()
    gsshapy_session.commit()
    job_session.close()
    gsshapy_session.close()

    # Set the first index as the active one
    index_names = str(resource_names[0])

    # Set up map properties
    editable_map = {'height': '400px',
                    'width': '100%',
                    'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_names,
                    'maps_api_key': maps_api_key,
                    'drawing_types_enabled': []}

    context['replaced_index'] = index_name
    context['index_name'] = index_names
    context['google_map'] = editable_map
    context['select_input1'] = select_input1
    context['select_input2'] = select_input2
    context['job_id'] = job_id
    context['resource_name'] = resource_names

    return render(request, 'gsshaindex/combine_index.html', context)
def submit_edits(request, job_id, index_name):
    '''
    Controller that handles submissions of edits from the user after they
    manually edit an index map.

    Expects POST['geometry'] to be a JSON document with a 'geometries' list,
    each entry carrying a 'wkt' string and a properties.value cell id.  Each
    geometry is burned into the index raster via PostGIS, new MTIndex/MTValue
    records are created for unseen values, orphaned indices are deleted, and
    a refreshed KML is published to CKAN.  Always redirects back to the
    edit-index page.
    '''
    context = {}
    user = str(request.user)
    params = request.POST

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    # NOTE(review): mask_file is queried but never used below — confirm it is
    # only here for its .one() existence check.
    mask_file = gsshapy_session.query(RasterMapFile).filter(RasterMapFile.projectFileID == project_file_id).filter(RasterMapFile.fileExtension == "msk").one()

    # Get a list of the map tables for the index map
    mapTables = index_raster.mapTables

    # If some geometry is submitted, go and run the necessary steps to change the map
    if params['geometry']:
        jsonGeom = json.loads(params['geometry'])
        geometries = jsonGeom['geometries']

        # Convert from json to WKT
        for geometry in geometries:
            wkt = geometry['wkt']

            # Get the values for the geometry
            value = geometry['properties']['value']

            # Loop through indices and see if they match
            index_raster_indices = index_raster.indices
            index_present = False
            for index in index_raster_indices:
                if int(index.index) == int(value):
                    index_present = True
                    break

            # Create new index value if it doesn't exist and change the number of ids
            if index_present == False:
                new_indice = MTIndex(value, "", "")
                new_indice.indexMap = index_raster
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                        filter(MTValue.mapTable == mapping_table).\
                        order_by(MTValue.variable).\
                        all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])
                    # Seed a zero-valued MTValue per variable for the new
                    # index entry.
                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
            gsshapy_session.commit()

            # Fall back to UTM zone 12N when the project has no SRID.
            # NOTE(review): 26912 looks like a site-specific default — confirm.
            if project_file.srid == None:
                srid = 26912
            else:
                srid = project_file.srid

            # Change values in the index map
            change_index_values = "SELECT ST_SetValue(raster,1, ST_Transform(ST_GeomFromText('{0}', 4326),{1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(wkt, srid, value, index_raster.id)
            result = gi_lib.timeout(gi_lib.draw_update_index, args=(change_index_values, index_raster.id), kwargs={}, timeout=10, result_can_be_pickled=True, default=None)

            # If there is a timeout
            if result == None:
                messages.error(request, 'The submission timed out. Please try to draw in the changes and submit them again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id
                return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id': job_id, 'index_name': index_name}))

        # Get the values in the index map
        statement3 = '''SELECT (pvc).* FROM (SELECT ST_ValueCount(raster,1,true) As pvc FROM idx_index_maps WHERE id = ''' + unicode(index_raster.id) + ''') AS foo ORDER BY (pvc).value; '''
        result3 = gsshapy_engine.execute(statement3)
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)

        # Sync each map table's id count and delete index records whose
        # value no longer appears in the raster.
        map_table_count = 0
        for mapping_table in mapTables:
            index_raster.mapTables[map_table_count].numIDs = numberIDs
            indices = gsshapy_session.query(MTIndex.index, MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapping_table).\
                order_by(MTIndex.index).\
                all()
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
            gsshapy_session.commit()
            map_table_count += 1

        # Re-fetch the index map after the raw-SQL raster edits.
        index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()

        # Create kml file name and path
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        # Swap the stale KML entry for this index with the new resource.
        temp_list = json.loads(job.current_kmls)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {'url': resource['url'], 'full_name': resource['name']}
                    break
        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()
    else:
        messages.error(request, "You must make edits to submit")

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id': job_id, 'index_name': index_name}))
def fly(request, job_id):
    """
    Controller that runs the GSSHA analysis for a job via the flyGssha web
    service.

    Runs both the 'original' and 'new' models referenced by job.run_urls,
    uploading each result zip to the 'gssha-models' CKAN dataset.  The
    original model is skipped if already "Certified"; if it is marked
    "Missing gfl" it is first downloaded, given a FLOOD_GRID card, re-zipped
    and re-published before running.  Stores the result urls on the job and
    redirects to the status page.
    """
    context = {}

    # Get the user id
    user = str(request.user)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    resultsPath = os.path.join(userDir, 'results')
    originalFileRunPath = os.path.join(userDir, "preRun")
    writeFile = os.path.join(userDir, "writeFile")
    zipPath = os.path.join(userDir, "zipPath")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Clear the results folder
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(resultsPath)
    gi_lib.clear_folder(originalFileRunPath)
    gi_lib.clear_folder(writeFile)

    # Get the jobs from the database
    session = jobs_sessionmaker()
    job = session.query(Jobs).\
        filter(Jobs.user_id == user).\
        filter(Jobs.original_id == job_id).one()

    # Get the urls and names for the analysis
    run_urls = job.run_urls
    arguments = {'new': {'url': run_urls['new']['url'], 'name': run_urls['new']['name']},
                 'original': {'url': run_urls['original']['url'], 'name': run_urls['original']['name']}}

    # Set up for fly GSSHA
    job.status = "processing"
    session.commit()
    status = 'complete'
    results = []
    # results_urls = []
    results_urls = {}
    count = 0
    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Try running the web service
    # NOTE(review): the try/except that once guarded this loop is commented
    # out, so any failure now propagates instead of setting status='failed'.
    # try:
    for k in arguments:
        url = str(arguments[k]['url'])
        if k == 'original' and job.original_certification == "Certified":
            # Already run and certified — reuse the stored result url.
            results_urls['original'] = url
            count += 1
            continue
        elif k == 'original' and job.original_certification == "Missing gfl":
            # Need to download from url, add gfl, zip, send to ckan, run, and save the url
            downloaded_project = gi_lib.extract_zip_from_url(user, url, originalFileRunPath)

            # Find the project file
            # NOTE(review): if several .prj files exist, the last one found
            # wins; if none exist, project_name/read_dir are unbound below.
            for root, dirs, files in os.walk(originalFileRunPath):
                for file in files:
                    if file.endswith(".prj"):
                        project_name = file
                        project_path = os.path.join(root, file)
                        read_dir = os.path.dirname(project_path)

            # Create an empty Project File Object
            project = ProjectFile()
            project.readInput(directory=read_dir, projectFileName=project_name, session=gsshapy_session, spatial=True)
            # Add the max-depth (gfl) output card if missing.
            if project.getCard("FLOOD_GRID") == None:
                max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(project_name[:-4]))
                project_cards = project.projectCards.append(max_depth_card)
                gsshapy_session.commit()

            # Need to format so that it will work for the file I just did
            # Get all the project files
            project.writeInput(session=gsshapy_session, directory=writeFile, name=project_name[:-4])

            # Make a list of the project files
            writeFile_list = os.listdir(writeFile)

            # Add each project file to the zip folder
            with zipfile.ZipFile(zipPath, "w") as gssha_zip:
                for item in writeFile_list:
                    abs_path = os.path.join(writeFile, item)
                    archive_path = os.path.join(project_name, item)
                    gssha_zip.write(abs_path, archive_path)

            GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)
            description = job.original_description + "with a gfl added"
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

            # Add the zipped GSSHA file to the public ckan
            results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine, project_name[:-4] + " with gfl", description, pretty_date, user)
            job.original_url = results['url']
            url = job.original_url

            resultsFile = os.path.join(resultsPath, arguments[k]['name'].replace(" ", "_") + datetime.now().strftime('%Y%d%m%H%M%S'))
            gi_lib.flyGssha(str(url), resultsFile)

            # Push file to ckan dataset
            resource_name = ' '.join((arguments[k]['name'], '-Run', datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, resultsFile, CKAN_engine, resource_name, "", pretty_date, user, certification="Certified")

            # Save the new url as the original_url and run
            job.original_certification = "Certified"

            # Publish link to table
            results_urls['original'] = result['url']
            count += 1
        else:
            resultsFile = os.path.join(resultsPath, arguments[k]['name'].replace(" ", "_") + datetime.now().strftime('%Y%d%m%H%M%S'))
            gi_lib.flyGssha(url, resultsFile)

            # Push file to ckan dataset
            resource_name = ' '.join((arguments[k]['name'], '-Run', datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, resultsFile, CKAN_engine, resource_name, "", pretty_date, user, certification="Certified")

            # Publish link to table
            if k == 'original':
                results_urls['original'] = result['url']
                job.original_certification = "Certified"
            else:
                results_urls['new'] = result['url']
            count += 1

    # Both runs must have produced a url for the job to count as complete.
    if (count == 2):
        print results_urls
    else:
        status = 'failed'
    # except:
    #     status = 'failed'

    job.status = status
    job.result_urls = results_urls
    session.commit()
    session.close()
    gsshapy_session.commit()
    gsshapy_session.close()

    return redirect(reverse('gsshaindex:status'))
def zip_file(request, job_id):
    '''
    This zips up the GSSHA files in preparation of their being run.

    Reads the submitted model name/description from POST, sanitizes the name,
    ensures the project has a FLOOD_GRID (gfl) output card, writes the GSSHAPY
    project out to the user's workspace, zips it, and publishes the zip to the
    'gssha-models' CKAN dataset.  On success, stores the run urls and marks
    the job "ready to run".  Always redirects to the status page.
    '''
    context = {}

    # Get the user id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    project_file_id = job.new_model_id

    # Get the name and description from the submission
    params = request.POST
    not_clean_name = params['new_name']
    new_description = params['new_description']

    # Reformat the name by replacing bad characters with underscores.
    # BUG FIX: the previous loop did `new_name = not_clean_name.replace(...)`
    # on every pass, so only the LAST character of bad_char was ever removed;
    # the replacements must accumulate on new_name.
    # bad_char = "',.<>()[]{}=+-/\"|:;\\^?!~`@#$%&* "
    bad_char = "',.<>[]{}=+-/\"|:;\\^?!~`@#$%&*"
    new_name = not_clean_name
    for char in bad_char:
        new_name = new_name.replace(char, "_")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Get project from the database
    projectFileAll = gsshapy_session.query(ProjectFile).get(project_file_id)

    # Create name for files
    project_name = projectFileAll.name
    if project_name.endswith('.prj'):
        project_name = project_name[:-4]
    pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

    # Set depth map: add the max-depth (gfl) output card if missing, and
    # flag the original model so fly() knows to patch it too.
    if projectFileAll.getCard("FLOOD_GRID") == None:
        max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(new_name))
        project_cards = projectFileAll.projectCards.append(max_depth_card)
        gsshapy_session.commit()
        job.original_certification = "Missing gfl"

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    newFileDir = os.path.join(userDir, 'newFile')
    writeFile = os.path.join(newFileDir, new_name)
    zipPath = os.path.join(newFileDir, new_name + "_zip")

    # Clear workspace folders
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(newFileDir)
    gi_lib.clear_folder(writeFile)

    # Get all the project files
    projectFileAll.writeInput(session=gsshapy_session, directory=writeFile, name=new_name)

    # Make a list of the project files
    writeFile_list = os.listdir(writeFile)

    # Add each project file to the zip folder
    with zipfile.ZipFile(zipPath, "w") as gssha_zip:
        for item in writeFile_list:
            abs_path = os.path.join(writeFile, item)
            archive_path = os.path.join(new_name, item)
            gssha_zip.write(abs_path, archive_path)

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Add the zipped GSSHA file to the public ckan
    results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine, new_name, new_description, pretty_date, user)

    # If the file zips correctly, get information and store it in the database
    if success == True:
        new_url = results['url']
        new_name = results['name']
        original_url = job.original_url
        original_name = job.original_name
        model_data = {'original': {'url': original_url, 'name': original_name},
                      'new': {'url': new_url, 'name': new_name}}
        job.run_urls = model_data
        job.new_name = new_name
        job.status = "ready to run"
        session.commit()

    return redirect(reverse('gsshaindex:status'))
def extract_gssha(request, job_id):
    '''
    This takes the file name and id that were submitted and unzips the files,
    finds the index maps, and creates kmls.

    Downloads the job's original model zip, reads the GSSHA project into the
    GSSHAPY database, renders every index map as a clustered KML, publishes
    each KML to the 'index-maps' CKAN dataset, and stores the resulting urls
    on the job before redirecting to the select-index page.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Get url for the resource and extract the GSSHA file
    url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)

    # Create GSSHAPY Session
    gsshapy_session = gsshapy_sessionmaker()

    # Find the project file
    # NOTE(review): if multiple .prj files are present the last one found
    # wins; if none are present, project_name/read_dir are unbound below.
    for root, dirs, files in os.walk(userDir):
        for file in files:
            if file.endswith(".prj"):
                project_name = file
                project_path = os.path.join(root, file)
                read_dir = os.path.dirname(project_path)

    # Create an empty Project File Object
    project = ProjectFile()
    project.readInput(directory=read_dir, projectFileName=project_name, session=gsshapy_session, spatial=True)

    # Create empty dictionary to hold the kmls from this session
    current_kmls = {}

    # Store model information
    job.new_model_name = project.name
    job.new_model_id = project.id
    job.created = datetime.now()

    # Get index maps
    index_list = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project.mapTableFile).all()

    # Loop through the index
    for current_index in index_list:
        # Create kml file name and path
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = current_index.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        current_index.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        # If the kml is added correctly, create an entry for the current_kmls with the name as the index name
        if status == True:
            current_kmls[current_index.name] = {'url': resource['url'], 'full_name': resource['name']}

    # Add the kmls with their url to the database
    job.current_kmls = json.dumps(current_kmls)
    session.commit()
    session.close()
    gsshapy_session.close()

    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id': job_id}))
def get_mask_map(request, file_id):
    """
    This action is used to pass the kml data to the google map. It must
    return a JSON response with a Python dictionary that has the key
    'kml_links'.

    If the job already has a cached kml_url it is returned directly;
    otherwise the mask map is extracted from the job's original model zip,
    rendered to KML in a random color, published to the 'mask-maps' CKAN
    dataset, and the resulting url cached on the job.
    """
    kml_links = []
    session = jobs_sessionmaker()
    user = str(request.user)
    job, success = gi_lib.get_new_job(file_id, user, session)

    if job.kml_url != None:
        kml_links.append(job.kml_url)
        #TODO Need some way to check and see if the link works or if it's broken
        return JsonResponse({'kml_links': kml_links})
    else:
        # Check that there's a package to store kmls
        CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
        present = gi_lib.check_package('kmls', CKAN_engine)

        # Specify the workspace
        controllerDir = os.path.abspath(os.path.dirname(__file__))
        gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
        publicDir = os.path.join(gsshaindexDir, 'public')
        userDir = os.path.join(publicDir, str(user))

        # Clear the workspace
        gi_lib.clear_folder(userDir)

        url = job.original_url
        maskMapDir = os.path.join(userDir, 'mask_maps')
        extractPath = os.path.join(maskMapDir, file_id)
        mask_file = gi_lib.extract_mask(url, extractPath)

        # "blank" is the sentinel gi_lib.extract_mask returns when the zip
        # holds no mask map; cache an empty url so we don't retry every call.
        if mask_file == "blank":
            job.kml_url = ''
            session.commit()
            return JsonResponse({'kml_links': ''})
        else:
            projection_file = gi_lib.extract_projection(url, extractPath)

            # Set up kml file name and save location
            name = job.original_name
            norm_name = name.replace(" ", "")
            current_time = time.strftime("%Y%m%dT%H%M%S")
            kml_name = norm_name + "_" + user + "_" + current_time
            kml_ext = kml_name + ".kml"
            kml_file = os.path.join(extractPath, kml_ext)

            # Pick one random display color for the mask.
            colors = [(237, 9, 222), (92, 245, 61), (61, 184, 245), (171, 61, 245),
                      (250, 245, 105), (245, 151, 44), (240, 37, 14), (88, 5, 232),
                      (5, 232, 190), (11, 26, 227)]
            color = [random.choice(colors)]

            # Extract mask map and create kml
            gsshapy_session = gsshapy_sessionmaker()
            # NOTE(review): 4302 is used as the fallback SRID when no
            # projection file ships with the model — confirm this default.
            if projection_file != "blank":
                srid = ProjectionFile.lookupSpatialReferenceID(extractPath, projection_file)
            else:
                srid = 4302
            mask_map = RasterMapFile()
            mask_map.read(directory=extractPath, filename=mask_file, session=gsshapy_session, spatial=True, spatialReferenceID=srid)
            mask_map.getAsKmlClusters(session=gsshapy_session, path=kml_file, colorRamp={'colors': color, 'interpolatedPoints': 1})

            mask_map_dataset = gi_lib.check_dataset("mask-maps", CKAN_engine)

            # Add mask kml to CKAN for viewing
            resource, success = gi_lib.add_kml_CKAN(mask_map_dataset, CKAN_engine, kml_file, kml_name)

            # Check to ensure the resource was added and save it to database by adding "kml_url"
            # NOTE(review): if success != True this view falls off the end and
            # returns None (HTTP 500 in Django) — confirm intended.
            if success == True:
                job.kml_url = resource['url']
                session.commit()
                kml_links.append(job.kml_url)
                return JsonResponse({'kml_links': kml_links})
def combine_index(request, job_id, index_name):
    """
    Controller for the edit index by manually drawing in edits page.

    GET: renders the combine-index form with two select inputs listing the
    job's current index-map KMLs.
    POST: combines the selected index map(s) into ``index_name``'s raster via
    PostGIS ST_MapAlgebra (values encoded as first*1000 + second), rebuilds
    the mapping-table index entries to match the combined raster, regenerates
    the KML, uploads it to CKAN, and redirects to the mapping-table page.
    """
    context = {}
    user = str(request.user)
    # Combined ids are numbered starting just above this offset; it is
    # subtracted back out when storing numIDs per map table below.
    ID_OFFSET = 10

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id and gsshapy_session
    project_file_id = job.new_model_id
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files (name -> {'url', 'full_name'} mapping stored
    # as JSON on the job)
    resource_list = json.loads(job.current_kmls)

    # Create blank array for names and urls
    resource_names = []
    resource_url = []
    resource_info = []

    # Get array of names and urls
    for key in resource_list:
        resource_names.append(key)
        resource_url.append(resource_list[key]['url'])
        resource_info.append((key, key))

    select_input1 = {
        'display_text': "Select first index map",
        'name': 'select1',
        'multiple': False,
        'options': resource_info
    }
    select_input2 = {
        'display_text': "Select second index map or none",
        'name': 'select2',
        'multiple': False,
        'options': [("None", "none")] + resource_info
    }

    # if the next button was pressed
    # NOTE(review): `if request.POST` is falsy for a POST with an empty body;
    # `request.method == 'POST'` would be the conventional check — confirm
    # before changing.
    if request.POST:
        params = request.POST

        # Error message if both maps selected are the same
        if params['select1'] == params['select2']:
            result = ""
            messages.error(
                request,
                "You must select two different index maps. Or if you'd like to replace this map with a different map, select None for the second option"
            )

        # Process if only one map is selected: overwrite this index map with
        # select1's raster, scaled by 1000 (second component is zeroed).
        elif params['select2'] == "none":
            select1_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select1']).one()
            # NOTE(review): SQL is built by string concatenation. The ids come
            # from the ORM (integers), so injection risk is low, but
            # parameterized execution would be safer. `unicode` implies this
            # module targets Python 2.
            statement = '''UPDATE idx_index_maps
                Set raster = ST_MapAlgebra(
                    (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select1_id.id) + '''), 1,
                    (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(new_index.id) + '''), 1,
                    '([rast1]*1000+ [rast2]*0)')
                WHERE id = ''' + unicode(new_index.id) + ''';
                '''
            # Run the raster update with a 10 s timeout guard.
            result = gi_lib.timeout(gsshapy_engine.execute,
                                    args=(statement, ),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=False,
                                    default=None)

        # Process if two maps are selected
        else:
            # Get the ids for the two index maps to be combined
            select1_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select1']).one()
            select2_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select2']).one()

            # Combine the maps and give a unique id
            # (encoded value = select1_value*1000 + select2_value)
            statement = '''UPDATE idx_index_maps
                SET raster =ST_MapAlgebra(
                    (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select1_id.id) + '''), 1,
                    (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(select2_id.id) + '''), 1,
                    '(([rast1]*1000) + [rast2])')
                WHERE id = ''' + unicode(new_index.id) + ''';
                '''
            result = gi_lib.timeout(gsshapy_engine.execute,
                                    args=(statement, ),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=False,
                                    default=None)

        # Skip the rebuild when the "same map twice" error path set result to "".
        if result != "":
            # Get the values in the index map (distinct raster values after
            # the combine, in ascending order)
            statement3 = '''SELECT (pvc).*
                FROM (SELECT ST_ValueCount(raster,1,true) As pvc
                FROM idx_index_maps WHERE id = ''' + unicode(new_index.id) + ''') AS foo
                ORDER BY (pvc).value;
                '''
            new_indice_values = gsshapy_engine.execute(statement3)

            # Get the indices for the index being changed
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()

            # Go through the map tables that use the index map
            # NOTE(review): `new_indice_values` is a result proxy; iterating
            # it again on the second map table may yield no rows — confirm.
            map_table_count = 0
            for mapping_table in mapTables:
                # Reset the number of ids to start counting them
                numberIDs = ID_OFFSET
                ids = []

                # Go through each new id value
                for row in new_indice_values:
                    index_present = False
                    numberIDs += 1
                    ids.append(row.value)
                    large_id = int(row[0])

                    # Check whether this combined value already exists as an index
                    for index in new_index.indices:
                        if int(index.index) == int(row[0]):
                            index_present = True
                            break

                    if index_present == False:
                        # Decompose large_id into its two source ids
                        # (first*1000 + second).
                        # NOTE(review): split("0")[-1] breaks when the second
                        # component contains an interior zero (e.g. 12304 ->
                        # second_id '4', not '304'); large_id % 1000 //
                        # large_id // 1000 would be the robust decomposition —
                        # TODO confirm and fix.
                        if str(large_id).endswith("000") == False:
                            second_id = str(large_id).split("0")[-1]
                            first_id = (large_id - int(second_id)) / 1000
                        else:
                            first_id = (large_id) / 1000
                            second_id = ""
                        description2 = ""

                        # Pull the descriptions of the first source index.
                        pastinfo1 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                            filter(MTIndex.idxMapID == select1_id.id).\
                            filter(MTIndex.index == first_id).\
                            all()
                        description1 = pastinfo1[0].description1 + " " + pastinfo1[0].description2

                        # NOTE(review): select2_id is only bound in the
                        # two-map branch; in the "none" branch all values end
                        # in 000 so second_id stays "" — fragile but
                        # apparently intentional.
                        if second_id != "":
                            pastinfo2 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                filter(MTIndex.idxMapID == select2_id.id).\
                                filter(MTIndex.index == second_id).\
                                all()
                            description2 = pastinfo2[0].description1 + " " + pastinfo2[0].description2

                        # Query for the pixel values of row[0] and replace with numberIDs
                        pixel_query = '''SELECT ST_PixelOfValue((SELECT raster FROM idx_index_maps WHERE id = {0}), {1});'''.format(
                            unicode(new_index.id), row[0])
                        pixels = gsshapy_session.execute(pixel_query)
                        for pixel in pixels:
                            # Each pixel comes back as a "(x,y)" string.
                            coord = pixel[0].strip("()")
                            x, y = coord.split(",")
                            update_query = '''UPDATE idx_index_maps SET raster = (SELECT ST_SetValue(raster,{1},{2},{3}) FROM idx_index_maps WHERE id = {0}) WHERE id = {0};'''.format(
                                unicode(new_index.id), int(x), int(y), numberIDs)
                            new_result = gsshapy_session.execute(update_query)

                        # Create new index value
                        new_indice = MTIndex(numberIDs, description1, description2)
                        # new_indice = MTIndex(row[0], description1, description2)
                        new_indice.indexMap = new_index

                        # Seed zero-valued MTValues for every variable in every
                        # map table that uses this index map.
                        # NOTE(review): this inner loop variable shadows the
                        # outer `mapping_table` — works but confusing; confirm
                        # the outer loop's later use isn't affected.
                        for mapping_table in mapTables:
                            distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                filter(MTValue.mapTable == mapping_table).\
                                order_by(MTValue.variable).\
                                all()
                            variables = []
                            for var in distinct_vars:
                                variables.append(var[0])
                            for variable in variables:
                                new_value = MTValue(variable, 0)
                                new_value.mapTable = mapping_table
                                new_value.index = new_indice

                # Delete indices that aren't present in the combined raster
                for index in indices:
                    if not int(index[0]) in ids:
                        fetched_index = gsshapy_session.query(MTIndex).get(index.id)
                        for val in fetched_index.values:
                            gsshapy_session.delete(val)
                        gsshapy_session.delete(fetched_index)

                # Record how many ids this map table now has.
                new_index.mapTables[map_table_count].numIDs = numberIDs - ID_OFFSET
                map_table_count += 1

            # Refresh the index list after the rebuild.
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                join(MTValue).\
                filter(MTValue.mapTable == mapTables[0]).\
                order_by(MTIndex.index).\
                all()

            index_raster = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == index_name).one()

            # Specify the workspace
            controllerDir = os.path.abspath(os.path.dirname(__file__))
            gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
            publicDir = os.path.join(gsshaindexDir, 'public')
            userDir = os.path.join(publicDir, str(user))
            indexMapDir = os.path.join(userDir, 'index_maps')

            # Create kml file name and path
            current_time = time.strftime("%Y%m%dT%H%M%S")
            resource_name = index_raster.name + "_" + str(user) + "_" + current_time
            kml_ext = resource_name + '.kml'
            clusterFile = os.path.join(indexMapDir, kml_ext)

            index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)

            # Generate color ramp
            index_raster.getAsKmlClusters(session=gsshapy_session,
                                          path=clusterFile,
                                          colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                          alpha=0.6)

            # Upload the regenerated KML to CKAN.
            resource, status = gi_lib.add_kml_CKAN(index_map_dataset,
                                                   CKAN_engine, clusterFile,
                                                   resource_name)

            # Replace the stale KML entry for this index in the job record.
            temp_list = json.loads(job.current_kmls)
            if status == True:
                for item in temp_list:
                    if item == index_name:
                        del temp_list[item]
                        temp_list[index_name] = {
                            'url': resource['url'],
                            'full_name': resource['name']
                        }
                        break

            job.current_kmls = json.dumps(temp_list)
            job_session.commit()
            gsshapy_session.commit()
            job_session.close()
            gsshapy_session.close()

            return redirect(
                reverse('gsshaindex:mapping_table',
                        kwargs={
                            'job_id': job_id,
                            'index_name': index_name,
                            'mapping_table_number': '0'
                        }))

    # GET request (or the same-map error path): fall through to rendering
    # the combine form.
    job_session.commit()
    gsshapy_session.commit()
    job_session.close()
    gsshapy_session.close()

    # Set the first index as the active one
    # NOTE(review): raises IndexError if the job has no KMLs — confirm
    # callers guarantee at least one.
    index_names = str(resource_names[0])

    # Set up map properties
    editable_map = {
        'height': '400px',
        'width': '100%',
        'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_names,
        'maps_api_key': maps_api_key,
        'drawing_types_enabled': []
    }

    context['replaced_index'] = index_name
    context['index_name'] = index_names
    context['google_map'] = editable_map
    context['select_input1'] = select_input1
    context['select_input2'] = select_input2
    context['job_id'] = job_id
    context['resource_name'] = resource_names

    return render(request, 'gsshaindex/combine_index.html', context)