def get_index_maps(request, job_id, index_name):
    '''
    Pass the kml data for one index map to the google map.

    Looks up the requesting user's pending job, reads the job's current
    kml catalog (a JSON string), and returns the url of the kml entry
    named by ``index_name``. The response must contain the key
    'kml_links'.

    Parameters:
        request: Django request object (only request.user is read).
        job_id: identifier of the pending job.
        index_name: key into the job's current_kmls mapping.

    Returns:
        JsonResponse of the form {'kml_links': [<url>]}.
    '''
    # (removed a no-op "job_id = job_id" self-assignment)
    map_name = index_name
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    # NOTE(review): the engine is never used below; the call is kept in
    # case get_dataset_engine has required side effects -- confirm.
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # current_kmls is stored as a JSON string: map name -> {'url': ..., ...}
    resource_list = json.loads(job.current_kmls)

    # Get the kml url for the requested index map
    kml_links = resource_list[map_name]['url']

    return JsonResponse({'kml_links': [kml_links]})
# Example #2 (score: 0)
def get_index_maps(request, job_id, index_name):
    '''
    Return the kml url for one index map so the google map can render it.
    The JSON payload must carry the key 'kml_links'.
    '''
    requesting_user = str(request.user)
    db_session = jobs_sessionmaker()
    pending_job, _found = gi_lib.get_pending_job(job_id, requesting_user, db_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # The job keeps its kml catalog as a JSON string: name -> {'url': ...}
    kml_catalog = json.loads(pending_job.current_kmls)

    # Pick out the url of the requested index map
    link = kml_catalog[index_name]['url']

    return JsonResponse({'kml_links': [link]})
# Example #3 (score: 0)
def extract_existing_gssha(request, job_id):
    '''
    Take the submitted file name and id and unzip the model files,
    then redirect to the index-map selection page.
    '''
    context = {}
    user = str(request.user)
    job_session = jobs_sessionmaker()
    job, found = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Work out the per-user workspace folders
    controller_dir = os.path.abspath(os.path.dirname(__file__))
    app_dir = os.path.abspath(os.path.dirname(controller_dir))
    user_dir = os.path.join(app_dir, 'public', str(user))
    index_map_dir = os.path.join(user_dir, 'index_maps')

    # Start from a clean workspace
    gi_lib.clear_folder(user_dir)
    gi_lib.clear_folder(index_map_dir)

    # Pull the GSSHA zip down from the job's original url and unpack it
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, job.original_url, user_dir)

    # GSSHAPY session (created here for parity with the original flow)
    gsshapy_session = gsshapy_sessionmaker()

    # Locate the project (.prj) file among the extracted files
    for folder, _subdirs, filenames in os.walk(user_dir):
        for candidate in filenames:
            if candidate.endswith(".prj"):
                project_name = candidate
                project_path = os.path.join(folder, candidate)
                read_dir = os.path.dirname(project_path)

    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id': job_id}))
def extract_existing_gssha(request, job_id):
    '''
    This takes the file name and id that were submitted and unzips the files.

    Clears the per-user workspace, downloads and extracts the job's
    original GSSHA zip there, scans for the project (.prj) file, and
    redirects to the index-map selection page.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    # Look up this user's pending job; the success flag is not checked here
    job, success = gi_lib.get_pending_job(job_id, user,session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace (directories live under <app>/public/<user>)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace so stale files from a previous run don't linger
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Get url for the resource and extract the GSSHA file
    url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)

    # Create GSSHAPY Session
    gsshapy_session = gsshapy_sessionmaker()

    # Find the project file
    # NOTE(review): project_name/project_path/read_dir are computed but
    # never used in this function, and the sessions are never closed --
    # presumably leftovers; confirm before cleaning up.
    for root, dirs, files in os.walk(userDir):
        for file in files:
            if file.endswith(".prj"):
                project_name = file
                project_path = os.path.join(root, file)
                read_dir = os.path.dirname(project_path)

    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id':job_id}))
# Example #5 (score: 0)
def replace_values(request, job_id, index_name, mapping_table_number):
    '''
    Overwrite variable values on the index map's mapping tables, then
    redirect to the mapping-table submission page.
    '''
    context = {}

    # Resolve the requesting user and their pending job
    user = str(request.user)
    job_session = jobs_sessionmaker()
    job, found = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # The job points at the working copy of the model
    project_file_id = job.new_model_id

    # Open a GSSHAPY session
    gsshapy_session = gsshapy_sessionmaker()

    # Follow project file -> map table file -> the named index map
    project_file = (gsshapy_session.query(ProjectFile)
                    .filter(ProjectFile.id == project_file_id)
                    .one())
    index_raster = (gsshapy_session.query(IndexMap)
                    .filter(IndexMap.mapTableFile == project_file.mapTableFile)
                    .filter(IndexMap.name == index_name)
                    .one())
    indices = index_raster.indices
    mapTables = index_raster.mapTables

    # When the form was posted, copy each "var-<id>" field onto the
    # matching MTValue row, then commit once at the end.
    if request.POST:
        for field in request.POST:
            if "var" in field:
                row_id = int(field.replace("var-", ""))
                row = gsshapy_session.query(MTValue).get(row_id)
                row.value = request.POST[field]
        gsshapy_session.commit()

    context['job_id'] = job_id
    context['index_name'] = index_name
    context['mapping_table_number'] = mapping_table_number

    return redirect(reverse('gsshaindex:submit_mapping_table',
                            kwargs={'job_id': job_id,
                                    'index_name': index_name,
                                    'mapping_table_number': mapping_table_number}))
def submit_edits(request, job_id, index_name):
    '''
    Controller that handles submissions of edits from the user after they manually edit an index map.

    When the POST body carries 'geometry' (a JSON object with a
    'geometries' list of {wkt, properties.value} entries):
      1. Apply each drawn polygon to the index map raster in PostGIS.
      2. Create any missing MTIndex rows (with zeroed MTValues) for new ids.
      3. Drop MTIndex rows whose id no longer appears in the raster.
      4. Re-export the index map as a kml, push it to CKAN, and record the
         new kml url on the job.
    Redirects back to the edit page in all cases.
    '''
    context = {}
    user = str(request.user)
    params = request.POST

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
    mask_file = gsshapy_session.query(RasterMapFile).filter(
        RasterMapFile.projectFileID == project_file_id).filter(
            RasterMapFile.fileExtension == "msk").one()

    # Get a list of the map tables for the index map
    mapTables = index_raster.mapTables

    # If some geometry is submitted, go and run the necessary steps to change the map
    if params['geometry']:
        jsonGeom = json.loads(params['geometry'])
        geometries = jsonGeom['geometries']

        # Convert from json to WKT
        for geometry in geometries:
            wkt = geometry['wkt']

            # Get the values for the geometry
            value = geometry['properties']['value']

            # Loop through indices and see if they match
            index_raster_indices = index_raster.indices
            index_present = False
            for index in index_raster_indices:
                if int(index.index) == int(value):
                    index_present = True
                    break

            # Create new index value if it doesn't exist and change the number of ids
            if index_present == False:
                new_indice = MTIndex(value, "", "")
                new_indice.indexMap = index_raster
                for mapping_table in mapTables:
                    # Every distinct variable in the table gets a zeroed
                    # MTValue for the newly created index id
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                     filter(MTValue.mapTable == mapping_table).\
                                     order_by(MTValue.variable).\
                                     all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])

                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
                gsshapy_session.commit()

            # NOTE(review): falls back to a hard-coded srid (26912, UTM 12N)
            # when the project file has none -- confirm this default
            if project_file.srid == None:
                srid = 26912
            else:
                srid = project_file.srid

            # Change values in the index map
            change_index_values = "SELECT ST_SetValue(raster,1, ST_Transform(ST_GeomFromText('{0}', 4326),{1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(wkt, srid, value, index_raster.id)

            # Run the raster update with a 10-second cap
            result = gi_lib.timeout(gi_lib.draw_update_index,
                                    args=(change_index_values,
                                          index_raster.id),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=True,
                                    default=None)

            # Timeout: close both sessions and bounce back to the editor
            if result == None:

                messages.error(
                    request,
                    'The submission timed out. Please try to draw in the changes and submit them again.'
                )
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id

                return redirect(
                    reverse('gsshaindex:edit_index',
                            kwargs={
                                'job_id': job_id,
                                'index_name': index_name
                            }))

        # Get the values in the index map
        # NOTE(review): unicode() makes this Python 2 only
        statement3 = '''SELECT (pvc).*
                        FROM (SELECT ST_ValueCount(raster,1,true) As pvc
                        FROM idx_index_maps WHERE id = ''' + unicode(
            index_raster.id) + ''') AS foo
                        ORDER BY (pvc).value;
                        '''
        result3 = gsshapy_engine.execute(statement3)

        # Count the ids that remain present in the raster
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)

        map_table_count = 0
        for mapping_table in mapTables:

            index_raster.mapTables[map_table_count].numIDs = numberIDs

            indices = gsshapy_session.query(MTIndex.index, MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapping_table).\
                                   order_by(MTIndex.index).\
                                   all()

            # Delete index rows (and their values) no longer present in the raster
            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
            gsshapy_session.commit()
            map_table_count += 1

        # Re-read the index map after the deletions above
        index_raster = gsshapy_session.query(IndexMap).filter(
            IndexMap.mapTableFile == project_file.mapTableFile).filter(
                IndexMap.name == index_name).one()

        # Create kml file name and path (timestamped per user)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(
            user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session,
                                      path=clusterFile,
                                      colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                      alpha=0.6)

        # Publish the new kml to CKAN and swap it into the job's kml catalog
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        temp_list = json.loads(job.current_kmls)

        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {
                        'url': resource['url'],
                        'full_name': resource['name']
                    }
                    break

        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()

    else:
        messages.error(request, "You must make edits to submit")

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(
        reverse('gsshaindex:edit_index',
                kwargs={
                    'job_id': job_id,
                    'index_name': index_name
                }))
def fly(request, job_id):
    '''
    Run the original and new GSSHA models through the flyGssha service.

    For each of the job's two run urls ('original' and 'new'):
      - an already-Certified original is reused without re-running;
      - an original "Missing gfl" is downloaded, given a FLOOD_GRID card,
        re-zipped, published to CKAN, then run;
      - otherwise the model is run and the result pushed to CKAN.
    Result urls are stored on the job and the user is redirected to the
    status page.

    Fixes vs. previous version: `print` used as a function (was a
    Python-2-only statement), missing space in the "with a gfl added"
    description, and a redundant re-fetch of the GSSHA dataset inside
    the loop.
    '''
    context = {}

    # Get the user id
    user = str(request.user)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace folders under <app>/public/<user>
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    resultsPath = os.path.join(userDir, 'results')
    originalFileRunPath = os.path.join(userDir, "preRun")
    writeFile = os.path.join(userDir, "writeFile")
    zipPath = os.path.join(userDir, "zipPath")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Clear the working folders
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(resultsPath)
    gi_lib.clear_folder(originalFileRunPath)
    gi_lib.clear_folder(writeFile)

    # Get the job from the database
    session = jobs_sessionmaker()
    job = session.query(Jobs).\
                    filter(Jobs.user_id == user).\
                    filter(Jobs.original_id == job_id).one()

    # Get the urls and names for the analysis
    run_urls = job.run_urls

    arguments = {'new': {'url': run_urls['new']['url'], 'name': run_urls['new']['name']},
                 'original': {'url': run_urls['original']['url'], 'name': run_urls['original']['name']}}

    # Mark the job as in progress before the (long) runs start
    job.status = "processing"
    session.commit()

    status = 'complete'

    results = []
    results_urls = {}
    count = 0

    # Fetched once; reused for every upload below
    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # NOTE(review): a try/except that set status='failed' was disabled in
    # the original; failures currently propagate out of this view.
    for k in arguments:
        url = str(arguments[k]['url'])

        if k == 'original' and job.original_certification == "Certified":
            # Already certified: reuse the stored url without re-running
            results_urls['original'] = url
            count += 1
            continue
        elif k == 'original' and job.original_certification == "Missing gfl":
            # Download from url, add a gfl card, zip, send to CKAN, run,
            # and save the resulting url
            downloaded_project = gi_lib.extract_zip_from_url(user, url, originalFileRunPath)
            # Find the project (.prj) file among the extracted files
            for root, dirs, files in os.walk(originalFileRunPath):
                for file in files:
                    if file.endswith(".prj"):
                        project_name = file
                        project_path = os.path.join(root, file)
                        read_dir = os.path.dirname(project_path)
            project = ProjectFile()
            project.readInput(directory=read_dir,
                              projectFileName=project_name,
                              session=gsshapy_session,
                              spatial=True)

            # Add the FLOOD_GRID (gfl) card when it is missing
            if project.getCard("FLOOD_GRID") == None:
                max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(project_name[:-4]))
                project_cards = project.projectCards.append(max_depth_card)
                gsshapy_session.commit()

            # Write all the project files back out
            project.writeInput(session=gsshapy_session, directory=writeFile, name=project_name[:-4])

            # Make a list of the project files
            writeFile_list = os.listdir(writeFile)

            # Add each project file to the zip folder
            with zipfile.ZipFile(zipPath, "w") as gssha_zip:
                for item in writeFile_list:
                    abs_path = os.path.join(writeFile, item)
                    archive_path = os.path.join(project_name, item)
                    gssha_zip.write(abs_path, archive_path)

            # BUG FIX: a space was missing before "with a gfl added"
            description = job.original_description + " with a gfl added"
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

            # Add the zipped GSSHA file to the public ckan
            results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine, project_name[:-4] + " with gfl", description, pretty_date, user)

            job.original_url = results['url']

            url = job.original_url

            resultsFile = os.path.join(resultsPath, arguments[k]['name'].replace(" ", "_") + datetime.now().strftime('%Y%d%m%H%M%S'))

            gi_lib.flyGssha(str(url), resultsFile)

            # Push the run results to the ckan dataset
            resource_name = ' '.join((arguments[k]['name'], '-Run', datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, resultsFile, CKAN_engine, resource_name, "", pretty_date, user, certification="Certified")

            # The gfl-augmented original is now certified
            job.original_certification = "Certified"

            # Publish link to table
            results_urls['original'] = result['url']
            count += 1
        else:
            # Either the new model, or an original that just needs a run
            resultsFile = os.path.join(resultsPath, arguments[k]['name'].replace(" ", "_") + datetime.now().strftime('%Y%d%m%H%M%S'))
            gi_lib.flyGssha(url, resultsFile)

            # Push the run results to the ckan dataset
            resource_name = ' '.join((arguments[k]['name'], '-Run', datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, resultsFile, CKAN_engine, resource_name, "", pretty_date, user, certification="Certified")

            # Publish link to table
            if k == 'original':
                results_urls['original'] = result['url']
                job.original_certification = "Certified"
            else:
                results_urls['new'] = result['url']
            count += 1

    # Both runs accounted for?
    if (count == 2):
        print(results_urls)
    else:
        status = 'failed'

    job.status = status
    job.result_urls = results_urls
    session.commit()
    session.close()
    gsshapy_session.commit()
    gsshapy_session.close()

    return redirect(reverse('gsshaindex:status'))
def home(request):
    """
    Controller for the app home page.

    Lists the GSSHA models in the 'gssha-models' CKAN dataset, registers
    any models not yet in the jobs database, and renders the model
    selection page. On a POST carrying 'file_id', marks that job pending
    under a timestamped unique id and redirects to the extraction step.

    Fix vs. previous version: jobs_sessionmaker() was called twice,
    leaking the first session; a single session is used throughout.
    """
    context = {}
    user = str(request.user)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    # One jobs session for the whole request
    session = jobs_sessionmaker()

    # Check to see if there's a models package
    present = gi_lib.check_package('gssha-models', CKAN_engine)

    # Check if the submit button has been clicked and if there is a file_id
    if ('file_id' in request.POST):
        params = request.POST
        file_id = params['file_id']
        if file_id == "":
            messages.error(request, 'No GSSHA Model is selected')
        else:
            job, success = gi_lib.get_new_job(file_id, user, session)
            job.status = "pending"
            # Make the job id unique by appending a timestamp
            date = datetime.now()
            formatted_date = '{:%Y-%m-%d-%H-%M-%S}'.format(date)
            new_id = file_id + "-" + formatted_date
            job.original_id = new_id
            session.commit()
            return redirect(reverse('gsshaindex:extract_gssha', kwargs={'job_id': new_id}))

    # Find all the GSSHA models in the datasets
    results = CKAN_engine.get_dataset('gssha-models')
    resources = results['result']['resources']

    # Collect (name, id, description) for each GSSHA model
    model_list = []

    no_files = True

    if len(resources) > 0:
        no_files = False
        for result in resources:
            file_id = result.get('id')
            # Check to see if the files are already in the database and add them if they aren't
            job, success = gi_lib.get_new_job(file_id, user, session)
            if success == False:
                if result['description'] == "":
                    file_description = "None"
                else:
                    file_description = result['description']
                file_name = result.get('name')
                file_url = result.get('url')
                file_certification = result.get('certification')
                new_job = Jobs(name=file_name, user_id=user, original_description=file_description, original_id=file_id, original_url=file_url, original_certification=file_certification)
                session.add(new_job)
                session.commit()
                context['job_id'] = new_job.id
            else:
                file_name = job.original_name
                file_description = job.original_description

            model_list.append((file_name, file_id, file_description))

    if no_files == False:
        # Display a google map centered on the first model's mask map
        first_file_id = model_list[0][1]

        google_map = {'height': '500px',
                      'width': '100%',
                      'reference_kml_action': '/apps/gsshaindex/get-mask-map/' + str(first_file_id),
                      'maps_api_key': maps_api_key}
        # Populate the drop-down menu
        select_model = {'display_text': 'Select GSSHA Model',
                        'name': 'select_a_model',
                        'multiple': False,
                        'options': model_list,
                        'initial': model_list[0]}

    else:
        # No models available: empty map, no selector
        google_map = {'height': '500px',
                      'width': '100%',
                      'maps_api_key': maps_api_key}
        select_model = ""

    context['model_list'] = model_list
    context['google_map'] = google_map
    context['select_model'] = select_model

    # Display the index page
    return render(request, 'gsshaindex/home.html', context)
# Example #9 (score: 0)
def shapefile_index(request, job_id, index_name, shapefile_name):
    """
    Controller for the selecting the shapefile to use to define the index map.

    Looks up the named shapefile layer on the user's GeoServer workspace;
    when found, renders a map zoomed to the layer's bounding box with the
    layer as a WMS overlay, otherwise a default CONUS-wide map.
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # Get project file id (the working copy of the model)
    project_file_id = job.new_model_id

    # Create a session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace (directories under <app>/public/<user>)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
    mapTables = new_index.mapTables

    # Find the contents of GeoServer for the user and display them
    dataset_engine = get_spatial_dataset_engine(name='gsshaindex_geoserver',
                                                app_class=GSSHAIndex)
    overlay_result = gi_lib.get_layer_and_resource(dataset_engine, user,
                                                   shapefile_name)

    if overlay_result['success'] == True:
        url = overlay_result['layer']['wms']['kml']
        # Center the view on the middle of the layer's lat/lon bounding box
        coord_list = list(overlay_result['resource']['latlon_bbox'][:-1])
        avg_x = int(round((float(coord_list[0]) + float(coord_list[1])) / 2))
        avg_y = int(round((float(coord_list[2]) + float(coord_list[3])) / 2))

        map_view = {
            'height':
            '600px',
            'width':
            '100%',
            'controls': [
                'ZoomSlider',
                'ScaleLine',
            ],
            'layers': [
                {
                    'WMS': {
                        'url': url,
                        'params': {
                            'LAYERS': overlay_result['layer']['name'],
                        },
                        'serverType': 'geoserver'
                    }
                },
            ],
            'view': {
                'projection': 'EPSG:4326',
                'center': [avg_x, avg_y],
                'zoom': 6.5,
                'maxZoom': 18,
                'minZoom': 3
            },
            'base_map':
            'OpenStreetMap'
        }

    else:
        # Layer not found: fall back to a wide default view
        map_view = {
            'height': '400px',
            'width': '100%',
            'controls': [
                'ZoomSlider',
                'ScaleLine',
            ],
            'view': {
                'projection': 'EPSG:4326',
                'center': [-100, 40],
                'zoom': 3.5,
                'maxZoom': 18,
                'minZoom': 3
            },
            'base_map': 'OpenStreetMap'
        }

    context['job_id'] = job_id
    context['index_name'] = index_name
    context['file_name'] = shapefile_name
    context['map_view'] = map_view

    return render(request, 'gsshaindex/select_shapefile.html', context)
def shapefile_index(request, job_id, index_name, shapefile_name):
    """
    Controller for the selecting the shapefile to use to define the index map.
    """
    context = {}
    user = str(request.user)

    # Look up the user's pending job
    jobs_db = jobs_sessionmaker()
    job, found = gi_lib.get_pending_job(job_id, user, jobs_db)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # The job points at the working copy of the model
    project_file_id = job.new_model_id

    # Open a GSSHAPY session
    gsshapy_session = gsshapy_sessionmaker()

    # Per-user workspace folders
    controller_dir = os.path.abspath(os.path.dirname(__file__))
    app_dir = os.path.abspath(os.path.dirname(controller_dir))
    user_dir = os.path.join(app_dir, 'public', str(user))
    index_map_dir = os.path.join(user_dir, 'index_maps')

    # Project file -> map table file -> the named index map
    project_file = (gsshapy_session.query(ProjectFile)
                    .filter(ProjectFile.id == project_file_id).one())
    new_index = (gsshapy_session.query(IndexMap)
                 .filter(IndexMap.mapTableFile == project_file.mapTableFile)
                 .filter(IndexMap.name == index_name).one())
    mapTables = new_index.mapTables

    # Look for the user's shapefile layer on GeoServer
    dataset_engine = get_spatial_dataset_engine(name='gsshaindex_geoserver', app_class=GSSHAIndex)
    overlay_result = gi_lib.get_layer_and_resource(dataset_engine, user, shapefile_name)

    if overlay_result['success'] == True:
        kml_url = overlay_result['layer']['wms']['kml']
        # Center the view on the middle of the layer's bounding box
        bbox = list(overlay_result['resource']['latlon_bbox'][:-1])
        center_x = int(round((float(bbox[0]) + float(bbox[1])) / 2))
        center_y = int(round((float(bbox[2]) + float(bbox[3])) / 2))

        map_view = {
            'height': '600px',
            'width': '100%',
            'controls': ['ZoomSlider', 'ScaleLine'],
            'layers': [{'WMS': {'url': kml_url,
                                'params': {'LAYERS': overlay_result['layer']['name']},
                                'serverType': 'geoserver'}}],
            'view': {'projection': 'EPSG:4326',
                     'center': [center_x, center_y],
                     'zoom': 6.5,
                     'maxZoom': 18,
                     'minZoom': 3},
            'base_map': 'OpenStreetMap',
        }
    else:
        # Layer not found: fall back to a wide default view
        map_view = {
            'height': '400px',
            'width': '100%',
            'controls': ['ZoomSlider', 'ScaleLine'],
            'view': {'projection': 'EPSG:4326',
                     'center': [-100, 40],
                     'zoom': 3.5,
                     'maxZoom': 18,
                     'minZoom': 3},
            'base_map': 'OpenStreetMap',
        }

    context['job_id'] = job_id
    context['index_name'] = index_name
    context['file_name'] = shapefile_name
    context['map_view'] = map_view

    return render(request, 'gsshaindex/select_shapefile.html', context)
# Example #11 (score: 0)
def select_index(request, job_id):
    """Show the job's index maps and route index-editing requests.

    A POST containing ``select_index`` redirects to the chosen editing
    workflow (draw polygons, upload a shapefile, or merge/replace maps).
    Otherwise the page is rendered with the job's index-map kmls, the
    alphabetically-first map active.
    """
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # The job points at the GSSHAPY project file for this model
    project_file_id = job.new_model_id

    # POST: dispatch to the requested index-editing tool
    if 'select_index' in request.POST:
        posted = request.POST
        index_name = posted['index_name']
        method = posted['method']
        if method == "Create polygons":
            return redirect(
                reverse('gsshaindex:edit_index',
                        kwargs={'job_id': job_id,
                                'index_name': index_name}))
        if method == "Upload shapefile":
            return redirect(
                reverse('gsshaindex:shapefile_index',
                        kwargs={'job_id': job_id,
                                'index_name': index_name,
                                'shapefile_name': "None"}))
        if method == "Merge index maps or replace with another":
            return redirect(
                reverse('gsshaindex:combine_index',
                        kwargs={'job_id': job_id,
                                'index_name': index_name}))

    # kml metadata stored with the job: {index name: {'url': ..., ...}}
    resource_kmls = json.loads(job.current_kmls)

    # Alphabetical list of index names, plus their urls
    resource_name = sorted(resource_kmls)
    resource_url = [resource_kmls[key]['url'] for key in resource_kmls]

    # The first index (alphabetically) starts out active
    map_name = str(resource_name[0])

    # GSSHAPY database session
    gsshapy_session = gsshapy_sessionmaker()

    # Resolve the active index map through the project's mapping-table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == map_name).one()
    indices = index_raster.indices

    # Google-map gizmo options for displaying the active index map
    editable_map = {'height': '600px',
                    'width': '100%',
                    'reference_kml_action':
                        '/apps/gsshaindex/' + job_id + '/get-index-maps/' + map_name,
                    'maps_api_key': maps_api_key,
                    'drawing_types_enabled': []}

    context.update(google_map=editable_map,
                   project_name=job.original_name,
                   resource_name=resource_name,
                   map_name=map_name,
                   job_id=job_id)

    return render(request, 'gsshaindex/select_index.html', context)
# Exemple #12
# 0
def extract_gssha(request, job_id):
    '''
    This takes the file name and id that were submitted and unzips the files,
    finds the index maps, and creates kmls.

    The model archive referenced by the job is downloaded and extracted into
    the user's workspace, read into the GSSHAPY database, and every index map
    is rendered as a kml and uploaded to the CKAN "index-maps" dataset. The
    resulting urls are stored on the job before redirecting to select_index.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Get url for the resource and extract the GSSHA file
    url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)

    # Create GSSHAPY Session
    gsshapy_session = gsshapy_sessionmaker()

    # Find the project file. If several .prj files are present the last one
    # encountered wins (unchanged from the original behavior).
    project_name = None
    read_dir = None
    for root, dirs, files in os.walk(userDir):
        for file in files:
            if file.endswith(".prj"):
                project_name = file
                project_path = os.path.join(root, file)
                read_dir = os.path.dirname(project_path)

    # Fail with a clear message instead of an unbound-variable NameError when
    # the extracted archive does not contain a GSSHA project file.
    if project_name is None:
        raise ValueError(
            "No GSSHA project (.prj) file found in extracted model at %s" % userDir)

    # Create an empty Project File Object and read the model into the database
    project = ProjectFile()

    project.readInput(directory=read_dir,
                      projectFileName=project_name,
                      session=gsshapy_session,
                      spatial=True)

    # Create empty dictionary to hold the kmls from this session
    current_kmls = {}

    # Store model information
    job.new_model_name = project.name
    job.new_model_id = project.id
    job.created = datetime.now()

    # Get index maps
    index_list = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project.mapTableFile).all()

    # Render each index map as a clustered kml and upload it to CKAN
    for current_index in index_list:
        # Create kml file name and path (timestamped to stay unique per run)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = current_index.name + "_" + str(
            user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        current_index.getAsKmlClusters(session=gsshapy_session,
                                       path=clusterFile,
                                       colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                       alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)

        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        # Only record kmls that uploaded successfully, keyed by index name
        if status == True:
            current_kmls[current_index.name] = {
                'url': resource['url'],
                'full_name': resource['name']
            }

    # Add the kmls with their url to the database
    job.current_kmls = json.dumps(current_kmls)
    session.commit()
    session.close()
    gsshapy_session.close()

    context['job_id'] = job_id

    return redirect(
        reverse('gsshaindex:select_index', kwargs={'job_id': job_id}))
# Exemple #13
# 0
def home(request):
    """
    Controller for the app home page.

    Lists the GSSHA models found in the CKAN 'gssha-models' dataset,
    registering any new ones as jobs, and renders a map of the first model's
    mask. Posting a non-empty 'file_id' marks that job pending and redirects
    to the extraction step.
    """
    context = {}
    user = str(request.user)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)
    # One jobs session serves the whole request. (The original called
    # jobs_sessionmaker() twice and leaked the first session.)
    session = jobs_sessionmaker()

    # Check to see if there's a models package
    present = gi_lib.check_package('gssha-models', CKAN_engine)

    # Check if the submit button has been clicked and if there is a file_id
    if ('file_id' in request.POST):
        params = request.POST
        file_id = params['file_id']
        if file_id == "":
            messages.error(request, 'No GSSHA Model is selected')
        else:
            # Mark the job pending under a timestamped id and hand off
            job, success = gi_lib.get_new_job(file_id, user, session)
            job.status = "pending"
            date = datetime.now()
            formatted_date = '{:%Y-%m-%d-%H-%M-%S}'.format(date)
            new_id = file_id + "-" + formatted_date
            job.original_id = new_id
            session.commit()
            return redirect(
                reverse('gsshaindex:extract_gssha', kwargs={'job_id': new_id}))

    # Find all the GSSHA models in the datasets
    results = CKAN_engine.get_dataset('gssha-models')
    resources = results['result']['resources']

    # Create empty array to hold the information for the GSSHA models
    model_list = []

    no_files = True

    # Fill the array with information on the GSSHA models
    if len(resources) > 0:
        no_files = False
        for result in resources:
            file_id = result.get('id')
            # Check to see if the files are already in the database and add them if they aren't
            job, success = gi_lib.get_new_job(file_id, user, session)
            if success == False:
                if result['description'] == "":
                    file_description = "None"
                else:
                    file_description = result['description']
                file_name = result.get('name')
                file_url = result.get('url')
                file_certification = result.get('certification')
                new_job = Jobs(name=file_name,
                               user_id=user,
                               original_description=file_description,
                               original_id=file_id,
                               original_url=file_url,
                               original_certification=file_certification)
                session.add(new_job)
                session.commit()
                context['job_id'] = new_job.id
            else:
                file_name = job.original_name
                file_description = job.original_description

            model_list.append((file_name, file_id, file_description))

    if no_files == False:
        # Display a google map with the first kml
        first_file_id = model_list[0][1]

        google_map = {
            'height':
            '500px',
            'width':
            '100%',
            'reference_kml_action':
            '/apps/gsshaindex/get-mask-map/' + str(first_file_id),
            'maps_api_key':
            maps_api_key
        }
        # Populate the drop-down menu
        select_model = {
            'display_text': 'Select GSSHA Model',
            'name': 'select_a_model',
            'multiple': False,
            'options': model_list,
            'initial': model_list[0]
        }

    else:
        # No models available: display a bare google map and no selector
        google_map = {
            'height': '500px',
            'width': '100%',
            'maps_api_key': maps_api_key
        }
        select_model = ""

    context['model_list'] = model_list
    context['google_map'] = google_map
    context['select_model'] = select_model

    #Display the index page
    return render(request, 'gsshaindex/home.html', context)
# Exemple #14
# 0
def get_mask_map(request, file_id):
    """
    This action is used to pass the kml data to the google map.
    It must return a JSON response with a Python dictionary that
    has the key 'kml_links'.

    The job's cached kml url is used when available; otherwise the mask map
    is extracted from the model archive, converted to a kml, uploaded to
    CKAN, and the resulting url cached on the job.
    """
    kml_links = []
    session = jobs_sessionmaker()
    user = str(request.user)
    job, success = gi_lib.get_new_job(file_id, user, session)

    if job.kml_url is not None:
        kml_links.append(job.kml_url)
        #TODO Need some way to check and see if the link works or if it's broken
        return JsonResponse({'kml_links': kml_links})
    else:
        # Check that there's a package to store kmls
        CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                         app_class=GSSHAIndex)
        present = gi_lib.check_package('kmls', CKAN_engine)

        # Specify the workspace
        controllerDir = os.path.abspath(os.path.dirname(__file__))
        gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
        publicDir = os.path.join(gsshaindexDir, 'public')
        userDir = os.path.join(publicDir, str(user))

        # Clear the workspace
        gi_lib.clear_folder(userDir)

        url = job.original_url
        maskMapDir = os.path.join(userDir, 'mask_maps')
        extractPath = os.path.join(maskMapDir, file_id)
        mask_file = gi_lib.extract_mask(url, extractPath)
        if mask_file == "blank":
            # No mask in the archive: cache an empty url so we don't retry
            job.kml_url = ''
            session.commit()
            return JsonResponse({'kml_links': ''})
        else:
            projection_file = gi_lib.extract_projection(url, extractPath)

            # Set up kml file name and save location
            name = job.original_name
            norm_name = name.replace(" ", "")
            current_time = time.strftime("%Y%m%dT%H%M%S")
            kml_name = norm_name + "_" + user + "_" + current_time
            kml_ext = kml_name + ".kml"
            kml_file = os.path.join(extractPath, kml_ext)

            # Pick one random display color for the mask
            colors = [(237, 9, 222), (92, 245, 61), (61, 184, 245),
                      (171, 61, 245), (250, 245, 105), (245, 151, 44),
                      (240, 37, 14), (88, 5, 232), (5, 232, 190),
                      (11, 26, 227)]
            color = [random.choice(colors)]

            # Extract mask map and create kml
            gsshapy_session = gsshapy_sessionmaker()
            if projection_file != "blank":
                srid = ProjectionFile.lookupSpatialReferenceID(
                    extractPath, projection_file)
            else:
                # No projection file supplied: fall back to a default srid
                srid = 4302
            mask_map = RasterMapFile()
            mask_map.read(directory=extractPath,
                          filename=mask_file,
                          session=gsshapy_session,
                          spatial=True,
                          spatialReferenceID=srid)
            mask_map.getAsKmlClusters(session=gsshapy_session,
                                      path=kml_file,
                                      colorRamp={
                                          'colors': color,
                                          'interpolatedPoints': 1
                                      })

            mask_map_dataset = gi_lib.check_dataset("mask-maps", CKAN_engine)

            # Add mask kml to CKAN for viewing
            resource, success = gi_lib.add_kml_CKAN(mask_map_dataset,
                                                    CKAN_engine, kml_file,
                                                    kml_name)

            # Check to ensure the resource was added and save it to database by adding "kml_url"
            if success == True:
                job.kml_url = resource['url']
                session.commit()
                kml_links.append(job.kml_url)
                return JsonResponse({'kml_links': kml_links})

            # The original fell off the end here and the view returned None
            # (an invalid Django response); return an empty payload instead.
            return JsonResponse({'kml_links': []})
# Exemple #15
# 0
def get_depth_map(request, job_id, view_type):
    """
    Return the depth-map kml link for the requested view as JSON.

    view_type selects one of four cached depth maps on the job (newTime,
    newMax, originalTime, originalMax). Missing maps are generated from the
    job's result urls, uploaded via CKAN, and cached on the job. The response
    is a JSON object with the key 'kml_links'.
    """
    context = {}

    # Get the user id
    user = str(request.user)

    # Get the job from the database
    session = jobs_sessionmaker()
    job = session.query(Jobs).\
                    filter(Jobs.user_id == user).\
                    filter(Jobs.original_id == job_id).one()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    depthMapDir = os.path.join(userDir, 'depth_maps')
    newDepthDir = os.path.join(depthMapDir, 'new')
    originalDepthDir = os.path.join(depthMapDir, 'original')

    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    kml_link = []

    if view_type == "newTime":
        if job.newTime:
            kml_link.append(job.newTime)
        else:
            result_url = job.result_urls['new']
            result = gi_lib.prepare_time_depth_map(user, result_url, job,
                                                   newDepthDir, CKAN_engine)
            job.newTime = result['url']
            session.commit()
            kml_link.append(result['url'])

    elif view_type == "newMax":
        if job.newMax:
            kml_link.append(job.newMax)
        else:
            result_url = job.result_urls['new']
            result = gi_lib.prepare_max_depth_map(user, result_url, job,
                                                  newDepthDir, CKAN_engine)
            job.newMax = result['url']
            session.commit()
            kml_link.append(result['url'])

    elif view_type == "originalTime":
        if job.originalTime:
            kml_link.append(job.originalTime)
        else:
            result_url = job.result_urls['original']
            result = gi_lib.prepare_time_depth_map(user, result_url, job,
                                                   originalDepthDir,
                                                   CKAN_engine)
            job.originalTime = result['url']
            session.commit()
            kml_link.append(result['url'])

    elif view_type == "originalMax":
        if job.originalMax:
            kml_link.append(job.originalMax)
        else:
            result_url = job.result_urls['original']
            result = gi_lib.prepare_max_depth_map(user, result_url, job,
                                                  originalDepthDir,
                                                  CKAN_engine)
            job.originalMax = result['url']
            session.commit()
            kml_link.append(result['url'])

    else:
        # BUG FIX: list.append returns None, so the original
        # `kml_link = kml_link.append("")` rebound kml_link to None and the
        # response carried 'kml_links': None instead of [""].
        kml_link.append("")

    session.close()

    return JsonResponse({'kml_links': kml_link})
def replace_index_with_shapefile(request, job_id, index_name, shapefile_name):
    """
    Controller to replace the index map with the selected shapefile.

    Fetches the shapefile from GeoServer as GeoJSON, burns each polygon into
    the named GSSHA index map raster via PostGIS ST_SetValue, creates
    mapping-table entries for polygon ids not already present, prunes
    mapping-table indices whose values no longer occur in the raster,
    regenerates the index-map kml, uploads it to CKAN, and redirects to the
    edit-index page.

    NOTE(review): a function with this exact name is defined again later in
    this module; in Python the later definition shadows this one.
    """
    context = {}
    user = str(request.user)

    geojson = get_geojson_from_geoserver(user, shapefile_name)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create a session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()

    mapTables = index_raster.mapTables

    # Proceed only when GeoServer returned usable GeoJSON
    if geojson['success'] != False:
        geojson_result = geojson['geojson']

        # Get existing indices
        index_raster_indices = index_raster.indices

        srid_name = geojson['crs']

        project_file_srid = project_file.srid

        # Raster values for the incoming polygons start at 200 and count up.
        # NOTE(review): 'id' shadows the builtin of the same name here.
        id = 200

        # Loop through each geometry
        for object in geojson_result:
            index_present = False
            object_id = object['id']

            # Check to see if the index is present
            for index in index_raster_indices:
                if object_id == index.index:
                    index_present = True
                    break

            # Create new index value if it doesn't exist and add the number of ids
            if index_present == False:
                new_indice = MTIndex(id, object_id,"")
                new_indice.indexMap = index_raster
                # Give every mapping table a zeroed value row for each of its
                # distinct variables under the new index
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                     filter(MTValue.mapTable == mapping_table).\
                                     order_by(MTValue.variable).\
                                     all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])

                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
                    gsshapy_session.commit()

            # Tag the geometry with the source CRS before serializing
            geom = object['geometry']
            geom['crs'] = srid_name
            geom_full = json.dumps(geom)

            # Change values in the index map
            # Burn this polygon into the raster, transformed to the project srid
            change_index_values = "SELECT ST_SetValue(raster,1,ST_Transform(ST_GeomFromGeoJSON('{0}'), {1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(str(geom_full), project_file_srid, id, index_raster.id)
            result = gi_lib.timeout(gi_lib.draw_update_index, args=(change_index_values,index_raster.id), kwargs={}, timeout=10, result_can_be_pickled=True, default=None)

            # If there is a timeout, close both sessions and send the user back
            if result == None:
                messages.error(request, 'The submission timed out. Please try again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id

                return redirect(reverse('gsshaindex:shapefile_index', kwargs={'job_id':job_id, 'index_name':index_name, 'shapefile_name':shapefile_name}))

            id += 1

        # Get the values in the index map
        # NOTE(review): unicode() implies this module targets Python 2.
        statement3 = '''SELECT (pvc).*
                        FROM (SELECT ST_ValueCount(raster,1,true) As pvc
                        FROM idx_index_maps WHERE id = '''+ unicode(index_raster.id) +''') AS foo
                        ORDER BY (pvc).value;
                        '''
        result3 = gsshapy_engine.execute(statement3)

        # Collect the distinct raster values actually present after burning
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs +=1
            ids.append(row.value)

        # Drop mapping-table indices (and their values) whose raster value
        # no longer occurs, and update each table's id count
        map_table_count = 0
        for mapping_table in mapTables:

            index_raster.mapTables[map_table_count].numIDs = numberIDs

            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapping_table).\
                                   order_by(MTIndex.index).\
                                   all()

            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
            gsshapy_session.commit()
            map_table_count +=1

        # Re-fetch the (now modified) index map before rendering the kml
        index_raster =  gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()

        # Create kml file name and path
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        temp_list = json.loads(job.current_kmls)

        # Swap the stale kml entry for this index with the freshly uploaded one
        # (the break right after the mutation keeps the iteration safe)
        if status == True:
            for item in temp_list:
                    if item == index_name:
                        del temp_list[item]
                        temp_list[index_name] = {'url':resource['url'], 'full_name':resource['name']}
                        break

        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id':job_id, 'index_name':index_name}))
# Exemple #17
# 0
def zip_file(request, job_id):
    '''
    This zips up the GSSHA files in preparation of their being run.

    The new model name and description come from the POST data. The project
    files are written out under a sanitized name, zipped, and uploaded to the
    CKAN "gssha-models" dataset; on success the job is marked ready to run.
    Always redirects to the status page.
    '''
    context = {}

    # Get the job id and user id
    job_id = job_id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    project_file_id = job.new_model_id

    # Get the name and description from the submission
    params = request.POST
    not_clean_name = params['new_name']
    new_description = params['new_description']

    # Reformat the name by removing bad characters.
    # BUG FIX: the original replaced on not_clean_name in every iteration,
    # so only the final character of bad_char was ever stripped. Accumulate
    # the replacements instead.
    # bad_char = "',.<>()[]{}=+-/\"|:;\\^?!~`@#$%&* "
    bad_char = "',.<>[]{}=+-/\"|:;\\^?!~`@#$%&*"
    new_name = not_clean_name
    for char in bad_char:
        new_name = new_name.replace(char, "_")

    #Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Get project from the database
    projectFileAll = gsshapy_session.query(ProjectFile).get(project_file_id)

    # Create name for files
    project_name = projectFileAll.name
    if project_name.endswith('.prj'):
        project_name = project_name[:-4]
    pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

    # Set depth map card if the project is missing one
    if projectFileAll.getCard("FLOOD_GRID") == None:
        max_depth_card = ProjectCard("FLOOD_GRID",
                                     '"{0}.gfl"'.format(new_name))
        project_cards = projectFileAll.projectCards.append(max_depth_card)
        gsshapy_session.commit()
        job.original_certification = "Missing gfl"

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    newFileDir = os.path.join(userDir, 'newFile')
    writeFile = os.path.join(newFileDir, new_name)
    zipPath = os.path.join(newFileDir, new_name + "_zip")

    # Clear workspace folders
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(newFileDir)
    gi_lib.clear_folder(writeFile)

    # Get all the project files
    projectFileAll.writeInput(session=gsshapy_session,
                              directory=writeFile,
                              name=new_name)

    # Make a list of the project files
    writeFile_list = os.listdir(writeFile)

    # Add each project file to the zip folder
    with zipfile.ZipFile(zipPath, "w") as gssha_zip:
        for item in writeFile_list:
            abs_path = os.path.join(writeFile, item)
            archive_path = os.path.join(new_name, item)
            gssha_zip.write(abs_path, archive_path)

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Add the zipped GSSHA file to the public ckan
    results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath,
                                            CKAN_engine, new_name,
                                            new_description, pretty_date, user)

    # If the file uploads correctly, store its information on the job.
    # BUG FIX: the original built model_data unconditionally, raising a
    # NameError on upload failure; now the job is only updated on success.
    if success == True:
        new_url = results['url']
        new_name = results['name']
        original_url = job.original_url
        original_name = job.original_name

        model_data = {
            'original': {
                'url': original_url,
                'name': original_name
            },
            'new': {
                'url': new_url,
                'name': new_name
            }
        }
        job.run_urls = model_data
        job.new_name = new_name
        job.status = "ready to run"
        session.commit()

    return redirect(reverse('gsshaindex:status'))
# Exemple #18
# 0
def replace_index_with_shapefile(request, job_id, index_name, shapefile_name):
    """
    Controller to replace the index map with the selected shapefile.

    Fetches the shapefile from GeoServer as GeoJSON, burns each polygon into
    the named GSSHA index map raster via PostGIS ST_SetValue, creates
    mapping-table entries for polygon ids not already present, prunes
    mapping-table indices whose values no longer occur in the raster,
    regenerates the index-map kml, uploads it to CKAN, and redirects to the
    edit-index page.

    NOTE(review): this is a duplicate definition — an earlier function with
    the same name exists in this module; this later one shadows it.
    """
    context = {}
    user = str(request.user)

    geojson = get_geojson_from_geoserver(user, shapefile_name)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create a session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()

    mapTables = index_raster.mapTables

    # Proceed only when GeoServer returned usable GeoJSON
    if geojson['success'] != False:
        geojson_result = geojson['geojson']

        # Get existing indices
        index_raster_indices = index_raster.indices

        srid_name = geojson['crs']

        project_file_srid = project_file.srid

        # Raster values for the incoming polygons start at 200 and count up.
        # NOTE(review): 'id' shadows the builtin of the same name here.
        id = 200

        # Loop through each geometry
        for object in geojson_result:
            index_present = False
            object_id = object['id']

            # Check to see if the index is present
            for index in index_raster_indices:
                if object_id == index.index:
                    index_present = True
                    break

            # Create new index value if it doesn't exist and add the number of ids
            if index_present == False:
                new_indice = MTIndex(id, object_id, "")
                new_indice.indexMap = index_raster
                # Give every mapping table a zeroed value row for each of its
                # distinct variables under the new index
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                     filter(MTValue.mapTable == mapping_table).\
                                     order_by(MTValue.variable).\
                                     all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])

                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
                    gsshapy_session.commit()

            # Tag the geometry with the source CRS before serializing
            geom = object['geometry']
            geom['crs'] = srid_name
            geom_full = json.dumps(geom)

            # Change values in the index map
            # Burn this polygon into the raster, transformed to the project srid
            change_index_values = "SELECT ST_SetValue(raster,1,ST_Transform(ST_GeomFromGeoJSON('{0}'), {1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(str(geom_full), project_file_srid, id, index_raster.id)
            result = gi_lib.timeout(gi_lib.draw_update_index,
                                    args=(change_index_values,
                                          index_raster.id),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=True,
                                    default=None)

            # If there is a timeout, close both sessions and send the user back
            if result == None:
                messages.error(request,
                               'The submission timed out. Please try again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id

                return redirect(
                    reverse('gsshaindex:shapefile_index',
                            kwargs={
                                'job_id': job_id,
                                'index_name': index_name,
                                'shapefile_name': shapefile_name
                            }))

            id += 1

        # Get the values in the index map
        # NOTE(review): unicode() implies this module targets Python 2.
        statement3 = '''SELECT (pvc).*
                        FROM (SELECT ST_ValueCount(raster,1,true) As pvc
                        FROM idx_index_maps WHERE id = ''' + unicode(
            index_raster.id) + ''') AS foo
                        ORDER BY (pvc).value;
                        '''
        result3 = gsshapy_engine.execute(statement3)

        # Collect the distinct raster values actually present after burning
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs += 1
            ids.append(row.value)

        # Drop mapping-table indices (and their values) whose raster value
        # no longer occurs, and update each table's id count
        map_table_count = 0
        for mapping_table in mapTables:

            index_raster.mapTables[map_table_count].numIDs = numberIDs

            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapping_table).\
                                   order_by(MTIndex.index).\
                                   all()

            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
            gsshapy_session.commit()
            map_table_count += 1

        # Re-fetch the (now modified) index map before rendering the kml
        index_raster = gsshapy_session.query(IndexMap).filter(
            IndexMap.mapTableFile == project_file.mapTableFile).filter(
                IndexMap.name == index_name).one()

        # Create kml file name and path
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(
            user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session,
                                      path=clusterFile,
                                      colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                      alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        temp_list = json.loads(job.current_kmls)

        # Swap the stale kml entry for this index with the freshly uploaded one
        # (the break right after the mutation keeps the iteration safe)
        if status == True:
            for item in temp_list:
                if item == index_name:
                    del temp_list[item]
                    temp_list[index_name] = {
                        'url': resource['url'],
                        'full_name': resource['name']
                    }
                    break

        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(
        reverse('gsshaindex:edit_index',
                kwargs={
                    'job_id': job_id,
                    'index_name': index_name
                }))
# Exemple #19
# 0
def fly(request, job_id):
    """
    Run GSSHA for both the 'original' and 'new' models of a job and store
    the result URLs on the job record.

    For each model the zipped project at its stored URL is submitted via
    ``gi_lib.flyGssha`` and the results archive is pushed to the
    "gssha-models" CKAN dataset.  An original model already marked
    "Certified" is not re-run (its URL is reused).  One marked
    "Missing gfl" first gets a FLOOD_GRID card added, is re-zipped and
    re-uploaded to CKAN, then run and marked "Certified".

    Redirects to the gsshaindex status page when finished.
    """
    context = {}

    # Get the user id and a CKAN engine for dataset access
    user = str(request.user)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # Specify the workspace (per-user scratch folders under public/)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    resultsPath = os.path.join(userDir, 'results')
    originalFileRunPath = os.path.join(userDir, "preRun")
    writeFile = os.path.join(userDir, "writeFile")
    zipPath = os.path.join(userDir, "zipPath")

    # Create a GSSHAPY session for reading/writing project files
    gsshapy_session = gsshapy_sessionmaker()

    # Clear the scratch folders left over from any previous run
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(resultsPath)
    gi_lib.clear_folder(originalFileRunPath)
    gi_lib.clear_folder(writeFile)

    # Get the job from the jobs database
    session = jobs_sessionmaker()
    job = session.query(Jobs).\
                    filter(Jobs.user_id == user).\
                    filter(Jobs.original_id == job_id).one()

    # Get the urls and names for the analysis
    run_urls = job.run_urls

    arguments = {
        'new': {
            'url': run_urls['new']['url'],
            'name': run_urls['new']['name']
        },
        'original': {
            'url': run_urls['original']['url'],
            'name': run_urls['original']['name']
        }
    }

    # Mark the job as in-progress before kicking off the runs
    job.status = "processing"
    session.commit()

    status = 'complete'

    results = []
    # results_urls = []
    results_urls = {}
    count = 0

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Try running the web service
    # NOTE(review): the try/except around this loop is commented out (see
    # the commented `except` near the bottom), so any failure currently
    # propagates instead of setting status = 'failed'.
    # try:
    for k in arguments:
        url = str(arguments[k]['url'])

        if k == 'original' and job.original_certification == "Certified":
            # Original model already ran and was certified; reuse its URL.
            results_urls['original'] = url
            count += 1
            continue
        elif k == 'original' and job.original_certification == "Missing gfl":
            # Need to download from url, add gfl, zip, send to ckan, run, and save the url
            downloaded_project = gi_lib.extract_zip_from_url(
                user, url, originalFileRunPath)
            # Find the project (.prj) file in the extracted archive.
            # NOTE(review): if no .prj is found, project_name/read_dir are
            # unbound and the readInput call below raises NameError.
            for root, dirs, files in os.walk(originalFileRunPath):
                for file in files:
                    if file.endswith(".prj"):
                        project_name = file
                        project_path = os.path.join(root, file)
                        read_dir = os.path.dirname(project_path)
            # Read the project into an empty GSSHAPY ProjectFile object
            project = ProjectFile()
            project.readInput(directory=read_dir,
                              projectFileName=project_name,
                              session=gsshapy_session,
                              spatial=True)

            # Add a FLOOD_GRID card (the .gfl output file) if it is missing
            if project.getCard("FLOOD_GRID") == None:
                max_depth_card = ProjectCard(
                    "FLOOD_GRID", '"{0}.gfl"'.format(project_name[:-4]))
                # NOTE(review): list.append returns None, so project_cards
                # is always None here; the append itself is the effect.
                project_cards = project.projectCards.append(max_depth_card)
                gsshapy_session.commit()

            # Write all of the project files back out to the scratch folder
            project.writeInput(session=gsshapy_session,
                               directory=writeFile,
                               name=project_name[:-4])

            # Make a list of the project files
            writeFile_list = os.listdir(writeFile)

            # Add each project file to the zip folder
            with zipfile.ZipFile(zipPath, "w") as gssha_zip:
                for item in writeFile_list:
                    abs_path = os.path.join(writeFile, item)
                    archive_path = os.path.join(project_name, item)
                    gssha_zip.write(abs_path, archive_path)

            GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

            # NOTE(review): missing a separating space before "with a gfl added"
            description = job.original_description + "with a gfl added"
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

            # Add the zipped GSSHA file to the public ckan
            results, success = gi_lib.add_zip_GSSHA(
                GSSHA_dataset, zipPath, CKAN_engine,
                project_name[:-4] + " with gfl", description, pretty_date,
                user)

            job.original_url = results['url']

            url = job.original_url

            # Timestamped local path for the run results
            resultsFile = os.path.join(
                resultsPath, arguments[k]['name'].replace(" ", "_") +
                datetime.now().strftime('%Y%d%m%H%M%S'))

            # Run the model through the GSSHA web service
            gi_lib.flyGssha(str(url), resultsFile)

            # Push file to ckan dataset
            resource_name = ' '.join(
                (arguments[k]['name'], '-Run',
                 datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset,
                                                   resultsFile,
                                                   CKAN_engine,
                                                   resource_name,
                                                   "",
                                                   pretty_date,
                                                   user,
                                                   certification="Certified")

            # Save the new url as the original_url and run

            job.original_certification = "Certified"

            # Publish link to table
            results_urls['original'] = result['url']
            count += 1
        else:
            # Normal case: run the model straight from its stored URL
            resultsFile = os.path.join(
                resultsPath, arguments[k]['name'].replace(" ", "_") +
                datetime.now().strftime('%Y%d%m%H%M%S'))
            gi_lib.flyGssha(url, resultsFile)

            # Push file to ckan dataset
            resource_name = ' '.join(
                (arguments[k]['name'], '-Run',
                 datetime.now().strftime('%b %d %y %H:%M:%S')))
            pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")
            result, success = gi_lib.add_zip_GSSHA(GSSHA_dataset,
                                                   resultsFile,
                                                   CKAN_engine,
                                                   resource_name,
                                                   "",
                                                   pretty_date,
                                                   user,
                                                   certification="Certified")

            # Publish link to table
            if k == 'original':
                results_urls['original'] = result['url']
                job.original_certification = "Certified"
            else:
                results_urls['new'] = result['url']
            count += 1

    # Both runs must have produced a URL for the job to be complete
    if (count == 2):
        # NOTE(review): Python 2 print statement; looks like debug leftover
        print results_urls
    else:
        status = 'failed'

    # except:
    #     status = 'failed'

    # Persist final status and result URLs, then release both sessions
    job.status = status
    job.result_urls = results_urls
    session.commit()
    session.close()
    gsshapy_session.commit()
    gsshapy_session.close()

    return redirect(reverse('gsshaindex:status'))
Exemple #20
0
def get_mask_map(request, file_id):
    """
    Pass the mask-map KML data for a job to the Google map.

    Always returns a JsonResponse containing the key 'kml_links'.  A KML
    URL already cached on the job is returned directly; otherwise the
    GSSHA archive is downloaded, the mask map is extracted and converted
    to KML, the KML is uploaded to CKAN, and the resulting URL is cached
    on the job and returned.
    """
    kml_links = []
    session = jobs_sessionmaker()
    user = str(request.user)
    job, success = gi_lib.get_new_job(file_id, user, session)

    # Reuse a previously generated KML if one is stored on the job
    if job.kml_url is not None:
        kml_links.append(job.kml_url)
        # TODO: Need some way to check and see if the link works or if it's broken
        return JsonResponse({'kml_links': kml_links})

    # Check that there's a package to store kmls
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)
    present = gi_lib.check_package('kmls', CKAN_engine)

    # Specify the per-user workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))

    # Clear the workspace
    gi_lib.clear_folder(userDir)

    # Download the archive and pull out the mask map
    url = job.original_url
    maskMapDir = os.path.join(userDir, 'mask_maps')
    extractPath = os.path.join(maskMapDir, file_id)
    mask_file = gi_lib.extract_mask(url, extractPath)
    if mask_file == "blank":
        # No mask map in the archive; cache the empty result so we don't retry
        job.kml_url = ''
        session.commit()
        return JsonResponse({'kml_links': ''})

    projection_file = gi_lib.extract_projection(url, extractPath)

    # Set up kml file name and save location
    name = job.original_name
    norm_name = name.replace(" ", "")
    current_time = time.strftime("%Y%m%dT%H%M%S")
    kml_name = norm_name + "_" + user + "_" + current_time
    kml_ext = kml_name + ".kml"
    kml_file = os.path.join(extractPath, kml_ext)

    # Pick a random display color for the mask polygons
    colors = [(237, 9, 222), (92, 245, 61), (61, 184, 245), (171, 61, 245),
              (250, 245, 105), (245, 151, 44), (240, 37, 14), (88, 5, 232),
              (5, 232, 190), (11, 26, 227)]
    color = [random.choice(colors)]

    # Extract mask map and create kml
    gsshapy_session = gsshapy_sessionmaker()
    if projection_file != "blank":
        srid = ProjectionFile.lookupSpatialReferenceID(extractPath, projection_file)
    else:
        # Fall back to a default SRID when no projection file is present
        srid = 4302
    mask_map = RasterMapFile()
    mask_map.read(directory=extractPath, filename=mask_file,
                  session=gsshapy_session, spatial=True, spatialReferenceID=srid)
    mask_map.getAsKmlClusters(session=gsshapy_session, path=kml_file,
                              colorRamp={'colors': color, 'interpolatedPoints': 1})

    mask_map_dataset = gi_lib.check_dataset("mask-maps", CKAN_engine)

    # Add mask kml to CKAN for viewing
    resource, success = gi_lib.add_kml_CKAN(mask_map_dataset, CKAN_engine, kml_file, kml_name)

    # Check to ensure the resource was added and save it to database by adding "kml_url"
    if success == True:
        job.kml_url = resource['url']
        session.commit()
        kml_links.append(job.kml_url)

    # BUG FIX: previously the function fell off the end and returned None
    # (an invalid Django response) when the CKAN upload failed; always
    # return the required 'kml_links' key.
    return JsonResponse({'kml_links': kml_links})
def combine_index(request, job_id, index_name):
    """
    Controller for merging two index maps (or replacing one with another).

    GET: renders the combine_index page with two selects listing the job's
    current index-map KMLs.
    POST: combines the selected raster(s) into the index map named
    ``index_name`` with PostGIS ST_MapAlgebra (first map's IDs * 1000 +
    second map's IDs, or * 1000 alone for a replacement), rebuilds the
    map-table indices/values to match the combined raster, regenerates
    the KML, uploads it to CKAN, and redirects to the mapping-table page.
    """
    context = {}
    user = str(request.user)

    # Offset added when renumbering combined index IDs (see numberIDs below)
    ID_OFFSET = 10

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id and gsshapy_session
    project_file_id = job.new_model_id
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files
    resource_list = json.loads(job.current_kmls)

    # Create blank array for names and urls
    resource_names = []
    resource_url = []
    resource_info = []

    # Get array of names and urls
    for key in resource_list:
        resource_names.append(key)
        resource_url.append(resource_list[key]['url'])
        resource_info.append((key,key))

    select_input1 = {'display_text': "Select first index map",
                       'name': 'select1',
                       'multiple': False,
                       'options': resource_info}

    select_input2 = {'display_text': "Select second index map or none",
                       'name': 'select2',
                       'multiple': False,
                       'options': [("None", "none")] + resource_info}

    # if the next button was pressed
    if request.POST:
        params = request.POST
        # Error message if both maps selected are the same
        if params['select1'] == params['select2']:
            result = ""
            messages.error(request, "You must select two different index maps. Or if you'd like to replace this map with a different map, select None for the second option")
        # Process if only one map is selected (replace this map's raster)
        elif params['select2'] == "none":
            select1_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select1']).one()
            # NOTE(review): SQL is built by string concatenation; the
            # interpolated values are SQLAlchemy-loaded integer primary
            # keys, but parameterized queries would still be safer.
            statement = '''UPDATE idx_index_maps
                                  Set raster = ST_MapAlgebra(
                                  (SELECT raster FROM idx_index_maps WHERE id = '''+ unicode(select1_id.id) +'''), 1,
                                  (SELECT raster FROM idx_index_maps WHERE id = '''+ unicode(new_index.id) +'''), 1,
                                  '([rast1]*1000+ [rast2]*0)'
                                  )
                                WHERE id = '''+ unicode(new_index.id) +''';
                            '''
            # Guard against long-running raster algebra with a 10 s timeout
            result = gi_lib.timeout(gsshapy_engine.execute, args=(statement,), kwargs={}, timeout=10, result_can_be_pickled=False, default=None)
        # Process if two maps are selected
        else:
            # Get the ids for the two index maps to be combined
            select1_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select1']).one()
            select2_id = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == params['select2']).one()
            # Combine the maps and give a unique id
            # (combined cell value = first map ID * 1000 + second map ID)
            statement = '''UPDATE idx_index_maps
                              SET raster =ST_MapAlgebra(
                              (SELECT raster FROM idx_index_maps WHERE id = '''+ unicode(select1_id.id) +'''), 1,
                              (SELECT raster FROM idx_index_maps WHERE id = '''+ unicode(select2_id.id) +'''), 1,
                              '(([rast1]*1000) + [rast2])'
                              )
                            WHERE id = '''+ unicode(new_index.id) +''';
                        '''
            result = gi_lib.timeout(gsshapy_engine.execute, args=(statement,), kwargs={}, timeout=10, result_can_be_pickled=False, default=None)

        if result != "":
            # Get the distinct cell values now present in the combined raster
            statement3 = '''SELECT (pvc).*
                            FROM (SELECT ST_ValueCount(raster,1,true) As pvc
                            FROM idx_index_maps WHERE id = '''+ unicode(new_index.id) +''') AS foo
                            ORDER BY (pvc).value;
                            '''
            new_indice_values = gsshapy_engine.execute(statement3)

            # Get the indices for the index being changed
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapTables[0]).\
                                   order_by(MTIndex.index).\
                                   all()

            # Go through the map tables that use the index map
            map_table_count = 0
            for mapping_table in mapTables:

                # Reset the number of ids to start counting them
                numberIDs = ID_OFFSET
                ids = []

                # Go through each new id value
                for row in new_indice_values:
                    index_present = False
                    numberIDs +=1
                    ids.append(row.value)
                    large_id = int(row[0])
                    # Check whether this raster value already has an MTIndex
                    for index in new_index.indices:
                        if int(index.index) == int(row[0]):
                            index_present = True
                            break

                    if index_present == False:
                        # Split the combined value back into its two source
                        # IDs (first * 1000 + second).
                        # NOTE(review): splitting on "0" assumes the second
                        # ID contains no zero digits — TODO confirm.
                        if str(large_id).endswith("000") == False:
                            second_id = str(large_id).split("0")[-1]
                            first_id = (large_id - int(second_id))/1000
                        else:
                            first_id = (large_id)/1000
                            second_id = ""
                            description2 = ""

                        # Carry over the descriptions from the first source map
                        pastinfo1 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                filter(MTIndex.idxMapID == select1_id.id).\
                                filter(MTIndex.index == first_id).\
                                all()
                        description1 = pastinfo1[0].description1 + " " + pastinfo1[0].description2

                        # ... and from the second source map, if there was one
                        if second_id != "":
                            pastinfo2 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                    filter(MTIndex.idxMapID == select2_id.id).\
                                    filter(MTIndex.index == second_id).\
                                    all()
                            description2 = pastinfo2[0].description1 + " " + pastinfo2[0].description2

                        # Query for the pixel values of row[0] and replace with numberIDs
                        pixel_query = '''SELECT ST_PixelOfValue((SELECT raster FROM idx_index_maps WHERE id = {0}), {1});'''.format(unicode(new_index.id), row[0])
                        pixels = gsshapy_session.execute(pixel_query)

                        # Rewrite each matching pixel to the renumbered ID
                        for pixel in pixels:
                            coord = pixel[0].strip("()")
                            x, y = coord.split(",")
                            update_query = '''UPDATE idx_index_maps
                                              SET raster = (SELECT ST_SetValue(raster,{1},{2},{3}) FROM idx_index_maps WHERE id = {0})
                                              WHERE id = {0};'''.format(unicode(new_index.id), int(x), int(y), numberIDs)
                            new_result = gsshapy_session.execute(update_query)

                        # Create new index value
                        new_indice = MTIndex(numberIDs, description1, description2)
                        # new_indice = MTIndex(row[0], description1, description2)
                        new_indice.indexMap = new_index
                        # NOTE(review): this inner loop reuses (shadows) the
                        # outer `mapping_table` variable — TODO confirm intent.
                        for mapping_table in mapTables:
                            distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                             filter(MTValue.mapTable == mapping_table).\
                                             order_by(MTValue.variable).\
                                             all()

                            variables = []
                            for var in distinct_vars:
                                variables.append(var[0])

                            # Seed a zero MTValue for every variable of the table
                            for variable in variables:
                                new_value = MTValue(variable, 0)
                                new_value.mapTable = mapping_table
                                new_value.index = new_indice

                # Delete indices that aren't present
                for index in indices:
                    if not int(index[0]) in ids:
                        fetched_index = gsshapy_session.query(MTIndex).get(index.id)
                        for val in fetched_index.values:
                            gsshapy_session.delete(val)
                        gsshapy_session.delete(fetched_index)

                # Record how many IDs this map table now has
                new_index.mapTables[map_table_count].numIDs = numberIDs - ID_OFFSET
                map_table_count +=1

            # Re-query the indices after the rebuild
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapTables[0]).\
                                   order_by(MTIndex.index).\
                                   all()

            index_raster =  gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()

            # Specify the workspace
            controllerDir = os.path.abspath(os.path.dirname(__file__))
            gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
            publicDir = os.path.join(gsshaindexDir,'public')
            userDir = os.path.join(publicDir, str(user))
            indexMapDir = os.path.join(userDir, 'index_maps')

            # Create kml file name and path
            current_time = time.strftime("%Y%m%dT%H%M%S")
            resource_name = index_raster.name + "_" + str(user) + "_" + current_time
            kml_ext = resource_name + '.kml'
            clusterFile = os.path.join(indexMapDir, kml_ext)

            index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)

            # Generate color ramp
            index_raster.getAsKmlClusters(session=gsshapy_session, path = clusterFile, colorRamp = ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)

            resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

            temp_list = json.loads(job.current_kmls)

            # Replace the stale KML entry for this index with the new one
            if status == True:
                for item in temp_list:
                        if item == index_name:
                            del temp_list[item]
                            temp_list[index_name] = {'url':resource['url'], 'full_name':resource['name']}
                            break

            # Persist the updated KML list and release both sessions
            job.current_kmls = json.dumps(temp_list)
            job_session.commit()
            gsshapy_session.commit()
            job_session.close()
            gsshapy_session.close()

            return redirect(reverse('gsshaindex:mapping_table', kwargs={'job_id':job_id, 'index_name':index_name, 'mapping_table_number':'0'}))

    # GET (or failed combine): release sessions and render the form
    job_session.commit()
    gsshapy_session.commit()
    job_session.close()
    gsshapy_session.close()

    # Set the first index as the active one
    index_names = str(resource_names[0])

    # Set up map properties
    editable_map = {'height': '400px',
                      'width': '100%',
                      'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_names,
                      'maps_api_key':maps_api_key,
                      'drawing_types_enabled':[]}

    context['replaced_index'] = index_name
    context['index_name'] = index_names
    context['google_map'] = editable_map
    context['select_input1'] = select_input1
    context['select_input2'] = select_input2
    context['job_id'] = job_id
    context['resource_name'] = resource_names

    return render(request, 'gsshaindex/combine_index.html', context)
Exemple #22
0
def extract_gssha(request, job_id):
    '''
    Unzip a submitted GSSHA archive, read it into GSSHAPY, and create a
    KML for each of its index maps.

    The archive at ``job.original_url`` is extracted into the per-user
    workspace, the project file is read into the spatial database, each
    index map is rendered to KML and uploaded to CKAN, and the resulting
    {name: {url, full_name}} mapping is stored on the job as
    ``current_kmls``.  Redirects to the select_index page.
    '''
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user,session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Specify the workspace (per-user scratch folders under public/)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Clear the workspace
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(indexMapDir)

    # Get url for the resource and extract the GSSHA file
    url = job.original_url
    extract_path, unique_dir = gi_lib.extract_zip_from_url(user, url, userDir)

    # Create GSSHAPY Session
    gsshapy_session = gsshapy_sessionmaker()

    # Find the project (.prj) file in the extracted archive.
    # NOTE(review): if no .prj is found, project_name/read_dir are unbound
    # and the readInput call below raises NameError.
    for root, dirs, files in os.walk(userDir):
        for file in files:
            if file.endswith(".prj"):
                project_name = file
                project_path = os.path.join(root, file)
                read_dir = os.path.dirname(project_path)

    # Create an empty Project File Object
    project = ProjectFile()

    # Read the project (and its spatial data) into the database
    project.readInput(directory=read_dir,
                      projectFileName=project_name,
                      session = gsshapy_session,
                      spatial=True)

    # Create empty dictionary to hold the kmls from this session
    current_kmls = {}

    # Store model information on the job
    job.new_model_name = project.name
    job.new_model_id = project.id
    job.created = datetime.now()

    # Get index maps
    index_list = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project.mapTableFile).all()

    # Loop through the index maps, rendering each to KML
    for current_index in index_list:
        # Create kml file name and path (unique per user and timestamp)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = current_index.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        current_index.getAsKmlClusters(session=gsshapy_session,
                                       path = clusterFile,
                                       colorRamp = ColorRampEnum.COLOR_RAMP_HUE,
                                       alpha=0.6)

        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)

        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        # If the kml is added correctly, create an entry for the current_kmls with the name as the index name
        if status == True:
            current_kmls[current_index.name] = {'url':resource['url'], 'full_name':resource['name']}

    # Add the kmls with their url to the database
    job.current_kmls = json.dumps(current_kmls)
    session.commit()
    session.close()
    gsshapy_session.close()

    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:select_index', kwargs={'job_id':job_id}))
Exemple #23
0
def mapping_table(request, job_id, index_name, mapping_table_number):
    '''
    Display (and optionally update) the mapping tables for one index map.

    GET: renders the mapping table identified by ``mapping_table_number``
    for the index map ``index_name``, cross-tabulated by variable.
    POST: saves any submitted "indice-desc1-<id>" / "indice-desc2-<id>"
    description edits to the corresponding MTIndex rows before rendering.
    '''
    context = {}

    # Get the user id and the project file id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user,session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create a GSSHAPY session
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    indices = index_raster.indices
    mapTables = index_raster.mapTables

    # URL kwarg arrives as a string; used as a list position below
    mapping_table_number = int(mapping_table_number)

    # Process for if descriptions are submitted
    if (request.POST):
        params = request.POST
        for key in params:
            if "indice" in key:
                if "desc1" in key:
                    identity = int(key.replace("indice-desc1-",""))
                    mapDesc1 = gsshapy_session.query(MTIndex).get(identity)
                    mapDesc1.description1 = params[key]

                elif "desc2" in key:
                    # NOTE(review): unlike the desc1 branch, the id is not
                    # cast to int here; query.get accepts the string form.
                    identity = key.replace("indice-desc2-","")
                    mapDesc2 = gsshapy_session.query(MTIndex).get(identity)
                    mapDesc2.description2 = params[key]

        gsshapy_session.commit()

    # Get list of index files
    resource_kmls = json.loads(job.current_kmls)

    # Create array of kml names and urls
    resource_name = []
    resource_url = []
    for key in resource_kmls:
        resource_name.append(key)
        resource_url.append(resource_kmls[key]['url'])

    # Find the associated map tables and add them to an array
    # as [display name, raw name, position]
    arrayMapTables = []
    count = 0
    for table in mapTables:
        name =  str(table.name)
        clean = name.replace("_"," ")
        arrayMapTables.append([clean, table.name, count])
        count +=1

    # Find the variables that are related to the active map table
    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                 filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                                 order_by(MTValue.variable).\
                                 all()

    # Create an array of the variables in the active map table
    variables = []
    for var in distinct_vars:
        variables.append(var[0])


    # Cross tabulate manually to populate the mapping table information
    indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                           join(MTValue).\
                           filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                           order_by(MTIndex.index).\
                           all()

    # Get all the values to populate the table: one row of values per
    # index, with one column per variable
    var_values = []
    for var in variables:
        values = gsshapy_session.query(MTValue.id, MTValue.value).\
                              join(MTIndex).\
                              filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                              filter(MTValue.variable == var).\
                              order_by(MTIndex.index).\
                              all()
        var_values.append(values)
    zipValues = zip(*var_values)

    # Dictionary of properties for the map
    google_map = {'height': '400px',
                      'width': '100%',
                      'kml_service': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
                      'maps_api_key':maps_api_key}

    context['indices'] = indices
    context['job_id'] = job_id
    context['index_name'] = index_name
    context['mapping_table_number'] = mapping_table_number
    context['resource_kmls'] = resource_kmls
    context['resource_name'] = resource_name
    context['resource_url'] = resource_url
    context['mapTables'] = arrayMapTables
    context['variables'] = variables
    context['google_map'] = google_map
    context['values'] = zipValues

    return render(request, 'gsshaindex/mapping_table.html', context)
Exemple #24
0
def select_index(request, job_id):
    """
    Controller for the app home page.

    On a form POST, routes the user to the chosen index-map editing method;
    otherwise renders the page with the first (alphabetical) index map active.
    """
    context = {}
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Id of the project file tied to this job
    project_file_id = job.new_model_id

    # Handle the form submission: redirect to the chosen editing method
    if 'select_index' in request.POST:
        form = request.POST
        index_name = form['index_name']
        method = form['method']
        if method == "Create polygons":
            return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id':job_id, 'index_name':index_name}))
        elif method == "Upload shapefile":
            return redirect(reverse('gsshaindex:shapefile_index', kwargs={'job_id':job_id, 'index_name':index_name, 'shapefile_name':"None"}))
        elif method == "Merge index maps or replace with another":
            return redirect(reverse('gsshaindex:combine_index', kwargs={'job_id':job_id, 'index_name':index_name}))

    # Mapping of index map name -> kml metadata stored on the job
    resource_kmls = json.loads(job.current_kmls)

    # Collect the names and urls of the available index maps
    names = []
    urls = []
    for kml_name in resource_kmls:
        names.append(kml_name)
        urls.append(resource_kmls[kml_name]['url'])

    resource_name = sorted(names)

    # The alphabetically-first index map starts out as the active one
    map_name = str(resource_name[0])

    # Session for the gsshapy database
    gsshapy_session = gsshapy_sessionmaker()

    # Look up the project file, then the active index map raster
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == map_name).one()
    indices = index_raster.indices

    # Properties for the google map gizmo
    editable_map = {'height': '600px',
                    'width': '100%',
                    'reference_kml_action': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + map_name,
                    'maps_api_key': maps_api_key,
                    'drawing_types_enabled': []}

    context['google_map'] = editable_map
    context['project_name'] = job.original_name
    context['resource_name'] = resource_name
    context['map_name'] = map_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/select_index.html', context)
Exemple #25
0
def submit_mapping_table(request, job_id, index_name, mapping_table_number):
    '''
    This submits the mapping table and values for review.
    '''
    context = {}

    # Identify the user and look up the pending job
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Id of the project file tied to this job
    project_file_id = job.new_model_id

    # Mapping of index map name -> kml metadata stored on the job
    resource_kmls = json.loads(job.current_kmls)

    # Session for the gsshapy database
    gsshapy_session = gsshapy_sessionmaker()

    # Link the project id to the map table file and the active index map
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    indices = index_raster.indices
    mapTables = index_raster.mapTables

    mapping_table_number = int(mapping_table_number)

    # Build [display name, raw name, position] entries for the map tables
    # associated with this index map
    assocMapTables = []
    for position, table in enumerate(mapTables):
        display = str(table.name).replace("_", " ")
        assocMapTables.append([display, table.name, position])

    # Distinct variables that belong to the active map table
    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                 filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                                 order_by(MTValue.variable).\
                                 all()
    variables = [row[0] for row in distinct_vars]

    # Cross tabulate manually to populate the mapping table information
    indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.description1, MTIndex.description2).\
                           join(MTValue).\
                           filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                           order_by(MTIndex.index).\
                           all()

    # One column of (id, value) pairs per variable, transposed into rows
    var_values = []
    for var in variables:
        column = gsshapy_session.query(MTValue.id, MTValue.value).\
                              join(MTIndex).\
                              filter(MTValue.mapTable == mapTables[mapping_table_number]).\
                              filter(MTValue.variable == var).\
                              order_by(MTIndex.index).\
                              all()
        var_values.append(column)
    arrayValues = zip(*var_values)

    # Properties for the google map gizmo
    google_map = {'height': '400px',
                  'width': '100%',
                  'kml_service': '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
                  'maps_api_key': maps_api_key}

    context['job_id'] = job_id
    context['index_name'] = index_name
    context['mapping_table_number'] = mapping_table_number
    context['variables'] = variables
    context['values'] = arrayValues
    context['indices'] = indices
    context['mapTables'] = assocMapTables
    context['resource_kmls'] = resource_kmls
    context['google_map'] = google_map

    return render(request, 'gsshaindex/review_mapping_table.html', context)
Exemple #26
0
def zip_file(request, job_id):
    '''
    Zip up the GSSHA project files in preparation of their being run.

    Writes the project files to the user's workspace, zips them, uploads
    the zip to the public CKAN dataset, and records the run urls on the
    job.  Always redirects to the job status page.
    '''
    context = {}

    # Get the job id and user id
    user = str(request.user)
    session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    project_file_id = job.new_model_id

    # Get the name and description from the submission
    params = request.POST
    not_clean_name = params['new_name']
    new_description = params['new_description']

    # Reformat the name by replacing each unsafe character with an
    # underscore.  NOTE: the replacements must accumulate on new_name --
    # replacing from the original string every iteration (as before) kept
    # only the substitution for the *last* bad character.
    bad_char = "',.<>[]{}=+-/\"|:;\\^?!~`@#$%&*"
    new_name = not_clean_name
    for char in bad_char:
        new_name = new_name.replace(char, "_")

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Get project from the database
    projectFileAll = gsshapy_session.query(ProjectFile).get(project_file_id)

    # Base name for the files and a human-readable timestamp for CKAN
    project_name = projectFileAll.name
    if project_name.endswith('.prj'):
        project_name = project_name[:-4]
    pretty_date = time.strftime("%A %B %d, %Y %I:%M:%S %p")

    # Make sure a max depth (flood grid) output card exists; flag the job
    # if it was missing.  (list.append returns None, so its result is not
    # assigned.)
    if projectFileAll.getCard("FLOOD_GRID") == None:
        max_depth_card = ProjectCard("FLOOD_GRID", '"{0}.gfl"'.format(new_name))
        projectFileAll.projectCards.append(max_depth_card)
        gsshapy_session.commit()
        job.original_certification = "Missing gfl"

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    newFileDir = os.path.join(userDir, 'newFile')
    writeFile = os.path.join(newFileDir, new_name)
    zipPath = os.path.join(newFileDir, new_name + "_zip")

    # Clear workspace folders
    gi_lib.clear_folder(userDir)
    gi_lib.clear_folder(newFileDir)
    gi_lib.clear_folder(writeFile)

    # Write out all the project files
    projectFileAll.writeInput(session=gsshapy_session, directory=writeFile, name=new_name)

    # Add each project file to the zip archive under a folder named new_name
    writeFile_list = os.listdir(writeFile)
    with zipfile.ZipFile(zipPath, "w") as gssha_zip:
        for item in writeFile_list:
            abs_path = os.path.join(writeFile, item)
            archive_path = os.path.join(new_name, item)
            gssha_zip.write(abs_path, archive_path)

    GSSHA_dataset = gi_lib.check_dataset("gssha-models", CKAN_engine)

    # Add the zipped GSSHA file to the public ckan
    results, success = gi_lib.add_zip_GSSHA(GSSHA_dataset, zipPath, CKAN_engine, new_name, new_description, pretty_date, user)

    # If the file uploads correctly, store the run information in the
    # database.  Previously a failed upload fell through to an
    # undefined-name error (new_url/original_url were never set).
    if success == True:
        new_url = results['url']
        new_name = results['name']
        model_data = {'original': {'url': job.original_url, 'name': job.original_name}, 'new': {'url': new_url, 'name': new_name}}
        job.run_urls = model_data
        job.new_name = new_name
        job.status = "ready to run"
        session.commit()
    else:
        messages.error(request, "There was a problem uploading the zipped model. Please try again.")

    return redirect(reverse('gsshaindex:status'))
def submit_edits(request, job_id, index_name):
    '''
    Controller that handles submissions of edits from the user after they manually edit an index map.

    Expects the drawn polygons in request.POST['geometry'] as JSON with a
    'geometries' list of {'wkt': ..., 'properties': {'value': ...}} entries.
    Each polygon is burned into the index map raster in PostGIS, missing
    index ids are created (and orphaned ones removed), a fresh kml is
    generated and uploaded to CKAN, and the job's kml list is updated.
    Always redirects back to the edit_index page.
    '''
    context = {}
    user = str(request.user)
    params = request.POST

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Create session
    gsshapy_session = gsshapy_sessionmaker()

    # Specify the workspace (kmls are written under public/<user>/index_maps)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    index_raster = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mask_file = gsshapy_session.query(RasterMapFile).filter(RasterMapFile.projectFileID == project_file_id).filter(RasterMapFile.fileExtension == "msk").one()

    # Get a list of the map tables for the index map
    mapTables = index_raster.mapTables

    # If some geometry is submitted, go and run the necessary steps to change the map
    if params['geometry']:
        jsonGeom = json.loads(params['geometry'])
        geometries= jsonGeom['geometries']

        # Convert from json to WKT
        for geometry in geometries:
            wkt = geometry['wkt']

            # Get the values for the geometry (the index id the user assigned
            # to the drawn polygon)
            value = geometry['properties']['value']

            # Loop through indices and see if they match
            index_raster_indices = index_raster.indices
            index_present = False
            for index in index_raster_indices:
                if int(index.index) == int(value):
                    index_present = True
                    break

            # Create new index value if it doesn't exist and change the number of ids.
            # Every map table gets a zero-valued MTValue for each of its
            # variables so the new index id appears in all mapping tables.
            if index_present == False:
                new_indice = MTIndex(value, "", "")
                new_indice.indexMap = index_raster
                for mapping_table in mapTables:
                    distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                     filter(MTValue.mapTable == mapping_table).\
                                     order_by(MTValue.variable).\
                                     all()
                    variables = []
                    for var in distinct_vars:
                        variables.append(var[0])

                    for variable in variables:
                        new_value = MTValue(variable, 0)
                        new_value.mapTable = mapping_table
                        new_value.index = new_indice
                gsshapy_session.commit()

            # Fall back to a fixed srid when the project file has none.
            # NOTE(review): 26912 is NAD83 / UTM zone 12N -- presumably a
            # region-specific default; confirm this is intended for all models.
            if project_file.srid == None:
                srid = 26912
            else:
                srid = project_file.srid

            # Change values in the index map: burn the drawn polygon into the
            # raster with PostGIS ST_SetValue, transforming the WGS84 polygon
            # into the raster's srid.  NOTE(review): the SQL is built by
            # string formatting from the submitted wkt/value -- not
            # parameterized; relies on upstream validation of the input.
            change_index_values = "SELECT ST_SetValue(raster,1, ST_Transform(ST_GeomFromText('{0}', 4326),{1}),{2}) " \
                                  "FROM idx_index_maps " \
                                  "WHERE id = {3};".format(wkt, srid, value, index_raster.id)

            # Run the raster update with a 10 second timeout; returns None on timeout
            result = gi_lib.timeout(gi_lib.draw_update_index, args=(change_index_values,index_raster.id), kwargs={}, timeout=10, result_can_be_pickled=True, default=None)

            if result == None:

                # Timed out: close the sessions and send the user back to retry
                messages.error(request, 'The submission timed out. Please try to draw in the changes and submit them again.')
                job_session.close()
                gsshapy_session.close()
                context['index_name'] = index_name
                context['job_id'] = job_id

                return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id':job_id, 'index_name':index_name}))

        # Get the distinct cell values now present in the index map raster
        statement3 = '''SELECT (pvc).*
                        FROM (SELECT ST_ValueCount(raster,1,true) As pvc
                        FROM idx_index_maps WHERE id = '''+ unicode(index_raster.id) +''') AS foo
                        ORDER BY (pvc).value;
                        '''
        result3 = gsshapy_engine.execute(statement3)

        # Count the ids and collect them for the orphan check below
        numberIDs = 0
        ids = []
        for row in result3:
            numberIDs +=1
            ids.append(row.value)

        # Update each map table's id count and delete index entries (and their
        # values) that no longer correspond to any cell value in the raster
        map_table_count = 0
        for mapping_table in mapTables:

            index_raster.mapTables[map_table_count].numIDs = numberIDs

            indices = gsshapy_session.query(MTIndex.index, MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapping_table).\
                                   order_by(MTIndex.index).\
                                   all()

            for index in indices:
                if not int(index[0]) in ids:
                    bob = gsshapy_session.query(MTIndex).get(index.id)
                    for val in bob.values:
                        gsshapy_session.delete(val)
                    gsshapy_session.delete(bob)
            gsshapy_session.commit()
            map_table_count +=1

        # Re-fetch the index map after the edits above
        index_raster =  gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()

        # Create kml file name and path (timestamped so each upload is unique)
        current_time = time.strftime("%Y%m%dT%H%M%S")
        resource_name = index_raster.name + "_" + str(user) + "_" + current_time
        kml_ext = resource_name + '.kml'
        clusterFile = os.path.join(indexMapDir, kml_ext)

        # Generate color ramp
        index_raster.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)

        # Upload the new kml to CKAN
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        temp_list = json.loads(job.current_kmls)

        # Swap the old kml entry for this index map with the new upload
        # (break immediately after mutating the dict being iterated)
        if status == True:
            for item in temp_list:
                    if item == index_name:
                        del temp_list[item]
                        temp_list[index_name] = {'url':resource['url'], 'full_name':resource['name']}
                        break

        job.current_kmls = json.dumps(temp_list)
        job_session.commit()
        job_session.close()
        gsshapy_session.close()

    else:
        messages.error(request, "You must make edits to submit")

    context['index_name'] = index_name
    context['job_id'] = job_id

    return redirect(reverse('gsshaindex:edit_index', kwargs={'job_id':job_id, 'index_name':index_name}))
Exemple #28
0
def get_depth_map(request, job_id, view_type):
    '''
    Return the kml link for the requested depth map as JSON.

    view_type selects the map: "newTime", "newMax", "originalTime" or
    "originalMax".  The generated kml url is cached on the job record the
    first time a map is built; later requests reuse the stored url.
    Responds with {'kml_links': [...]} -- an empty list for an unknown
    view_type.
    '''
    context = {}

    # Get the user id
    user = str(request.user)

    # Get the job from the database
    session = jobs_sessionmaker()
    job = session.query(Jobs).\
                    filter(Jobs.user_id == user).\
                    filter(Jobs.original_id == job_id).one()

    # Specify the workspace
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    depthMapDir = os.path.join(userDir, 'depth_maps')
    newDepthDir = os.path.join(depthMapDir, 'new')
    originalDepthDir = os.path.join(depthMapDir, 'original')

    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    kml_link = []

    if view_type == "newTime":
        # Time-series depth map for the new model (cached on job.newTime)
        if job.newTime:
            kml_link.append(job.newTime)
        else:
            result_url = job.result_urls['new']
            result = gi_lib.prepare_time_depth_map(user, result_url, job, newDepthDir, CKAN_engine)
            job.newTime = result['url']
            session.commit()
            kml_link.append(result['url'])

    elif view_type == "newMax":
        # Max depth map for the new model (cached on job.newMax)
        if job.newMax:
            kml_link.append(job.newMax)
        else:
            result_url = job.result_urls['new']
            result = gi_lib.prepare_max_depth_map(user, result_url, job, newDepthDir, CKAN_engine)
            job.newMax = result['url']
            session.commit()
            kml_link.append(result['url'])

    elif view_type == "originalTime":
        # Time-series depth map for the original model (cached on job.originalTime)
        if job.originalTime:
            kml_link.append(job.originalTime)
        else:
            result_url = job.result_urls['original']
            result = gi_lib.prepare_time_depth_map(user, result_url, job, originalDepthDir, CKAN_engine)
            job.originalTime = result['url']
            session.commit()
            kml_link.append(result['url'])

    elif view_type == "originalMax":
        # Max depth map for the original model (cached on job.originalMax)
        if job.originalMax:
            kml_link.append(job.originalMax)
        else:
            result_url = job.result_urls['original']
            result = gi_lib.prepare_max_depth_map(user, result_url, job, originalDepthDir, CKAN_engine)
            job.originalMax = result['url']
            session.commit()
            kml_link.append(result['url'])

    else:
        # Unknown view_type: respond with an empty-string link.  BUG FIX:
        # the original assigned `kml_link = kml_link.append("")`, which
        # rebinds kml_link to None (list.append returns None) and made the
        # response {'kml_links': None}.
        kml_link.append("")

    session.close()

    return JsonResponse({'kml_links': kml_link})
def edit_index(request, job_id, index_name):
    """
    Controller for the edit index by manually drawing in edits page.

    If no kml for this index map is cached on the job yet, a clustered kml
    is generated, uploaded to CKAN, and recorded on the job before the map
    page is rendered.
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb', app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Specify the workspace (kmls are written under public/<user>/index_maps)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir,'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(IndexMap.mapTableFile == project_file.mapTableFile).filter(IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files
    resource_list = json.loads(job.current_kmls)

    resource_names = []
    resource_url = []
    # Get array of names
    for key in resource_list:
        resource_names.append(key)

    # Create kml file name and path (timestamped so each upload is unique)
    current_time = time.strftime("%Y%m%dT%H%M%S")
    resource_name = new_index.name + "_" + str(user) + "_" + current_time
    kml_ext = resource_name + '.kml'
    clusterFile = os.path.join(indexMapDir, kml_ext)

    # See if a kml for this index map is already cached on the job
    file_present = False
    for key in resource_list:
        if key == index_name:
            file_present = True

    if file_present == False:
        # Generate color ramp
        new_index.getAsKmlClusters(session=gsshapy_session, path=clusterFile, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=0.6)

        # Upload the kml to CKAN and record it on the job.  BUG FIX: the
        # original iterated `result['resources']`, but `result` was never
        # defined, raising a NameError whenever the kml was not cached.
        # The resource dict returned by add_kml_CKAN is used directly, as
        # in submit_edits.
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine, clusterFile, resource_name)

        if status == True:
            resource_list[new_index.name] = {'url':resource['url'], 'full_name':resource['name']}

        job.current_kmls = json.dumps(resource_list)

    job_session.commit()
    job_session.close()

    # Set up map properties
    editable_map = {'height': '600px',
                    'width': '100%',
                    'reference_kml_action': '/apps/gsshaindex/'+ job_id + '/get-index-maps/'  + index_name,
                    'maps_api_key':maps_api_key,
                    'drawing_types_enabled': ['POLYGONS'],
                    'initial_drawing_mode': 'POLYGONS',
                    'output_format': 'WKT'}

    context['google_map'] = editable_map
    context['mapTables'] = mapTables
    context['indices'] = indices
    context['resource_names'] = resource_names
    context['resource_url'] = resource_url
    context['resource_list'] = resource_list
    context['index_name'] = index_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/edit_index.html', context)
def edit_index(request, job_id, index_name):
    """
    Controller for the edit index by manually drawing in edits page.

    If no kml for this index map is cached on the job yet, a clustered kml
    is generated, uploaded to CKAN, and recorded on the job before the map
    page is rendered.
    """
    context = {}
    user = str(request.user)

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # Get project file id
    project_file_id = job.new_model_id

    # Specify the workspace (kmls are written under public/<user>/index_maps)
    controllerDir = os.path.abspath(os.path.dirname(__file__))
    gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
    publicDir = os.path.join(gsshaindexDir, 'public')
    userDir = os.path.join(publicDir, str(user))
    indexMapDir = os.path.join(userDir, 'index_maps')

    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files
    resource_list = json.loads(job.current_kmls)

    resource_names = []
    resource_url = []
    # Get array of names
    for key in resource_list:
        resource_names.append(key)

    # Create kml file name and path (timestamped so each upload is unique)
    current_time = time.strftime("%Y%m%dT%H%M%S")
    resource_name = new_index.name + "_" + str(user) + "_" + current_time
    kml_ext = resource_name + '.kml'
    clusterFile = os.path.join(indexMapDir, kml_ext)

    # See if a kml for this index map is already cached on the job
    file_present = False
    for key in resource_list:
        if key == index_name:
            file_present = True

    if file_present == False:
        # Generate color ramp
        new_index.getAsKmlClusters(session=gsshapy_session,
                                   path=clusterFile,
                                   colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                                   alpha=0.6)

        # Upload the kml to CKAN and record it on the job.  BUG FIX: the
        # original iterated `result['resources']`, but `result` was never
        # defined, raising a NameError whenever the kml was not cached.
        # The resource dict returned by add_kml_CKAN is used directly, as
        # in submit_edits.
        index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)
        resource, status = gi_lib.add_kml_CKAN(index_map_dataset, CKAN_engine,
                                               clusterFile, resource_name)

        if status == True:
            resource_list[new_index.name] = {
                'url': resource['url'],
                'full_name': resource['name']
            }

        job.current_kmls = json.dumps(resource_list)

    job_session.commit()
    job_session.close()

    # Set up map properties
    editable_map = {
        'height': '600px',
        'width': '100%',
        'reference_kml_action':
        '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_name,
        'maps_api_key': maps_api_key,
        'drawing_types_enabled': ['POLYGONS'],
        'initial_drawing_mode': 'POLYGONS',
        'output_format': 'WKT'
    }

    context['google_map'] = editable_map
    context['mapTables'] = mapTables
    context['indices'] = indices
    context['resource_names'] = resource_names
    context['resource_url'] = resource_url
    context['resource_list'] = resource_list
    context['index_name'] = index_name
    context['job_id'] = job_id

    return render(request, 'gsshaindex/edit_index.html', context)
def combine_index(request, job_id, index_name):
    """
    Controller for combining two index maps into one (or replacing the
    current index map with a different one).

    GET renders a page with two select inputs (first map, and second map or
    "None") plus a read-only Google Map preview of the first index map.
    POST combines the selected rasters in-place into the index map named by
    ``index_name`` via PostGIS ``ST_MapAlgebra`` (values become
    ``first*1000 + second``), rebuilds the map-table index entries to match
    the new raster values, regenerates the KML, uploads it to CKAN, and
    redirects to the mapping-table page.

    Parameters:
        request: Django HttpRequest.
        job_id: id of the pending job being edited.
        index_name: name of the index map that receives the combined result.

    Returns:
        HttpResponse rendering 'gsshaindex/combine_index.html', or a
        redirect to 'gsshaindex:mapping_table' after a successful combine.
    """
    context = {}
    user = str(request.user)

    # New map-table index ids are assigned starting at ID_OFFSET + 1.
    ID_OFFSET = 10

    # Get the job from the database
    job_session = jobs_sessionmaker()
    job, success = gi_lib.get_pending_job(job_id, user, job_session)
    CKAN_engine = get_dataset_engine(name='gsshaindex_ciwweb',
                                     app_class=GSSHAIndex)

    # Get project file id and gsshapy_session
    project_file_id = job.new_model_id
    gsshapy_session = gsshapy_sessionmaker()

    # Use project id to link to original map table file
    project_file = gsshapy_session.query(ProjectFile).filter(
        ProjectFile.id == project_file_id).one()
    # The index map being edited (the combine target).
    new_index = gsshapy_session.query(IndexMap).filter(
        IndexMap.mapTableFile == project_file.mapTableFile).filter(
            IndexMap.name == index_name).one()
    mapTables = new_index.mapTables
    indices = new_index.indices

    # Get list of index files (name -> {'url', 'full_name'}) stored as JSON
    # on the job.
    resource_list = json.loads(job.current_kmls)

    # Create blank array for names and urls
    resource_names = []
    resource_url = []
    resource_info = []

    # Get array of names and urls; resource_info is (value, label) pairs for
    # the select inputs.
    for key in resource_list:
        resource_names.append(key)
        resource_url.append(resource_list[key]['url'])
        resource_info.append((key, key))

    select_input1 = {
        'display_text': "Select first index map",
        'name': 'select1',
        'multiple': False,
        'options': resource_info
    }

    select_input2 = {
        'display_text': "Select second index map or none",
        'name': 'select2',
        'multiple': False,
        'options': [("None", "none")] + resource_info
    }

    # if the next button was pressed
    if request.POST:
        params = request.POST
        # Error message if both maps selected are the same
        if params['select1'] == params['select2']:
            # Empty result skips the combine-processing branch below.
            result = ""
            messages.error(
                request,
                "You must select two different index maps. Or if you'd like to replace this map with a different map, select None for the second option"
            )
        # Process if only one map is selected: replace this map's raster with
        # select1's values scaled by 1000 (second term contributes 0).
        elif params['select2'] == "none":
            select1_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select1']).one()
            # NOTE(review): unicode() is a Python-2-only builtin; this module
            # will not run under Python 3 as written.
            # NOTE(review): SQL is built by string concatenation; the
            # interpolated values are integer primary keys from the ORM, but
            # parameterized execution would be safer.
            statement = '''UPDATE idx_index_maps
                                  Set raster = ST_MapAlgebra(
                                  (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                select1_id.id) + '''), 1,
                                  (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                    new_index.id) + '''), 1,
                                  '([rast1]*1000+ [rast2]*0)'
                                  )
                                WHERE id = ''' + unicode(new_index.id) + ''';
                            '''
            # Run with a 10 s timeout; returns the default (None) on timeout.
            result = gi_lib.timeout(gsshapy_engine.execute,
                                    args=(statement, ),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=False,
                                    default=None)
        # Process if two maps are selected
        else:
            # Get the ids for the two index maps to be combined
            select1_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select1']).one()
            select2_id = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == params['select2']).one()
            # Combine the maps and give a unique id
            # (combined value = first*1000 + second, so both source ids are
            # recoverable from the result).
            statement = '''UPDATE idx_index_maps
                              SET raster =ST_MapAlgebra(
                              (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                select1_id.id) + '''), 1,
                              (SELECT raster FROM idx_index_maps WHERE id = ''' + unicode(
                    select2_id.id) + '''), 1,
                              '(([rast1]*1000) + [rast2])'
                              )
                            WHERE id = ''' + unicode(new_index.id) + ''';
                        '''
            result = gi_lib.timeout(gsshapy_engine.execute,
                                    args=(statement, ),
                                    kwargs={},
                                    timeout=10,
                                    result_can_be_pickled=False,
                                    default=None)

        # NOTE(review): a timed-out gi_lib.timeout call returns None, which
        # is != "" and therefore still enters this processing branch —
        # presumably unintended; confirm desired behavior on timeout.
        if result != "":
            # Get the distinct cell values present in the combined raster
            statement3 = '''SELECT (pvc).*
                            FROM (SELECT ST_ValueCount(raster,1,true) As pvc
                            FROM idx_index_maps WHERE id = ''' + unicode(
                new_index.id) + ''') AS foo
                            ORDER BY (pvc).value;
                            '''
            new_indice_values = gsshapy_engine.execute(statement3)

            # Get the indices for the index being changed
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapTables[0]).\
                                   order_by(MTIndex.index).\
                                   all()

            # Go through the map tables that use the index map
            map_table_count = 0
            for mapping_table in mapTables:

                # Reset the number of ids to start counting them
                numberIDs = ID_OFFSET
                ids = []

                # Go through each new (combined) raster value
                for row in new_indice_values:
                    index_present = False
                    numberIDs += 1
                    ids.append(row.value)
                    large_id = int(row[0])
                    # Skip values that already have an MTIndex entry.
                    for index in new_index.indices:
                        if int(index.index) == int(row[0]):
                            index_present = True
                            break

                    if index_present == False:
                        # Split combined value back into its two source ids:
                        # first*1000 + second.
                        # NOTE(review): split("0")[-1] is fragile — a value
                        # like 2010 splits to ['2', '1', ''] so second_id
                        # becomes '' and int('') below would raise; a
                        # divmod(large_id, 1000) would be robust. Confirm the
                        # possible value range.
                        if str(large_id).endswith("000") == False:
                            second_id = str(large_id).split("0")[-1]
                            first_id = (large_id - int(second_id)) / 1000
                        else:
                            first_id = (large_id) / 1000
                            second_id = ""
                            description2 = ""

                        # Carry over descriptions from the first source map.
                        pastinfo1 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                filter(MTIndex.idxMapID == select1_id.id).\
                                filter(MTIndex.index == first_id).\
                                all()
                        description1 = pastinfo1[
                            0].description1 + " " + pastinfo1[0].description2

                        # And from the second source map, when there was one.
                        if second_id != "":
                            pastinfo2 = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                    filter(MTIndex.idxMapID == select2_id.id).\
                                    filter(MTIndex.index == second_id).\
                                    all()
                            description2 = pastinfo2[
                                0].description1 + " " + pastinfo2[
                                    0].description2

                        # Query for the pixel values of row[0] and replace with numberIDs
                        pixel_query = '''SELECT ST_PixelOfValue((SELECT raster FROM idx_index_maps WHERE id = {0}), {1});'''.format(
                            unicode(new_index.id), row[0])
                        pixels = gsshapy_session.execute(pixel_query)

                        # Rewrite each matching pixel to the compact new id.
                        for pixel in pixels:
                            coord = pixel[0].strip("()")
                            x, y = coord.split(",")
                            update_query = '''UPDATE idx_index_maps
                                              SET raster = (SELECT ST_SetValue(raster,{1},{2},{3}) FROM idx_index_maps WHERE id = {0})
                                              WHERE id = {0};'''.format(
                                unicode(new_index.id), int(x), int(y),
                                numberIDs)
                            new_result = gsshapy_session.execute(update_query)

                        # Create new index value
                        new_indice = MTIndex(numberIDs, description1,
                                             description2)
                        # new_indice = MTIndex(row[0], description1, description2)
                        new_indice.indexMap = new_index
                        # Seed zero-valued MTValues for every variable of
                        # every map table that uses this index map.
                        # NOTE(review): this loop variable shadows the outer
                        # `for mapping_table in mapTables` variable — likely
                        # a latent bug; confirm intent.
                        for mapping_table in mapTables:
                            distinct_vars = gsshapy_session.query(distinct(MTValue.variable)).\
                                             filter(MTValue.mapTable == mapping_table).\
                                             order_by(MTValue.variable).\
                                             all()

                            variables = []
                            for var in distinct_vars:
                                variables.append(var[0])

                            for variable in variables:
                                new_value = MTValue(variable, 0)
                                new_value.mapTable = mapping_table
                                new_value.index = new_indice

                # Delete indices that aren't present in the combined raster
                # (ids holds the raw combined values seen above).
                for index in indices:
                    if not int(index[0]) in ids:
                        fetched_index = gsshapy_session.query(MTIndex).get(
                            index.id)
                        for val in fetched_index.values:
                            gsshapy_session.delete(val)
                        gsshapy_session.delete(fetched_index)

                # Record how many ids this map table now has.
                new_index.mapTables[
                    map_table_count].numIDs = numberIDs - ID_OFFSET
                map_table_count += 1

            # Re-query indices so the context reflects the rebuilt entries.
            indices = gsshapy_session.query(distinct(MTIndex.index), MTIndex.id, MTIndex.description1, MTIndex.description2).\
                                   join(MTValue).\
                                   filter(MTValue.mapTable == mapTables[0]).\
                                   order_by(MTIndex.index).\
                                   all()

            index_raster = gsshapy_session.query(IndexMap).filter(
                IndexMap.mapTableFile == project_file.mapTableFile).filter(
                    IndexMap.name == index_name).one()

            # Specify the workspace (public/<user>/index_maps under the app)
            controllerDir = os.path.abspath(os.path.dirname(__file__))
            gsshaindexDir = os.path.abspath(os.path.dirname(controllerDir))
            publicDir = os.path.join(gsshaindexDir, 'public')
            userDir = os.path.join(publicDir, str(user))
            indexMapDir = os.path.join(userDir, 'index_maps')

            # Create kml file name and path (timestamped so names are unique)
            current_time = time.strftime("%Y%m%dT%H%M%S")
            resource_name = index_raster.name + "_" + str(
                user) + "_" + current_time
            kml_ext = resource_name + '.kml'
            clusterFile = os.path.join(indexMapDir, kml_ext)

            index_map_dataset = gi_lib.check_dataset("index-maps", CKAN_engine)

            # Generate color ramp
            index_raster.getAsKmlClusters(
                session=gsshapy_session,
                path=clusterFile,
                colorRamp=ColorRampEnum.COLOR_RAMP_HUE,
                alpha=0.6)

            # Upload the regenerated KML to CKAN.
            resource, status = gi_lib.add_kml_CKAN(index_map_dataset,
                                                   CKAN_engine, clusterFile,
                                                   resource_name)

            temp_list = json.loads(job.current_kmls)

            # On successful upload, swap the stale KML entry for the new one.
            if status == True:
                for item in temp_list:
                    if item == index_name:
                        del temp_list[item]
                        temp_list[index_name] = {
                            'url': resource['url'],
                            'full_name': resource['name']
                        }
                        break

            job.current_kmls = json.dumps(temp_list)
            job_session.commit()
            gsshapy_session.commit()
            job_session.close()
            gsshapy_session.close()

            return redirect(
                reverse('gsshaindex:mapping_table',
                        kwargs={
                            'job_id': job_id,
                            'index_name': index_name,
                            'mapping_table_number': '0'
                        }))

    # GET (or same-map error) path: persist nothing new, close sessions,
    # and render the selection page.
    job_session.commit()
    gsshapy_session.commit()
    job_session.close()
    gsshapy_session.close()

    # Set the first index as the active one
    index_names = str(resource_names[0])

    # Set up map properties (read-only preview: no drawing types enabled)
    editable_map = {
        'height':
        '400px',
        'width':
        '100%',
        'reference_kml_action':
        '/apps/gsshaindex/' + job_id + '/get-index-maps/' + index_names,
        'maps_api_key':
        maps_api_key,
        'drawing_types_enabled': []
    }

    context['replaced_index'] = index_name
    context['index_name'] = index_names
    context['google_map'] = editable_map
    context['select_input1'] = select_input1
    context['select_input2'] = select_input2
    context['job_id'] = job_id
    context['resource_name'] = resource_names

    return render(request, 'gsshaindex/combine_index.html', context)