Example #1
    def __init__(self, start_time, end_time, dtype, mkeys, region_id,
                 raster_mask_id):
        assert isinstance(start_time, datetime), \
            "start_time not a datetime object!"
        assert isinstance(end_time, datetime), \
            "end_time not a datetime object!"
        assert start_time <= end_time, "start_time > end_time!"

        self.region_id = region_id
        self.raster_mask_id = raster_mask_id
        self.start_time = start_time
        self.end_time = end_time
        self.dtype = dtype

        self.old_mask_name = None

        # LatLon is not supported
        if gscript.locn_is_latlong():
            msgr.fatal(u"latlong location is not supported. "
                       u"Please use a projected location")
        # Set region
        if self.region_id:
            gscript.use_temp_region()
            gscript.run_command("g.region", region=region_id)
        self.region = Region()
        self.xr = self.region.cols
        self.yr = self.region.rows
        # Check if region is at least 3x3
        if self.xr < 3 or self.yr < 3:
            msgr.fatal(u"GRASS Region should be at least 3 cells by 3 cells")
        self.dx = self.region.ewres
        self.dy = self.region.nsres
        self.reg_bbox = {
            'e': self.region.east,
            'w': self.region.west,
            'n': self.region.north,
            's': self.region.south
        }
        # Set temporary mask
        if self.raster_mask_id:
            self.set_temp_mask()
        self.overwrite = gscript.overwrite()
        self.mapset = gutils.getenv('MAPSET')
        self.maps = dict.fromkeys(mkeys)
        # init temporal module
        tgis.init()
        # Create thread and queue for writing raster maps
        self.raster_lock = Lock()
        self.raster_writer_queue = Queue(maxsize=15)
        worker_args = (self.raster_writer_queue, self.raster_lock)
        self.raster_writer_thread = Thread(name="RasterWriter",
                                           target=raster_writer,
                                           args=worker_args)
        self.raster_writer_thread.start()
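A minimal usage sketch for this constructor, assuming it belongs to a simulation/GIS setup class and that a GRASS session with a projected location is already active. The class name SimulationDomain and the mkeys values are hypothetical stand-ins:

from datetime import datetime, timedelta

start = datetime(2020, 1, 1)
end = start + timedelta(hours=6)
# 'SimulationDomain' is a hypothetical name for the class that owns
# this __init__; 'rain' and 'inflow' are illustrative map keys.
domain = SimulationDomain(start_time=start, end_time=end,
                          dtype='float32', mkeys=['rain', 'inflow'],
                          region_id='my_region', raster_mask_id='my_mask')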
Example #2
def main():
    inmap = options['input']
    outmap = options['output']
    coor = options['coor']
    coor = coor.replace(',', ' ')

    global tmp, nuldev, grass_version
    nuldev = None

    # setup temporary files
    tmp = grass.tempfile()

    # check for LatLong location
    if grass.locn_is_latlong():
        grass.fatal(
            "Module works only in locations with cartesian coordinate system")

    # check if input file exists
    if not grass.find_file(inmap, element='vector')['file']:
        grass.fatal(_("<%s> does not exist.") % inmap)

    ## DO IT ##
    ## add categories to boundaries
    grass.run_command('v.category', input_=inmap, option='add',
                      type_='boundary', output='v_temp_bcats',
                      quiet=True, stderr=nuldev)

    ## export polygons to CSV + WKT
    tmp1 = tmp + '.csv'
    tmp2 = tmp + '2.csv'
    grass.run_command('v.out.ogr',
                      input_='v_temp_bcats',
                      output=tmp1,
                      format_="CSV",
                      type_='boundary',
                      lco="GEOMETRY=AS_WKT",
                      quiet=True,
                      stderr=nuldev)

    ## convert lines to polygons
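    ## e.g. 'LINESTRING (0 0, 1 0, 1 1, 0 0)' in the exported CSV becomes
    ## 'POLYGON ((0 0, 1 0, 1 1, 0 0))' (illustrative coordinates; the
    ## exported boundaries are already closed rings)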
    f1 = open(tmp1, 'r')
    f2 = open(tmp2, 'w')
    for line in f1:
        f2.write(
            line.replace('LINESTRING',
                         'POLYGON').replace(' (', ' ((').replace(')"', '))"'))
    f1.close()
    f2.close()

    with open(tmp2, 'r') as f:
        print(f.read())

    ## open CSV with OGR and get layer name
    f = ogr.Open(tmp2, 0)
    lyr = f.GetLayer(0)
    lyr_name = lyr.GetName()

    ## make spatial query with coordinates
    coords = "%s %s" % (coor, coor)
    tmp3 = tmp + '_v_temp_select.shp'
    cmd = 'ogr2ogr ' + ' -spat ' + coords + ' ' + tmp3 + ' ' + tmp2 + ' ' + lyr_name
    os.system(cmd)

    ## open SHP with OGR and get layer name
    f = ogr.Open(tmp3, 0)
    lyr = f.GetLayer(0)
    lyr_name = lyr.GetName()

    ## print selected objects to stdout or write into vector map
    if flags['p']:
        cmd = 'ogrinfo -al -fields=YES -geom=SUMMARY' + ' ' + tmp3 + ' ' + lyr_name
        os.system(cmd)
    else:
        grass.run_command('v.in.ogr', input_=tmp3, layer=lyr_name,
                          output=outmap, flags='c', quiet=True, stderr=nuldev)
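The ogr2ogr and ogrinfo calls above are assembled by string concatenation and run through os.system. As a minimal alternative sketch, the same spatial query could be issued with subprocess and an argument list (assuming ogr2ogr is on the PATH), which sidesteps shell quoting:

import subprocess

# coor, tmp3, tmp2 and lyr_name are the variables from main() above;
# 'coor' is "x y" after the comma replacement, and ogr2ogr -spat
# expects xmin ymin xmax ymax.
x, y = coor.split()
subprocess.check_call(['ogr2ogr', '-spat', x, y, x, y,
                       tmp3, tmp2, lyr_name])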
Example #3
def main():
    # Hard-coded parameters needed for USGS datasets
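    # Each 'dataset' tuple below is (cell size in degrees, resolution in
    # meters, resolution in feet); the g.proj units check further down
    # selects the matching entry for the current location.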
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': ['1 x 1 degree', '15 x 15 minute'],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011':
                (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness', 'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)
            },
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extension = nav_string['extension']
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    min_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['w'], gregion['s']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['e'], gregion['n']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = urllib.quote_plus(gui_prod_str)
    prod_format = urllib.quote_plus(product_format)
    prod_extent = urllib.quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try:
        TNM_API_GET = urllib2.urlopen(TNM_API_URL, timeout=12)
    except urllib2.URLError:
        gscript.fatal(
            _("USGS TNM API query has timed out. Check network configuration. Please try again."
              ))
    except Exception:
        gscript.fatal(
            _("Error accessing USGS TNM API. Check network configuration. Please try again."
              ))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)

    except (ValueError, KeyError):
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir,
                                              ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size -
                       TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(
            _("TNM API ERROR or Zero tiles available for given input parameters."
              ))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    if cleanup_list:
        cleanup_msg = _(
            "\n{0} existing incomplete file(s) detected and removed. Run module again."
        ).format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # formats JSON size from bytes into needed units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if len_total_size <= 6:
            total_size_str = '{0} bytes'.format(total_size)
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        if len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(
                        gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(
            _("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urllib2.urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except urllib2.URLError:
            gscript.fatal(
                _("USGS download request has timed out. Network or formatting error."
                  ))
        except StandardError:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        for z in local_zip_path_list:
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.endswith(product_extension):
                            extracted_tile = os.path.join(work_dir, str(f))
                            if os.path.exists(extracted_tile):
                                os.remove(extracted_tile)
                            read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    cleanup_list.append(extracted_tile)
            except (IOError, OSError, zipfile.BadZipfile):
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file from ZIP archive.")
                )

    # operations for extracted or complete files available locally
    for t in local_tile_path_list:
        # create variables for use in GRASS GIS import process
        LT_file_name = os.path.basename(t)
        LT_layer_name = os.path.splitext(LT_file_name)[0]
        patch_names.append(LT_layer_name)
        in_info = ("Importing and reprojecting {0}...").format(LT_file_name)
        gscript.info(in_info)
        # import to GRASS GIS
        try:
            gscript.run_command('r.import',
                                input=t,
                                output=LT_layer_name,
                                resolution='value',
                                resolution_value=product_resolution,
                                extent="region",
                                resample=product_interpolation)
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not gui_k_flag:
                cleanup_list.append(t)
        except CalledModuleError:
            in_error = ("Unable to import '{0}'").format(LT_file_name)
            gscript.fatal(in_error)

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if completed_tiles_count > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region',
                                        res=product_resolution,
                                        flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [
                            name + '.' + i for name in patch_names
                        ]
                        gscript.run_command('r.patch',
                                            input=patch_names_i,
                                            output=gui_output_layer + '.' + i)
                else:
                    gscript.run_command('r.patch',
                                        input=patch_names,
                                        output=gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if 'k' flag
                if not gui_k_flag:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [
                                name + '.' + i for name in patch_names
                            ]
                            gscript.run_command('g.remove',
                                                type='raster',
                                                name=patch_names_i,
                                                flags='f')
                    else:
                        gscript.run_command('g.remove',
                                            type='raster',
                                            name=patch_names,
                                            flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
        elif completed_tiles_count == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename',
                                        raster=(patch_names[0] + '.' + i,
                                                gui_output_layer + '.' + i))
            else:
                gscript.run_command('g.rename',
                                    raster=(patch_names[0], gui_output_layer))
        temp_down_count = "\n{0} of {1} tile/s succesfully imported and patched.".format(
            completed_tiles_count, tiles_needed_count)
        gscript.info(temp_down_count)
    else:
        gscript.fatal("Error downloading files. Please retry.")

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = (
            "<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors',
                            map=gui_output_layer,
                            color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite',
                            red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2',
                            blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.del_temp_region()
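The download loop above streams each file in 16 KiB chunks instead of buffering whole files in memory. A standalone sketch of the same pattern with Python 3 urllib (URL and filename here are hypothetical):

from urllib.request import urlopen

CHUNK = 16 * 1024
url = "https://example.com/tile.img"  # hypothetical URL
with urlopen(url, timeout=12) as response, open("tile.img", "wb") as out:
    while True:
        chunk = response.read(CHUNK)
        if not chunk:
            break
        out.write(chunk)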
Example #4
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': [
                '1 x 1 degree',
                '15 x 15 minute'
            ],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011': (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness',
                'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)},
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "lidar": {
            'product': 'Lidar Point Cloud (LPC)',
            'dataset': {
                'Lidar Point Cloud (LPC)': (1. / 3600 / 9, 3, 10)},
            'subset': {},
            'extent': [''],
            'format': 'LAS,LAZ',
            'extension': 'las,laz',
            'zip': True,
            'srs': '',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extensions = tuple(nav_string['extension'].split(','))
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    has_pdal = gscript.find_program(pgm='v.in.pdal')
    if gui_product == 'lidar':
        gui_dataset = 'Lidar Point Cloud (LPC)'
        product_tag = nav_string['product']
        if not has_pdal:
            gscript.warning(_("Module v.in.pdal is missing,"
                              " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = int(options['nprocs'])

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(_("Directory <{}> does not exist."
                        " Please create it.").format(work_dir))

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == 'lidar' and options['resolution']:
        product_resolution = float(options['resolution'])

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(_("The default resampling method for product {product} is {res}").format(product=gui_product,
                        res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    min_coords = gscript.read_command('m.proj', coordinates=(gregion['w'], gregion['s']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj', coordinates=(gregion['e'], gregion['n']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _("Possibly, the query has timed out. Check network configuration and try again.")
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(_(
            "HTTP(S) error from USGS TNM API:"
            " {code}: {reason} ({instructions})").format(
                reason=error.reason, code=error.code, instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(_(
            "Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == 'lidar' and options['title_filter']:
            return_JSON['items'] = [item for item in return_JSON['items'] if options['title_filter'] in item['title']]
            return_JSON['total'] = len(return_JSON['items'])

    except (ValueError, KeyError):
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size - TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(_("TNM API ERROR or Zero tiles available for given input parameters."))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: simply continue with whatever is needed to be done in this case
    if cleanup_list:
        cleanup_msg = _("\n{0} existing incomplete file(s) detected and removed. Run module again.").format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # formats JSON size from bytes into needed units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if len_total_size <= 6:
            total_size_str = '{0} bytes'.format(total_size)
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        if len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(_("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(_("USGS download request has timed out. Network or formatting error."))
        except Exception:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    # pre-extraction stats are unreliable, so stats are collected during extraction
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, extract it again
                                    if os.path.getmtime(extracted_tile) < os.path.getmtime(z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(_(
                    "Unable to locate or extract IMG file '{filename}'"
                    " from ZIP archive '{zipname}': {error}").format(
                        filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(_("Extracted {extracted} new tiles and"
                          " used {used} existing tiles").format(
            used=used_existing_extracted_tiles_num,
            extracted=extracted_tiles_num
        ))
        if old_extracted_tiles_num:
            gscript.verbose(_("Found {removed} existing tiles older"
                              " than the corresponding downloaded archive").format(
                            removed=old_extracted_tiles_num
                            ))
        if removed_extracted_tiles_num:
            gscript.verbose(_("Removed {removed} existing tiles").format(
                            removed=removed_extracted_tiles_num
                            ))

    if gui_product == 'lidar' and not has_pdal:
        gscript.fatal(_("Module v.in.pdal is missing,"
                        " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (this is less of an
    # issue for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists("raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                                name=LT_file_name, count=i + 1, total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != 'lidar':
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_file_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            resolution='value', resolution_value=product_resolution,
                            extent="region", resample=product_interpolation,
                            memory=memory
                        ))
                else:
                    srs = options['input_srs']
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_lidar_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            input_srs=srs if srs else None
                        ))
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(_("Parallel import and reprojection failed."
                                        " Try running with nprocs=1."))
                    else:
                        gscript.fatal(_("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(_("Imported {imported} new tiles and"
                      " used {used} existing tiles").format(
        used=used_existing_imported_tiles_num,
        imported=imported_tiles_num
    ))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [name + '.' + i for name in patch_names]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch', input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == 'lidar':
                    gscript.run_command('v.patch', flags='nzb', input=patch_names,
                                        output=gui_output_layer)
                    gscript.run_command('v.surf.rst', input=gui_output_layer,
                                        elevation=gui_output_layer, nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command('r.patch', input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added").format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove the individual tiles unless they should be preserved
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [name + '.' + i for name in patch_names]
                            gscript.run_command('g.remove', type='raster',
                                                name=patch_names_i, flags='f')
                    elif gui_product == 'lidar':
                        gscript.run_command('g.remove', type='vector',
                                            name=patch_names + [gui_output_layer], flags='f')
                    else:
                        gscript.run_command('g.remove', type='raster',
                                            name=patch_names, flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename', raster=(patch_names[0] + '.' + i, gui_output_layer + '.' + i))
            elif gui_product == 'lidar':
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                gscript.run_command('v.surf.rst', input=patch_names[0],
                                    elevation=gui_output_layer, nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command('g.remove', type='vector',
                                        name=patch_names[0], flags='f')
            else:
                gscript.run_command('g.rename', raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(_("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(_(
            "Error in getting or importing the data (see above). Please retry."))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = ("<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors', map=gui_output_layer, color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite', red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2', blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
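A minimal, runnable sketch of the batching pattern used in the import loop above: start up to nprocs worker processes, join the whole batch, sum the exit codes and collect per-task results from a shared dictionary. The import_tile worker and the tile names are hypothetical stand-ins for run_file_import/run_lidar_import and the real tile list.

from multiprocessing import Manager, Process


def import_tile(identifier, results, tile):
    # Hypothetical worker: record an output name for this tile
    results[identifier] = {"output": "imported_{}".format(tile)}


def batched_import(tiles, nprocs=2):
    results = Manager().dict()  # shared across worker processes
    process_list = []
    for i, tile in enumerate(tiles):
        process = Process(target=import_tile, args=(i, results, tile))
        process.start()
        process_list.append(process)
        # Join the batch once it is full or the work is exhausted
        if len(process_list) == nprocs or i == len(tiles) - 1:
            exitcodes = 0
            for process in process_list:
                process.join()
                exitcodes += process.exitcode
            if exitcodes != 0:
                raise RuntimeError("at least one import worker failed")
            process_list = []
    return dict(results)


if __name__ == '__main__':
    print(batched_import(['n37w120', 'n38w120', 'n38w121']))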
Example #5
def main():
    inmap = options['input']
    outmap = options['output']
    gtype = options['type']

    global tmp, nuldev, grass_version
    nuldev = None

    # setup temporary files
    tmp = grass.tempfile()

    # check for LatLong location
    if grass.locn_is_latlong() == True:
        grass.fatal("Module works only in locations with cartesian coordinate system")

    # check if input file exists
    if not grass.find_file(inmap, element = 'vector')['file']:
        grass.fatal(_("<%s> does not exist.") % inmap)
        

    ## export geometry to CSV
    tmp1 = tmp + '.csv'
    if gtype:
        grass.run_command('v.out.ogr', input_ = inmap, output = tmp1, 
                      format_ = "CSV", type_ = '%s' % gtype, 
                          lco = "GEOMETRY=AS_WKT", quiet = True, stderr = nuldev)
    else:
        grass.run_command('v.out.ogr', input_ = inmap, output = tmp1, 
                      format_ = "CSV", type_ = ('point','centroid','line','boundary','area'), 
                      lco = "GEOMETRY=AS_WKT", quiet = True, stderr = nuldev)
    
    # open CSV with OGR
    input = ogr.Open(tmp1, 0)
    lyr = input.GetLayer(0)

    xlist = []
    ylist = []

    # export geometries as WKT
    for f in lyr:
        geom = f.GetGeometryRef()
        load = loads(geom.ExportToWkt())
        # compute centroid for each feature
        cent = str(load.centroid.wkt).replace('POINT ','').replace('(','').replace(')','')
        x = cent.split(' ')[0]
        y = cent.split(' ')[1]
        xlist.append(float(x))
        ylist.append(float(y))
        
    xy_list = list(zip(xlist, ylist))

    # compute centroid for centroids    
    mpoint = geometry.MultiPoint(xy_list)
    mcent = str(mpoint.centroid.wkt.replace('POINT ','').replace('(','').replace(')',''))


    # output
    if not outmap:
        print(mcent)
    else:
        out = tmp + '.out'
        outf = open(out, 'w')
        outf.write(mcent + '\n')
        outf.close()
        
        grass.run_command('v.in.ascii', input_ = out, output = outmap,
                          sep = ' ', quiet = True, stderr = nuldev)
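The centroid-of-centroids step above can be reproduced with shapely alone; a minimal sketch, assuming shapely is installed and using two hypothetical WKT strings in place of the CSV exported by v.out.ogr:

from shapely import wkt
from shapely.geometry import MultiPoint

# Hypothetical WKT features standing in for the v.out.ogr CSV export
features = ['POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0))',
            'POLYGON ((6 0, 10 0, 10 2, 6 2, 6 0))']

# One centroid per feature, then the centroid of those centroids
centroids = [wkt.loads(f).centroid for f in features]
mean_center = MultiPoint(centroids).centroid
print(mean_center.x, mean_center.y)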
Example #6
def main():
    """Do the main processing
    """

    # Parse input options:
    patch_map = options['input']
    patches = patch_map.split('@')[0]
    patches_mapset = patch_map.split('@')[1] if len(
        patch_map.split('@')) > 1 else None
    pop_proxy = options['pop_proxy']
    layer = options['layer']
    costs = options['costs']
    cutoff = float(options['cutoff'])
    border_dist = int(options['border_dist'])
    conefor_dir = options['conefor_dir']
    memory = int(options['memory'])

    # Parse output options:
    prefix = options['prefix']
    edge_map = '{}_edges'.format(prefix)
    vertex_map = '{}_vertices'.format(prefix)
    shortest_paths = '{}_shortest_paths'.format(prefix)

    # Parse flags:
    p_flag = flags['p']
    t_flag = flags['t']
    r_flag = flags['r']

    dist_flags = 'kn' if flags['k'] else 'n'

    lin_cat = 1
    zero_dist = None

    folder = grass.tempdir()
    if not os.path.exists(folder):
        os.makedirs(folder)

    # Setup counter for progress message
    counter = 0

    # Check if location is lat/lon (only in lat/lon geodesic distance
    # measuring is supported)
    if grass.locn_is_latlong():
        grass.verbose("Location is lat/lon: Geodesic distance \
                      measure is used")

    # Check if prefix is legal GRASS name
    if not grass.legal_name(prefix):
        grass.fatal('{} is not a legal name for GRASS \
                    maps.'.format(prefix))

    if prefix[0].isdigit():
        grass.fatal('Table names starting with a digit are not SQL \
                    compliant: {}'.format(prefix))

    # Check that output maps do not already exist or may be overwritten
    for output in [edge_map, vertex_map, shortest_paths]:
        if grass.db.db_table_exist(output) and not grass.overwrite():
            grass.fatal('Vector map <{}> already exists'.format(output))

    # Check if input has required attributes
    in_db_connection = grass.vector.vector_db(patch_map)
    if not int(layer) in in_db_connection.keys():
        grass.fatal('No attribute table connected to vector map {} at \
                    layer {}.'.format(patches, layer))

    # Get the attribute columns of the patch map
    pcols = grass.vector.vector_columns(patch_map, layer=layer)

    #Check if cat column exists
    if 'cat' not in pcols.keys():
        grass.fatal('Cannot find the required column cat in vector map \
                    {}.'.format(patches))

    #Check if pop_proxy column exists
    if pop_proxy not in pcols.keys():
        grass.fatal('Cannot find column {} in vector map \
                    {}'.format(pop_proxy, patches))

    #Check if pop_proxy column is numeric type
    if not pcols[pop_proxy]['type'] in ['INTEGER', 'REAL', 'DOUBLE PRECISION']:
        grass.fatal('Column {} is of type {}. Only numeric types \
                    (integer or double precision) \
                    allowed!'.format(pop_proxy, pcols[pop_proxy]['type']))

    #Check if pop_proxy column does not contain values <= 0
    pop_vals = np.fromstring(grass.read_command('v.db.select',
                                                flags='c',
                                                map=patches,
                                                columns=pop_proxy,
                                                nv=-9999).rstrip('\n'),
                             dtype=float,
                             sep='\n')

    if np.min(pop_vals) <= 0:
        grass.fatal('Column {} contains values <= 0 or NULL. Neither \
                    values <= 0 nor NULL allowed!'.format(pop_proxy))

    ##############################################
    # Use pygrass region instead of grass.parse_command !?!
    start_reg = grass.parse_command('g.region', flags='ugp')

    max_n = start_reg['n']
    min_s = start_reg['s']
    max_e = start_reg['e']
    min_w = start_reg['w']
    # cost_nsres = reg['nsres']
    # cost_ewres = reg['ewres']

    # Rasterize patches
    # http://www.gdal.org/gdal_tutorial.html
    # http://geoinformaticstutorial.blogspot.no/2012/11/convert-
    # shapefile-to-raster-with-gdal.html
    if t_flag:
        # Rasterize patches with "all-touched" mode using GDAL
        # Read region settings (not needed here; max_n, min_s, max_e,
        # min_w, nsres and ewres could be used instead)
        prast = os.path.join(folder, 'patches_rast.tif')

        # Check if GDAL-GRASS plugin is installed
        if ogr.GetDriverByName('GRASS'):
            #With GDAL-GRASS plugin
            #Locate file for patch vector map
            pfile = grass.parse_command('g.findfile',
                                        element='vector',
                                        file=patches,
                                        mapset=patches_mapset)['file']
            pfile = os.path.join(pfile, 'head')

        else:
            # Without GDAL-GRASS-plugin
            grass.warning("Cannot find GDAL-GRASS plugin. Consider \
                          installing it in order to save time for \
                          all-touched rasterisation")
            pfile = os.path.join(folder, 'patches_vect.gpkg')
            # Export patch vector map to temp-file in a GDAL-readable
            # format (GPKG)
            grass.run_command('v.out.ogr',
                              flags='m',
                              quiet=True,
                              input=patch_map,
                              type='area',
                              layer=layer,
                              output=pfile,
                              lco='GEOMETRY_NAME=geom')

        # Rasterize vector map with all-touched option
        os.system('gdal_rasterize -l {} -at -tr {} {} \
                  -te {} {} {} {} -ot UInt32 -a cat \
                  {} {} -q'.format(patches, start_reg['ewres'],
                                   start_reg['nsres'], start_reg['w'],
                                   start_reg['s'], start_reg['e'],
                                   start_reg['n'], pfile, prast))

        if not ogr.GetDriverByName('GRASS'):
            # Remove vector temp-file
            os.remove(os.path.join(folder, 'patches_vect.gpkg'))

        # Import rasterized patches
        grass.run_command('r.external',
                          flags='o',
                          quiet=True,
                          input=prast,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    else:
        # Simple rasterisation (only area)
        # in G 7.6 also with support for 'centroid'
        if float(grass.version()['version'][:3]) >= 7.6:
            conv_types = ['area', 'centroid']
        else:
            conv_types = ['area']
        grass.run_command('v.to.rast',
                          quiet=True,
                          input=patches,
                          use='cat',
                          type=conv_types,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    # Extract boundaries from patch raster map
    grass.run_command('r.mapcalc',
                      expression='{p}_patches_boundary=if(\
    {p}_patches_pol,\
    if((\
    (isnull({p}_patches_pol[-1,0])||| \
    {p}_patches_pol[-1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,1])||| \
    {p}_patches_pol[0,1]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[1,0])||| \
    {p}_patches_pol[1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,-1])||| \
    {p}_patches_pol[0,-1]!={p}_patches_pol)), \
    {p}_patches_pol,null()), null())'.format(p=TMP_PREFIX),
                      quiet=True)
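    # The expression above keeps a cell of the patch raster wherever at
    # least one of its four direct neighbours is null or belongs to a
    # different category, so only patch boundary cells survive; ||| is
    # the r.mapcalc "or" variant that still yields a result when one
    # operand is null.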

    rasterized_cats = grass.read_command(
        'r.category',
        separator='newline',
        map='{p}_patches_boundary'.format(p=TMP_PREFIX)).replace(
            '\t', '').strip('\n')
    rasterized_cats = list(
        map(int, set([x for x in rasterized_cats.split('\n') if x != ''])))

    #Init output vector maps if they are requested by user
    network = VectorTopo(edge_map)
    network_columns = [(u'cat', 'INTEGER PRIMARY KEY'), (u'from_p', 'INTEGER'),
                       (u'to_p', 'INTEGER'), (u'min_dist', 'DOUBLE PRECISION'),
                       (u'dist', 'DOUBLE PRECISION'),
                       (u'max_dist', 'DOUBLE PRECISION')]
    network.open('w', tab_name=edge_map, tab_cols=network_columns)

    vertex = VectorTopo(vertex_map)
    vertex_columns = [
        (u'cat', 'INTEGER PRIMARY KEY'),
        (pop_proxy, 'DOUBLE PRECISION'),
    ]
    vertex.open('w', tab_name=vertex_map, tab_cols=vertex_columns)

    if p_flag:
        # Init cost paths file for start-patch
        grass.run_command('v.edit',
                          quiet=True,
                          map=shortest_paths,
                          tool='create')
        grass.run_command('v.db.addtable',
                          quiet=True,
                          map=shortest_paths,
                          columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")

    start_region_bbox = Bbox(north=float(max_n),
                             south=float(min_s),
                             east=float(max_e),
                             west=float(min_w))
    vpatches = VectorTopo(patches, mapset=patches_mapset)
    vpatches.open('r', layer=int(layer))

    ###Loop through patches
    vpatch_ids = np.array(vpatches.features_to_wkb_list(
        feature_type="centroid", bbox=start_region_bbox),
                          dtype=[('vid', 'uint32'), ('cat', 'uint32'),
                                 ('geom', '|S10')])
    cats = set(vpatch_ids['cat'])
    n_cats = len(cats)
    if n_cats < len(vpatch_ids['cat']):
        grass.verbose('At least one MultiPolygon found in patch map.\n \
                      Using average coordinates of the centroids for \
                      visual representation of the patch.')

    for cat in cats:
        if cat not in rasterized_cats:
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            continue
        grass.verbose("Calculating connectivity-distances for patch \
                      number {}".format(cat))

        # Filter
        from_vpatch = vpatch_ids[vpatch_ids['cat'] == cat]

        # Get patch ID
        if from_vpatch['vid'].size == 1:
            from_centroid = Centroid(v_id=int(from_vpatch['vid']),
                                     c_mapinfo=vpatches.c_mapinfo)
            from_x = from_centroid.x
            from_y = from_centroid.y

            # Get centroid
            if not from_centroid:
                continue
        else:
            xcoords = []
            ycoords = []
            for f_p in from_vpatch['vid']:
                from_centroid = Centroid(v_id=int(f_p),
                                         c_mapinfo=vpatches.c_mapinfo)
                xcoords.append(from_centroid.x)
                ycoords.append(from_centroid.y)

                # Get centroid
                if not from_centroid:
                    continue
            from_x = np.average(xcoords)
            from_y = np.average(ycoords)

        # Get BoundingBox
        from_bbox = grass.parse_command('v.db.select',
                                        map=patch_map,
                                        flags='r',
                                        where='cat={}'.format(cat))

        attr_filter = vpatches.table.filters.select(pop_proxy)
        attr_filter = attr_filter.where("cat={}".format(cat))
        proxy_val = vpatches.table.execute().fetchone()

        # Prepare start patch
        start_patch = '{}_patch_{}'.format(TMP_PREFIX, cat)
        reclass_rule = grass.encode('{} = 1\n* = NULL'.format(cat))
        recl = grass.feed_command(
            'r.reclass',
            quiet=True,
            input='{}_patches_boundary'.format(TMP_PREFIX),
            output=start_patch,
            rules='-')
        recl.stdin.write(reclass_rule)
        recl.stdin.close()
        recl.wait()

        # Check if patch was rasterised (patches smaller than the raster resolution and close to larger patches may not be rasterised)
        #start_check = grass.parse_command('r.info', flags='r', map=start_patch)
        #start_check = grass.parse_command('r.univar', flags='g', map=start_patch)
        #print(start_check)
        """if start_check['min'] != '1':
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            grass.run_command('g.remove', flags='f', vector=start_patch,
                              raster=start_patch, quiet=True)
            grass.del_temp_region()
            continue"""

        # Prepare stop patches
        ############################################
        reg = grass.parse_command('g.region',
                                  flags='ug',
                                  quiet=True,
                                  raster=start_patch,
                                  n=float(from_bbox['n']) + float(cutoff),
                                  s=float(from_bbox['s']) - float(cutoff),
                                  e=float(from_bbox['e']) + float(cutoff),
                                  w=float(from_bbox['w']) - float(cutoff),
                                  align='{}_patches_pol'.format(TMP_PREFIX))

        north = reg['n'] if max_n > reg['n'] else max_n
        south = reg['s'] if min_s < reg['s'] else min_s
        east = reg['e'] if max_e < reg['e'] else max_e
        west = reg['w'] if min_w > reg['w'] else min_w

        # Set region to patch search radius
        grass.use_temp_region()
        grass.run_command('g.region',
                          quiet=True,
                          n=north,
                          s=south,
                          e=east,
                          w=west,
                          align='{}_patches_pol'.format(TMP_PREFIX))

        # Create buffer around start-patch as a mask
        # for cost distance analysis
        grass.run_command('r.buffer',
                          quiet=True,
                          input=start_patch,
                          output='MASK',
                          distances=cutoff)
        grass.run_command('r.mapcalc',
                          quiet=True,
                          expression='{pf}_patch_{p}_neighbours_contur=\
                                     if({pf}_patches_boundary=={p},\
                                     null(),\
                                     {pf}_patches_boundary)'.format(
                              pf=TMP_PREFIX, p=cat))
        grass.run_command('r.mask', flags='r', quiet=True)

        # Calculate cost distance
        cost_distance_map = '{}_patch_{}_cost_dist'.format(prefix, cat)
        grass.run_command('r.cost',
                          flags=dist_flags,
                          quiet=True,
                          overwrite=True,
                          input=costs,
                          output=cost_distance_map,
                          start_rast=start_patch,
                          memory=memory)

        #grass.run_command('g.region', flags='up')
        # grass.raster.raster_history(cost_distance_map)
        cdhist = History(cost_distance_map)
        cdhist.clear()
        cdhist.creator = os.environ['USER']
        cdhist.write()
        # History object cannot modify description
        grass.run_command('r.support',
                          map=cost_distance_map,
                          description='Generated by r.connectivity.distance',
                          history=os.environ['CMDLINE'])

        # Export distance at boundaries
        maps = '{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist'
        maps = maps.format(TMP_PREFIX, cat, prefix)

        connections = grass.encode(
            grass.read_command('r.stats',
                               flags='1ng',
                               quiet=True,
                               input=maps,
                               separator=';').rstrip('\n'))
        if connections:
            con_array = np.genfromtxt(BytesIO(connections),
                                      delimiter=';',
                                      dtype=None,
                                      names=['x', 'y', 'cat', 'dist'])
        else:
            grass.warning('No connections for patch {}'.format(cat))

            # Write centroid to vertex map
            vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)
            vertex.table.conn.commit()

            # Remove temporary map data
            grass.run_command('g.remove',
                              quiet=True,
                              flags='f',
                              type=['raster', 'vector'],
                              pattern="{}*{}*".format(TMP_PREFIX, cat))
            grass.del_temp_region()
            continue

        #Find closest points on neighbour patches
        to_cats = set(np.atleast_1d(con_array['cat']))
        to_coords = []
        for to_cat in to_cats:
            connection = con_array[con_array['cat'] == to_cat]
            connection.sort(order=['dist'])
            pixel = border_dist if len(
                connection) > border_dist else len(connection) - 1
            # closest_points_x = connection['x'][pixel]
            # closest_points_y = connection['y'][pixel]
            closest_points_to_cat = to_cat
            closest_points_min_dist = connection['dist'][0]
            closest_points_dist = connection['dist'][pixel]
            closest_points_max_dist = connection['dist'][-1]
            to_patch_ids = vpatch_ids[vpatch_ids['cat'] == int(to_cat)]['vid']

            if len(to_patch_ids) == 1:
                to_centroid = Centroid(v_id=to_patch_ids,
                                       c_mapinfo=vpatches.c_mapinfo)
                to_x = to_centroid.x
                to_y = to_centroid.y
            elif len(to_patch_ids) > 1:
                xcoords = []
                ycoords = []
                for t_p in to_patch_ids:
                    to_centroid = Centroid(v_id=int(t_p),
                                           c_mapinfo=vpatches.c_mapinfo)
                    xcoords.append(to_centroid.x)
                    ycoords.append(to_centroid.y)

                    # Get centroid
                    if not to_centroid:
                        continue
                to_x = np.average(xcoords)
                to_y = np.average(ycoords)

            to_coords.append('{},{},{},{},{},{}'.format(
                connection['x'][0], connection['y'][0], to_cat,
                closest_points_min_dist, closest_points_dist,
                closest_points_max_dist))

            #Save edges to network dataset
            if closest_points_dist <= 0:
                zero_dist = 1

            # Write data to network
            network.write(Line([(from_x, from_y), (to_x, to_y)]),
                          cat=lin_cat,
                          attrs=(
                              cat,
                              int(closest_points_to_cat),
                              closest_points_min_dist,
                              closest_points_dist,
                              closest_points_max_dist,
                          ))
            network.table.conn.commit()

            lin_cat = lin_cat + 1

        # Save closest points and shortest paths through cost raster as
        # vector map (r.drain limited to 1024 points) if requested
        if p_flag:
            grass.verbose('Extracting shortest paths for patch number \
                          {}...'.format(cat))

            points_n = len(to_cats)

            tiles = int(points_n / 1024.0)
            rest = points_n % 1024
            if rest != 0:
                tiles = tiles + 1

            tile_n = 0
            while tile_n < tiles:
                tile_n = tile_n + 1
                # Import closest points for start-patch in blocks of 1024
                sp = grass.feed_command('v.in.ascii',
                                        flags='nr',
                                        overwrite=True,
                                        quiet=True,
                                        input='-',
                                        stderr=subprocess.PIPE,
                                        output="{}_{}_cp".format(
                                            TMP_PREFIX, cat),
                                        separator=",",
                                        columns="x double precision,\
                                           y double precision,\
                                           to_p integer,\
                                           dist_min double precision,\
                                           dist double precision,\
                                           dist_max double precision")
                # Feed only the current chunk of up to 1024 points
                chunk = to_coords[(tile_n - 1) * 1024:tile_n * 1024]
                sp.stdin.write(grass.encode("\n".join(chunk)))
                sp.stdin.close()
                sp.wait()

                # Extract shortest paths for start-patch in chunks of
                # 1024 points
                cost_paths = "{}_{}_cost_paths".format(TMP_PREFIX, cat)
                start_points = "{}_{}_cp".format(TMP_PREFIX, cat)
                grass.run_command('r.drain',
                                  overwrite=True,
                                  quiet=True,
                                  input=cost_distance_map,
                                  output=cost_paths,
                                  drain=cost_paths,
                                  start_points=start_points)

                grass.run_command('v.db.addtable',
                                  map=cost_paths,
                                  quiet=True,
                                  columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")
                grass.run_command('v.db.update',
                                  map=cost_paths,
                                  column='from_p',
                                  value=cat,
                                  quiet=True)
                grass.run_command('v.distance',
                                  quiet=True,
                                  from_=cost_paths,
                                  to=start_points,
                                  upload='to_attr',
                                  column='to_p',
                                  to_column='to_p')
                grass.run_command('v.db.join',
                                  quiet=True,
                                  map=cost_paths,
                                  column='to_p',
                                  other_column='to_p',
                                  other_table=start_points,
                                  subset_columns='dist_min,dist,dist_max')

                #grass.run_command('v.info', flags='c',
                #                  map=cost_paths)
                grass.run_command('v.patch',
                                  flags='ae',
                                  overwrite=True,
                                  quiet=True,
                                  input=cost_paths,
                                  output=shortest_paths)

                # Remove temporary map data
                grass.run_command('g.remove',
                                  quiet=True,
                                  flags='f',
                                  type=['raster', 'vector'],
                                  pattern="{}*{}*".format(TMP_PREFIX, cat))

        # Remove temporary map data for patch
        if r_flag:
            grass.run_command('g.remove',
                              flags='f',
                              type='raster',
                              name=cost_distance_map,
                              quiet=True)

        vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)

        vertex.table.conn.commit()

        # Print progress message
        grass.percent(i=int((float(counter) / n_cats) * 100), n=100, s=3)

        # Update counter for progress message
        counter = counter + 1

    if zero_dist:
        grass.warning('Some patches are directly adjacent to others. \
                       Minimum distance set to 0.0000000001')

    # Close vector maps and build topology
    network.close()
    vertex.close()

    # Add vertex attributes
    # grass.run_command('v.db.addtable', map=vertex_map)
    # grass.run_command('v.db.join', map=vertex_map, column='cat',
    #                   other_table=in_db_connection[int(layer)]['table'],
    #                   other_column='cat', subset_columns=pop_proxy,
    #                   quiet=True)

    # Add history and meta data to produced maps
    grass.run_command('v.support',
                      flags='h',
                      map=edge_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    grass.run_command('v.support',
                      flags='h',
                      map=vertex_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    if p_flag:
        grass.run_command('v.support',
                          flags='h',
                          map=shortest_paths,
                          person=os.environ['USER'],
                          cmdhist=os.environ['CMDLINE'])

    # Output also Conefor files if requested
    if conefor_dir:
        query = """SELECT p_from, p_to, avg(dist) FROM
                 (SELECT
                 CASE
                 WHEN from_p > to_p THEN to_p
                 ELSE from_p END AS p_from,
                    CASE
                 WHEN from_p > to_p THEN from_p
                 ELSE to_p END AS p_to,
                 dist
                 FROM {}) AS x
                 GROUP BY p_from, p_to""".format(edge_map)
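        # The inner SELECT folds each directed edge (from_p, to_p) onto a
        # canonical order (smaller cat first); the outer GROUP BY then
        # averages the two directed distances into one undirected edge.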
        with open(os.path.join(conefor_dir, 'undirected_connection_file'),
                  'w') as edges:
            edges.write(
                grass.read_command('db.select', sql=query, separator=' '))
        with open(os.path.join(conefor_dir, 'directed_connection_file'),
                  'w') as edges:
            edges.write(
                grass.read_command('v.db.select',
                                   map=edge_map,
                                   separator=' ',
                                   flags='c'))
        with open(os.path.join(conefor_dir, 'node_file'), 'w') as nodes:
            nodes.write(
                grass.read_command('v.db.select',
                                   map=vertex_map,
                                   separator=' ',
                                   flags='c'))
Example #7
def main():
    inmap = options["input"]
    outmap = options["output"]
    coor = options["coor"]
    coor = coor.replace(",", " ")

    global tmp, grass_version

    # setup temporary files
    tmp = grass.tempfile()

    # check for LatLong location
    if grass.locn_is_latlong():
        grass.fatal("Module works only in locations with cartesian coordinate system")

    # check if input file exists
    if not grass.find_file(inmap, element="vector")["file"]:
        grass.fatal(_("<%s> does not exist.") % inmap)

    ## DO IT ##
    ## add categories to boundaries
    grass.run_command(
        "v.category",
        input_=inmap,
        option="add",
        type_="boundary",
        output="v_temp_bcats",
        quiet=True,
        stderr=None,
    )

    ## export polygons to CSV + WKT
    tmp1 = tmp + ".csv"
    tmp2 = tmp + "2.csv"
    grass.run_command(
        "v.out.ogr",
        input_="v_temp_bcats",
        output=tmp1,
        format_="CSV",
        type_=("boundary"),
        lco="GEOMETRY=AS_WKT",
        quiet=True,
        stderr=None,
    )

    ## convert lines to polygons
    f1 = open(tmp1, "r")
    f2 = open(tmp2, "w")
    for line in f1:
        f2.write(
            line.replace("LINESTRING", "POLYGON")
            .replace(" (", " ((")
            .replace(')"', '))"')
        )
    f1.close()
    f2.close()

    with open(tmp2, "r") as f:
        print(f.read())

    ## open CSV with OGR and get layer name
    f = ogr.Open(tmp2, 0)
    lyr = f.GetLayer(0)
    lyr_name = lyr.GetName()

    ## make spatial query with coordinates
    coords = "%s %s" % (coor, coor)
    tmp3 = tmp + "_v_temp_select.shp"
    cmd = "ogr2ogr " + " -spat " + coords + " " + tmp3 + " " + tmp2 + " " + lyr_name
    os.system(cmd)

    ## open SHP with OGR and get layer name
    f = ogr.Open(tmp3, 0)
    lyr = f.GetLayer(0)
    lyr_name = lyr.GetName()

    ## print selected objects to stdout or write into vector map
    if flags["p"]:
        cmd = "ogrinfo -al -fields=YES -geom=SUMMARY" + " " + tmp3 + " " + lyr_name
        os.system(cmd)
    else:
        grass.run_command(
            "v.in.ogr",
            input_=tmp3,
            layer=lyr_name,
            output=outmap,
            flags="c",
            quiet=True,
            stderr=None,
        )
Example #8
def main():
    check_progs()
    
    inmap = options['input']
    output = options['ldm']
    width = options['width']
    color = options['color']
    graph = options['graph']
    ldm_type = options['type']

    mapset = grass.gisenv()['MAPSET']

    global tmp, nuldev, grass_version
    nuldev = None

    grass_version = grass.version()['version'][0]
    if grass_version != '7':
        grass.fatal(_("Sorry, this script works in GRASS 7.* only"))

    # setup temporary files
    tmp = grass.tempfile()
    
    # check for LatLong location
    if grass.locn_is_latlong() == True:
        grass.fatal("Module works only in locations with cartesian coordinate system")


    # check if input file exists
    if not grass.find_file(inmap, element = 'vector')['file']:
        grass.fatal(_("<%s> does not exist.") % inmap)
        
    # check for lines
    iflines = grass.vector_info_topo(inmap)['lines']
    if iflines == 0:
        grass.fatal(_("Map <%s> has no lines.") % inmap)
    

    # display options
    if flags['x']:
        env = grass.gisenv()
        mon = env.get('MONITOR', None)
        if not mon:
            if not graph:
                grass.fatal(_("Please choose \"graph\" output file with LDM graphics or not use flag \"x\""))

    
    ####### DO IT #######
    # copy input vector map and drop table
    grass.run_command('g.copy', vect = (inmap, 'v_ldm_vect'), quiet = True, stderr = nuldev)
    db = grass.vector_db('v_ldm_vect')
    if db != {}:
        grass.run_command('v.db.droptable', map_ = 'v_ldm_vect', flags = 'f', quiet = True, stderr = nuldev)

    # compute mean center of lines with v.mc.py module
    center_coords = grass.read_command('v.mc.py', input_ = inmap, type_ = 'line',
                                quiet = True, stderr = nuldev).strip()
    mc_x = center_coords.split(' ')[0]
    mc_y = center_coords.split(' ')[1]

    center_coords = str(mc_x) + ',' + str(mc_y)

    ### 
    inmap = 'v_ldm_vect'

    # count lines
    count = grass.vector_info_topo(inmap)['lines']

    # add temp table with azimuths and lengths of lines
    in_cats = inmap + '_cats'    
    grass.run_command('v.category', input_ = inmap, option = 'add', 
                      output = in_cats, quiet = True, stderr = nuldev)
    grass.run_command('v.db.addtable', map_ = in_cats, table = 'tmp_tab', 
                      columns = 'sum_azim double, len double', quiet = True, stderr = nuldev)
    grass.run_command('v.db.connect', map_ = in_cats, table = 'tmp_tab', 
                      flags = 'o', quiet = True, stderr = nuldev)
    grass.run_command('v.to.db', map_ = in_cats, opt = 'azimuth', 
                      columns = 'sum_azim', units = 'radians', quiet = True, stderr = nuldev)
    grass.run_command('v.to.db', map_ = in_cats, opt = 'length',  
                      columns = 'len', units = 'meters', quiet = True, stderr = nuldev)    

    # find end azimuth
    p = grass.pipe_command('v.db.select', map_ = in_cats, columns = 'sum_azim', flags = 'c', quiet = True, stderr = nuldev)
    c = grass.decode(p.communicate()[0]).strip().split('\n')

    sin = []
    cos = []
    
    for i in c:
        s1 = math.sin(float(i))
        c1 = math.cos(float(i))
        sin.append(s1)
        cos.append(c1)

    ca_sin = sum(map(float,sin))
    ca_cos = sum(map(float,cos))
    
    atan = math.atan2(ca_sin,ca_cos)
    end_azim = math.degrees(atan)
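    # The directional mean is the angle of the resultant vector of all
    # line azimuths, i.e. atan2 of the summed sines and cosines.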

    # find compass angle    
    if end_azim < 0:
        a2 = -(end_azim)
    else:
        a2 = end_azim
    if (ca_sin > 0) and (ca_cos > 0):
        comp_angle = a2
    if (ca_sin > 0) and (ca_cos < 0):
        comp_angle = a2
    if (ca_sin < 0) and (ca_cos > 0):
        comp_angle = 360 - a2
    if (ca_sin < 0) and (ca_cos < 0):
        comp_angle = 360 - a2

    # find LDM
    if end_azim < 0:
        a2 = -(end_azim)
    else:
        a2 = end_azim
    if (ca_sin > 0) and (ca_cos > 0):
        ldm = 90 - a2
    if (ca_sin > 0) and (ca_cos < 0):
        ldm = 450 - a2
    if (ca_sin < 0) and (ca_cos > 0):
        ldm = 90 + a2
    if (ca_sin < 0) and (ca_cos < 0):
        ldm = 90 + a2

    # find circular variance
    sin_pow = math.pow(ca_sin,2) 
    cos_pow = math.pow(ca_cos,2) 

    circ_var = 1-(math.sqrt(sin_pow+cos_pow))/count
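    # Circular variance is 1 - R/n, where R = sqrt((sum sin)^2 + (sum cos)^2)
    # is the resultant vector length and n the number of lines: 0 means all
    # lines share one direction, values near 1 mean no preferred direction.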

    # find start/end points of "mean" line
    end_azim_dms = decimal2dms(end_azim)

    # if end_azim < 0:
    #     end_azim_dms = '-' + (str(end_azim_dms))

    start_azim = 180 - end_azim
    start_azim_dms = decimal2dms(start_azim)
    
    p = grass.pipe_command('v.db.select', map_ = in_cats, columns = 'len',
                           flags = 'c', quiet = True, stderr = nuldev)
    c = grass.decode(p.communicate()[0]).strip().split('\n')

    mean_length = sum(map(float,c))/len(c)
    half_length = float(mean_length)/2

    tmp1 = tmp + '.inf'
    inf1 = open(tmp1, 'w')
    inf1.write('N ' + str(end_azim_dms) + ' E ' + str(half_length) + '\n')
    inf1.close()
    
    end_coords = grass.read_command('m.cogo', input_ = tmp1, output = '-',
                                    coord = center_coords, quiet = True).strip()

    tmp2 = tmp + '.inf2'
    inf2 = open(tmp2, 'w')
    inf2.write('N ' + str(start_azim_dms) + ' W ' + str(half_length) + '\n')
    inf2.close()

    start_coords = grass.read_command('m.cogo', input_ = tmp2, output = '-',
                                      coord = center_coords, quiet = True).strip()

    # make "arrowhead" symbol
    if flags['x'] or graph:
        tmp3 = tmp + '.arrowhead_1'
        outf3 = open(tmp3, 'w')

        if ldm_type == 'direct':
            t1 = """VERSION 1.0
BOX -0.5 -0.5 0.5 0.5
POLYGON
  RING
  FCOLOR NONE
    LINE
      0 0
      0.3 -1
    END
  END
POLYGON
  RING
  FCOLOR NONE
    LINE
      0 0
      -0.3 -1
    END
  END
END
"""
            outf3.write(t1)
            outf3.close()
    
            gisdbase = grass.gisenv()['GISDBASE']
            location = grass.gisenv()['LOCATION_NAME']
            mapset = grass.gisenv()['MAPSET']
            symbols_dir = os.path.join(gisdbase, location, mapset, 'symbol', 'arrows')
            symbol = os.path.join(symbols_dir, 'arrowhead_1')
    
            if not os.path.exists(symbols_dir):
                try:
                    os.makedirs(symbols_dir)
                except OSError:
                    pass
        
            if not os.path.isfile(symbol):
                shutil.copyfile(tmp3, symbol)
        
    
        # write LDM graph file and optionally display line of LDM with an arrow
    tmp4 = tmp + '.ldm'
    outf4 = open(tmp4, 'w')
    
    arrow_size = int(width) * 1.4
    arrow_azim = 360 - float(end_azim)

    if ldm_type == 'direct':
        t2 = string.Template("""
move $start_coords
width $width
color $color
draw $end_coords

rotation $arrow_azim
width $width
symbol $symbol_s $arrow_size $end_coords $color
""")    
        s2 = t2.substitute(start_coords = start_coords, width = width, color = color,
                       end_coords = end_coords, arrow_azim = arrow_azim,
                       symbol_s = "arrows/arrowhead_1", arrow_size = arrow_size)
    else:
        t2 = string.Template("""
move $start_coords
width $width
color $color
draw $end_coords
""")    
        s2 = t2.substitute(start_coords = start_coords, width = width, color = color,
                       end_coords = end_coords)

    outf4.write(s2)
    outf4.close()

    if graph:
        shutil.copy(tmp4, graph)



    # save LDM line to vector if option "output" set  
    if output:
        tmp5 = tmp + '.line'
        outf5 = open(tmp5, 'w')

        outf5.write(str(start_coords) + '\n')
        outf5.write(str(end_coords) + '\n')

        outf5.close()

        grass.run_command('v.in.lines', input_ = tmp5, output = output,
                              separator = " ", overwrite = True, quiet = True)

        out_cats = output + '_cats'
        grass.run_command('v.category', input_ = output, option = 'add', 
                          output = out_cats, quiet = True, stderr = nuldev)
        grass.run_command('g.rename', vect = (out_cats,output), 
                          overwrite = True, quiet = True, stderr = nuldev)
        
        if circ_var:
            col = 'comp_angle double,dir_mean double,cir_var double,ave_x double,ave_y double,ave_len double'
        else:
            col = 'comp_angle double,dir_mean double,ave_x double,ave_y double,ave_len double'
                        
        grass.run_command('v.db.addtable', map_ = output, columns = col, quiet = True, stderr = nuldev)

        tmp6 = tmp + '.sql'
        outf6 = open(tmp6, 'w')
                
        t3 = string.Template("""
UPDATE $output SET comp_angle = $comp_angle;
UPDATE $output SET dir_mean = $ldm;
UPDATE $output SET ave_x = $mc_x;
UPDATE $output SET ave_y = $mc_y;
UPDATE $output SET ave_len = $mean_length;
""")
        s3 = t3.substitute(output = output, comp_angle = ("%0.3f" % comp_angle),
                           ldm = ("%0.3f" % ldm), mc_x = ("%0.3f" % float(mc_x)),
                           mc_y = ("%0.3f" % float(mc_y)), mean_length = ("%0.3f" % mean_length))
        outf6.write(s3)

        if circ_var:
            outf6.write("UPDATE %s SET cir_var = %0.3f;\n" % (output, circ_var))

        outf6.close()

        grass.run_command('db.execute', input_ = tmp6, quiet = True, stderr = nuldev)


    # print LDM parameters to stdout (with <-g> flag in shell style):
    print_out = ['Compass Angle', 'Directional Mean', 'Average Center', 'Average Length']
    if circ_var:
        print_out.append('Circular Variance')
        
    print_shell = ['compass_angle', 'directional_mean', 'average_center',
                   'average_length']
    if circ_var:
        print_shell.append('circular_variance')
        
    print_vars = ["%0.3f" % comp_angle, "%0.3f" % ldm,
                  "%0.3f" % float(mc_x) + ',' + "%0.3f" % float(mc_y),
                  "%0.3f" % mean_length]
    if circ_var:
        print_vars.append("%0.3f" % circ_var)


    if flags['g']:
        for i, j in zip(print_shell, print_vars):
            print("%s=%s" % (i, j))
    else:
        for i, j in zip(print_out, print_vars):
            print("%s: %s" % (i, j))


    # display LDM graphics
    if flags['x']:
        if mon:
            if graph:
                grass.run_command('d.graph', input_ = graph, flags = 'm', quiet = True, stderr = nuldev)
            else:
                grass.run_command('d.graph', input_ = tmp4, flags = 'm', quiet = True, stderr = nuldev)
        elif graph:
            grass.message(_("\n Use this command in wxGUI \"Command console\" or with <d.mon> or with \"command layer\" to display LDM graphics: \n d.graph -m input=%s \n\n" ) % graph)
Example #9
def main():
    """Do the main work"""

    # set numpy printing options
    np.set_printoptions(formatter={"float": lambda x: "{0:0.2f}".format(x)})

    # ==========================================================================
    # Input data
    # ==========================================================================
    # Required
    r_output = options["output"]
    r_dsm = options["input"]
    dsm_type = grass.parse_command("r.info", map=r_dsm, flags="g")["datatype"]

    # Test if DSM exist
    gfile_dsm = grass.find_file(name=r_dsm, element="cell")
    if not gfile_dsm["file"]:
        grass.fatal("Raster map <{}> not found".format(r_dsm))

    # Exposure settings
    v_source = options["sampling_points"]
    r_source = options["source"]
    source_cat = options["sourcecat"]
    r_weights = options["weights"]

    # test if source vector map exist and contains points
    if v_source:
        gfile_vsource = grass.find_file(name=v_source, element="vector")
        if not gfile_vsource["file"]:
            grass.fatal("Vector map <{}> not found".format(v_source))
        if not grass.vector.vector_info_topo(v_source, layer=1)["points"] > 0:
            grass.fatal("Vector map <{}> does not contain any points.".format(
                v_source))

    if r_source:
        gfile_rsource = grass.find_file(name=r_source, element="cell")
        if not gfile_rsource["file"]:
            grass.fatal("Raster map <{}> not found".format(r_source))

        # if source_cat is set, check that r_source is CELL
        source_datatype = grass.parse_command("r.info",
                                              map=r_source,
                                              flags="g")["datatype"]

        if source_cat != "*" and source_datatype != "CELL":
            grass.fatal(
                "The raster map <%s> must be integer (CELL type) in order to \
                use the 'sourcecat' parameter" % r_source)

    if r_weights:
        gfile_weights = grass.find_file(name=r_weights, element="cell")
        if not gfile_weights["file"]:
            grass.fatal("Raster map <{}> not found".format(r_weights))

    # Viewshed settings
    range_inp = float(options["range"])
    v_elevation = float(options["observer_elevation"])
    b_1 = float(options["b1_distance"])
    pfunction = options["function"]
    refr_coeff = float(options["refraction_coeff"])
    flagstring = ""
    if flags["r"]:
        flagstring += "r"
    if flags["c"]:
        flagstring += "c"

    # test values
    if v_elevation < 0.0:
        grass.fatal("Observer elevation must be larger than or equal to 0.0.")

    if range_inp <= 0.0 and range_inp != -1:
        grass.fatal("Exposure range must be larger than 0.0.")

    if pfunction == "Fuzzy_viewshed" and range_inp == -1:
        grass.fatal("Exposure range cannot be \
            infinity for fuzzy viewshed approch.")

    if pfunction == "Fuzzy_viewshed" and b_1 > range_inp:
        grass.fatal("Exposure range must be larger than radius around \
            the viewpoint where clarity is perfect.")

    # Sampling settings
    source_sample_density = float(options["sample_density"])
    seed = options["seed"]

    if not seed:  # if seed is not set, set it to process number
        seed = os.getpid()

    # Optional
    cores = int(options["nprocs"])
    memory = int(options["memory"])

    # ==========================================================================
    # Region settings
    # ==========================================================================
    # check that location is not in lat/long
    if grass.locn_is_latlong():
        grass.fatal("The analysis is not available for lat/long coordinates.")

    # get comp. region parameters
    reg = Region()

    # check that NSRES equals EWRES
    if abs(reg.ewres - reg.nsres) > 1e-6:
        grass.fatal("Variable north-south and east-west 2D grid resolution \
            is not supported")

    # adjust exposure range to a multiple of the region resolution
    # if infinite, set exposure range to the max of region size
    if range_inp != -1:
        multiplicate = math.floor(range_inp / reg.nsres)
        exp_range = multiplicate * reg.nsres
    else:
        range_inf = max(reg.north - reg.south, reg.east - reg.west)
        multiplicate = math.floor(range_inf / reg.nsres)
        exp_range = multiplicate * reg.nsres

    if RasterRow("MASK", Mapset().name).exist():
        grass.warning("Current MASK is temporarily renamed.")
        unset_mask()

    # ==========================================================================
    # Random sample exposure source with target points T
    # ==========================================================================
    if v_source:
        # go for using input vector map as sampling points
        v_source_sample = v_source
        grass.verbose("Using sampling points from input vector map")

    else:
        # go for sampling

        # min. distance between samples set to half of region resolution
        # (issue in r.random.cells)
        sample_distance = reg.nsres / 2
        v_source_sample = sample_raster_with_points(
            r_source,
            source_cat,
            source_sample_density,
            sample_distance,
            "{}_rand_pts_vect".format(TEMPNAME),
            seed,
        )

    # ==========================================================================
    # Get coordinates and attributes of target points T
    # ==========================================================================
    # Prepare a list of maps to extract attributes from
    # DSM values
    attr_map_list = [r_dsm]

    if pfunction in ["Solid_angle", "Visual_magnitude"]:
        grass.verbose("Precomputing parameter maps...")

    # Precompute values A, B, C, D for solid angle function
    # using moving window [row, col]
    if pfunction == "Solid_angle":
        r_a_z = "{}_A_z".format(TEMPNAME)
        r_b_z = "{}_B_z".format(TEMPNAME)
        r_c_z = "{}_C_z".format(TEMPNAME)
        r_d_z = "{}_D_z".format(TEMPNAME)

        expr = ";".join([
            "$outmap_A = ($inmap[0, 0] + \
                          $inmap[0, -1] + \
                          $inmap[1, -1] + \
                          $inmap[1, 0]) / 4",
            "$outmap_B = ($inmap[-1, 0] + \
                          $inmap[-1, -1] + \
                          $inmap[0, -1] + \
                          $inmap[0, 0]) / 4",
            "$outmap_C = ($inmap[-1, 1] + \
                          $inmap[-1, 0] + \
                          $inmap[0, 0] + \
                          $inmap[0, 1]) / 4",
            "$outmap_D = ($inmap[0, 1] + \
                          $inmap[0, 0] + \
                          $inmap[1, 0] + \
                          $inmap[1, 1]) / 4",
        ])
        grass.mapcalc(
            expr,
            inmap=r_dsm,
            outmap_A=r_a_z,
            outmap_B=r_b_z,
            outmap_C=r_c_z,
            outmap_D=r_d_z,
            overwrite=True,
            quiet=grass.verbosity() <= 1,
        )

        attr_map_list.extend([r_a_z, r_b_z, r_c_z, r_d_z])
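        # A, B, C and D approximate the DSM elevation at the four corners
        # of each cell by averaging the four cells meeting at that corner;
        # r.what below attaches these corner heights to every target point
        # for the solid-angle parametrisation.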

    # Precompute values slopes in e-w direction, n-s direction
    # as atan(dz/dx) (e-w direction), atan(dz/dy) (n-s direction)
    # using moving window [row, col]
    elif pfunction == "Visual_magnitude":

        r_slope_ew = "{}_slope_ew".format(TEMPNAME)
        r_slope_ns = "{}_slope_ns".format(TEMPNAME)

        expr = ";".join([
            "$outmap_ew = atan((sqrt(2) * $inmap[-1, 1] + \
                          2 * $inmap[0, 1] + \
                          sqrt(2) * $inmap[1, 1] - \
                          sqrt(2) * $inmap[-1, -1] - \
                          2 * $inmap[0, -1] - \
                          sqrt(2) * $inmap[1, -1]) / \
                          (8 * $w_ew))",
            "$outmap_ns = atan((sqrt(2) * $inmap[-1, -1] + \
                          2 * $inmap[-1, 0] + \
                          sqrt(2) * $inmap[-1, 1] - \
                          sqrt(2) * $inmap[1, -1] - \
                          2 * $inmap[1, 0] - \
                          sqrt(2) * $inmap[1, 1]) / \
                          (8 * $w_ns))",
        ])

        grass.mapcalc(
            expr,
            inmap=r_dsm,
            outmap_ew=r_slope_ew,
            outmap_ns=r_slope_ns,
            w_ew=reg.ewres,
            w_ns=reg.nsres,
            overwrite=True,
            quiet=grass.verbosity() <= 1,
        )

        attr_map_list.extend([r_slope_ew, r_slope_ns])

    # Use viewshed weights if provided
    if r_weights:
        attr_map_list.append(r_weights)

    # Extract attribute values
    target_pts_grass = grass.read_command(
        "r.what",
        flags="v",
        map=attr_map_list,
        points=v_source_sample,
        separator="|",
        null_value="*",
        quiet=True,
    )

    # columns to use depending on parametrization function
    usecols = list(range(0, 4 + len(attr_map_list)))
    usecols.remove(3)  # skip 3rd column - site_name

    # convert coordinates and attributes of target points T to numpy array
    target_pts_np = txt2numpy(
        target_pts_grass,
        sep="|",
        names=None,
        null_value="*",
        usecols=usecols,
        structured=False,
    )

    # if one point only - 0D array which cannot be used in iteration
    if target_pts_np.ndim == 1:
        target_pts_np = target_pts_np.reshape(1, -1)

    target_pts_np = target_pts_np[~np.isnan(target_pts_np).any(axis=1)]

    no_points = target_pts_np.shape[0]

    # if viewshed weights not set by flag - set weight to 1 for all pts
    if not r_weights:
        weights_np = np.ones((no_points, 1))
        target_pts_np = np.hstack((target_pts_np, weights_np))

    grass.debug("target_pts_np: {}".format(target_pts_np))

    # ==========================================================================
    # Calculate weighted parametrised cumulative viewshed
    # by iterating over target points T
    # ==========================================================================
    grass.verbose("Calculating partial viewsheds...")

    # Parametrisation function
    if pfunction == "Solid_angle":
        parametrise_viewshed = solid_angle_reverse

    elif pfunction == "Distance_decay":
        parametrise_viewshed = distance_decay_reverse

    elif pfunction == "Fuzzy_viewshed":
        parametrise_viewshed = fuzzy_viewshed_reverse

    elif pfunction == "Visual_magnitude":
        parametrise_viewshed = visual_magnitude_reverse

    else:
        parametrise_viewshed = binary
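
    # Equivalent, more compact dispatch (sketch using the same function
    # objects as above):
    #   parametrise_viewshed = {
    #       "Solid_angle": solid_angle_reverse,
    #       "Distance_decay": distance_decay_reverse,
    #       "Fuzzy_viewshed": fuzzy_viewshed_reverse,
    #       "Visual_magnitude": visual_magnitude_reverse,
    #   }.get(pfunction, binary)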

    # Collect variables that will be used in do_it_all() into a dictionary
    global_vars = {
        "region": reg,
        "range": exp_range,
        "param_viewshed": parametrise_viewshed,
        "observer_elevation": v_elevation,
        "b_1": b_1,
        "memory": memory,
        "refr_coeff": refr_coeff,
        "flagstring": flagstring,
        "r_dsm": r_dsm,
        "dsm_type": dsm_type,
        "cores": cores,
        "tempname": TEMPNAME,
    }

    # Split target points to chunks for each core
    target_pnts = np.array_split(target_pts_np, cores)

    # Combine each chunk with dictionary
    combo = list(zip(itertools.repeat(global_vars), target_pnts))
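
    # np.array_split tolerates uneven splits and itertools.repeat pairs the
    # shared dict with every chunk, e.g.:
    #   >>> np.array_split(np.arange(5), 3)
    #   [array([0, 1]), array([2, 3]), array([4])]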

    # Calculate partial cumulative viewsheds
    with Pool(cores) as pool:
        np_sum = pool.starmap(do_it_all, combo)
        pool.close()
        pool.join()

    # Use nansum so NaNs in individual partial viewsheds do not poison the
    # total, but keep cells that are NaN in all partial viewsheds as NaN
    all_nan = np.all(np.isnan(np_sum), axis=0)
    np_sum = np.nansum(np_sum, axis=0, dtype=np.single)
    np_sum[all_nan] = np.nan
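
    # Plain np.nansum would silently turn all-NaN cells into 0, hence the
    # all_nan mask, e.g.:
    #   >>> np.nansum(np.array([[np.nan, 1.0], [np.nan, 2.0]]), axis=0)
    #   array([0., 3.])   # the first cell must be restored to NaN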

    grass.verbose("Writing final result and cleaning up...")

    # Restore original computational region
    reg.read()
    reg.set_current()
    reg.set_raster_region()

    # Convert numpy array of cumulative viewshed to raster
    numpy2raster(np_sum, mtype="FCELL", rastname=r_output, overwrite=True)

    # Remove temporary files and reset mask if needed
    cleanup()

    # Set raster history to output raster
    grass.raster_history(r_output, overwrite=True)
    grass.run_command(
        "r.support",
        overwrite=True,
        map=r_output,
        title="Visual exposure index as {}".format(pfunction.replace("_",
                                                                     " ")),
        description="generated by r.viewshed.exposure",
        units="Index value",
        quiet=True,
    )
Example #10
def is_latlon():
    """Return True if the location is latlon
    """
    return gscript.locn_is_latlong()
Example #11
def main():
    in_pts = options['points']
    in_lines = options['lines']
    out_tin = options['tin']
    max_area = options['max_area']
    min_angle = options['min_angle']
    steiner_points = options['steiner_points']
    
    global tmp, nuldev, grass_version
    nuldev = None

    # setup temporary files
    tmp = grass.tempfile()

    # check for LatLong location
    if grass.locn_is_latlong():
        grass.fatal("Module works only in locations with cartesian coordinate system")

    # check if input file exists
    if not grass.find_file(in_pts, element = 'vector')['file']:
        grass.fatal(_("<%s> does not exist.") % in_pts)

    ############################################################
    ## check for Triangle options

    ############################################################
    ## prepare vectors to Triangle input

    tmp_pts_cut = tmp + '_pts_cut'
    
    grass.run_command('v.out.ascii', input_ = in_pts, output = tmp_pts_cut,
                      sep = ' ', quiet = True, stderr = nuldev)

    tmp_pts_cut2 = tmp_pts_cut + '2'
    
    with open(tmp_pts_cut,'r') as fin:
        with open (tmp_pts_cut2,'w') as fout:
            writer = csv.writer(fout, delimiter=' ')            
            for row in csv.reader(fin, delimiter=' '):
                writer.writerow(row[0:3])

    if in_lines:
        grass.run_command('v.split', input_ = in_lines, output = 'V_TRIANGLE_CUT_SEGM',
                          vertices = '2', quiet = True, stderr = nuldev)
        grass.run_command('v.category', input_ = 'V_TRIANGLE_CUT_SEGM', output = 'V_TRIANGLE_CUT_SEGM_NOCATS',
                          option = 'del', quiet = True, stderr = nuldev)
        grass.run_command('v.category', input_ = 'V_TRIANGLE_CUT_SEGM_NOCATS', output = 'V_TRIANGLE_CUT_SEGM_NEWCATS',
                          option = 'add', quiet = True, stderr = nuldev)
        grass.run_command('v.to.points', input_ = 'V_TRIANGLE_CUT_SEGM_NEWCATS', output = 'V_TRIANGLE_CUT_PTS',
                          use = 'vertex', flags = 't', quiet = True, stderr = nuldev)

        tmp_lines_cut = tmp + '_lines_cut'
        grass.run_command('v.out.ascii', input_ = 'V_TRIANGLE_CUT_PTS', output = tmp_lines_cut,
                          format_ = 'point', sep = ' ', quiet = True, stderr = nuldev)

    ## make *.node file
    tmp_pts_cut_0 = tmp_pts_cut + '_0'
    
    with open(tmp_pts_cut2,'r') as fin:
        with open (tmp_pts_cut_0,'w') as fout:
            writer = csv.writer(fout, delimiter=' ')            
            for row in csv.reader(fin, delimiter=' '):
                row.append('0')
                writer.writerow(row)

    tmp_cut = tmp + '_cut'

    with open(tmp_cut, 'w') as outfile:
        if in_lines:
            filenames = [tmp_lines_cut, tmp_pts_cut_0]
        else:
            filenames = [tmp_pts_cut_0]

        for fname in filenames:
            with open(fname) as infile:
                for num, line in enumerate(infile, 1):
                    outfile.write('%s %s' % (num, line))

    num_lines = sum(1 for line in open(tmp_cut))

    tmp_header = tmp + '_header'
    with open(tmp_header,'w') as fout:
        fout.write("%s 2 1 1" % num_lines)
        fout.write('\n')

    tmp_node = tmp + '_node'
    
    filenames = [tmp_header, tmp_cut]
    with open(tmp_node, 'w') as outfile:
        for fname in filenames:
            with open(fname) as infile:
                for line in infile:
                    outfile.write(line)
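
    ## The assembled *.node file follows Triangle's format, e.g. for three
    ## points (made-up coordinates):
    ##   3 2 1 1            <- vertices, dimension, attributes, boundary markers
    ##   1 634567.1 223456.2 101.5 0
    ##   2 634789.3 223111.8 99.2 0
    ##   3 634012.7 223900.4 103.0 0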

    ## make *.poly file
    tmp_poly = tmp + '.poly'
    with open(tmp_poly, 'w') as fout:
        fout.write('0 2 1 1')
        fout.write('\n')

    if in_lines:
        vert_num = sum(1 for line in open(tmp_lines_cut))
        segm_num = vert_num // 2  # each segment from v.split has two vertices

        with open(tmp_poly, 'a') as fout:
            fout.write('%s 1' % segm_num)
            fout.write('\n')

        tmp_num = tmp + '_num'

        with open(tmp_num, 'w') as outfile:
            with open(tmp_lines_cut) as infile:
                for num, line in enumerate(infile, 1):
                    outfile.write('%s %s' % (num, line))

        tmp_num1 = tmp + '_num1'
        tmp_num2 = tmp + '_num2'
        tmp_num3 = tmp + '_num3'
        tmp_num4 = tmp + '_num4'
        tmp_num5 = tmp + '_num5'

        with open(tmp_num1, 'w') as outfile1:
            with open(tmp_num2, 'w') as outfile2:
                with open(tmp_num) as infile:
                    reader = csv.reader(infile, delimiter=' ')
                    for row in reader:
                        # first field is the running vertex number
                        vertex_num = int(row[0])
                        if vertex_num % 2:
                            outfile1.write('%s\n' % vertex_num)
                        else:
                            outfile2.write('%s\n' % vertex_num)

        numlist = []
        with open(tmp_num) as infile:
            reader = csv.reader(infile, delimiter=' ')
            for row in reader:
                # fifth field holds the category of the parent line
                numlist.append(int(row[4]))

        numlist3 = list(set(numlist))

        with open(tmp_num3, 'w') as outfile:
            for item in numlist3:
                outfile.write("%s\n" % item)

        with open(tmp_num4, 'w') as outfile, open(tmp_num1) as f1, open(tmp_num2) as f2, open(tmp_num3) as f3:
            for line1, line2, line3 in itertools.zip_longest(f1, f2, f3, fillvalue = ""):
                outfile.write("{} {} {}\n".format(line1.rstrip(), line2.rstrip(), line3.rstrip()))

        with open(tmp_num5, 'w') as outfile:
            with open(tmp_num4) as infile:
                for num, line in enumerate(infile, 1):
                    outfile.write('%s %s' % (num, line))

        with open(tmp_poly, 'a') as outfile:
            with open(tmp_num5) as infile:
                for line in infile:
                    outfile.write(line)
            outfile.write('0')
    else:
        # no constraint lines: write an empty segment section and no holes
        with open(tmp_poly, 'a') as fout:
            fout.write('0 1')
            fout.write('\n')
            fout.write('0')

    # with open(tmp_poly, 'r') as f:
    #     print(f.read().strip())
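
    ## Layout of the resulting *.poly file (illustrative):
    ##   0 2 1 1            <- vertices are in the *.node file
    ##   <segm_num> 1       <- segment count, boundary markers
    ##   1 1 2 <cat>        <- segment number, endpoint ids, marker
    ##   ...
    ##   0                  <- number of holes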

    ## let's triangulate
    t = triangle.get_data(tmp_poly)
    print(t)
Example #12
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            "product": "National Elevation Dataset (NED)",
            "dataset": {
                "ned1sec": (1.0 / 3600, 30, 100),
                "ned13sec": (1.0 / 3600 / 3, 10, 30),
                "ned19sec": (1.0 / 3600 / 9, 3, 10),
            },
            "subset": {},
            "extent": ["1 x 1 degree", "15 x 15 minute"],
            "format": "IMG",
            "extension": "img",
            "zip": True,
            "srs": "wgs84",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "bilinear",
            "url_split": "/",
        },
        "nlcd": {
            "product": "National Land Cover Database (NLCD)",
            "dataset": {
                "National Land Cover Database (NLCD) - 2001":
                (1.0 / 3600, 30, 100),
                "National Land Cover Database (NLCD) - 2006":
                (1.0 / 3600, 30, 100),
                "National Land Cover Database (NLCD) - 2011":
                (1.0 / 3600, 30, 100),
            },
            "subset": {
                "Percent Developed Imperviousness",
                "Percent Tree Canopy",
                "Land Cover",
            },
            "extent": ["3 x 3 degree"],
            "format": "GeoTIFF",
            "extension": "tif",
            "zip": True,
            "srs": "wgs84",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "nearest",
            "url_split": "/",
        },
        "naip": {
            "product": "USDA National Agriculture Imagery Program (NAIP)",
            "dataset": {
                "Imagery - 1 meter (NAIP)": (1.0 / 3600 / 27, 1, 3)
            },
            "subset": {},
            "extent": [
                "3.75 x 3.75 minute",
            ],
            "format": "JPEG2000",
            "extension": "jp2",
            "zip": False,
            "srs": "wgs84",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "nearest",
            "url_split": "/",
        },
        "lidar": {
            "product": "Lidar Point Cloud (LPC)",
            "dataset": {
                "Lidar Point Cloud (LPC)": (1.0 / 3600 / 9, 3, 10)
            },
            "subset": {},
            "extent": [""],
            "format": "LAS,LAZ",
            "extension": "las,laz",
            "zip": True,
            "srs": "",
            "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            "interpolation": "nearest",
            "url_split": "/",
        },
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options["product"]

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string["product"]
    product_format = nav_string["format"]
    product_extensions = tuple(nav_string["extension"].split(","))
    product_is_zip = nav_string["zip"]
    product_srs = nav_string["srs"]
    product_proj4 = nav_string["srs_proj4"]
    product_interpolation = nav_string["interpolation"]
    product_url_split = nav_string["url_split"]
    product_extent = nav_string["extent"]
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == "ned":
        gui_dataset = options["ned_dataset"]
        ned_api_name = ""
        if options["ned_dataset"] == "ned1sec":
            ned_data_abbrv = "ned_1arc_"
            ned_api_name = "1 arc-second"
        if options["ned_dataset"] == "ned13sec":
            ned_data_abbrv = "ned_13arc_"
            ned_api_name = "1/3 arc-second"
        if options["ned_dataset"] == "ned19sec":
            ned_data_abbrv = "ned_19arc_"
            ned_api_name = "1/9 arc-second"
        product_tag = product + " " + ned_api_name

    if gui_product == "nlcd":
        gui_dataset = options["nlcd_dataset"]
        if options["nlcd_dataset"] == "nlcd2001":
            gui_dataset = "National Land Cover Database (NLCD) - 2001"
        if options["nlcd_dataset"] == "nlcd2006":
            gui_dataset = "National Land Cover Database (NLCD) - 2006"
        if options["nlcd_dataset"] == "nlcd2011":
            gui_dataset = "National Land Cover Database (NLCD) - 2011"

        if options["nlcd_subset"] == "landcover":
            gui_subset = "Land Cover"
        if options["nlcd_subset"] == "impervious":
            gui_subset = "Percent Developed Imperviousness"
        if options["nlcd_subset"] == "canopy":
            gui_subset = "Percent Tree Canopy"
        product_tag = gui_dataset

    if gui_product == "naip":
        gui_dataset = "Imagery - 1 meter (NAIP)"
        product_tag = nav_string["product"]

    has_pdal = gscript.find_program(pgm="v.in.pdal")
    if gui_product == "lidar":
        gui_dataset = "Lidar Point Cloud (LPC)"
        product_tag = nav_string["product"]
        if not has_pdal:
            gscript.warning(
                _("Module v.in.pdal is missing,"
                  " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options["output_name"]
    gui_resampling_method = options["resampling_method"]
    gui_i_flag = flags["i"]
    gui_k_flag = flags["k"]
    work_dir = options["output_directory"]
    memory = options["memory"]
    nprocs = int(options["nprocs"])  # used in comparisons and batching below

    preserve_extracted_files = True
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not work_dir:
        work_dir = get_cache_dir("r_in_usgs")
    elif not os.path.isdir(work_dir):
        gscript.fatal(
            _("Directory <{}> does not exist. Please create it.").format(
                work_dir))

    # determine product resolution from the current location's units
    try:
        proj = gscript.parse_command("g.proj", flags="g")
        if gscript.locn_is_latlong():
            product_resolution = nav_string["dataset"][gui_dataset][0]
        elif float(proj["meters"]) == 1:
            product_resolution = nav_string["dataset"][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string["dataset"][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == "lidar" and options["resolution"]:
        product_resolution = float(options["resolution"])

    if gui_resampling_method == "default":
        gui_resampling_method = nav_string["interpolation"]
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
    min_coords = gscript.read_command(
        "m.proj",
        coordinates=(gregion["w"], gregion["s"]),
        proj_out=wgs84,
        separator="comma",
        flags="d",
    )
    max_coords = gscript.read_command(
        "m.proj",
        coordinates=(gregion["e"], gregion["n"]),
        proj_out=wgs84,
        separator="comma",
        flags="d",
    )
    min_list = min_coords.split(",")[:2]
    max_list = max_coords.split(",")[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://tnmaccess.nationalmap.gov/api/v1/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == "nlcd":
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _(
        "Possibly, the query has timed out. Check network configuration and try again."
    )
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(
            _("HTTP(S) error from USGS TNM API: {code}: {reason} ({instructions})"
              ).format(reason=error.reason,
                       code=error.code,
                       instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(
            _("Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON["errors"]:
            TNM_API_error = return_JSON["errors"]
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == "lidar" and options["title_filter"]:
            return_JSON["items"] = [
                item for item in return_JSON["items"]
                if options["title_filter"] in item["title"]
            ]
            return_JSON["total"] = len(return_JSON["items"])

    except (ValueError, KeyError):  # malformed or unexpected JSON
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON["total"])
    tiles_needed_count = 0
    # TODO: Make the tolerance configurable.
    # Some combinations produce >10 byte differences.
    size_diff_tolerance = 5
    exist_dwnld_size = 0

    # Fatal error if API query returns no results for GUI input
    if tile_API_count == 0:
        gscript.fatal(
            _("USGS TNM API error or no tiles available for given input parameters"
              ))

    dwnld_size = []
    dwnld_url = []
    TNM_file_titles = []
    exist_dwnld_url = []
    exist_TNM_titles = []
    exist_zip_list = []
    exist_tile_list = []
    extract_zip_list = []
    # for each file returned, assign variables to needed parameters
    for f in return_JSON["items"]:
        TNM_file_title = f["title"]
        TNM_file_URL = str(f["downloadURL"])
        TNM_file_size = int(f["sizeInBytes"])
        TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
        if gui_product == "ned":
            local_file_path = os.path.join(work_dir,
                                           ned_data_abbrv + TNM_file_name)
            local_zip_path = os.path.join(work_dir,
                                          ned_data_abbrv + TNM_file_name)
            local_tile_path = os.path.join(work_dir,
                                           ned_data_abbrv + TNM_file_name)
        else:
            local_file_path = os.path.join(work_dir, TNM_file_name)
            local_zip_path = os.path.join(work_dir, TNM_file_name)
            local_tile_path = os.path.join(work_dir, TNM_file_name)
        file_exists = os.path.exists(local_file_path)
        file_complete = None
        # If the file exists, do not download it again,
        # but if it is incomplete (e.g. interrupted download), redownload it.
        if file_exists:
            existing_local_file_size = os.path.getsize(local_file_path)
            # if local file is incomplete
            if abs(existing_local_file_size -
                   TNM_file_size) > size_diff_tolerance:
                gscript.verbose(
                    _("Size of local file {filename} ({local_size}) differs"
                      " from a file size specified in the API ({api_size})"
                      " by {difference} bytes"
                      " which is more than tolerance ({tolerance})."
                      " It will be downloaded again.").format(
                          filename=local_file_path,
                          local_size=existing_local_file_size,
                          api_size=TNM_file_size,
                          difference=abs(existing_local_file_size -
                                         TNM_file_size),
                          tolerance=size_diff_tolerance,
                      ))
                # NLCD API query returns subsets that cannot be filtered before
                # results are returned. gui_subset is used to filter results.
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    exist_list()
                    exist_dwnld_size += TNM_file_size
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        continue
        else:
            if not gui_subset:
                tiles_needed_count += 1
                down_list()
            else:
                if gui_subset in TNM_file_title:
                    tiles_needed_count += 1
                    down_list()
                    continue

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)

    # format the combined download size from bytes into readable units
    if dwnld_size:
        total_size = sum(dwnld_size)
        if total_size >= 1e9:
            total_size_str = "{0:.2f} GB".format(total_size * 1e-9)
        elif total_size >= 1e6:
            total_size_str = "{0:.2f} MB".format(total_size * 1e-6)
        else:
            # avoid leaving total_size_str unset for downloads under 1 MB
            total_size_str = "{0} bytes".format(total_size)
    else:
        total_size_str = "0"

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = "none"

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == "nlcd":
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = (
                        "NLCD {0} data unavailable for input parameters".
                        format(gui_subset))
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = "\n".join(data_info).format(
            size=total_size_str,
            count=file_download_count,
            srs=product_srs,
            tile=TNM_file_titles_info,
        )
    print(data_info)

    if gui_i_flag:
        gscript.message(
            _("To download USGS data, remove the <i> flag and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == "ned":
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            download_count += 1
            gscript.message(
                _("Download {current} of {total}...").format(
                    current=download_count, total=TNM_count))
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()["Content-Length"])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
        except URLError as error:
            gscript.fatal(
                _("USGS download request for {url} has timed out. "
                  "Network or formatting error: {err}").format(url=url,
                                                               err=error))
        except Exception as error:  # StandardError existed only in Python 2
            cleanup_list.append(local_file_path)
            gscript.fatal(
                _("Download of {url} failed: {err}").format(url=url,
                                                            err=error))

    # set already downloaded zip files or tiles to be extracted or imported;
    # pre-extraction statistics are unreliable, so statistics are collected
    # during the extraction itself
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            extracted_tile = None  # guards the existence check after the loop
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, extract again
                                    if os.path.getmtime(
                                            extracted_tile) < os.path.getmtime(
                                                z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, str(work_dir))
                if extracted_tile and os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file '{filename}'"
                      " from ZIP archive '{zipname}': {error}").format(
                          filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(
            _("Extracted {extracted} new tiles and used {used} existing tiles"
              ).format(used=used_existing_extracted_tiles_num,
                       extracted=extracted_tiles_num))
        if old_extracted_tiles_num:
            gscript.verbose(
                _("Found {removed} existing tiles older"
                  " than the corresponding downloaded archive").format(
                      removed=old_extracted_tiles_num))
        if removed_extracted_tiles_num:
            gscript.verbose(
                _("Removed {removed} existing tiles").format(
                    removed=removed_extracted_tiles_num))

    if gui_product == "lidar" and not has_pdal:
        gscript.fatal(
            _("Module v.in.pdal is missing, cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (it is a smaller
    # assumption for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != "naip" and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists(
                    "raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _(
                    "Importing and reprojecting {name} ({count} out of {total})..."
                ).format(name=LT_file_name, count=i + 1, total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != "lidar":
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_file_import,
                        kwargs=dict(
                            identifier=i,
                            results=results,
                            input=t,
                            output=LT_layer_name,
                            resolution="value",
                            resolution_value=product_resolution,
                            extent="region",
                            resample=product_interpolation,
                            memory=int(float(memory) // int(nprocs)),
                        ),
                    )
                else:
                    srs = options["input_srs"]
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_lidar_import,
                        kwargs=dict(
                            identifier=i,
                            results=results,
                            input=t,
                            output=LT_layer_name,
                            input_srs=srs if srs else None,
                        ),
                    )
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(
                            _("Parallel import and reprojection failed."
                              " Try running with nprocs=1."))
                    else:
                        gscript.fatal(
                            _("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count
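
    # The loop above is a simple batched fork/join pattern: start up to
    # nprocs workers, join the whole batch, harvest results from the shared
    # Manager dict, then reset. Stripped-down sketch (hypothetical `worker`
    # and `items`, not the module's code):
    #   with Manager() as manager:
    #       results = manager.dict()
    #       batch = []
    #       for i, item in enumerate(items):
    #           p = Process(target=worker, args=(item, i, results))
    #           p.start()
    #           batch.append(p)
    #           if len(batch) == nprocs or i == len(items) - 1:
    #               for p in batch:
    #                   p.join()
    #               batch = []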

    gscript.verbose(
        _("Imported {imported} new tiles and used {used} existing tiles").
        format(used=used_existing_imported_tiles_num,
               imported=imported_tiles_num))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command("g.region",
                                        res=product_resolution,
                                        flags="a")
                if gui_product == "naip":
                    for i in ("1", "2", "3", "4"):
                        patch_names_i = [
                            name + "." + i for name in patch_names
                        ]
                        output = gui_output_layer + "." + i
                        gscript.run_command("r.patch",
                                            input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == "lidar":
                    gscript.run_command(
                        "v.patch",
                        flags="nzb",
                        input=patch_names,
                        output=gui_output_layer,
                    )
                    gscript.run_command(
                        "v.surf.rst",
                        input=gui_output_layer,
                        elevation=gui_output_layer,
                        nprocs=nprocs,
                        **rst_params,
                    )
                else:
                    gscript.run_command("r.patch",
                                        input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if not -k flag
                if not preserve_imported_tiles:
                    if gui_product == "naip":
                        for i in ("1", "2", "3", "4"):
                            patch_names_i = [
                                name + "." + i for name in patch_names
                            ]
                            gscript.run_command("g.remove",
                                                type="raster",
                                                name=patch_names_i,
                                                flags="f")
                    elif gui_product == "lidar":
                        gscript.run_command(
                            "g.remove",
                            type="vector",
                            name=patch_names + [gui_output_layer],
                            flags="f",
                        )
                    else:
                        gscript.run_command("g.remove",
                                            type="raster",
                                            name=patch_names,
                                            flags="f")
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == "naip":
                for i in ("1", "2", "3", "4"):
                    gscript.run_command(
                        "g.rename",
                        raster=(patch_names[0] + "." + i,
                                gui_output_layer + "." + i),
                    )
            elif gui_product == "lidar":
                if product_resolution:
                    gscript.run_command("g.region",
                                        res=product_resolution,
                                        flags="a")
                gscript.run_command(
                    "v.surf.rst",
                    input=patch_names[0],
                    elevation=gui_output_layer,
                    nprocs=nprocs,
                    **rst_params,
                )
                if not preserve_imported_tiles:
                    gscript.run_command("g.remove",
                                        type="vector",
                                        name=patch_names[0],
                                        flags="f")
            else:
                gscript.run_command("g.rename",
                                    raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(
                _("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(
            _("Error in getting or importing the data (see above). Please retry."
              ))

    # set appropriate color table
    if gui_product == "ned":
        gscript.run_command("r.colors",
                            map=gui_output_layer,
                            color="elevation")

    # composite NAIP
    if gui_product == "naip":
        gscript.use_temp_region()
        gscript.run_command("g.region", raster=gui_output_layer + ".1")
        gscript.run_command(
            "r.composite",
            red=gui_output_layer + ".1",
            green=gui_output_layer + ".2",
            blue=gui_output_layer + ".3",
            output=gui_output_layer,
        )
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()