Example no. 1
0
def read_polygon_dir(weight_dict, directory, filepattern='*.csv'):
    """
    Read all polygon files in a directory and pair each with its weight.

    In 'directory', look at all files matching 'filepattern', read each
    one with read_polygon() and return a list of (polygon, weight)
    tuples, where the weight for a file is looked up in 'weight_dict'
    (keyed by the file's full path as produced by glob).

    Raises KeyError if any matched file is missing from 'weight_dict';
    the message lists every missing file at once.
    """
    pattern = os.path.join(directory, filepattern)
    files = glob.glob(pattern)

    # Check up-front that the dictionary covers *all* matched files, so
    # the user is told about every missing one in a single error instead
    # of failing on the first.
    missing = [f for f in files if f not in weight_dict]
    if missing:
        # The old 'raise KeyError, msg' statement form is a SyntaxError
        # under Python 3; the call form below works in both 2 and 3.
        raise KeyError('Files not defined in dictionary: %s'
                       % ', '.join(missing))

    # Build the (polygon, weight) result list.
    return [(read_polygon(f), weight_dict[f]) for f in files]
Example no. 2
0
def read_polygon_dir(weight_dict, directory, filepattern='*.csv'):
    """
    In directory 'directory', look at all files matching 'filepattern'
    and return a list of (polygon, weight) tuples, reading each polygon
    with read_polygon() and taking its weight from 'weight_dict'
    (keyed by the file's glob path).

    Raises KeyError listing *all* files not present in 'weight_dict'.
    """
    pattern = os.path.join(directory, filepattern)
    files = glob.glob(pattern)

    # Collect every file missing from weight_dict so the error message
    # reports all of them at once rather than stopping at the first.
    errors = [f for f in files if f not in weight_dict]
    if errors:
        # Fixed: 'raise KeyError, msg' is Python-2-only syntax; the
        # call form is valid in both Python 2 and 3. ', '.join replaces
        # the quadratic manual concatenation.
        msg = ', '.join(errors)
        raise KeyError('Files not defined in dictionary: %s' % msg)

    # Pair each polygon with its weight.
    return [(read_polygon(f), weight_dict[f]) for f in files]
Example no. 3
0
def read_hole_dir_multi_files_with_single_poly(directory, filepattern='*.csv'):
    """
    Read every polygon file in a directory.

    Looks in 'directory' for all files matching 'filepattern'
    (default '*.csv'), reads each with read_polygon() and returns the
    polygons as a list, one polygon per file.
    """
    pattern = os.path.join(directory, filepattern)
    # Comprehension replaces the manual append loop (same behavior).
    return [read_polygon(f) for f in glob.glob(pattern)]
Example no. 4
0
def read_hole_dir_multi_files_with_single_poly(directory, filepattern='*.csv'):
    """
    Read each file matching 'filepattern' inside 'directory' as a
    polygon (via read_polygon) and return the polygons as a list.
    """
    matched_paths = glob.glob(os.path.join(directory, filepattern))
    polygons = []
    for path in matched_paths:
        polygons.append(read_polygon(path))
    return polygons
Example no. 5
0
#-------------------------------------------------------------------------------
# Check for errors detected above.
#-------------------------------------------------------------------------------

# Abort early if the sanity checks earlier in the file flagged a problem.
if sanity_error:
    msg = 'You must fix the above errors before continuing.'
    # Fixed: 'raise Exception, msg' is Python-2-only syntax (a
    # SyntaxError under Python 3); the call form works in both.
    raise Exception(msg)

#-------------------------------------------------------------------------------
# Reading polygons and creating interior regions
#-------------------------------------------------------------------------------

# Create list of land polygons paired with their initial condition (MSL).
land_initial_conditions = []
for filename, MSL in land_initial_conditions_filename:
    polygon = read_polygon(join(polygons_folder, filename))
    land_initial_conditions.append([polygon, MSL])

# Create list of interior polygons with their scaled maximum triangle area.
interior_regions = []
for filename, maxarea in interior_regions_data:
    polygon = read_polygon(join(polygons_folder, filename))
    interior_regions.append([polygon, maxarea*scale_factor])

# Initial bounding polygon for data clipping.
bounding_polygon = read_polygon(join(polygons_folder,
                                     bounding_polygon_filename))
bounding_maxarea = bounding_polygon_maxarea*scale_factor
Example no. 6
0
#-------------------------------------------------------------------------------
# Check for errors detected above.
#-------------------------------------------------------------------------------

# Stop here if earlier validation set the sanity_error flag.
if sanity_error:
    msg = 'You must fix the above errors before continuing.'
    # Fixed: the old 'raise Exception, msg' statement form is invalid
    # under Python 3; the call form is accepted by both 2 and 3.
    raise Exception(msg)

#-------------------------------------------------------------------------------
# Reading polygons and creating interior regions
#-------------------------------------------------------------------------------

# Create list of land polygons with initial conditions ([polygon, MSL]).
land_initial_conditions = []
for filename, MSL in land_initial_conditions_filename:
    polygon = read_polygon(join(polygons_folder, filename))
    land_initial_conditions.append([polygon, MSL])

# Create list of interior polygons with maximum area scaled by scale_factor.
interior_regions = []
for filename, maxarea in interior_regions_data:
    polygon = read_polygon(join(polygons_folder, filename))
    interior_regions.append([polygon, maxarea*scale_factor])

# Initial bounding polygon for data clipping.
bounding_polygon = read_polygon(join(polygons_folder,
                                     bounding_polygon_filename))
bounding_maxarea = bounding_polygon_maxarea*scale_factor
# Create Geospatial data from TXT files

# Read each point file, clip it to the study area's bounding polygon,
# and store it in geospatial_data keyed by its filename.
for filename in project.point_filenames:
    absolute_filename = join(project.topographies_folder, filename)
    G_points = Geospatial_data(file_name=absolute_filename,
                                                verbose=True)
    print 'Clip geospatial object'
    geospatial_data[filename] = G_points.clip(project.bounding_polygon)

#-------------------------------------------------------------------------------
# Combine, clip and export dataset 
#-------------------------------------------------------------------------------
# Read the extent polygons used below to cut overlapping regions out of
# the lower-priority datasets.
extent_polygons = []
for extent_polygon_filename in project.extent_polygon_filenames:
    p = read_polygon(join(project.polygons_folder, extent_polygon_filename))
    extent_polygons.append(p)

print 'Add geospatial objects' 
# NOTE(review): G starts as None, so the first 'G += ...' relies on
# Geospatial_data's addition handling a None operand — confirm the
# library supports this, otherwise it raises TypeError at runtime.
G = None
for key in geospatial_data:
    if key == project.point_filenames[0] or key == project.point_filenames[1]:
        G += geospatial_data[key]
    elif key == project.point_filenames[2]:
        # Remove the areas already covered by the first two extent
        # polygons before merging this dataset in.
        D = geospatial_data[key]
        D = D.clip_outside(extent_polygons[0])
        D = D.clip_outside(extent_polygons[1])
        G += D
    elif key == project.point_filenames[3]:
        D = geospatial_data[key]
        D = D.clip_outside(extent_polygons[2])
        # NOTE(review): D is clipped here but never merged into G — a
        # 'G += D' looks missing; confirm against the original source.
    # NOTE(review): the line below looks out of place — 'G_grid' is not
    # defined anywhere in view and 'filename' is stale from the earlier
    # loop, so this raises NameError per iteration. Likely a copy/paste
    # or scraping artifact; verify against the original file.
    geospatial_data[filename] = G_grid.clip(project.bounding_polygon)

# Create Geospatial data from TXT files

# Read each point file and clip it to the overall bounding polygon,
# storing the result in geospatial_data keyed by filename.
for filename in project.point_filenames:
    absolute_filename = join(project.topographies_folder, filename)
    G_points = Geospatial_data(file_name=absolute_filename, verbose=True)
    print 'Clip geospatial object'
    geospatial_data[filename] = G_points.clip(project.bounding_polygon)

#-------------------------------------------------------------------------------
# Combine, clip and export dataset
#-------------------------------------------------------------------------------
# Polygons delimiting the extents of the higher-priority datasets; used
# to cut those regions out of the lower-priority ones below.
extent_polygons = []
for extent_polygon_filename in project.extent_polygon_filenames:
    p = read_polygon(join(project.polygons_folder, extent_polygon_filename))
    extent_polygons.append(p)

print 'Add geospatial objects'
# NOTE(review): G starts as None, so the first 'G += ...' depends on
# Geospatial_data's addition accepting a None operand — confirm the
# library supports this, otherwise this is a TypeError at runtime.
G = None
for key in geospatial_data:
    if key == project.point_filenames[0] or key == project.point_filenames[1]:
        G += geospatial_data[key]
    elif key == project.point_filenames[2]:
        # Cut out the regions covered by the first two extent polygons
        # before merging this dataset in.
        D = geospatial_data[key]
        D = D.clip_outside(extent_polygons[0])
        D = D.clip_outside(extent_polygons[1])
        G += D
    elif key == project.point_filenames[3]:
        D = geospatial_data[key]
        D = D.clip_outside(extent_polygons[2])
        # NOTE(review): D is clipped but never merged into G within the
        # visible lines; the source may be truncated here — confirm a
        # 'G += D' (or similar) follows in the full file.