# Code example #1 (score: 0)
# File: wednesday.py — Project: iceseismic/SeisSuite
    dist_points = map(points_and_distances, nets)
    return np.vstack(dist_points)
    
def spread_paths(nets):
    """Run paths_func over every entry of *nets* and collect the results.

    Equivalent to the Python 2 idiom ``map(paths_func, nets)``, which
    returns a list.
    """
    return [paths_func(net) for net in nets]
    
#-----------------------------------------------------------------------------
# GENERATE SECOND SET OF VARIABLES AND STATES
#-----------------------------------------------------------------------------
ideal_path = 'ideal_coordinates.pickle'
#if no paths have been done before, start afresh!
if not os.path.exists(ideal_path):
    #generate N kmeans cluster points from massive M number of randomly 
    #distributed points inside the shape file. 
    M = 1e5
    lot_points = ps.points_in_shape(shape_path, M)
    coords = cluster_points(lot_points, N)
#else import already processed coordinates if the program has already done so.
else:
    f = open(name=ideal_path, mode='rb')
    coords = pickle.load(f)
    f.close()

lonmin, lonmax = np.floor(min(coords[:,0])), np.ceil(max(coords[:,0]))
latmin, latmax = np.floor(min(coords[:,1])), np.ceil(max(coords[:,1]))
print lonmin,lonmax,latmin,latmax

t0 = datetime.datetime.now()


kappa = [np.vstack([[coord1[0],coord1[1],coord2[0],coord2[1]]\
# Code example #2 (score: 0)
# File: decluster.py — Project: weijias-fork/SeisSuite
# Tunable parameters for the declustering loop.  No path reuse and no
# k-means clustering are enabled by default.
use_old_path = cluster = N_cluster_points = False
searches_per_point = 3   # search attempts per candidate point
factor = 0.05            # perturbation factor

# NOTE(review): 'infinite_counter' is initialised outside this chunk and the
# loop body continues past it — the termination logic is not visible here.
while infinite_counter <= 1:
    t0 = datetime.datetime.now()

    #----------------------------------------------------------------------
    # Generate N new point coordinates
    #----------------------------------------------------------------------
    if cluster:
        # Reuse the pre-computed k-means cluster coordinates.
        new_coords = N_cluster_points
    else:
        # Draw N fresh random points inside the shape file.
        new_coords = ps.points_in_shape(shape_path, N)

    # Grow the working coordinate set row-wise with the new points.
    coords = np.append(coords, new_coords, axis=0)

    # For every coord1, stack [lon1, lat1, lon2, lat2] rows against every
    # coord2 — one (len(coords), 4) array per source point.
    coord_set = [np.vstack([[coord1[0],coord1[1],coord2[0],coord2[1]]\
                 for coord2 in coords]) for coord1 in coords]

    t0 = datetime.datetime.now()
    # Fan the per-point path generation out across all CPU cores.
    pool = mp.Pool()
    paths = pool.map(spread_paths, coord_set)
    pool.close()
    pool.join()
    t1 = datetime.datetime.now()
    print "time to generate new paths", t1 - t0

    # Append new set of paths now that old set has been deleted.
# Code example #3 (score: 0)
    H = np.rot90(H)
    H = np.flipud(H)
    # Mask zeros
    Hmasked = np.ma.masked_where(H == 0, H)  # Mask pixels with a value of zero
    return Hmasked


t_total0 = datetime.datetime.now()

t0 = datetime.datetime.now()

# Either build the working coordinates from scratch, or reload a previously
# pickled set so repeated runs are reproducible.
ideal_path = 'ideal_coordinates.pickle'
#if no paths have been done before, start afresh!
if not os.path.exists(ideal_path):
    #generate N kmeans cluster points from massive M number of randomly
    #distributed points inside the shape file.
    M = int(1e5)  # a point count must be an int, not the float 1e5
    many_points = ps.points_in_shape(shape_path, M)
    coords = cluster_points(many_points, N)

#else import already processed coordinates if the program has already done so.
else:
    # 'with' guarantees the handle is closed even if unpickling raises;
    # the path is passed positionally — open() has no portable 'name='
    # keyword (it is a TypeError on Python 3).
    with open(ideal_path, 'rb') as f:
        coords = pickle.load(f)

# Whole-degree bounding box of the coordinates (lon = col 0, lat = col 1).
lonmin = np.floor(min(coords[:, 0]))
lonmax = np.ceil(max(coords[:, 0]))
latmin = np.floor(min(coords[:, 1]))
latmax = np.ceil(max(coords[:, 1]))
# Code example #4 (score: 0)
# File: full_test.py — Project: iceseismic/SeisSuite
    #sigma02 = sigma01+sigma12
    lon01 = atan2(sin(alpha0)*sin(sigma01), cos(sigma01))
    lon0 = lon1 - lon01    
    npts = max(int((np.ceil(dist) + 1)/km), 100)
    all_d = np.linspace(0,dist,npts)/R  
    lons, lats = vlon_func(all_d, sigma01, alpha0, lon0), vlat_func(all_d, sigma01, alpha0, lon0)   
    
    return np.column_stack((lons, lats))

t_total0 = datetime.datetime.now()
# Repeat the random-station experiment 50 times; the rest of the loop body
# continues past this chunk.
number=0
while number < 50:
    t0 = datetime.datetime.now()

    #lat lon coordinates of random points generated within set shape file 
    coords = ps.points_in_shape(shape_path, N)
    #lons1,lats1 = coords[:,0], coords[:,1]
    #lons2,lats2 = lons1,lats1
    
    lonmin = np.floor(min(coords[:,0]))
    latmin = np.floor(min(coords[:,1]))

    # Cartesian product of coords with itself: coords1 repeats each point
    # len(coords) times while coords2 cycles through all points, so the
    # pair (coords1[i], coords2[i]) enumerates every ordered pair.
    coords1 = [coord1 for coord1 in coords for coord2 in coords]
                     
    coords2 = [coord2 for coord1 in coords for coord2 in coords]

    # (N*N, 4) array of [lon1, lat1, lon2, lat2] rows, one per ordered pair.
    columns = np.column_stack((coords1, coords2))

    #dists = map(haversine, columns)
    
    #path_info = zip(coords1,coords2, dists)
# Code example #5 (score: 0)
# File: loop.py — Project: iceseismic/SeisSuite
    sigma01, alpha0 = atan2(tan(lat1), cos(alpha1)), asin(sin(alpha1)*cos(lat1))
    #sigma02 = sigma01+sigma12
    lon01 = atan2(sin(alpha0)*sin(sigma01), cos(sigma01))
    lon0 = lon1 - lon01    
    npts = max(int((np.ceil(dist) + 1)/km), 100)
    all_d = np.linspace(0,dist,npts)/R  
    lons, lats = vlon_func(all_d, sigma01, alpha0, lon0), vlat_func(all_d, sigma01, alpha0, lon0)   
    
    return np.column_stack((lons, lats))

t_total0 = datetime.datetime.now()


t0 = datetime.datetime.now()

# Random station coordinates drawn inside the shape file (lon, lat columns).
coords = ps.points_in_shape(shape_path, N)

lonmin = np.floor(min(coords[:,0]))
latmin = np.floor(min(coords[:,1]))


#coords1 = [coord1 for coord1 in coords for coord2 in coords]

#coords2 = [coord2 for coord1 in coords for coord2 in coords]

#columns = np.column_stack((coords1, coords2))

# For each source point, stack [src_lon, src_lat, dst_lon, dst_lat] rows over
# every destination point — one (len(coords), 4) array per source.
kappa = []
for source in coords:
    pair_rows = [[source[0], source[1], dest[0], dest[1]] for dest in coords]
    kappa.append(np.vstack(pair_rows))

def spread_paths(nets):