Example #1
import numpy as np
from pyproj import Proj
from geopy.distance import vincenty
from sklearn.neighbors import KNeighborsRegressor
from sklearn.neural_network import MLPRegressor


def coord_regression(predictions_b, predictions, train, test, method):

    mean_error = []

    if method == 1:
        machine_learn = KNeighborsRegressor(n_neighbors=5, weights='distance')
    elif method == 2:
        # LONGITUDE/LATITUDE are continuous targets, so a regressor is needed;
        # the MLPClassifier used originally would reject continuous labels.
        machine_learn = MLPRegressor(solver='sgd',
                                     learning_rate='adaptive',
                                     verbose=False,
                                     activation='tanh',
                                     alpha=1e-5,
                                     max_iter=400)  #THE BEST
        # Grid-search alternative:
        #model = MLPRegressor(learning_rate='adaptive')
        #solvers = ['lbfgs', 'sgd', 'adam']
        #activations = ['identity', 'logistic', 'tanh', 'relu']
        #max_its = [200, 400, 600]
        #machine_learn = GridSearchCV(estimator=model, param_grid=dict(activation=activations, max_iter=max_its), n_jobs=7)  #GRID

    # for each building
    for j in range(3):
        # select for training only samples from building j (0, 1, or 2)
        new_train1 = train.loc[train['BUILDINGID'] == j]
        # positions of the test samples predicted to be in building j
        ind = [x for x in range(len(predictions_b)) if predictions_b[x] == j]
        new_test1 = test.iloc[ind, :]

        if ind:
            # for each floor
            for i in range(5):

                new_train2 = new_train1.loc[new_train1['FLOOR'] == i]
                if not new_train2.empty:
                    # positions of the test samples predicted as floor i of building j
                    indexes = [
                        x for x in range(len(predictions))
                        if predictions[x] == i and predictions_b[x] == j
                    ]
                else:
                    indexes = []  # the original assigned 'index' here, leaving 'indexes' stale

                if indexes:  # if list is not empty

                    X_train = new_train2.iloc[:, 0:519]  # .ix is removed in modern pandas
                    Y_train = new_train2[['LONGITUDE', 'LATITUDE']]
                    machine_learn.fit(X_train, Y_train)

                    # testing samples predicted as building j, floor i
                    new_test2 = test.iloc[indexes, :]
                    X_test = new_test2.iloc[:, 0:519]
                    Y_test = new_test2[['LONGITUDE', 'LATITUDE']]

                    # turn into lists
                    predicts_lon_lat = machine_learn.predict(X_test).tolist()
                    Y_test = Y_test.values.tolist()

                    # UTM zone 23 south; the original string's stray comma and
                    # grid letter ("+zone=23K,") are not valid proj syntax
                    myProj = Proj(
                        "+proj=utm +zone=23 +south +ellps=WGS84 +datum=WGS84 +units=m +no_defs"
                    )

                    distance = []
                    # loop over k: the original reused j, clobbering the building index
                    for k in range(len(predicts_lon_lat)):

                        # convert UTM metres back to longitude/latitude
                        lon_pred, lat_pred = myProj(predicts_lon_lat[k][0],
                                                    predicts_lon_lat[k][1],
                                                    inverse=True)
                        lon_Y, lat_Y = myProj(Y_test[k][0],
                                              Y_test[k][1],
                                              inverse=True)

                        # geopy's vincenty expects (latitude, longitude) pairs
                        Y = (lat_Y, lon_Y)
                        predict = (lat_pred, lon_pred)

                        # the geodesic distance between the two points is the error
                        distance.append(vincenty(Y, predict).meters)
                        print("distance")
                        print(distance)
                        # If you want to use the haversine distance, uncomment the line below
                        #print(haversine(lon_Y, lat_Y, lon_pred, lat_pred))

                    mean_error.append(np.mean(distance))
                    #print(np.mean(distance))

    return np.mean(mean_error)
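
A minimal sketch of the convert-and-measure step used inside the loop above, assuming geopy < 2.0 (where vincenty is still available); the coordinates are illustrative only:

from pyproj import Proj
from geopy.distance import vincenty

utm23s = Proj("+proj=utm +zone=23 +south +ellps=WGS84 +datum=WGS84 +units=m +no_defs")

# hypothetical predicted and true positions, in UTM metres
lon_pred, lat_pred = utm23s(331000.0, 7389000.0, inverse=True)
lon_true, lat_true = utm23s(331050.0, 7389080.0, inverse=True)

# vincenty takes (lat, lon) pairs and returns a Distance object
error_m = vincenty((lat_pred, lon_pred), (lat_true, lon_true)).meters
print(error_m)  # roughly the ~94 m planar separation of the two UTM points
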
Example #2
from pyproj import Proj


def utm2deg(x, y, utmzone=43):
    # use the utmzone argument (the original hard-coded zone=43, ignoring it)
    p = Proj(proj='utm', zone=utmzone, ellps='WGS84')
    lon, lat = p(x, y, inverse=True)
    return lon, lat
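
A usage sketch with illustrative values; easting 500000 m lies on the central meridian of UTM zone 43 (75°E), so the longitude comes back as exactly 75:

lon, lat = utm2deg(500000.0, 2000000.0)  # northern-hemisphere point in zone 43
print(lon, lat)  # approximately (75.0, 18.1)
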
Example #3
else:
    sys.exit()

# output the plots
if output == cfg.PLAN_VIEW:
    # Set the map limit around Mossmorran
    extent = [-3.3733, -3.244456, 56.060534, 56.132276]
    fig, ax = plt.subplots()

    # x and y are currently in units of metres from the central point (0,0);
    # convert them to eastings and northings
    xe = cfg.UTM_easting + x
    yn = cfg.UTM_northing + y
    # now convert to lat-lon
    # UTM zone 30 north; the grid letter and comma in the original string
    # ("+zone=30V,") are not valid proj syntax
    p2 = Proj(
        "+proj=utm +zone=30 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs"
    )
    xp, yp = p2(xe, yn, inverse=True)
    xmin = np.min(xp)
    xmax = np.max(xp)
    ymax = np.max(yp)
    ymin = np.min(yp)
    print(xmin, xmax, ymin, ymax)

    data = np.mean(C1, axis=2) * 1e6
    maxc = np.max(data)
    minc = np.min(data)
    print(minc, maxc)

    # the snippet is truncated here; plotting the time-averaged field computed
    # above is the natural completion (assumption)
    plt.contourf(xp, yp, data)
Example #4
from pyproj import Proj, transform

def wgs84_to_tm128(longitude, latitude):
    return transform(Proj(**WGS84), Proj(**TM128), longitude, latitude)
Example #5
def wgs84_to_tm127(longitude, latitude):
    # a list comprehension replaces map(), which is lazy under Python 3
    return [2.5 * v for v in transform(Proj(**WGS84), Proj(**TM127), longitude, latitude)]
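
A usage sketch for the converters above. The snippet omits the WGS84/TM128/TM127 parameter dicts; the definitions below are a commonly circulated set for Naver's TM128 grid and should be treated as assumptions, not part of the original source:

WGS84 = {'proj': 'latlong', 'datum': 'WGS84', 'ellps': 'WGS84'}
TM128 = {'proj': 'tmerc', 'lat_0': '38N', 'lon_0': '128E', 'ellps': 'bessel',
         'x_0': '400000', 'y_0': '600000', 'k': '0.9999',
         'towgs84': '-146.43,507.89,681.46'}

x, y = wgs84_to_tm128(126.9784, 37.5665)  # Seoul City Hall, illustrative input
print(x, y)
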
Example #6
from pyproj import Proj, transform


def get_lon_lat():
    # center_x / center_y are assumed to come from the enclosing scope
    wgs84 = Proj(init='epsg:4326')
    mercator = Proj(init='epsg:3857')
    lon, lat = transform(mercator, wgs84, center_x, center_y)
    return lon, lat
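
The same transform with explicit Web Mercator coordinates (values illustrative; the init= strings follow the older pyproj API used throughout these examples):

wgs84 = Proj(init='epsg:4326')
mercator = Proj(init='epsg:3857')
# x = 0 sits on the Greenwich meridian in EPSG:3857, so lon comes back ~0
lon, lat = transform(mercator, wgs84, 0.0, 6800000.0)
print(lon, lat)  # about (0.0, 52.0)
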
Example #7
Facies = Facies.fillna(0)
Facies['Lava'] = Facies[0]
Facies['Pflow'] = Facies[1]
Facies['Pfall'] = Facies[2]
# =============================================================================
# Facies['Lava'] = Facies[0]+Facies[1]
# Facies['Pflow'] = Facies[3]+Facies[4]+Facies[9]
# Facies['Pfall'] = Facies[7]+Facies[8]
# Facies['Sed'] = Facies[2]+Facies[5]+Facies[6]
# =============================================================================
#Facies.to_csv('FaciesRecap.csv', sep=',')
thick = pd.read_csv("TD.csv").set_index('WELL').sort_index()

xy = pd.read_csv("allcoordinate.csv").set_index('WELL').sort_index()
# UTM zone 48 south; the original string's "+zone=48S," is not valid proj syntax
projection = Proj("+proj=utm +zone=48 +south +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
xy['X'], xy['Y'] = projection(xy['X'].values, xy['Y'].values, inverse=True)

prod = pd.read_csv("Production.csv").set_index('WELL').fillna(0)
dataset = Facies.iloc[:,0:3]
dataset = dataset.merge(thick, left_index=True, right_index=True, how='inner')
dataset = dataset.merge(xy, left_index=True, right_index=True, how='inner')
dataset = dataset.merge(prod, left_index=True, right_index=True, how='inner')
x = dataset.iloc[:,4]
y = dataset.iloc[:,5]
oil = dataset.iloc[:,7]
gas = dataset.iloc[:,8]
thick = dataset.iloc[:,3]
wells = dataset.index

#Lava
Example #8
# Imports inferred from usage in this example; readEvent,
# observed_first_arrival, compute_pca, correct_polarization, compute_baz and
# compute_rays are assumed to be project-local helpers.
import h5py
import numpy as np
import obspy
import rasterio
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.collections import LineCollection
from obspy import UTCDateTime
from pyproj import Proj, transform
from rasterio.plot import show
from scipy import stats


def parFunc(c):

    # set path
    dataPath = "/media/Data/Data/PIG/"
    templatePath = "/home/setholinger/Documents/Projects/PIG/detections/templateMatch/multiTemplate/run3/"
    outPath = "/home/setholinger/Documents/Projects/PIG/location/polarization/"

    # set data paramters
    nproc = 15
    _3D = 1
    norm_component = 0
    type = "short"
    if _3D:
        type = type + "_3D"
    fs = 100
    numCluster = 14

    if _3D:
        outPath = outPath + "3D_clustering/"
        if norm_component:
            outPath = outPath + "normalized_components/"
            templatePath = templatePath + type.split(
                "_")[0] + "_normalized_3D_clustering/"
        else:
            templatePath = templatePath + type + "_clustering/"
    else:
        templatePath = templatePath + type + "_clustering/"

    # set threshold parameters
    # xcorr_percent_thresh = 0.1 will compute polarizations for the 10% best correlated events
    norm_thresh = 2.75
    MAD = 0
    xcorr_percent_thresh = 0.1

    # set windowing parameters
    snipLen = 500
    winLen = 10
    slide = 5
    numSteps = int((snipLen - winLen) / slide)
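    # e.g. snipLen=500, winLen=10, slide=5 gives (500 - 10) / 5 = 98 windows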

    # set stations and components
    chans = ["HHN", "HHE", "HHZ"]
    stations = ["PIG2", "PIG4", "PIG5"]
    #stat_coords = np.array([[-100.748596,-75.016701],[-100.786598,-75.010696],[-100.730904,-75.009201],[-100.723701,-75.020302],[-100.802696,-75.020103]])
    stat_coords = np.array([[-100.786598,
                             -75.010696], [-100.723701, -75.020302],
                            [-100.802696, -75.020103]])
    plotStat = 'PIG2'

    # read imagery data, get coordinate system, convert station coordinates to x and y, and take average station location
    file = "/media/Data/Data/PIG/TIF/LC08_L1GT_001113_20131012_20170429_01_T2_B4.TIF"
    sat_data = rasterio.open(file)
    p2 = Proj(sat_data.crs, preserve_units=False)
    p1 = Proj(proj='latlong', preserve_units=False)
    [stat_x, stat_y] = transform(p1, p2, stat_coords[:, 0], stat_coords[:, 1])
    avg_stat_x = np.mean(stat_x)
    avg_stat_y = np.mean(stat_y)

    # set frequency
    prefiltFreq = [0.05, 1]
    freq = [0.05, 1]

    # load waveforms
    #waves = obspy.read(templatePath + type + '_waveforms_' + str(prefiltFreq[0]) + "-" + str(prefiltFreq[1]) + 'Hz.h5')
    # load detection times
    detFile = h5py.File(templatePath + "detection_times.h5", "r")
    detTimes = list(detFile["times"])
    detFile.close()

    # load clustering results
    clustFile = h5py.File(
        templatePath + str(numCluster) + "/" + str(numCluster) +
        "_cluster_predictions_" + str(prefiltFreq[0]) + "-" +
        str(prefiltFreq[1]) + "Hz.h5", "r")
    pred = np.array(list(clustFile["cluster_index"]))
    centroids = list(clustFile["centroids"])
    clustFile.close()

    # read in correlation results for the current cluster
    corrFile = h5py.File(
        templatePath + str(numCluster) + "/centroid" + str(c) +
        "_correlations_" + str(prefiltFreq[0]) + "-" + str(prefiltFreq[1]) +
        "Hz.h5", "r")
    cluster_xcorr_coef = np.array(list(corrFile["corrCoefs"]))
    corrFile.close()

    # get indices of waveforms in current cluster and events in cluster above xcorr threshold
    clusterInd = [i for i, x in enumerate(pred == c) if x]
    if MAD:
        mad = stats.median_absolute_deviation(abs(cluster_xcorr_coef))
        mad = round(mad, 2)
        xcorr_percent_thresh = mad / max(abs(cluster_xcorr_coef))
        print(mad)
        print(xcorr_percent_thresh)
    n_events = round(xcorr_percent_thresh * len(clusterInd))
    threshInd = abs(cluster_xcorr_coef).argsort()[-1 * n_events:][::-1]

    # make array for storing pca vector sums and storing data to plot
    all_first_components = np.zeros((numSteps * len(threshInd), 2), "float64")
    clusterEventsAligned = np.zeros((len(threshInd), snipLen * fs), 'float64')

    # loop through indices of events in current cluster
    for i in range(len(threshInd)):

        # make array for storage of pca components and empty obspy stream for storing one trace from each station
        first_component_sums = np.zeros((numSteps, 2), "float64")
        event_stat = obspy.read()
        event_stat.clear()

        # loop through stations to get one trace from each to find earliest arrival
        try:
            for stat in range(len(stations)):

                # get times bounds for current event and read event
                #eventLims = [waves[clusterInd[threshInd[i]]].stations.starttime,waves[clusterInd[threshInd[i]]].stations.starttime + snipLen]
                starttime = UTCDateTime(detTimes[clusterInd[threshInd[i]]])
                endtime = starttime + snipLen
                eventLims = [starttime, endtime]
                event_stat += readEvent(dataPath + "MSEED/noIR/",
                                        stations[stat], chans[1], eventLims,
                                        freq)

            # find station with earliest arrival
            first_stat = observed_first_arrival(event_stat)

            # loop though stations to perform PCA on all windows in the event on each station's data
            for stat in range(len(stations)):

                # compute pca components for all windows in the event
                first_components = compute_pca(dataPath, stations[stat], chans,
                                               fs, winLen, slide, numSteps,
                                               freq, eventLims)

                # correct polarization direction based on first arrival
                first_components_corrected = correct_polarization(
                    first_components, stations, first_stat, avg_stat_x,
                    avg_stat_y, stat_x, stat_y)

                # sum results (this is vector sum across stations of pca first components for each window)
                first_component_sums = first_component_sums + first_components_corrected

            # give user output every % complete
            if round(i / len(threshInd) * 100) > round(
                (i - 1) / len(threshInd) * 100):
                print(
                    str(round(i / len(threshInd) * 100)) +
                    " % complete (cluster " + str(c) + ")")

            # fill results vector
            all_first_components[i * numSteps:(i + 1) *
                                 numSteps, :] = first_component_sums

        except Exception:
            # give output if no data on current station
            print("Skipping cluster " + str(c) + " event " + str(i) +
                  " (missing data on " + stations[stat] + ")")

    # make plots
    fig, ax = plt.subplots(nrows=2,
                           ncols=1,
                           figsize=(8, 8),
                           gridspec_kw={'height_ratios': [1, 0.4]})

    # do some stuff
    corners = np.array([
        [sat_data.bounds[0], sat_data.bounds[1]],  # bottom left
        [sat_data.bounds[0], sat_data.bounds[3]],  # top left
        [sat_data.bounds[2], sat_data.bounds[1]],  # bottom right
        [sat_data.bounds[2], sat_data.bounds[3]]
    ])  # top right
    corners_lon, corners_lat = transform(p2, p1, corners[:, 0], corners[:, 1])

    # plot imagery
    show(sat_data, ax=ax[0], cmap="gray")

    # handle axes
    percent = xcorr_percent_thresh * 100
    plt.suptitle("Cluster " + str(c) + " Polarizations (top " + str(percent) +
                 "% of events)")

    # make array for storage
    back_azimuths = np.empty((0, 1), "float64")

    # plot pca components that exceed the norm threshold
    # Be wary: the transformed coordinate system's x-axis is metres north and its
    # y-axis is metres east, so all_first_components[:, 0] (cartesian x, pointing
    # east) runs along the transformed y-axis, while all_first_components[:, 1]
    # (cartesian y, pointing north) runs along the transformed x-axis.
    count = 0
    for s in range(len(all_first_components)):

        # only plot and save results if length of resultant vector has a norm exceeding the threshold
        if np.linalg.norm(all_first_components[s, :]) > norm_thresh:

            # calculate back azimuths and save in array
            baz = compute_baz(all_first_components[s, :])
            back_azimuths = np.vstack((back_azimuths, baz))
            count += 1

    # compute histogram of back azimuths
    baz_hist, edges = np.histogram(back_azimuths, bins=np.linspace(0, 360, 37))

    # set up colormap
    colors = [cm.plasma(x) for x in np.linspace(0, 1, max(baz_hist) + 1)]

    # plot all rays in 10-degree bins with length proportional to # of windows in that bin
    rays = np.zeros((36, 2), 'float64')
    scale = 40000
    # a circumference-based width, 2 * np.pi * scale / 36, was tried first;
    # the fixed value below overrides it
    max_width = 8
    for i in range(36):
        angle = i * 10
        rays[i, :] = compute_rays(angle)
        rayLength = baz_hist[i] / max(baz_hist) * scale
        [x, y] = [
            np.linspace(avg_stat_x, avg_stat_x + rays[i, 0] * rayLength, 100),
            np.linspace(avg_stat_y, avg_stat_y + rays[i, 1] * rayLength, 100)
        ]
        lwidths = np.linspace(0, max_width, 100) * rayLength / scale
        points = np.array([x, y]).T.reshape(-1, 1, 2)
        segments = np.concatenate([points[:-1], points[1:]], axis=1)
        lc = LineCollection(segments,
                            linewidths=lwidths,
                            color='maroon',
                            alpha=0.5,
                            zorder=len(all_first_components) * 10)
        ax[0].add_collection(lc)

    # define, transform, and plot lat/lon grid
    lat = [-74, -75]
    lon = [-98, -100, -102, -104]
    x_lab_pos = []
    y_lab_pos = []
    line = np.linspace(corners_lat[0] + 1, corners_lat[2] - 1, 100)
    for l in lon:
        line_x, line_y = transform(p1, p2, np.linspace(l, l, 100), line)
        ax[0].plot(line_x,
                   line_y,
                   linestyle='--',
                   linewidth=1,
                   c='gray',
                   alpha=1)
        y_lab_pos.append(line_y[np.argmin(np.abs(line_x - corners[0, 0]))])
    line = np.linspace(corners_lon[0] - 2, corners_lon[1] + 1, 100)
    for l in lat:
        line_x, line_y = transform(p1, p2, line, np.linspace(l, l, 100))
        ax[0].plot(line_x,
                   line_y,
                   linestyle='--',
                   linewidth=1,
                   c='gray',
                   alpha=1)
        x_lab_pos.append(line_x[np.argmin(np.abs(line_y - corners[0, 1]))])
    ax[0].set_xlim([corners[0, 0], corners[2, 0]])
    ax[0].set_ylim([corners[0, 1], corners[1, 1]])

    ax[0].set_xticks(x_lab_pos)
    ax[0].set_xticklabels(
        labels=[str(lat[0]) + r'$^\circ$',
                str(lat[1]) + r'$^\circ$'])
    ax[0].set_xlabel("Latitude")
    ax[0].set_yticks(y_lab_pos)
    ax[0].set_yticklabels(labels=[
        str(lon[0]) + r'$^\circ$',
        str(lon[1]) + r'$^\circ$',
        str(lon[2]) + r'$^\circ$',
        str(lon[3]) + r'$^\circ$'
    ])
    ax[0].set_ylabel("Longitude")

    # colors
    k1 = plt.rcParams['axes.prop_cycle'].by_key()['color'][0]
    k2 = plt.rcParams['axes.prop_cycle'].by_key()['color'][1]

    # plot ice front
    front_x = [
        -1.644e6, -1.64e6, -1.638e6, -1.626e6, -1.611e6, -1.6095e6, -1.6055e6,
        -1.6038e6, -1.598e6, -1.6005e6, -1.6e6, -1.595e6
    ]
    front_y = [
        -3.34e5, -3.33e5, -3.44e5, -3.445e5, -3.475e5, -3.43e5, -3.4e5,
        -3.413e5, -3.356e5, -3.32e5, -3.289e5, -3.29e5
    ]
    ax[0].plot(front_x, front_y, c=k1, zorder=len(all_first_components) * 10)

    # plot rift
    rift1_x = [-1.63e6, -1.6233e6, -1.6132e6, -1.6027e6]
    rift1_y = [-3.255e5, -3.237e5, -3.236e5, -3.281e5]
    ax[0].plot(rift1_x, rift1_y, c=k2, zorder=len(all_first_components) * 10)
    rift2_x = [-1.63e6, -1.6232e6, -1.6132e6]
    rift2_y = [-3.28e5, -3.2706e5, -3.236e5]
    ax[0].plot(rift2_x, rift2_y, c=k2, zorder=len(all_first_components) * 10)

    # plot station locations
    ax[0].scatter(stat_x, stat_y, marker="^", c='black', zorder=count * 10)

    # add north arrow
    ax[0].arrow(avg_stat_x - 65000,
                avg_stat_y + 70000,
                -10000,
                0,
                width=500,
                head_width=3000,
                head_length=3000,
                fc="k",
                ec="k",
                zorder=len(all_first_components) * 10)
    ax[0].text(avg_stat_x - 74000,
               avg_stat_y + 73000,
               "N",
               size="large",
               zorder=len(all_first_components) * 10)

    # add distance scale
    ax[0].plot(np.linspace(avg_stat_x - 60000, avg_stat_x - 80000, 10),
               np.ones(10) * avg_stat_y - 30000,
               c="k")
    ax[0].text(avg_stat_x - 82000, avg_stat_y - 26000, "20 km", size="medium")

    # plot events and centroid
    waveFile = h5py.File(
        templatePath + str(numCluster) + "/aligned_cluster" + str(c) +
        "_waveform_matrix_" + str(prefiltFreq[0]) + "-" + str(prefiltFreq[1]) +
        "Hz.h5", "r")
    alignedWaves = np.array(list(waveFile["waveforms"]))
    waveFile.close()
    alignedWave_fs = 2
    t = np.linspace(0, snipLen, snipLen * alignedWave_fs + 1)
    if _3D:
        t = np.linspace(0, snipLen * 3, (snipLen * alignedWave_fs + 1) * 3)
    maxAmps = np.zeros((len(threshInd), 1), 'float64')
    for w in range(len(threshInd)):
        ax[1].plot(t, alignedWaves[threshInd[w]], 'k', alpha=0.1)
        maxAmps[w] = np.max(np.abs(alignedWaves[threshInd[w]]))
    ax[1].plot(
        t, centroids[c].ravel() / np.max(abs(centroids[c].ravel())) *
        np.median(maxAmps))
    ax[1].set_ylim([-4 * np.median(maxAmps), 4 * np.median(maxAmps)])
    ax[1].title.set_text('Centroid and Events Used in Polarization Analysis')
    ax[1].ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
    ax[1].set_xlabel("Time (seconds)")
    ax[1].set_ylabel("Velocity (m/s)")
    ax[1].set_xlim([t[0], t[-1]])
    if _3D:
        clusterChans = ["HHZ", "HHN", "HHE"]
        ax[1].set_xticks([
            0, snipLen / 2, snipLen, snipLen * 3 / 2, snipLen * 2,
            snipLen * 5 / 2, snipLen * 3
        ])
        xPos = [snipLen, snipLen * 2]
        for xc in xPos:
            ax[1].axvline(x=xc, color='k', linestyle='--')
        ax[1].set_xticklabels([
            '0', '250\n' + clusterChans[0], '500  0   ',
            '250\n' + clusterChans[1], '500  0   ', '250\n' + clusterChans[2],
            '500'
        ])
    # a single call: a second bare ax[1].grid() would toggle the grid back off
    ax[1].grid(linestyle=":")

    plt.tight_layout()

    #plt.show()
    plt.savefig(outPath + "win_len_" + str(winLen) + "/norm>" +
                str(norm_thresh) + "/top_" + str(percent) + "%/cluster_" +
                str(c) + "_polarizations.png")
    plt.close()

    # save actual backazimuth data
    outFile = h5py.File(
        outPath + "win_len_" + str(winLen) + "/norm>" + str(norm_thresh) +
        "/top_" + str(percent) + "%/cluster_" + str(c) + "_backazimuths.h5",
        "w")
    outFile.create_dataset("backazimuths", data=back_azimuths)
    outFile.close()
Example #9
from pyproj import Proj


def get_proj_from_srid(srid):
    return Proj(init='EPSG:{}'.format(srid))
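
A usage sketch, assuming a projected SRID such as UTM zone 33N (EPSG:32633); calling the returned Proj maps lon/lat to projected metres:

proj = get_proj_from_srid(32633)
x, y = proj(15.0, 60.0)  # 15°E is zone 33's central meridian, so x is the 500000 m false easting
print(x, y)
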
Example #10
def main():
    global inputArgs, grib, dir_path  #Make our global vars: grib is the object that will hold our Grib Class.
    dir_path = os.path.dirname(os.path.realpath(__file__))
    comparison_days = [0, -7]
    inputArgs = handle_args(
        sys.argv)  #All input arguments if run on the command line.
    for deltaDay in comparison_days:
        # MAKE SURE TO UNCOMMENT #inputArgs.date2 when putting back into production
        if deltaDay == 0:
            date2 = None
        else:
            date2 = ((datetime.datetime.now(pytz.timezone('US/Pacific'))) +
                     datetime.timedelta(days=deltaDay)).strftime("%Y%m%d")

        ##############
        # Debugging
        inputArgs.date = (datetime.datetime.today() -
                          datetime.timedelta(days=1)).strftime("%Y%m%d")
        #inputArgs.date = time.strftime("%Y%m%d")
        #inputArgs.date = str(dayNum)
        inputArgs.date2 = date2  #Comment this out for just one date
        inputArgs.map = True  # Make the map and save png to folder.
        inputArgs.plot = False
        findValueAtPoint = False  # Find all the values at specific lat/lng points within an excel file.
        #################
        grib = Grib()  #Assign variable to the Grib Class.
        grib.model = inputArgs.model  #Our model will always be "snodas" for this program
        grib.displayunits = inputArgs.displayunits
        grib.basin = inputArgs.basin  # Basin can be "French_Meadows", "Hell_Hole", or "MFP", this gets shapefile

        # The bounding box clips the raster to a region of interest (e.g. CA). This makes the raster MUCH smaller
        # and easier to work with. See gdal.Open -> gdal.Translate below for where this is actually used.
        grib.bbox = [
            -125.0, 50.0, -115.0, 30.0
        ]  # [upper-left lon, upper-left lat, lower-right lon, lower-right lat]
        grib = get_snowdas(
            grib, inputArgs.date
        )  #Get the snodas file and save data into the object variable grib
        #pngFile = makePNG()
        # Any reprojections of grib.gribAll have already been done in get_snowdas.
        # The original projection of snodas is EPSG:4326 (lat/lng), so it has been changed to EPSG:3857 (x/y) in get_snowdas
        projInfo = grib.gribAll.GetProjection()

        geoinformation = grib.gribAll.GetGeoTransform(
        )  #Get the geoinformation from the grib file.

        xres = geoinformation[1]
        yres = geoinformation[5]
        xmin = geoinformation[0]
        xmax = geoinformation[0] + (xres * grib.gribAll.RasterXSize)
        ymin = geoinformation[3] + (yres * grib.gribAll.RasterYSize)
        ymax = geoinformation[3]

        spatialRef = osr.SpatialReference()
        spatialRef.ImportFromWkt(projInfo)
        spatialRefProj = spatialRef.ExportToProj4()

        # create a grid of xy (or lat/lng) coordinates in the original projection
        xy_source = np.mgrid[xmin:xmax:xres, ymax:ymin:yres]
        xx, yy = xy_source

        # A numpy grid of all the x/y into lat/lng
        # This will convert your projection to lat/lng (it's this simple).
        lons, lats = Proj(spatialRefProj)(xx, yy, inverse=True)

        # Find the center point of each grid box.
        # This says move over 1/2 a grid box in the x direction and move down (since yres is -) in the
        # y direction. Also, the +yres (remember, yres is -) is saying the starting point of this array will
        # trim off the y direction by one row (since it's shifted off the grid)
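        # e.g. with xmin=0 and xres=1000 the edge grid runs 0, 1000, 2000, ...
        # while this grid runs 500, 1500, ... (the cell centres); the half-cell
        # shift also drops the final edge that would fall off the grid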
        xy_source_centerPt = np.mgrid[xmin + (xres / 2):xmax:xres,
                                      ymax + (yres / 2):ymin:yres]
        xxC, yyC = xy_source_centerPt

        lons_centerPt, lats_centerPt = Proj(spatialRefProj)(xxC,
                                                            yyC,
                                                            inverse=True)

        if grib.basin != "Hell_Hole":
            mask = createMask(xxC, yyC, spatialRefProj)
            grib.basinTotal, grib.basinSWE = calculateBasin(
                mask, grib, xres, yres)

        # The shape file for the Hell Hole basin includes the SMUD domain. Therefore, if we want to extract the SMUD
        # domain, we create another mask on top of the Hell Hole mask. This means that any grid point
        # outside of both domains will still = 0. The SMUD domain will contain its mask AND the Hell Hole mask.
        if grib.basin == 'Hell_Hole':
            grib.basin = 'Hell_Hole_SMUD'  #This is just to get the correct directory structure
            submask = createMask(
                xxC, yyC, spatialRefProj
            )  # All areas in array = to 1 will be in SMUD's basin
            smud_BasinArea = grib.basinArea  # Used to remove SMUD's basin area (in m^2) from Hell_Hole's basin.
            # Get SMUD's information for the SMUD submask
            grib.SMUDbasinTotal, grib.SMUDbasinSWE = calculateBasin(
                submask, grib, xres, yres)

            grib.basin = 'Hell_Hole'  # reset back
            mask = createMask(
                xxC, yyC, spatialRefProj
            )  #This will be the entire Hell Hole basin, which includes SMUD
            hhMask = mask + submask  # Hell hole basin is now anywhere where hhMask = 1 and SMUD is anywhere it = 2
            hhMask[hhMask != 1] = 0  # Set anything outside of Hell Hole's mask = 0
            grib.basinArea = grib.basinArea - smud_BasinArea  # grib.basinArea currently includes smuds basin, so remove it.

            # Get HellHoles's information from the hell hole submask
            grib.basinTotal, grib.basinSWE = calculateBasin(
                hhMask, grib, xres, yres)
            print("Current Basin Total: " + str(grib.basinTotal) +
                  " SMUD Total: " + str(grib.SMUDbasinTotal))
            #grib.basinTotal = grib.basinTotal - (0.92 * smudBasinTotal[0])
            mask = hhMask  # Need to do this so we can use the correct mask in compareDates and in calculateByElevation

        # Calculate the difference between two rasters
        if inputArgs.date2 is not None:
            grib.basinTotal, grib.basinSWE = compareDates(
                mask, grib, xres, yres)

        #Need to do this after Hell_Hole's data has been manipulated (to account for SMUD)
        elevation_bins = calculateByElevation(mask, grib, xres, yres)

        #Send data for writing to Excel File
        if deltaDay == 0:
            excel_output(elevation_bins)

        if inputArgs.plot:
            makePlot(elevation_bins, deltaDay)
        print(elevation_bins)

        print(inputArgs.date, " Basin Total: ", grib.basinTotal)

        #findValue will return a dataframe with SWE values at various lat/lng points.
        df_ptVal = None
        if findValueAtPoint:
            df_ptVal = findPointValue(spatialRefProj, xy_source)

        if inputArgs.map:
            fig = plt.figure()
            ax = fig.add_subplot(111)
            m = Basemap(llcrnrlon=-122.8,
                        llcrnrlat=37.3,
                        urcrnrlon=-119.0,
                        urcrnrlat=40.3,
                        ax=ax)

            #m.arcgisimage(service='World_Imagery', xpixels=2000, verbose=True)
            im = Image.open(
                urlopen(
                    "http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/export?bbox=-122.8,37.3,-119.0,40.3&bboxSR=4326&imageSR=4326&size=2000,1578&dpi=96&format=png32&transparent=true&f=image"
                ))
            m.imshow(im, origin='upper')

            #For inset
            # loc =>'upper right': 1,
            # 'upper left': 2,
            # 'lower left': 3,
            # 'lower right': 4,
            # 'right': 5,
            # 'center left': 6,
            # 'center right': 7,
            # 'lower center': 8,
            # 'upper center': 9,
            # 'center': 10
            axin = inset_axes(m.ax, width="40%", height="40%", loc=8)

            m2 = Basemap(llcrnrlon=-120.7,
                         llcrnrlat=38.7,
                         urcrnrlon=-120.1,
                         urcrnrlat=39.3,
                         ax=axin)

            im2 = Image.open(
                urlopen(
                    "http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/export?bbox=-120.7,38.7,-120.09999999999998,39.3&bboxSR=4326&imageSR=4326&size=2000,1999&dpi=96&format=png32&transparent=true&f=image"
                ))
            m2.imshow(im2, origin='upper')

            #m2.arcgisimage(service='World_Imagery', xpixels=2000, verbose=True)
            mark_inset(ax, axin, loc1=2, loc2=4, fc="none", ec="0.5")

            ###################################DEBUGGING AREA###############################################################
            # Debugging: Test to prove a given lat/lng pair is accessing the correct grid box:

            #*********TEST 1: Test for center points
            #grib.data[0,0] = 15 #increase the variable by some arbitrary amount so it stands out.
            #xpts, ypts = m(lons_centerPt[0,0],lats_centerPt[0,0]) #This should be in the dead center of grid[0,0]
            #m.plot(xpts,ypts, 'ro')

            #*********TEST 2: Test for first grid box
            # Test to see a if the point at [x,y] is in the upper right corner of the cell (it better be!)
            #xpts, ypts = m(lons[0, 0], lats[0, 0])  # should be in upper right corner of cell
            #m.plot(xpts, ypts, 'bo')

            # *********TEST 3: Test for first grid box
            # Test to see the location of center points of each grid in polygon
            # To make this work, uncomment the variables in def create_mask
            #debug_Xpoly_center_pts, debug_Ypoly_center_pts = m(debugCenterX, debugCenterY)
            #m.plot(debug_Xpoly_center_pts, debug_Ypoly_center_pts, 'bo')

            # *********TEST 4: Test grid box size (In lat lng coords)
            # This is for use in a Basemap projection with lat/lon (e.g. EPSG:4326)
            #testX = np.array([[-120.1, -120.1], [-120.10833, -120.10833]])
            #testY = np.array([[39.0, 39.00833], [39.0, 39.00833]])
            # testVal = np.array([[4,4],[4,4]])

            # For use in basemap projection with x/y (e.g. espg:3857. In m=basemap just include the argument projection='merc')
            # testX = np.array([[500975, 500975], [(500975 + 1172), (500975 + 1172)]])
            # testY = np.array([[502363, (502363 + 1172)], [502363, (502363 + 1172)]])
            #testVal = np.array([[18, 18], [18, 18]])
            #im1 = m.pcolormesh(testX, testY, testVal, cmap=plt.cm.jet, vmin=0.1, vmax=10, latlon=False, alpha=0.5)

            # Test to see all points
            # xtest, ytest = m(lons,lats)
            # m.plot(xtest,ytest, 'bo')
            ################################################################################################################

            hr = 0
            makeMap(lons, lats, hr, m, m2, df_ptVal, deltaDay)
    return
Example #11
    def test_get_timeseries(self):
        """
        Simple regression test of WRF data repository.
        """
        EPSG, bbox, bpoly = self.wrf_epsg_bbox

        # Period start
        n_hours = 60
        t0 = api.YMDhms(1999, 10)
        date_str = "{}-{:02}".format(t0.year, t0.month)
        utc = api.Calendar()  # No offset gives Utc
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository",
                             "wrf_data_repository")
        f1 = "wrfout_d03_{}".format(date_str)

        wrf1 = WRFDataRepository(EPSG,
                                 base_dir,
                                 filename=f1,
                                 allow_subset=True)
        wrf1_data_names = ("temperature", "wind_speed", "precipitation",
                           "relative_humidity", "radiation")
        sources = wrf1.get_timeseries(wrf1_data_names,
                                      period,
                                      geo_location_criteria=bpoly)
        self.assertTrue(len(sources) > 0)

        self.assertTrue(set(sources) == set(wrf1_data_names))
        self.assertTrue(sources["temperature"][0].ts.size() == n_hours + 1)
        r0 = sources["radiation"][0].ts
        p0 = sources["precipitation"][0].ts
        temp0 = sources["temperature"][0].ts
        self.assertTrue(r0.size() == n_hours + 1)
        self.assertTrue(p0.size() == n_hours + 1)
        self.assertTrue(r0.time(0) == temp0.time(0))
        self.assertTrue(p0.time(0) == temp0.time(0))
        self.assertTrue(r0.time(r0.size() - 1) == temp0.time(temp0.size() - 1))
        self.assertTrue(p0.time(r0.size() - 1) == temp0.time(temp0.size() - 1))
        # assertTrue(a, b) would treat b as a failure message; equality is the intent
        self.assertEqual(p0.time(0), period.start)

        # Number test:
        # asserting shyft-sources time series are same as time series of corresponding location in wrf dataset.
        dset = Dataset(path.join(base_dir, f1))
        lat = dset.variables["XLAT"]
        lon = dset.variables["XLONG"]

        wrf_data = {}

        wrf_data["temperature"] = dset.variables["T2"][:]
        wrf_data["precipitation"] = dset.variables["PREC_ACC_NC"][:]
        wrf_data["radiation"] = dset.variables["SWDOWN"][:]
        pressure = dset.variables["PSFC"][:]
        mixing_ratio = dset.variables["Q2"][:]
        wrf_data["relative_humidity"] = wrf1._calculate_rel_hum(
            wrf_data["temperature"], pressure, mixing_ratio)
        wrf_data["temperature"] -= 273.16

        data_cs = "latlong"
        target_cs = "+init=EPSG:32643"
        data_proj = Proj(proj=data_cs)
        target_proj = Proj(target_cs)
        x, y = transform(data_proj, target_proj, lon[0, :, :], lat[0, :, :])

        for name, wrf_d in wrf_data.items():
            srs = sources[name]
            for i, s in enumerate(srs):
                mp = s.mid_point()
                x_ts, y_ts, z_ts = mp.x, mp.y, mp.z
                ts = s.ts
                ts_values = ts.v.to_numpy()

                # find indices in wrf-dataset
                m = (x == x_ts) & (y == y_ts)
                idxs = np.where(m > 0)
                x_idx, y_idx = idxs[0][0], idxs[1][
                    0]  # assuming geo-location is unique in dataset
                self.assertTrue(
                    np.allclose(ts_values,
                                wrf_d[:n_hours + 1, x_idx, y_idx],
                                rtol=1e-4,
                                atol=1e-4),
                    f"wrf and shyft-TS of {name} are not the same.")
Example #12
    def get_region_model(self, region_id, catchments=None):
        """
        Return a fully specified shyft api region_model for region_id, based on data found
        in netcdf dataset.

        Parameters
        -----------
        region_id: string
            unique identifier of region in data

        catchments: list of unique integers
            catchment indices when extracting a region consisting of a subset
            of the catchments; has attribs to construct params and cells etc.

        Returns
        -------
        region_model: shyft.api type
        """

        with Dataset(self._data_file) as dset:
            grp = dset.groups["elevation"]
            xcoord = grp.variables["xcoord"][:]
            ycoord = grp.variables["ycoord"][:]
            dataset_epsg = None
            if hasattr(grp, "epsg"):
                dataset_epsg = grp.epsg
            if hasattr(grp, "EPSG"):
                dataset_epsg = grp.EPSG
            if not dataset_epsg:
                raise interfaces.InterfaceError(
                    "netcdf: epsg attr not found in group elevation")
            if dataset_epsg != self._epsg:
                source_cs = "+proj=utm +zone={} +ellps={} +datum={} +units=m +no_defs".format(
                    int(self._epsg) - 32600, "WGS84", "WGS84")
                target_cs = "+proj=utm +zone={} +ellps={} +datum={} +units=m +no_defs".format(
                    int(dataset_epsg) - 32600, "WGS84", "WGS84")
                source_proj = Proj(source_cs)
                target_proj = Proj(target_cs)
                mesh2d = np.dstack(
                    transform(source_proj, target_proj,
                              *np.meshgrid(xcoord, ycoord))).reshape(-1, 2)
                dx = xcoord[1] - xcoord[0]
                dy = ycoord[1] - ycoord[0]
                x_corners = np.empty(len(xcoord) + 1, dtype=xcoord.dtype)
                y_corners = np.empty(len(ycoord) + 1, dtype=ycoord.dtype)
                x_corners[1:] = xcoord + dx / 2.0
                x_corners[0] = xcoord[0] - dx / 2.0
                y_corners[1:] = ycoord + dy / 2.0
                y_corners[0] = ycoord[0] - dy / 2.0
                xc, yc = transform(source_proj, target_proj,
                                   *np.meshgrid(x_corners, y_corners))
                areas = np.empty((len(xcoord), len(ycoord)),
                                 dtype=xcoord.dtype)
                for i in range(len(xcoord)):
                    for j in range(len(ycoord)):
                        pts = [(xc[j, i], yc[j, i]),
                               (xc[j, i + 1], yc[j, i + 1]),
                               (xc[j + 1, i + 1], yc[j + 1, i + 1]),
                               (xc[j + 1, i], yc[j + 1, i])]
                        areas[i, j] = Polygon(pts).area
                areas = areas.flatten()[self.mask]
            else:
                mesh2d = np.dstack(np.meshgrid(xcoord, ycoord)).reshape(-1, 2)
                areas = np.ones(len(xcoord) * len(ycoord), dtype=xcoord.dtype)[
                    self.mask] * (xcoord[1] - xcoord[0]) * (ycoord[1] -
                                                            ycoord[0])
            elevation = grp.variables["elevation"][:]
            coordinates = np.hstack((mesh2d, elevation.reshape(-1,
                                                               1)))[self.mask]
            catchments = dset.groups["catchments"].variables[
                "catchments"][:].reshape(-1)[self.mask]
            c_ids = dset.groups["catchments"].variables["catchment_indices"][:]

            def frac_extract(name):
                g = dset.groups  # Alias for readability
                return g[name].variables[name][:].reshape(-1)[self.mask]

            ff = frac_extract("forest-fraction")
            lf = frac_extract("lake-fraction")
            rf = frac_extract("reservoir-fraction")
            gf = frac_extract("glacier-fraction")
        # Construct bounding region
        box_fields = set(("upper_left_x", "upper_left_y", "step_x", "step_y",
                          "nx", "ny", "EPSG"))
        if box_fields.issubset(self._rconf.domain()):
            tmp = self._rconf.domain()
            epsg = tmp["EPSG"]
            x_min = tmp["upper_left_x"]
            x_max = x_min + tmp["nx"] * tmp["step_x"]
            y_max = tmp["upper_left_y"]  # the original mistakenly read "upper_left_x"
            y_min = y_max - tmp["ny"] * tmp["step_y"]
            bounding_region = BoundingBoxRegion(np.array([x_min, x_max]),
                                                np.array([y_min, y_max]), epsg,
                                                self._epsg)
        else:
            bounding_region = BoundingBoxRegion(xcoord, ycoord, dataset_epsg,
                                                self._epsg)

        # Construct region parameter:
        name_map = {
            "gamma_snow": "gs",
            "priestley_taylor": "pt",
            "kirchner": "kirchner",
            "actual_evapotranspiration": "ae",
            "skaugen": "skaugen",
            "hbv_snow": "snow"
        }
        region_parameter = self._region_model.parameter_t()
        for p_type_name, value_ in iteritems(self._mconf.model_parameters()):
            if p_type_name in name_map:
                if hasattr(region_parameter, name_map[p_type_name]):
                    sub_param = getattr(region_parameter,
                                        name_map[p_type_name])
                    for p, v in iteritems(value_):
                        setattr(sub_param, p, v)
            elif p_type_name == "p_corr_scale_factor":
                region_parameter.p_corr.scale_factor = value_

        # TODO: Move into yaml file similar to p_corr_scale_factor
        radiation_slope_factor = 0.9

        # Construct cells
        cell_vector = self._region_model.cell_t.vector_t()
        for pt, a, c_id, ff, lf, rf, gf in zip(coordinates, areas, catchments,
                                               ff, lf, rf, gf):
            cell = self._region_model.cell_t()
            cell.geo = api.GeoCellData(
                api.GeoPoint(*pt), a, int(c_id), radiation_slope_factor,
                api.LandTypeFractions(gf, lf, rf, ff, 1.0 - gf - lf - rf - ff))
            cell_vector.append(cell)

        # Construct catchment overrides
        catchment_parameters = self._region_model.parameter_t.map_t()
        for k, v in iteritems(self._rconf.parameter_overrides()):
            if k in c_ids:
                param = self._region_model.parameter_t(region_parameter)
                for p_type_name, value_ in iteritems(v):
                    if p_type_name in name_map:
                        sub_param = getattr(param, name_map[p_type_name])
                        for p, pv in iteritems(value_):
                            setattr(sub_param, p, pv)
                    elif p_type_name == "p_corr_scale_factor":
                        param.p_corr.scale_factor = value_
                    else:
                        # Don't let unknown params go unnoticed
                        raise RegionConfigError(
                            "parameter {} is not in the set of allowed ones".
                            format(p_type_name))

                catchment_parameters[k] = param
        region_model = self._region_model(cell_vector, region_parameter,
                                          catchment_parameters)

        def do_clone(x):
            clone = x.__class__(x)
            clone.bounding_region = bounding_region
            return clone

        region_model.bounding_region = bounding_region
        region_model.clone = do_clone

        return region_model
Example #13
    def make_contours2(self,
                       con_var='q',
                       lats=np.arange(50, 86, 2),
                       plot=False):
        """
        Make contours for input into CAS. This method uses projection in to 
        x,y coordinates to avoid problem of contour segments cut off by prime
        meridian.
        """

        if os.path.isdir(self.working_dir + 'contours'):
            try:
                os.system('rm -f ' + self.working_dir + 'contours/*.in')
            except OSError:
                pass
        else:
            os.system('mkdir ' + self.working_dir + 'contours')

        # Only use 90 - 20 latitude
        d = self.ds[con_var].sel(latitude=slice(90, 20))[self.start_time, :]

        # Find contour levels by interpolation at lon=0
        #cons = d.mean(dim = 'longitude').interp(latitude=lats).data
        cons = d.isel(longitude=10).interp(latitude=lats).data
        print(cons)
        pa = Proj("+proj=stere +lat_0=90", preserve_units=True)
        lonv, latv = np.meshgrid(d.longitude.data, d.latitude.data)
        x, y = pa(lonv, latv)
        reg_x = np.linspace(np.min(x), np.max(x), 500)
        reg_y = np.linspace(np.min(y), np.max(y), 500)
        xi, yi = np.meshgrid(reg_x, reg_y)
        # matplotlib.mlab.griddata has been removed from matplotlib;
        # scipy.interpolate.griddata is the drop-in replacement used here
        from scipy.interpolate import griddata
        d2 = griddata((x.flatten(), y.flatten()),
                      d.data.flatten(), (xi, yi),
                      method='linear')

        count = 0
        for icon in cons:

            inner = False
            if count > 0:
                if cons[count - 1] > icon:
                    inner = True

            fig = plt.figure(figsize=(10, 5))
            ax1 = fig.add_subplot(1, 2, 1)
            ax1.contourf(reg_x, reg_y, d2)
            xycon = ax1.contour(reg_x, reg_y, d2, [icon], colors='k')

            latloncon = []
            for iicon in range(len(xycon.allsegs[0])):
                ilons, ilats = pa(xycon.allsegs[0][iicon][:, 0],
                                  xycon.allsegs[0][iicon][:, 1],
                                  inverse=True)
                ilons = ilons % 360
                latloncon.append(np.vstack((ilons, ilats)).T)

            if len(latloncon) == 1:
                a = latloncon[0]
            else:
                print('more than one contour')
                lens = np.zeros(len(latloncon))
                for iicon in range(len(latloncon)):
                    lens[iicon] = self.calc_con_len(latloncon[iicon])
                if inner:
                    # 2nd longest contour
                    a = latloncon[np.where(lens == np.sort(lens)[-2])[0][0]]
                else:
                    a = latloncon[np.argmax(lens)]

            # This bit is important!
            # CA algorithm requires contour starting at meridian
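            # e.g. if a's longitudes run [120, 240, 0.3, 90], argmin picks index 2
            # and np.append rotates the rows so the contour starts near lon = 0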
            mina = np.argmin(a[:, 0])
            a = np.append(a[mina:, :], a[:mina, :], axis=0)

            if plot:
                lats = d.coords['latitude'].data
                lons = d.coords['longitude'].data
                #ax1.plot(a[:,0],a[:,1],color='red')
                ax2 = fig.add_subplot(1,
                                      2,
                                      2,
                                      projection=ccrs.NorthPolarStereo())
                theta = np.linspace(0, 2 * np.pi, 100)
                center, radius = [0.5, 0.5], 0.5
                verts = np.vstack([np.sin(theta), np.cos(theta)]).T
                circle = mpath.Path(verts * radius + center)
                ax2.set_boundary(circle, transform=ax2.transAxes)
                ax2.set_extent([-180, 180, 20, 90], ccrs.PlateCarree())
                ax2.gridlines()
                cyclic_data, cyclic_lons = cartopy.util.add_cyclic_point(
                    d.data, coord=lons)  ##
                con1 = ax2.contourf(cyclic_lons,
                                    lats,
                                    cyclic_data,
                                    transform=ccrs.PlateCarree())
                con = ax2.contour(cyclic_lons,
                                  lats,
                                  cyclic_data, [icon],
                                  colors='k',
                                  transform=ccrs.PlateCarree())
                ax2.plot(a[:, 0],
                         a[:, 1],
                         transform=ccrs.Geodetic(),
                         color='red')
                plt.show()
            else:
                plt.close()

            if inner:
                filename = self.working_dir + 'contours/%s_%.7f_tstep_%s_inner.in' % (
                    con_var, icon, self.start_time)
            else:
                filename = self.working_dir + 'contours/%s_%.7f_tstep_%s.in' % (
                    con_var, icon, self.start_time)

            with open(filename, "w") as csvfile:
                csvfile.write("Contour Advection with Surgery\n")
                csvfile.write("%s %.4f contour\n" % (con_var, icon))
                csvfile.write("\n")
                csvfile.write("%s  24  %.7f  %.7f  0.10000000  0.0000000\n" %
                              (self.ndays, self.time_step, self.time_step))
                csvfile.write("1 %s 0.00000\n" % a.shape[0])
                csvfile.write("%s %d %d 1.00000\n" %
                              (a.shape[0], a[0, 0], a[0, 1]))

            with open(filename, "a") as csvfile:
                writer = csv.writer(csvfile, delimiter=' ')
                for irow in range(a.shape[0]):
                    writer.writerow(a[irow, :])

            count += 1
Example #14
import numpy as np
from pyproj import Proj
from geopy.distance import vincenty
from sklearn.neighbors import KNeighborsRegressor
from sklearn.neural_network import MLPRegressor


def regression_subset(predictions, train, test, method):

    mean_error = []
    if method == 1:
        machine_learn = KNeighborsRegressor(n_neighbors=5, weights='distance')
    elif method == 2:
        machine_learn = MLPRegressor(random_state=0)
    #for each building
    for i in range(3):

        # select for training only samples from building i (0, 1, or 2)
        new_train = train.loc[train['BUILDINGID'] == i]
        # positions of the test samples predicted to be in building i
        indexes = [x for x in range(len(predictions)) if predictions[x] == i]

        if indexes:  # if list is not empty
            # training: samples with building == i
            X_train = new_train.iloc[:, 0:519]  # .ix is removed in modern pandas
            Y_train = new_train[['LONGITUDE', 'LATITUDE']]
            machine_learn.fit(X_train, Y_train)

            # testing: samples with predicted building == i
            new_test = test.iloc[indexes, :]
            X_test = new_test.iloc[:, 0:519]
            Y_test = new_test[['LONGITUDE', 'LATITUDE']]

            #Turn into list
            predicts_lon_lat = machine_learn.predict(X_test).tolist()
            Y_test = Y_test.values.tolist()

            # UTM zone 23 south; the original string's stray comma and grid
            # letter ("+zone=23K,") are not valid proj syntax
            myProj = Proj(
                "+proj=utm +zone=23 +south +ellps=WGS84 +datum=WGS84 +units=m +no_defs"
            )

            distance = []
            for j in range(len(predicts_lon_lat)):

                # convert UTM metres back to longitude/latitude
                lon_pred, lat_pred = myProj(predicts_lon_lat[j][0],
                                            predicts_lon_lat[j][1],
                                            inverse=True)
                lon_Y, lat_Y = myProj(Y_test[j][0], Y_test[j][1], inverse=True)

                # geopy's vincenty expects (latitude, longitude) pairs
                Y = (lat_Y, lon_Y)
                predict = (lat_pred, lon_pred)

                # the geodesic distance between the two points is the error
                distance.append(vincenty(Y, predict).meters)

                # If you want to use the haversine distance, uncomment the line below
                #print(haversine(lon_Y, lat_Y, lon_pred, lat_pred))

            mean_error.append(np.mean(distance))
            #print(np.mean(distance))
            #print(np.mean(distance))

    return np.mean(mean_error)
Example #15
from pyramid.view import view_config

import pyproj
import requests
from pyproj import Proj

p1 = Proj(init="EPSG:2169")
p2 = Proj(init="EPSG:4326")


def __reproject(_p):
    ll = pyproj.transform(p1, p2, _p[0], _p[1])
    _p[0] = round(ll[0], 6)
    _p[1] = round(ll[1], 6)
    return _p
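
A usage sketch of the helper above, using a hypothetical easting/northing pair in Luxembourg's LUREF grid (EPSG:2169), reprojected in place to WGS84 lon/lat:

p = __reproject([76430.0, 75103.0])  # illustrative LUREF coordinates
print(p)  # [lon, lat] rounded to 6 decimal places
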


@view_config(route_name='home', renderer="templates/mytemplate.pt")
def home(request):
    describe_url = "https://opendata.vdl.lu/odaweb/index.jsp?describe=1"
    catalog = requests.get(describe_url).json()
    _catalog = []
    for item in catalog["data"]:
        _item = {}
        _item["name"] = item["i18n"]["fr"]["name"]
        _item["id"] = item["id"]
        _catalog.append(_item)

    return {"_catalog": sorted(_catalog, key=lambda k: k['name'])}


@view_config(route_name='reproject', renderer="json")
def reproject(request):
Example #16
from pyproj import Proj, transform


def getCoords(lonlat, lon0):
    inProj = Proj(init='epsg:4326')  # this is lon/lat
    outProj = Proj(proj='tmerc', lon_0=float(lon0))
    coords = lonlat.split(" ")
    x, y = transform(inProj, outProj, float(coords[0]), float(coords[1]))
    return str(x) + " " + str(y)  # this is UTM
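
A usage sketch (illustrative values); the result is an "x y" string in a transverse Mercator projection centred on the supplied lon0:

print(getCoords("10.05 59.50", "10.0"))  # a point just east of the central meridian
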
Example #17
def netcdf2pcrInit(self, pcr, forcing):
    #-define input and output projections
    if getattr(self, forcing + 'InProj') == "rotated":
        inProj = Proj(init="epsg:4326")
    else:
        inProj = Proj(init=getattr(self, forcing + 'InProj'))
    outProj = Proj(init=getattr(self, forcing + 'OutProj'))

    #-get the attributes of cloneMap
    attributeClone = getMapAttributesALL(self.clonefile)
    cellsizeClone = attributeClone['cellsize']
    rowsClone = attributeClone['rows']
    colsClone = attributeClone['cols']
    xULClone = attributeClone['xUL']
    yULClone = attributeClone['yUL']

    #-read netcdf file
    f = nc.Dataset(getattr(self, forcing + 'NC'))
    filecache[getattr(self, forcing + 'NC')] = f

    #-get coordinates of upper right and lower left corners of model grid
    xURClone = xULClone + cellsizeClone * colsClone
    yURClone = yULClone
    xLLClone = xULClone
    yLLClone = yULClone - cellsizeClone * rowsClone
    yLRClone = yLLClone
    xLRClone = xURClone

    #-transform coordinates to netcdf projection coordinates
    xULCloneInput, yULCloneInput = transform(outProj, inProj, xULClone,
                                             yULClone)
    xURCloneInput, yURCloneInput = transform(outProj, inProj, xURClone,
                                             yURClone)
    xLRCloneInput, yLRCloneInput = transform(outProj, inProj, xLRClone,
                                             yLRClone)
    xLLCloneInput, yLLCloneInput = transform(outProj, inProj, xLLClone,
                                             yLLClone)

    #-determine netcdf cell size and subset coordinates to model domain
    if getattr(self, forcing + 'InProj') == "rotated":
        #-get coordinates from netcdf file
        xrot = f.variables[getattr(self, forcing + 'VarX')][:]
        yrot = f.variables[getattr(self, forcing + 'VarY')][:]

        #-get coordinates of north pole
        npLat = f.variables[getattr(self,
                                    forcing + 'VarX')].grid_north_pole_latitude
        npLon = f.variables[getattr(self, forcing +
                                    'VarX')].grid_north_pole_longitude

        #-transform x and y coordinates to grid
        xrot, yrot = np.meshgrid(xrot, yrot)

        #-transform rotated grid to lat,lon-coordinates
        xLatLon = xrot * 0
        yLatLon = yrot * 0
        for idx, row in enumerate(xrot):
            for idy, val in enumerate(row):
                x, y = rotated_grid_transform((xrot[idx, idy], yrot[idx, idy]),
                                              2, (npLon, npLat))
                xLatLon[idx, idy] = x
                yLatLon[idx, idy] = y

        #-transform x,y-coordinates to 2d array
        xyLatLon = [np.array(xLatLon).flatten(), np.array(yLatLon).flatten()]
        xyLatLon = list(map(list, zip(*xyLatLon)))

        #-function to find closest node from list of coordinates
        def closest_node(node, nodes):
            closest_index = distance.cdist([node], nodes).argmin()
            indices = np.where(
                np.ma.getdata(xLatLon) == nodes[closest_index][0])
            indices = np.array(indices).flatten().astype('int32')
            return indices.tolist()

        #-get indices of corner points clone map from netcdf
        indicesUL = closest_node((xULCloneInput, yULCloneInput), xyLatLon)
        indicesLL = closest_node((xLLCloneInput, yLLCloneInput), xyLatLon)
        indicesUR = closest_node((xURCloneInput, yURCloneInput), xyLatLon)
        indicesLR = closest_node((xLRCloneInput, yLRCloneInput), xyLatLon)

        #-determine indices of the corners of netcdf grid corresponding to model grid (+ buffer)
        xyUL = min(indicesUL[0], indicesLL[0], indicesUR[0], indicesLR[0]) - 2
        xyLL = max(indicesUL[0], indicesLL[0], indicesUR[0], indicesLR[0]) + 2
        xyUR = min(indicesUL[1], indicesLL[1], indicesUR[1], indicesLR[1]) - 2
        xyLR = max(indicesUL[1], indicesLL[1], indicesUR[1], indicesLR[1]) + 2

        #-determine x,y-coordinates corresponding to model grid (+ buffer) from netcdf grid
        x = xLatLon[xyUL:(xyLL + 1), xyUR:(xyLR + 1)]
        y = yLatLon[xyUL:(xyLL + 1), xyUR:(xyLR + 1)]

    else:
        #-get cell size from netcdf grid
        cellsizeInput = f.variables[getattr(
            self, forcing + 'VarY')][1] - f.variables[getattr(
                self, forcing + 'VarY')][0]
        cellsizeInput = float(cellsizeInput)

        #-determine x-coordinates corresponding to model grid (+ buffer) from netcdf grid
        xIdxSta = np.argmin(
            abs(f.variables[getattr(self, forcing + 'VarX')][:] -
                (min(xULCloneInput, xLLCloneInput) - 2 * cellsizeInput)))
        xIdxEnd = np.argmin(
            abs(f.variables[getattr(self, forcing + 'VarX')][:] -
                (max(xURCloneInput, xLRCloneInput) + 2 * cellsizeInput)))
        x = f.variables[getattr(self, forcing + 'VarX')][xIdxSta:(xIdxEnd + 1)]

        #-determine y-coordinates corresponding to model grid (+ buffer) from netcdf grid
        yIdxEnd = np.argmin(
            abs(f.variables[getattr(self, forcing + 'VarY')][:] -
                (max(yULCloneInput, yURCloneInput) + 2 * cellsizeInput)))
        yIdxSta = np.argmin(
            abs(f.variables[getattr(self, forcing + 'VarY')][:] -
                (min(yLLCloneInput, yLRCloneInput) - 2 * cellsizeInput)))
        y = f.variables[getattr(self, forcing + 'VarY')][yIdxSta:(yIdxEnd + 1)]

        #-transform x and y coordinates to grid
        x, y = np.meshgrid(x, y)

    #-project x and y coordinates to model grid projection
    x, y = transform(inProj, outProj, x, y)

    #-transform x and y coordinates to arrays
    x = np.asarray(x).ravel()
    y = np.asarray(y).ravel()

    #-determine model grid x and y coordinates and save in grid
    xi = np.arange(xULClone + cellsizeClone * 0.5,
                   (xULClone + cellsizeClone * 0.5) +
                   colsClone * cellsizeClone, cellsizeClone)
    yi = np.arange(
        (yULClone + cellsizeClone * 0.5) - rowsClone * cellsizeClone,
        yULClone + cellsizeClone * 0.5, cellsizeClone)
    yi = np.flipud(yi)
    xi, yi = np.meshgrid(xi, yi)

    #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
    setattr(self, forcing + 'x', x)
    setattr(self, forcing + 'y', y)
    setattr(self, forcing + 'xi', xi)
    setattr(self, forcing + 'yi', yi)
    if getattr(self, forcing + 'InProj') == "rotated":
        setattr(self, forcing + 'xyUL', xyUL)
        setattr(self, forcing + 'xyLL', xyLL)
        setattr(self, forcing + 'xyUR', xyUR)
        setattr(self, forcing + 'xyLR', xyLR)
    else:
        setattr(self, forcing + 'xIdxSta', xIdxSta)
        setattr(self, forcing + 'xIdxEnd', xIdxEnd)
        setattr(self, forcing + 'yIdxSta', yIdxSta)
        setattr(self, forcing + 'yIdxEnd', yIdxEnd)
Exemple #18
0
from pyproj import Proj, transform


def coord_converter(x1, y1, inCoord='esri:102672', outCoord='epsg:4326'):
    inProj = Proj(init=inCoord, preserve_units=True)
    outProj = Proj(init=outCoord)
    lon, lat = transform(inProj, outProj, x1, y1)

    return (lat, lon)
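A hedged usage sketch: the default inCoord is an Illinois State Plane definition in US feet (hence preserve_units=True), and the sample easting/northing below are made up for illustration:

# Hypothetical State Plane (feet) coordinates -> (lat, lon) in degrees
lat, lon = coord_converter(1150000.0, 1900000.0)
print(lat, lon)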
Exemple #19
0
    def create_new_file(self):
        """
        creates a new netcdf file, and initializes the file with
        positional and time variables
        plus the variable as described by self.ts_meta_info
        :return: None
        """

        with Dataset(self.file_path, 'w') as ds:
            ds.Conventions = 'CF-1.6'
            # dimensions
            ds.createDimension('station', 1)
            ds.createDimension('time', None)

            # standard variables
            # Coordinate Reference System
            crs = ds.createVariable('crs', 'i4')
            epsg_spec = 'EPSG:{0}'.format(self.ts_meta_info.epsg_id)
            crs.epsg_code = epsg_spec
            crs.proj4 = Proj(
                init=epsg_spec).srs  # shyft expects crs.proj4 to exist
            crs.grid_mapping_name = 'transverse_mercator'

            ts_id = ds.createVariable('series_name', 'str', ('station', ))
            ts_id.long_name = 'timeseries_id'
            ts_id.cf_role = 'timeseries_id'
            # ts_id.units = ''

            time = ds.createVariable('time',
                                     'i8', ('time', ),
                                     least_significant_digit=1,
                                     zlib=True)
            time.long_name = 'time'
            time.units = 'seconds since 1970-01-01 00:00:00 +00:00'
            time.calendar = 'gregorian'

            x = ds.createVariable('x', 'f8', ('station', ))
            x.axis = 'X'
            x.standard_name = 'projection_x_coordinate'
            x.units = 'm'

            y = ds.createVariable('y', 'f8', ('station', ))
            y.axis = 'Y'
            y.standard_name = 'projection_y_coordinate'
            y.units = 'm'

            z = ds.createVariable('z', 'f8', ('station', ))
            z.axis = 'Z'
            z.standard_name = 'height'
            z.long_name = 'height above mean sea level'
            z.units = 'm'

            v = ds.createVariable(self.ts_meta_info.variable_name,
                                  'f8',
                                  dimensions=('time', 'station'),
                                  zlib=self.ts_meta_info.zlib)

            v.units = self.ts_meta_info.units
            v.standard_name = self.ts_meta_info.standard_name
            v.long_name = self.ts_meta_info.long_name
            v.coordinates = 'y x z'
            v.grid_mapping = 'crs'
            x[0] = self.ts_meta_info.x
            y[0] = self.ts_meta_info.y
            z[0] = self.ts_meta_info.z
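A sketch of how the resulting file might be appended to afterwards; the 'series.nc' path and 'temperature' variable name are assumptions for illustration:

from netCDF4 import Dataset

with Dataset('series.nc', 'a') as ds:
    n = ds.variables['time'].shape[0]        # next free slot on the unlimited axis
    ds.variables['time'][n] = 1262304000     # seconds since 1970-01-01
    ds.variables['temperature'][n, 0] = 4.2  # single station -> station index 0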
Exemple #20
0
import csv
import json
import numpy as np
import matplotlib.pyplot as plt
from boto3.session import Session
from cartopy import config as cartoconfig
from datetime import datetime
from os import environ, remove
from PIL import Image
from pyproj import Proj

cartoconfig['data_dir'] = '/tmp/'

# NDFD CONUS Projection
p = Proj(
    '+units=m +a=6371200.0 +b=6371200.0 +lon_0=265.0 +proj=lcc +lat_2=25.0 +lat_1=25.0 +lat_0=25.0'
)
offset_x, offset_y = p(238.445999, 20.191999)
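For illustration, a hedged helper that maps a lon/lat to NDFD grid indices using the projected grid origin above; the 2539.703 m spacing is the nominal 2.5 km NDFD CONUS cell size and is an assumption here, not something this script defines:

NDFD_CELL_M = 2539.703  # assumed nominal 2.5 km NDFD CONUS grid spacing


def lonlat_to_grid(lon, lat):
    # Project to LCC metres, shift by the grid origin, divide by cell size
    x, y = p(lon, lat)
    return int((x - offset_x) // NDFD_CELL_M), int((y - offset_y) // NDFD_CELL_M)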

# Initialize boto3 clients
aws = Session()
athena = aws.client('athena')
s3 = aws.resource('s3')
bucket = s3.Bucket(environ['OUTPUT_BUCKET'])


# Some custom exceptions for state machine logic
class QueryIncompleteException(Exception):
    pass

Exemple #21
0
# -*- coding: utf-8 -*-
from shapefile import Reader
from pyproj import Proj, transform

lambert_93 = Proj("+init=EPSG:2154")
wgs_84 = Proj("+init=EPSG:4326")

# Example usage of the pyproj library
# x, y = 656936.8, 3042238.0
# lon, lat = transform(lambert_93, wgs_84, x, y)

sf = Reader("./tipi/data/DEPARTEMENT/DEPARTEMENT")
shapes = sf.shapes()
shapeRecs = sf.shapeRecords()
points = shapeRecs[5].shape.points

print(shapeRecs[5].record)
# print points

limite_departement = []
for i in points:
    lon, lat = transform(lambert_93, wgs_84, i[0], i[1])
    limite_departement.append([lon, lat])
    

departement = {
    "type": "Feature",
    "properties": {"party": "Republican"},
    # the original snippet breaks off inside "style"; the closing keys and
    # the geometry below are an assumed reconstruction
    "style": {
        "color": "#ff7800",
        "weight": 5,
        "opacity": 0.65
    },
    "geometry": {"type": "Polygon", "coordinates": [limite_departement]}
}
Exemple #22
0
from pyproj import Proj


def convert_3857_to_lonlat(x, y):
    p1 = Proj(init='epsg:3857')
    x2, y2 = p1(x, y, inverse=True)
    return x2, y2
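A quick sanity check of the inverse projection (sample Web Mercator metres chosen to land near Chicago; values are illustrative):

lon, lat = convert_3857_to_lonlat(-9757150.0, 5138530.0)
print(lon, lat)  # approximately (-87.65, 41.89)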
Exemple #23
0
def tm128_to_wgs84(x, y):
    return transform(Proj(**TM128), Proj(**WGS84), x, y)
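TM128 and WGS84 are proj-parameter dicts defined elsewhere in the original module and are not shown here. A widely circulated parameter set for Naver's Bessel-based TM128 grid looks like the following sketch; treat the exact values as an assumption, not part of this source:

# Assumed definitions (not from this source)
WGS84 = {'proj': 'latlong', 'datum': 'WGS84', 'ellps': 'WGS84'}
TM128 = {'proj': 'tmerc', 'lat_0': '38N', 'lon_0': '128E', 'k': 0.9999,
         'x_0': 400000, 'y_0': 600000, 'ellps': 'bessel',
         'towgs84': '-146.43,507.89,681.46'}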
Exemple #24
0
def read_schedule(schedule_path, epsg):
    """
    Read MATSim schedule
    :param schedule_path: path to the schedule.xml file
    :param epsg: 'epsg:12345'
    :return: list of Service objects
    """
    services = []
    transformer = Transformer.from_proj(Proj(epsg), Proj('epsg:4326'))

    def write_transitLinesTransitRoute(transitLine, transitRoutes,
                                       transportMode):
        mode = transportMode['transportMode']
        service_id = transitLine['transitLine']['id']
        service_routes = []
        for transitRoute, transitRoute_val in transitRoutes.items():
            stops = [
                Stop(s['stop']['refId'],
                     x=transit_stop_id_mapping[s['stop']['refId']]['x'],
                     y=transit_stop_id_mapping[s['stop']['refId']]['y'],
                     epsg=epsg,
                     transformer=transformer)
                for s in transitRoute_val['stops']
            ]
            for s in stops:
                s.add_additional_attributes(transit_stop_id_mapping[s.id])

            arrival_offsets = []
            departure_offsets = []
            await_departure = []
            for stop in transitRoute_val['stops']:
                if 'departureOffset' not in stop[
                        'stop'] and 'arrivalOffset' not in stop['stop']:
                    pass
                elif 'departureOffset' not in stop['stop']:
                    arrival_offsets.append(stop['stop']['arrivalOffset'])
                    departure_offsets.append(stop['stop']['arrivalOffset'])
                elif 'arrivalOffset' not in stop['stop']:
                    arrival_offsets.append(stop['stop']['departureOffset'])
                    departure_offsets.append(stop['stop']['departureOffset'])
                else:
                    arrival_offsets.append(stop['stop']['arrivalOffset'])
                    departure_offsets.append(stop['stop']['departureOffset'])

                if 'awaitDeparture' in stop['stop']:
                    await_departure.append(
                        str(stop['stop']['awaitDeparture']).lower() in
                        ['true', '1'])

            route = [
                r_val['link']['refId'] for r_val in transitRoute_val['links']
            ]

            trips = {}
            for dep in transitRoute_val['departure_list']:
                trips[dep['departure']
                      ['id']] = dep['departure']['departureTime']

            r = Route(route_short_name=transitLine['transitLine']['name'],
                      mode=mode,
                      stops=stops,
                      route=route,
                      trips=trips,
                      arrival_offsets=arrival_offsets,
                      departure_offsets=departure_offsets,
                      id=transitRoute,
                      await_departure=await_departure)
            service_routes.append(r)
        services.append(Service(id=service_id, routes=service_routes))

    transitLine = {}
    transitRoutes = {}
    transportMode = {}
    transitStops = {}
    transit_stop_id_mapping = {}
    is_minimalTransferTimes = False
    minimalTransferTimes = {}  # {'from_stop_id': {'stop': 'to_stop_id', 'transferTime': 0.0}}

    # transitLines
    for event, elem in ET.iterparse(schedule_path, events=('start', 'end')):
        if event == 'start':
            if elem.tag == 'stopFacility':
                attribs = elem.attrib
                if attribs['id'] not in transitStops:
                    transitStops[attribs['id']] = attribs
                if attribs['id'] not in transit_stop_id_mapping:
                    transit_stop_id_mapping[attribs['id']] = elem.attrib
            if elem.tag == 'minimalTransferTimes':
                is_minimalTransferTimes = not is_minimalTransferTimes
            if elem.tag == 'relation':
                if is_minimalTransferTimes:
                    if not elem.attrib['toStop'] in minimalTransferTimes:
                        attribs = elem.attrib
                        minimalTransferTimes[attribs['fromStop']] = {
                            'stop': attribs['toStop'],
                            'transferTime': float(attribs['transferTime'])
                        }
            if elem.tag == 'transitLine':
                if transitLine:
                    write_transitLinesTransitRoute(transitLine, transitRoutes,
                                                   transportMode)
                transitLine = {"transitLine": elem.attrib}
                transitRoutes = {}

            if elem.tag == 'transitRoute':
                transitRoutes[elem.attrib['id']] = {
                    'stops': [],
                    'links': [],
                    'departure_list': [],
                    'attribs': elem.attrib
                }
                transitRoute = elem.attrib['id']

            # doesn't have any attribs
            # if elem.tag == 'routeProfile':
            #     routeProfile = {'routeProfile': elem.attrib}

            if elem.tag == 'stop':
                transitRoutes[transitRoute]['stops'].append(
                    {'stop': elem.attrib})

            # doesn't have any attribs
            # if elem.tag == 'route':
            #     route = {'route': elem.attrib}

            if elem.tag == 'link':
                transitRoutes[transitRoute]['links'].append(
                    {'link': elem.attrib})

            # doesn't have any attribs
            # if elem.tag == 'departures':
            #     departures = {'departures': elem.attrib}

            if elem.tag == 'departure':
                transitRoutes[transitRoute]['departure_list'].append(
                    {'departure': elem.attrib})
        elif (event == 'end') and (elem.tag == "transportMode"):
            transportMode = {'transportMode': elem.text}

    # add the last one
    write_transitLinesTransitRoute(transitLine, transitRoutes, transportMode)

    return services, minimalTransferTimes
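A hedged usage sketch; the path and CRS are placeholders, Stop, Route and Service are assumed to come from the surrounding package, and Service is assumed to expose the id and routes it was constructed with:

# Hypothetical call: parse a MATSim schedule given in British National Grid
services, transfer_times = read_schedule('schedule.xml', 'epsg:27700')
for service in services:
    print(service.id, len(service.routes))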
Exemple #25
0
def tm127_to_wgs84(x, y):
    return transform(Proj(**TM127), Proj(**WGS84), x / 2.5, y / 2.5)
Exemple #26
0
# imports inferred from the usage below
import boto3
import fastparquet as fp
import geopandas as gpd
from os import path

from flask import Flask
from flask_cors import CORS

from datashader.colors import Set1
from datashader.colors import Set2
from datashader.colors import Set3
from datashader.colors import Sets1to3

from pyproj import Proj, transform

app = Flask(__name__)
CORS(app)

s3 = boto3.client('s3')

cities = gpd.read_file('s3://makepath-reference/reference.gpkg',
                       layer='cities')

lngs, lats = transform(Proj(init='epsg:3857'), Proj(init='epsg:4326'),
                       cities['geometry'].x.values,
                       cities['geometry'].y.values)

cities['latitude'] = lats
cities['longitude'] = lngs
cities['zoom'] = 9
cities['name'] = cities['CITY_NAME']


def load_census_demo():
    print('Loading Census Data')
    parquet_file = fp.ParquetFile(path.expanduser('~/census.parq'))
    df = parquet_file.to_pandas()
    df.race = df.race.astype('category')
    return df
Exemple #27
0
from pyproj import Proj


def deg2utm(Lon, Lat, utmzone=43):
    p = Proj(proj='utm', zone=utmzone, ellps='WGS84')
    x, y = p(Lon, Lat)
    return x, y
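For example (illustrative coordinates; UTM zone 43 spans 72°E–78°E):

x, y = deg2utm(76.95, 11.0)  # lon, lat in degrees -> zone 43N metres
print(x, y)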
Exemple #28
0
import pytest
from pyproj import Proj, itransform


def test_itransform_no_error():
    with pytest.warns(DeprecationWarning):
        pj = Proj(init="epsg:4555")
    pjx, pjy = pj(116.366, 39.867)
    list(itransform(pj, Proj(4326), [(pjx, pjy)], radians=True, errcheck=True))
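The init= form tested above is the deprecated pyproj 1.x style, which is why the test expects a DeprecationWarning. A sketch of the pyproj 2+ way to express the same conversion (an assumption about intent, not part of the test):

from pyproj import Transformer

transformer = Transformer.from_crs("EPSG:4555", "EPSG:4326", always_xy=True)
lon, lat = transformer.transform(116.366, 39.867)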
Exemple #29
0
    def set_projection(self, projstring):
        try:
            return Proj(projstring)
        except RuntimeError:
            log.ODM_EXCEPTION(
                'Could not set projection. Please use a proj4 string')
Exemple #30
0
def find_line_of_sight(x_transmitter, y_transmitter, x_receiver, y_receiver,
                       local_authority_ids):
    """
    Takes transmitter and receiver locations and determines line of sight.

    Parameters
    ----------
    x_transmitter: float
        Longitude coordinate of transmitter.
    y_transmitter : float
        Latitude coordinate of transmitter.
    x_receiver : float
        Longitude coordinate of receiver.
    y_receiver : float
        Latitude coordinate of receiver.
    local_authority_ids : list
        Any local authority id for which the area in question intersects.

    """
    projOSGB36 = Proj(init='epsg:27700')
    projWGS84 = Proj(init='epsg:4326')

    x_transmitter, y_transmitter = transform(
        projWGS84, projOSGB36, x_transmitter, y_transmitter
        )
    x_receiver, y_receiver = transform(projWGS84, projOSGB36, x_receiver, y_receiver)

    x_coordinates = []
    y_coordinates = []

    x_coordinates.extend([x_transmitter, x_receiver])
    y_coordinates.extend([y_transmitter, y_receiver])

    x_min, x_max = min(x_coordinates), max(x_coordinates)
    y_min, y_max = min(y_coordinates), max(y_coordinates)

    tile_ids = find_osbg_tile(x_min, y_min, x_max, y_max)

    print(tile_ids)

    # premises_data = read_premises_data(x_min, y_min, x_max, y_max, local_authority_ids)

    # if len(premises_data) < 1:
    #     line_of_sight = 'los'
    # else:
    #     tile_ids = find_osbg_tile(x_min, y_min, x_max, y_max)

        # #get building heights
        # building_heights = []

        # for tile_id in tile_ids:
        #     pathlist = glob.iglob(os.path.join(DATA_RAW_INPUTS,'mastermap_building_heights_2726794',
        #     tile_id['tile_ref_2_digit'] + '/*.csv'))
        #     for path in pathlist:
        #         if path[-10:-6] == tile_id['tile_ref_4_digit'].lower():
        #             with open(path, 'r') as system_file:
        #                 reader = csv.reader(system_file)
        #                 next(reader)
        #                 for line in reader:
        #                     building_heights.append({
        #                         'id': line[0],
        #                         'max_height': line[6],
        #                     })
        #         else:
        #             pass

        # #match premises with building heights
        # premises_with_heights = []

        # for premises in premises_data:
        #     for building_height in building_heights:
        #         if premises['properties']['uid'] == building_height['id']:
        #             premises_with_heights.append({
        #                 'type': "Feature",
        #                 'geometry': {
        #                     "type": "Point",
        #                     "coordinates": [premises['geometry']['coordinates']]
        #                 },
        #                 'properties': {
        #                     'uid': premises['properties']['uid'],
        #                     'max_height': building_height['max_height']
        #                 }
        #             })

    # # make sure viewshed files have been generated
    # # get list of required tile ids
    # Create path
    output_dir = os.path.abspath(os.path.join(DATA_INTERMEDIATE, 'viewshed_tiles'))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    final_tile_id_list = []

    for tile_id in tile_ids:
        pathlist = glob.iglob(os.path.join(DATA_RAW_INPUTS,'terrain-5-dtm_2736772',
        tile_id['tile_ref_2_digit'] + '/*.asc'))
        for tile_path in pathlist:
            filename = os.path.basename(tile_path)[:-4]
            if filename == tile_id['full_tile_ref']:
                final_tile_id_list.append(tile_path)

    load_raster_files(final_tile_id_list, output_dir, x_transmitter, y_transmitter)

        # if len(final_tile_id_list) == 1:
        #     filename = final_tile_id_list[0]
        #     if not os.path.exists(os.path.join(output_dir, filename + '-viewshed.tif')):
        #         from generate_viewshed import generate_viewshed
        #         generate_viewshed(x_transmitter, y_transmitter, output_dir, filename, tile_path)
        # else:
        #     #TODO deal with multiple tiles
        #     for filename in final_tile_id_list:
        #         if not os.path.exists(os.path.join(output_dir, filename + '-viewshed.tif')):
        #             from generate_viewshed import generate_viewshed
        #             #merge files together
        #             #then generate viewshed
        #             generate_viewshed(x_transmitter, y_transmitter, output_dir, filename, tile_path)

        # #load in raster viewshed files
        # for final_tile in final_tile_id_list:
        #     path = os.path.join(output_dir,final_tile + '-viewshed.tif')
        #     src = rasterio.open(path)

        # from rasterio.plot import show
        # show(src, cmap='terrain')

        # for val in src.sample([(x_receiver, y_receiver)]):
        #     if val == 0:
        #         line_of_sight = 'nlos'
        #     elif val == 1:
        #         line_of_sight = 'los'
        #     else:
        #         print('binary viewshed .tif returning non-conforming value')

    print('complete')
    return  # line_of_sight
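A hedged invocation sketch; inputs are WGS84 lon/lat, the local-authority id is illustrative, and (as written) the function prints rather than returning line_of_sight:

find_line_of_sight(-0.1278, 51.5074, -0.1350, 51.5120, ['E09000033'])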