Example #1
    def calculate_footprint_from(self, point):
        """从点和指定的角度计算地面覆盖的矩形(footprint)

        Parameters
        ----------
        point : list 
            指定点
        angle : float
            航线方向
        width : int 
            图像长度
        iheight : int 
            图像高度
        gsd : float 
            地面分辨率

        Returns
        -------
        tuple
            返回地面覆盖的矩形的四脚点坐标
        """
        width = self.cameraWidth * self.gsd
        height = self.cameraHeight * self.gsd

        imgAngle = math.atan(self.cameraWidth*1.0 /
                             self.cameraHeight) * 180/math.pi

        geod = Geod(ellps="WGS84")

        # diagonal length of the footprint rectangle
        distance = math.sqrt(math.pow(width, 2) + math.pow(height, 2))

        # compute the top-right corner
        angleTR = self.courseAngle - imgAngle
        longTR, latTR, tmpAngle = geod.fwd(
            point[0], point[1], angleTR, distance/2)

        # compute the bottom-right corner
        angleBR = self.courseAngle + imgAngle
        longBR, latBR, tmpAngle = geod.fwd(
            point[0], point[1], angleBR, distance/2)

        # compute the bottom-left corner
        angleBL = angleTR + 180
        longBL, latBL, tmpAngle = geod.fwd(
            point[0], point[1], angleBL, distance/2)

        # compute the top-left corner
        angleTL = angleBR + 180
        longTL, latTL, tmpAngle = geod.fwd(
            point[0], point[1], angleTL, distance/2)

        result = []
        result.append((longTR, latTR))
        result.append((longBR, latBR))
        result.append((longBL, latBL))
        result.append((longTL, latTL))
        # close the polygon
        result.append((longTR, latTR))

        return result
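
# Hedged standalone sketch of the same footprint computation (not from the
# original source); cameraWidth/cameraHeight in pixels, gsd in m/px and the
# course angle in degrees are arbitrary sample values.
import math
from pyproj import Geod

def footprint_demo(point, course_angle, cam_w=4000, cam_h=3000, gsd=0.05):
    width, height = cam_w * gsd, cam_h * gsd
    img_angle = math.degrees(math.atan(width / height))
    half_diag = math.hypot(width, height) / 2
    geod = Geod(ellps="WGS84")
    corners = []
    for ang in (course_angle - img_angle,        # top-right
                course_angle + img_angle,        # bottom-right
                course_angle - img_angle + 180,  # bottom-left
                course_angle + img_angle + 180): # top-left
        lon, lat, _ = geod.fwd(point[0], point[1], ang, half_diag)
        corners.append((lon, lat))
    corners.append(corners[0])  # close the polygon
    return corners

print(footprint_demo([116.39, 39.90], course_angle=45.0))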
Example #2
'''
Module author: Jarry Gabriel
Date: June, July 2016

Some algorithms were written by: Malivai Luce, Helene Piquet

This module provides various helper tools
'''

from pyproj import Proj, Geod
import numpy as np

# Projections
wgs84 = Proj("+init=EPSG:4326")
epsg3857 = Proj("+init=EPSG:3857")
g = Geod(ellps='WGS84')


# Returns pressure from altitude (ft)
def press(alt):
    z = alt / 3.28084
    return 1013.25 * (1 - (0.0065 * z) / 288.15)**5.255


# Returns the pressure level in lvls closest to the given altitude (alt, ft)
def proxilvl(alt, lvls):
    p = press(alt)
    levels = np.array(sorted(lvls.keys()))
    return levels[np.abs(levels - p).argmin()]
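
# Hedged usage sketch (not part of the original module); the pressure levels
# below are arbitrary illustrative values in hPa.
sample_levels = {1000.0: [], 850.0: [], 700.0: [], 500.0: [], 300.0: []}
print(press(10000.0))                    # ~697 hPa at 10,000 ft
print(proxilvl(10000.0, sample_levels))  # -> 700.0, the closest level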

from sklearn.metrics import f1_score
from xgboost import XGBClassifier
from xgboost import plot_importance
from catboost import CatBoostClassifier, Pool, EFstrType
import lightgbm as lgb
from sklearn.linear_model import LogisticRegression
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import StackingClassifier
from sklearn.model_selection import RepeatedStratifiedKFold
from matplotlib import pyplot
from pyproj import Geod
from imblearn.over_sampling import SMOTE

#Distance will be measured on this ellipsoid - more accurate than a spherical method
wgs84_geod = Geod(ellps='WGS84')


#Get distance between pairs of lat-lon points
def Distance(lat1, lon1, lat2, lon2):
    az12, az21, dist = wgs84_geod.inv(lon1, lat1, lon2, lat2)
    return dist


def timer(start_time=None):
    if not start_time:
        start_time = datetime.now()
        return start_time
    elif start_time:
        thour, temp_sec = divmod((datetime.now() - start_time).total_seconds(),
Example #4
    def difference(self, spot) -> float:
        g = Geod(ellps='WGS84')
        azimuth, back_azimuth, distance_2d = g.inv(self.longitude, self.latitude, spot.longitude, spot.latitude)
        return distance_2d
def storm_motion_deltas_algorithm(REFlev, REFlev1, big_storm, zero_z_trigger, storm_to_track, year, month, day, hour, start_min, duration, calibration, station, Bunkers_s, Bunkers_m, track_dis=10, GR_mins=1.0):
    #Set vector perpendicular to FFD Z gradient
    #storm_relative_dir = storm_relative_dir
    #Set storm motion
    Bunkers_m = Bunkers_m
    #Set ZDR Threshold for outlining arcs
    #zdrlev = [zdrlev]
    #Set KDP Threshold for finding KDP feet
    #kdplev = [kdplev]
    #Set reflectivity thresholds for storm tracking algorithm
    REFlev = [REFlev]
    REFlev1 = [REFlev1]
    #Set storm size threshold that triggers subdivision of big storms
    big_storm = big_storm #km^2
    Outer_r = 30 #km
    Inner_r = 6 #km
    #Set trigger to ignore strangely-formatted files right before 00Z
    #Pre-SAILS #: 17
    #SAILS #: 25
    zero_z_trigger = zero_z_trigger
    storm_to_track = storm_to_track
    zdr_outlines = []
    #Here, set the initial time of the archived radar loop you want.
    #Our specified time
    dt = datetime(year,month, day, hour, start_min)
    station = station
    end_dt = dt + timedelta(hours=duration)

    #Set up nexrad interface
    conn = nexradaws.NexradAwsInterface()
    scans = conn.get_avail_scans_in_range(dt,end_dt,station)
    results = conn.download(scans, 'RadarFolder')

    #Setting counters for figures and Pandas indices
    f = 27
    n = 1
    storm_index = 0
    scan_index = 0
    tracking_index = 0
    #Create geod object for later distance and area calculations
    g = Geod(ellps='sphere')
    #Open the placefile
    f = open("DELTA_dev"+station+str(dt.year)+str(dt.month)+str(dt.day)+str(dt.hour)+str(dt.minute)+"_Placefile.txt", "w+")
    f.write("Title: Storm Motion Deltas Placefile \n")
    f.write("Refresh: 8 \n \n")

    #Load ML algorithm
#    forest_loaded = pickle.load(open('BestRandomForest.pkl', 'rb'))
#    forest_loaded_col = pickle.load(open('BestRandomForestColumnsLEN200.pkl', 'rb'))

    #Actual algorithm code starts here
    #Create a list for the lists of arc outlines
    zdr_out_list = []
    tracks_dataframe = []
    for i,scan in enumerate(results.iter_success(),start=1):
    #Local file option:
        #Loop over all files in the dataset and pull out each 0.5 degree tilt for analysis
        try:
            radar1 = scan.open_pyart()
        except:
            print('bad radar file')
            continue
        #Local file option
        print('File Reading')
        #Make sure the file isn't a strange format
        if radar1.nsweeps > zero_z_trigger:
            continue
            
        for i in range(radar1.nsweeps):
            print('in loop')
            print(radar1.nsweeps)
            try:
                radar4 = radar1.extract_sweeps([i])
            except:
                print('bad file')
            #Checking to make sure the tilt in question has all needed data and is the right elevation
            if ((np.mean(radar4.elevation['data']) < .65) and (np.max(np.asarray(radar4.fields['differential_reflectivity']['data'])) != np.min(np.asarray(radar4.fields['differential_reflectivity']['data'])))):
                n = n+1

                #Calling ungridded_section; Pulling apart radar sweeps and creating ungridded data arrays
                [radar,n,range_2d,ungrid_lons,ungrid_lats] = quality_control_arcalg(radar4,n,calibration)

                time_start = netCDF4.num2date(radar.time['data'][0], radar.time['units'])
                object_number=0.0
                month = time_start.month
                if month < 10:
                    month = '0'+str(month)
                hour = time_start.hour
                if hour < 10:
                    hour = '0'+str(hour)
                minute = time_start.minute
                if minute < 10:
                    minute = '0'+str(minute)
                day = time_start.day
                if day < 10:
                    day = '0'+str(day)
                time_beg = time_start - timedelta(minutes=0.1)
                time_end = time_start + timedelta(minutes=GR_mins)
                sec_beg = time_beg.second
                sec_end = time_end.second
                min_beg = time_beg.minute
                min_end = time_end.minute
                h_beg = time_beg.hour
                h_end = time_end.hour
                d_beg = time_beg.day
                d_end = time_end.day
                if sec_beg < 10:
                    sec_beg = '0'+str(sec_beg)
                if sec_end < 10:
                    sec_end = '0'+str(sec_end)
                if min_beg < 10:
                    min_beg = '0'+str(min_beg)
                if min_end < 10:
                    min_end = '0'+str(min_end)
                if h_beg < 10:
                    h_beg = '0'+str(h_beg)
                if h_end < 10:
                    h_end = '0'+str(h_end)
                if d_beg < 10:
                    d_beg = '0'+str(d_beg)
                if d_end < 10:
                    d_end = '0'+str(d_end)

                #Calling kdp_section; Using NWS method, creating ungridded, smoothed KDP field
                kdp_nwsdict = kdp_genesis(radar)

                #Add field to radar
                radar.add_field('KDP', kdp_nwsdict)
                kdp_ungridded_nws = radar.fields['KDP']['data']


                #Calling grid_section; Now let's grid the data on a ~250 m x 250 m grid
                [REF,KDP,CC,ZDRmasked1,REFmasked,KDPmasked,rlons,rlats,rlons_2d,rlats_2d,cenlat,cenlon] = gridding_arcalg(radar)

                #Calling gradient_section; Determining gradient direction and masking some Zhh and Zdr grid fields
                #[grad_mag,grad_ffd,ZDRmasked] = grad_mask_arcalg(REFmasked,REF,storm_relative_dir,ZDRmasked1,CC)

                #Let's create a field for inferred hail
                #Commenting out for the moment
#                 REF_Hail = np.copy(REFmasked)
#                 REF_Hail1 = ma.masked_where(ZDRmasked1 > 1.0, REF_Hail)
#                 REF_Hail2 = ma.masked_where(CC > 1.0, REF_Hail1)
#                 REF_Hail2 = ma.filled(REF_Hail2, fill_value = 1)

                #Let's set up the map projection!
                crs = ccrs.LambertConformal(central_longitude=-100.0, central_latitude=45.0)

                #Set up our array of latitude and longitude values and transform our data to the desired projection.
                tlatlons = crs.transform_points(ccrs.LambertConformal(central_longitude=265, central_latitude=25, standard_parallels=(25.,25.)),rlons[0,:,:],rlats[0,:,:])
                tlons = tlatlons[:,:,0]
                tlats = tlatlons[:,:,1]

                #Limit the extent of the map area, must convert to proper coords.
                LL = (cenlon-1.0,cenlat-1.0,ccrs.PlateCarree())
                UR = (cenlon+1.0,cenlat+1.0,ccrs.PlateCarree())
                print(LL)

                #Get data to plot state and province boundaries
                states_provinces = cfeature.NaturalEarthFeature(
                        category='cultural',
                        name='admin_1_states_provinces_lakes',
                        scale='50m',
                        facecolor='none')
                #Make sure these shapefiles are in the same directory as the script
                #fname = 'cb_2016_us_county_20m/cb_2016_us_county_20m.shp'
                #fname2 = 'cb_2016_us_state_20m/cb_2016_us_state_20m.shp'
                #counties = ShapelyFeature(Reader(fname).geometries(),ccrs.PlateCarree(), facecolor = 'none', edgecolor = 'black')
                #states = ShapelyFeature(Reader(fname2).geometries(),ccrs.PlateCarree(), facecolor = 'none', edgecolor = 'black')

                #Create a figure and plot up the initial data and contours for the algorithm
                fig=plt.figure(n,figsize=(30.,25.))
                ax = plt.subplot(111,projection=ccrs.PlateCarree())
                ax.coastlines('50m',edgecolor='black',linewidth=0.75)
                #ax.add_feature(counties, edgecolor = 'black', linewidth = 0.5)
                #ax.add_feature(states, edgecolor = 'black', linewidth = 1.5)
                ax.set_extent([LL[0],UR[0],LL[1],UR[1]])
                REFlevels = np.arange(20,73,2)

                #Options for Z backgrounds/contours
                #refp = ax.pcolormesh(ungrid_lons, ungrid_lats, ref_c, cmap=plt.cm.gist_ncar, vmin = 10, vmax = 73)
                #refp = ax.pcolormesh(ungrid_lons, ungrid_lats, ref_ungridded_base, cmap='HomeyerRainbow', vmin = 10, vmax = 73)
                #refp = ax.pcolormesh(rlons_2d, rlats_2d, REFrmasked, cmap=pyart.graph.cm_colorblind.HomeyerRainbow, vmin = 10, vmax = 73)
                refp2 = ax.contour(rlons_2d, rlats_2d, REFmasked, [40], colors='grey', linewidths=5, zorder=1)
                #refp3 = ax.contour(rlons_2d, rlats_2d, REFmasked, [45], color='r')
                #plt.contourf(rlons_2d, rlats_2d, ZDR_sum_stuff, depth_levels, cmap=plt.cm.viridis)

                #Option to have a ZDR background instead of Z:
                #zdrp = ax.pcolormesh(ungrid_lons, ungrid_lats, zdr_c, cmap=plt.cm.nipy_spectral, vmin = -2, vmax = 6)

                #Storm tracking algorithm starts here
                #Reflectivity smoothed for storm tracker
                smoothed_ref = ndi.gaussian_filter(REFmasked, sigma = 3, order = 0)
                #1st Z contour plotted
                refc = ax.contour(rlons[0,:,:],rlats[0,:,:],smoothed_ref,REFlev, alpha=.01)

                #Set up projection for area calculations
                proj = partial(pyproj.transform, pyproj.Proj(init='epsg:4326'),
                           pyproj.Proj(init='epsg:3857'))

                #Main part of storm tracking algorithm starts by looping through all contours looking for Z centroids
                #This method for breaking contours into polygons based on this stack overflow tutorial:
                #https://gis.stackexchange.com/questions/99917/converting-matplotlib-contour-objects-to-shapely-objects
                #Calling stormid_section
                [storm_ids,max_lons_c,max_lats_c,ref_areas,storm_index, alg_speeds, alg_directions] = storm_objects_new(refc,proj,REFlev,REFlev1,big_storm,smoothed_ref,ax,rlons,rlats,storm_index,tracking_index,scan_index,tracks_dataframe, track_dis, time_start)

                #Setup tracking index for storm of interest
                tracking_ind=np.where(np.asarray(storm_ids)==storm_to_track)[0]
                max_lons_c = np.asarray(max_lons_c)
                max_lats_c = np.asarray(max_lats_c)
                ref_areas = np.asarray(ref_areas)
                #Create the ZDR and KDP contours which will later be broken into polygons
#                 if np.max(ZDRmasked) > zdrlev:
#                     zdrc = ax.contour(rlons[0,:,:],rlats[0,:,:],ZDRmasked,zdrlev,linewidths = 2, colors='purple', alpha = .5)
#                 else:
#                     zdrc=[]
#                 if np.max(KDPmasked) > kdplev:
#                     kdpc = ax.contour(rlons[0,:,:],rlats[0,:,:],KDPmasked,kdplev,linewidths = 2, colors='green', alpha = 0.01)
#                 else:
#                     kdpc=[]
#                 if np.max(REF_Hail2) > 50.0:
#                     hailc = ax.contour(rlons[0,:,:],rlats[0,:,:],REF_Hail2,[50],linewidths = 4, colors='pink', alpha = 0.01)
#                 else:
#                     hailc=[]
#                 if np.max(REFmasked) > 35.0:
#                     zhhc = ax.contour(rlons[0,:,:],rlats[0,:,:],REFmasked,[35.0],linewidths = 3,colors='orange', alpha = 0.8)
#                 else:
#                     zhhc=[]
                plt.contour(ungrid_lons, ungrid_lats, range_2d, [73000], linewidths=7, colors='r')
                plt.savefig('testfig.png')
                print('Testfig Saved')

                if len(max_lons_c) > 0:
                    #Calling zdr_arc_section; Create ZDR arc objects using a similar method as employed in making the storm objects
#                     [zdr_storm_lon,zdr_storm_lat,zdr_dist,zdr_forw,zdr_back,zdr_areas,zdr_centroid_lon,zdr_centroid_lat,zdr_mean,zdr_cc_mean,zdr_max,zdr_masks,zdr_outlines,ax,f] = zdrarc(zdrc,ZDRmasked,CC,REF,grad_ffd,grad_mag,KDP,forest_loaded,ax,f,time_start,month,d_beg,h_beg,min_beg,sec_beg,d_end,h_end,min_end,sec_end,rlons,rlats,max_lons_c,max_lats_c,zdrlev,proj,storm_relative_dir,Outer_r,Inner_r,tracking_ind)


                    #Calling hail_section; Identify Hail core objects in a similar way to the ZDR arc objects
#                     [hail_areas,hail_centroid_lon,hail_centroid_lat,hail_storm_lon,hail_storm_lat,ax,f] = hail_objects(hailc,REF_Hail2,ax,f,time_start,month,d_beg,h_beg,min_beg,sec_beg,d_end,h_end,min_end,sec_end,rlons,rlats,max_lons_c,max_lats_c,proj)


                    #Calling zhh_section; Identify 35dBz storm area in a similar way to the ZDR arc objects
#                     [zhh_areas,zhh_centroid_lon,zhh_centroid_lat,zhh_storm_lon,zhh_storm_lat,zhh_max,zhh_core_avg] = zhh_objects(zhhc,REFmasked,rlons,rlats,max_lons_c,max_lats_c,proj)


#                     #Calling kdpfoot_section; Identify KDP foot objects in a similar way to the ZDR arc objects
#                     [kdp_areas,kdp_centroid_lon,kdp_centroid_lat,kdp_storm_lon,kdp_storm_lat,kdp_max,ax,f] = kdp_objects(kdpc,KDPmasked,ax,f,time_start,month,d_beg,h_beg,min_beg,sec_beg,d_end,h_end,min_end,sec_end,rlons,rlats,max_lons_c,max_lats_c,kdplev,proj)


                    #Consolidating the arc objects associated with each storm:
#                     zdr_areas_arr = np.zeros((len(zdr_areas)))
#                     zdr_max_arr = np.zeros((len(zdr_max)))
#                     zdr_mean_arr = np.zeros((len(zdr_mean)))                    
#                     for i in range(len(zdr_areas)):
#                         zdr_areas_arr[i] = zdr_areas[i].magnitude
#                         zdr_max_arr[i] = zdr_max[i]
#                         zdr_mean_arr[i] = zdr_mean[i]
#                     zdr_centroid_lons = np.asarray(zdr_centroid_lon)
#                     zdr_centroid_lats = np.asarray(zdr_centroid_lat)
#                     zdr_con_areas = []
#                     zdr_con_maxes = []
#                     zdr_con_means = []
#                     zdr_con_centroid_lon = []
#                     zdr_con_centroid_lat = []
#                     zdr_con_max_lon = []
#                     zdr_con_max_lat = []
#                     zdr_con_storm_lon = []
#                     zdr_con_storm_lat = []
#                     zdr_con_masks = []
#                     zdr_con_dev = []
#                     zdr_con_10max = []
#                     zdr_con_mode = []
#                     zdr_con_median = []
#                     zdr_masks = np.asarray(zdr_masks)

#                     #Consolidate KDP objects as well
#                     kdp_areas_arr = np.zeros((len(kdp_areas)))
#                     kdp_max_arr = np.zeros((len(kdp_max)))
#                     for i in range(len(kdp_areas)):
#                         kdp_areas_arr[i] = kdp_areas[i].magnitude
#                         kdp_max_arr[i] = kdp_max[i]
#                     kdp_centroid_lons = np.asarray(kdp_centroid_lon)
#                     kdp_centroid_lats = np.asarray(kdp_centroid_lat)
#                     kdp_con_areas = []
#                     kdp_con_maxes = []
#                     kdp_con_centroid_lon = []
#                     kdp_con_centroid_lat = []
#                     kdp_con_max_lon = []
#                     kdp_con_max_lat = []
#                     kdp_con_storm_lon = []
#                     kdp_con_storm_lat = []

                    #Consolidate Hail objects as well
#                     hail_areas_arr = np.zeros((len(hail_areas)))
#                     for i in range(len(hail_areas)):
#                         hail_areas_arr[i] = hail_areas[i].magnitude
#                     hail_centroid_lons = np.asarray(hail_centroid_lon)
#                     hail_centroid_lats = np.asarray(hail_centroid_lat)
#                     hail_con_areas = []
#                     hail_con_centroid_lon = []
#                     hail_con_centroid_lat = []
#                     hail_con_storm_lon = []
#                     hail_con_storm_lat = []

                    #Consolidate Zhh objects as well
#                     zhh_areas_arr = np.zeros((len(zhh_areas)))
#                     zhh_max_arr = np.zeros((len(zhh_max)))
#                     zhh_core_avg_arr = np.zeros((len(zhh_core_avg)))
#                     for i in range(len(zhh_areas)):
#                         zhh_areas_arr[i] = zhh_areas[i].magnitude
#                         zhh_max_arr[i] = zhh_max[i]
#                         zhh_core_avg_arr[i] = zhh_core_avg[i]
#                     zhh_centroid_lons = np.asarray(zhh_centroid_lon)
#                     zhh_centroid_lats = np.asarray(zhh_centroid_lat)
#                     zhh_con_areas = []
#                     zhh_con_maxes = []
#                     zhh_con_core_avg = []
#                     zhh_con_centroid_lon = []
#                     zhh_con_centroid_lat = []
#                     zhh_con_max_lon = []
#                     zhh_con_max_lat = []
#                     zhh_con_storm_lon = []
#                     zhh_con_storm_lat = []

#                     for i in enumerate(max_lons_c):
#                         try:
#                             #Find the arc objects associated with this storm:
#                             zdr_objects_lons = zdr_centroid_lons[np.where(zdr_storm_lon == max_lons_c[i[0]])]
#                             zdr_objects_lats = zdr_centroid_lats[np.where(zdr_storm_lon == max_lons_c[i[0]])]

#                             #Get the sum of their areas
#                             zdr_con_areas.append(np.sum(zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             #print("consolidated area", np.sum(zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             zdr_con_maxes.append(np.max(zdr_max_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             #print("consolidated max", np.max(zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             zdr_con_means.append(np.mean(zdr_mean_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             #print("consolidated mean", np.mean(zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             zdr_con_max_lon.append(rlons_2d[np.where(ZDRmasked==np.max(zdr_max_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))])
#                             zdr_con_max_lat.append(rlats_2d[np.where(ZDRmasked==np.max(zdr_max_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))])

#                             #Find the actual centroids
#                             weighted_lons = zdr_objects_lons * zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]
#                             zdr_con_centroid_lon.append(np.sum(weighted_lons) / np.sum(zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             weighted_lats = zdr_objects_lats * zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]
#                             zdr_con_centroid_lat.append(np.sum(weighted_lats) / np.sum(zdr_areas_arr[np.where(zdr_storm_lon == max_lons_c[i[0]])]))
#                             zdr_con_storm_lon.append(max_lons_c[i[0]])
#                             zdr_con_storm_lat.append(max_lats_c[i[0]])
#                             zdr_con_masks.append(np.sum(zdr_masks[np.where(zdr_storm_lon == max_lons_c[i[0]])],axis=0, dtype=bool))
#                             mask_con = np.sum(zdr_masks[np.where(zdr_storm_lon == max_lons_c[i[0]])], axis=0, dtype=bool)
#                             zdr_con_dev.append(np.std(ZDRmasked[mask_con]))
#                             ZDRsorted = np.sort(ZDRmasked[mask_con])[::-1]
#                             zdr_con_10max.append(np.mean(ZDRsorted[0:10]))
#                             zdr_con_mode.append(stats.mode(ZDRmasked[mask_con]))
#                             zdr_con_median.append(np.median(ZDRmasked[mask_con]))
#                         except:
#                             zdr_con_maxes.append(0)
#                             zdr_con_means.append(0)
#                             zdr_con_centroid_lon.append(0)
#                             zdr_con_centroid_lat.append(0)
#                             zdr_con_max_lon.append(0)
#                             zdr_con_max_lat.append(0)
#                             zdr_con_storm_lon.append(max_lons_c[i[0]])
#                             zdr_con_storm_lat.append(max_lats_c[i[0]])
#                             zdr_con_masks.append(0)
#                             zdr_con_dev.append(0)
#                             zdr_con_10max.append(0)
#                             zdr_con_mode.append(0)
#                             zdr_con_median.append(0)

#                         try:
#                             #Find the kdp objects associated with this storm:
#                             kdp_objects_lons = kdp_centroid_lons[np.where(kdp_storm_lon == max_lons_c[i[0]])]
#                             kdp_objects_lats = kdp_centroid_lats[np.where(kdp_storm_lon == max_lons_c[i[0]])]

#                             #Get the sum of their areas
#                             kdp_con_areas.append(np.sum(kdp_areas_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]))
#                             kdp_con_maxes.append(np.max(kdp_max_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]))
#                             kdp_con_max_lon.append(rlons_2d[np.where(KDPmasked==np.max(kdp_max_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]))])
#                             kdp_con_max_lat.append(rlats_2d[np.where(KDPmasked==np.max(kdp_max_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]))])
#                             #Find the actual centroids
#                             weighted_lons_kdp = kdp_objects_lons * kdp_areas_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]
#                             kdp_con_centroid_lon.append(np.sum(weighted_lons_kdp) / np.sum(kdp_areas_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]))
#                             weighted_lats_kdp = kdp_objects_lats * kdp_areas_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]
#                             kdp_con_centroid_lat.append(np.sum(weighted_lats_kdp) / np.sum(kdp_areas_arr[np.where(kdp_storm_lon == max_lons_c[i[0]])]))
#                             kdp_con_storm_lon.append(max_lons_c[i[0]])
#                             kdp_con_storm_lat.append(max_lats_c[i[0]])
#                         except:
#                             kdp_con_maxes.append(0)
#                             kdp_con_max_lon.append(0)
#                             kdp_con_max_lat.append(0)
#                             kdp_con_centroid_lon.append(0)
#                             kdp_con_centroid_lat.append(0)
#                             kdp_con_storm_lon.append(0)
#                             kdp_con_storm_lat.append(0)

#                         try:
#                             #Find the hail core objects associated with this storm:
#                             hail_objects_lons = hail_centroid_lons[np.where(hail_storm_lon == max_lons_c[i[0]])]
#                             hail_objects_lats = hail_centroid_lats[np.where(hail_storm_lon == max_lons_c[i[0]])]
#                             #Get the sum of their areas
#                             hail_con_areas.append(np.sum(hail_areas_arr[np.where(hail_storm_lon == max_lons_c[i[0]])]))
#                             #Find the actual centroids
#                             weighted_lons_hail = hail_objects_lons * hail_areas_arr[np.where(hail_storm_lon == max_lons_c[i[0]])]
#                             hail_con_centroid_lon.append(np.sum(weighted_lons_hail) / np.sum(hail_areas_arr[np.where(hail_storm_lon == max_lons_c[i[0]])]))
#                             weighted_lats_hail = hail_objects_lats * hail_areas_arr[np.where(hail_storm_lon == max_lons_c[i[0]])]
#                             hail_con_centroid_lat.append(np.sum(weighted_lats_hail) / np.sum(hail_areas_arr[np.where(hail_storm_lon == max_lons_c[i[0]])]))
#                             hail_con_storm_lon.append(max_lons_c[i[0]])
#                             hail_con_storm_lat.append(max_lats_c[i[0]])
#                         except:
#                             hail_con_centroid_lon.append(0)
#                             hail_con_centroid_lat.append(0)
#                             hail_con_storm_lon.append(0)
#                             hail_con_storm_lat.append(0)

#                         try:
#                             #Find the zhh objects associated with this storm:
#                             zhh_objects_lons = zhh_centroid_lons[np.where(zhh_storm_lon == max_lons_c[i[0]])]
#                             zhh_objects_lats = zhh_centroid_lats[np.where(zhh_storm_lon == max_lons_c[i[0]])]
#                             #Get the sum of their areas
#                             zhh_con_areas.append(np.sum(zhh_areas_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]))
#                             zhh_con_maxes.append(np.max(zhh_max_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]))
#                             zhh_con_core_avg.append(np.max(zhh_core_avg_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]))
#                             zhh_con_max_lon.append(rlons_2d[np.where(REFmasked==np.max(zhh_max_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]))])
#                             zhh_con_max_lat.append(rlats_2d[np.where(REFmasked==np.max(zhh_max_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]))])
#                             #Find the actual centroids
#                             weighted_lons_zhh = zhh_objects_lons * zhh_areas_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]
#                             zhh_con_centroid_lon.append(np.sum(weighted_lons_zhh) / np.sum(zhh_areas_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]))
#                             weighted_lats_zhh = zhh_objects_lats * zhh_areas_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]
#                             zhh_con_centroid_lat.append(np.sum(weighted_lats_zhh) / np.sum(zhh_areas_arr[np.where(zhh_storm_lon == max_lons_c[i[0]])]))
#                             zhh_con_storm_lon.append(max_lons_c[i[0]])
#                             zhh_con_storm_lat.append(max_lats_c[i[0]])
#                         except:
#                             zhh_con_maxes.append(0)
#                             zhh_con_core_avg.append(0)
#                             zhh_con_max_lon.append(0)
#                             zhh_con_max_lat.append(0)
#                             zhh_con_centroid_lon.append(0)
#                             zhh_con_centroid_lat.append(0)
#                             zhh_con_storm_lon.append(0)
#                             zhh_con_storm_lat.append(0)

                        #Calculate KDP-ZDR separation
        #             kdp_con_centroid_lons1 = np.asarray(kdp_con_centroid_lon)
        #             kdp_con_centroid_lats1 = np.asarray(kdp_con_centroid_lat)
        #             zdr_con_centroid_lons1 = np.asarray(zdr_con_centroid_lon)
        #             zdr_con_centroid_lats1 = np.asarray(zdr_con_centroid_lat)
        #             #Eliminate consolidated arcs smaller than a specified area
        #             area = 2 #km*2
        #             zdr_con_areas_arr = np.asarray(zdr_con_areas)
        #             zdr_con_centroid_lats = zdr_con_centroid_lats1[zdr_con_areas_arr > area]
        #             zdr_con_centroid_lons = zdr_con_centroid_lons1[zdr_con_areas_arr > area]
        #             kdp_con_centroid_lats = kdp_con_centroid_lats1[zdr_con_areas_arr > area]
        #             kdp_con_centroid_lons = kdp_con_centroid_lons1[zdr_con_areas_arr > area]
        #             zdr_con_max_lons1 = np.asarray(zdr_con_max_lon)[zdr_con_areas_arr > area]
        #             zdr_con_max_lats1 = np.asarray(zdr_con_max_lat)[zdr_con_areas_arr > area]
        #             kdp_con_max_lons1 = np.asarray(kdp_con_max_lon)[zdr_con_areas_arr > area]
        #             kdp_con_max_lats1 = np.asarray(kdp_con_max_lat)[zdr_con_areas_arr > area]
        #             zdr_con_max1 = np.asarray(zdr_con_maxes)[zdr_con_areas_arr > area]
        #             zdr_con_areas1 = zdr_con_areas_arr[zdr_con_areas_arr > area]
#                     kdp_con_centroid_lat = np.asarray(kdp_con_centroid_lat)
#                     kdp_con_centroid_lon = np.asarray(kdp_con_centroid_lon)
#                     zdr_con_centroid_lat = np.asarray(zdr_con_centroid_lat)
#                     zdr_con_centroid_lon = np.asarray(zdr_con_centroid_lon)

#                     kdp_inds = np.where(kdp_con_centroid_lat*zdr_con_centroid_lat > 0)
#                     distance_kdp_zdr = g.inv(kdp_con_centroid_lon[kdp_inds], kdp_con_centroid_lat[kdp_inds], zdr_con_centroid_lon[kdp_inds], zdr_con_centroid_lat[kdp_inds])
#                     dist_kdp_zdr = distance_kdp_zdr[2] / 1000.
#                     #Now make an array for the distances which will have the same shape as the lats to prevent errors
#                     shaped_dist = np.zeros((np.shape(zdr_con_areas)))
#                     shaped_dist[kdp_inds] = dist_kdp_zdr

#                     #Get separation angle for KDP-ZDR centroids
#                     back_k = distance_kdp_zdr[1]
#                     for i in range(back_k.shape[0]):
#                         if distance_kdp_zdr[1][i] < 0:
#                             back_k[i] = distance_kdp_zdr[1][i] + 360

#                     forw_k = np.abs(back_k - storm_relative_dir)
#                     rawangle_k = back_k - storm_relative_dir
#                     #Account for weird angles
#                     for i in range(back_k.shape[0]):
#                         if forw_k[i] > 180:
#                             forw_k[i] = 360 - forw_k[i]
#                             rawangle_k[i] = (360-forw_k[i])*(-1)

#                     rawangle_k = rawangle_k*(-1)

#                     #Now make an array for the distances which will have the same shape as the lats to prevent errors
#                     shaped_ang = np.zeros((np.shape(zdr_con_areas)))
#                     shaped_ang[kdp_inds] = rawangle_k
#                     shaped_ang = (180-np.abs(shaped_ang))*(shaped_ang/np.abs(shaped_ang))

#                     new_angle_all = shaped_ang + storm_relative_dir

#                     shaped_ang = (new_angle_all - Bunkers_m)* (-1)

#                     shaped_ang = 180 - shaped_ang

                    ###Now let's consolidate everything to fit the Pandas dataframe!
#                     p_zdr_areas = []
#                     p_zdr_maxes = []
#                     p_zdr_means = []
#                     p_zdr_devs = []
#                     p_zdr_10max = []
#                     p_zdr_mode = []
#                     p_zdr_median = []
#                    # p_hail_areas = []
#                     p_zhh_areas = []
#                     p_zhh_maxes = []
#                     p_zhh_core_avgs = []
#                     p_separations = []
#                     p_sp_angle = []
#                     for storm in enumerate(max_lons_c):
#                         matching_ind = np.flatnonzero(np.isclose(max_lons_c[storm[0]], zdr_con_storm_lon, rtol=1e-05))
#                         if matching_ind.shape[0] > 0:
#                             p_zdr_areas.append((zdr_con_areas[matching_ind[0]]))
#                             p_zdr_maxes.append((zdr_con_maxes[matching_ind[0]]))
#                             p_zdr_means.append((zdr_con_means[matching_ind[0]]))
#                             p_zdr_devs.append((zdr_con_dev[matching_ind[0]]))
#                             p_zdr_10max.append((zdr_con_10max[matching_ind[0]]))
#                             p_zdr_mode.append((zdr_con_mode[matching_ind[0]]))
#                             p_zdr_median.append((zdr_con_median[matching_ind[0]]))
#                             p_separations.append((shaped_dist[matching_ind[0]]))
#                             p_sp_angle.append((shaped_ang[matching_ind[0]]))
#                         else:
#                             p_zdr_areas.append((0))
#                             p_zdr_maxes.append((0))
#                             p_zdr_means.append((0))
#                             p_zdr_devs.append((0))
#                             p_zdr_10max.append((0))
#                             p_zdr_mode.append((0))
#                             p_zdr_median.append((0))
#                             p_separations.append((0))
#                             p_sp_angle.append((0))

#                         matching_ind_hail = np.flatnonzero(np.isclose(max_lons_c[storm[0]], hail_con_storm_lon, rtol=1e-05))
#                         if matching_ind_hail.shape[0] > 0:
#                             p_hail_areas.append((hail_con_areas[matching_ind_hail[0]]))
#                         else:
#                             p_hail_areas.append((0))

#                         matching_ind_zhh = np.flatnonzero(np.isclose(max_lons_c[storm[0]],zhh_con_storm_lon, rtol=1e-05))
#                         if matching_ind_zhh.shape[0] > 0:
#                             p_zhh_maxes.append((zhh_con_maxes[matching_ind_zhh[0]]))
#                             p_zhh_areas.append((zhh_con_areas[matching_ind_zhh[0]]))
#                             p_zhh_core_avgs.append((zhh_con_core_avg[matching_ind_zhh[0]]))
#                         else:
#                             p_zhh_areas.append((0))
#                             p_zhh_maxes.append((0))
#                             p_zhh_core_avgs.append((0))

                    #Now start plotting stuff!
#                     if np.asarray(zdr_centroid_lon).shape[0] > 0:
#                         ax.scatter(zdr_centroid_lon, zdr_centroid_lat, marker = '*', s = 100, color = 'black', zorder = 10, transform=ccrs.PlateCarree())
#                     if np.asarray(kdp_centroid_lon).shape[0] > 0:
#                         ax.scatter(kdp_centroid_lon, kdp_centroid_lat, marker = '^', s = 100, color = 'black', zorder = 10, transform=ccrs.PlateCarree())
                    #Uncomment to print all object areas
                    #for i in enumerate(zdr_areas):
                    #    plt.text(zdr_centroid_lon[i[0]]+.016, zdr_centroid_lat[i[0]]+.016, "%.2f km^2" %(zdr_areas[i[0]].magnitude), size = 23)
                        #plt.text(zdr_centroid_lon[i[0]]+.016, zdr_centroid_lat[i[0]]+.016, "%.2f km^2 / %.2f km / %.2f dB" %(zdr_areas[i[0]].magnitude, zdr_dist[i[0]], zdr_forw[i[0]]), size = 23)
                        #plt.annotate(zdr_areas[i[0]], (zdr_centroid_lon[i[0]],zdr_centroid_lat[i[0]]))
                    #ax.contourf(rlons[0,:,:],rlats[0,:,:],KDPmasked,KDPlevels1,linewide = .01, colors ='b', alpha = .5)
                    #plt.tight_layout()
                    #plt.savefig('ZDRarcannotated.png')
                    storm_times = []
                    for l in range(len(max_lons_c)):
                        storm_times.append((time_start))
                    tracking_index = tracking_index + 1
                    #Get storm motion deltas:
                    u_B, v_B = wind_components(Bunkers_s*units('m/s'), Bunkers_m*units('degree'))
                    u_alg, v_alg = wind_components(alg_speeds*units('m/s'), alg_directions*units('degree'))
                    print(u_B, v_B, 'Bunkers motion components')
                    print(u_alg, v_alg, 'Observed motion components')
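                    #Motion delta: magnitude of the vector difference between the observed (algorithm) and Bunkers storm motions, in m/s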
                    u_diff = u_alg-u_B
                    v_diff = v_alg-v_B
                    motion_delta = np.sqrt(u_diff**2 + v_diff**2).magnitude
                #If there are no storms, set everything to empty arrays!
                else:
                    storm_ids = []
                    alg_speeds = []
                    alg_directions = []
                    motion_delta = []
                    max_lons_c = []
                    max_lats_c = []
                    storm_times = time_start
                #Now record all data in a Pandas dataframe.
                new_cells = pd.DataFrame({
                    'scan': scan_index,
                    'storm_id' : storm_ids,
                    'storm speed' : alg_speeds,
                    'storm_direction' : alg_directions,
                    'motion_deltas' : motion_delta,
                    'storm_id1' : storm_ids,
                    'storm_lon' : max_lons_c,
                    'storm_lat' : max_lats_c,
                    'times' : storm_times
                })
                new_cells.set_index(['scan', 'storm_id'], inplace=True)
                if scan_index == 0:
                    tracks_dataframe = new_cells
                else:
                    tracks_dataframe = tracks_dataframe.append(new_cells)
                n = n+1
                scan_index = scan_index + 1

                #Plot the consolidated stuff!
                #Write some text objects for the ZDR arc attributes to add to the placefile
                f.write('TimeRange: '+str(time_start.year)+'-'+str(month)+'-'+str(d_beg)+'T'+str(h_beg)+':'+str(min_beg)+':'+str(sec_beg)+'Z '+str(time_start.year)+'-'+str(month)+'-'+str(d_end)+'T'+str(h_end)+':'+str(min_end)+':'+str(sec_end)+'Z')
                f.write('\n')
                f.write("Color: 139 000 000 \n")
                f.write('Font: 1, 30, 1,"Arial" \n')
                for y in range(len(max_lats_c)):
                    #f.write('Text: '+str(max_lats_c[y])+','+str(max_lons_c[y])+', 1, "X"," Arc Area: '+str(p_zdr_areas[y])+'\\n Arc Mean: '+str(p_zdr_means[y])+'\\n KDP-ZDR Separation: '+str(p_separations[y])+'\\n Separation Angle: '+str(p_sp_angle[y])+'" \n')
                    f.write('Text: '+str(max_lats_c[y])+','+str(max_lons_c[y])+', 1, "X"," Storm Speed: %.2f m/s \\n Storm Direction: %.2f deg \\n Motion Delta: %.2f m/s" \n' %(alg_speeds[y], alg_directions[y], motion_delta[y]))



                title_plot = plt.title(station+' Radar Reflectivity, ZDR, and KDP '+str(time_start.year)+'-'+str(time_start.month)+'-'+str(time_start.day)+
                                           ' '+str(hour)+':'+str(minute)+' UTC', size = 25)

#                 try:
#                     plt.plot([zdr_con_centroid_lon[kdp_inds], kdp_con_centroid_lon[kdp_inds]], [zdr_con_centroid_lat[kdp_inds],kdp_con_centroid_lat[kdp_inds]], color = 'k', linewidth = 5, transform=ccrs.PlateCarree())
#                 except:
#                     print('Separation Angle Failure')

                ref_centroid_lon = max_lons_c
                ref_centroid_lat = max_lats_c
                if len(max_lons_c) > 0:
                    ax.scatter(max_lons_c,max_lats_c, marker = "o", color = 'k', s = 500, alpha = .6)
                    for i in enumerate(ref_centroid_lon): 
                        plt.text(ref_centroid_lon[i[0]]+.016, ref_centroid_lat[i[0]]+.016, "storm_id: %.1f" %(storm_ids[i[0]]), size = 25)
                #Comment out this line if not plotting tornado tracks
                #plt.plot([start_torlons, end_torlons], [start_torlats, end_torlats], color = 'purple', linewidth = 5, transform=ccrs.PlateCarree())
                #Add legend stuff
                zdr_outline = mlines.Line2D([], [], color='blue', linewidth = 5, linestyle = 'solid', label='ZDR Arc Outline(Area/Max)')
                kdp_outline = mlines.Line2D([], [], color='green', linewidth = 5,linestyle = 'solid', label='"KDP Foot" Outline')
                separation_vector = mlines.Line2D([], [], color='black', linewidth = 5,linestyle = 'solid', label='KDP/ZDR Centroid Separation Vector (Red Text=Distance)')
                #tor_track = mlines.Line2D([], [], color='purple', linewidth = 5,linestyle = 'solid', label='Tornado Tracks')
                elevation = mlines.Line2D([], [], color='grey', linewidth = 5,linestyle = 'solid', label='Height AGL (m)')

                plt.legend(handles=[zdr_outline, kdp_outline, separation_vector, elevation], loc = 3, fontsize = 25)
                alt_levs = [1000, 2000]
                plt.savefig('Machine_Learning/DELTA_dev'+station+str(time_start.year)+str(time_start.month)+str(day)+str(hour)+str(minute)+'.png')
                print('Figure Saved')
                plt.close()
                zdr_out_list.append(zdr_outlines)
                #except:
                #    traceback.print_exc()
                #    continue
    f.close()
    plt.show()
    print('Fin')
    #export_csv = tracks_dataframe.to_csv(r'C:\Users\Nick\Downloads\tracksdataframe.csv',index=None,header=True)
    return tracks_dataframe
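
# Hedged invocation sketch (not from the original source); the station, date,
# duration and threshold values below are arbitrary illustrative choices.
tracks = storm_motion_deltas_algorithm(REFlev=45, REFlev1=50, big_storm=300,
                                       zero_z_trigger=25, storm_to_track=1,
                                       year=2019, month=5, day=20, hour=22,
                                       start_min=0, duration=1, calibration=0.0,
                                       station='KTLX', Bunkers_s=15.0, Bunkers_m=240.0)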
Example #6
    def addLengthMeters(stream_network):
        """
        Adds length field in meters to network
        (The added field name will be 'LENGTH_M').

        .. note:: This may be needed for generating the kfac file
                  depending on the units of your raster. See: :doc:`gis_tools`.

        Parameters
        ----------
        stream_network: str
            Path to stream network file.


        Here is an example of how to use this:

        .. code:: python

            import os
            from RAPIDpy.gis.taudem import TauDEM

            output_directory = '/path/to/output/files'
            TauDEM.addLengthMeters(os.path.join(output_directory,
                                                "stream_reach_file.shp"))

        """
        network_shapefile = ogr.Open(stream_network, 1)
        network_layer = network_shapefile.GetLayer()
        network_layer_defn = network_layer.GetLayerDefn()

        # make sure projection EPSG:4326
        network_layer_proj = network_layer.GetSpatialRef()
        geographic_proj = osr.SpatialReference()
        geographic_proj.ImportFromEPSG(4326)
        proj_transform = None
        if network_layer_proj != geographic_proj:
            proj_transform = osr.CoordinateTransformation(
                network_layer_proj, geographic_proj)

        # check for field
        create_field = True
        for i in xrange(network_layer_defn.GetFieldCount()):
            field_name = network_layer_defn.GetFieldDefn(i).GetName()
            if field_name == 'LENGTH_M':
                create_field = False
                break

        if create_field:
            network_layer.CreateField(ogr.FieldDefn('LENGTH_M', ogr.OFTReal))

        geo_manager = Geod(ellps="WGS84")
        for network_feature in network_layer:
            feat_geom = network_feature.GetGeometryRef()
            # make sure coordinates are geographic
            if proj_transform:
                feat_geom.Transform(proj_transform)

            line = shapely_loads(feat_geom.ExportToWkb())
            lon_list, lat_list = line.xy
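            # inverse geodesic between consecutive vertices; element [2] of the
            # result is the array of per-segment distances in meters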
            dist = geo_manager.inv(lon_list[:-1], lat_list[:-1], lon_list[1:],
                                   lat_list[1:])[2]
            network_feature.SetField('LENGTH_M', sum(dist))
            network_layer.SetFeature(network_feature)
Example #7
    def __init__(self, ellps="clrk66"):
        self.geod = Geod(ellps=ellps)
Example #8
def lat_lon_grid_deltas(longitude, latitude, **kwargs):
    r"""Calculate the delta between grid points that are in a latitude/longitude
    format.

    Calculate the signed delta distance between grid points when the grid
    spacing is defined by delta lat/lon rather than delta x/y

    Parameters
    ----------
    longitude : array_like
        array of longitudes defining the grid
    latitude : array_like
        array of latitudes defining the grid
    kwargs
        Other keyword arguments to pass to :class:`~pyproj.Geod`

    Returns
    -------
    dx, dy:
        at least two dimensional arrays of signed deltas between grid points in
        the x and y direction

    Notes
    -----
    Accepts 1D, 2D, or higher arrays for latitude and longitude
    Assumes [..., Y, X] for >=2 dimensional arrays

    """
    from pyproj import Geod

    # Inputs must be the same number of dimensions
    if latitude.ndim != longitude.ndim:
        raise ValueError('Lat. and lon. must have the same number of dims.')

    # If we were given 1D arrays, make a mesh grid
    if latitude.ndim < 2:
        longitude, latitude = np.meshgrid(longitude, latitude)

    # pyproj requires ndarrays, not Quantities
    try:
        longitude = longitude.m_as('degrees')
        latitude = latitude.m_as('degrees')
    except AttributeError:
        longitude = np.asarray(longitude)
        latitude = np.asarray(latitude)

    geod_args = {'ellps': 'sphere'}
    if kwargs:
        geod_args = kwargs

    g = Geod(**geod_args)

    forward_az, _, dy = g.inv(longitude[..., :-1, :], latitude[..., :-1, :],
                              longitude[..., 1:, :], latitude[..., 1:, :])
    dy[(forward_az < -90.) | (forward_az > 90.)] *= -1

    forward_az, _, dx = g.inv(longitude[..., :, :-1], latitude[..., :, :-1],
                              longitude[..., :, 1:], latitude[..., :, 1:])
    dx[(forward_az < 0.) | (forward_az > 180.)] *= -1

    return dx, dy
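
# Hedged usage sketch (not part of the original function): plain NumPy arrays
# work because the Quantity conversion above falls back to np.asarray.
import numpy as np

lon = np.arange(-105.0, -100.0, 1.0)
lat = np.arange(40.0, 45.0, 1.0)
dx, dy = lat_lon_grid_deltas(lon, lat, ellps='WGS84')
print(dx.shape, dy.shape)  # (5, 4) and (4, 5): deltas along x and along y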
Example #9
path (line) or a grid (Cartesian or 
latitude/longitude)

Author: [email protected]    
"""

import os
import sys
import numpy as np
import matplotlib.pyplot as plt

from pyproj import Geod
from scipy import interpolate
from netCDF4 import Dataset

sph_proj = Geod(ellps='sphere')

etopo_file = "ETOPO1_Ice_g_gmt4.grd"


def interp_etopo(ll_corner, ur_corner):
    """
        Loads and interpolates the ETOPO1 topography
            data within a region specified by low-left
            and upper-right corner latitude, longitudes

        Parameters
        ----------
        ll_corner : iterable
            Iterable containing the latitude and longitude
                of the lower-left corner of the region
Example #10
from pyproj import Geod


def get_distance(lat1, lng1, lat2, lng2):
    return Geod(ellps='WGS84').inv(lng1, lat1, lng2, lat2)[2]
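
# Hedged usage sketch: the coordinates below are arbitrary sample points
# (roughly Paris and Berlin); the result is the geodesic distance in meters.
print(get_distance(48.86, 2.35, 52.52, 13.40))  # ~878 km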
Example #11
def get_mesh(pfs, rfi, sd, idl):
    """
    From a set of profiles creates the mesh in the forward direction from the
    reference profile.

    :param pfs:
        List of :class:`openquake.hazardlib.geo.line.Line` instances
    :param rfi:
        Index of the reference profile
    :param sd:
        Sampling distance [km] for the edges
    :param idl:
        Boolean indicating the need to account for the IDL
    :returns:
        An updated list of the profiles i.e. a list of
        :class:`openquake.hazardlib.geo.line.Line` instances
    """
    g = Geod(ellps='WGS84')

    # Residual distance, last index
    rdist = [0 for _ in range(0, len(pfs[0]))]
    laidx = [0 for _ in range(0, len(pfs[0]))]

    # New profiles
    npr = list([copy.copy(pfs[rfi])])

    # Run for all the profiles 'after' the reference one
    for i in range(rfi, len(pfs) - 1):

        # Profiles
        pr = pfs[i + 1]
        pl = pfs[i]

        # Fixing IDL case
        if idl:
            for ii in range(0, len(pl)):
                ptmp = pl[ii][0]
                ptmp = ptmp + 360 if ptmp < 0 else ptmp
                pl[ii][0] = ptmp

        # Point in common on the two profiles
        cmm = np.logical_and(np.isfinite(pr[:, 2]), np.isfinite(pl[:, 2]))
        cmmi = np.nonzero(cmm)[0].astype(int)

        # Update last profile index
        mxx = 0
        for ll in laidx:
            if ll is not None:
                mxx = max(mxx, ll)

        # Loop over the points in the right profile
        for x in range(0, len(pr[:, 2])):

            # This edge is in common between the last and the current profiles
            if x in cmmi and laidx[x] is None:
                iii = []
                for li, lv in enumerate(laidx):
                    if lv is not None:
                        iii.append(li)
                iii = np.array(iii)
                minidx = np.argmin(abs(iii - x))
                laidx[x] = mxx
                rdist[x] = rdist[minidx]
            elif x not in cmmi:
                laidx[x] = None
                rdist[x] = 0

        # Loop over profiles
        for k in list(np.nonzero(cmm)[0]):

            # Compute distance and azimuth between the corresponding points
            # on the two profiles
            az12, _, hdist = g.inv(pl[k, 0], pl[k, 1], pr[k, 0], pr[k, 1])
            hdist /= 1e3
            vdist = pr[k, 2] - pl[k, 2]
            tdist = (vdist**2 + hdist**2)**.5
            ndists = int(np.floor((tdist + rdist[k]) / sd))

            # densify the geodesic between the two profile points; npts returns
            # equally spaced intermediate (lon, lat) points between the endpoints
            ll = g.npts(pl[k, 0], pl[k, 1], pr[k, 0], pr[k, 1],
                        np.ceil(tdist) * 20)
            ll = np.array(ll)
            lll = np.ones_like(ll)
            lll[:, 0] = pl[k, 0]
            lll[:, 1] = pl[k, 1]

            _, _, hdsts = g.inv(lll[:, 0], lll[:, 1], ll[:, 0], ll[:, 1])
            hdsts /= 1e3
            deps = np.linspace(pl[k, 2], pr[k, 2], ll.shape[0], endpoint=True)
            tdsts = (hdsts**2 + (pl[k, 2] - deps)**2)**0.5
            assert len(deps) == ll.shape[0]

            # Compute distance between consecutive profiles
            dd = distance(pl[k, 0], pl[k, 1], pl[k, 2], pr[k, 0], pr[k, 1],
                          pr[k, 2])

            # Check distance
            if abs(dd - tdist) > 0.1 * tdist:
                print('dd:', dd)
                tmps = 'Error while building the mesh'
                tmps += '\nDistances: {:f} {:f}'
                raise ValueError(tmps.format(dd, tdist))

            # Adding new points along the edge with index k
            for j in range(ndists):

                # Add new profile
                if len(npr) - 1 < laidx[k] + 1:
                    npr = add_empty_profile(npr)

                # Compute the coordinates of intermediate points along the
                # current edge
                tmp = (j + 1) * sd - rdist[k]
                lo, la, _ = g.fwd(pl[k, 0], pl[k, 1], az12,
                                  tmp * hdist / tdist * 1e3)

                tidx = np.argmin(abs(tdsts - tmp))
                lo = ll[tidx, 0]
                la = ll[tidx, 1]

                # Fix longitudes
                if idl:
                    lo = lo + 360 if lo < 0 else lo

                # Computing depths
                de = pl[k, 2] + tmp * vdist / hdist
                de = deps[tidx]

                npr[laidx[k] + 1][k] = [lo, la, de]
                if (k > 0 and np.all(np.isfinite(npr[laidx[k] + 1][k]))
                        and np.all(np.isfinite(npr[laidx[k]][k]))):

                    p1 = npr[laidx[k]][k]
                    p2 = npr[laidx[k] + 1][k]
                    d = distance(p1[0], p1[1], p1[2], p2[0], p2[1], p2[2])

                    # Check
                    if abs(d - sd) > 0.1 * sd:
                        tmpf = 'd: {:f} diff: {:f} tol: {:f} sd:{:f}'
                        tmpf += '\nresidual: {:f}'
                        tmps = tmpf.format(d, d - sd, TOL * sd, sd, rdist[k])
                        raise ValueError(tmps)
                laidx[k] += 1

            rdist[k] = tdist - sd * ndists + rdist[k]
            assert rdist[k] < sd

    return npr
Example #12
def build_model(
    geojson_path,
    cellsize,
    model,
    timestep,
    name,
    case_template,
    case_path,
    fews,
    fews_config_path,
    dem_path,
    river_path,
    outlet_path,
    region_filter,
):
    """Prepare a simple WFlow model, anywhere, based on global datasets."""

    # lists below need to stay synchronized, not sure of a better way
    [
        geojson_path,
        model,
        timestep,
        name,
        case_template,
        case_path,
        fews_config_path,
        dem_path,
        river_path,
        outlet_path,
        region_filter,
    ] = [
        encode_utf8(p) for p in [
            geojson_path,
            model,
            timestep,
            name,
            case_template,
            case_path,
            fews_config_path,
            dem_path,
            river_path,
            outlet_path,
            region_filter,
        ]
    ]

    # fill in the dependent defaults
    if name is None:
        name = "wflow_{}_case".format(model)
    if case_template is None:
        case_template = "wflow_{}_template".format(model)
    if model == "hbv":
        if timestep == "hourly":
            case_template = "wflow_{}_hourly_template".format(model)
        else:
            case_template = "wflow_{}_daily_template".format(model)

    # assumes it is in decimal degrees, see Geod
    case = os.path.join(case_path, name)
    path_catchment = os.path.join(case, "data/catchments/catchments.geojson")

    region = hydro_engine_geometry(geojson_path, region_filter)

    # get the centroid of the region, such that we have a point for unit conversion
    centroid = sg.shape(region).centroid
    x, y = centroid.x, centroid.y

    filter_upstream_gt = 1000
    crs = "EPSG:4326"

    g = Geod(ellps="WGS84")
    # convert to meters in the center of the grid
    # Earth Engine expects meters
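    # crossdist_m spans the cell diagonal, so the side length is
    # sqrt(crossdist_m**2 / 2) = crossdist_m / sqrt(2)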
    _, _, crossdist_m = g.inv(x, y, x + cellsize, y + cellsize)
    cellsize_m = sqrt(0.5 * crossdist_m**2)

    # start by making case an exact copy of the template
    copycase(case_template, case)

    # create folder structure for data folder
    for d in ["catchments", "dem", "rivers"]:
        dir_data = os.path.join(case, "data", d)
        ensure_dir_exists(dir_data)

    # create grid
    path_log = "wtools_create_grid.log"
    dir_mask = os.path.join(case, "mask")
    projection = "EPSG:4326"

    download_catchments(region,
                        path_catchment,
                        geojson_path,
                        region_filter=region_filter)
    cg_extent = path_catchment

    cg.main(path_log,
            dir_mask,
            cg_extent,
            projection,
            cellsize,
            locationid=name,
            snap=True)
    mask_tif = os.path.join(dir_mask, "mask.tif")

    with rasterio.open(mask_tif) as ds:
        bbox = ds.bounds

    # create static maps
    dir_dest = os.path.join(case, "staticmaps")
    # use custom inifile, default high res ldd takes too long
    path_inifile = os.path.join(case, "data/staticmaps.ini")
    path_dem_in = os.path.join(case, "data/dem/dem.tif")
    dir_lai = os.path.join(case, "data/parameters/clim")

    if river_path is None:
        # download the global dataset
        river_data_path = os.path.join(case, "data/rivers/rivers.geojson")
        # raise ValueError("User must supply river_path for now, see hydro-engine#14")
        download_rivers(region,
                        river_data_path,
                        filter_upstream_gt,
                        region_filter=region_filter)
    else:
        # take the local dataset, reproject and clip
        # command line equivalent of
        # ogr2ogr -t_srs EPSG:4326 -f GPKG -overwrite -clipdst xmin ymin xmax ymax rivers.gpkg rivers.shp
        river_data_path = os.path.join(case, "data/rivers/rivers.gpkg")
        ogr2ogr.main([
            "",
            "-t_srs",
            "EPSG:4326",
            "-f",
            "GPKG",
            "-overwrite",
            "-clipdst",
            str(bbox.left),
            str(bbox.bottom),
            str(bbox.right),
            str(bbox.top),
            river_data_path,
            river_path,
        ])

    if dem_path is None:
        # download the global dem
        download_raster(region,
                        path_dem_in,
                        "dem",
                        cellsize_m,
                        crs,
                        region_filter=region_filter)
    else:
        # warp the local dem onto model grid
        wt.warp_like(
            dem_path,
            path_dem_in,
            mask_tif,
            format="GTiff",
            co={"dtype": "float32"},
            resampling=warp.Resampling.med,
        )

    other_maps = {
        "sbm": [
            "FirstZoneCapacity",
            "FirstZoneKsatVer",
            "FirstZoneMinCapacity",
            "InfiltCapSoil",
            "M",
            "PathFrac",
            "WaterFrac",
            "thetaS",
            "soil_type",
            "landuse",
        ],
        "hbv": [
            "BetaSeepage",
            "Cfmax",
            "CFR",
            "FC",
            "K0",
            "LP",
            "Pcorr",
            "PERC",
            "SFCF",
            "TT",
            "WHC",
        ],
    }

    # TODO rename these in hydro-engine
    newnames = {
        "FirstZoneKsatVer": "KsatVer",
        "FirstZoneMinCapacity": "SoilMinThickness",
        "FirstZoneCapacity": "SoilThickness",
        "landuse": "wflow_landuse",
        "soil_type": "wflow_soil",
    }

    # destination paths
    path_other_maps = []
    for param in other_maps[model]:
        path = os.path.join(case, "data/parameters",
                            newnames.get(param, param) + ".tif")
        path_other_maps.append(path)

    for param, path in zip(other_maps[model], path_other_maps):
        if model == "sbm":
            download_raster(region,
                            path,
                            param,
                            cellsize_m,
                            crs,
                            region_filter=region_filter)
        elif model == "hbv":
            # these are not yet in the earth engine, use local paths
            if timestep == "hourly":
                path_staticmaps_global = (
                    r"p:\1209286-earth2observe\HBV-GLOBAL\staticmaps_hourly")
            else:
                path_staticmaps_global = (
                    r"p:\1209286-earth2observe\HBV-GLOBAL\staticmaps")
            path_in = os.path.join(path_staticmaps_global, param + ".tif")

            # warp the local staticmaps onto model grid
            wt.warp_like(
                path_in,
                path,
                mask_tif,
                format="GTiff",
                co={"dtype": "float32"},
                resampling=warp.Resampling.med,
            )

    if model == "sbm":
        ensure_dir_exists(dir_lai)
        for m in range(1, 13):
            mm = str(m).zfill(2)
            path = os.path.join(dir_lai, "LAI00000.0{}".format(mm))
            download_raster(
                region,
                path,
                "LAI{}".format(mm),
                cellsize_m,
                crs,
                region_filter=region_filter,
            )
    else:
        # TODO this creates defaults in static_maps, disable this behavior?
        # or otherwise adapt static_maps for the other models
        dir_lai = None

    # create default folder structure for running wflow
    dir_inmaps = os.path.join(case, "inmaps")
    ensure_dir_exists(dir_inmaps)
    dir_instate = os.path.join(case, "instate")
    ensure_dir_exists(dir_instate)
    for d in [
            "instate", "intbl", "intss", "outmaps", "outstate", "outsum",
            "runinfo"
    ]:
        dir_run = os.path.join(case, "run_default", d)
        ensure_dir_exists(dir_run)

    if outlet_path is None:
        # this is for coastal catchments only, if it is not coastal and no outlets
        # are found, then it will just be the pit of the ldd
        outlets = outlets_coords(path_catchment, river_data_path)
    else:
        # take the local dataset, reproject and clip
        outlet_data_path = os.path.join(case, "data/rivers/outlets.gpkg")
        ogr2ogr.main([
            "",
            "-t_srs",
            "EPSG:4326",
            "-f",
            "GPKG",
            "-overwrite",
            "-clipdst",
            str(bbox.left),
            str(bbox.bottom),
            str(bbox.right),
            str(bbox.top),
            outlet_data_path,
            outlet_path,
        ])
        x = []
        y = []
        with fiona.open(outlet_data_path) as c:
            for f in c:
                coords = f["geometry"]["coordinates"]
                x.append(coords[0])
                y.append(coords[1])
            outlets_x = np.array(x)
            outlets_y = np.array(y)
        outlets = outlets_x, outlets_y

    sm.main(
        dir_mask,
        dir_dest,
        path_inifile,
        path_dem_in,
        river_data_path,
        path_catchment,
        lai=dir_lai,
        other_maps=path_other_maps,
        outlets=outlets,
    )

    if fews:
        # save default state-files in FEWS-config
        dir_state = os.path.join(case, "outstate")
        ensure_dir_exists(dir_state)
        if model == "sbm":
            state_files = [
                "CanopyStorage.map",
                "GlacierStore.map",
                "ReservoirVolume.map",
                "SatWaterDepth.map",
                "Snow.map",
                "SnowWater.map",
                "SurfaceRunoff.map",
                "SurfaceRunoffDyn.map",
                "TSoil.map",
                "UStoreLayerDepth_0.map",
                "WaterLevel.map",
                "WaterLevelDyn.map",
            ]
        elif model == "hbv":
            state_files = [
                "DrySnow.map",
                "FreeWater.map",
                "InterceptionStorage.map",
                "LowerZoneStorage.map",
                "SoilMoisture.map",
                "SurfaceRunoff.map",
                "UpperZoneStorage.map",
                "WaterLevel.map",
            ]
        zip_name = name + "_GA_Historical default.zip"

        zip_loc = os.path.join(fews_config_path, "ColdStateFiles", zip_name)
        path_csf = os.path.dirname(zip_loc)
        ensure_dir_exists(path_csf)

        mask = pcr.readmap(os.path.join(dir_mask, "mask.map"))

        with zipfile.ZipFile(zip_loc, mode="w") as zf:
            for state_file in state_files:
                state_path = os.path.join(dir_state, state_file)
                pcr.report(pcr.cover(mask, pcr.scalar(0)), state_path)
                zf.write(state_path,
                         state_file,
                         compress_type=zipfile.ZIP_DEFLATED)
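
The grid-size handling near the top of build_model converts a cell size given in decimal degrees into metres at the centroid of the region, via the geodesic length of one cell diagonal. A stand-alone sketch of that conversion; the centroid and cell size below are placeholder values:

from math import sqrt
from pyproj import Geod

g = Geod(ellps="WGS84")

x, y = 5.9, 51.9          # hypothetical region centroid (lon, lat) in decimal degrees
cellsize = 0.008333       # hypothetical cell size in degrees

# geodesic length of the cell diagonal, in metres
_, _, crossdist_m = g.inv(x, y, x + cellsize, y + cellsize)

# side of a square cell with that diagonal
cellsize_m = sqrt(0.5 * crossdist_m ** 2)
print(round(cellsize_m, 1), "m per cell at the region centroid")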
Exemplo n.º 13
0
    def build_record_section(self, x_range=None, y_range=None, replot=False,
                             new=False):
        tmin = None
        tmax = None
        labels = []
        stats_list = []
        if x_range is not None:
            xmin, xmax = x_range
            tmin = datetime.utcfromtimestamp(xmin/1e3)
            tmax = datetime.utcfromtimestamp(xmax/1e3)
            self.tmin_old = tmin
            self.tmax_old = tmax
        else:
            tmin = self.tmin_old
            tmax = self.tmax_old

        if y_range is not None:
            ymax = abs(min(0, y_range[0]))
            ymin = abs(min(0, y_range[1]))
            self.ymin_old = ymin
            self.ymax_old = ymax
        else:
            ymin = self.ymin_old
            ymax = self.ymax_old
            y_range = (-ymax, -ymin)

        if replot:
            tmin = None
            tmax = None
            xmin = np.datetime64(self.start.datetime).astype('int64')//1e3
            xmax = np.datetime64(self.end.datetime).astype('int64')//1e3
            x_range = (xmin, xmax)
            ymin = self.min_dist
            ymax = self.max_dist
            y_range = (-ymax, -ymin)

        g = Geod(ellps='WGS84')
        tlon, tlat = self.targets[self.target]
        st_seismic = self.streams['seismic'].copy()
        st_acoustic = self.streams['acoustic'].copy()

        if None not in [tmin, tmax]:
            starttime = UTCDateTime(tmin)
            endtime = UTCDateTime(tmax)
            st_seismic.trim(starttime, endtime)
            st_acoustic.trim(starttime, endtime)
        print(tmin, tmax, ymin, ymax)

        # Get rid of dummy curves
        self.curves = {k:self.curves[k] for k in self.curves if k[1] != "dummy"}

        if st_seismic.count() > 0 and self.plot_seismic:
            local_max_s = 0.
            for tr in st_seismic:
                _lat = tr.stats.coordinates.latitude
                _lon = tr.stats.coordinates.longitude
                _,_,d = g.inv(tlon, tlat, _lon, _lat)
                if d/1e3 >= ymin and d/1e3 <= ymax:
                    local_max_s = max(local_max_s, tr.data.max())
                tr.stats.distance = d/1e3

            for tr in st_seismic:
                key = (tr.stats.station, 'seismic')
                if tr.stats.distance < ymin or tr.stats.distance > ymax:
                    if key in self.curves:
                        self.curves.pop(key)
                    continue
                data = tr.data[:].astype(float)
                data /= local_max_s
                try:
                    t0 = int(tr.stats.distance*1e3/self.red_vel*1e3)
                    id0 = int(tr.stats.distance*1e3/(self.red_vel*tr.stats.delta))
                except ZeroDivisionError:
                    id0 = 0
                    t0 = 0
                times = np.arange(data.size)*(tr.stats.delta*1e3)
                dates = np.datetime64(tr.stats.starttime.datetime)+times.astype('timedelta64[ms]') 
                dates -= np.timedelta64(t0, 'ms')
                idates = np.array(dates.astype('int64') // 1e3).astype(float)
                self.curves[key]  = hv.Curve((idates, data-tr.stats.distance))
                if tr.stats.station not in stats_list:
                    stats_list.append(tr.stats.station)
                    if self.plot_labels:
                        label = tr.stats.station
                    else:
                        label = ''
                    labels.append(hv.Text(idates[id0+100], -tr.stats.distance,
                                          label, 
                                          halign='left',
                                          valign='bottom').opts(norm=dict(framewise=True)))           
        if not self.plot_seismic:
            self.curves = {k:self.curves[k] for k in self.curves if k[1] != "seismic"}

        if st_acoustic.count() > 0 and self.plot_acoustic: 
            local_max_a = 0.
            for tr in st_acoustic:
                _lat = tr.stats.coordinates.latitude
                _lon = tr.stats.coordinates.longitude
                _,_,d = g.inv(tlon, tlat, _lon, _lat)
                if d/1e3 >= ymin and d/1e3 <= ymax:
                    local_max_a = max(local_max_a, tr.data.max())
                tr.stats.distance = d/1e3   

            for tr in st_acoustic:
                key = (tr.stats.station, 'acoustic')
                if tr.stats.distance < ymin or tr.stats.distance > ymax:
                    if key in self.curves:
                        self.curves.pop(key)
                    continue
                data = tr.data[:].astype(float)
                data /= local_max_a 
                try:
                    t0 = int(tr.stats.distance*1e3/self.red_vel*1e3)
                    id0 = int(tr.stats.distance*1e3/(self.red_vel*tr.stats.delta))
                except ZeroDivisionError:
                    id0 = 0
                    t0 = 0
                times = np.arange(data.size)*(tr.stats.delta*1e3)
                dates = np.datetime64(tr.stats.starttime.datetime)+times.astype('timedelta64[ms]')
                dates -= np.timedelta64(t0,'ms')
                idates = np.array(dates.astype('int64') // 1e3).astype(float)
                self.curves[key] = hv.Curve((idates, data-tr.stats.distance))
                if tr.stats.station not in stats_list:
                    stats_list.append(tr.stats.station)
                    if self.plot_labels:
                        label = tr.stats.station
                    else:
                        label = ''
                    labels.append(hv.Text(idates[id0+100], -tr.stats.distance,
                                          label,
                                          halign='left',
                                          valign='bottom').opts(norm=dict(framewise=True)))           
               
        if not self.plot_acoustic:
            self.curves = {k:self.curves[k] for k in self.curves if k[1] != "acoustic"}
        if not self.curves:
            # Plot dummy curves in case there is no data
            x0 = np.datetime64(self.start.datetime).astype('int64')//1e3
            x1 = np.datetime64(self.end.datetime).astype('int64')//1e3
            dummy_x = np.linspace(x0, x1, 100)
            dummy_y = np.ones(100)*y_range[1]
            for i in range(len(st_seismic)+len(st_acoustic)):
                self.curves[('No_%d'%i, 'dummy')] = hv.Curve((dummy_x, dummy_y))
                labels.append(hv.Text(x0, 0., '').opts(norm=dict(framewise=True)))
           
        color_key = {'seismic':'blue', 'acoustic':'red', 'dummy':'grey'}    
        lyt = datashade(hv.NdOverlay(self.curves, kdims=['name', 'type']),
                        aggregator=ds.count_cat('type'),
                        color_key=color_key, dynamic=False, 
                        min_alpha=255, width=3000, height=2000, x_range=x_range, y_range=y_range,
                        y_sampling=0.1)      
        return (lyt*hv.Overlay(labels)).opts(plot=dict(width=3000, height=2000, 
                                                           finalize_hooks=[apply_axis_formatter]),
                                             norm=dict(framewise=True))
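
The record section above offsets every trace by its distance from the selected target, computed with Geod.inv, and shifts it in time by distance over the reduction velocity. A reduced sketch of that distance and time-shift step, using invented station coordinates instead of an ObsPy stream:

from pyproj import Geod

g = Geod(ellps='WGS84')

# Hypothetical target (e.g. a vent) and station coordinates (lon, lat)
tlon, tlat = 175.67, -39.28
stations = {'ST01': (175.70, -39.20), 'ST02': (175.50, -39.40)}

red_vel = 0.33  # reduction velocity in km/s (placeholder)

for name, (slon, slat) in stations.items():
    _, _, d = g.inv(tlon, tlat, slon, slat)
    dist_km = d / 1e3
    t0_s = dist_km / red_vel  # reduced-time shift applied to the trace
    print(f"{name}: {dist_km:.2f} km, time shift {t0_s:.1f} s")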
Exemplo n.º 14
0
def ilæg_revision(
    projektnavn: str,
    sagsbehandler: str,
    **kwargs,
) -> None:
    """Læg reviderede punktdata i databasen"""
    sag = find_sag(projektnavn)
    sagsgang = find_sagsgang(projektnavn)

    fire.cli.print(f"Sags/projekt-navn: {projektnavn}  ({sag.id})")
    fire.cli.print(f"Sagsbehandler:     {sagsbehandler}")
    fire.cli.print("")

    revision = find_faneblad(f"{projektnavn}-revision", "Revision",
                             ARKDEF_REVISION)

    # Assign names to points that have not yet been created
    oprettelse = revision.query("Attribut == 'OPRET'")
    for i, _ in oprettelse.iterrows():
        revision.loc[i, "Punkt"] = f"NYTPUNKT{i}"

    # Fill in omitted idents
    punkter = list(revision["Punkt"])
    udfyldningsværdi = ""
    for i in range(len(punkter)):
        if punkter[i].strip() != "":
            udfyldningsværdi = punkter[i].strip()
            continue
        punkter[i] = udfyldningsværdi
    revision["Punkt"] = punkter

    # Find all points that need to be newly created
    nye_punkter = []
    oprettelse = revision.query("Attribut == 'OPRET'")
    for row in oprettelse.to_dict("records"):
        if row["id"] == -1:
            continue
        punkt = opret_punkt(row["Ny værdi"])
        fire.cli.print(f"Opretter nyt punkt {punkt.ident}: {row['Ny værdi']}")
        nye_punkter.append(punkt)

        # insert the new point ID in the Punkt column, so that they can be
        # retrieved with hent_punkt() later
        erstat = lambda x: punkt.id if x == row["Punkt"] else x
        revision["Punkt"] = revision.Punkt.apply(erstat)

    revision = revision.query("Attribut != 'OPRET'")

    # Find all location coordinates that need to be corrected
    nye_lokationer = []
    lokation = revision.query("Attribut == 'LOKATION'")
    lokation = lokation.query("`Ny værdi` != ''")
    for row in lokation.to_dict("records"):
        punkt = fire.cli.firedb.hent_punkt(row["Punkt"])
        # save these before a new geometry is attached to the point
        (λ1, φ1) = punkt.geometri.koordinater

        go = læs_lokation(row["Ny værdi"])
        go.punkt = punkt
        nye_lokationer.append(go)
        (λ2, φ2) = go.koordinater

        g = Geod(ellps="GRS80")
        _, _, dist = g.inv(λ1, φ1, λ2, φ2)
        if dist >= 25:
            fire.cli.print(
                f"    ADVARSEL: Ny lokationskoordinat afviger {dist:.0f} m fra den gamle",
                fg="yellow",
                bold=True,
            )

    if len(nye_punkter) > 0 or len(nye_lokationer) > 0:
        sagsevent = Sagsevent(
            id=uuid(),
            sagsid=sag.id,
            sagseventinfos=[
                SagseventInfo(beskrivelse="Oprettelse af nye punkter")
            ],
            eventtype=EventType.PUNKT_OPRETTET,
            punkter=nye_punkter,
            geometriobjekter=nye_lokationer,
        )
        fire.cli.firedb.indset_sagsevent(sagsevent, commit=False)
        sagsgang = opdater_sagsgang(sagsgang, sagsevent, sagsbehandler)
        flush()

    revision = revision.query("Attribut != 'LOKATION'")

    # Find all coordinates that need to be created

    # First we need all valid coordinate system names
    srider = fire.cli.firedb.hent_srider()
    sridnavne = [srid.name.upper() for srid in srider]

    # Then iterate over the whole frame, ignoring the non-coordinates
    nye_koordinater = []
    opdaterede_punkter = []
    for r in revision.to_dict("records"):
        sridnavn = r["Attribut"].upper()
        if sridnavn not in sridnavne:
            continue
        try:
            koord = [float(k.replace(",", ".")) for k in r["Ny værdi"].split()]
        except ValueError as ex:
            fire.cli.print(
                f"Ukorrekt koordinatformat:\n{'    '.join(r['Ny værdi'])}\n{ex}"
            )
            fire.cli.print(
                "Skal være på formen: 'x y z t sx sy sz', hvor ubrugte værdier sættes til 'nan'"
            )
            sys.exit(1)

        # Translate NaN to None
        koord = [None if isnan(k) else k for k in koord]

        # Near-copy of code from "niv/ilæg_nye_koter.py". It is worth considering
        # how this task could be parameterized in a reasonably general way, so it
        # can be exposed in a "high-level API"
        srid = fire.cli.firedb.hent_srid(sridnavn)

        punkt = fire.cli.firedb.hent_punkt(r["Punkt"])
        opdaterede_punkter.append(r["Punkt"])

        # Converting decimal years to datetime is not quite as easy as in C
        år = trunc(koord[3])
        rest = koord[3] - år
        startdato = datetime(år, 1, 1)
        årlængde = datetime(år + 1, 1, 1) - startdato
        tid = startdato + rest * årlængde

        koordinat = Koordinat(
            srid=srid,
            punkt=punkt,
            x=koord[0],
            y=koord[1],
            z=koord[2],
            t=tid,
            sx=koord[4],
            sy=koord[5],
            sz=koord[6],
        )
        nye_koordinater.append(koordinat)

        # In Greenland we need to duplicate geographic coordinates to UTM24,
        # since Oracle's built-in UTM routine is too poor for us to generate
        # display coordinates on the fly.
        if sridnavn in ("EPSG:4909", "EPSG:4747"):
            srid_utm24 = fire.cli.firedb.hent_srid("EPSG:3184")
            utm24 = Proj("proj=utm zone=24 ellps=GRS80", preserve_units=False)
            x, y = utm24(koord[0], koord[1])
            koordinat = Koordinat(
                srid=srid_utm24,
                punkt=punkt,
                x=x,
                y=y,
                z=None,
                t=tid,
                sx=koord[4],
                sy=koord[5],
                sz=None,
            )
            nye_koordinater.append(koordinat)

    n = len(opdaterede_punkter)
    if n > 0:
        punktnavne = sorted(list(set(opdaterede_punkter)))
        if len(punktnavne) > 10:
            # show the first nine, an ellipsis, and the last point name
            punktnavne[9] = "..."
            punktnavne[10] = punktnavne[-1]
            punktnavne = punktnavne[0:11]
        koordinatoprettelsestekst = (
            f"Opdatering af {n} koordinater til {', '.join(punktnavne)}")

        sagsevent = Sagsevent(
            id=uuid(),
            sagsid=sag.id,
            sagseventinfos=[
                SagseventInfo(beskrivelse=koordinatoprettelsestekst)
            ],
            eventtype=EventType.KOORDINAT_BEREGNET,
            koordinater=nye_koordinater,
        )
        fire.cli.firedb.indset_sagsevent(sagsevent, commit=False)
        sagsgang = opdater_sagsgang(sagsgang, sagsevent, sagsbehandler)
        flush()

    # Now we turn to the point information
    til_opret = []
    til_ret = []
    til_sluk = []
    punkter_med_oprettelse = set()
    punkter_med_rettelse = set()
    punkter_med_slukning = set()

    # First, attach region point info to the newly created points
    for p in nye_punkter:
        til_opret.append(opret_region_punktinfo(p))

    # Find idents for all points included in the revision
    identer = tuple(sorted(set(revision["Punkt"]) - set(["nan", ""])))
    fire.cli.print("")
    fire.cli.print(f"Behandler {len(identer)} punkter")

    # Then iterate over all points
    for ident in identer:
        fire.cli.print(ident, fg="yellow", bold=True)

        # Fetch the point and all relevant point information from the database.
        # This is a bit trickier than for the coordinates: when updating
        # existing points we want to check the info keys, so we have to fetch
        # the actual point, with its associated info keys, from the database
        try:
            punkt = fire.cli.firedb.hent_punkt(ident)
            infonøgler = {
                info.objektid: i
                for i, info in enumerate(punkt.punktinformationer)
            }
        except NoResultFound as ex:
            fire.cli.print(
                f"FEJL: Kan ikke finde punkt {ident}!",
                fg="yellow",
                bg="red",
                bold=True,
            )
            fire.cli.print(f"Mulig årsag: {ex}")
            sys.exit(1)

        # Fetch all revision elements for the point from the revision sheet
        rev = revision.query(f"Punkt == '{ident}'")

        for r in rev.to_dict("records"):
            if r["Attribut"] in sridnavne:
                continue
            if r["id"] == 999999:
                continue
            if r["id"] == -1:
                continue
            pitnavn = r["Attribut"]
            if pitnavn == "":
                continue

            if r["Sluk"] and r["Ny værdi"]:
                fire.cli.print(
                    f"    * FEJL: 'Sluk' og 'Ny værdi' begge udfyldt: {r['Ny værdi']}",
                    fg="red",
                    bold=False,
                )
                continue

            if r["Tekstværdi"] != "" and r["Tekstværdi"] == r["Ny værdi"]:
                fire.cli.print(
                    f"    ADVARSEL: Tekst i 'Ny værdi' identisk med udgangspunkt for {pitnavn}.",
                    fg="yellow",
                    bold=True,
                )
                continue

            if pitnavn is None:
                fire.cli.print(
                    "    * Ignorerer uanført punktinformationstype",
                    fg="red",
                    bold=False,
                )
                continue

            pit = fire.cli.firedb.hent_punktinformationtype(pitnavn)
            if pit is None:
                fire.cli.print(
                    f"    * Ignorerer ukendt punktinformationstype '{pitnavn}'",
                    fg="red",
                    bold=True,
                )
                continue

            # New point-info element?
            if pd.isna(r["id"]):
                # ATTR:muligt_datumstabil + switched off == not present in the DB
                # Inserted by "fire niv udtræk-revision"
                if pitnavn == "ATTR:muligt_datumstabil" and r["Sluk"]:
                    continue

                fire.cli.print(
                    f"    Opretter nyt punktinfo-element: {pitnavn}")
                if pit.anvendelse == PunktInformationTypeAnvendelse.FLAG:
                    if r["Ny værdi"]:
                        fire.cli.print(
                            f"    BEMÆRK: {pitnavn} er et flag. Ny værdi '{r['Ny værdi']}' ignoreres",
                            fg="yellow",
                            bold=True,
                        )
                    pi = PunktInformation(infotype=pit, punkt=punkt)
                elif pit.anvendelse == PunktInformationTypeAnvendelse.TEKST:
                    # No definitive test here: empty text can be valid.
                    # But we make sure it is not None
                    tekst = r["Ny værdi"]
                    if tekst is None or tekst == "":
                        fire.cli.print(
                            f"    ADVARSEL: Tom tekst anført for {pitnavn}.",
                            fg="yellow",
                            bold=True,
                        )
                        tekst = ""
                    pi = PunktInformation(infotype=pit,
                                          punkt=punkt,
                                          tekst=tekst)
                else:
                    try:
                        # Both period and comma are accepted as decimal separators
                        tal = float(r["Ny værdi"].replace(",", "."))
                    except ValueError as ex:
                        fire.cli.print(
                            f"    FEJL: {pitnavn} forventer numerisk værdi [{ex}].",
                            fg="yellow",
                            bold=True,
                        )
                        tal = 0
                    pi = PunktInformation(infotype=pit, punkt=punkt, tal=tal)

                til_opret.append(pi)
                punkter_med_oprettelse.add(ident)
                continue

            # No changes? Then we are done and move on to the next element.
            if r["Sluk"] == r["Ny værdi"] == "":
                continue

            # From here on we only handle point information with changes entered

            # Now we can use objektid as an integer (above we needed the NaN property)
            oid = int(r["id"])
            if r["Sluk"] == "x":
                try:
                    pi = punkt.punktinformationer[infonøgler[oid]]
                except KeyError:
                    fire.cli.print(
                        f"    * Ukendt id - ignorerer element '{oid}'",
                        fg="red",
                        bold=True,
                    )
                    continue
                fire.cli.print(f"    Slukker: {pitnavn}")
                # pi._registreringtil = func.current_timestamp()
                til_sluk.append(pi)
                punkter_med_slukning.add(punkt.ident)
                continue

            fire.cli.print(f"    Retter punktinfo-element: {pitnavn}")
            if pit.anvendelse == PunktInformationTypeAnvendelse.FLAG:
                pi = PunktInformation(infotype=pit, punkt=punkt)
            elif pit.anvendelse == PunktInformationTypeAnvendelse.TEKST:
                # Remove superfluous whitespace and duplicated periods
                tekst = r["Ny værdi"]
                tekst = re.sub(r"[ \t]+", " ", tekst.strip())
                tekst = re.sub(r"[.]+", ".", tekst)
                pi = PunktInformation(infotype=pit, punkt=punkt, tekst=tekst)
            else:
                try:
                    tal = float(r["Ny værdi"])
                except ValueError as ex:
                    fire.cli.print(
                        f"    FEJL: {pitnavn} forventer numerisk værdi [{ex}].",
                        fg="yellow",
                        bold=True,
                    )
                    tal = 0
                pi = PunktInformation(infotype=pit, punkt=punkt, tal=tal)
            til_ret.append(pi)
            punkter_med_rettelse.add(punkt.ident)
            continue

    fikspunktstyper = [FikspunktsType.GI for _ in nye_punkter]
    landsnumre = fire.cli.firedb.tilknyt_landsnumre(nye_punkter,
                                                    fikspunktstyper)
    til_opret.extend(landsnumre)
    for p in nye_punkter:
        punkter_med_oprettelse.add(p.ident)

    if len(til_opret) > 0 or len(til_ret) > 0:
        sagsevent = Sagsevent(
            id=uuid(),
            sagsid=sag.id,
            sagseventinfos=[
                SagseventInfo(beskrivelse="Opdatering af punktinformationer")
            ],
            eventtype=EventType.PUNKTINFO_TILFOEJET,
            punktinformationer=[*til_opret, *til_ret],
        )

        fire.cli.firedb.indset_sagsevent(sagsevent, commit=False)
        sagsgang = opdater_sagsgang(sagsgang, sagsevent, sagsbehandler)
        flush()

    if len(til_sluk) > 0:
        sagsevent = Sagsevent(
            id=uuid(),
            sagsid=sag.id,
            sagseventinfos=[
                SagseventInfo(beskrivelse="Lukning af punktinformationer")
            ],
            eventtype=EventType.PUNKTINFO_FJERNET,
            punktinformationer_slettede=til_sluk,
        )

        fire.cli.firedb.indset_sagsevent(sagsevent, commit=False)
        sagsgang = opdater_sagsgang(sagsgang, sagsevent, sagsbehandler)
        flush()

    opret_tekst = f"- oprette {len(til_opret)} attributter fordelt på {len(punkter_med_oprettelse)} punkter"
    sluk_tekst = f"- slukke for {len(til_sluk)} attributter fordelt på {len(punkter_med_slukning)} punkter"
    ret_tekst = f"- rette {len(til_ret)} attributter fordelt på {len(punkter_med_rettelse)} punkter"
    lok_tekst = f"- rette {len(nye_lokationer)} lokationskoordinater"

    fire.cli.print("")
    fire.cli.print("-" * 50)
    fire.cli.print("Punkter færdigbehandlet, klar til at")
    fire.cli.print(opret_tekst)
    fire.cli.print(sluk_tekst)
    fire.cli.print(ret_tekst)
    fire.cli.print(lok_tekst)

    spørgsmål = click.style(
        f"Er du sikker på du vil indsætte ovenstående i {fire.cli.firedb.db}-databasen",
        fg="white",
        bg="red",
    )
    if bekræft(spørgsmål):
        fire.cli.firedb.session.commit()
        skriv_ark(projektnavn, {"Sagsgang": sagsgang})
    else:
        fire.cli.firedb.session.rollback()
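
The location check above measures, on the GRS80 ellipsoid, how far a revised location coordinate has moved from the stored one and warns at 25 m or more. The same check in isolation, with invented coordinates:

from pyproj import Geod

g = Geod(ellps="GRS80")

# old and new location (lon, lat) in decimal degrees -- placeholder values
lon_old, lat_old = 10.2030, 56.1720
lon_new, lat_new = 10.2034, 56.1722

_, _, dist = g.inv(lon_old, lat_old, lon_new, lat_new)
if dist >= 25:
    print(f"WARNING: new location coordinate deviates {dist:.0f} m from the old one")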
Exemplo n.º 15
0
def export_routes(database: SqliteUtil,
                  modes: List[str],
                  filepath: str,
                  skip_empty: bool,
                  epsg: int = 2223):

    transformer = Transformer.from_crs('epsg:2223',
                                       f'epsg:{epsg}',
                                       always_xy=True,
                                       skip_equivalent=True)
    transform = transformer.transform

    measurer = Geod(f'epsg:{epsg}')
    # Geod.inv returns (forward azimuth, back azimuth, distance); keep only the
    # distance, and pass longitudes before latitudes (assumes node x is lon, y is lat)
    measure = lambda n1, n2: measurer.inv([n1.x], [n1.y], [n2.x], [n2.y])[2][0]

    prjpath = os.path.splitext(filepath)[0] + '.prj'
    with open(prjpath, 'w') as prjfile:
        info = get_wkt_string(epsg)
        prjfile.write(info)

    log.info('Loading network node data.')
    query = '''
        SELECT
            node_id,
            point
        FROM nodes;
    '''
    nodes = {}
    database.cursor.execute(query)
    result = counter(database.fetch_rows(), 'Loading node %s.')

    for node_id, point in result:
        x, y = transform(*map(float, point[7:-1].split(' ')))
        nodes[node_id] = Node(x, y)

    log.info('Loading network link data.')
    query = '''
        SELECT 
            link_id,
            source_node,
            terminal_node
        FROM links;
    '''
    links = {}
    database.cursor.execute(query)
    result = counter(database.fetch_rows(), 'Loading link %s.')

    for link_id, source_node, terminal_node in result:
        src_node = nodes[source_node]
        term_node = nodes[terminal_node]
        length = measure(src_node, term_node)
        links[link_id] = Link(src_node, term_node, length)

    log.info('Loading network routing data.')
    query = f'''
        SELECT
            output_legs.leg_id,
            output_legs.agent_id,
            output_legs.agent_idx,
            output_legs.mode,
            output_legs.duration,
            GROUP_CONCAT(output_events.link_id, " ")
        FROM output_legs
        LEFT JOIN output_events
        ON output_legs.leg_id = output_events.leg_id
        WHERE output_legs.mode IN {tuple(modes)}
        GROUP BY
            output_legs.leg_id
        ORDER BY
            output_events.leg_id,
            output_events.leg_idx;
    '''
    database.cursor.execute(query)
    result = counter(database.fetch_rows(block=1000000), 'Exporting route %s.')

    routes = shapefile.Writer(filepath)
    routes.field('leg_id', 'N')
    routes.field('agent_id', 'N')
    routes.field('agent_idx', 'N')
    routes.field('mode', 'C')
    routes.field('duration', 'N')
    routes.field('length', 'N')

    log.info('Exporting simulation routes to shapefile.')
    for leg_id, agent_id, agent_idx, mode, duration, events in result:
        if events is not None:
            route = [links[l] for l in events.split(' ')]
            line = [(link.source_node.x, link.source_node.y) for link in route]
            line.append((route[-1].terminal_node.x, route[-1].terminal_node.y))
            length = sum((link.length for link in route))
            routes.record(leg_id, agent_id, agent_idx, mode, duration, length)
            routes.line([line])
        elif not skip_empty:
            routes.record(leg_id, agent_id, agent_idx, mode, duration, None)
            routes.null()

    if routes.recNum != routes.shpNum:
        log.error('Record/shape misalignment; internal exporting failure.')

    routes.close()

    log.info(f'Routing export complete: wrote {routes.shpNum} routes.')
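
The link-length measurement above only makes sense if the node coordinates handed to Geod.inv are geographic longitudes and latitudes; Geod.inv then returns forward azimuth, back azimuth and distance in metres. A minimal sketch of measuring one link under that assumption, with invented coordinates:

from pyproj import Geod

geod = Geod(ellps='WGS84')

# two hypothetical nodes as (lon, lat) pairs
source = (-112.074, 33.448)
terminal = (-112.070, 33.452)

_, _, dist_m = geod.inv(source[0], source[1], terminal[0], terminal[1])
print(f"link length: {dist_m:.1f} m")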
Exemplo n.º 16
0
def get_reachMeasure(intersectionPoint, flowlines, *raindropPath):
    """Collect NHD Flowline Reach Code and Measure"""
    print('intersectionPoint: ', intersectionPoint)

    # Set Geod to measure distances in meters
    geod = Geod(ellps="WGS84")

    # Convert the flowline to a geometry collection to be exported
    nhdGeom = flowlines['features'][0]['geometry']
    nhdFlowline = GeometryCollection([shape(nhdGeom)])[0]

    # Select the stream name from the NHD Flowline
    streamname = flowlines['features'][0]['properties']['gnis_name']
    if streamname == ' ':
        streamname = 'none'

    # Create streamInfo dict and add some data
    streamInfo = {
        'gnis_name':
        streamname,
        'comid':
        flowlines['features'][0]['properties']
        ['comid'],  # 'lengthkm': flowlines['features'][0]['properties']['lengthkm'],
        'intersectionPoint':
        (intersectionPoint.coords[0][1], intersectionPoint.coords[0][0]),
        'reachcode':
        flowlines['features'][0]['properties']['reachcode']
    }

    # Add more data to the streamInfo dict
    if raindropPath:
        streamInfo['raindropPathDist'] = round(
            geod.geometry_length(raindropPath[0]), 2)

    # If the intersectionPoint is on the NHD Flowline, split the flowline at the point
    if nhdFlowline.intersects(intersectionPoint) is True:
        NHDFlowlinesCut = split(nhdFlowline, intersectionPoint)

    # If they don't intersect (weird right?), buffer the intersectionPoint and then split the flowline
    if nhdFlowline.intersects(intersectionPoint) is False:
        buffDist = intersectionPoint.distance(nhdFlowline) * 1.01
        buffIntersectionPoint = intersectionPoint.buffer(buffDist)
        NHDFlowlinesCut = split(nhdFlowline, buffIntersectionPoint)

    # If the NHD Flowline was split, then calculate measure
    try:
        NHDFlowlinesCut[1]
    except (AssertionError, IndexError) as error:
        # If the NHD Flowline was not split, the intersectionPoint is either
        # the first or the last point on the NHDFlowline
        startPoint = Point(nhdFlowline[0].coords[0][0],
                           nhdFlowline[0].coords[0][1])
        lastPointID = len(nhdFlowline[0].coords) - 1
        lastPoint = Point(nhdFlowline[0].coords[lastPointID][0],
                          nhdFlowline[0].coords[lastPointID][1])
        if (intersectionPoint == startPoint):
            streamInfo['measure'] = 100
            error = 'The point of intersection is the first point on the NHD Flowline.'
        if (intersectionPoint == lastPoint):
            streamInfo['measure'] = 0
            error = 'The point of intersection is the last point on the NHD Flowline.'
        if (intersectionPoint != startPoint
                and intersectionPoint != lastPoint):
            error = 'Error: NHD Flowline measure not calculated'
            streamInfo['measure'] = 'null'
        print(error)
    else:
        lastLineID = len(NHDFlowlinesCut) - 1
        distToOutlet = round(geod.geometry_length(NHDFlowlinesCut[lastLineID]),
                             2)
        flowlineLength = round(geod.geometry_length(nhdFlowline), 2)
        streamInfo['measure'] = round((distToOutlet / flowlineLength) * 100, 2)
    print('calculated measure and reach')

    return streamInfo
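
The measure above is the geodesic length from the intersection point to the downstream end of the flowline, as a percentage of the full flowline length, both obtained with Geod.geometry_length. A reduced sketch with a synthetic LineString standing in for the NHD data:

from pyproj import Geod
from shapely.geometry import LineString, Point
from shapely.ops import split

geod = Geod(ellps="WGS84")

flowline = LineString([(-89.50, 43.00), (-89.45, 43.02), (-89.40, 43.05)])
intersection = Point(-89.45, 43.02)  # assumed to lie on the flowline

parts = split(flowline, intersection)
downstream = list(parts.geoms)[-1]  # last piece taken as the downstream portion

dist_to_outlet = geod.geometry_length(downstream)
total_length = geod.geometry_length(flowline)
measure = round(dist_to_outlet / total_length * 100, 2)
print(f"measure: {measure}%")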
Exemplo n.º 17
0
from xml.etree import ElementTree
import os
import numpy as np

# TODO: lots of stuff here can be done with pyproj.Geod
# https://pyproj4.github.io/pyproj/stable/api/geod.html
from pyproj import Geod

from apertools.log import get_log

WGS84 = Geod(ellps="WGS84")

logger = get_log()


def rowcol_to_latlon(row, col, rsc_data=None, filename=None):
    """Takes the row, col of a pixel and finds its lat/lon

    Can also pass numpy arrays of row and col;
    they must have matching sizes.

    Args:
        row (int or ndarray): row number
        col (int or ndarray): col number
        rsc_data (dict): data output from load_dem_rsc
        filename (str): gdal-readable file with geographic coordinates

    Returns:
        tuple[float, float]: lat, lon for the pixel

    Example:
Exemplo n.º 18
0
def subfault_distances_3D(home, project_name, fault_name, slab_name,
                          projection_zone):
    """
    Estimate the distance between subfaults i and j for every pair in the list
    fault.subfaults, for a 3D fault geometry.

    :Inputs:
      -  *fault* of MudPy .fault class
    
    :Outputs:
      - *D* array of Euclidean distances based on longitudes, latitudes, and depths
      - *Dstrike* array of estimated distances along strike direction
      - *Ddip* array of estimated distances along dip direction
    with D**2 = Dstrike**2 + Ddip**2 to within roundoff.

    For each array, the [i,j] entry is distance from subfault i to j when
    ordered in the order the subfaults appear in the list fault.subfaults.

    Distance in dip direction based on differences in depth and average dip of the fault.  

    """

    from numpy import sqrt, sin, cos, deg2rad, zeros, meshgrid, linspace, where, c_, unravel_index, sort, diff, genfromtxt, sign, unique
    # NOTE: matplotlib.mlab.griddata was removed in matplotlib 3.1; this import
    # requires an old matplotlib (or an equivalent scipy.interpolate.griddata call)
    from matplotlib.mlab import griddata
    from matplotlib import pyplot as plt
    from scipy.spatial.distance import cdist
    from pyproj import Geod

    # If you only want the simplified distances
    if slab_name is None:
        #Read fault geometry data
        fault = genfromtxt(home + project_name + '/data/model_info/' +
                           fault_name)

        #Initialize distance output arrays
        nsubfaults = len(fault)
        Dstrike = zeros((nsubfaults, nsubfaults))
        Ddip = zeros((nsubfaults, nsubfaults))

        #What's the average dip of the fault model??
        dip = fault[:, 5].mean()

        #Instantiate projection object
        g = Geod(ellps='WGS84')

        #Loop over subfaults and compute distances
        print('Getting inter-fault distances')
        for i in range(len(fault)):

            if i % 10 == 0:
                print('... working on subfault ' + str(i) + ' of ' +
                      str(len(fault)))

            #For each subfault loop over every other subfault
            for j in range(len(fault)):

                #Subfault distance with itself is zero
                if i == j:
                    Ddip[i, j] = 0
                    Dstrike[i, j] = 0
                else:
                    lon_origin = fault[i, 1]
                    lat_origin = fault[i, 2]
                    lon_target = fault[j, 1]
                    lat_target = fault[j, 2]
                    az, baz, dist = g.inv(lon_origin, lat_origin, lon_target,
                                          lat_target)
                    delta_strike = dist / 1000
                    #Down dip is just depth difference / avg dip of model
                    z_origin = fault[i, 3]
                    z_target = fault[j, 3]
                    delta_dip = abs(z_origin - z_target) / sin(deg2rad(dip))
                    Ddip[i, j] = delta_dip
                    Dstrike[i, j] = delta_strike

    # If there's a slab model file and you want the more involved on-fault distances
    else:

        #Load things
        fault = genfromtxt(home + project_name + '/data/model_info/' +
                           fault_name)
        slab_model = genfromtxt(home + project_name + '/data/model_info/' +
                                slab_name)

        #Initialize distance output arrays
        nsubfaults = len(fault)
        Dstrike = zeros((nsubfaults, nsubfaults))
        Ddip = zeros((nsubfaults, nsubfaults))

        #Get average dip
        avg_dip = fault[:, 5].mean()

        #get down-dip azimuths
        down_dip = fault[:, 4] + 90
        i = where(down_dip > 360)[0]
        down_dip[i] = down_dip[i] - 360

        #Convert slab1.0 to local UTM coordinates
        slab_x, slab_y = llz2utm(slab_model[:, 0], slab_model[:, 1],
                                 projection_zone)
        slab_x, slab_y = slab_x / 1000, slab_y / 1000
        slab_z = -slab_model[:, 2]

        #Convert fault centroid coordinates to local UTM
        fault_x, fault_y = llz2utm(fault[:, 1], fault[:, 2], projection_zone)
        fault_x, fault_y = fault_x / 1000, fault_y / 1000
        fault_z = fault[:, 3]

        # grid Slab1.0 for making depth contours to be then used for along-strike distance calculation
        ngrid_pts = 500
        X = linspace(slab_x.min(), slab_x.max(), ngrid_pts)
        Y = linspace(slab_y.min(), slab_y.max(), ngrid_pts)
        X, Y = meshgrid(X, Y)
        Z = griddata(slab_x, slab_y, slab_z, X, Y, interp='linear')

        # X, Y and Z are matrices with the grid info, now create one contour at each subfault centroid depth
        contour_levels = unique(sort(fault[:, 3]))
        all_contours = plt.contour(X, Y, Z, levels=contour_levels)

        # x-coordinates for down_dip line
        x_range = slab_x.max() - slab_x.min()
        x_down_dip = linspace(-x_range / 2, x_range / 2, 200)

        #Loop over number of subfaults, we want the distance from i-th fault to all other (j) subfaults
        print('Getting inter-fault distances')
        for i in range(len(fault)):
            if i % 10 == 0:
                print('... working on subfault ' + str(i) + ' of ' +
                      str(len(fault)))
            #Current fault
            xi = fault_x[i]
            yi = fault_y[i]
            zi = fault_z[i]

            #Get contour at depth of current subfault
            contour = all_contours.collections[i].get_paths()[0].vertices

            # Now find coordinates of point on this contour closest to subfault centroid
            dist = sqrt((xi - contour[:, 0])**2 + (yi - contour[:, 1])**2)
            imin = dist.argmin()

            # These are coordinates on the contour
            xmin_i = contour[imin, 0]
            ymin_i = contour[imin, 1]

            #For each subfault loop over every other subfault
            for j in range(len(fault)):
                xj = fault_x[j]
                yj = fault_y[j]
                zj = fault_z[j]

                #Get down_dip y coordinates
                y_down_dip = x_down_dip * cos(deg2rad(down_dip[j]))

                #Move line origin to subfault being tested
                x_down_dip_subfault = x_down_dip + xj
                y_down_dip_subfault = y_down_dip + yj

                #Get coordinates of intersection point between contour and down-dip line by finding minimum distance
                dist = cdist(contour, c_[x_down_dip_subfault,
                                         y_down_dip_subfault])
                r, c = unravel_index(dist.argmin(), dist.shape)
                xmin_j = contour[r, 0]
                ymin_j = contour[r, 1]

                #Keep only points on the contour array that correspond to starting and stopping points along the path
                keep = sort([r, imin])
                contour_integral = contour[keep[0]:keep[1] + 1]

                #Along strike distance is the path integral along contour between (xmin_i,ymin_i) and (xmin_j,ymin_j)
                dx = diff(contour_integral[:, 0])
                dy = diff(contour_integral[:, 1])
                delta_strike = sqrt(dx**2 + dy**2).sum()  #Phew, that was hard
                #Give negative sign if subfault is down_strike
                strike_sign = sign(ymin_j - ymin_i)
                delta_strike = strike_sign * delta_strike

                #get down dip distance from depth and average dip
                delta_dip = (zi - zj) / sin(deg2rad(avg_dip))

                #Now the outputs
                if i == j:
                    Ddip[i, j] = 0
                    Dstrike[i, j] = 0
                else:
                    Ddip[i, j] = delta_dip
                    Dstrike[i, j] = delta_strike

    return Dstrike, Ddip
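
In the simplified branch above (no slab model), the separation between two subfaults is split into an along-strike part from the geodesic distance between centroids and a down-dip part from the depth difference divided by the sine of the average dip. A compact sketch of that decomposition for one pair of subfaults; the centroids and dip are illustrative numbers:

from numpy import deg2rad, sin
from pyproj import Geod

g = Geod(ellps='WGS84')

# hypothetical subfault centroids: (lon, lat, depth_km)
sub_i = (-72.0, -35.0, 15.0)
sub_j = (-71.8, -35.2, 22.0)
avg_dip = 18.0  # degrees

_, _, dist = g.inv(sub_i[0], sub_i[1], sub_j[0], sub_j[1])
delta_strike = dist / 1000.0                                   # km
delta_dip = abs(sub_i[2] - sub_j[2]) / sin(deg2rad(avg_dip))   # km

print(f"strike separation {delta_strike:.1f} km, dip separation {delta_dip:.1f} km")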
Exemplo n.º 19
0
from matplotlib.ticker import LogLocator,LogFormatter
from matplotlib.colors import BoundaryNorm
from matplotlib.colors import LogNorm
import pickle
from numpy import nonzero, ma
#fname='mc3e_hiwrap_20110520_134702-181402.nc'
#fname='mc3e_hiwrap_20110425_070444-115447_chirp.nc'
fname='IPHEX_HIWRAP_L1B_2014611-195727-2014611-210243_HKu_dist_v01.h5'
fname='IPHEX_EXRAD_L1B_20140523-225808-20140523-235906_nadir_dist_v01.nc'
fname='IPHEX_HIWRAP_L1B_2014523-225723-2014523-235850_HKu_dist_v01.h5'
fname='IPHEX_HIWRAP_L1B_20140523-215716-20140523-230217_HKu_dist_v01.nc'
fname='IPHEX_HIWRAP_L1B_20140523-225723-20140523-235850_HKu_dist_v01.nc'
fname='IPHEX_HIWRAP_L1B_20140612-215738-20140612-225747_HKa_dist_v01.nc'

zka,vka,latKa,lonKa,rKa,altKa,rollKa,tKa=readiphexKu(fname)
from pyproj import Geod
ws_geod=Geod(ellps='WGS84')
d=[]
dist=0
for i in range(latKa.shape[0]-1):
    d.append(dist)
    az1,az2,ddist=ws_geod.inv(lonKa[i],latKa[i],lonKa[i+1],latKa[i+1])
    dist+=ddist/1000.
a=nonzero(abs(rollKa)>2.5)
for i in a[0]:
    zka[i,:]=-99
hKa=altKa.mean()/1000.-rKa/1000.
zkam=ma.array(zka,mask=zka<-10)
vkam=ma.array(vka,mask=zka<-10)
vkam=ma.array(vka,mask=zka!=zka)
zKuL=[]
zKaL=[]
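
The along-track distance loop above can also be written as a single vectorized call, since Geod.inv accepts arrays. A sketch assuming lon/lat arrays similar to lonKa and latKa:

import numpy as np
from pyproj import Geod

ws_geod = Geod(ellps='WGS84')

# hypothetical aircraft track (lon, lat in degrees)
lon = np.array([-80.0, -80.1, -80.2, -80.3])
lat = np.array([28.0, 28.05, 28.1, 28.15])

_, _, seg = ws_geod.inv(lon[:-1], lat[:-1], lon[1:], lat[1:])
d = np.concatenate(([0.0], np.cumsum(seg) / 1000.0))  # cumulative along-track distance in km
print(d)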
Exemplo n.º 20
0
def _resample_profile(line, sampling_dist):
    # TODO split this function into smaller components.
    """
    :parameter line:
        An instance of :class:`openquake.hazardlib.geo.line.Line`
    :parameter sampling_dist:
        A scalar defining the distance [km] used to sample the profile
    :returns:
        An instance of :class:`openquake.hazardlib.geo.line.Line`
    """
    lo = [pnt.longitude for pnt in line.points]
    la = [pnt.latitude for pnt in line.points]
    de = [pnt.depth for pnt in line.points]

    # Set projection
    g = Geod(ellps='WGS84')

    # Add a tolerance length to the last point of the profile
    # check that final portion of the profile is not vertical
    if abs(lo[-2] - lo[-1]) > 1e-5 and abs(la[-2] - la[-1]) > 1e-5:
        az12, _, odist = g.inv(lo[-2], la[-2], lo[-1], la[-1])
        odist /= 1e3
        slope = np.arctan((de[-1] - de[-2]) / odist)
        hdist = TOL * sampling_dist * np.cos(slope)
        vdist = TOL * sampling_dist * np.sin(slope)
        endlon, endlat, _ = g.fwd(lo[-1], la[-1], az12, hdist * 1e3)
        lo[-1] = endlon
        la[-1] = endlat
        de[-1] = de[-1] + vdist
        az12, _, odist = g.inv(lo[-2], la[-2], lo[-1], la[-1])

        # Checking
        odist /= 1e3
        slopec = np.arctan((de[-1] - de[-2]) / odist)
        assert abs(slope - slopec) < 1e-3
    else:
        de[-1] = de[-1] + TOL * sampling_dist

    # Initialise the cumulated distance
    cdist = 0.

    # Get the azimuth of the profile
    azim = azimuth(lo[0], la[0], lo[-1], la[-1])

    # Initialise the list with the resampled nodes
    idx = 0
    resampled_cs = [Point(lo[idx], la[idx], de[idx])]

    # Set the starting point
    slo = lo[idx]
    sla = la[idx]
    sde = de[idx]

    # Resampling
    while 1:

        # Check loop exit condition
        if idx > len(lo) - 2:
            break

        # Compute the distance between the starting point and the next point
        # on the profile
        segment_len = distance(slo, sla, sde, lo[idx + 1], la[idx + 1],
                               de[idx + 1])
        azim = azimuth(slo, sla, lo[idx + 1], la[idx + 1])

        # Search for the point along the profile
        if cdist + segment_len > sampling_dist:

            # This is the length of the last segment-fraction needed to
            # obtain the sampling distance
            delta = sampling_dist - cdist

            # Compute the slope of the last segment and its horizontal length.
            # We need to manage the case of a vertical segment TODO
            segment_hlen = distance(slo, sla, 0., lo[idx + 1], la[idx + 1], 0.)
            if segment_hlen > 1e-5:
                segment_slope = np.arctan((de[idx + 1] - sde) / segment_hlen)
            else:
                segment_slope = 90.

            # Horizontal and vertical length of delta
            delta_v = delta * np.sin(segment_slope)
            delta_h = delta * np.cos(segment_slope)

            # Add a new point to the cross section
            pnts = npoints_towards(slo, sla, sde, azim, delta_h, delta_v, 2)

            # Update the starting point
            slo = pnts[0][-1]
            sla = pnts[1][-1]
            sde = pnts[2][-1]
            resampled_cs.append(Point(slo, sla, sde))

            # Reset the cumulative distance
            cdist = 0.

        else:
            cdist += segment_len
            idx += 1
            slo = lo[idx]
            sla = la[idx]
            sde = de[idx]

    # Check the distances along the profile
    coo = [[pnt.longitude, pnt.latitude, pnt.depth] for pnt in resampled_cs]
    coo = np.array(coo)
    for i in range(0, coo.shape[0] - 1):
        dst = distance(coo[i, 0], coo[i, 1], coo[i, 2], coo[i + 1, 0],
                       coo[i + 1, 1], coo[i + 1, 2])
        if abs(dst - sampling_dist) > 0.1 * sampling_dist:
            msg = 'Distance between points along the profile larger than 10%'

            fmt = '\n   Expected {:.2f} Computed {:.2f}'
            msg += fmt.format(sampling_dist, dst)

            fmt = '\n   Point {:.2f} {:.2f} {:.2f}'
            msg += fmt.format(*[coo[i, j] for j in range(3)])
            msg += fmt.format(*[coo[i + 1, j] for j in range(3)])

            msg += '\n   Please, change the sampling distance or the'
            msg += ' points along the profile'
            raise ValueError(msg)

    return Line(resampled_cs)
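
Stripped of the depth handling and the OpenQuake helpers, the resampling above walks along the profile and drops a node roughly every sampling distance, using forward azimuths and geodesic distances. A simplified 2-D sketch of the same idea with pyproj alone (no depths, and the azimuth of each original segment is reused along its whole length):

from pyproj import Geod

def resample_2d(lons, lats, sampling_km):
    """Return points spaced roughly sampling_km apart along a lon/lat polyline."""
    g = Geod(ellps='WGS84')
    out = [(lons[0], lats[0])]
    leftover = 0.0  # distance already walked since the last placed point, in metres
    for i in range(len(lons) - 1):
        az, _, seg = g.inv(lons[i], lats[i], lons[i + 1], lats[i + 1])
        pos = -leftover
        while pos + sampling_km * 1e3 <= seg:
            pos += sampling_km * 1e3
            lon, lat, _ = g.fwd(lons[i], lats[i], az, pos)
            out.append((lon, lat))
        leftover = seg - pos
    return out

pts = resample_2d([10.0, 10.5, 11.0], [45.0, 45.2, 45.1], sampling_km=5.0)
print(len(pts), "points")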
Exemplo n.º 21
0
def process_slices(slices, dset, datagroup, beam, ref_dt, vmin, vmax, vmin_pal,
                   vmax_pal):
    """Yield (metadata, geolocation, band) for each along-track chunk of a beam."""
    geod = Geod(ellps='WGS84')

    # Inner, middle and outer beam widths
    beam_widths = (94.0, 120.0, 156.0,)
    beam_width = beam_widths[beam]

    for k, chunk_rows in enumerate(slices):
        _chunk_lon = dset.variables['beam_clon'][chunk_rows, beam]
        chunk_size = numpy.shape(_chunk_lon)[0]

        if 0 >= chunk_size:
            # Empty chunk, skip
            continue

        chunk_lon0 = _chunk_lon[0] - 180.0
        chunk_lon = numpy.mod(_chunk_lon - chunk_lon0, 360.0) + chunk_lon0
        chunk_lat = dset.variables['beam_clat'][chunk_rows, beam]
        values = dset.variables['SSS'][chunk_rows, beam]
        values = numpy.ma.masked_where(values==-999., values)

        # Build GCPs
        dgcp = 32.
        if numpy.max(numpy.abs(chunk_lat)) > 75.0:
            dgcp = 4.

        ngcplin = numpy.ceil(chunk_lon.size / dgcp).astype('int32')
        _gcp_alongtrack = numpy.linspace(0, chunk_lon.size - 1, num=ngcplin)
        _gcp_indices = numpy.round(_gcp_alongtrack).astype('int32')
        _gcppix = numpy.array([-1.0, 0.5, 2.0])
        ngcppix = _gcppix.size
        gcppix = numpy.tile(_gcppix[numpy.newaxis, :],
                            (ngcplin, 1))
        gcpind = numpy.tile(_gcp_indices[:, numpy.newaxis],
                            (1, ngcppix)).astype('int32')
        gcplin = gcpind + 0.5

        # Compute swath direction
        _ind0 = numpy.minimum(gcpind, chunk_lon.size - 2)
        _ind1 = _ind0 + 1
        ind_same = numpy.where((chunk_lon[_ind0] == chunk_lon[_ind1]) &
                               (chunk_lat[_ind0] == chunk_lat[_ind1]))
        for ig_line, ig_pixel in zip(ind_same[0], ind_same[1]):
            if _ind1[ig_line, ig_pixel] < chunk_lon.size -1:
                _ind1[ig_line, ig_pixel] += 1
            else:
                _ind0[ig_line, ig_pixel] -= 1

        lat_diff = chunk_lat[_ind1] - chunk_lat[_ind0]
        lon_diff = chunk_lon[_ind1] - chunk_lon[_ind0]
        swath_dir = numpy.arctan2(lat_diff, lon_diff)

        # Compute GCPs geographical coordinates from the location of the beam
        # center and its width
        gcphei = numpy.zeros(gcppix.shape)
        gcplon = numpy.zeros(gcppix.shape)
        gcplat = numpy.zeros(gcppix.shape)
        for gcp_i in range(len(_gcp_indices)):
            ind = _gcp_indices[gcp_i]
            central_lon = chunk_lon[ind]
            central_lat = chunk_lat[ind]
            across_dir = -1 * numpy.rad2deg(numpy.mod(swath_dir[gcp_i][1],
                                            2 * numpy.pi))
            lon_a, lat_a, _ = geod.fwd(central_lon, central_lat,
                                       across_dir,
                                       1000.0 * 1.5 * beam_width)
            lon_b, lat_b, _ = geod.fwd(central_lon, central_lat,
                                       180.0 + across_dir,
                                       1000.0 * 1.5 * beam_width)

            gcplon[gcp_i][0] = lon_b
            gcplon[gcp_i][1] = central_lon
            gcplon[gcp_i][2] = lon_a

            gcplat[gcp_i][0] = lat_b
            gcplat[gcp_i][1] = central_lat
            gcplat[gcp_i][2] = lat_a

        # Fix longitudinal continuity
        half_ind = numpy.floor(len(_gcp_indices) * 0.5).astype('int32')
        gcplon0 = gcplon[half_ind, 1] - 180.0
        gcplon = numpy.mod(gcplon - gcplon0, 360.0) + gcplon0

        # Construct metadata/geolocation/band(s)
        sec = dset.variables['sec'][chunk_rows]
        start_dt = ref_dt + datetime.timedelta(seconds=sec[0])
        stop_dt = ref_dt + datetime.timedelta(seconds=sec[-1])
        dtime, time_range = stfmt.format_time_and_range(start_dt, stop_dt,
                                                        units='h')
        metadata = {}
        metadata['name'] = '{}_{}'.format(datagroup, k)
        metadata['time_range'] = time_range
        metadata['datetime'] = dtime
        metadata['datagroup'] = datagroup

        geolocation = {}
        geolocation['projection'] = stfmt.format_gdalprojection()
        geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei,
                                                    gcppix, gcplin)

        # Build mask
        land_frac = dset.variables['land_frac'][chunk_rows, beam]
        ice_frac = dset.variables['ice_frac'][chunk_rows, beam]
        scat_land_frac = dset.variables['scat_land_frac'][chunk_rows, beam]
        scat_ice_frac = dset.variables['scat_ice_frac'][chunk_rows, beam]
        mask = (values.mask |
                (land_frac > 0.1) |
                (ice_frac > 0.1))
        # Optional, disabled: also flag scatterometer land/ice contamination
        # mask = mask | (scat_land_frac > 0.001) | (scat_ice_frac > 0.001)

        # Pack data
        band = []
        offset, scale = vmin, (vmax - vmin) / 254.
        numpy.clip(values.data, vmin, vmax, out=values.data)
        array = numpy.round((values.data - offset) / scale).astype('uint8')
        array[numpy.where(mask)] = 255
        colortable = stfmt.format_colortable('matplotlib_jet',
                                             vmin=vmin, vmax=vmax,
                                             vmin_pal=vmin_pal,
                                             vmax_pal=vmax_pal)
        array = array[:, numpy.newaxis]
        band.append({'array':array,
                     'scale':scale,
                     'offset':offset,
                     'description':'sea surface salinity',
                     'unittype':'PSS',
                     'nodatavalue':255,
                     'parameter_range':[vmin, vmax],
                     'colortable':colortable})

        yield metadata, geolocation, band
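
# A minimal, self-contained sketch (names assumed, not from the original
# reader) of the uint8 packing performed in the "Pack data" step above: values
# are clipped to [vmin, vmax], mapped linearly onto 0..254, and 255 is reserved
# as the nodata value, so the field can later be recovered approximately as
# array * scale + offset.
import numpy


def _pack_uint8_sketch(values, mask, vmin, vmax):
    offset, scale = vmin, (vmax - vmin) / 254.
    packed = numpy.round((numpy.clip(values, vmin, vmax) - offset) / scale)
    packed = packed.astype('uint8')
    packed[mask] = 255  # masked samples become the nodata value
    return packed, offset, scale

# e.g. _pack_uint8_sketch(numpy.array([30., 35., 42.]),
#                         numpy.array([False, True, False]), 30., 40.)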
Exemplo n.º 22
0
def get_mesh(pfs, rfi, sd, idl):
    """
    From a set of profiles creates the mesh in the forward direction from the
    reference profile.

    :param pfs:
        List of :class:`openquake.hazardlib.geo.line.Line` instances
    :param rfi:
        Index of the reference profile
    :param sd:
        Sampling distance [km] for the edges
    :param idl:
        Boolean indicating the need to account for the IDL
    :returns:
        An updated list of the profiles i.e. a list of
        :class:`openquake.hazardlib.geo.line.Line` instances
    """
    g = Geod(ellps='WGS84')

    # Instantiate lists with the residual distance and the last profile index
    # with a finite value at a given depth
    rdist = [0 for _ in range(0, len(pfs[0]))]
    laidx = [0 for _ in range(0, len(pfs[0]))]
    angle = [0 for _ in range(0, len(pfs[0]))]

    # Creating a new list used to collect the new profiles which will describe
    # the mesh. We start with the initial profile i.e. the one identified by
    # the reference index rfi
    npr = list([copy.copy(pfs[rfi])])

    # Run for all the profiles 'after' the reference one
    for i in range(rfi, len(pfs) - 1):

        # Profiles: left and right
        pr = pfs[i + 1]
        pl = pfs[i]

        # Fixing IDL case
        if idl:
            for ii in range(0, len(pl)):
                ptmp = pl[ii][0]
                ptmp = ptmp + 360 if ptmp < 0 else ptmp
                pl[ii][0] = ptmp

        # Points in common on the two profiles i.e. points with finite
        # coordinates on both of them
        cmm = np.logical_and(np.isfinite(pr[:, 2]), np.isfinite(pl[:, 2]))
        cmmi = np.nonzero(cmm)[0].astype(int)

        # Find the index of the profiles previously analysed and with at least
        # a node in common with the current profile (i.e. with a continuity in
        # the mesh)
        mxx = 0
        for ll in laidx:
            if ll is not None:
                mxx = max(mxx, ll)

        # Loop over the points in the right profile
        for x in range(0, len(pr[:, 2])):

            # If true this edge connects the right and left profiles
            if x in cmmi and laidx[x] is None:
                iii = []
                for li, lv in enumerate(laidx):
                    if lv is not None:
                        iii.append(li)
                iii = np.array(iii)
                minidx = np.argmin(abs(iii - x))
                laidx[x] = mxx
                rdist[x] = rdist[minidx]
                angle[x] = angle[minidx]
            elif x not in cmmi:
                laidx[x] = None
                rdist[x] = 0
                angle[x] = None

        # Loop over the indexes of the edges in common for the two profiles
        # starting from the top and going down
        for k in list(np.nonzero(cmm)[0]):

            # Compute distance [km] and azimuth between the corresponding
            # points on the two consecutive profiles
            az12, _, hdist = g.inv(pl[k, 0], pl[k, 1], pr[k, 0], pr[k, 1])
            hdist /= 1e3
            # Vertical distance
            vdist = pr[k, 2] - pl[k, 2]
            # Total distance
            tdist = (vdist**2 + hdist**2)**.5

            # Update rdist
            new_rdist = rdist[k]
            if rdist[k] > 0 and abs(az12 - angle[k]) > 2:
                new_rdist = update_rdist(rdist[k], az12, angle[k], sd)

            # Number of grid points
            # ndists = int(np.floor((tdist+rdist[k])/sd))
            ndists = int(np.floor((tdist + new_rdist) / sd))

            # Calculate points between the corresponding nodes on the
            # two profiles
            ll = g.npts(pl[k, 0], pl[k, 1], pr[k, 0], pr[k, 1],
                        np.ceil(tdist) * 20)
            ll = np.array(ll)
            lll = np.ones_like(ll)
            lll[:, 0] = pl[k, 0]
            lll[:, 1] = pl[k, 1]

            _, _, hdsts = g.inv(lll[:, 0], lll[:, 1], ll[:, 0], ll[:, 1])
            hdsts /= 1e3
            deps = np.linspace(pl[k, 2], pr[k, 2], ll.shape[0], endpoint=True)
            tdsts = (hdsts**2 + (pl[k, 2] - deps)**2)**0.5
            assert len(deps) == ll.shape[0]

            # Compute distance between the nodes at depth index 'k' on the two
            # consecutive profiles
            dd = distance(pl[k, 0], pl[k, 1], pl[k, 2], pr[k, 0], pr[k, 1],
                          pr[k, 2])

            # Check that the actual distance between these nodes is similar to
            # the one originally defined
            if abs(dd - tdist) > 0.1 * tdist:
                print('dd:', dd)
                tmps = 'Error while building the mesh'
                tmps += '\nDistances: {:f} {:f}'
                raise ValueError(tmps.format(dd, tdist))

            # Adding new points along the edge with index k
            for j in range(ndists):

                # Add new profile to 'npr' i.e. the list containing the new
                # set of profiles
                if len(npr) - 1 < laidx[k] + 1:
                    npr = add_empty_profile(npr)

                # Compute the coordinates of intermediate points along the
                # current edge. 'tmp' is the distance between the node on the
                # left edge and the j-th node on the edge. 'lo' and 'la' are
                # the coordinates of this new node
                # tmp = (j+1)*sd - rdist[k]
                tmp = (j + 1) * sd - new_rdist
                # lo, la, _ = g.fwd(pl[k, 0], pl[k, 1], az12,
                #                  tmp*hdist/tdist*1e3)

                # Find the index of the closest node in the vector sampled at
                # high frequency
                tidx = np.argmin(abs(tdsts - tmp))
                lo = ll[tidx, 0]
                la = ll[tidx, 1]

                # Fix longitudes in proximity of the IDL
                if idl:
                    lo = lo + 360 if lo < 0 else lo

                # Computing depths: take the depth of the closest
                # high-frequency node (the analytic alternative is kept for
                # reference)
                # de = pl[k, 2] + tmp * vdist / hdist
                de = deps[tidx]

                # Updating the new profile
                npr[laidx[k] + 1][k] = [lo, la, de]
                if (k > 0 and np.all(np.isfinite(npr[laidx[k] + 1][k]))
                        and np.all(np.isfinite(npr[laidx[k]][k]))):

                    # Computing the distance between consecutive points on
                    # one edge
                    p1 = npr[laidx[k]][k]
                    p2 = npr[laidx[k] + 1][k]
                    d = distance(p1[0], p1[1], p1[2], p2[0], p2[1], p2[2])

                    # Check if the distance between consecutive points on one
                    # edge (with index k) is within a tolerance limit of the
                    # mesh distance defined by the user
                    if abs(d - sd) > TOL * sd:
                        tmpf = '\ndistance: {:f} difference: {:f} '
                        tmpf += '\ntolerance dist: {:f} sampling dist: {:f}'
                        tmpf += '\nresidual distance: {:f}'
                        tmps = tmpf.format(d, d - sd, TOL * sd, sd, new_rdist)
                        raise ValueError(tmps)
                laidx[k] += 1

            # Check that the residual distance along each edge is lower than
            # the sampling distance
            rdist[k] = tdist - sd * ndists + new_rdist
            angle[k] = az12
            assert rdist[k] < sd

    return npr
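
# A hedged, self-contained sketch of the edge-resampling idea used above (the
# function and variable names here are illustrative, not part of the original
# module): the geodesic between two corresponding profile nodes is sampled
# densely with Geod.npts and, for every multiple of the sampling distance sd,
# the densely-sampled node closest in along-edge distance (including depth) is
# selected.
import numpy as np
from pyproj import Geod


def resample_edge(lon0, lat0, dep0, lon1, lat1, dep1, sd):
    g = Geod(ellps='WGS84')
    _, _, hdist = g.inv(lon0, lat0, lon1, lat1)
    hdist /= 1e3  # horizontal distance [km]
    tdist = np.sqrt(hdist**2 + (dep1 - dep0)**2)
    # Dense sampling along the geodesic (roughly 20 points per km)
    pts = np.array(g.npts(lon0, lat0, lon1, lat1, int(np.ceil(tdist)) * 20))
    _, _, hdsts = g.inv(np.full(len(pts), lon0), np.full(len(pts), lat0),
                        pts[:, 0], pts[:, 1])
    hdsts /= 1e3
    deps = np.linspace(dep0, dep1, len(pts))
    tdsts = np.sqrt(hdsts**2 + (deps - dep0)**2)
    nodes = []
    for j in range(int(np.floor(tdist / sd))):
        tidx = np.argmin(np.abs(tdsts - (j + 1) * sd))
        nodes.append((pts[tidx, 0], pts[tidx, 1], deps[tidx]))
    return nodes

# e.g. resample_edge(10.0, 45.0, 5.0, 10.5, 45.2, 15.0, sd=5.0)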
Exemplo n.º 23
0
# Distributed under the Apache License, Version 2.0.
# See accompanying NOTICE file for details.

import sys
import numpy as np
from pyproj import Geod
from shapely.geometry import Point

_wgs84_geod = Geod(ellps='WGS84')
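
# A possible companion helper (not part of the original snippet): geodesic
# ground distance in metres between two (lon, lat) positions, using the
# module-level WGS84 Geod defined above.
def geodesic_distance_m(lon1, lat1, lon2, lat2):
    _, _, dist_m = _wgs84_geod.inv(lon1, lat1, lon2, lat2)
    return dist_m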


class Detection(object):
    __slots__ = [
        'frame_number', 'tracking_plane_loc_x', 'tracking_plane_loc_y',
        'velocity_x', 'velocity_y', 'image_loc_x', 'image_loc_y',
        'image_bbox_TL_x', 'image_bbox_TL_y', 'image_bbox_BR_x',
        'image_bbox_BR_y', 'area', 'lon', 'lat', 'alt', 'timestamp'
    ]

    def __init__(self):
        self.frame_number = 0
        self.tracking_plane_loc_x = 0
        self.tracking_plane_loc_y = 0
        self.velocity_x = 0.0
        self.velocity_y = 0.0
        self.image_loc_x = 0
        self.image_loc_y = 0
        self.image_bbox_TL_x = 0
        self.image_bbox_TL_y = 0
        self.image_bbox_BR_x = 0
        self.image_bbox_BR_y = 0
Exemplo n.º 24
0
def get_mesh_back(pfs, rfi, sd, idl):
    """
    Compute resampled profiles in the backward direction from the reference
    profile and creates the portion of the mesh 'before' the reference profile.

    :param list pfs:
        Original profiles. Each profile is a :class:`numpy.ndarray` instance
        with 3 columns and as many rows as the number of points included
    :param int rfi:
        Index of the reference profile
    :param sd:
        Sampling distance [in km] along the strike
    :param boolean idl:
        A flag used to specify cases where the model crosses the IDL
    :returns:
        A list of profiles, in the same format as `pfs`, describing the
        portion of the mesh 'before' the reference profile
    """

    # Projection
    g = Geod(ellps='WGS84')

    # Initialize residual distance and last index lists
    rdist = [0 for _ in range(0, len(pfs[0]))]
    laidx = [0 for _ in range(0, len(pfs[0]))]
    angle = [0 for _ in range(0, len(pfs[0]))]

    # Create list containing the new profiles. We start by adding the
    # reference profile
    npr = list([copy.deepcopy(pfs[rfi])])

    # Run for all the profiles from the reference one backward
    for i in range(rfi, 0, -1):

        # Set the profiles to be used for the construction of the mesh
        pr = pfs[i - 1]
        pl = pfs[i]

        # Points in common on the two profiles i.e. points that in both the
        # profiles are not NaN
        cmm = np.logical_and(np.isfinite(pr[:, 2]), np.isfinite(pl[:, 2]))

        # Transform the 'cmm' indexes into integers and calculate the index of
        # the last valid profile i.e. the index of the closest one to the
        # current left profile
        cmmi = np.nonzero(cmm)[0].astype(int)
        mxx = 0
        for ll in laidx:
            if ll is not None:
                mxx = max(mxx, ll)

        # For each edge in the right profile we compute
        for x in range(0, len(pr[:, 2])):

            # If this index is in cmmi and last index is None the mesh at this
            # depth starts from this profile
            if x in cmmi and laidx[x] is None:
                iii = []
                for li, lv in enumerate(laidx):
                    if lv is not None:
                        iii.append(li)
                iii = np.array(iii)
                minidx = np.argmin(abs(iii - x))
                laidx[x] = mxx
                rdist[x] = rdist[minidx]
                angle[x] = angle[minidx]
            elif x not in cmmi:
                laidx[x] = None
                rdist[x] = 0
                angle[x] = None

        # Loop over the points in common between the two profiles
        for k in list(np.nonzero(cmm)[0]):

            # Compute azimuth and horizontal distance
            az12, _, hdist = g.inv(pl[k, 0], pl[k, 1], pr[k, 0], pr[k, 1])
            hdist /= 1e3
            vdist = pr[k, 2] - pl[k, 2]
            tdist = (vdist**2 + hdist**2)**.5

            # Update rdist if this is larger than 0 and the new edge has a
            # different direction than the previous one
            new_rdist = rdist[k]
            if rdist[k] > 0 and abs(az12 - angle[k]) > 2:
                new_rdist = update_rdist(rdist[k], az12, angle[k], sd)

            # Calculate the number of cells
            #ndists = int(np.floor((tdist+rdist[k])/sd))
            ndists = int(np.floor((tdist + new_rdist) / sd))

            # Adding new points along edge with index k
            for j in range(ndists):
                #
                # add new profile
                if len(npr) - 1 < laidx[k] + 1:
                    npr = add_empty_profile(npr)
                #
                # fix distance
                #tmp = (j+1)*sd - rdist[k]
                tmp = (j + 1) * sd - new_rdist
                lo, la, _ = g.fwd(pl[k, 0], pl[k, 1], az12,
                                  tmp * hdist / tdist * 1e3)

                if idl:
                    lo = lo + 360 if lo < 0 else lo

                de = pl[k, 2] + tmp * vdist / hdist
                npr[laidx[k] + 1][k] = [lo, la, de]

                if (k > 0 and np.all(np.isfinite(npr[laidx[k] + 1][k]))
                        and np.all(np.isfinite(npr[laidx[k]][k]))):

                    p1 = npr[laidx[k]][k]
                    p2 = npr[laidx[k] + 1][k]
                    d = distance(p1[0], p1[1], p1[2], p2[0], p2[1], p2[2])

                    # This checks that the size of each newly created cell
                    # is similar (within some tolerance) to the initial mesh
                    # size provided by the user
                    if abs(d - sd) > TOL * sd:
                        tmpf = 'd: {:f} diff: {:f} tol: {:f} sd:{:f}'
                        tmpf += '\nresidual: {:f}'
                        tmps = tmpf.format(d, d - sd, TOL * sd, sd, new_rdist)
                        msg = 'The mesh spacing exceeds the tolerance limits'
                        tmps += '\n {:s}'.format(msg)
                        raise ValueError(tmps)

                # Updating the index of the last profile in the mesh at depth
                # 'k'
                laidx[k] += 1

            # Updating residual distances and angle (i.e. azimuth)
            rdist[k] = tdist - sd * ndists + new_rdist
            angle[k] = az12

            # Checking that the residual distance is lower than the sampling
            # distance
            assert rdist[k] < sd

    tmp = []
    for i in range(len(npr) - 1, 0, -1):
        tmp.append(npr[i])

    return tmp
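
# A minimal sketch (illustrative names) of the point placement used in the
# backward direction above: each new node is dropped with Geod.fwd at
# successive multiples of the sampling distance sd along the azimuth between
# the two profile nodes, depths are interpolated linearly, and the leftover
# distance is returned so it can be carried over to the next edge as the
# residual. Assumes a non-zero horizontal separation between the nodes.
import numpy as np
from pyproj import Geod


def place_points(lon0, lat0, dep0, lon1, lat1, dep1, sd, rdist=0.0):
    g = Geod(ellps='WGS84')
    az12, _, hdist = g.inv(lon0, lat0, lon1, lat1)
    hdist /= 1e3  # [km]
    vdist = dep1 - dep0
    tdist = np.sqrt(hdist**2 + vdist**2)
    ndists = int(np.floor((tdist + rdist) / sd))
    nodes = []
    for j in range(ndists):
        tmp = (j + 1) * sd - rdist
        lo, la, _ = g.fwd(lon0, lat0, az12, tmp * hdist / tdist * 1e3)
        nodes.append((lo, la, dep0 + tmp * vdist / hdist))
    residual = tdist - sd * ndists + rdist
    return nodes, residual

# e.g. place_points(10.0, 45.0, 5.0, 10.5, 45.2, 15.0, sd=5.0)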
Exemplo n.º 25
0
from plot_gmpe import (plotting, plot_az, plot_AIC, plotsiteterms,
                       plot_dist_curves, plot_mag_curves, residual_histo,
                       setup_test_curves, setup_test_curves_scatter,
                       setup_curves_compare)
from create_ANN import fitANN
from sklearn.model_selection import KFold
import glob
import os
from keras.models import model_from_json
from tensorflow.keras.initializers import glorot_uniform
from matplotlib import rc

# activate latex text rendering
rc('text', usetex=True)

plt.style.use("classic")

sns.set_context("poster")
g = Geod(ellps='clrk66')

mpl.rcParams['font.size'] = 28
sns.set(font_scale=3)
sns.set_style('whitegrid')



seed = 81
np.random.seed(seed)
#tf.set_random_seed(81)

t = '/Users/aklimase/Documents/GMM_ML/catalog/tstar_site.txt'
tstarcat = np.genfromtxt(t, comments='#', delimiter='\t', dtype=None,
                         encoding=None, names=True)

v = '/Users/aklimase/Documents/GMM_ML/catalog/vs30_sta.txt'
Exemplo n.º 26
0
"""
import warnings

import numpy as np

from numba import jit, float64, complex128

from scipy import signal
from scipy import stats
from scipy.interpolate import interp1d
from scipy.optimize import minimize_scalar, root

from pyproj import Geod

wgs84_proj = Geod(ellps='sphere')


# ####################### #
#    Data manipulation    #
# ####################### #
def stream_to_array_data(stream, latlon=None, t_start=None, t_end=None):
    """Extract time series from ObsPy stream on common time samples and define the array geometry

        Extracts the time series from individual traces of an Obspy stream and identifies a
        common set of time samples where all are defined.  Interpolates the individual traces
        into a single numpy array (x) for which x[m] = x_m(t).  The geometry of the array is
        also extracted to enable beamforming analysis.

        Parameters
        ----------
Exemplo n.º 27
0
    def update_detections(self, ip_detections, rng_max, linecolor='gray'):
        self.clear_plot(reset_zoom=False)

        self._detections = ip_detections

        lons = []
        lats = []

        # for scaling purposes, let's keep a copy of the lons and lats in a separate array
        for detection in self._detections:
            lons.append(detection.longitude)
            lats.append(detection.latitude)

        self._trimmed_detections = self._detections.copy()

        # for scaling, let's keep track of the back-azimuth line end points
        self.end_lats = []
        self.end_lons = []

        geod = Geod(ellps="WGS84")
        # this for loop draws the back-azimuth lines; each extends out to
        # roughly rng_max metres (geod.fwd expects distances in metres)
        for idx, detection in enumerate(self._detections):

            p_lons = [detection.longitude]
            p_lats = [detection.latitude]
            N = 20.
            count = 0

            if hasattr(detection, 'index'):
                name = detection.index
            else:
                name = str(idx)

            for d in np.arange(0, rng_max, rng_max / N):  # N points
                new_lon, new_lat, _ = geod.fwd(detection.longitude,
                                               detection.latitude,
                                               detection.back_azimuth, d)
                if count == N / 2:
                    self.axes.annotate(name, (new_lon, new_lat),
                                       textcoords='offset points',
                                       xytext=(0, 10),
                                       ha='center',
                                       gid='detection_label')
                p_lons.append(new_lon)
                p_lats.append(new_lat)
                count += 1

            self.end_lats.append(p_lats[-1])
            self.end_lons.append(p_lons[-1])

            self.axes.plot(p_lons,
                           p_lats,
                           color=linecolor,
                           transform=self._transform,
                           gid='detection_line')

        for detection in self._detections:
            if detection.array_dim == 3:
                symbol = '^'  # triangle
            elif detection.array_dim == 4:
                symbol = 's'  # square
            elif detection.array_dim == 5:
                symbol = 'p'  # pentagon
            elif detection.array_dim == 6:
                symbol = 'H'  # hexagon
            else:
                symbol = 'o'  # circle

            self.axes.plot(detection.longitude,
                           detection.latitude,
                           marker=symbol,
                           markersize=7,
                           color='black',
                           transform=self._transform,
                           gid='detection_marker')

        self.autoscale_plot()

        # draw it
        try:
            self.fig.canvas.draw()
            self.repaint()
        except http.client.IncompleteRead:
            return
Exemplo n.º 28
0
from shapely.geometry import LineString
from pyproj import Geod


def points_to_meters(point_1, point_2):
    line_string = LineString([point_1, point_2])
    geod = Geod(ellps="WGS84")
    return geod.geometry_length(line_string)
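
# Example usage (points given as (lon, lat) tuples); the result is the
# geodesic length of the segment in metres:
# points_to_meters((2.3522, 48.8566), (-0.1276, 51.5074))  # Paris to London, roughly 3.4e5 m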
Exemplo n.º 29
0
lon_diff_weight_2 = np.array([[1., 0., -1.]])/2.
lat_diff_weight_2 = lon_diff_weight_2.T
lon_diff_weight_4 = np.array([[-1., 8., 0., -8., 1.]])/12.
lat_diff_weight_4 = lon_diff_weight_4.T
lon_diff_weight_6 = np.array([[1./60., -3./20., 3./4., 0., -3./4., 3./20., -1./60.]])
lat_diff_weight_6 = lon_diff_weight_6.T

lon_diff2_weight_2 = np.array([[1., -2., 1.]])
lat_diff2_weight_2 = lon_diff2_weight_2.T
lon_diff2_weight_4 = np.array([[-1., 16., -30., 16., -1.]])/12.
lat_diff2_weight_4 = lon_diff2_weight_4.T
lon_diff2_weight_6 = np.array([[1./90., -3./20., 3./2., -49./18., 3./2., -3./20., 1./90.]])
lat_diff2_weight_6 = lon_diff2_weight_6.T

geodist = Geod(ellps='WGS84')

def discrete_cmap(N, base_cmap=None):
    """Create an N-bin discrete colormap from the specified input map"""
    # Note that if base_cmap is a string or None, you can simply do
    #    return plt.cm.get_cmap(base_cmap, N)
    # The following works for string, None, or a colormap instance:
    base = plt.cm.get_cmap(base_cmap)
    color_list = base(np.linspace(0, 1, N))
    cmap_name = base.name + str(N)
    return base.from_list(cmap_name, color_list, N)
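
# Example usage (illustrative): an 8-bin discrete version of 'jet'. Note that
# base.from_list is available here because 'jet' is a LinearSegmentedColormap.
# cmap8 = discrete_cmap(8, 'jet')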

class Field2d(object):
    """
    An object to analyze 2D spherical field data on Earth
    ===========================================================================
Exemplo n.º 30
0
def hail_objects(hailc, REF_Hail2, ax, f, time_start, month, d_beg, h_beg,
                 min_beg, sec_beg, d_end, h_end, min_end, sec_end, rlons,
                 rlats, max_lons_c, max_lats_c, proj):
    # Inputs:
    # REF_Hail2: REFmasked masked where Zdr and CC greater than 1.0
    # hailc: Contour of REF_Hail2 where reflectivity greater than 50.0 dBZ
    # ax: Subplot object to be built on with each contour
    # f: Placefile, edited throughout the program
    # time_start: Radar file date and time of scan
    # month: Month of case, supplied by user
    # d_beg, h_beg, min_beg, sec_beg, d_end, h_end, min_end, sec_end:
    #     Day, hour, minute, second of the beginning and end of a scan
    # rlons, rlats: Full volume geographic coordinates, longitude and latitude
    #     respectively
    # max_lons_c, max_lats_c: Centroid coordinates of storm objects
    # proj: Projection of Earth's surface to be used for accurate area and
    #     distance calculations
    hail_areas = []
    hail_centroid_lon = []
    hail_centroid_lat = []
    hail_storm_lon = []
    hail_storm_lat = []
    if np.max(REF_Hail2) > 50.0:
        for level in hailc.collections:
            for contour_poly in level.get_paths():
                for n_contour, contour in enumerate(
                        contour_poly.to_polygons()):
                    contour_a = np.asarray(contour[:])
                    xa = contour_a[:, 0]
                    ya = contour_a[:, 1]
                    polygon_new = geometry.Polygon([(i[0], i[1])
                                                    for i in zip(xa, ya)])
                    if n_contour == 0:
                        polygon = polygon_new
                    else:
                        polygon = polygon.difference(polygon_new)

                pr_area = (transform(proj, polygon).area *
                           units('m^2')).to('km^2')
                boundary = np.asarray(polygon.boundary.xy)
                polypath = Path(boundary.transpose())
                coord_map = np.vstack(
                    (rlons[0, :, :].flatten(), rlats[0, :, :].flatten())).T
                #Create an Mx2 array listing all the coordinates in field
                mask_hail = polypath.contains_points(coord_map).reshape(
                    rlons[0, :, :].shape)

                if pr_area > 2 * units('km^2'):
                    g = Geod(ellps='sphere')
                    dist_hail = np.zeros((np.asarray(max_lons_c).shape[0]))
                    for i in range(dist_hail.shape[0]):
                        distance_hail = g.inv(polygon.centroid.x,
                                              polygon.centroid.y,
                                              max_lons_c[i], max_lats_c[i])
                        dist_hail[i] = distance_hail[2] / 1000.
                    if np.min(np.asarray(dist_hail)) < 15.0:
                        hail_path = polypath
                        hail_areas.append(pr_area)
                        hail_centroid_lon.append(polygon.centroid.x)
                        hail_centroid_lat.append(polygon.centroid.y)
                        hail_storm_lon.append(max_lons_c[np.where(
                            dist_hail == np.min(dist_hail))[0][0]])
                        hail_storm_lat.append(max_lats_c[np.where(
                            dist_hail == np.min(dist_hail))[0][0]])
                        patch = PathPatch(polypath,
                                          facecolor='gold',
                                          alpha=.7,
                                          edgecolor='gold',
                                          linewidth=4)
                        ax.add_patch(patch)
                        #Add polygon to placefile
                        f.write('TimeRange: ' + str(time_start.year) + '-' +
                                str(month) + '-' + str(d_beg) + 'T' +
                                str(h_beg) + ':' + str(min_beg) + ':' +
                                str(sec_beg) + 'Z ' + str(time_start.year) +
                                '-' + str(month) + '-' + str(d_end) + 'T' +
                                str(h_end) + ':' + str(min_end) + ':' +
                                str(sec_end) + 'Z')
                        f.write('\n')
                        f.write("Color: 245 242 066 \n")
                        f.write('Line: 3, 0, "Hail Core Outline" \n')
                        for i in range(len(hail_path.vertices)):
                            f.write("%.5f" % (hail_path.vertices[i][1]))
                            f.write(", ")
                            f.write("%.5f" % (hail_path.vertices[i][0]))
                            f.write('\n')
                        f.write("End: \n \n")
    # Returning variables:
    # hail_areas: Hail core areas
    # hail_centroid_lon, hail_centroid_lat: Hail core centroid coordinates
    # hail_storm_lon, hail_storm_lat: Storm object centroids associated with
    #     the hail core
    # ax: Subplot object to be built on with each contour
    # f: Placefile, edited throughout the program
    return hail_areas, hail_centroid_lon, hail_centroid_lat, hail_storm_lon, hail_storm_lat, ax, f
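
# A self-contained sketch (illustrative names, not from the original script)
# of the proximity test applied above: a hail polygon is kept only if its
# centroid lies within 15 km of at least one storm-object centroid, with
# distances computed on a spherical Geod as in the function.
import numpy as np
from pyproj import Geod


def near_storm(centroid_lon, centroid_lat, storm_lons, storm_lats, max_km=15.0):
    g = Geod(ellps='sphere')
    _, _, dists = g.inv(np.full(len(storm_lons), centroid_lon),
                        np.full(len(storm_lons), centroid_lat),
                        np.asarray(storm_lons, dtype=float),
                        np.asarray(storm_lats, dtype=float))
    return np.min(dists) / 1000.0 < max_km

# e.g. near_storm(-97.5, 35.2, [-97.4, -96.9], [35.3, 35.0])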