from verde import get_region, inside


def _cut_and_scale(survey, region, data_region):
    """
    Cut a subsection from the original survey and scale it to the given region

    Parameters
    ----------
    survey : :class:`pandas.DataFrame`
        Original survey as a :class:`pandas.DataFrame` containing the
        following columns: ``longitude``, ``latitude`` and ``height``.
    region : tuple or list (optional)
        Region to which the survey points coordinates will be scaled. The
        boundaries must be passed in the following order: (``west``, ``east``,
        ``south``, ``north``, ...), defined on a geodetic coordinate system
        and in degrees. All subsequent boundaries will be ignored. If
        ``None``, the survey points won't be scaled.
    data_region : tuple or list (optional)
        Region where the original Great Britain magnetic dataset will be
        sampled. The boundaries must be passed in the following order:
        (``west``, ``east``, ``south``, ``north``, ...), defined on a geodetic
        coordinate system and in degrees. All subsequent boundaries will be
        ignored.

    Returns
    -------
    survey : :class:`pandas.DataFrame`
        DataFrame containing the coordinates of the observation points on a
        geodetic coordinate system. Longitudes and latitudes are in degrees,
        and heights in meters.
    """
    # Cut the data into the data_region
    inside_points = inside((survey.longitude, survey.latitude), data_region)
    survey = survey[inside_points].copy()
    # Scale survey coordinates to the passed region
    if region is not None:
        w, e, s, n = region[:4]
        longitude_min, longitude_max, latitude_min, latitude_max = get_region(
            (survey.longitude, survey.latitude)
        )
        survey["longitude"] = (e - w) / (longitude_max - longitude_min) * (
            survey.longitude - longitude_min
        ) + w
        survey["latitude"] = (n - s) / (latitude_max - latitude_min) * (
            survey.latitude - latitude_min
        ) + s
    return survey
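# --- Hypothetical usage sketch (not part of the original module) ---
# Shows how _cut_and_scale crops a tiny, made-up survey to ``data_region`` and
# rescales the remaining points into ``region``. The coordinate values below
# are illustrative assumptions, not real Great Britain survey data.
if __name__ == "__main__":
    import pandas as pd

    demo_survey = pd.DataFrame({
        "longitude": [-3.5, -2.0, -1.0, 2.5],
        "latitude": [51.0, 52.5, 54.0, 56.0],
        "height": [100.0, 250.0, 80.0, 120.0],
    })
    # The last point falls outside data_region and is dropped; the others are
    # rescaled into the target region (west=10, east=12, south=-2, north=0).
    scaled = _cut_and_scale(
        demo_survey, region=(10, 12, -2, 0), data_region=(-4, 0, 50, 55)
    )
    print(scaled)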
import time

import geopandas as gpd
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import verde as vd
from shapely import geometry

grid_parameters = ['MAGR', 'THC', 'KC', 'CTC']
plt.rcParams['figure.dpi'] = 120

dados = pd.read_csv('scrr_1039.csv')

#### Configure Geometry
dados['geometry'] = [
    geometry.Point(x, y) for x, y in zip(dados['UTME'], dados['UTMN'])
]
crs = "+proj=utm +zone=23 +south +ellps=WGS84 +datum=WGS84 +units=m +no_defs"
dados = gpd.GeoDataFrame(dados, geometry='geometry', crs=crs)

#### Bounds
dados = dados[vd.inside(
    (dados.UTME, dados.UTMN), region=[290000, 345000, 7455000, 7510000]
)]
coordinates = (dados.UTME.values, dados.UTMN.values)

### Counter
def timelapse(begin, str_process="@@"):
    """Print the elapsed process time since ``begin`` as minutes:seconds."""
    elapsed = int(np.floor(time.process_time() - begin))  # transform to int
    minutes, seconds = divmod(elapsed, 60)
    print(f"{str_process} timelapse: {minutes:02d}:{seconds:02d}")

#### Chaining configuration
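# A minimal sketch of the gridder that the "Chaining configuration" heading
# above appears to lead into, assuming a standard verde Chain (trend removal
# followed by spline interpolation) and assuming the CSV exposes the 'MAGR'
# column listed in grid_parameters. The degree, damping and spacing values are
# illustrative assumptions, not tuned parameters.
chain = vd.Chain([
    ("trend", vd.Trend(degree=1)),        # remove a regional linear trend
    ("spline", vd.Spline(damping=1e-8)),  # interpolate the residuals
])

begin = time.process_time()
chain.fit(coordinates, dados["MAGR"])
grid = chain.grid(
    region=[290000, 345000, 7455000, 7510000],
    spacing=500,
    data_names=["MAGR"],
)
timelapse(begin, str_process="MAGR gridding")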
""" import boule as bl import matplotlib.pyplot as plt import pyproj import verde as vd import harmonica as hm # Fetch the sample gravity data from South Africa data = hm.datasets.fetch_south_africa_gravity() # Slice a smaller portion of the survey data to speed-up calculations for this # example region = [18, 27, -34.5, -27] inside = vd.inside((data.longitude, data.latitude), region) data = data[inside] print("Number of data points:", data.shape[0]) print("Mean height of observations:", data.elevation.mean()) # Since this is a small area, we'll project our data and use Cartesian # coordinates projection = pyproj.Proj(proj="merc", lat_ts=data.latitude.mean()) easting, northing = projection(data.longitude.values, data.latitude.values) coordinates = (easting, northing, data.elevation) # Compute the gravity disturbance ellipsoid = bl.WGS84 data["gravity_disturbance"] = data.gravity - ellipsoid.normal_gravity( data.latitude, data.elevation)