def merge_eddies():
    """Command-line entry point: merge several eddy files into a single output file.

    Reads file names and options from the command line; when --include_var is
    not given, every variable found in the first input file is kept.
    """
    parser = EddyParser('Merge eddies')
    parser.add_argument('filename', nargs='+', help='all file to merge')
    parser.add_argument('out', help='output file')
    parser.add_argument('--add_rotation_variable', help='add rotation variables',
                        action='store_true')
    parser.add_argument('--include_var', nargs='+', type=str,
                        help='use only listed variable')
    args = parser.parse_args()

    if args.include_var is None:
        # No explicit selection: keep every variable present in the first file.
        with Dataset(args.filename[0]) as h:
            args.include_var = h.variables.keys()

    def _load(path):
        # Load one file, optionally augmented with rotation variables.
        data = TrackEddiesObservations.load_file(
            path, raw_data=True, include_vars=args.include_var)
        if args.add_rotation_variable:
            data = data.add_rotation_type()
        return data

    obs = _load(args.filename[0])
    for extra_path in args.filename[1:]:
        obs = obs.merge(_load(extra_path))
    obs.write_file(filename=args.out)
def merge_eddies():
    """CLI entry point merging several eddy observation files into one.

    All positional filenames are loaded and folded left-to-right with
    ``merge``; the result is written to the ``out`` path.
    """
    parser = EddyParser("Merge eddies")
    parser.add_argument("filename", nargs="+", help="all file to merge")
    parser.add_argument("out", help="output file")
    parser.add_argument(
        "--add_rotation_variable", help="add rotation variables", action="store_true"
    )
    parser.add_argument(
        "--include_var", nargs="+", type=str, help="use only listed variable"
    )
    args = parser.parse_args()

    if args.include_var is None:
        # Default to every variable stored in the first input file.
        with Dataset(args.filename[0]) as h:
            args.include_var = h.variables.keys()

    merged = None
    for path in args.filename:
        current = TrackEddiesObservations.load_file(
            path, raw_data=True, include_vars=args.include_var
        )
        if args.add_rotation_variable:
            current = current.add_rotation_type()
        # First file seeds the accumulator, the rest are merged into it.
        merged = current if merged is None else merged.merge(current)
    merged.write_file(filename=args.out)
def get_next_obs(i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs):
    """Instrumented wrapper around ``TrackEddiesObservations.get_next_obs``.

    Records a snapshot of the current track array and the candidate index in
    the module-level TRACKS / INDICES lists, then delegates unchanged.
    """
    # Copy so later in-place changes to ids["track"] do not alter the record.
    snapshot = ids["track"].copy()
    TRACKS.append(snapshot)
    INDICES.append(i_current)
    return TrackEddiesObservations.get_next_obs(
        i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs
    )
def anim():
    """CLI entry point: animate a single eddy track from an atlas file."""
    parser = EddyParser(
        """Anim eddy, keyboard shortcut : Escape => exit, SpaceBar => pause, left arrow => t - 1, right arrow => t + 1, + => speed increase of 10 %, - => speed decrease of 10 %"""
    )
    parser.add_argument("filename", help="eddy atlas")
    parser.add_argument("id", help="Track id to anim", type=int)
    parser.add_argument(
        "--intern",
        action="store_true",
        help="display intern contour inplace of outter contour",
    )
    parser.add_argument(
        "--keep_step", default=25, help="number maximal of step displayed", type=int
    )
    parser.add_argument("--cmap", help="matplotlib colormap used")
    parser.add_argument(
        "--time_sleep",
        type=float,
        default=0.01,
        help="Sleeping time in second between 2 frame",
    )
    parser.add_argument(
        "--infinity_loop", action="store_true", help="Press Escape key to stop loop"
    )
    args = parser.parse_args()

    # Load only the fields needed to draw the requested contour type.
    wanted = ["time", "track"]
    wanted.extend(TrackEddiesObservations.intern(args.intern, public_label=True))
    atlas = TrackEddiesObservations.load_file(args.filename, include_vars=wanted)
    one_track = atlas.extract_ids([args.id])

    animation = Anim(
        one_track,
        intern=args.intern,
        sleep_event=args.time_sleep,
        cmap=args.cmap,
        nb_step=args.keep_step,
    )
    animation.show(infinity_loop=args.infinity_loop)
================

Do Geo stat with frequency and compare with center count
method: :ref:`sphx_glr_python_module_10_tracking_diagnostics_pet_center_count.py`

"""
import py_eddy_tracker_sample

from matplotlib import pyplot as plt
from matplotlib.colors import LogNorm

from py_eddy_tracker.observations.tracking import TrackEddiesObservations

# %%
# Load an experimental med atlas over a period of 26 years (1993-2019)
# Anticyclonic eddies
a = TrackEddiesObservations.load_file(
    py_eddy_tracker_sample.get_demo_path(
        "eddies_med_adt_allsat_dt2018/Anticyclonic.zarr"
    )
)
# Cyclonic eddies
c = TrackEddiesObservations.load_file(
    py_eddy_tracker_sample.get_demo_path("eddies_med_adt_allsat_dt2018/Cyclonic.zarr")
)

# %%
# Parameters
# Bin size in degrees for the geographic grid
step = 0.125
# Grid bounds: (lon_min, lon_max, step), (lat_min, lat_max, step)
# NOTE(review): presumably Mediterranean extent — verify against plotting code
bins = ((-10, 37, step), (30, 46, step))
# Shared pcolormesh options; factor normalises counts by the atlas duration
kwargs_pcolormesh = dict(
    cmap="terrain_r", vmin=0, vmax=0.75, factor=1 / a.nb_days, name="count"
)
def merge(self, until=-1, raw_data=True):
    """Merge all the correspondance in one array with all fields

    Walks the dataset list again, building one TrackEddiesObservations that
    holds every observation of every track, in track order.

    :param int until: if not -1, stop after processing this many files from
        ``self.datasets[1:]`` (inferred from the loop guard below)
    :param bool raw_data: forwarded to dataset loading and to the output
        object (keep variables in raw/packed form)
    :return: the merged observations
    :rtype: TrackEddiesObservations
    """
    # Start loading identification again to save in the finals tracks
    # Load first file
    self.reset_dataset_cache()
    self.swap_dataset(self.datasets[0], raw_data=raw_data)
    # Start create netcdf to agglomerate all eddy
    logger.debug('We will create an array (size %d)', self.nb_obs)
    eddies = TrackEddiesObservations(
        size=self.nb_obs,
        track_extra_variables=self.current_obs.track_extra_variables,
        track_array_variables=self.current_obs.track_array_variables,
        array_variables=self.current_obs.array_variables,
        raw_data=raw_data)
    # All the value put at nan, necessary only for all end of track
    eddies['cost_association'][:] = default_fillvals['f4']
    # Calculate the index in each tracks, we compute in u4 and translate
    # in u2 (which are limited to 65535)
    logger.debug('Compute global index array (N)')
    eddies['n'][:] = uint16(
        arange(self.nb_obs, dtype='u4')
        - self.i_current_by_tracks.repeat(self.nb_obs_by_tracks))
    logger.debug('Compute global track array')
    # Track id repeated once per observation of that track
    eddies['track'][:] = arange(self.current_id).repeat(
        self.nb_obs_by_tracks)
    # Set type of eddy with first file
    eddies.sign_type = self.current_obs.sign_type
    # Fields to copy
    fields = self.current_obs.obs.dtype.descr
    # To know if the track start
    first_obs_save_in_tracks = zeros(self.i_current_by_tracks.shape,
                                     dtype=bool_)
    for i, file_name in enumerate(self.datasets[1:]):
        if until != -1 and i >= until:
            break
        logger.debug('Merge data from %s', file_name)
        # Load current file (we begin with second one)
        self.swap_dataset(file_name, raw_data=raw_data)
        # We select the list of id which are involve in the correspondance
        i_id = self[i]['id']
        # Index where we will write in the final object
        index_final = self.i_current_by_tracks[i_id]
        # First obs of eddies
        m_first_obs = ~first_obs_save_in_tracks[i_id]
        if m_first_obs.any():
            # Index in the previous file
            index_in = self[i]['in'][m_first_obs]
            # Copy all variable
            for field in fields:
                var = field[0]
                # cost_association is written later, aligned on the "out" obs
                if var == 'cost_association':
                    continue
                eddies[var][index_final[m_first_obs]] = self.previous_obs[
                    var][index_in]
            # Increment
            self.i_current_by_tracks[i_id[m_first_obs]] += 1
            # Active this flag, we have only one first by tracks
            first_obs_save_in_tracks[i_id] = True
            # Refresh write positions after the increment above
            index_final = self.i_current_by_tracks[i_id]
        if self.virtual:
            # If the flag virtual in correspondance is active,
            # the previous is virtual
            m_virtual = self[i]['virtual']
            if m_virtual.any():
                # Incrementing index
                # NOTE(review): skips slots reserved for the virtual
                # observations; presumably filled elsewhere — confirm
                self.i_current_by_tracks[i_id[m_virtual]] += self[i][
                    'virtual_length'][m_virtual]
                # Get new index
                index_final = self.i_current_by_tracks[i_id]
        # Index in the current file
        index_current = self[i]['out']
        # Copy all variable
        for field in fields:
            var = field[0]
            if var == 'cost_association':
                # Cost belongs to the link, stored on the previous slot
                eddies[var][index_final - 1] = self[i]['cost_value']
            else:
                eddies[var][index_final] = self.current_obs[var][
                    index_current]
        # Add increment for each index used
        self.i_current_by_tracks[i_id] += 1
        # Current file becomes the "previous" one for the next iteration
        self.previous_obs = self.current_obs
    return eddies
    # (tail of start_axes, whose header is outside this view)
    # Map axes covering the Mediterranean basin with equal aspect
    ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], aspect="equal")
    ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46)
    ax.set_title(title, weight="bold")
    return ax


def update_axes(ax, mappable=None):
    """Add a grid to *ax* and, when a mappable is given, a colorbar beside it."""
    ax.grid()
    if mappable:
        # Dedicated thin axes on the right edge of the figure for the colorbar
        plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))


# %%
# Load eddies dataset
cyclonic_eddies = TrackEddiesObservations.load_file(
    py_eddy_tracker_sample.get_demo_path(
        "eddies_med_adt_allsat_dt2018/Cyclonic.zarr"))
anticyclonic_eddies = TrackEddiesObservations.load_file(
    py_eddy_tracker_sample.get_demo_path(
        "eddies_med_adt_allsat_dt2018/Anticyclonic.zarr"))

# %%
# Load loopers dataset
loopers_med = TrackEddiesObservations.load_file(
    data.get_demo_path("loopers_lumpkin_med.nc"))

# %%
# Global view
# ===========
ax = start_axes("All drifters available in Med from Lumpkin dataset")
loopers_med.plot(ax, lw=0.5, color="r", ref=-10)
Network basic manipulation
==========================
"""
from matplotlib import pyplot as plt

import py_eddy_tracker.gui
from py_eddy_tracker import data
from py_eddy_tracker.observations.network import NetworkObservations
from py_eddy_tracker.observations.tracking import TrackEddiesObservations

# %%
# Load data
# ---------
# Load data where observations are put in same network but no segmentation
e = TrackEddiesObservations.load_file(data.get_path("c568803.nc"))
# FIXME : Must be rewrote
# Wrap longitudes into [-180, 180), keeping contours consistent with the
# shifted centre longitude (the transpose broadcasts the per-eddy lon over
# each contour point).
e.lon[:] = (e.lon + 180) % 360 - 180
e.contour_lon_e[:] = ((e.contour_lon_e.T - e.lon + 180) % 360 - 180 + e.lon).T
e.contour_lon_s[:] = ((e.contour_lon_s.T - e.lon + 180) % 360 - 180 + e.lon).T

# %%
# Do segmentation
# ---------------
# Segmentation based on maximum overlap, temporal window for candidates = 5 days
n = NetworkObservations.from_split_network(e, e.split_network(intern=False, window=5))

# %%
# Timeline
# --------

# %%