                          coord_system=args.coordinate_system, vert_ref=args.vertical_reference,
                          add_cast_files=args.cast_profiles, show_progress=args.show_progress,
                          parallel_write=args.parallel_write, error_files=args.error_files,
                          logfiles=args.export_log, weekstart_year=args.weekstart_year,
                          weekstart_week=args.weekstart_week, override_datum=args.coordinate_system,
                          max_gap_length=args.max_navigation_gap)
elif funcname == 'intel_processing':
    intel_process(args.files, outfold=args.output_folder, coord_system=args.coordinate_system,
                  vert_ref=args.vertical_reference, parallel_write=args.parallel_write,
                  force_coordinate_system=args.force_coordinate_system, process_mode=args.process_mode)
elif funcname == 'intel_service':
    intel_process_service(args.folder, outfold=args.output_folder, coord_system=args.coordinate_system,
                          vert_ref=args.vertical_reference, parallel_write=args.parallel_write,
                          force_coordinate_system=args.force_coordinate_system, process_mode=args.process_mode)
elif funcname == 'convert':
    convert_multibeam(args.files, input_datum=args.input_datum,
# Examples related to changing, subsetting, filtering and saving data, last updated 2/23/2022, Kluster 0.8.10
# uses the multibeam file hstb_kluster/test_data/0009_20170523_181119_FA2806.all
# Written by Eric Younkin

import numpy as np

from HSTB.kluster.fqpr_convenience import reload_data
from HSTB.kluster.fqpr_intelligence import intel_process
from HSTB.kluster import kluster_variables

# we start with one of the preferred processing steps from the data_processing example
_, fq = intel_process(r"C:\data_dir\0009_20170523_181119_FA2806.all")
fq = fq[0]  # this should just be a list of one element if you have just one sonar/day of data, take the first one to get the data

# or we can just reload if you have data from before
fq = reload_data(r"C:\data_dir\em2040_40111_05_23_2017")

# build out a polygon in geographic coordinates to get just a subset of data from this dataset (lon, lat)
polygon = np.array([[-122.47798556, 47.78949665], [-122.47798556, 47.78895117],
                    [-122.47771027, 47.78895117], [-122.47771027, 47.78949665]])

# return_soundings_in_polygon gets you the variables used in Points View, these are all 1d arrays of the same length
head, x, y, z, tvu, rejected, pointtime, beam = fq.return_soundings_in_polygon(polygon)
assert head.shape == x.shape == y.shape == z.shape == tvu.shape == rejected.shape == pointtime.shape == beam.shape
assert x.shape == (1911,)

# the rejected array is actually an array of integers, the sounding flags Kluster uses for rejecting/accepting soundings
print(kluster_variables.amplitude_detect_flag)  # added in kluster 0.8.10
print(kluster_variables.phase_detect_flag)  # added in kluster 0.8.10
print(kluster_variables.rejected_flag)
print(kluster_variables.accepted_flag)
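# a minimal sketch, not part of the original example: the flag array above can be combined with plain
# numpy boolean masking to filter the returned soundings, e.g. dropping everything marked rejected.
# this only touches the local arrays returned by return_soundings_in_polygon, it does not alter the
# data on disk
keep = rejected != kluster_variables.rejected_flag
x_kept, y_kept, z_kept, tvu_kept = x[keep], y[keep], z[keep], tvu[keep]
print('kept {} of {} soundings after dropping rejected points'.format(keep.sum(), keep.size))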
# when you drag new multibeam data into Kluster, it generates a new conversion action and organizes data into:
#   sonarmodelnumber_serialnumber_dateofsurvey
# which it gets from the multibeam data itself.  This is the Kluster Intelligence module, which will basically perform
# the convert_multibeam and process_multibeam actions for you, putting data in the right place and doing only those
# steps that are required.  For this reason it is recommended that you use the intelligence module rather than
# the core processing routines described in 1 and 2.  Learn more here: https://kluster.readthedocs.io/en/latest/indepth/intel.html

# the intel_process command will perform just like if you were to drag new files into Kluster.  You just provide all the
# files that you want to add, and Kluster Intelligence determines the type of each file, how to add it and which processing
# steps to take.  Those steps are all performed and you get the Fqpr object back

# just like all the others, either provide a list of files...
_, fq = intel_process([r"C:\data_dir\0009_20170523_181119_FA2806.all", r'C:\data_dir\sbet.out',
                       r'C:\data_dir\smrmsg.out', r'C:\data_dir\export_log.txt', r'C:\data_dir\mysvpfile.svp'],
                      coord_system='WGS84', vert_ref='ellipse')
# ...or a single file or a path to a directory full of files:
_, fq = intel_process(r"C:\data_dir")

# it will behave much like step 2, you probably won't notice a difference during the processing.
# what you should notice is that the output directory is now a folder named with the sonarmodelnumber_serialnumber_dateofsurvey of the file
# - note: fq is a list of the converted fqpr objects; since data is organized by model number, etc., you might end up
#   with multiple containers (see the sketch after this section)
fq[0].output_folder
# Out[10]: 'C:\\data_dir\\em2040_40111_05_23_2017'

# the last thing to mention is intel_process_service, which combines intel_process with folder monitoring.  The service
# will monitor a folder and add/process any files that you add to that directory (or are already in there)
_, fq = intel_process_service(r"C:\data_dir")  # this will lock up the console until you force it to quit
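# a minimal sketch, not part of the original example: since intel_process returns a list of Fqpr objects,
# one per sonarmodelnumber_serialnumber_dateofsurvey container, a directory with data from more than one
# sonar or day can give you several containers back.  Using the fq list returned by intel_process above
# (before the service call), you could loop over the containers, and reload_data (imported from
# HSTB.kluster.fqpr_convenience in the earlier example) can reopen any of those output folders in a later session
for container in fq:
    print(container.output_folder)
fq_reloaded = reload_data(r"C:\data_dir\em2040_40111_05_23_2017")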