def filter_subsample_timepoints(experiment_dir, interval=3):
    '''Return an annotation filter that keeps only timepoints spaced ~interval hours apart.

    Parameters:
        interval - subsampling interval in hours
    '''
    e_metadata = load_data.read_metadata(experiment_dir)
    timestamps = numpy.array(e_metadata['timestamps'])
    timepoints = e_metadata['timepoints']
    expt_start = timestamps[0]
    expt_end = timestamps[-1]

    # For each interval mark, keep the timepoint whose timestamp is closest to it.
    step = interval * 3600    # seconds
    i = 1
    timepoints_to_load = []
    while expt_start + step * i < expt_end:
        timepoints_to_load.append(
            timepoints[abs(timestamps - (expt_start + step * i)).argmin()])
        i += 1

    def filter(position_name, position_annotations, timepoint_annotations):
        if position_annotations['exclude']:
            return False
        return [timepoint in timepoints_to_load for timepoint in timepoint_annotations]
    return filter
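# Example usage (a minimal sketch; the experiment path and the 6 h interval are placeholders).
# The returned closure has the (position_name, position_annotations, timepoint_annotations)
# signature expected by load_data.filter_annotations:
#     experiment_dir = '/path/to/experiment'
#     annotations = load_data.read_annotations(experiment_dir)
#     subsampled = load_data.filter_annotations(
#         annotations, filter_subsample_timepoints(experiment_dir, interval=6))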
def reset_positions_with_offset(experiment_dir, offset):
    '''Modify position coordinates based on a fixed x-y offset

    Parameters:
        experiment_dir - str/pathlib.Path to experiment root
        offset - list giving the x-y offset (an optional third element offsets z)
    '''
    experiment_dir = pathlib.Path(experiment_dir)
    print(f'Modifying positions for {experiment_dir.name}')

    metadata = load_data.read_metadata(experiment_dir)
    new_metadata = metadata.copy()
    # Copy the positions dict explicitly: metadata.copy() is shallow, and the backup
    # written below must still hold the original coordinates.
    new_metadata['positions'] = dict(metadata['positions'])
    if len(offset) == 2:
        offset.extend([0])    # no z offset supplied

    try:
        time_label = time.strftime('%Y%m%d-%H%M-%S')
        for position in metadata['positions']:
            position_coords = metadata['positions'][position]
            new_metadata['positions'][position] = [
                position_coords[0] + offset[0],
                position_coords[1] + offset[1],
                position_coords[2] + offset[2]]

        # Back up the original metadata before overwriting it.
        with (experiment_dir / f'experiment_metadata_beforechangingpositions_{time_label}.json').open('w') as mdata_file:
            datafile.json_encode_legible_to_file(metadata, mdata_file)
        load_data.write_metadata(new_metadata, experiment_dir)
    except KeyboardInterrupt:
        pass
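# Example usage (a sketch; the path and offset values are placeholders, given in the same
# stage units as the stored position coordinates):
#     reset_positions_with_offset('/path/to/experiment', [120.5, -40.0])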
def make_multipass_measurements(experiment_root, annotations, adult_only=True):
    experiment_metadata = load_data.read_metadata(experiment_root)
    microns_per_pixel = 1.3 * 5 / (experiment_metadata['objective'] * experiment_metadata['optocoupler'])

    measures = [MultipassPoseMeasurements(microns_per_pixel=microns_per_pixel)]
    measurement_name = 'multipass_measures'

    if adult_only:
        annotations = load_data.filter_annotations(
            annotations, elegant_filters.filter_by_stage('adult')).copy()
    process_data.measure_worms(experiment_root, annotations, measures, measurement_name)
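# Example usage (a sketch, assuming annotations are read and pre-filtered much as in
# make_mask_measurements below; the experiment path is a placeholder):
#     experiment_root = '/path/to/experiment'
#     annotations = load_data.read_annotations(experiment_root)
#     annotations = load_data.filter_annotations(annotations, elegant_filters.filter_excluded)
#     make_multipass_measurements(experiment_root, annotations, adult_only=True)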
def reset_positions_manual(scope, experiment_dir, *annotation_filters, revert_z=False):
    '''Reset positions manually for an experiment (i.e. with a separate ris_widget window open)

    Parameters:
        scope - ScopeClient object as defined by scope.scope_client
        experiment_dir - str/pathlib.Path to experiment
        annotation_filters - optional filters used to isolate specific positions of interest
        revert_z - passed through to poll_positions

    Call with annotation filters like so:
        reset_position.reset_positions_manual(scope, experiment_dir,
            elegant_filters.filter_excluded, elegant_filters.filter_live_animals)
    '''
    experiment_dir = pathlib.Path(experiment_dir)
    print(f'Traversing {experiment_dir.name}')

    metadata = load_data.read_metadata(experiment_dir)
    if annotation_filters:
        experiment_annotations = load_data.read_annotations(experiment_dir)
        for filter in annotation_filters:
            experiment_annotations = load_data.filter_annotations(experiment_annotations, filter)
        positions = experiment_annotations.keys()
    else:
        positions = metadata['positions'].keys()

    new_positions = poll_positions(scope, metadata, positions, revert_z=revert_z)

    if new_positions:
        try:
            input('\nPress <Enter> to save positions; ctrl-c to abort')
            time_label = time.strftime('%Y%m%d-%H%M-%S')

            # Back up the original metadata before overwriting it.
            with (experiment_dir / f'experiment_metadata_beforechangingpositions_{time_label}.json').open('w') as mdata_file:
                datafile.json_encode_legible_to_file(metadata, mdata_file)

            metadata['positions'].update(new_positions)
            load_data.write_metadata(metadata, experiment_dir)
        except KeyboardInterrupt:
            pass
    else:
        print('No positions found to reset')
def _check_metadata_for_timepoints(experiment_root):
    pm_files = list(experiment_root.glob('*/position_metadata.json'))
    with pm_files[0].open('r') as pm_fp:
        position_metadata = json.load(pm_fp)

    if 'timepoint' not in position_metadata[0]:
        experiment_metadata = load_data.read_metadata(experiment_root)
        for pm_file in pm_files:
            position_root = pm_file.parent
            # Keep an untouched copy of the original position metadata.
            if not (position_root / 'position_metadata_original.json').exists():
                shutil.copyfile(
                    str(pm_file),
                    str(position_root / 'position_metadata_original.json'))

            with pm_file.open('r') as pm_fp:
                position_metadata = json.load(pm_fp)
            for metadata_entry, timepoint in zip(
                    position_metadata, experiment_metadata['timepoints']):
                # Was there a bug with the purging code that breaks this?
                metadata_entry['timepoint'] = timepoint
            with pm_file.open('w') as pm_fp:
                json.dump(position_metadata, pm_fp)
def make_mask_measurements(experiment_root, annotations=None, adult_only=True):
    #process_data.annotate(experiment_root, annotators=[annotate_timepoints]) # Why?
    experiment_metadata = load_data.read_metadata(experiment_root)
    microns_per_pixel = 1.3 * 5 / (experiment_metadata['objective'] * experiment_metadata['optocoupler'])

    measures = [MaskPoseMeasurements(microns_per_pixel=microns_per_pixel)]
    measurement_name = 'mask_measures'

    if annotations is None:
        annotations = load_data.read_annotations(experiment_root)
        annotations = load_data.filter_annotations(annotations, filter_excluded)
        annotations = load_data.filter_annotations(annotations, elegant_filters.filter_living_timepoints)
    if adult_only:
        annotations = load_data.filter_annotations(annotations, elegant_filters.filter_by_stage('adult'))
    process_data.measure_worms(experiment_root, annotations, measures, measurement_name)
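# Example usage (a sketch; with annotations=None the function reads and filters the
# experiment's annotations itself, so only a placeholder experiment root is needed):
#     make_mask_measurements('/path/to/experiment')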
    return experiment_images

if __name__ == "__main__":
    expt_dir = pathlib.Path(sys.argv[1])
    show_poses = True

    # Reuse an existing RisWidget window if one is already open in this session.
    try:
        rw
    except NameError:
        rw = ris_widget.RisWidget()
    if hasattr(rw, 'annotator'):
        rw.annotator.close()
        del rw.annotator

    # measurement_pipeline.propagate_stages(expt_dir)
    experiment_images = load_masks(expt_dir)

    annotation_fields = []
    annotation_fields.append(stage_field.StageField())
    if show_poses:
        metadata = load_data.read_metadata(expt_dir)
        pa = pose_annotation.PoseAnnotation.from_experiment_metadata(metadata, rw)
        annotation_fields.append(pa)
    ea = experiment_annotator.ExperimentAnnotator(
        rw, expt_dir.parts[-1], experiment_images, annotation_fields)
def process_experiment_with_filter(experiment_root, model, image_filter, mask_root=None,
        overwrite_existing=False, channels='bf', make_masks=True, do_annotations=True):
    '''
    Parameters:
        image_filter - timepoint filter passed to load_data.scan_experiment_dir
    '''
    if mask_root is None:
        mask_root = pathlib.Path(experiment_root) / 'derived_data' / 'mask'

    # Temporary hacks until the migration to new elegant is complete
    # (zpl-9000 no longer updates annotations automatically).
    process_data.update_annotations(experiment_root)
    elegant_hacks.propagate_stages(experiment_root)

    start_t = time.time()
    positions = load_data.scan_experiment_dir(experiment_root, timepoint_filter=image_filter, channels=channels)
    scan_t = time.time()
    print(f'scanning done after {(scan_t-start_t)} s')

    if make_masks:
        process = segment_images.segment_positions(positions, model, mask_root, use_gpu=True, overwrite_existing=False)
        if process.stderr:
            print(f'Errors during segmentation: {process.stderr}')
        segment_t = time.time()
        print(f'segmenting done after {(segment_t-scan_t)} s')

        with (mask_root / 'notes.txt').open('a+') as notes_file:
            notes_file.write(f'{datetime.datetime.today().strftime("%Y-%m-%dt%H%M")} These masks were segmented with model {model}\n')
    else:
        segment_t = scan_t    # no segmentation step; keeps the annotation timing print below well-defined
        print('No segmenting performed')

    if do_annotations:
        annotations = load_data.read_annotations(experiment_root)
        metadata = load_data.read_metadata(experiment_root)
        age_factor = metadata.get('age_factor', 1)
        width_estimator = worm_widths.WidthEstimator.from_experiment_metadata(metadata, age_factor)

        segment_images.annotate_poses_from_masks(positions, mask_root, annotations, overwrite_existing, width_estimator)
        load_data.write_annotations(experiment_root, annotations)

        annotation_t = time.time()
        print(f'annotation done after {(annotation_t - segment_t)} s')    # ~3.5 hr
    else:
        print('No annotations done')
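# Example usage (a sketch; the model name is a placeholder for whatever segment_images
# expects, and keep_all is a hypothetical timepoint_filter that keeps every brightfield
# timepoint found by load_data.scan_experiment_dir):
#     def keep_all(position_name, timepoint_name):
#         return True
#     process_experiment_with_filter('/path/to/experiment', 'my_segmentation_model',
#         keep_all, channels='bf', make_masks=True, do_annotations=True)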