def main(self, *args):
    self.assert_env()
    if not self.no_clear:
        self.clear()
    match_string = safe_list_get(args, 0)
    with Context(debug_mode=self.debug_mode):
        return self.run_zests(
            verbose=self.verbose,
            match_string=match_string,
            coverage=self.coverage,
            run_groups=self.run_groups,
        )
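# A minimal sketch of how this entry point behaves; the call sites below are
# illustrative, not taken from the source. The first positional argument, if
# any, becomes the match_string that filters which zests run:
#
#   app.main()              # match_string is None -> run all zests
#   app.main("it_returns")  # match_string == "it_returns" -> filtered run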
def run(cls, argv=None, exit=True):
    """
    ZBS: The plumbum subcommand startup sequence is complicated. During the
    default run() it instantiates this class and passes only the next
    argument, which prevents me from jamming dynamic switches into the
    class. So here I duplicate the argument I need, argv[1], onto the head
    of the list, and later I overload _parse_args() in order to pop those
    arguments back off.

    Also, "--help" would normally be handled by plumbum correctly, but
    these hacks prevent that, so I have to keep track of construct_fail and
    let it proceed so that an instance is correctly allocated, because the
    "help" commands only work on a functional instance (i.e. you cannot
    raise the Help exception during construction).
    """
    # Files spec'd to gen will be copied here for this job, and moved to
    # the job folder if the generator succeeds.
    cls.job_uuid = uuid.uuid4().hex
    cls.local_sources_tmp_folder = local.path(tempfile.gettempdir()) / cls.job_uuid
    cls.local_sources_tmp_folder.mkdir()

    cls.construct_fail = False
    if not argv or len(argv) < 2 or argv[1].startswith("--"):
        # Guard len(argv) > 1 so an empty argv list cannot raise IndexError
        if argv is not None and len(argv) > 1 and argv[1] == "--readme":
            # This is a crazy work-around to get the app instance
            # to construct so I can print the readme.
            cls.construct_fail = True
            inst = super(GenApp, cls).run(argv=["", "calib", "--job=foo"], exit=False)
            inst[0].readme()
            return 0

        cls.construct_fail = True
        log.error(
            "You must specify a generator as the first argument after 'gen'.\n"
            f"Options are {', '.join(GenApp.generator_klass_by_name.keys())}"
        )
        argv = ["gen", "--help"]

    if argv is not None:
        return super(GenApp, cls).run(
            argv=[utils.safe_list_get(argv, 1)] + argv, exit=exit
        )
    else:
        return super(GenApp, cls).run(argv=argv, exit=exit)
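# A worked example of the argv rewriting above (the generator name "calib"
# and its switch are illustrative):
#
#   incoming argv:  ["gen", "calib", "--n_edmans=3"]
#   rewritten argv: ["calib", "gen", "calib", "--n_edmans=3"]
#
# The duplicated head element lets the generator name act like a dynamic
# switch during construction; the overloaded _parse_args() mentioned in the
# docstring is then responsible for popping the duplicate back off before
# plumbum's normal parsing proceeds.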
def it_returns_default_on_none_list():
    assert utils.safe_list_get(None, 2, "bad") == "bad"

def it_returns_default_on_bad_list_get():
    assert utils.safe_list_get(["a", "b"], 2, "bad") == "bad"

def it_returns_value_on_in_bound_get():
    assert utils.safe_list_get(["a", "b"], 1, "bad") == "b"
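# A minimal sketch of the behavior these zests pin down; the actual
# implementation in plaster.tools.utils may differ:
#
#   def safe_list_get(lst, i, default=None):
#       # Return lst[i]; fall back to default when lst is None
#       # (TypeError) or i is out of bounds (IndexError).
#       try:
#           return lst[i]
#       except (IndexError, TypeError):
#           return default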
def report_assemble(self):
    """Assemble the report from its pieces. A giant Munch is returned"""
    if len(self._report_sections) == 0:
        return None

    report = Munch(**self.report_metadata)
    report.cells = []

    preamble_block = self._markdown_to_markdown_block(self._report_preamble)
    report.cells += [preamble_block]

    # LOAD all templates
    templates_by_name = {}
    for section_type, section_data in self._report_sections:
        if section_type == SectionType.TEMPLATE:
            templates_by_name[section_data] = utils.json_load_munch(
                self._nb_template_path(section_data)
            )

    # FIND all of the @IMPORT-MERGE blocks
    import_merge = []
    for _, template in templates_by_name.items():
        for cell in template.cells:
            if cell.cell_type == SectionType.CODE:
                first_line = utils.safe_list_get(cell.source, 0, "")
                if "# @IMPORT-MERGE" in first_line:
                    for line in cell.source:
                        if "import" in line:
                            import_merge += [line]

    # report.cells += [
    #     (
    #         SectionType.CODE,
    #         (
    #             "# Uncomment this line to restart kernel\n"
    #             "# from plaster.tools.ipynb_helpers.displays import restart_kernel; restart_kernel()\n"
    #         ),
    #     )
    # ]

    mpl_config_block = Munch(**self.code_block)
    mpl_config_block.source = [
        "import os\n",
        'os.environ["MPLCONFIGDIR"] = "/tmp"',
    ]
    report.cells += [mpl_config_block]

    import_merge += ["from plaster.tools.zplots import zplots\n"]
    import_merge = sorted(list(set(import_merge))) + ["z = zplots.setup()"]
    import_block = Munch(**self.code_block)
    import_block.source = import_merge
    report.cells += [import_block]

    for section_type, section_data in self._report_sections:
        if section_type == SectionType.CODE:
            lines = section_data
            block = Munch(**self.code_block)
            block.source = lines
            report.cells += [block]

        elif section_type == SectionType.MARKDOWN:
            block = self._markdown_to_markdown_block(section_data)
            report.cells += [block]

        elif section_type == SectionType.TEMPLATE:
            file_path = section_data
            template = templates_by_name[file_path]
            for cell in template.cells:
                if cell.cell_type == SectionType.CODE:
                    first_line = utils.safe_list_get(cell.source, 0, "")
                    if (
                        "@IMPORT-MERGE" not in first_line
                        and "@REMOVE-FROM-TEMPLATE" not in first_line
                    ):
                        block = Munch(**self.code_block)
                        block.source = cell.source
                        report.cells += [block]

                if cell.cell_type == SectionType.MARKDOWN:
                    block = Munch(**self.markdown_block)
                    block.source = cell.source
                    report.cells += [block]

    return report
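# A worked example of the @IMPORT-MERGE convention used above (cell contents
# are illustrative). A template code cell whose first line contains
# "# @IMPORT-MERGE" contributes only its import lines, which are deduped
# across all templates and sorted into the single import_block:
#
#   template A cell: ["# @IMPORT-MERGE\n", "import numpy as np\n"]
#   template B cell: ["# @IMPORT-MERGE\n", "import numpy as np\n",
#                     "import pandas as pd\n"]
#
#   import_block.source ->
#       ["from plaster.tools.zplots import zplots\n",
#        "import numpy as np\n",
#        "import pandas as pd\n",
#        "z = zplots.setup()"]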
def _step_4_find_peaks(
    aligned_composite_bg_removed_im,
    aligned_roi_rect,
    raw_mask_rects,
    border_size,
    field_df,
    sigproc_params,
):
    """
    Find peaks on the composite image

    TASK: Remove the mask rect checks and replace with the same masking
    logic that is now implemented in the alignment phase. That is, just
    remove the peaks from the source instead of in post-processing.
    """
    from skimage.feature import peak_local_max  # Defer slow import
    from scipy.stats import iqr

    n_outchannels, n_inchannels, n_cycles, dim = sigproc_params.channels_cycles_dim

    assert (
        aligned_composite_bg_removed_im.shape[0]
        == aligned_composite_bg_removed_im.shape[1]
    )
    aligned_dim, _ = aligned_composite_bg_removed_im.shape
    check.array_t(aligned_composite_bg_removed_im, is_square=True)

    hat_rad = sigproc_params.hat_rad
    brim_rad = sigproc_params.hat_rad + 1
    hat_mask, brim_mask = _hat_masks(hat_rad, brim_rad)

    kernel = imops.generate_gauss_kernel(1.0)
    kernel = kernel - kernel.mean()
    _fiducial_im = imops.convolve(aligned_composite_bg_removed_im, kernel)

    # Black out the convolution artifact around the perimeter of the _fiducial_im
    search_roi_rect = Rect(
        aligned_roi_rect.b + brim_rad,
        aligned_roi_rect.t - brim_rad,
        aligned_roi_rect.l + brim_rad,
        aligned_roi_rect.r - brim_rad,
    )
    search_roi = search_roi_rect.roi()
    composite_fiducial_im = np.zeros_like(aligned_composite_bg_removed_im)

    # Use Inter-Quartile Range for some easy filtering
    _iqr = 0
    if sigproc_params.iqr_rng is not None:
        _iqr = iqr(
            _fiducial_im[search_roi],
            rng=(100 - sigproc_params.iqr_rng, sigproc_params.iqr_rng),
        )

    composite_fiducial_im[search_roi] = (_fiducial_im[search_roi] - _iqr).clip(min=0)

    locs = peak_local_max(
        composite_fiducial_im,
        min_distance=hat_rad,
        threshold_abs=sigproc_params.threshold_abs,
    )

    # Emergency exit to prevent memory overflows
    # check.affirm(len(locs) < 7000, f"Too many peaks {len(locs)}")

    shift = field_df.set_index("cycle_i").sort_index()[["shift_y", "shift_x"]].values
    shift_y = shift[:, 0]
    shift_x = shift[:, 1]

    # Discard any peak in any mask_rect
    # ALIGN the mask rects to the composite coordinate system
    aligned_mask_rects = []
    for channel in range(sigproc_params.n_output_channels):
        channel_rects = safe_list_get(raw_mask_rects, channel, [])
        for cycle in range(n_cycles):
            for rect in safe_list_get(channel_rects, cycle, []):
                yx = XY(rect[0], rect[1])
                hw = WH(rect[2], rect[3])
                yx += XY(border_size, border_size) - XY(shift_x[cycle], shift_y[cycle])
                aligned_mask_rects += [(yx[0], yx[1], yx[0] + hw[0], yx[1] + hw[1])]

    aligned_mask_rects = np.array(aligned_mask_rects)
    if aligned_mask_rects.shape[0] > 0:
        # To compare every loc with every mask rect we use the tricky np.fn.outer()
        y_hits = np.greater_equal.outer(locs[:, 0], aligned_mask_rects[:, 0])
        y_hits &= np.less.outer(locs[:, 0], aligned_mask_rects[:, 2])

        x_hits = np.greater_equal.outer(locs[:, 1], aligned_mask_rects[:, 1])
        x_hits &= np.less.outer(locs[:, 1], aligned_mask_rects[:, 3])

        inside_rect = x_hits & y_hits  # Inside a rect if x and y are both inside
        locs_to_keep = ~np.any(inside_rect, axis=1)  # Reject if inside any masked rect
        locs = locs[locs_to_keep]

    circle_im = np.zeros((aligned_dim, aligned_dim))

    center = aligned_dim / 2

    peak_rows = []
    for field_peak_i, loc in enumerate(locs):
        if sigproc_params.radial_filter is not None:
            radius = math.sqrt((loc[0] - center) ** 2 + (loc[1] - center) ** 2)
            radius /= center
            if radius >= sigproc_params.radial_filter:
                continue

        imops.set_with_mask_in_place(circle_im, brim_mask, 1, loc=loc, center=True)

        peak_rows += [
            Munch(
                peak_i=0,
                field_peak_i=field_peak_i,
                aln_y=int(loc[0]),
                aln_x=int(loc[1]),
            )
        ]

    peak_df = pd.DataFrame(peak_rows)

    return peak_df, circle_im, aligned_mask_rects
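# A minimal, self-contained sketch of the np.fn.outer() masking trick used
# above; the coordinates are made up for illustration:
#
#   import numpy as np
#
#   locs = np.array([[5, 5], [20, 20]])   # (n_locs, 2) as (y, x)
#   rects = np.array([[0, 0, 10, 10]])    # (n_rects, 4) as (y0, x0, y1, x1)
#
#   y_hits = np.greater_equal.outer(locs[:, 0], rects[:, 0])
#   y_hits &= np.less.outer(locs[:, 0], rects[:, 2])
#   x_hits = np.greater_equal.outer(locs[:, 1], rects[:, 1])
#   x_hits &= np.less.outer(locs[:, 1], rects[:, 3])
#
#   inside = x_hits & y_hits              # (n_locs, n_rects) boolean grid
#   keep = ~np.any(inside, axis=1)        # -> array([False, True])
#
# Each outer() call compares every peak against every rect in one shot,
# yielding an (n_locs, n_rects) matrix instead of a Python double loop.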
def report_assemble(self):
    """Assemble the report from its pieces. A giant Munch is returned"""
    report = Munch(**self.report_metadata)
    report.cells = []

    preamble_block = self._markdown_to_markdown_block(self._report_preamble)
    report.cells += [preamble_block]

    # LOAD all templates
    templates_by_name = {}
    for section_type, section_data in self._report_sections:
        if section_type == "template":
            file_path = section_data
            templates_by_name[file_path] = utils.json_load_munch(
                f"./plaster/gen/nb_templates/{file_path}"
            )

    # FIND all of the @IMPORT-MERGE blocks
    import_merge = []
    for _, template in templates_by_name.items():
        for cell in template.cells:
            if cell.cell_type == "code":
                first_line = utils.safe_list_get(cell.source, 0, "")
                if "# @IMPORT-MERGE" in first_line:
                    for line in cell.source:
                        if "import" in line:
                            import_merge += [line]

    import_merge += ["from plaster.tools.zplots import zplots\n"]
    import_merge = sorted(list(set(import_merge))) + ["z=zplots.setup()"]
    import_block = Munch(**self.code_block)
    import_block.source = import_merge
    report.cells += [import_block]

    for section_type, section_data in self._report_sections:
        if section_type == "code":
            lines = section_data
            block = Munch(**self.code_block)
            block.source = lines
            report.cells += [block]

        elif section_type == "markdown":
            block = self._markdown_to_markdown_block(section_data)
            report.cells += [block]

        elif section_type == "template":
            file_path = section_data
            template = templates_by_name[file_path]
            for cell in template.cells:
                if cell.cell_type == "code":
                    first_line = utils.safe_list_get(cell.source, 0, "")
                    if (
                        "@IMPORT-MERGE" not in first_line
                        and "@REMOVE-FROM-TEMPLATE" not in first_line
                    ):
                        block = Munch(**self.code_block)
                        block.source = cell.source
                        report.cells += [block]

                if cell.cell_type == "markdown":
                    block = Munch(**self.markdown_block)
                    block.source = cell.source
                    report.cells += [block]

    return report
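# A minimal sketch of how the assembled report might be written to disk; the
# "builder" variable and output path are assumptions for illustration. The
# returned Munch mirrors the .ipynb JSON layout (metadata plus a list of
# cells), and Munch is a dict subclass, so a plain json.dump suffices:
#
#   import json
#
#   report = builder.report_assemble()
#   with open("report.ipynb", "w") as f:
#       json.dump(report, f, indent=2)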