Example #1
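These snippets omit their imports. A plausible preamble, inferred from the identifiers they use (the module paths are assumptions and may not match the actual ipso_phen layout):

import os

import numpy as np

# Assumed module paths, guessed from the identifiers in the snippets below;
# the real package layout may differ.
from ipso_phen.ipapi.base.ipt_loose_pipeline import LoosePipeline
from ipso_phen.ipapi.base.ip_abstract import BaseImageProcessor
from ipso_phen.ipapi.base.ipt_abstract_analyzer import IptBaseAnalyzer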
 def test_mask_transformation(self):
     """Keep countours near ROIs: Test that when using the basic mask generated script this tool produces a mask"""
     op = IptKeepCountoursNearRois()
     op.apply_test_values_overrides(use_cases=("Mask cleanup",))
     script = LoosePipeline.load(
         "./ipso_phen/ipapi/samples/pipelines/test_cleaners.json"
     )
     script.add_module(operator=op, target_group="grp_test_cleaners")
     wrapper = BaseImageProcessor(
         "./ipso_phen/ipapi/samples/images/arabido_small.jpg",
         database=None,
     )
     res = script.execute(src_image=wrapper, silent_mode=True)
     self.assertTrue(
         res, "Failed to process Keep countours near ROIs with test script"
     )
     self.assertIsInstance(
         wrapper.mask, np.ndarray, "Empty result for Range threshold"
     )
     self.assertEqual(len(wrapper.mask.shape), 2, "Masks can only have one channel")
     self.assertEqual(
         np.sum(wrapper.mask[wrapper.mask != 255]),
         0,
         "Masks values can only be 0 or 255",
     )
 def test_feature_out(self):
     """Split overlapped ellipses: Test that when using the basic mask generated script this tool extracts features"""
     op = IptSplitOverlappedEllipses()
     op.apply_test_values_overrides(use_cases=("Feature extraction",))
     script = LoosePipeline.load(
         "./ipso_phen/ipapi/samples/pipelines/test_extractors.json"
     )
     script.add_module(operator=op, target_group="grp_test_extractors")
     wrapper = BaseImageProcessor(
         "./ipso_phen/ipapi/samples/images/arabido_small.jpg",
         database=None,
     )
     res = script.execute(src_image=wrapper, silent_mode=True)
     self.assertIsInstance(
         op,
         IptBaseAnalyzer,
         "Split overlapped ellipses must inherit from ipso_phen.ipapi.iptBaseAnalyzer",
     )
     self.assertTrue(
         res, "Failed to process Split overlapped ellipses with test script"
     )
     self.assertNotEqual(
         first=len(wrapper.csv_data_holder.data_list),
         second=0,
         msg="Split overlapped ellipses returned no data",
     )
Example #3
 def test_extractors_pipeline(self):
     """Loose pipeline: Test the extractors' test pipeline"""
     pipeline = LoosePipeline.load(
         os.path.join(
             self.pipeline_dir_path,
             "test_extractors.json",
         )
     )
     wrapper = BaseImageProcessor(
         os.path.join(
             os.path.dirname(__file__),
             "..",
             "ipso_phen",
             "ipapi",
             "samples",
             "images",
             "arabido_small.jpg",
         ),
         database=None,
     )
     res = pipeline.execute(src_image=wrapper, silent_mode=True)
     self.assertTrue(res, "Failed to process extractors test pipeline")
     self.assertIsInstance(
         wrapper.mask, np.ndarray, "Empty result for Range threshold"
     )
     self.assertEqual(len(wrapper.mask.shape), 2, "Masks can only have one channel")
     self.assertEqual(
         np.sum(wrapper.mask[wrapper.mask != 255]),
         0,
         "Mask values can only be 0 or 255",
     )
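The same three assertions close several of these tests: the mask must be a NumPy array, it must be two-dimensional (single channel), and summing every pixel that is not 255 must give 0, which forces every value to be either 0 or 255. A hypothetical helper (the name assert_valid_mask is not part of ipso_phen) that factors them out:

 def assert_valid_mask(self, wrapper, tool_name=""):
     """Hypothetical helper: the mask invariants shared by the tests above."""
     self.assertIsInstance(wrapper.mask, np.ndarray, f"{tool_name}: empty mask")
     self.assertEqual(len(wrapper.mask.shape), 2, "Masks can only have one channel")
     # A pixel that is not 255 can only contribute 0 to this sum if it is 0
     self.assertEqual(
         np.sum(wrapper.mask[wrapper.mask != 255]),
         0,
         "Mask values can only be 0 or 255",
     )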
Example #4
 def test_image_transformation(self):
     """Crop: Test that when an image goes in, an image comes out"""
     op = IptCrop()
     op.apply_test_values_overrides(use_cases=("Pre processing",))
     wrapper = BaseImageProcessor(
         "./ipso_phen/ipapi/samples/images/arabido_small.jpg",
         database=None,
     )
     res = op.process_wrapper(wrapper=wrapper)
     self.assertTrue(res, "Failed to process Crop")
     self.assertIsInstance(op.result, np.ndarray, "Empty result for Crop")
     """Crop: Test that when using the basic mask generated script this tool extracts features"""
     op = IptCrop()
     op.apply_test_values_overrides(use_cases=("Feature extraction",))
     script = LoosePipeline.load(
         "./ipso_phen/ipapi/samples/pipelines/test_extractors.json"
     )
     script.add_module(operator=op, target_group="grp_test_extractors")
     wrapper = BaseImageProcessor(
         "./ipso_phen/ipapi/samples/images/arabido_small.jpg",
         database=None,
     )
     res = script.execute(src_image=wrapper, silent_mode=True)
     self.assertIsInstance(
         op,
         IptBaseAnalyzer,
         "Crop must inherit from ipso_phen.ipapi.iptBaseAnalyzer",
     )
     self.assertTrue(res, "Failed to process Crop with test script")
     self.assertNotEqual(
         first=len(wrapper.csv_data_holder.data_list),
         second=0,
         msg="Crop returned no data",
     )
 def test_mask_transformation(self):
     """Clean horizontal noise (Hough method): Test that when using the basic mask generated script this tool produces a mask"""
     op = IptCleanHorizontalNoiseHough()
     op.apply_test_values_overrides(use_cases=("Mask cleanup",))
     script = LoosePipeline.load(
         "./ipso_phen/ipapi/samples/pipelines/test_cleaners.json"
     )
     script.add_module(operator=op, target_group="grp_test_cleaners")
     wrapper = BaseImageProcessor(
         "./ipso_phen/ipapi/samples/images/18HP01U17-CAM11-20180712221558.bmp",
         database=None,
     )
     res = script.execute(src_image=wrapper, silent_mode=True)
     self.assertTrue(
         res,
         "Failed to process Clean horizontal noise (Hough method) with test script",
     )
     self.assertIsInstance(
         wrapper.mask, np.ndarray, "Empty result for Range threshold"
     )
     self.assertEqual(len(wrapper.mask.shape), 2, "Masks can only have one channel")
     self.assertEqual(
         np.sum(wrapper.mask[wrapper.mask != 255]),
         0,
         "Mask values can only be 0 or 255",
     )
Example #6
 def test_load(self):
     """Loose pipeline: Try loading all test pipelines"""
     for file_name in [
         "test_cleaners.json",
         "sample_pipeline_arabidopsis.json",
         "test_extractors.json",
         "tutorial.json",
     ]:
         pipeline = LoosePipeline.load(
             os.path.join(
                 self.pipeline_dir_path,
                 file_name,
             )
         )
         self.assertIsInstance(
             pipeline, LoosePipeline, msg=f'Failed to load "{file_name}"'
         )
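Because all four files load inside one test method, the first failure stops the loop and hides the rest. A sketch of the same loop using unittest's subTest, which reports each pipeline separately (pipeline_files is a hypothetical name for the list above):

     for file_name in pipeline_files:  # the same four file names as above
         with self.subTest(pipeline=file_name):
             pipeline = LoosePipeline.load(
                 os.path.join(self.pipeline_dir_path, file_name)
             )
             self.assertIsInstance(pipeline, LoosePipeline)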
Example #7
def prepare_process_muncher(progress_callback, abort_callback, **kwargs):
    # "database_info" arrives as a repr-style string with single quotes;
    # swap them so json.loads accepts it
    dbi = DbInfo.from_json(
        json_data=json.loads(kwargs["database_info"].replace("'", '"'))
    )
    output_folder = get_user_path(
        user_name=kwargs["current_user"],
        key="analysis_folder",
        extra=dbi.display_name.lower(),
    )
    database = db_info_to_database(dbi)
    pp = PipelineProcessor(
        dst_path=output_folder,
        overwrite=kwargs["overwrite_existing"],
        seed_output=False,
        group_by_series=kwargs["generate_series_id"],
        store_images=False,
        database=database,
    )
    pp.progress_callback = progress_callback
    pp.abort_callback = abort_callback
    pp.ensure_root_output_folder()
    pp.grab_files_from_data_base(
        experiment=dbi.display_name.lower(),
        **database.main_selector,
    )
    pp.script = LoosePipeline.from_json(json_data=kwargs["script"])
    if not pp.accepted_files:
        return {
            "current": 100,
            "total": 100,
            "status": "No images in task!",
            "result": 42,
        }

    try:
        pp.multi_thread = int(kwargs.get("thread_count", 1))
    except (TypeError, ValueError):
        pp.multi_thread = False

    return {
        "pipeline_processor": pp,
        "output_folder": output_folder,
    }
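A sketch of how prepare_process_muncher might be called. The keyword names are the ones the function reads above; every value, the callback signatures, and the DbInfo fields are illustrative assumptions:

state = prepare_process_muncher(
    progress_callback=lambda *args, **kw: None,  # assumed signature
    abort_callback=lambda *args, **kw: False,    # assumed signature
    # repr-style string, matching the quote-swapping json.loads above;
    # fields guessed from the DbInfo(...) call in Example #8
    database_info="{'display_name': 'MyExperiment', 'target': '/data/images', 'dbms': 'pandas'}",
    current_user="demo_user",
    overwrite_existing=False,
    generate_series_id=True,
    thread_count=4,
    script={},  # placeholder pipeline dict (normally json.load of a saved pipeline)
)
pp = state.get("pipeline_processor")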
Example #8
def launch(**kwargs):
    def exit_error_message(msg: str) -> None:
        print(msg)
        logger.error(msg)

    start = timer()

    if "user" in kwargs and "password" in kwargs:
        dbp.master_password = kwargs.pop("user"), kwargs.pop("password")

    # Script
    script = kwargs.get("script", None)
    if script is not None and os.path.isfile(script):
        with open(script, "r") as f:
            script = json.load(f)
    kwargs["script"] = script

    # Image(s)
    image = kwargs.get("image", None)
    image_list = kwargs.get("image_list", None)
    image_folder = kwargs.get("image_folder", None)
    src_count = (
        1
        if "experiment" in kwargs and "database" in kwargs
        else len([src for src in [image, image_list, image_folder] if src is not None])
    )
    if src_count == 0:
        exit_error_message("Missing source images")
        return 1
    elif src_count > 1:
        exit_error_message("Too many sources")
        return 1

    if image is not None:
        kwargs["images"] = [image]
    elif image_list is not None:
        with open(image_list, "r") as f:
            kwargs["images"] = [img.replace("\n", "") for img in f.readlines()]
    elif image_folder is not None:
        img_lst = ImageList((".jpg", ".tiff", ".png", ".bmp"))
        img_lst.add_folder(image_folder)
        kwargs["images"] = img_lst.filter(masks=None)

    # State
    stored_state = kwargs.pop("stored_state", None)

    res = restore_state(blob=stored_state, overrides=kwargs)

    # Retrieve images
    image_list_ = res.get("images", None)

    # Build database
    db_data = res.get("database_data", None)
    database = res.get("database", None)
    experiment = res.get("experiment", None)
    if db_data is None:
        if database is None:
            db = None
        else:
            db = dbf.db_info_to_database(
                DbInfo(
                    display_name=experiment,
                    target=database,
                    dbms="pandas",
                )
            )
    else:
        db = dbf.db_info_to_database(dbb.DbInfo(**db_data))

    if experiment is not None:
        if "sub_folder_name" not in res or not res["sub_folder_name"]:
            res["sub_folder_name"] = experiment
        if "csv_file_name" not in res or not res["csv_file_name"]:
            res["csv_file_name"] = f"{experiment.lower()}_raw_data"
    else:
        db = dbf.db_info_to_database(dbb.DbInfo(**db_data))

    # Retrieve output folder
    output_folder_ = res.get("output_folder", None)
    if not output_folder_:
        exit_error_message("Missing output folder")
        return 1
    elif res.get("sub_folder_name", ""):
        output_folder_ = os.path.join(output_folder_, res["sub_folder_name"], "")
    else:
        output_folder_ = os.path.join(output_folder_, "")
    force_directories(output_folder_)
    csv_file_name = res.get("csv_file_name", None)
    if not csv_file_name:
        exit_error_message("Missing output file name")
        return 1
    if IS_USE_MULTI_THREAD and "thread_count" in res:
        try:
            mpc = int(res["thread_count"])
        except (TypeError, ValueError):
            mpc = False
    else:
        mpc = False

    try:
        if isinstance(res["script"], str) and os.path.isfile(res["script"]):
            script = LoosePipeline.load(res["script"])
        elif isinstance(res["script"], dict):
            script = LoosePipeline.from_json(json_data=res["script"])
        else:
            exit_error_message("Failed to load script: Unknown error")
            return 1
    except Exception as e:
        exit_error_message(f"Failed to load script: {repr(e)}")
        return 1

    # Build pipeline processor
    pp = PipelineProcessor(
        database=None if db is None else db.copy(),
        dst_path=output_folder_,
        overwrite=res["overwrite"],
        seed_output=res["append_time_stamp"],
        group_by_series=res["generate_series_id"],
        store_images=False,
    )
    if not image_list_:
        pp.grab_files_from_data_base(
            experiment=db.db_info.display_name.lower(),
            order_by="plant",
            **db.main_selector,
        )
    else:
        pp.accepted_files = image_list_

    if res.get("randomize", False) is True:
        random.shuffle(pp.accepted_files)

    logger.info("Process summary")
    logger.info("_______________")
    logger.info(f'Database: {res.get("database_data", None)}')
    logger.info(f'Output folder: {res["output_folder"]}')
    logger.info(f'CSV file name: {res["csv_file_name"]}')
    logger.info(f'Overwrite data: {res["overwrite"]}')
    logger.info(f'Subfolder name: {res["sub_folder_name"]}')
    logger.info(f'Append timestamp to root folder: {res["append_time_stamp"]}')
    logger.info(f'Generate series ID: {res["generate_series_id"]}')
    logger.info(f'Series ID time delta allowed: {res["series_id_time_delta"]}')
    logger.info(f'Build annotation ready CSV: {res["build_annotation_csv"]}')
    logger.info(f"Images: {len(pp.accepted_files)}")
    logger.info(f"Concurrent processes count: {mpc}")
    logger.info(f"Script summary: {str(script)}")

    if pp.accepted_files is None or len(pp.accepted_files) < 1:
        exit_error_message("No images to process")
        return 1

    pp.progress_callback = kwargs.get("progress_callback", None)
    pp.error_callback = kwargs.get("error_callback", None)
    pp.ensure_root_output_folder()

    pp.script = script
    if not pp.accepted_files:
        logger.error("Nothing to precess")
        return 1

    # Process data
    groups_to_process = pp.prepare_groups(res["series_id_time_delta"])
    if res["build_annotation_csv"]:
        try:
            if pp.options.group_by_series:
                files, luids = map(list, zip(*groups_to_process))
                wrappers = [
                    file_handler_factory(files[i], db)
                    for i in [luids.index(x) for x in set(luids)]
                ]
            else:
                wrappers = [
                    file_handler_factory(f, db)
                    for f in tqdm.tqdm(groups_to_process, desc="Building annotation CSV")
                ]
            pd.DataFrame.from_dict(
                {
                    "plant": [i.plant for i in wrappers],
                    "date_time": [i.date_time for i in wrappers],
                    "disease_index": "",
                }
            ).sort_values(
                by=["plant", "date_time"],
                axis=0,
                na_position="first",
                ascending=True,
            ).to_csv(
                os.path.join(
                    pp.options.dst_path,
                    f"{csv_file_name}_diseaseindex.csv",
                ),
                index=False,
            )
        except Exception:
            prefix = "FAIL"
            logger.exception("Unable to build disease index file")
        else:
            prefix = "SUCCESS"
            logger.info("Built disease index file")
        print(f"{prefix} - Disease index file")

    groups_to_process_count = len(groups_to_process)
    if groups_to_process_count > 0:
        pp.multi_thread = mpc
        pp.process_groups(groups_list=groups_to_process)

    # Merge dataframe
    pp.merge_result_files(csv_file_name=csv_file_name + ".csv")
    logger.info(
        f"Processed {groups_to_process_count} groups/images in {format_time(timer() - start)}"
    )

    # Build videos

    print("Done, see logs for more details")

    return 0
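An illustrative call to launch(). The keyword names are the ones the function reads above (assuming restore_state folds these overrides into res); the paths and values are placeholders:

rc = launch(
    script="./pipelines/sample_pipeline_arabidopsis.json",  # loaded via json.load above
    experiment="arabidopsis",      # also seeds the default sub folder and CSV names
    database="./samples/images",   # target for the pandas-backed DbInfo
    output_folder="./analysis",
    overwrite=False,
    append_time_stamp=True,
    generate_series_id=False,
    series_id_time_delta=20 * 60,  # placeholder series window
    build_annotation_csv=False,
    thread_count=4,
)
assert rc == 0  # 0 on success, 1 on any of the error paths above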