Example #1
def preprocess_parameters_for_cube_creator(elem_args):
    """
    This function does two things:
        1) convert class_ids from
            name: class_ids@text, values: [0, 1, 2, 3]
           to
            name: class_ids, values: {"@text": [0, 1, 2, 3]}
        2) type conversion for "values" field.

    Parameters
    ----------
    elem_args: strictyaml.YAML object
        (contains dict inside)

    Returns
    -------
    new_elem_args: dict
    """

    for param_portion in elem_args["parameters"]:
        name = str(param_portion["name"])
        if name.startswith("class_ids"):
            validator = Float() | Seq(Float())
        else:
            validator = Seq(ARTM_TYPES[name])
        param_schema = Map({
            "name": Str(),
            "values": validator
        })
        param_portion.revalidate(param_schema)
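
A minimal sketch of the two-pass validation this relies on, assuming strictyaml's dirty_load and revalidate; the parameter data and the loose first-pass schema here are made up:

from strictyaml import Any, Float, Map, Seq, Str, dirty_load

raw = """
parameters:
- name: class_ids@text
  values: [0, 1, 2, 3]
"""
# First pass: accept anything under "values".
loose = Map({"parameters": Seq(Map({"name": Str(), "values": Seq(Any())}))})
doc = dirty_load(raw, loose, allow_flow_style=True)

# Second pass: revalidate one portion, coercing its values to floats.
portion = doc["parameters"][0]
portion.revalidate(Map({"name": Str(), "values": Float() | Seq(Float())}))
print(portion["values"].data)  # [0.0, 1.0, 2.0, 3.0]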
Example #2
    def _get_info(cls):

        # https://hitchdev.com/strictyaml
        schema = Map({
            Optional("knobs"):
            Map({
                Optional("log_to_console"): Bool(),
                Optional("log_level_debug"): Bool(),
            })
            | EmptyDict(),
            "globals":
            Map({
                "topic_prefix":
                Str(),
                Optional(
                    "reconnect_interval",
                    default=const.MQTT_DEFAULT_RECONNECT_INTERVAL,
                ):
                Float(),
                Optional("poll_interval",
                         default=const.MYQ_DEFAULT_POLL_INTERVAL):
                Float(),
                Optional(
                    "periodic_mqtt_report",
                    default=const.DEFAULT_PERIODIC_MQTT_REPORT,
                ):
                Float(),
                Optional("user_agent"):
                Str(),
            }),
            "mqtt":
            Map({
                "host":
                Str(),
                Optional("client_id", default=const.MQTT_DEFAULT_CLIENT_ID):
                Str(),
                Optional("username"):
                Str(),
                Optional("password"):
                Str(),
            }),
            "myq":
            Map({
                "email": Email(),
                "password": Str(),
            }),
            Optional("alias"):
            MapPattern(Str(), Str()) | EmptyDict(),
        })

        if not cls._info:
            config_filename = cls._get_config_filename()
            logger.info("loading yaml config file %s", config_filename)
            with open(config_filename, "r") as ymlfile:
                raw_cfg = load(ymlfile.read(), schema).data
                cls._parse_raw_cfg(raw_cfg)
        return cls._info
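
The Optional(..., default=...) entries above mean missing keys are filled in at load time. A minimal sketch with a hypothetical two-key schema (the 5.0 default stands in for const.MQTT_DEFAULT_RECONNECT_INTERVAL):

from strictyaml import Float, Map, Optional, Str, load

globals_schema = Map({
    "topic_prefix": Str(),
    Optional("reconnect_interval", default=5.0): Float(),
})
print(load("topic_prefix: myq", globals_schema).data)
# {'topic_prefix': 'myq', 'reconnect_interval': 5.0}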
Example #3
    def _get_value_schema(cls,
                          value_type: ScalarValidator = Float(),
                          has_unit=True) -> Validator:
        """Schema for parameter value."""
        map_dict = {"value": Float() | Str()}
        if has_unit:
            map_dict[Optional("unit", default=None)] = Str()

        return value_type | Str() | Map(map_dict)
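
Inlining the schema this method builds with its default arguments (a sketch; the YAML values are invented), both a bare scalar and a value/unit mapping validate:

from strictyaml import Float, Map, Optional, Str, load

value_schema = Float() | Str() | Map({
    "value": Float() | Str(),
    Optional("unit", default=None): Str(),
})
print(load("250.0", value_schema).data)                   # 250.0
print(load("value: 250.0\nunit: km", value_schema).data)  # {'value': 250.0, 'unit': 'km'}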
Example #4
 def _get_target_schema(cls) -> Map:
     target_schema_map = {}
     for key in [f.name for f in fields(FlightPoint)]:
         target_schema_map[Optional(
             key,
             default=None)] = (Float()
                               | Str()
                               | Map({
                                   "value": Float() | Str(),
                                   Optional("unit", default=None): Str()
                               }))
     return Map(target_schema_map)
Example #5
 def __init__(self):
     super().__init__({
         Optional(conditions.ALL_OF): self,
         Optional(conditions.ENDS_EARLIER_THAN): Float(),
         Optional(conditions.ENDS_LATER_THAN): Float(),
         Optional(conditions.NONE_OF): self,
         Optional(conditions.ONE_OF): self,
         Optional(conditions.STARTS_EARLIER_THAN): Float(),
         Optional(conditions.STARTS_LATER_THAN): Float(),
         Optional(conditions.WEEKDAY): Bool(),
         Optional(conditions.WEEKEND): Bool(),
     })
Example #6
 def get_robot_part_schema():
     """
     Getter for robot schema
     :return: schema that is used to verify the robot yaml
     """
     return Map({
         'name': Str(),
         'type': Str(),
         'brick': Int(),
         'x_offset': Float(),
         'y_offset': Float(),
         Optional('port'): Regex('ev3-ports:(in[1-4]|out[A-D])'),
         Optional('side'): Regex('left|right|rear'),
         Optional('direction'): Regex('bottom|front'),
     })
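
A usage sketch, assuming the strictyaml names used above (Map, Str, Int, Float, Optional, Regex) are imported; the part definition is invented:

from strictyaml import load

robot_part_yaml = """
name: left_wheel
type: motor
brick: 0
x_offset: -1.5
y_offset: 2.0
port: ev3-ports:outA
"""
part = load(robot_part_yaml, get_robot_part_schema()).data
print(part["x_offset"], part["port"])  # -1.5 ev3-ports:outA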
Example #7
 def _get_base_step_mapping(cls) -> dict:
     polar_coeff_schema = CommaSeparated(Float()) | Str()
     polar_schema = Map({
         "CL": polar_coeff_schema,
         "CD": polar_coeff_schema
     }) | Str()
     return {
         Optional("target", default=None): cls._get_target_schema(),
         Optional("engine_setting", default=None): Str(),
         Optional(POLAR_TAG, default=None): polar_schema,
         Optional("thrust_rate", default=None): Float() | Str(),
         Optional("climb_thrust_rate", default=None): Float() | Str(),
         Optional("time_step", default=None): Float(),
         Optional("maximum_flight_level", default=None): Float() | Str(),
     }
Example #8
 def _get_base_part_mapping(cls) -> dict:
     """Base mapping for segment/phase schemas."""
     polar_coeff_schema = CommaSeparated(Float()) | Str()
     polar_schema = Map({
         "CL": polar_coeff_schema,
         "CD": polar_coeff_schema
     }) | Str()
     return {
         # TODO: this mapping covers all possible segments, but some options are relevant
         #  only for some segments. A better check could be done in second-pass validation.
         Optional("target", default=None):
         cls._get_target_schema(),
         Optional("engine_setting", default=None):
         cls._get_value_schema(Str(), False),
         Optional(POLAR_TAG, default=None):
         polar_schema,
         Optional("thrust_rate", default=None):
         cls._get_value_schema(has_unit=False),
         Optional("climb_thrust_rate", default=None):
         cls._get_value_schema(has_unit=False),
         Optional("time_step", default=None):
         cls._get_value_schema(),
         Optional("maximum_flight_level", default=None):
         cls._get_value_schema(has_unit=False),
         Optional("mass_ratio", default=None):
         cls._get_value_schema(has_unit=False),
         Optional("reserve_mass_ratio", default=None):
         cls._get_value_schema(has_unit=False),
         Optional("use_max_lift_drag_ratio", default=None):
         cls._get_value_schema(Bool(), False),
     }
Example #9
class Config:
    SCHEMA = Map(
        dict(image_scale=Float(),
             frame_limit=Int(),
             dump_stats=Bool(),
             alignment=Map(
                 dict(enabled=Bool(),
                      mode=Str(),
                      max_iterations=Int(),
                      termination_eps=Float()))))

    def __init__(self, file_or_yaml=None):
        # A file handle as a default argument would be opened once, at class
        # definition time; resolve the default lazily instead.
        if file_or_yaml is None:
            file_or_yaml = open('config.yaml', 'r')
        if isinstance(file_or_yaml, str):
            yaml_str = file_or_yaml
        else:
            yaml_str = file_or_yaml.read()
            file_or_yaml.close()
        self.cfg = load(yaml_str, Config.SCHEMA).data

    def get(self, key):
        return self.cfg[key]
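
Since __init__ also accepts a plain YAML string, a quick check with invented values looks like this:

cfg = Config("""
image_scale: 0.5
frame_limit: 120
dump_stats: no
alignment:
  enabled: yes
  mode: translation
  max_iterations: 50
  termination_eps: 0.001
""")
print(cfg.get("image_scale"))  # 0.5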
Example #10
 def _get_mission_schema(cls) -> Map:
     """Schema of the mission section."""
     return Map({
         PARTS_TAG:
         Seq(
             Map({
                 Optional(ROUTE_TAG, default=None):
                 Str(),
                 Optional(PHASE_TAG, default=None):
                 Str(),
                 Optional(RESERVE_TAG, default=None):
                 Map({
                     "ref": Str(),
                     "multiplier": Float() | Str()
                 }),
             })),
     })
Example #11
def build_schema_for_cubes():
    """
    Returns
    -------
    dict
        each element is str -> strictyaml.Map
        where key is name of cube,
        value is a schema used for validation and type-coercion
    """
    schemas = {}
    for class_of_object in SUPPORTED_CUBES:
        res = build_schema_from_signature(class_of_object)

        # "selection" isn't used in __init__, but we will need it later
        res["selection"] = Seq(Str())

        # shortcut for strategy initialization
        if is_key_in_schema("strategy", res):
            signature_validation = {}
            for strategy_class in SUPPORTED_STRATEGIES:
                local_signature_validation = build_schema_from_signature(
                    strategy_class)
                signature_validation.update(local_signature_validation)
            res[Optional("strategy_params")] = Map(signature_validation)

        # we will deal with "values" later, but we can check at least some simple things already
        if class_of_object.__name__ == "CubeCreator":
            element = Map({"name": Str(), "values": Seq(Any())})
            res["parameters"] = Seq(element)
        if class_of_object.__name__ == "RegularizersModifierCube":
            element = Map({
                Optional("name"): Str(),
                Optional("regularizer"): Any(),
                Optional("tau_grid"): Seq(Float())
            })
            res["regularizer_parameters"] = element | Seq(element)

        res = Map(res)

        specific_schema = Map({class_of_object.__name__: res})
        schemas[class_of_object.__name__] = specific_schema
    return schemas
Example #12
def choose_validator(param):
    """
    Parameters
    ----------
    param : inspect.Parameter

    Returns
    -------
    instance of strictyaml.Validator
    """
    if param.annotation is int:
        return Int()
    if param.annotation is float:
        return Float()
    if param.annotation is bool:
        return Bool()
    if param.annotation is str:
        return Str()
    if param.name in ARTM_TYPES:
        return ARTM_TYPES[param.name]
    return Any()
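
A quick sketch of how the annotation checks and the ARTM_TYPES fallback interact; the train function is hypothetical, and the strictyaml validators plus ARTM_TYPES above are assumed in scope:

import inspect

def train(tau: float, seed: int, topic_names=None):
    pass

for param in inspect.signature(train).parameters.values():
    print(param.name, type(choose_validator(param)).__name__)
# tau Float                (float annotation)
# seed Int                 (int annotation)
# topic_names OrValidator  (no annotation; falls back to ARTM_TYPES)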
Example #13
def parse_modalities_data(parsed):
    has_modalities_to_use = is_key_in_schema("modalities_to_use", parsed["model"])
    has_weights = is_key_in_schema("modalities_weights", parsed["model"])
    main_modality = parsed["model"]["main_modality"]

    # exactly one should be specified
    if has_modalities_to_use == has_weights:
        raise ValueError(f"Either 'modalities_to_use' or 'modalities_weights' should be specified")

    if has_weights:
        modalities_to_use = list(parsed["model"]["modalities_weights"].data)
        if main_modality not in modalities_to_use:
            modalities_to_use.append(main_modality)
        local_schema = Map({
            key: Float() for key in modalities_to_use
        })
        parsed["model"]["modalities_weights"].revalidate(local_schema)
        modalities_weights = parsed["model"]["modalities_weights"].data
        return modalities_weights
    else:
        modalities_to_use = parsed.data["model"]["modalities_to_use"]
        return modalities_to_use
Example #14
             'type': Literal('uniform'),
             'interval': FixedSeq([Scalar(), Scalar()]),
             'num': Int(),
         }),
         Map({
             'type': Literal('graded'),
             'interval': FixedSeq([Scalar(), Scalar()]),
             'num': Int(),
             'grading': Scalar(),
         })),
 ),
 Optional('evaluate'):
 MapPattern(Str(), Str()),
 Optional('constants'):
 MapPattern(Str(),
            Int() | Float() | Str()),
 Optional('templates'):
 Seq(FileMapping(glob_allowed=True)),
 Optional('prefiles'):
 Seq(FileMapping(glob_allowed=True)),
 Optional('postfiles'):
 Seq(FileMapping(glob_allowed=True)),
 Optional('script'):
 Seq(
     First(
         "script command",
         Str(),
         Seq(Str()),
         Map({
             'command':
             Str() | Seq(Str()),
Example #15
from strictyaml import (
    Optional,
    Map,
    MapPattern,
    Float,
    Int,
    Seq,
    Str,
    dirty_load,
    as_document,
)

import demes

Number = Int() | Float()

_epoch_schema = Map({
    "start_time": Number,
    Optional("end_time"): Number,
    Optional("initial_size"): Number,
    Optional("final_size"): Number,
})

_migration_schema = Map({
    Optional("start_time"): Number,
    Optional("end_time"): Number,
    "source": Str(),
    "dest": Str(),
    "rate": Float(),
})
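
Loading an invented epoch with dirty_load (imported above) shows the Number alternation coercing each scalar to int or float:

epoch = dirty_load("""
start_time: 100
end_time: 0
initial_size: 1000.5
""", _epoch_schema).data
print(epoch)  # {'start_time': 100, 'end_time': 0, 'initial_size': 1000.5}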
Example #16
class Experiment:
    """
    This class orchestrates the analysis pipeline for our redox imaging experiments.
    """

    experiment_schema = Map({
        "pipeline":
        Map({
            "strategy": Str(),
            "acquisition_method": Enum(["acquire", "mda"]),
            "trimmed_profile_length": Int(),
            "untrimmed_profile_length": Int(),
            "seg_threshold": Int(),
            "measurement_order": Int(),
            "measure_thickness": Float(),
            "reference_wavelength": Str(),
            "image_register": Int(),
            "channel_register": Int(),
            "population_register": Int(),
            "trimmed_regions": MapPattern(Str(), CommaSeparated(Float())),
            "untrimmed_regions": MapPattern(Str(), CommaSeparated(Float())),
        }),
        "redox":
        Map({
            "ratio_numerator": Str(),
            "ratio_denominator": Str(),
            "r_min": Float(),
            "r_max": Float(),
            "instrument_factor": Float(),
            "midpoint_potential": Float(),
            "z": Int(),
            "temperature": Float(),
        }),
        "registration":
        Map({
            "n_deriv": Float(),
            "warp_n_basis": Float(),
            "warp_order": Float(),
            "warp_lambda": Float(),
            "smooth_lambda": Float(),
            "smooth_n_breaks": Float(),
            "smooth_order": Float(),
            "rough_lambda": Float(),
            "rough_n_breaks": Float(),
            "rough_order": Float(),
        }),
        "output":
        Map({
            "should_save_plots": Bool(),
            "should_save_profile_data": Bool(),
            "should_save_summary_data": Bool(),
        }),
    })

    seg_images: xr.DataArray = None
    rot_fl: xr.DataArray = None
    rot_seg: xr.DataArray = None

    midlines: xr.DataArray = None

    untrimmed_raw_profiles: xr.DataArray = None
    untrimmed_std_profiles: xr.DataArray = None
    untrimmed_reg_profiles: xr.DataArray = None

    trimmed_raw_profiles: xr.DataArray = None
    trimmed_std_profiles: xr.DataArray = None
    trimmed_reg_profiles: xr.DataArray = None

    channel_warps: xr.DataArray = None
    std_warps: xr.DataArray = None

    def __init__(self, exp_dir):
        self.experiment_dir = Path(exp_dir)
        self.settings_path = self.experiment_dir.joinpath("settings.yaml")
        try:
            with open(self.settings_path, "r") as f:
                self.config = load(f.read(), self.experiment_schema).data
        except YAMLError:
            raise ValueError("Incorrectly specified config file.")

        self.experiment_id = self.experiment_dir.stem

        # compute the filenames/paths for this experiment
        self.movement_path = self.experiment_dir.joinpath(self.experiment_id +
                                                          "-mvmt.csv")
        self.frame_map_path = self.experiment_dir.joinpath(self.experiment_id +
                                                           "-frame_map.csv")
        self.processed_images_dir = self.experiment_dir.joinpath(
            "processed_images")
        self.rot_seg_dir = self.processed_images_dir.joinpath("rot_seg")
        self.rot_fl_dir = self.processed_images_dir.joinpath("rot_fl")
        self.fl_imgs_dir = self.processed_images_dir.joinpath(
            "fluorescent_images")
        self.orig_images_path = self.processed_images_dir.joinpath("images.nc")
        self.seg_images_path = self.processed_images_dir.joinpath(
            "seg_images.nc")
        self.aligned_images_path = self.processed_images_dir.joinpath(
            "aligned_images.nc")
        self.aligned_seg_images_path = self.processed_images_dir.joinpath(
            "aligned_seg_images.nc")

        # load images
        self.images = self._load_raw_images()

        # try to load masks
        try:
            self.load_masks()
        except IOError:
            logging.info("No masks found in experiment directory")
            pass

    # Computed Filepaths

    @property
    def midlines_path(self) -> Path:
        return self.analysis_dir.joinpath("midlines.pickle")

    @property
    def raw_img_stack_path(self) -> Path:
        # TODO test that this works
        accepted_extensions = [".tif", ".tiff", ".stk"]

        candidate_paths = [
            self.experiment_dir.joinpath(f"{self.experiment_id}{ext}")
            for ext in accepted_extensions
        ]

        for path in candidate_paths:
            if path.exists():
                return path

        raise ValueError(
            f"No image found in experiment directory. Tried the following files: {candidate_paths}"
        )

    @property
    def fig_dir(self):
        return self.analysis_dir.joinpath("figs")

    def untrimmed_profile_data_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-untrimmed_{treatment}_profile_data.nc")

    def trimmed_profile_data_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-trimmed_{treatment}_profile_data.nc")

    @property
    def channel_warp_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id +
                                          "-channel_warps.nc")

    @property
    def std_warp_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id + "-std_warps.nc")

    def untrimmed_profile_data_csv_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-untrimmed_{treatment}_profile_data.csv")

    def trimmed_profile_data_csv_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-trimmed_{treatment}_profile_data.csv")

    @property
    def untrimmed_region_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id +
                                          "-untrimmed_region_data.csv")

    @property
    def trimmed_region_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id +
                                          "-trimmed_region_data.csv")

    @property
    def analysis_dir(self) -> Path:
        date_str = datetime.datetime.now().strftime("%Y-%m-%d")
        strategy = self.config["pipeline"]["strategy"]
        if len(strategy) > 0:
            suffix = f"_{strategy}"
        else:
            suffix = ""
        analysis_dir_ = self.experiment_dir.joinpath(
            "analyses",
            utils.get_valid_filename(f"{date_str}{suffix}"),
        )
        # analysis_dir_.mkdir(parents=True, exist_ok=True)
        return analysis_dir_

    def _load_raw_images(self):
        """
        This returns the raw (non-median-subtracted) images
        """
        logging.info(f"Loading image data from {self.raw_img_stack_path}")
        raw_image_data = pio.load_tiff_as_hyperstack(
            img_stack_path=self.raw_img_stack_path,
            manual_metadata=self.frame_map_path,
            mvmt_metadata=self.movement_path,
        )

        raw_image_data = raw_image_data.assign_coords({
            "experiment_id": (
                ("animal", ),
                np.repeat(self.experiment_id, raw_image_data.animal.size),
            )
        })
        raw_image_data = self.add_experiment_metadata_to_data_array(
            raw_image_data)

        return raw_image_data

    def _load_movement(self) -> pd.DataFrame:
        movement_path = self.experiment_dir.joinpath(self.experiment_id +
                                                     "-mvmt.csv")
        try:
            df = pd.read_csv(movement_path)
            df = df.pivot_table(index="animal",
                                columns=["region", "pair"],
                                values="movement")
            df = df.stack("pair")
            return df
        except FileNotFoundError:
            logging.warning(
                f"Tried to access {movement_path}; file was not found")
            return None

    def make_analysis_dir(self) -> None:
        logging.info(f"Making analysis directory at {self.analysis_dir}")
        self.analysis_dir.mkdir(parents=True, exist_ok=True)

    @property
    def trimmed_summary_table(self):
        df = profile_processing.summarize_over_regions(
            self.trimmed_raw_profiles,
            regions=self.config["pipeline"]["trimmed_regions"],
            rescale=False,
            **self.config["redox"],
        )
        return df

    @property
    def untrimmed_summary_table(self):
        df = profile_processing.summarize_over_regions(
            self.untrimmed_raw_profiles,
            regions=self.config["pipeline"]["untrimmed_regions"],
            **self.config["redox"],
        )
        return df

    ####################################################################################
    # PIPELINE
    ####################################################################################

    def full_pipeline(self):
        logging.info(f"Starting full pipeline run for {self.experiment_dir}")

        self.make_analysis_dir()

        logging.info(f"Saving fluorescent images to {self.fl_imgs_dir}")
        pio.save_images_xarray_to_tiffs(self.images,
                                        self.fl_imgs_dir,
                                        prefix=self.experiment_id)

        self.segment_pharynxes()
        self.register_images()
        self.align_and_center()
        self.calculate_midlines()
        self.measure_under_midlines()
        self.register_profiles()
        self.trim_data()
        self.calculate_redox()
        self.do_manual_ap_flips()
        self.persist_to_disk()

        logging.info(f"Finished full pipeline run for {self.experiment_dir}")

        return self

    def run_neuron_pipeline(self):
        logging.info(
            f"Starting full neuron analysis pipeline run for {self.experiment_dir}"
        )
        self.make_analysis_dir()
        df = ip.measure_under_labels(self.images,
                                     self.seg_images).reset_index()

        df.to_csv(self.analysis_dir /
                  (self.experiment_id + "-neuron_analysis.csv"))

    def segment_pharynxes(self):
        if self.seg_images is not None:
            logging.info("masks have been specified. skipping mask generation")
            self.save_masks()
            return
        else:
            logging.info("Generating masks")
            self.seg_images = ip.segment_pharynxes(
                self.images,
                wvl=self.config["pipeline"]["reference_wavelength"])
            self.save_masks()

    def register_images(self):
        if self.config["pipeline"]["image_register"]:
            logging.info("Registering Images")
            self.images = ip.register_all_images(self.images, self.seg_images)

    def align_and_center(self):
        logging.info("Centering and rotating pharynxes")
        self.rot_fl, self.rot_seg = ip.center_and_rotate_pharynxes(
            self.images,
            self.seg_images,
        )

        logging.info(f"Saving rotated FL images to {self.aligned_images_path}")
        pio.save_profile_data(self.rot_fl, self.aligned_images_path)

        logging.info(f"Saving rotated masks to {self.aligned_seg_images_path}")
        pio.save_profile_data(self.rot_seg, self.aligned_seg_images_path)

    def calculate_midlines(self):
        logging.info("Calculating midlines")
        self.midlines = ip.calculate_midlines(self.rot_seg, degree=4)

    def measure_under_midlines(self):
        logging.info("Measuring under midlines")
        self.untrimmed_raw_profiles = ip.measure_under_midlines(
            self.rot_fl,
            self.midlines,
            n_points=self.config["pipeline"]["untrimmed_profile_length"],
            order=self.config["pipeline"]["measurement_order"],
            thickness=float(self.config["pipeline"]["measure_thickness"]),
        )
        self.untrimmed_raw_profiles = profile_processing.align_pa(
            self.untrimmed_raw_profiles)
        self.untrimmed_raw_profiles = self.add_experiment_metadata_to_data_array(
            self.untrimmed_raw_profiles)

        # subtract the image medians from the profile data
        logging.info("Subtracting image medians from profile data")
        self.untrimmed_raw_profiles = ip.subtract_medians(
            self.untrimmed_raw_profiles, self.images)

    def register_profiles(self):

        if self.config["pipeline"]["population_register"]:
            logging.info("Standardizing profiles")
            (
                self.untrimmed_std_profiles,
                self.std_warps,
            ) = profile_processing.standardize_profiles(
                self.untrimmed_raw_profiles,
                redox_params=self.config["redox"],
                **self.config["registration"],
            )

        if self.config["pipeline"]["channel_register"]:
            logging.info("Channel-Registering profiles")

            if self.untrimmed_std_profiles is not None:
                logging.info(
                    "using the standardize profiles for channel-registration")
                data_to_register = self.untrimmed_std_profiles
            else:
                logging.info("using the raw profiles for channel-registration")
                data_to_register = self.untrimmed_raw_profiles

            (
                self.untrimmed_reg_profiles,
                self.channel_warps,
            ) = profile_processing.channel_register(
                data_to_register,
                redox_params=self.config["redox"],
                reg_params=self.config["registration"],
            )

    def trim_data(self):
        logging.info("Trimming intensity data")

        self.trimmed_raw_profiles = self.add_experiment_metadata_to_data_array(
            profile_processing.trim_profiles(
                self.untrimmed_raw_profiles,
                self.config["pipeline"]["seg_threshold"],
                ref_wvl=self.config["pipeline"]["reference_wavelength"],
            ))

        if self.untrimmed_std_profiles is not None:
            self.trimmed_std_profiles = self.add_experiment_metadata_to_data_array(
                profile_processing.trim_profiles(
                    self.untrimmed_std_profiles,
                    self.config["pipeline"]["seg_threshold"],
                    ref_wvl=self.config["pipeline"]["reference_wavelength"],
                ))

        if self.untrimmed_reg_profiles is not None:
            self.trimmed_reg_profiles = self.add_experiment_metadata_to_data_array(
                profile_processing.trim_profiles(
                    self.untrimmed_reg_profiles,
                    self.config["pipeline"]["seg_threshold"],
                    ref_wvl=self.config["pipeline"]["reference_wavelength"],
                ))

    def calculate_redox(self):
        logging.info("Calculating redox measurements")

        redox_params = self.config["redox"]

        # Images
        self.images = utils.add_derived_wavelengths(self.images,
                                                    **redox_params)
        self.rot_fl = utils.add_derived_wavelengths(self.rot_fl,
                                                    **redox_params)

        # profiles
        self.trimmed_raw_profiles = utils.add_derived_wavelengths(
            self.trimmed_raw_profiles, **redox_params)

        self.untrimmed_raw_profiles = utils.add_derived_wavelengths(
            self.untrimmed_raw_profiles, **redox_params)

    def do_manual_ap_flips(self):
        # TODO finish implementation
        logging.info("skipping manual AP flips - not implemented")

    def flip_at(self, idx):
        # TODO finish implementation
        raise NotImplementedError

    ####################################################################################
    # PERSISTENCE / IO
    ####################################################################################

    def save_images(self):
        """Save this experiment's images to disk as netCDF4 files"""
        imgs_paths = [
            (self.images, self.orig_images_path),
            (self.rot_fl, self.aligned_images_path),
            (self.seg_images, self.seg_images_path),
            (self.rot_seg, self.aligned_seg_images_path),
        ]
        for img, path in imgs_paths:
            if img is not None:
                logging.info(f"Saving images to {path}")
                img.to_netcdf(path)

    # def load_tiff_as_hyperstack(self):
    # pass

    def make_fig_dir(self):
        fig_dir = self.analysis_dir.joinpath("figs")
        fig_dir.mkdir(parents=True, exist_ok=True)
        return fig_dir

    def save_individual_profiles(self, profile_data, treatment: str,
                                 trimmed: bool):
        if profile_data is None:
            return

        fig_dir = self.make_fig_dir()

        profile_data_fig_dir = (fig_dir / "profile_data" / treatment /
                                ("trimmed" if trimmed else "untrimmed"))

        individual_data_fig_dir = profile_data_fig_dir.joinpath("individual")
        individual_data_fig_dir.mkdir(exist_ok=True, parents=True)

        for title, fig in plots.generate_wvl_pair_timepoint_profile_plots(
                profile_data):
            title = title.replace(" ", "")
            fig.savefig(individual_data_fig_dir /
                        f"{self.experiment_id}-{title}-individuals.pdf")
            plt.close(fig)

    def save_avg_profiles(self, profile_data, treatment: str, trimmed: bool):
        if profile_data is None:
            return

        fig_dir = self.make_fig_dir()

        profile_data_fig_dir = (fig_dir / "profile_data" / treatment /
                                ("trimmed" if trimmed else "untrimmed"))

        individual_data_fig_dir = profile_data_fig_dir.joinpath("avg")
        individual_data_fig_dir.mkdir(exist_ok=True, parents=True)

        for title, fig in plots.generate_avg_wvl_pair_profile_plots(
                profile_data):
            title = title.replace(" ", "")
            fig.savefig(individual_data_fig_dir /
                        f"{self.experiment_id}-{title}-avg.pdf")
            plt.close(fig)

    def save_plots(self):
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")

            for data, treatment, trimmed in [
                (self.untrimmed_raw_profiles, "raw", False),
                (self.untrimmed_std_profiles, "standardized", False),
                (self.untrimmed_reg_profiles, "channel-registered", False),
                (self.trimmed_raw_profiles, "raw", True),
                (self.trimmed_std_profiles, "standardized", True),
                (self.trimmed_reg_profiles, "channel-registered", True),
            ]:
                self.save_individual_profiles(data, treatment, trimmed)
                self.save_avg_profiles(data, treatment, trimmed)

            # frame-normed Ratio Images
            mvmt_annotation_img_path = self.fig_dir.joinpath(
                f"{self.experiment_id}-movement_annotation_imgs.pdf")
            imgs = utils.add_derived_wavelengths(self.images,
                                                 **self.config["redox"])
            with PdfPages(mvmt_annotation_img_path) as pdf:
                for i in tqdm(range(self.images.animal.size)):
                    fig = plots.plot_pharynx_R_imgs(imgs[i],
                                                    mask=self.seg_images[i])
                    fig.suptitle(f"animal = {i}")
                    pdf.savefig(fig)
                    if (i % 20) == 0:
                        plt.close("all")

            # Pop-normed ratio images
            u = self.trimmed_raw_profiles.sel(wavelength="r").mean()
            std = self.trimmed_raw_profiles.sel(wavelength="r").std()

            for pair in self.rot_fl.pair.values:
                for tp in self.rot_fl.timepoint.values:
                    ratio_img_path = self.fig_dir.joinpath(
                        f"{self.experiment_id}-ratio_images-pair={pair};timepoint={tp}.pdf"
                    )
                    with PdfPages(ratio_img_path) as pdf:
                        logging.info(
                            f"Saving ratio images to {ratio_img_path}")
                        for i in tqdm(range(self.rot_fl.animal.size)):
                            fig, ax = plt.subplots(dpi=300)
                            ratio_img = (self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_numerator"],
                                pair=pair,
                                timepoint=tp,
                            ) / self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_denominator"],
                                pair=pair,
                                timepoint=tp,
                            ))[i]
                            fl_img = self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_numerator"],
                                pair=pair,
                                timepoint=tp,
                            )[i]
                            im, cbar = plots.imshow_ratio_normed(
                                ratio_img,
                                fl_img,
                                r_min=u - (std * 1.96),
                                r_max=u + (std * 1.96),
                                colorbar=True,
                                i_max=5000,
                                i_min=1000,
                                ax=ax,
                            )
                            ax.plot(
                                *self.midlines.sel(
                                    pair=pair,
                                    timepoint=tp,
                                )[i].values[()].linspace(),
                                color="green",
                                alpha=0.3,
                            )
                            strain = self.rot_fl.strain.values[i]
                            ax.set_title(
                                f"Animal={i} ; Pair={pair} ; Strain={strain}")
                            cax = cbar.ax
                            for j in range(len(self.trimmed_raw_profiles)):
                                cax.axhline(
                                    self.trimmed_raw_profiles.sel(
                                        wavelength="r",
                                        pair=pair,
                                        timepoint=tp)[j].mean(),
                                    color="k",
                                    alpha=0.1,
                                )
                            cax.axhline(
                                self.trimmed_raw_profiles.sel(
                                    wavelength="r", pair=pair,
                                    timepoint=tp)[i].mean(),
                                color="k",
                            )
                            pdf.savefig()
                            if (i % 20) == 0:
                                plt.close("all")

    def persist_profile_data(self):
        for treatment, untrimmed_profile_data in (
            ("raw", self.untrimmed_raw_profiles),
            ("std", self.untrimmed_std_profiles),
            ("reg", self.untrimmed_reg_profiles),
        ):
            if untrimmed_profile_data is not None:
                untrimmed_prof_path = self.untrimmed_profile_data_path(
                    treatment)
                logging.info(
                    f"Saving untrimmed {treatment} profile data to {untrimmed_prof_path}"
                )
                pio.save_profile_data(untrimmed_profile_data,
                                      untrimmed_prof_path)

                untrimmed_prof_path_csv = self.untrimmed_profile_data_csv_path(
                    treatment)
                profile_processing.to_dataframe(
                    untrimmed_profile_data,
                    "value").to_csv(untrimmed_prof_path_csv)

        for treatment, trimmed_profile_data in (
            ("raw", self.trimmed_raw_profiles),
            ("std", self.trimmed_std_profiles),
            ("reg", self.trimmed_reg_profiles),
        ):
            if trimmed_profile_data is not None:
                trimmed_prof_path = self.trimmed_profile_data_path(treatment)
                logging.info(
                    f"Saving trimmed {treatment} profile data to {trimmed_prof_path}"
                )
                pio.save_profile_data(trimmed_profile_data, trimmed_prof_path)

                trimmed_prof_path_csv = self.trimmed_profile_data_csv_path(
                    treatment)
                logging.info(
                    f"Saving trimmed {treatment} profile data to {trimmed_prof_path_csv}"
                )
                profile_processing.to_dataframe(
                    trimmed_profile_data,
                    "value").to_csv(trimmed_prof_path_csv)

        # Warps, if necessary
        if self.config["pipeline"]["channel_register"]:
            logging.info(
                f"Saving channel warp data to {self.channel_warp_data_path}")
            self.channel_warps.to_netcdf(self.channel_warp_data_path)

        if self.config["pipeline"]["population_register"]:
            logging.info(
                f"Saving channel warp data to {self.std_warp_data_path}")
            self.std_warps.to_netcdf(self.std_warp_data_path)

    def save_summary_data(self):
        # Persist the region means
        logging.info(
            f"Saving untrimmed region means to {self.untrimmed_region_data_path}"
        )
        self.untrimmed_summary_table.to_csv(self.untrimmed_region_data_path)
        logging.info(
            f"Saving trimmed region means to {self.trimmed_region_data_path}")
        self.trimmed_summary_table.to_csv(self.trimmed_region_data_path)

    def save_masks(self):
        logging.info(f"saving masks to {self.seg_images_path}")
        pio.save_profile_data(self.seg_images, self.seg_images_path)

    def load_masks(self):
        self.seg_images = pio.load_profile_data(self.seg_images_path)
        logging.info(f"Loaded masks from {self.seg_images_path}")

    def save_midlines(self):
        pio.save_midlines(self.midlines_path, self.midlines)

    def load_midlines(self):
        return pio.load_midlines(self.midlines_path)

    def persist_to_disk(self):
        logging.info(
            f"Saving {self.experiment_id} inside {self.experiment_dir}")

        self.save_midlines()

        if self.config["output"]["should_save_summary_data"]:
            self.save_summary_data()

        if self.config["output"]["should_save_profile_data"]:
            self.persist_profile_data()

        if self.config["output"]["should_save_plots"]:
            self.save_plots()

    ####################################################################################
    # MISC / HELPER
    ####################################################################################
    def add_experiment_metadata_to_data_array(self, data_array: xr.DataArray):
        params = {}
        params.update(self.config["pipeline"])
        params.update(self.config["redox"])
        params.update(self.config["registration"])

        to_remove = ["trimmed_regions", "untrimmed_regions"]
        for k in to_remove:
            del params[k]

        return data_array.assign_attrs(**params)
Example #17
from .dataset import Dataset


from .cubes import PerplexityStrategy, GreedyStrategy
from .model_constructor import init_simple_default_model

import artm

from inspect import signature, Parameter
from strictyaml import Map, Str, Int, Seq, Any, Optional, Float, EmptyNone, Bool
from strictyaml import dirty_load

# TODO: use stackoverflow.com/questions/37929851/parse-numpydoc-docstring-and-access-components
# for now just hardcode most common / important types
ARTM_TYPES = {
    "tau": Float(),
    "topic_names": Str() | Seq(Str()) | EmptyNone(),
    # TODO: handle class_ids in model and in regularizers separately
    "class_ids": Str() | Seq(Str()) | EmptyNone(),
    "gamma": Float() | EmptyNone(),
    "seed": Int(),
    "num_document_passes": Int(),
    "num_processors": Int(),
    "cache_theta": Bool(),
    "reuse_theta": Bool(),
    "theta_name": Str()
}


element = Any()
base_schema = Map({
Example #18
# import cog
# from alfasim_sdk import CaseDescription
# from alfasim_sdk._internal.alfacase.generate_schema import get_all_classes_that_needs_schema, generate_alfacase_schema
# cog.out("from strictyaml import Bool, Enum, Int, Map, MapPattern, Optional, Seq, Str, Float # noreorder")
# cog.out("\n\n")
# cog.out("\n\n")
# list_of_classes_that_needs_schema = get_all_classes_that_needs_schema(CaseDescription)
# for class_ in list_of_classes_that_needs_schema:
#    cog.out(generate_alfacase_schema(class_))
# ]]]
from strictyaml import Bool, Enum, Int, Map, MapPattern, Optional, Seq, Str, Float  # noreorder

bip_description_schema = Map({
    "component_1": Str(),
    "component_2": Str(),
    "value": Float(),
})
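
bip_description_schema is self-contained, so a quick check with invented components works (load from strictyaml assumed):

from strictyaml import load

bip = load("""
component_1: methane
component_2: ethane
value: 0.01
""", bip_description_schema).data
print(bip["value"])  # 0.01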
casing_section_description_schema = Map({
    "name":
    Str(),
    "hanger_depth":
    Map({
        "value": Float(),
        "unit": Str()
    }),
    "settings_depth":
    Map({
        "value": Float(),
        "unit": Str()
    }),
    "hole_diameter":
Example #19
def Scalar():
    """Validator that matches integers and floats."""
    return Int() | Float()
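
For instance (a sketch assuming Int, Float, Map and load are imported from strictyaml):

from strictyaml import Int, Float, Map, load

point = load("x: 3\ny: 2.5", Map({"x": Scalar(), "y": Scalar()})).data
print(point)  # {'x': 3, 'y': 2.5} -- "3" stays an int, "2.5" becomes a float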
Example #20
 def _get_route_mapping(cls) -> dict:
     return {"range": Float() | Str(), STEPS_TAG: Seq(Any())}
Example #21
class Engine(BaseEngine):
    schema = StorySchema(given={
        Optional("files"): MapPattern(Str(), Str()),
        Optional("variables"): MapPattern(Str(), Str()),
        Optional("python version"): Str(),
        Optional("setup"): Str(),
        Optional("code"): Str(),
    }, )

    def __init__(self, pathgroup, settings):
        self.path = pathgroup
        self.settings = settings

    def set_up(self):
        self.path.state = self.path.gen.joinpath("state")
        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        for filename, text in self.given.get("files", {}).items():
            filepath = self.path.state.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().mkdir()
            filepath.write_text(str(text))
            filepath.chmod("u+x")

        for filename, text in self.given.get("variables", {}).items():
            filepath = self.path.state.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().mkdir()
            filepath.write_text(str(text))

        self.path.key.joinpath("code_that_does_things.py").copy(
            self.path.state)

        self.python_package = hitchpython.PythonPackage(
            self.given.get('python_version', self.given['python version']))
        self.python_package.build()

        self.pip = self.python_package.cmd.pip
        self.python = self.python_package.cmd.python

        with hitchtest.monitor(
            [self.path.key.joinpath("debugrequirements.txt")]) as changed:
            if changed:
                self.pip("install", "-r",
                         "debugrequirements.txt").in_dir(self.path.key).run()

        with hitchtest.monitor(
                pathq(self.path.project.joinpath("icommandlib")).ext(
                    "py")) as changed:
            if changed:
                self.pip("uninstall", "icommandlib",
                         "-y").ignore_errors().run()
                self.pip("install", ".").in_dir(self.path.project).run()

        self.example_py_code = ExamplePythonCode(
            self.python,
            self.path.state,
        ).with_setup_code(self.given.get('setup', '')).with_code(
            self.given.get('code', '')).with_timeout(4.0)

    @expected_exception(HitchRunPyException)
    def run_code(self):
        self.result = self.example_py_code.run()

    @expected_exception(HitchRunPyException)
    def start_code(self):
        self.running_python = self.example_py_code.running_code()

    def pause_for_half_a_second(self):
        import time
        time.sleep(0.5)

    def send_signal_and_wait_for_finish(self, signal_name):
        SIGNAL_NAMES_TO_NUMBERS = {
            name: getattr(signal, name)
            for name in dir(signal)
            if name.startswith('SIG') and '_' not in name
        }
        self.running_python.iprocess.psutil._send_signal(
            int(SIGNAL_NAMES_TO_NUMBERS[signal_name]))
        self.running_python.iprocess.wait_for_finish()

    @expected_exception(HitchRunPyException)
    @validate(
        exception_type=Map({
            "in python 2": Str(),
            "in python 3": Str()
        }) | Str(),
        message=Map({
            "in python 2": Str(),
            "in python 3": Str()
        }) | Str(),
    )
    def raises_exception(self, exception_type=None, message=None):
        """
        Expect an exception.
        """
        differential = False

        if exception_type is not None:
            if not isinstance(exception_type, str):
                differential = True
                exception_type = exception_type['in python 2']\
                    if self.given['python version'].startswith("2")\
                    else exception_type['in python 3']

        if message is not None:
            if not isinstance(message, str):
                differential = True
                message = message['in python 2']\
                    if self.given['python version'].startswith("2")\
                    else message['in python 3']

        try:
            result = self.example_py_code.expect_exceptions().run()
            result.exception_was_raised(exception_type, message)
        except ExpectedExceptionMessageWasDifferent as error:
            if self.settings.get("overwrite artefacts") and not differential:
                self.current_step.update(message=error.actual_message)
            else:
                raise

    @validate(from_filenames=Seq(Str()))
    def processes_not_alive(self, from_filenames=None):
        still_alive = []
        for from_filename in from_filenames:
            import psutil
            pid = int(
                self.path.state.joinpath(from_filename).bytes().decode(
                    'utf8').strip())
            try:
                proc = psutil.Process(pid)
                proc.kill()
                still_alive.append(from_filename)
            except psutil.NoSuchProcess:
                pass
        if len(still_alive) > 0:
            raise Exception("Processes from {0} still alive.".format(
                ', '.join(still_alive)))

    def touch_file(self, filename):
        self.path.state.joinpath(filename).write_text("\nfile touched!",
                                                      append=True)

    def _will_be(self, content, text, reference, changeable=None):
        if text is not None:
            if content.strip() == text.strip():
                return
            else:
                raise RuntimeError(
                    "Expected to find:\n{0}\n\nActual output:\n{1}".format(
                        text,
                        content,
                    ))

        artefact = self.path.key.joinpath(
            "artefacts", "{0}.txt".format(reference.replace(" ", "-").lower()))

        from simex import DefaultSimex
        simex = DefaultSimex(
            open_delimeter="(((",
            close_delimeter=")))",
        )

        simex_contents = content

        if changeable is not None:
            for replacement in changeable:
                simex_contents = simex.compile(replacement).sub(
                    replacement, simex_contents)

        if not artefact.exists():
            artefact.write_text(simex_contents)
        else:
            if self.settings.get('overwrite artefacts'):
                if artefact.bytes().decode('utf8') != simex_contents:
                    artefact.write_text(simex_contents)
                    print(content)
            else:
                if simex.compile(artefact.bytes().decode('utf8')).match(
                        content) is None:
                    raise RuntimeError(
                        "Expected to find:\n{0}\n\nActual output:\n{1}".format(
                            artefact.bytes().decode('utf8'),
                            content,
                        ))

    def file_contents_will_be(self,
                              filename,
                              text=None,
                              reference=None,
                              changeable=None):
        output_contents = self.path.state.joinpath(filename).bytes().decode(
            'utf8')
        self._will_be(output_contents, text, reference, changeable)

    def output_will_be(self, text=None, reference=None, changeable=None):
        output_contents = self.path.state.joinpath(
            "output.txt").bytes().decode('utf8')
        self._will_be(output_contents, text, reference, changeable)

    @validate(seconds=Float())
    def sleep(self, seconds):
        import time
        time.sleep(float(seconds))

    def on_success(self):
        if self.settings.get("overwrite artefacts"):
            self.new_story.save()
Example #22
         Optional('ip', default=''): Str(),
         'type': Str(),
         'ami': Str(),
         'snap': Str(),
         'user': Str()
     }),
     'db':
     Map({
         'name': Str(),
         'host': Str(),
         'port': Int()
     }),
     'orbits':
     Map({
         'start': Datetime(),
         'step': Float(),
         'duration': Float()
     }),
     'agent_interval':
     Float(),
     'metrics_interval':
     Float(),
     'start_delay':
     Int()
 }),
 'satellites':
 Seq(
     Map({
         'hostname': Str(),
         Optional('ip', default=''): Str(),
         'type': Str(),
Example #23
 def get_settings_schema():
     """
     Getter for settings schema
     :return: schema that is used to verify the settings yaml
     """
     return Map({
         'screen_settings':
         Map({
             'background_color': CommaSeparated(Int()),
             'edge_spacing': Int(),
             'screen_height': Int(),
             'screen_width': Int(),
             'side_bar_width': Int(),
             'screen_title': Str(),
             'falling_message': Str()
         }),
         'image_paths':
         Map({
             'arm': Str(),
             'arm_large': Str(),
             'body': Str(),
             'color_sensor_black': Str(),
             'color_sensor_blue': Str(),
             'color_sensor_green': Str(),
             'color_sensor_red': Str(),
             'color_sensor_white': Str(),
             'color_sensor_yellow': Str(),
             'led_amber': Str(),
             'led_black': Str(),
             'led_green': Str(),
             'led_orange': Str(),
             'led_red': Str(),
             'led_yellow': Str(),
             'touch_sensor_left': Str(),
             'touch_sensor_rear': Str(),
             'touch_sensor_right': Str(),
             'ultrasonic_sensor_top': Str(),
             'ultrasonic_sensor_bottom': Str(),
             'wheel': Str()
         }),
         'body_part_sizes':
         Map({
             'body':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'arm':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'led':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'color_sensor':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'speaker':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'touch_sensor_bar':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'touch_sensor_bar_rear':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'ultrasonic_sensor_bottom':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'ultrasonic_sensor_top':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
             'wheel':
             Map({
                 'width': Int(),
                 'height': Int()
             }),
         }),
         'exec_settings':
         Map({
             'frames_per_second': Int(),
             'socket_port': Int(),
             'bluetooth_port': Int(),
             'message_size': Int()
         }),
         'motor_settings':
         Map({
             'distance_coasting_subtraction': Float(),
             'degree_coasting_subtraction': Float()
         }),
         'wheel_settings':
         Map({'circumference': Float()})
     })
Example #24
 Opt("shell"): Str(),
 Opt("concurrencyPolicy"): Enum(['Allow', 'Forbid', 'Replace']),
 Opt("captureStderr"): Bool(),
 Opt("captureStdout"): Bool(),
 Opt("saveLimit"): Int(),
 Opt("utc"): Bool(),
 Opt("failsWhen"): Map({
     "producesStdout": Bool(),
     Opt("producesStderr"): Bool(),
     Opt("nonzeroReturn"): Bool(),
     Opt("always"): Bool(),
 }),
 Opt("onFailure"): Map({
     Opt("retry"): Map({
         "maximumRetries": Int(),
         "initialDelay": Float(),
         "maximumDelay": Float(),
         "backoffMultiplier": Float(),
     }),
     Opt("report"): _report_schema,
 }),
 Opt("onPermanentFailure"): Map({
     Opt("report"): _report_schema,
 }),
 Opt("onSuccess"): Map({
     Opt("report"): _report_schema,
 }),
 Opt("environment"): Seq(Map({
     "key": Str(),
     "value": Str(),
 })),
Example #25

import artm

from strictyaml import Map, Str, Int, Seq, Float, Bool
from strictyaml import Any, Optional, EmptyDict, EmptyNone, EmptyList
from strictyaml import dirty_load

SUPPORTED_CUBES = [CubeCreator, RegularizersModifierCube]
SUPPORTED_STRATEGIES = [PerplexityStrategy, GreedyStrategy]

TYPE_VALIDATORS = {
    'int': Int(),
    'bool': Bool(),
    'str': Str(),
    'float': Float()
}


def choose_key(param):
    """
    Parameters
    ----------
    param : inspect.Parameter

    Returns
    -------
    str or strictyaml.Optional
    """
    if param.default is not Parameter.empty:
        return Optional(param.name)
    return param.name
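
A sketch of the intended effect (the cube function is hypothetical): parameters with defaults become Optional keys, required ones stay plain strings:

import inspect

def cube(num_iter: int, tau: float = 0.1):
    pass

params = inspect.signature(cube).parameters
print(choose_key(params["num_iter"]))            # num_iter  (required -> plain key)
print(type(choose_key(params["tau"])).__name__)  # Optional  (has a default)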
Example #26
NPC = namedtuple('NPC', [
    'name',
    'age',
    'race',
    'class_',
    'physical',
    'personality',
])

NPC_FILENAME = os.path.join(os.path.dirname(__file__), 'data/npc.yaml')
NPC_JSON_FILENAME = os.path.join(os.path.dirname(__file__), 'data/npc.json')

NPC_SCHEMA = Map({
    'races': Seq(Map({
        'v': Str(),
        'w': Float()
    })),
    'classes': Seq(Str()),
    'age': Seq(Map({
        'v': Str(),
        'w': Float()
    })),
    'physical': Seq(Str()),
    'personality': Seq(Str()),
    'names': Seq(Str()),
})
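
A load sketch with invented NPC data (load from strictyaml assumed):

from strictyaml import load

npc_yaml = """
races:
- v: elf
  w: 0.25
classes:
- wizard
age:
- v: young
  w: 0.5
physical:
- tall
personality:
- grumpy
names:
- Aldric
"""
npc_data = load(npc_yaml, NPC_SCHEMA).data
print(npc_data["races"][0])  # {'v': 'elf', 'w': 0.25}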


def _read_data():
    """Read NPC data.
Example #27
 Int(),
 Opt("utc"):
 Bool(),
 Opt("failsWhen"):
 Map({
     "producesStdout": Bool(),
     Opt("producesStderr"): Bool(),
     Opt("nonzeroReturn"): Bool(),
     Opt("always"): Bool(),
 }),
 Opt("onFailure"):
 Map({
     Opt("retry"):
     Map({
         "maximumRetries": Int(),
         "initialDelay": Float(),
         "maximumDelay": Float(),
         "backoffMultiplier": Float(),
     }),
     Opt("report"):
     _report_schema,
 }),
 Opt("onPermanentFailure"):
 Map({Opt("report"): _report_schema}),
 Opt("onSuccess"):
 Map({Opt("report"): _report_schema}),
 Opt("environment"):
 Seq(Map({
     "key": Str(),
     "value": Str()
 })),
Example #28
This schema represents all known key/value pairs for the builder config file.
"""
from strictyaml import (load, Map, MapPattern, Str, Int, Float, Seq, YAMLError,
                        Optional, Bool)

stat_schema = Seq(
    Map({
        "name":
        Str(),
        "tag":
        Str(),
        "values":
        Seq(
            Map({
                "name": Str(),
                "value": Int() | Float(),
                Optional("nominalValue"): Int() | Float(),
                Optional("linkedValue"): Int() | Float(),
                Optional("rangeMinValue"): Int() | Float(),
                Optional("rangeMaxValue"): Int() | Float(),
                Optional("flags"): Int()
            }))
    }), )

stat_format4_schema = Seq(
    Map({
        "name": Str(),
        Optional("flags"): Int(),
        "location": MapPattern(Str(),
                               Int() | Float()),
    }))
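
A sketch validating a minimal, invented STAT entry against stat_schema (load is imported at the top of this example):

stat_yaml = """
- name: Weight
  tag: wght
  values:
  - name: Regular
    value: 400
"""
print(load(stat_yaml, stat_schema).data[0]["values"][0]["value"])  # 400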
    "fea_dim": Int(),
    "out_fea_dim": Int(),
    "num_class": Int(),
    "num_input_features": Int(),
    "use_norm": Bool(),
    "init_size": Int(),
})

dataset_params = Map({
    "dataset_type": Str(),
    "pc_dataset_type": Str(),
    "ignore_label": Int(),
    "return_test": Bool(),
    "fixed_volume_space": Bool(),
    "label_mapping": Str(),
    "max_volume_space": Seq(Float()),
    "min_volume_space": Seq(Float()),
})

train_data_loader = Map({
    "data_path": Str(),
    "imageset": Str(),
    "return_ref": Bool(),
    "batch_size": Int(),
    "shuffle": Bool(),
    "num_workers": Int(),
})

val_data_loader = Map({
    "data_path": Str(),
    "imageset": Str(),
Example #30
class Engine(BaseEngine):
    """Python engine for running tests."""

    given_definition = GivenDefinition(
        setup=GivenProperty(Str()),
        boxname=GivenProperty(Str()),
        vmname=GivenProperty(Str()),
        issue=GivenProperty(Str()),
        files=GivenProperty(MapPattern(Str(), Str())),
        python_version=GivenProperty(Str()),
    )

    info_definition = InfoDefinition(
        status=InfoProperty(schema=Enum(["experimental", "stable"])),
        docs=InfoProperty(schema=Str()),
    )

    def __init__(self, paths, settings):
        self.path = paths
        self.settings = settings

    def set_up(self):
        """Set up your applications and the test environment."""
        self.path.cachestate = self.path.gen.joinpath("cachestate")
        self.path.state = self.path.gen.joinpath("state")
        self.path.working_dir = self.path.gen.joinpath("working")
        self.path.build_path = self.path.gen.joinpath("build_path")
        self.path.localsync = self.path.gen.joinpath("local_sync")

        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        if self.path.localsync.exists():
            self.path.localsync.rmtree(ignore_errors=True)
        self.path.localsync.mkdir()

        if self.path.build_path.exists():
            self.path.build_path.rmtree(ignore_errors=True)
        self.path.build_path.mkdir()

        self.python = hitchpylibrarytoolkit.project_build(
            "hitchbuildvagrant", self.path,
            self.given.get("python_version", "3.7.0")).bin.python

        if not self.path.cachestate.exists():
            self.path.cachestate.mkdir()

        for filename, contents in self.given.get("files", {}).items():
            filepath = self.path.state.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().makedirs()
            filepath.write_text(contents)

        if self.path.working_dir.exists():
            self.path.working_dir.rmtree(ignore_errors=True)
        self.path.working_dir.mkdir()

        self.example_py_code = (ExamplePythonCode(
            self.python, self.path.state).with_setup_code(
                self.given.get("setup", "").replace(
                    "/path/to/share",
                    self.path.cachestate)).with_terminal_size(
                        160, 100).with_long_strings(
                            share=str(self.path.cachestate),
                            build_path=str(self.path.build_path),
                            issue=str(self.given.get("issue")),
                            boxname=str(self.given.get("boxname")),
                            vmname=str(self.given.get("vmname")),
                            local_sync_path=str(self.path.localsync),
                        ))

    @no_stacktrace_for(HitchRunPyException)
    def run(self, code):
        self.example_py_code.with_code(code).run()

    def write_to_localsync(self, **files):
        for filename, contents in files.items():
            self.path.localsync.joinpath(filename).write_text(contents)

    def delete_localsync_file(self, filename):
        self.path.localsync.joinpath(filename).remove()

    def write_file(self, filename, contents):
        self.path.state.joinpath(filename).write_text(contents)

    def raises_exception(self, message=None, exception_type=None):
        try:
            result = self.example_py_code.expect_exceptions().run(
                self.path.state, self.python)
            result.exception_was_raised(exception_type, message.strip())
        except ExpectedExceptionMessageWasDifferent as error:
            if self.settings.get("rewrite"):
                self.current_step.update(message=error.actual_message)
            else:
                raise

    def file_contains(self, filename, contents):
        assert (self.path.working_dir.joinpath(filename).bytes().decode("utf8")
                == contents)

    @validate(duration=Float())
    def sleep(self, duration):
        import time

        time.sleep(duration)

    def pause(self, message="Pause"):
        import IPython

        IPython.embed()

    def on_failure(self, reason):
        pass

    def tear_down(self):
        for vagrantfile in pathquery(self.path.state).named("Vagrantfile"):
            Command("vagrant", "destroy",
                    "-f").in_dir(vagrantfile.abspath().dirname()).run()