Example #1
0
def load_scheme(yaml_text):
    """Parse and validate a scheme definition given as YAML text.

    Some optional keys have enforced default values here; the remaining
    optional keys are expected to be read back with dict.get().
    """
    # Sub-schema for the optional per-directive 'prepare' section.
    prepare_schema = Map({
        Optional('remove_whitespace', default=False): Bool(),
        Optional('remove_characters', default=['']): Seq(Str()),
        Optional('strip_characters', default=['']): Seq(Str()),
    })
    # Sub-schema for the optional per-directive 'validate' section.
    validate_schema = Map({
        Optional('alphabet'): Enum(ALPHABETS),
        Optional('min_length'): Int(),
        Optional('max_length'): Int(),
    })
    directive_schema = Map({
        Optional('description'): Str(),
        'formats': Seq(Enum(FORMATS)),
        Optional('prepare'): prepare_schema,
        Optional('validate'): validate_schema,
        Optional('target'): Str(),
        Optional('helper', default=False): Bool(),
    })
    encoding_schema = Map({
        'type': Enum(ENCODINGS),
        Optional('length', default=0): Int(),
        Optional('prefix', default=''): Str(),
        Optional('separator'): Map({
            'character': Str(),
            'interval': Int(),
        }),
    })
    # Top level: arbitrary scheme names mapped to scheme definitions.
    scheme_schema = MapPattern(
        Str(),
        Map({
            'description': Str(),
            Optional('alias'): Str(),
            'version': Str(),
            'directives': MapPattern(Str(), directive_schema),
            'algorithm': Enum(ALGORITHMS),
            'encodings': MapPattern(Str(), encoding_schema),
        }))
    return load(yaml_text, scheme_schema)
    def _get_info(cls):
        """Return the parsed configuration, loading it on first use.

        The parsed result is cached on ``cls._info``; later calls return
        the cache without re-reading or re-validating the config file.
        """
        if not cls._info:
            # strictyaml schema for the config file
            # (see https://hitchdev.com/strictyaml).  Built inside the
            # cache check so cache hits skip the construction entirely
            # (previously it was rebuilt on every call, cached or not).
            schema = Map({
                Optional("knobs"):
                Map({
                    Optional("log_to_console"): Bool(),
                    Optional("log_level_debug"): Bool(),
                })
                | EmptyDict(),
                "globals":
                Map({
                    "topic_prefix": Str(),
                    Optional(
                        "reconnect_interval",
                        default=const.MQTT_DEFAULT_RECONNECT_INTERVAL,
                    ): Float(),
                    Optional("poll_interval",
                             default=const.MYQ_DEFAULT_POLL_INTERVAL): Float(),
                    Optional(
                        "periodic_mqtt_report",
                        default=const.DEFAULT_PERIODIC_MQTT_REPORT,
                    ): Float(),
                    Optional("user_agent"): Str(),
                }),
                "mqtt":
                Map({
                    "host": Str(),
                    Optional("client_id",
                             default=const.MQTT_DEFAULT_CLIENT_ID): Str(),
                    Optional("username"): Str(),
                    Optional("password"): Str(),
                }),
                "myq":
                Map({
                    "email": Email(),
                    "password": Str(),
                }),
                Optional("alias"): MapPattern(Str(), Str()) | EmptyDict(),
            })
            config_filename = cls._get_config_filename()
            logger.info("loading yaml config file %s", config_filename)
            with open(config_filename, "r") as ymlfile:
                raw_cfg = load(ymlfile.read(), schema).data
                cls._parse_raw_cfg(raw_cfg)
        return cls._info
 def __init__(self):
     """Build the condition schema and hand it to the base Map.

     The ALL_OF / NONE_OF / ONE_OF entries validate against this very
     schema object, which is what makes the structure recursive.
     """
     condition_fields = {
         Optional(conditions.ALL_OF): self,
         Optional(conditions.ENDS_EARLIER_THAN): Float(),
         Optional(conditions.ENDS_LATER_THAN): Float(),
         Optional(conditions.NONE_OF): self,
         Optional(conditions.ONE_OF): self,
         Optional(conditions.STARTS_EARLIER_THAN): Float(),
         Optional(conditions.STARTS_LATER_THAN): Float(),
         Optional(conditions.WEEKDAY): Bool(),
         Optional(conditions.WEEKEND): Bool(),
     }
     super().__init__(condition_fields)
Example #4
0
    def schema(cls):
        """Return the strictyaml validator shared by all basic blocks.

        ``name`` is not part of this sub schema: it is the title of the
        block.  ``default`` must be provided by the concrete sub schemas.
        """
        return Map({
            "label": Str(),
            "type": Str(),
            Optional("variable"): Str(),
            Optional("helptext"): Str(),
            Optional("visibility"): Int(),
            Optional("required"): Bool(),
            Optional("readonly"): Bool(),
        })
 def get_world_schema():
     """Return the strictyaml schema used to verify the world yaml."""
     robot_schema = Map({
         'name': Str(),
         'center_x': Int(),
         'center_y': Int(),
         'orientation': Int(),
         Optional('type'): Str(),
         Optional('parts'): Seq(ConfigChecker.get_robot_part_schema()),
     })
     obstacle_schema = Map({
         'name': Str(),
         'type': Str(),
         Optional('outer_spacing'): Int(),
         Optional('depth'): Int(),
         Optional('color'): CommaSeparated(Int()),
         Optional('border_width'): Int(),
         Optional('inner_radius'): Int(),
         Optional('x'): Int(),
         Optional('y'): Int(),
         Optional('width'): Int(),
         Optional('height'): Int(),
         Optional('angle'): Int(),
         Optional('movable'): Bool(),
         Optional('hole'): Bool(),
         Optional('radius'): Int(),
     })
     return Map({
         'robots': Seq(robot_schema),
         'board_height': Int(),
         'board_width': Int(),
         'board_color': CommaSeparated(Int()),
         'obstacles': Seq(obstacle_schema),
     })
Example #6
0
 def _get_base_part_mapping(cls) -> dict:
     """Base mapping for segment/phase schemas."""
     coeff_schema = CommaSeparated(Float()) | Str()
     polar_schema = Map({"CL": coeff_schema, "CD": coeff_schema}) | Str()
     # TODO: this mapping covers all possible segments, but some options are relevant
     #  only for some segments. A better check could be done in second-pass validation.
     return {
         Optional("target", default=None): cls._get_target_schema(),
         Optional("engine_setting", default=None): cls._get_value_schema(Str(), False),
         Optional(POLAR_TAG, default=None): polar_schema,
         Optional("thrust_rate", default=None): cls._get_value_schema(has_unit=False),
         Optional("climb_thrust_rate", default=None): cls._get_value_schema(has_unit=False),
         Optional("time_step", default=None): cls._get_value_schema(),
         Optional("maximum_flight_level", default=None): cls._get_value_schema(has_unit=False),
         Optional("mass_ratio", default=None): cls._get_value_schema(has_unit=False),
         Optional("reserve_mass_ratio", default=None): cls._get_value_schema(has_unit=False),
         Optional("use_max_lift_drag_ratio", default=None): cls._get_value_schema(Bool(), False),
     }
Example #7
0
def _personal_settings():
    """Load personal settings, writing a default settings file if absent."""
    settings_file = DIR.key.joinpath("personalsettings.yml")

    if not settings_file.exists():
        # Seed the file with sensible defaults on first run.
        default_yaml = ("engine:\n"
                        "  rewrite: no\n"
                        "  cprofile: no\n"
                        "params:\n"
                        "  python version: 3.5.0\n")
        settings_file.write_text(default_yaml)

    settings_schema = Map({
        "engine": Map({
            "rewrite": Bool(),
            "cprofile": Bool()
        }),
        "params": Map({"python version": Str()}),
    })
    return load(settings_file.bytes().decode("utf8"), settings_schema)
Example #8
0
def load_data(filename):
    """Read *filename* and return its validated contents as plain data."""
    # Innermost mapping: each entry carries a description, a free/paid
    # flag and a link; extra keys matching '.+' are allowed by the
    # second positional Regex argument.
    entry_schema = Map({
        'Description': Regex('.+'),
        'Free': Bool(),
        'Link': Url(),
    }, Regex(u'.+'))
    schema = MapPattern(
        Regex(u'[A-Za-z. ]+'),
        MapPattern(Regex(u'[A-Za-z\\-. ]+'), entry_schema))
    with open(filename) as f:
        return strictyaml.load(f.read(), schema).data
Example #9
0
class Config:
    """Application configuration parsed from YAML via strictyaml."""

    SCHEMA = Map(
        dict(image_scale=Float(),
             frame_limit=Int(),
             dump_stats=Bool(),
             alignment=Map(
                 dict(enabled=Bool(),
                      mode=Str(),
                      max_iterations=Int(),
                      termination_eps=Float()))))

    def __init__(self, file_or_yaml=None):
        """Parse config from a YAML string, an open file object, or —
        by default — the ``config.yaml`` file in the working directory.

        The previous default, ``file_or_yaml=open('config.yaml', 'r')``,
        was evaluated once at function-definition time: the file was
        opened on import (failing if absent) and the single shared handle
        was already closed after the first default call.  A ``None``
        sentinel keeps the same default behaviour while opening the file
        lazily, per call, and closing it deterministically.
        """
        if file_or_yaml is None:
            with open('config.yaml', 'r') as f:
                yaml_str = f.read()
        elif isinstance(file_or_yaml, str):
            yaml_str = file_or_yaml
        else:
            yaml_str = file_or_yaml.read()
            file_or_yaml.close()
        self.cfg = load(yaml_str, Config.SCHEMA).data

    def get(self, key):
        """Return the top-level config entry for *key*."""
        return self.cfg[key]
Example #10
0
def is_pipelines_config_valid(strictyaml_pipelines: YAML) -> bool:
    """Return True if the document matches the pipelines schema.

    The return annotation previously claimed ``YAML`` although the
    function always returns a bool; fixed to ``bool``.

    TODO: Refactor to test and analyzer specific config validation.
    """
    # Shared shape for the optional "dirs" and "files" entries.
    scoped_path_schema = Seq(
        Map({
            "path": Str(),
            Optional("full-scope", default=False): Bool()
        }))
    pipelines_schema = Map({
        "pipelines":
        Seq(
            Map({
                "name": Str(),
                "type": Enum(["test", "analyzer"]),
                Optional("coverage"): Str(),
                Optional("commands"): Map({
                    "partial-scope": Str(),
                    "full-scope": Str()
                }),
                Optional("dirs"): scoped_path_schema,
                Optional("files"): scoped_path_schema,
            }))
    })
    try:
        strictyaml_pipelines.revalidate(pipelines_schema)
        return True
    except YAMLValidationError:
        return False
def config() -> Config:
    """Read config.yaml from CONFIG_DIR and build a Config object."""
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)

    config_file = CONFIG_DIR / 'config.yaml'
    if not config_file.exists():
        sys.exit(f'{config_file} not found')

    schema = Map({
        'restaurants':
        Seq(Map({
            'url': Str(),
            'name': Str(),
        })),
        'mail':
        Map({
            'enable': Bool(),
            'sender': Str(),
            'password': Str(),
            'smtp_server': Str(),
            'recipients': UniqueSeq(Str()),
        }),
    })

    parsed = load(config_file.read_text(), schema)
    _config = Config()

    for entry in parsed.get('restaurants', []):
        _config.restaurants.append(Restaurant(entry['url'], entry['name']))

    mail_section = parsed['mail']
    _config.mail = Mail(
        enable=mail_section['enable'],
        sender=mail_section['sender'],
        password=mail_section['password'],
        smtp_server=mail_section['smtp_server'],
        recipients=mail_section['recipients'],
    )

    return _config
def build_schema_for_regs():
    """
    Returns
    -------
    strictyaml.Map
        schema used for validation and type-coercion
    """
    # Regularizers that additionally accept a "relative" flag.
    relative_capable = ["SmoothSparseThetaRegularizer",
                        "SmoothSparsePhiRegularizer",
                        "DecorrelatorPhiRegularizer"]
    schemas = {}
    for name in artm.regularizers.__all__:
        if "Regularizer" not in name:
            continue
        reg_class = getattr(artm.regularizers, name)
        signature_schema = build_schema_from_signature(reg_class)
        if name in relative_capable:
            signature_schema[Optional("relative", default=None)] = Bool()
        wrapped = wrap_in_map(signature_schema)
        schemas[reg_class.__name__] = Map({reg_class.__name__: wrapped})

    return schemas
Example #13
0
def choose_validator(param):
    """
    Parameters
    ----------
    param : inspect.Parameter

    Returns
    -------
    instance of strictyaml.Validator
    """
    # Simple builtin annotations map directly to strictyaml validators;
    # identity comparison mirrors the original `annotation is int` checks.
    for annotation_type, validator_factory in ((int, Int), (float, Float),
                                               (bool, Bool), (str, Str)):
        if param.annotation is annotation_type:
            return validator_factory()
    # Fall back to the hardcoded per-name table, then to Any.
    if param.name in ARTM_TYPES:
        return ARTM_TYPES[param.name]
    return Any()
Example #14
0
class WooSchema:
    """Schema for localization YAML files."""

    # https://github.com/woocart/woocart-defaults/blob/master/src/importers/class-woopage.php#L14
    productMeta = {
        "title": Str(),
        "description": Str(),
        Optional("price"): Str(),
        Optional("category"): Str(),
        "images": Seq(Str()),
    }

    # https://github.com/woocart/woocart-defaults/blob/master/src/importers/class-woopage.php#L14
    pageMeta = {
        "post_title": Str(),
        Optional("post_name"): Str(),
        Optional("post_excerpt"): Str(),
        "post_status": Enum(["draft", "publish"]),
        "post_type": Enum(["page", "post"]),
        Optional("post_category"): Str(),
        Optional("meta_input"): MapPattern(Str(), Str()),
        Optional("woocart_defaults"): MapPattern(Str(), Str()),
    }

    # WooCommerce / WordPress option keys with their allowed values.
    localization = {
        "woo/woocommerce_default_country": Enum(COUNTRIES),
        "wp/date_format": Enum(["d/m/Y", "Y-m-d", "F j, Y", "m/d/Y"]),
        "wp/time_format": Enum(["H:i", "g:i A"]),
        "wp/start_of_week": Enum(["1", "2", "3", "4", "5", "6", "7"]),
        "wp/timezone_string": Enum(TIMEZONES),
        "wp/blog_charset": Enum(["UTF-8"]),
        "wp/DEFAULT_WPLANG": Enum(WPLANGS),
        Optional("wp/blogdescription"): Str(),
        Optional("wp/woocommerce_demo_store_notice"): Str(),
        "woo/woocommerce_weight_unit": Enum(["kg", "k", "lbs", "oz"]),
        "woo/woocommerce_dimension_unit": Enum(["m", "cm", "mm", "in", "yd"]),
        "woo/woocommerce_currency": Enum(CURRENCIES),
        "woo/woocommerce_currency_pos":
        Enum(["right_space", "left_space", "left", "right"]),
        "woo/woocommerce_price_thousand_sep": Enum([".", ","]),
        "woo/woocommerce_price_decimal_sep": Enum([",", "."]),
        "woo/woocommerce_price_num_decimals": Enum(["2"]),
        Optional("woo/woocommerce_tax_classes"): Seq(Str()),
        "woo/woocommerce_bacs_settings":
        Map({
            "enabled": Bool(),
            Optional("title"): Str(),
            Optional("description"): Str(),
            Optional("instructions"): Str(),
            Optional("account_name"): Str(),
            Optional("account_number"): Str(),
            Optional("sort_code"): Str(),
            Optional("bank_name"): Str(),
            Optional("iban"): Str(),
            Optional("bic"): Str(),
            Optional("account_details"): Str(),
        }),
        "woo/woocommerce_cod_settings":
        Map({
            "enabled": Bool(),
            Optional("title"): Str(),
            Optional("description"): Str(),
            Optional("instructions"): Str(),
            Optional("enable_for_methods"): Str(),
            Optional("enable_for_virtual"): Bool(),
        }),
        "woo/woocommerce_checkout_privacy_policy_text": Str(),
        "woo/woocommerce_registration_privacy_policy_text": Str(),
        ".woo/woocommerce_bacs_settings_format": Enum(["serialized"]),
        ".woo/woocommerce_cod_settings_format": Enum(["serialized"]),
        Optional(".woo/woocommerce_tax_classes_format"):
        Enum(["implode_newline"]),
    }

    @staticmethod
    def load(path: Path, schema_pointer):
        """Load and validate .yaml file.

        NOTE(review): the original wrapped the final load in a
        ``try/except YAMLError: raise`` (a no-op re-raise) followed by an
        unreachable ``return as_document(schema)``; both removed as dead
        code — behavior is unchanged.
        """
        # Deep-copy so per-file tweaks below don't mutate the shared schema.
        schema = copy.deepcopy(schema_pointer)
        with path.open() as f:
            yaml = f.read()
            data = yaml_load(yaml, Any())
            is_template = path.name == "template.yaml"

            # Replace real Country and Timezone values with fakes
            if is_template:
                schema["woo/woocommerce_default_country"] = Enum(["LL"])
                schema["wp/timezone_string"] = Enum(["Region/Country"])
                schema["wp/DEFAULT_WPLANG"] = Enum(["ll_LL"])
                schema["woo/woocommerce_currency"] = Enum(["LLL"])

            if "woo/woocommerce_tax_classes" in data:
                # Inspect that tax classes and taxes match

                # create enum for taxes from defined tax_classes
                tax_classes = [
                    str(tax).lower().replace(" ", "-")
                    for tax in data["woo/woocommerce_tax_classes"]
                ]
                # +1 is for standard schema which is never defined in tax class
                for x in range(len(tax_classes) + 1):
                    # start counting with 1
                    schema[f"wootax/{x+1}"] = Map({
                        "country":
                        Enum(["LL"]) if is_template else Enum(COUNTRIES),
                        "state":
                        Str(),
                        "rate":
                        Decimal(),
                        "name":
                        Str(),
                        "priority":
                        Int(),
                        "compound":
                        Int(),
                        "shipping":
                        Int(),
                        "order":
                        Int(),
                        "class":
                        Enum([""]) if x == 0 else Enum(tax_classes),
                        "locations":
                        Map({}),
                    })
            return yaml_load(yaml, Map(schema), path)

    @staticmethod
    def load_string(data: bytes, schema, path: str):
        """Load and validate yaml data.

        NOTE(review): the no-op ``try/except YAMLError: raise`` wrapper and
        the unreachable trailing ``return as_document(schema)`` were removed
        as dead code — behavior is unchanged.
        """
        return yaml_load(data, Map(schema), path)
Example #15
0
# Schema for the optional "report" section: how results are reported,
# via sentry and/or mail.
_report_schema = Map({
    Opt("sentry"): Map({
        # The DSN may be given inline, read from a file, or taken from an
        # environment variable.
        Opt("dsn"): Map({
            Opt("value"): EmptyNone() | Str(),
            Opt("fromFile"): EmptyNone() | Str(),
            Opt("fromEnvVar"): EmptyNone() | Str(),
        }),
        Opt("fingerprint"): Seq(Str()),
        Opt("level"): Str(),
        Opt("extra"): MapPattern(Str(), Str() | Int() | Bool()),
        Opt("body"): Str(),
    }),
    Opt("mail"): Map({
        "from": EmptyNone() | Str(),
        "to": EmptyNone() | Str(),
        Opt("smtpHost"): Str(),
        Opt("smtpPort"): Int(),
        Opt("subject"): Str(),
        Opt("body"): Str(),
    }),
})

_job_defaults_common = {
Example #16
0
from inspect import signature, Parameter
from strictyaml import Map, Str, Int, Seq, Any, Optional, Float, EmptyNone, Bool
from strictyaml import dirty_load

# TODO: use stackoverflow.com/questions/37929851/parse-numpydoc-docstring-and-access-components
# for now just hardcode most common / important types
# Hardcoded strictyaml validators for the most common ARTM parameter
# names, keyed by parameter name; used as a by-name fallback when a
# signature carries no usable type annotation.
ARTM_TYPES = {
    "tau": Float(),
    # May be a single string, a list of strings, or empty.
    "topic_names": Str() | Seq(Str()) | EmptyNone(),
    # TODO: handle class_ids in model and in regularizers separately
    "class_ids": Str() | Seq(Str()) | EmptyNone(),
    "gamma": Float() | EmptyNone(),
    "seed": Int(),
    "num_document_passes": Int(),
    "num_processors": Int(),
    "cache_theta": Bool(),
    "reuse_theta": Bool(),
    "theta_name": Str()
}


element = Any()
base_schema = Map({
    'regularizers': Seq(element),
    'stages': Seq(element),
    'model': Map({
        "dataset_path": Str(),
        "modalities_to_use": Seq(Str()),
        "main_modality": Str()
    }),
    'topics': Map({
Example #17
0
class Engine(BaseEngine):
    """Python engine for running tests."""

    # "given" preconditions a story may declare; each value is validated
    # against the associated strictyaml schema.
    given_definition = GivenDefinition(
        python_version=GivenProperty(Str()),
        selenium_version=GivenProperty(Str()),
        website=GivenProperty(MapPattern(Str(), Str())),
        selectors_yml=GivenProperty(Str()),
        javascript=GivenProperty(Str()),
        setup=GivenProperty(Str()),
        code=GivenProperty(Str()),
    )

    # Story metadata properties (status and docs).
    info_definition = InfoDefinition(
        status=InfoProperty(schema=Enum(["experimental", "stable"])),
        docs=InfoProperty(schema=Str()),
    )

    def __init__(self, keypath, settings):
        # keypath: path collection for the project; settings: mapping read
        # with .get() below (e.g. "rewrite", "cprofile").
        self.path = keypath
        self.settings = settings

    def set_up(self):
        """Set up your applications and the test environment."""
        # Recreate a clean state directory for every run.
        self.path.state = self.path.gen.joinpath("state")
        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        # Render the web app under test from the htmltemplate directory,
        # injecting the story's javascript and extra website files.
        self.path.profile = self.path.gen.joinpath("profile")
        dirtemplate.DirTemplate(
            "webapp", self.path.key / "htmltemplate", self.path.state
        ).with_vars(javascript=self.given.get("javascript", "")).with_files(
            base_html={
                filename: {
                    "content": content
                }
                for filename, content in self.given.get("website", {}).items()
            }).ensure_built()

        self.path.state.joinpath("selectors.yml").write_text(
            self.given["selectors.yml"])

        # Serve the generated site via `python -m http.server` and wait
        # until it reports readiness.
        self.server = (python("-m", "http.server").in_dir(self.path.state /
                                                          "webapp").pexpect())
        self.server.expect("Serving HTTP on 0.0.0.0")

        if not self.path.profile.exists():
            self.path.profile.mkdir()

        # Build the project environment for the requested python and
        # selenium versions; keep its python interpreter for running code.
        self.python = project_build(self.path, self.given["python version"],
                                    self.given["selenium version"]).bin.python

        self.example_py_code = (ExamplePythonCode(
            self.python,
            self.path.state).with_setup_code(self.given.get(
                "setup", "")).with_terminal_size(160, 100).with_long_strings())

    @validate(
        # will_output / raises entries may be a single string or a mapping
        # with separate "in python 2" / "in python 3" variants.
        code=Str(),
        will_output=Map({
            "in python 2": Str(),
            "in python 3": Str()
        }) | Str(),
        raises=Map({
            Optional("type"):
            Map({
                "in python 2": Str(),
                "in python 3": Str()
            })
            | Str(),
            Optional("message"):
            Map({
                "in python 2": Str(),
                "in python 3": Str()
            })
            | Str(),
        }),
        in_interpreter=Bool(),
    )
    def run(self, code, will_output=None, raises=None, in_interpreter=False):
        """Run *code*, optionally checking its output or raised exception."""
        if in_interpreter:
            # Mimic a REPL: print the repr of the final line's value.
            code = "{0}\nprint(repr({1}))".format(
                "\n".join(code.strip().split("\n")[:-1]),
                code.strip().split("\n")[-1])

        to_run = self.example_py_code.with_code(code)

        if self.settings.get("cprofile"):
            to_run = to_run.with_cprofile(
                self.path.profile.joinpath("{0}.dat".format(self.story.slug)))

        result = (to_run.expect_exceptions().run()
                  if raises is not None else to_run.run())

        if will_output is not None:
            # Compare with trailing whitespace stripped per line.
            actual_output = "\n".join(
                [line.rstrip() for line in result.output.split("\n")])
            try:
                Templex(will_output).assert_match(actual_output)
            except AssertionError:
                # In rewrite mode, update the story instead of failing.
                if self.settings.get("rewrite"):
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        if raises is not None:
            differential = False  # Difference between python 2 and python 3 output?
            exception_type = raises.get("type")
            message = raises.get("message")

            # Non-string entries hold per-python-version variants; pick the
            # one matching the python version under test.
            if exception_type is not None:
                if not isinstance(exception_type, str):
                    differential = True
                    exception_type = (
                        exception_type["in python 2"]
                        if self.given["python version"].startswith("2") else
                        exception_type["in python 3"])

            if message is not None:
                if not isinstance(message, str):
                    differential = True
                    message = (message["in python 2"]
                               if self.given["python version"].startswith("2")
                               else message["in python 3"])

            try:
                result.exception_was_raised(exception_type, message)
            except ExpectedExceptionMessageWasDifferent:
                # Never rewrite differential expectations: that would
                # collapse the python 2 / python 3 variants into one.
                if self.settings.get("rewrite") and not differential:
                    new_raises = raises.copy()
                    new_raises["message"] = result.exception.message
                    self.current_step.update(raises=new_raises)
                else:
                    raise

    def do_nothing(self):
        """No-op story step."""
        pass

    def pause(self, message="Pause"):
        # Drop into an interactive IPython shell for debugging.
        import IPython

        IPython.embed()

    def tear_down(self):
        # Stop the http.server subprocess started in set_up().
        self.server.kill(signal.SIGTERM)
        self.server.wait()
Example #18
0
        "from": EmptyNone() | Str(),
        "to": EmptyNone() | Str(),
        Opt("smtpHost"): Str(),
        Opt("smtpPort"): Int(),
        Opt("subject"): Str(),
        Opt("body"): Str(),
    })
})

_job_defaults_common = {
    Opt("shell"):
    Str(),
    Opt("concurrencyPolicy"):
    Enum(['Allow', 'Forbid', 'Replace']),
    Opt("captureStderr"):
    Bool(),
    Opt("captureStdout"):
    Bool(),
    Opt("saveLimit"):
    Int(),
    Opt("failsWhen"):
    Map({
        "producesStdout": Bool(),
        Opt("producesStderr"): Bool(),
        Opt("nonzeroReturn"): Bool(),
    }),
    Opt("onFailure"):
    Map({
        Opt("retry"):
        Map({
            "maximumRetries": Int(),
Example #19
0
 Str(),
 ModelMetadataKeys.TARGET_TYPE:
 Str(),
 Optional(ModelMetadataKeys.ENVIRONMENT_ID):
 Str(),
 Optional(ModelMetadataKeys.VALIDATION):
 Map({
     "input": Str(),
     Optional("targetName"): Str()
 }),
 Optional(ModelMetadataKeys.MODEL_ID):
 Str(),
 Optional(ModelMetadataKeys.DESCRIPTION):
 Str(),
 Optional(ModelMetadataKeys.MAJOR_VERSION):
 Bool(),
 Optional(ModelMetadataKeys.INFERENCE_MODEL):
 Map({
     Optional("targetName"): Str(),
     Optional("positiveClassLabel"): Str(),
     Optional("negativeClassLabel"): Str(),
     Optional("classLabels"): Seq(Str()),
     Optional("classLabelsFile"): Str(),
     Optional("predictionThreshold"): Int(),
 }),
 Optional(ModelMetadataKeys.TRAINING_MODEL):
 Map({Optional("trainOnProject"): Str()}),
 Optional(ModelMetadataKeys.HYPERPARAMETERS):
 Any(),
 Optional(ModelMetadataKeys.VALIDATION_SCHEMA):
 get_type_schema_yaml_validator(),
Example #20
0
#: `repo` metadata keys.
REPO_KEYS = {
    "name": Str(),  # Downloader doesn't use this, but cogs.red might.
    "short": Str(),
    "description": Str(),
    "install_msg": Str(),
    "author": Seq(Str()),
}

#: Metadata keys common to `shared_fields` and `cogs` schemas.
COMMON_KEYS = {
    Optional("min_bot_version"): RedVersion(),
    Optional("max_bot_version"): RedVersion(),
    Optional("min_python_version"): PythonVersion(),
    # NOTE(review): the second positional argument looks like the key's
    # default value (strictyaml `Optional(key, default)`) — confirm.
    Optional("hidden", False): Bool(),
    Optional("disabled", False): Bool(),
    Optional("type", "COG"): Enum(["COG", "SHARED_LIBRARY"]),
}

#: `shared_fields` metadata keys.
SHARED_FIELDS_KEYS = {
    "install_msg": Str(),
    "author": Seq(Str()),
    # Merge in the optional keys shared with the `cogs` schema.
    **COMMON_KEYS,
}

#: `cogs` metadata keys.
COG_KEYS = {
    "name": Str(),  # Downloader doesn't use this but I can set friendlier name
    "short": Str(),
Example #21
0
class Engine(BaseEngine):
    """Python engine for running tests.

    Runs a story's code snippet in a built (or supplied) python
    environment and checks its output / raised exceptions against the
    story's expectations.  Can optionally rewrite stories in place and
    cProfile each run.
    """

    given_definition = GivenDefinition(
        yaml_snippet=GivenProperty(
            Str(), document="yaml_snippet:\n```yaml\n{{ yaml_snippet }}\n```"),
        yaml_snippet_1=GivenProperty(
            Str(),
            document="yaml_snippet_1:\n```yaml\n{{ yaml_snippet_1 }}\n```"),
        yaml_snippet_2=GivenProperty(
            Str(),
            document="yaml_snippet_2:\n```yaml\n{{ yaml_snippet_2 }}\n```"),
        modified_yaml_snippet=GivenProperty(
            Str(),
            document=
            "modified_yaml_snippet:\n```yaml\n{{ modified_yaml_snippet }}\n```"
        ),
        python_version=GivenProperty(Str()),
        ruamel_version=GivenProperty(Str()),
        setup=GivenProperty(Str(), document="```python\n{{ setup }}\n```"),
    )

    info_definition = InfoDefinition(
        status=InfoProperty(schema=Enum(["experimental", "stable"])),
        docs=InfoProperty(schema=Str()),
        fails_on_python_2=InfoProperty(schema=Bool()),
        description=InfoProperty(schema=Str()),
        experimental=InfoProperty(schema=Bool()),
    )

    def __init__(self,
                 keypath,
                 python_path=None,
                 rewrite=False,
                 cprofile=False):
        """Store run options.

        keypath: hitch key path object pointing at the project directories.
        python_path: use this interpreter instead of building an env.
        rewrite: rewrite failing expectations back into the story.
        cprofile: profile each snippet run into a .dat file.
        """
        self.path = keypath
        self._python_path = python_path
        self._rewrite = rewrite
        self._cprofile = cprofile

    def set_up(self):
        """Set up your applications and the test environment."""
        self.path.profile = self.path.gen.joinpath("profile")

        if not self.path.profile.exists():
            self.path.profile.mkdir()

        if not self._python_path:
            # Build an environment with the requested python and
            # ruamel.yaml versions.
            self.python = hitchpylibrarytoolkit.project_build(
                "strictyaml",
                self.path,
                self.given["python version"],
                {
                    "ruamel.yaml": self.given["ruamel version"]
                },
            ).bin.python
        else:
            self.python = Path(self._python_path)
            assert self.python.exists()

        # Pre-bake the snippet runner with setup code and yaml strings.
        self.example_py_code = (ExamplePythonCode(
            self.python, self.path.gen).with_code(self.given.get(
                "code", "")).with_setup_code(self.given.get(
                    "setup", "")).with_terminal_size(160, 100).with_strings(
                        yaml_snippet_1=self.given.get("yaml_snippet_1"),
                        yaml_snippet=self.given.get("yaml_snippet"),
                        yaml_snippet_2=self.given.get("yaml_snippet_2"),
                        modified_yaml_snippet=self.given.get(
                            "modified_yaml_snippet"),
                    ))

    @no_stacktrace_for(AssertionError)
    @no_stacktrace_for(HitchRunPyException)
    @validate(
        code=Str(),
        will_output=Map({
            "in python 2": Str(),
            "in python 3": Str()
        }) | Str(),
        raises=Map({
            Optional("type"): CODE_TYPE,
            Optional("message"): CODE_TYPE
        }),
        in_interpreter=Bool(),
    )
    def run(
        self,
        code,
        will_output=None,
        yaml_output=True,
        raises=None,
        in_interpreter=False,
    ):
        """Run a story code snippet and check its output or exception.

        will_output may be a plain string or a per-python-version
        mapping; raises allows per-version "type"/"message" values too.
        """
        if in_interpreter:
            # Mimic a REPL: echo the repr of the final expression.
            code = "{0}\nprint(repr({1}))".format(
                "\n".join(code.strip().split("\n")[:-1]),
                code.strip().split("\n")[-1])
        to_run = self.example_py_code.with_code(code)

        if self._cprofile:
            to_run = to_run.with_cprofile(
                self.path.profile.joinpath("{0}.dat".format(self.story.slug)))

        if raises is None:
            # BUGFIX: the original re-tested `raises is not None` in a
            # conditional expression here, which is always False in this
            # branch — it unconditionally ran to_run.run().  Simplified.
            result = to_run.run()

            if will_output is not None:
                actual_output = "\n".join(
                    [line.rstrip() for line in result.output.split("\n")])
                try:
                    Templex(will_output).assert_match(actual_output)
                except AssertionError:
                    if self._rewrite:
                        # Rewrite the actual output into the story.
                        self.current_step.update(
                            **{"will output": actual_output})
                    else:
                        raise
        else:
            # Difference between python 2 and python 3 output?
            differential = False
            exception_type = raises.get("type")
            message = raises.get("message")

            if exception_type is not None and not isinstance(
                    exception_type, str):
                differential = True
                exception_type = (
                    exception_type["in python 2"]
                    if self.given["python version"].startswith("2") else
                    exception_type["in python 3"])

            if message is not None and not isinstance(message, str):
                differential = True
                message = (message["in python 2"]
                           if self.given["python version"].startswith("2")
                           else message["in python 3"])

            try:
                result = to_run.expect_exceptions().run()
                result.exception_was_raised(exception_type, message)
            except ExpectedExceptionMessageWasDifferent:
                if self._rewrite and not differential:
                    # Rewrite the actual exception message into the story.
                    new_raises = raises.copy()
                    new_raises["message"] = result.exception.message
                    self.current_step.update(raises=new_raises)
                else:
                    raise

    def pause(self, message="Pause"):
        """Drop into an interactive IPython shell for debugging."""
        import IPython

        IPython.embed()

    def on_success(self):
        """Persist rewritten stories and print profile stats if enabled."""
        if self._rewrite:
            self.new_story.save()
        if self._cprofile:
            self.python(
                self.path.key.joinpath("printstats.py"),
                self.path.profile.joinpath("{0}.dat".format(self.story.slug)),
            ).run()
Exemple #22
0
    # it is a place holder if user wants to add some fields and read them on his own
    CUSTOM_PREDICTOR = "customPredictor"


# Schema used to validate the contents of model-metadata.yaml.
MODEL_CONFIG_SCHEMA = Map({
    ModelMetadataKeys.NAME:
    Str(),
    ModelMetadataKeys.TYPE:
    Str(),
    ModelMetadataKeys.TARGET_TYPE:
    Str(),
    Optional(ModelMetadataKeys.ENVIRONMENT_ID):
    Str(),
    Optional(ModelMetadataKeys.VALIDATION):
    Map({
        "input": Str(),
        Optional("targetName"): Str()
    }),
    Optional(ModelMetadataKeys.MODEL_ID):
    Str(),
    Optional(ModelMetadataKeys.DESCRIPTION):
    Str(),
    Optional(ModelMetadataKeys.MAJOR_VERSION):
    Bool(),
    Optional(ModelMetadataKeys.INFERENCE_MODEL):
    Map({
        "targetName": Str(),
        Optional("positiveClassLabel"): Str(),
        Optional("negativeClassLabel"): Str(),
        Optional("classLabels"): Seq(Str()),
        Optional("classLabelsFile"): Str(),
        Optional("predictionThreshold"): Int(),
    }),
    Optional(ModelMetadataKeys.TRAINING_MODEL):
    Map({Optional("trainOnProject"): Str()}),
    Optional(ModelMetadataKeys.HYPERPARAMETERS):
    Any(),
    Optional(ModelMetadataKeys.CUSTOM_PREDICTOR):
    Any(),
})
Exemple #23
0
    def build(self):
        """Render the source template directory into the destination.

        Reads the optional dirtemplate.yml config, copies non-templated
        files verbatim (minus ignored ones), then renders templated files:
        either one output per entry in ``self._files`` (when 'filename'
        is set) or in place (when 'content' is set).
        """
        # Start from a clean build directory every time.
        if self._build_path.exists():
            self._build_path.rmtree()
        self._build_path.mkdir()

        # Load template configuration if present; otherwise nothing is
        # templated.
        if self._src_path.joinpath("dirtemplate.yml").exists():
            config = load(
                self._src_path.joinpath("dirtemplate.yml").text(),
                Map({
                    Optional("base templates"):
                    Str(),
                    "templated":
                    Seq(
                        MapPattern(
                            Str(),
                            Map({
                                Optional("content"): Bool(),
                                Optional("filename"): Bool(),
                            }))),
                })).data
        else:
            config = {"templated": []}

        src_paths = list(pathquery(self._src_path))

        # Each "templated" entry is a single-key {filename: options} map;
        # the first (only) key names the template file.
        templated_filenames = [
            list(template.keys())[0] for template in config['templated']
        ]

        if "base templates" in config:
            templated_filenames.extend(
                pathquery(self._src_path.joinpath(config['base templates'])))

        # Everything that is not templated, not the config file, not a
        # directory and not under "base templates" is copied verbatim.
        non_templated = []

        for srcpath in src_paths:
            relpath = srcpath.relpath(self._src_path)
            add = True

            if relpath in templated_filenames:
                add = False

            if relpath == "dirtemplate.yml":
                add = False

            if srcpath.isdir():
                add = False

            if "base templates" in config:
                if relpath.startswith(config['base templates']):
                    add = False

            if add:
                non_templated.append(relpath)

        for relpath in non_templated:
            dest_path = self._dest.joinpath(relpath)

            if not dest_path.dirname().exists():
                dest_path.dirname().makedirs()

            if str(relpath) not in self._ignore_files:
                self._src_path.joinpath(relpath).copy(dest_path)

        # Render templated files.
        for template_configuration in config['templated']:
            for src_path in src_paths:
                if not src_path.isdir():
                    relpath = src_path.relpath(self._src_path)

                    if relpath in template_configuration.keys():
                        if 'filename' in template_configuration[relpath]:
                            # One rendered output file per entry registered
                            # via with_files for this template's slug.
                            slug = slugify(relpath, separator=u'_')

                            if slug in self._files.keys():
                                for filename, variables in self._files[
                                        slug].items():
                                    dest_path = self._dest.joinpath(filename)

                                    if not dest_path.dirname().exists():
                                        dest_path.dirname().makedirs()

                                    # Global render vars first, then
                                    # per-file vars override them.
                                    render_vars = {}

                                    for name, var in self._render_vars.items():
                                        render_vars[name] = var

                                    for name, filevar in variables.items():
                                        render_vars[name] = filevar

                                    render_vars['thisdir'] = pathquery(
                                        dest_path.dirname())

                                    dest_path.write_text(
                                        render(
                                            src_path, self._functions,
                                            render_vars,
                                            base_templates(
                                                self._src_path,
                                                config.get("base templates"))))
                            else:
                                raise Exception(
                                    ("{0} templated filename exists but not "
                                     "specified with with_files".format(
                                         relpath)))
                        else:
                            # In-place render, controlled by 'content'.
                            dest_path = self._dest.joinpath(relpath)

                            if not dest_path.dirname().exists():
                                dest_path.dirname().makedirs()

                            render_vars = copy(self._render_vars)
                            render_vars['thisdir'] = pathquery(
                                dest_path.dirname())

                            # NOTE(review): 'content' is Optional in the
                            # schema; this KeyErrors when absent — confirm
                            # whether that is intended.
                            if template_configuration[relpath]['content']:
                                dest_path.write_text(
                                    render(
                                        src_path,
                                        self._functions,
                                        render_vars,
                                        base_templates(
                                            self._src_path,
                                            config.get("base templates")),
                                    ))
        self.refingerprint()
Exemple #24
0
    parsed = load(s, schema_ver, label=fname)
except InconsistentIndentationDisallowed as e:
    print('Use the same indentation across the file')
    print(e)
    sys.exit(1)
except YAMLError as e:
    print('YAML parsing error:')
    print(e)
    sys.exit(1)

# Top-level config schema: a required kiplot version header, optional
# preflight boolean switches, and an optional list of outputs (each
# validated elsewhere, hence Any()).
_PREFLIGHT_FLAGS = ("run_drc", "run_erc", "update_xml", "check_zone_fills",
                    "ignore_unconnected")
schema = Map({
    "kiplot": Map({"version": Int()}),
    Optional("preflight"): Map({Optional(flag): Bool()
                                for flag in _PREFLIGHT_FLAGS}),
    Optional("outputs"): Seq(Any()),
})

try:
    parsed = load(s, schema, label=fname)
except YAMLError as e:
    print('YAML parsing error:')
    print(e)
    sys.exit(1)
Exemple #25
0
    count_vocab_size,
    handle_regularizer,
)

import artm

from strictyaml import Map, Str, Int, Seq, Float, Bool
from strictyaml import Any, Optional, EmptyDict, EmptyNone, EmptyList
from strictyaml import dirty_load

# Cube and strategy classes this config format knows how to build.
SUPPORTED_CUBES = [CubeCreator, RegularizersModifierCube]
SUPPORTED_STRATEGIES = [PerplexityStrategy, GreedyStrategy]

# Maps a type name from the config file to its strictyaml validator.
TYPE_VALIDATORS = dict(int=Int(), bool=Bool(), str=Str(), float=Float())


def choose_key(param):
    """
    Parameters
    ----------
    param : inspect.Parameter

    Returns
    -------
    str or strictyaml.Optional
    """
# -*- coding:utf-8 -*-
# author: Xinge

from pathlib import Path

from strictyaml import Bool, Float, Int, Map, Seq, Str, as_document, load

# Schema for the "model_params" section of the config file.
model_params = Map({
    "model_architecture": Str(),
    "output_shape": Seq(Int()),
    # Bulk of the fields are plain integers.
    **{key: Int()
       for key in ("fea_dim", "out_fea_dim", "num_class",
                   "num_input_features")},
    "use_norm": Bool(),
    "init_size": Int(),
})

# Schema for the "dataset_params" section of the config file.
dataset_params = Map({
    **{key: Str() for key in ("dataset_type", "pc_dataset_type")},
    "ignore_label": Int(),
    **{key: Bool() for key in ("return_test", "fixed_volume_space")},
    "label_mapping": Str(),
    # Volume bounds are lists of floats.
    **{key: Seq(Float())
       for key in ("max_volume_space", "min_volume_space")},
})

train_data_loader = Map({
    "data_path": Str(),
Exemple #27
0
from datarobot_drum.drum.exceptions import DrumCommonException

# File and URL templates used when registering custom models.
CONFIG_FILENAME = "model-metadata.yaml"
DR_LINK_FORMAT = "{}/model-registry/custom-models/{}"
MODEL_LOGS_LINK_FORMAT = "{url}/projects/{project_id}/models/{model_id}/log"

# Schema used to validate model-metadata.yaml.
schema = Map({
    "name": Str(),
    "type": Str(),
    "environmentID": Str(),
    "targetType": Str(),
    "validation": Map({"input": Str(), Optional("targetName"): Str()}),
    Optional("modelID"): Str(),
    Optional("description"): Str(),
    Optional("majorVersion"): Bool(),
    Optional("inferenceModel"): Map({
        "targetName": Str(),
        Optional("positiveClassLabel"): Str(),
        Optional("negativeClassLabel"): Str(),
        Optional("predictionThreshold"): Int(),
    }),
    Optional("trainingModel"): Map({Optional("trainOnProject"): Str()}),
})


def _read_metadata(code_dir):
    code_dir = Path(code_dir)
Exemple #28
0
class Experiment:
    """
    This class orchestrates the analysis pipeline for our redox imaging experiments.
    """

    # strictyaml schema for settings.yaml; __init__ validates against it.
    experiment_schema = Map({
        # Knobs controlling the image-processing pipeline stages.
        "pipeline":
        Map({
            "strategy": Str(),
            "acquisition_method": Enum(["acquire", "mda"]),
            "trimmed_profile_length": Int(),
            "untrimmed_profile_length": Int(),
            "seg_threshold": Int(),
            "measurement_order": Int(),
            "measure_thickness": Float(),
            "reference_wavelength": Str(),
            "image_register": Int(),
            "channel_register": Int(),
            "population_register": Int(),
            "trimmed_regions": MapPattern(Str(), CommaSeparated(Float())),
            "untrimmed_regions": MapPattern(Str(), CommaSeparated(Float())),
        }),
        # Parameters for the redox (ratiometric) calculations.
        "redox":
        Map({
            "ratio_numerator": Str(),
            "ratio_denominator": Str(),
            "r_min": Float(),
            "r_max": Float(),
            "instrument_factor": Float(),
            "midpoint_potential": Float(),
            "z": Int(),
            "temperature": Float(),
        }),
        # Profile-registration (warping/smoothing) parameters.
        "registration":
        Map({
            "n_deriv": Float(),
            "warp_n_basis": Float(),
            "warp_order": Float(),
            "warp_lambda": Float(),
            "smooth_lambda": Float(),
            "smooth_n_breaks": Float(),
            "smooth_order": Float(),
            "rough_lambda": Float(),
            "rough_n_breaks": Float(),
            "rough_order": Float(),
        }),
        # What to persist after a run.
        "output":
        Map({
            "should_save_plots": Bool(),
            "should_save_profile_data": Bool(),
            "should_save_summary_data": Bool(),
        }),
    })

    # Pipeline intermediates; all None until the relevant stage has run.
    seg_images: xr.DataArray = None
    rot_fl: xr.DataArray = None
    rot_seg: xr.DataArray = None

    # Midlines fitted through the rotated segmentation masks.
    midlines: xr.DataArray = None

    # Untrimmed profiles: raw, standardized, channel-registered.
    untrimmed_raw_profiles: xr.DataArray = None
    untrimmed_std_profiles: xr.DataArray = None
    untrimmed_reg_profiles: xr.DataArray = None

    # Trimmed counterparts of the profiles above.
    trimmed_raw_profiles: xr.DataArray = None
    trimmed_std_profiles: xr.DataArray = None
    trimmed_reg_profiles: xr.DataArray = None

    # Warp functions produced by the registration steps.
    channel_warps: xr.DataArray = None
    std_warps: xr.DataArray = None

    def __init__(self, exp_dir):
        """Load config, compute experiment paths, and load images/masks.

        exp_dir: experiment directory; must contain a settings.yaml that
        validates against ``experiment_schema``.
        Raises ValueError when settings.yaml fails schema validation.
        """
        self.experiment_dir = Path(exp_dir)
        self.settings_path = self.experiment_dir.joinpath("settings.yaml")
        try:
            with open(self.settings_path, "r") as f:
                self.config = load(f.read(), self.experiment_schema).data
        except YAMLError:
            raise ValueError("Incorrectly specified config file.")

        # The directory name doubles as the experiment identifier.
        self.experiment_id = self.experiment_dir.stem

        # compute the filenames/paths for this experiment
        self.movement_path = self.experiment_dir.joinpath(self.experiment_id +
                                                          "-mvmt.csv")
        self.frame_map_path = self.experiment_dir.joinpath(self.experiment_id +
                                                           "-frame_map.csv")
        self.processed_images_dir = self.experiment_dir.joinpath(
            "processed_images")
        self.rot_seg_dir = self.processed_images_dir.joinpath("rot_seg")
        self.rot_fl_dir = self.processed_images_dir.joinpath("rot_fl")
        self.fl_imgs_dir = self.processed_images_dir.joinpath(
            "fluorescent_images")
        self.orig_images_path = self.processed_images_dir.joinpath("images.nc")
        self.seg_images_path = self.processed_images_dir.joinpath(
            "seg_images.nc")
        self.aligned_images_path = self.processed_images_dir.joinpath(
            "aligned_images.nc")
        self.aligned_seg_images_path = self.processed_images_dir.joinpath(
            "aligned_seg_images.nc")

        # load images
        self.images = self._load_raw_images()

        # try to load masks; missing masks are fine — segment_pharynxes
        # will generate them later.
        try:
            self.load_masks()
        except IOError:
            logging.info("No masks found in experiment directory")
            pass

    # Computed Filepaths

    @property
    def midlines_path(self) -> Path:
        """Location of the pickled midlines inside the analysis directory."""
        return self.analysis_dir / "midlines.pickle"

    @property
    def raw_img_stack_path(self) -> Path:
        """Return the first existing raw image stack (.tif/.tiff/.stk).

        Raises ValueError when none of the candidate files exists.
        """
        # TODO test that this works
        candidate_paths = [
            self.experiment_dir.joinpath(self.experiment_id + ext)
            for ext in (".tif", ".tiff", ".stk")
        ]

        found = next((path for path in candidate_paths if path.exists()), None)
        if found is None:
            raise ValueError(
                f"No image found in experiment directory. Tried the following files: {candidate_paths}"
            )
        return found

    @property
    def fig_dir(self):
        """Figure output directory beneath the analysis directory."""
        return self.analysis_dir / "figs"

    def untrimmed_profile_data_path(self, treatment="raw"):
        """Path of the untrimmed profile-data netCDF for *treatment*."""
        filename = f"{self.experiment_id}-untrimmed_{treatment}_profile_data.nc"
        return self.analysis_dir / filename

    def trimmed_profile_data_path(self, treatment="raw"):
        """Path of the trimmed profile-data netCDF for *treatment*."""
        filename = f"{self.experiment_id}-trimmed_{treatment}_profile_data.nc"
        return self.analysis_dir / filename

    @property
    def channel_warp_data_path(self):
        """Path of the channel-registration warp netCDF."""
        return self.analysis_dir / f"{self.experiment_id}-channel_warps.nc"

    @property
    def std_warp_data_path(self):
        """Path of the standardization warp netCDF."""
        return self.analysis_dir / f"{self.experiment_id}-std_warps.nc"

    def untrimmed_profile_data_csv_path(self, treatment="raw"):
        """CSV variant of the untrimmed profile data for *treatment*."""
        filename = f"{self.experiment_id}-untrimmed_{treatment}_profile_data.csv"
        return self.analysis_dir / filename

    def trimmed_profile_data_csv_path(self, treatment="raw"):
        """CSV variant of the trimmed profile data for *treatment*."""
        filename = f"{self.experiment_id}-trimmed_{treatment}_profile_data.csv"
        return self.analysis_dir / filename

    @property
    def untrimmed_region_data_path(self):
        """Path of the untrimmed per-region summary CSV."""
        return self.analysis_dir / f"{self.experiment_id}-untrimmed_region_data.csv"

    @property
    def trimmed_region_data_path(self):
        """Path of the trimmed per-region summary CSV."""
        return self.analysis_dir / f"{self.experiment_id}-trimmed_region_data.csv"

    @property
    def analysis_dir(self) -> Path:
        """Dated per-run analysis directory (computed, not created here)."""
        date_str = datetime.datetime.now().strftime("%Y-%m-%d")
        strategy = self.config["pipeline"]["strategy"]
        suffix = f"_{strategy}" if strategy else ""
        return self.experiment_dir.joinpath(
            "analyses",
            utils.get_valid_filename(f"{date_str}{suffix}"),
        )

    def _load_raw_images(self):
        """
        Load the raw (non-median-subtracted) images as a hyperstack.

        Tags each animal with this experiment's id and attaches the
        experiment metadata before returning the DataArray.
        """
        logging.info(f"Loading image data from {self.raw_img_stack_path}")
        raw_image_data = pio.load_tiff_as_hyperstack(
            img_stack_path=self.raw_img_stack_path,
            manual_metadata=self.frame_map_path,
            mvmt_metadata=self.movement_path,
        )

        # One experiment_id entry per animal along the animal dimension.
        raw_image_data = raw_image_data.assign_coords({
            "experiment_id": (
                ("animal", ),
                np.repeat(self.experiment_id, raw_image_data.animal.size),
            )
        })
        raw_image_data = self.add_experiment_metadata_to_data_array(
            raw_image_data)

        return raw_image_data

    def _load_movement(self) -> pd.DataFrame:
        """Load the movement-annotation CSV, reshaped to (animal, pair) rows.

        Returns None (after logging a warning) when the file is missing.
        """
        movement_path = self.experiment_dir.joinpath(self.experiment_id +
                                                     "-mvmt.csv")
        try:
            df = pd.read_csv(movement_path)
            # Wide table: one row per animal, (region, pair) columns;
            # then move "pair" back into the index.
            df = df.pivot_table(index="animal",
                                columns=["region", "pair"],
                                values="movement")
            df = df.stack("pair")
            return df
        except FileNotFoundError:
            logging.warning(
                f"Tried to access {movement_path}; file was not found")
            return None

    def make_analysis_dir(self) -> None:
        """Create the analysis directory tree if it does not yet exist."""
        analysis_dir = self.analysis_dir
        logging.info(f"Making analysis directory at {analysis_dir}")
        analysis_dir.mkdir(parents=True, exist_ok=True)

    @property
    def trimmed_summary_table(self):
        """Per-region summary of the trimmed raw profiles (no rescaling)."""
        df = profile_processing.summarize_over_regions(
            self.trimmed_raw_profiles,
            regions=self.config["pipeline"]["trimmed_regions"],
            rescale=False,
            **self.config["redox"],
        )
        return df

    @property
    def untrimmed_summary_table(self):
        """Per-region summary of the untrimmed raw profiles."""
        df = profile_processing.summarize_over_regions(
            self.untrimmed_raw_profiles,
            regions=self.config["pipeline"]["untrimmed_regions"],
            **self.config["redox"],
        )
        return df

    ####################################################################################
    # PIPELINE
    ####################################################################################

    def full_pipeline(self):
        """Run every pipeline stage in order and persist the results.

        Returns self so calls can be chained.
        """
        logging.info(f"Starting full pipeline run for {self.experiment_dir}")

        self.make_analysis_dir()

        logging.info(f"Saving fluorescent images to {self.fl_imgs_dir}")
        pio.save_images_xarray_to_tiffs(self.images,
                                        self.fl_imgs_dir,
                                        prefix=self.experiment_id)

        # Stage order matters: each stage consumes the previous one's
        # attributes (masks -> alignment -> midlines -> profiles -> redox).
        self.segment_pharynxes()
        self.register_images()
        self.align_and_center()
        self.calculate_midlines()
        self.measure_under_midlines()
        self.register_profiles()
        self.trim_data()
        self.calculate_redox()
        self.do_manual_ap_flips()
        self.persist_to_disk()

        logging.info(f"Finished full pipeline run for {self.experiment_dir}")

        return self

    def run_neuron_pipeline(self):
        """Measure intensities under labeled masks and save them as CSV."""
        logging.info(
            f"Starting full neuron analysis pipeline run for {self.experiment_dir}"
        )
        self.make_analysis_dir()
        df = ip.measure_under_labels(self.images,
                                     self.seg_images).reset_index()

        df.to_csv(self.analysis_dir /
                  (self.experiment_id + "-neuron_analysis.csv"))

    def segment_pharynxes(self):
        """Generate pharynx masks unless they were already loaded."""
        if self.seg_images is not None:
            # Masks came from disk (see __init__ / load_masks); just persist.
            logging.info("masks have been specified. skipping mask generation")
            self.save_masks()
            return

        logging.info("Generating masks")
        self.seg_images = ip.segment_pharynxes(
            self.images,
            wvl=self.config["pipeline"]["reference_wavelength"])
        self.save_masks()

    def register_images(self):
        """Register raw images against the masks when config enables it."""
        if not self.config["pipeline"]["image_register"]:
            return
        logging.info("Registering Images")
        self.images = ip.register_all_images(self.images, self.seg_images)

    def align_and_center(self):
        """Center/rotate pharynxes; save the aligned images and masks."""
        logging.info("Centering and rotating pharynxes")
        self.rot_fl, self.rot_seg = ip.center_and_rotate_pharynxes(
            self.images,
            self.seg_images,
        )

        logging.info(f"Saving rotated FL images to {self.aligned_images_path}")
        pio.save_profile_data(self.rot_fl, self.aligned_images_path)

        logging.info(f"Saving rotated masks to {self.aligned_seg_images_path}")
        pio.save_profile_data(self.rot_seg, self.aligned_seg_images_path)

    def calculate_midlines(self):
        """Fit midlines through the rotated masks (degree=4)."""
        logging.info("Calculating midlines")
        self.midlines = ip.calculate_midlines(self.rot_seg, degree=4)

    def measure_under_midlines(self):
        """Measure intensity profiles under the midlines.

        Aligns anterior-posterior orientation, attaches experiment
        metadata, and subtracts image medians from the profiles.
        """
        logging.info("Measuring under midlines")
        self.untrimmed_raw_profiles = ip.measure_under_midlines(
            self.rot_fl,
            self.midlines,
            n_points=self.config["pipeline"]["untrimmed_profile_length"],
            order=self.config["pipeline"]["measurement_order"],
            thickness=float(self.config["pipeline"]["measure_thickness"]),
        )
        self.untrimmed_raw_profiles = profile_processing.align_pa(
            self.untrimmed_raw_profiles)
        self.untrimmed_raw_profiles = self.add_experiment_metadata_to_data_array(
            self.untrimmed_raw_profiles)

        # subtract the image medians from the profile data
        logging.info("Subtracting image medians from profile data")
        self.untrimmed_raw_profiles = ip.subtract_medians(
            self.untrimmed_raw_profiles, self.images)

    def register_profiles(self):
        """Optionally standardize and channel-register the profiles.

        Both steps are gated by config flags; channel registration
        prefers the standardized profiles when they exist.
        """

        if self.config["pipeline"]["population_register"]:
            logging.info("Standardizing profiles")
            (
                self.untrimmed_std_profiles,
                self.std_warps,
            ) = profile_processing.standardize_profiles(
                self.untrimmed_raw_profiles,
                redox_params=self.config["redox"],
                **self.config["registration"],
            )

        if self.config["pipeline"]["channel_register"]:
            logging.info("Channel-Registering profiles")

            if self.untrimmed_std_profiles is not None:
                logging.info(
                    "using the standardize profiles for channel-registration")
                data_to_register = self.untrimmed_std_profiles
            else:
                logging.info("using the raw profiles for channel-registration")
                data_to_register = self.untrimmed_raw_profiles

            (
                self.untrimmed_reg_profiles,
                self.channel_warps,
            ) = profile_processing.channel_register(
                data_to_register,
                redox_params=self.config["redox"],
                reg_params=self.config["registration"],
            )

    def trim_data(self):
        """Trim intensity profiles and stamp them with experiment metadata.

        The same trim-and-tag operation is applied to the raw,
        standardized, and channel-registered untrimmed profiles (the
        latter two only if their pipeline stages ran), filling the
        corresponding trimmed_* attributes.
        """
        logging.info("Trimming intensity data")

        def _trim(profiles):
            # Shared trim + metadata-tagging step (was triplicated inline).
            return self.add_experiment_metadata_to_data_array(
                profile_processing.trim_profiles(
                    profiles,
                    self.config["pipeline"]["seg_threshold"],
                    ref_wvl=self.config["pipeline"]["reference_wavelength"],
                ))

        self.trimmed_raw_profiles = _trim(self.untrimmed_raw_profiles)

        if self.untrimmed_std_profiles is not None:
            self.trimmed_std_profiles = _trim(self.untrimmed_std_profiles)

        if self.untrimmed_reg_profiles is not None:
            self.trimmed_reg_profiles = _trim(self.untrimmed_reg_profiles)

    def calculate_redox(self):
        """Add derived wavelengths to images and profiles via redox params."""
        logging.info("Calculating redox measurements")

        redox_params = self.config["redox"]

        # Images
        self.images = utils.add_derived_wavelengths(self.images,
                                                    **redox_params)
        self.rot_fl = utils.add_derived_wavelengths(self.rot_fl,
                                                    **redox_params)

        # profiles
        self.trimmed_raw_profiles = utils.add_derived_wavelengths(
            self.trimmed_raw_profiles, **redox_params)

        self.untrimmed_raw_profiles = utils.add_derived_wavelengths(
            self.untrimmed_raw_profiles, **redox_params)

    def do_manual_ap_flips(self):
        """Placeholder: manual anterior-posterior flips (not implemented)."""
        # TODO finish implementation
        logging.info("skipping manual AP flips - not implemented")

    def flip_at(self, idx):
        """Placeholder: flip a single animal's A-P orientation by index."""
        # TODO finish implementation
        raise NotImplementedError

    ####################################################################################
    # PERSISTENCE / IO
    ####################################################################################

    def save_images(self):
        """Save this experiment's images to disk as netCDF4 files"""
        imgs_paths = [
            (self.images, self.orig_images_path),
            (self.rot_fl, self.aligned_images_path),
            (self.seg_images, self.seg_images_path),
            (self.rot_seg, self.aligned_seg_images_path),
        ]
        for img, path in imgs_paths:
            # Skip stages that never ran (attribute still None).
            if img is not None:
                logging.info(f"Saving images to {path}")
                img.to_netcdf(path)

    # def load_tiff_as_hyperstack(self):
    # pass

    def make_fig_dir(self):
        """Ensure the figure output directory (``<analysis_dir>/figs``)
        exists and return its path."""
        fig_dir = self.analysis_dir / "figs"
        fig_dir.mkdir(parents=True, exist_ok=True)
        return fig_dir

    def save_individual_profiles(self, profile_data, treatment: str,
                                 trimmed: bool):
        """Save per-animal wavelength-pair profile plots as PDFs.

        Figures are written under
        ``figs/profile_data/<treatment>/<trimmed|untrimmed>/individual``.
        No-op when ``profile_data`` is None (stage not computed).
        """
        if profile_data is None:
            return

        fig_dir = self.make_fig_dir()

        profile_data_fig_dir = (fig_dir / "profile_data" / treatment /
                                ("trimmed" if trimmed else "untrimmed"))

        # Fixed directory-name typo: "inividual" -> "individual".
        individual_data_fig_dir = profile_data_fig_dir.joinpath("individual")
        individual_data_fig_dir.mkdir(exist_ok=True, parents=True)

        for title, fig in plots.generate_wvl_pair_timepoint_profile_plots(
                profile_data):
            title = title.replace(" ", "")
            fig.savefig(individual_data_fig_dir /
                        f"{self.experiment_id}-{title}-individuals.pdf")
            # Close each figure immediately to bound memory use.
            plt.close(fig)

    def save_avg_profiles(self, profile_data, treatment: str, trimmed: bool):
        """Save population-average wavelength-pair profile plots as PDFs.

        Figures are written under
        ``figs/profile_data/<treatment>/<trimmed|untrimmed>/avg``.
        No-op when ``profile_data`` is None (stage not computed).
        """
        if profile_data is None:
            return

        trim_label = "trimmed" if trimmed else "untrimmed"
        avg_fig_dir = (self.make_fig_dir() / "profile_data" / treatment /
                       trim_label / "avg")
        avg_fig_dir.mkdir(exist_ok=True, parents=True)

        for title, fig in plots.generate_avg_wvl_pair_profile_plots(
                profile_data):
            fname = f"{self.experiment_id}-{title.replace(' ', '')}-avg.pdf"
            fig.savefig(avg_fig_dir / fname)
            plt.close(fig)

    def save_plots(self):
        """Render and save all figure outputs for this experiment.

        Writes, under the experiment's figure directory:
        - individual and average profile plots for each computed
          processing stage (raw / standardized / channel-registered,
          both trimmed and untrimmed);
        - a movement-annotation PDF of frame-normalized ratio images;
        - per-(pair, timepoint) PDFs of population-normalized ratio
          images with midline overlays.

        Warnings are suppressed for the duration of plotting.
        """
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")

            # Profile plots; the save_* helpers skip stages that are None.
            for data, treatment, trimmed in [
                (self.untrimmed_raw_profiles, "raw", False),
                (self.untrimmed_std_profiles, "standardized", False),
                (self.untrimmed_reg_profiles, "channel-registered", False),
                (self.trimmed_raw_profiles, "raw", True),
                (self.trimmed_std_profiles, "standardized", True),
                (self.trimmed_reg_profiles, "channel-registered", True),
            ]:
                self.save_individual_profiles(data, treatment, trimmed)
                self.save_avg_profiles(data, treatment, trimmed)

            # frame-normed Ratio Images
            mvmt_annotation_img_path = self.fig_dir.joinpath(
                f"{self.experiment_id}-movement_annotation_imgs.pdf")
            imgs = utils.add_derived_wavelengths(self.images,
                                                 **self.config["redox"])
            with PdfPages(mvmt_annotation_img_path) as pdf:
                for i in tqdm(range(self.images.animal.size)):
                    fig = plots.plot_pharynx_R_imgs(imgs[i],
                                                    mask=self.seg_images[i])
                    fig.suptitle(f"animal = {i}")
                    pdf.savefig(fig)
                    # Periodically close all figures to bound memory use.
                    if (i % 20) == 0:
                        plt.close("all")

            # Pop-normed ratio images
            # Population mean/std of the "r" (ratio) channel fix a common
            # color scale (mean +/- 1.96*std) across all animals below.
            u = self.trimmed_raw_profiles.sel(wavelength="r").mean()
            std = self.trimmed_raw_profiles.sel(wavelength="r").std()

            for pair in self.rot_fl.pair.values:
                for tp in self.rot_fl.timepoint.values:
                    ratio_img_path = self.fig_dir.joinpath(
                        f"{self.experiment_id}-ratio_images-pair={pair};timepoint={tp}.pdf"
                    )
                    with PdfPages(ratio_img_path) as pdf:
                        logging.info(
                            f"Saving ratio images to {ratio_img_path}")
                        for i in tqdm(range(self.rot_fl.animal.size)):
                            fig, ax = plt.subplots(dpi=300)
                            # Ratio image: numerator channel / denominator
                            # channel (channels from the redox config).
                            ratio_img = (self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_numerator"],
                                pair=pair,
                                timepoint=tp,
                            ) / self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_denominator"],
                                pair=pair,
                                timepoint=tp,
                            ))[i]
                            fl_img = self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_numerator"],
                                pair=pair,
                                timepoint=tp,
                            )[i]
                            im, cbar = plots.imshow_ratio_normed(
                                ratio_img,
                                fl_img,
                                r_min=u - (std * 1.96),
                                r_max=u + (std * 1.96),
                                colorbar=True,
                                i_max=5000,
                                i_min=1000,
                                ax=ax,
                            )
                            # Overlay the measurement midline for this
                            # animal/pair/timepoint.
                            ax.plot(
                                *self.midlines.sel(
                                    pair=pair,
                                    timepoint=tp,
                                )[i].values[()].linspace(),
                                color="green",
                                alpha=0.3,
                            )
                            strain = self.rot_fl.strain.values[i]
                            ax.set_title(
                                f"Animal={i} ; Pair={pair} ; Strain={strain}")
                            cax = cbar.ax
                            # Faint lines: every animal's mean ratio, for
                            # population context on the colorbar.
                            for j in range(len(self.trimmed_raw_profiles)):
                                cax.axhline(
                                    self.trimmed_raw_profiles.sel(
                                        wavelength="r",
                                        pair=pair,
                                        timepoint=tp)[j].mean(),
                                    color="k",
                                    alpha=0.1,
                                )
                            # Solid line: this animal's mean ratio.
                            cax.axhline(
                                self.trimmed_raw_profiles.sel(
                                    wavelength="r", pair=pair,
                                    timepoint=tp)[i].mean(),
                                color="k",
                            )
                            pdf.savefig()
                            if (i % 20) == 0:
                                plt.close("all")

    def persist_profile_data(self):
        """Save all computed profile data (and warps, if enabled) to disk.

        For each processing stage (raw / standardized / channel-registered)
        that has been computed, writes the untrimmed and trimmed profile
        data via ``pio.save_profile_data`` plus a long-format CSV. Also
        saves channel-registration and population-registration warps when
        the corresponding pipeline stages are enabled.
        """
        for treatment, untrimmed_profile_data in (
            ("raw", self.untrimmed_raw_profiles),
            ("std", self.untrimmed_std_profiles),
            ("reg", self.untrimmed_reg_profiles),
        ):
            if untrimmed_profile_data is not None:
                untrimmed_prof_path = self.untrimmed_profile_data_path(
                    treatment)
                logging.info(
                    f"Saving untrimmed {treatment} profile data to {untrimmed_prof_path}"
                )
                pio.save_profile_data(untrimmed_profile_data,
                                      untrimmed_prof_path)

                untrimmed_prof_path_csv = self.untrimmed_profile_data_csv_path(
                    treatment)
                # Log the CSV write too, mirroring the trimmed branch below
                # (previously this write was silent).
                logging.info(
                    f"Saving untrimmed {treatment} profile data to {untrimmed_prof_path_csv}"
                )
                profile_processing.to_dataframe(
                    untrimmed_profile_data,
                    "value").to_csv(untrimmed_prof_path_csv)

        for treatment, trimmed_profile_data in (
            ("raw", self.trimmed_raw_profiles),
            ("std", self.trimmed_std_profiles),
            ("reg", self.trimmed_reg_profiles),
        ):
            if trimmed_profile_data is not None:
                trimmed_prof_path = self.trimmed_profile_data_path(treatment)
                logging.info(
                    f"Saving trimmed {treatment} profile data to {trimmed_prof_path}"
                )
                pio.save_profile_data(trimmed_profile_data, trimmed_prof_path)

                trimmed_prof_path_csv = self.trimmed_profile_data_csv_path(
                    treatment)
                logging.info(
                    f"Saving trimmed {treatment} profile data to {trimmed_prof_path_csv}"
                )
                profile_processing.to_dataframe(
                    trimmed_profile_data,
                    "value").to_csv(trimmed_prof_path_csv)

        # Warps, if necessary
        if self.config["pipeline"]["channel_register"]:
            logging.info(
                f"Saving channel warp data to {self.channel_warp_data_path}")
            self.channel_warps.to_netcdf(self.channel_warp_data_path)

        if self.config["pipeline"]["population_register"]:
            # Fixed log message: this is the standardization (population
            # registration) warp data, not the channel warp data.
            logging.info(
                f"Saving standardization warp data to {self.std_warp_data_path}")
            self.std_warps.to_netcdf(self.std_warp_data_path)

    def save_summary_data(self):
        """Persist the untrimmed and trimmed per-region summary tables
        as CSV files."""
        tables = (
            ("untrimmed", self.untrimmed_summary_table,
             self.untrimmed_region_data_path),
            ("trimmed", self.trimmed_summary_table,
             self.trimmed_region_data_path),
        )
        for label, table, path in tables:
            logging.info(f"Saving {label} region means to {path}")
            table.to_csv(path)

    def save_masks(self):
        """Write the segmentation masks to ``self.seg_images_path``."""
        path = self.seg_images_path
        logging.info(f"saving masks to {path}")
        pio.save_profile_data(self.seg_images, path)

    def load_masks(self):
        """Load segmentation masks from disk into ``self.seg_images``."""
        masks = pio.load_profile_data(self.seg_images_path)
        self.seg_images = masks
        logging.info(f"Loaded masks from {self.seg_images_path}")

    def save_midlines(self):
        """Write this experiment's midlines to ``self.midlines_path``."""
        pio.save_midlines(self.midlines_path, self.midlines)

    def load_midlines(self):
        """Load midlines from ``self.midlines_path``.

        Caches the result on ``self.midlines`` (consistent with
        ``load_masks``, which caches on ``self.seg_images``) and also
        returns it for callers that use the return value.
        """
        self.midlines = pio.load_midlines(self.midlines_path)
        return self.midlines

    def persist_to_disk(self):
        """Write midlines plus any outputs enabled in the ``output``
        config section (summary data, profile data, plots) to disk."""
        logging.info(
            f"Saving {self.experiment_id} inside {self.experiment_dir}")

        self.save_midlines()

        # Each optional output is gated by its config flag; order matters
        # only in that it matches the original save sequence.
        output_cfg = self.config["output"]
        for flag, saver in (
            ("should_save_summary_data", self.save_summary_data),
            ("should_save_profile_data", self.persist_profile_data),
            ("should_save_plots", self.save_plots),
        ):
            if output_cfg[flag]:
                saver()

    ####################################################################################
    # MISC / HELPER
    ####################################################################################
    def add_experiment_metadata_to_data_array(self, data_array: xr.DataArray):
        """Return ``data_array`` with pipeline/redox/registration config
        parameters attached as attrs.

        The region-boundary tables are excluded — they are not scalar
        metadata suitable for attrs.
        """
        # Later sections win on key collisions, same as successive
        # dict.update() calls in the original.
        params = {
            **self.config["pipeline"],
            **self.config["redox"],
            **self.config["registration"],
        }

        # pop(..., None) instead of del: tolerate configs that lack the
        # region keys rather than raising KeyError.
        for key in ("trimmed_regions", "untrimmed_regions"):
            params.pop(key, None)

        return data_array.assign_attrs(**params)
# Example #29
# 0
class Engine(BaseEngine):
    """Python engine for running tests."""

    # Preconditions a story may declare (hitchstory "given" properties).
    given_definition = GivenDefinition(
        scripts=GivenProperty(MapPattern(Str(), Str())),
        python_version=GivenProperty(Str()),
        pexpect_version=GivenProperty(Str()),
        icommandlib_version=GivenProperty(Str()),
        setup=GivenProperty(Str()),
        files=GivenProperty(MapPattern(Str(), Str())),
        code=GivenProperty(Str()),
    )

    # Per-story metadata (hitchstory "info" properties).
    info_definition = InfoDefinition(
        importance=InfoProperty(schema=Int()),
        docs=InfoProperty(schema=Str()),
        fails_on_python_2=InfoProperty(schema=Bool()),
    )

    def __init__(self, keypath, rewrite=False):
        """Create the engine.

        keypath: project path collection (key/gen/state/... directories).
        rewrite: when True, failing output/exception expectations rewrite
        the story file with the actual values instead of raising.
        """
        self.path = keypath
        self._rewrite = rewrite
        # Profiling is off by default; when enabled, run() wraps the code
        # with cProfile and on_success() prints the stats.
        self._cprofile = False

    def set_up(self):
        """Set up your applications and the test environment."""
        # Start each story from a pristine "state" working directory.
        self.path.state = self.path.gen.joinpath("state")
        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        # Materialize any scripts declared in the story and mark them
        # user-executable.
        for script in self.given.get("scripts", []):
            script_path = self.path.state.joinpath(script)

            if not script_path.dirname().exists():
                script_path.dirname().makedirs()

            script_path.write_text(self.given["scripts"][script])
            script_path.chmod("u+x")

        # Materialize plain files declared in the story.
        for filename, contents in self.given.get("files", {}).items():
            self.path.state.joinpath(filename).write_text(contents)

        # Build (or reuse) the project environment for the requested
        # python version.
        # NOTE(review): the space-separated key "python version" presumably
        # maps to the underscore GivenProperty name python_version above —
        # confirm against the hitchstory version in use.
        self.python = hitchpylibrarytoolkit.project_build(
            "commandlib", self.path, self.given["python version"]
        ).bin.python

        # Base runnable snippet: story-level setup + code; run() layers
        # step-specific code on top of this.
        self.example_py_code = (
            ExamplePythonCode(self.python, self.path.state)
            .with_code(self.given.get("code", ""))
            .with_setup_code(self.given.get("setup", ""))
        )

    def _story_friendly_output(self, text):
        # Replace the machine-specific state dir with a stable placeholder
        # so output stored in story files is portable across machines.
        return text.replace(self.path.state, "/path/to")

    @no_stacktrace_for(AssertionError)
    @no_stacktrace_for(HitchRunPyException)
    @validate(
        code=Str(),
        will_output=Str(),
        raises=Map(
            {
                Optional("type"): Map({"in python 2": Str(), "in python 3": Str()})
                | Str(),
                Optional("message"): Map({"in python 2": Str(), "in python 3": Str()})
                | Str(),
            }
        ),
    )
    def run(self, code, will_output=None, raises=None):
        """Run a story code step.

        code: python snippet to execute on top of the story's setup/code.
        will_output: expected stdout (Templex pattern); on mismatch, either
        rewrites the story (rewrite mode) or raises.
        raises: expected exception type/message, either a plain string or
        a {"in python 2": ..., "in python 3": ...} pair.
        """
        to_run = self.example_py_code.with_code(code)

        if self._cprofile:
            to_run = to_run.with_cprofile(
                self.path.profile.joinpath("{0}.dat".format(self.story.slug))
            )

        # When an exception is expected, run in expect-exceptions mode so
        # a raised exception does not fail the step outright.
        result = (
            to_run.expect_exceptions().run() if raises is not None else to_run.run()
        )

        if will_output is not None:
            # Normalize trailing whitespace per line before comparing.
            actual_output = "\n".join(
                [line.rstrip() for line in result.output.split("\n")]
            )
            try:
                Templex(will_output).assert_match(actual_output)
            except AssertionError:
                if self._rewrite:
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        if raises is not None:
            differential = False  # Difference between python 2 and python 3 output?
            exception_type = raises.get("type")
            message = raises.get("message")

            # Resolve python-2/3 differential expectations to the variant
            # matching the python version under test.
            if exception_type is not None:
                if not isinstance(exception_type, str):
                    differential = True
                    exception_type = (
                        exception_type["in python 2"]
                        if self.given["python version"].startswith("2")
                        else exception_type["in python 3"]
                    )

            if message is not None:
                if not isinstance(message, str):
                    differential = True
                    message = (
                        message["in python 2"]
                        if self.given["python version"].startswith("2")
                        else message["in python 3"]
                    )

            try:
                # NOTE(review): this re-runs self.example_py_code — which
                # does NOT include this step's `code` or the cprofile
                # wrapper — instead of reusing `result` from the to_run
                # execution above. Looks like it should reuse `result`;
                # confirm intent before changing.
                result = self.example_py_code.expect_exceptions().run()
                result.exception_was_raised(exception_type)
                exception_message = self._story_friendly_output(
                    result.exception.message
                )
                Templex(exception_message).assert_match(message)
            except AssertionError:
                # Only rewrite non-differential expectations: a single
                # rewritten message cannot represent both py2 and py3
                # variants.
                if self._rewrite and not differential:
                    new_raises = raises.copy()
                    new_raises["message"] = self._story_friendly_output(
                        result.exception.message
                    )
                    self.current_step.update(raises=new_raises)
                else:
                    raise

    def file_contents_will_be(self, filename, contents):
        """Assert that a file in the state dir matches ``contents``
        (modulo trailing whitespace); rewrite the story on mismatch in
        rewrite mode."""
        file_contents = "\n".join(
            [
                line.rstrip()
                for line in self.path.state.joinpath(filename)
                .bytes()
                .decode("utf8")
                .strip()
                .split("\n")
            ]
        )
        try:
            # Templex(file_contents).assert_match(contents.strip())
            assert file_contents == contents.strip(), "{0} not {1}".format(
                file_contents, contents.strip()
            )
        except AssertionError:
            if self._rewrite:
                self.current_step.update(contents=file_contents)
            else:
                raise

    def pause(self, message="Pause"):
        """Drop into an interactive IPython shell mid-story for debugging."""
        import IPython

        IPython.embed()

    def on_success(self):
        """After a successful story, print cProfile stats if profiling
        was enabled."""
        if self._cprofile:
            self.python(
                self.path.key.joinpath("printstats.py"),
                self.path.profile.joinpath("{0}.dat".format(self.story.slug)),
            ).run()
# Example #30
# 0
# Config file managed by the site admins.
CONFIG_ENV_NAME = 'games_config'

# The environment variable overrides the default config location.
src_file = os.environ.get(CONFIG_ENV_NAME, '/etc/games/games.yml')

with open(src_file, 'r', encoding='utf-8') as file_h:
    CFG_CONTENT = file_h.read()

# StrictYAML schema: every key below is required; load() fails fast on
# missing or unknown keys.
SCHEMA = Map({
    "connections":
    Map({
        "pgsql": Map({"uri": Str()}),
        "celery": Map({"uri": Str()}),
    }),
    "static_root":
    Str(),
    "api_docs_enabled":
    Bool(),
    "debug":
    Bool(),
    "secret_key":
    Str(),
    "media_root":
    Str()
})

# Validated config object used by the rest of the application.
config = load(CFG_CONTENT, SCHEMA)