Example #1
def load_scheme(yaml_text):
    '''
    Load and validate a scheme file. Some optional keys have enforced default
    values; the remaining optional keys are read with dict.get() by callers.
    '''
    schema = MapPattern(
        Str(),
        Map({
            'description':
            Str(),
            Optional('alias'):
            Str(),
            'version':
            Str(),
            'directives':
            MapPattern(
                Str(),
                Map({
                    Optional('description'):
                    Str(),
                    'formats':
                    Seq(Enum(FORMATS)),
                    Optional('prepare'):
                    Map({
                        Optional('remove_whitespace', default=False): Bool(),
                        Optional('remove_characters', default=['']):
                        Seq(Str()),
                        Optional('strip_characters', default=['']): Seq(Str()),
                    }),
                    Optional('validate'):
                    Map({
                        Optional('alphabet'): Enum(ALPHABETS),
                        Optional('min_length'): Int(),
                        Optional('max_length'): Int(),
                    }),
                    Optional('target'):
                    Str(),
                    Optional('helper', default=False):
                    Bool()
                }),
            ),
            'algorithm':
            Enum(ALGORITHMS),
            'encodings':
            MapPattern(
                Str(),
                Map({
                    'type':
                    Enum(ENCODINGS),
                    Optional('length', default=0):
                    Int(),
                    Optional('prefix', default=''):
                    Str(),
                    Optional('separator'):
                    Map({
                        'character': Str(),
                        'interval': Int()
                    })
                }))
        }))
    return load(yaml_text, schema)
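
This example assumes the strictyaml validators are already imported and that FORMATS, ALPHABETS, ALGORITHMS and ENCODINGS are lists of allowed strings defined elsewhere in the project. A minimal sketch of that surrounding setup, with hypothetical constant values and file name, might look like:

from strictyaml import (load, Map, MapPattern, Seq, Enum, Str, Int, Bool,
                        Optional)

# Hypothetical enumerations; the real project defines these elsewhere.
FORMATS = ["string", "integer"]
ALPHABETS = ["ascii", "alphanumeric"]
ALGORITHMS = ["luhn"]
ENCODINGS = ["base32", "hex"]

with open("scheme.yaml") as f:  # hypothetical file name
    scheme = load_scheme(f.read())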
Example #2
    def __init__(self, filename):
        """Load config from YAML file."""
        if filename is None:
            self._config = []
        else:
            filename = path.abspath(filename)
            try:
                with open(filename, 'r') as handle:
                    self._yaml = handle.read()

                self._config = load(
                    self._yaml,
                    Seq(
                        Map({
                            Optional("name"):
                            Str(),
                            "request":
                            Map({
                                Optional("path"):
                                Str(),
                                Optional("method"):
                                Enum([
                                    "get",
                                    "post",
                                    "put",
                                    "delete",
                                    "GET",
                                    "POST",
                                    "PUT",
                                    "DELETE",
                                ]),
                                Optional("headers"):
                                MapPattern(Str(), Str()),
                                Optional("data"):
                                Str(),
                            }),
                            "response":
                            Map({
                                "content": Str() | Map({"file": Str()}),
                                Optional("code"): Int(),
                                Optional("headers"): MapPattern(Str(), Str()),
                            }),
                        })))
            except Exception as e:
                sys.stderr.write(
                    "Error reading YAML config file: {0}\n".format(str(e)))
                sys.exit(1)

            # Read and store all references to external content files
            for pair in self._config:
                content = pair.get('response', {}).get('content')
                if not isinstance(content, str) and "file" in content:
                    with open(
                            path.join(path.dirname(filename), content['file']),
                            'r') as content_file_handle:
                        pair['response']['content'] = \
                            content_file_handle.read()
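
For illustration, a config document that this Seq(Map(...)) schema would accept could look like the following; the endpoint, content and status code are invented:

# The kind of text self._yaml would hold after the config file is read.
SAMPLE_CONFIG = """\
- name: hello endpoint
  request:
    path: /hello
    method: get
  response:
    content: Hello, world!
    code: 200
"""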
Example #3
 def _get_schema(cls):
     return Map({
         PHASE_DEFINITIONS_TAG:
         MapPattern(Str(), Map(cls._get_phase_mapping())),
         ROUTE_DEFINITIONS_TAG:
         MapPattern(Str(), Map(cls._get_route_mapping())),
         MISSION_DEFINITION_TAG:
         Map(cls._get_mission_mapping()),
     })
Example #4
 def _get_schema(cls):
     """Schema of the whole mission file."""
     return Map({
         PHASE_DEFINITIONS_TAG:
         MapPattern(Str(), cls._get_phase_schema()),
         ROUTE_DEFINITIONS_TAG:
         MapPattern(Str(), cls._get_route_schema()),
         MISSION_DEFINITION_TAG:
         MapPattern(Str(), cls._get_mission_schema()),
     })
Example #5
def load_data(filename):
    schema = MapPattern(
        Regex(u'[A-Za-z. ]+'),
        MapPattern(
            Regex(u'[A-Za-z\\-. ]+'),
            Map({
                'Description': Regex('.+'),
                'Free': Bool(),
                'Link': Url(),
            }, Regex(u'.+'))))
    with open(filename) as f:
        return strictyaml.load(f.read(), schema).data
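
A sketch of the data file this schema expects, with made-up entries; the outer keys must match [A-Za-z. ]+ and the inner keys [A-Za-z\-. ]+:

SAMPLE_DATA = """\
Web Frameworks:
  django-rest:
    Description: Toolkit for building Web APIs
    Free: yes
    Link: https://www.django-rest-framework.org
"""
# load_data("data.yaml") would return the equivalent plain dict for such a file.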
Example #6
    def __init__(self, sqlite_filename, templates):
        self._db = Database(sqlite_filename)
        self._templates = load(
            Path(templates).bytes().decode('utf8'),
            Map({
                "documents": MapPattern(Str(), Str()),
                "steps": MapPattern(Str(), Str()),
            }),
        ).data

        self._stories = [
            Story(recording, self._db, self._templates)
            for recording in self._db.Recording.select()
        ]
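
The templates file referenced here is just two string-to-string maps; an invented example of its shape:

SAMPLE_TEMPLATES = """\
documents:
  story: "# {{ name }}"
steps:
  run: "Run {{ args }}"
"""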
Example #7
def parse_yaml(yaml_text):
    selectors = load(
        yaml_text,
        MapPattern(Str(), Map({"appears when": Str(), "elements": ELEMENTS_SCHEMA})),
    )

    for page, page_properties in selectors.items():
        revalidate_subelements(page_properties["elements"])
    return selectors
Example #8
    def _get_info(cls):

        # https://hitchdev.com/strictyaml
        schema = Map({
            Optional("knobs"):
            Map({
                Optional("log_to_console"): Bool(),
                Optional("log_level_debug"): Bool(),
            })
            | EmptyDict(),
            "globals":
            Map({
                "topic_prefix":
                Str(),
                Optional(
                    "reconnect_interval",
                    default=const.MQTT_DEFAULT_RECONNECT_INTERVAL,
                ):
                Float(),
                Optional("poll_interval",
                         default=const.MYQ_DEFAULT_POLL_INTERVAL):
                Float(),
                Optional(
                    "periodic_mqtt_report",
                    default=const.DEFAULT_PERIODIC_MQTT_REPORT,
                ):
                Float(),
                Optional("user_agent"):
                Str(),
            }),
            "mqtt":
            Map({
                "host":
                Str(),
                Optional("client_id", default=const.MQTT_DEFAULT_CLIENT_ID):
                Str(),
                Optional("username"):
                Str(),
                Optional("password"):
                Str(),
            }),
            "myq":
            Map({
                "email": Email(),
                "password": Str(),
            }),
            Optional("alias"):
            MapPattern(Str(), Str()) | EmptyDict(),
        })

        if not cls._info:
            config_filename = cls._get_config_filename()
            logger.info("loading yaml config file %s", config_filename)
            with open(config_filename, "r") as ymlfile:
                raw_cfg = load(ymlfile.read(), schema).data
                cls._parse_raw_cfg(raw_cfg)
        return cls._info
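
For reference, a minimal configuration that satisfies the required keys of this schema might look like this; the host, credentials and topic prefix are invented, and the optional knobs, alias and interval keys fall back to their defaults:

SAMPLE_CFG = """\
globals:
  topic_prefix: home/myq
mqtt:
  host: localhost
myq:
  email: user@example.com
  password: hunter2
"""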
Example #9
 def test_operations_yaml_schema(self):
     try:
         load(
             Path("../operations.yaml").bytes().decode('utf8'),
             MapPattern(Str(), Map({
                 "command": Str(),
                 "tags": Str()
             })))
     except YAMLError as error:
         self.fail("Something wrong with your schema: {0}".format(error))
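
The operations.yaml file checked by this test maps an operation name to a command and its tags; an invented example:

SAMPLE_OPERATIONS = """\
list-pods:
  command: kubectl get pods
  tags: read-only
"""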
Example #10
class Engine(BaseEngine):
    """Python engine for running tests."""

    given_definition = GivenDefinition(
        scripts=GivenProperty(MapPattern(Str(), Str())),
        python_version=GivenProperty(Str()),
        pexpect_version=GivenProperty(Str()),
        icommandlib_version=GivenProperty(Str()),
        setup=GivenProperty(Str()),
        files=GivenProperty(MapPattern(Str(), Str())),
        code=GivenProperty(Str()),
    )

    info_definition = InfoDefinition(
        importance=InfoProperty(schema=Int()),
        docs=InfoProperty(schema=Str()),
        fails_on_python_2=InfoProperty(schema=Bool()),
    )

    def __init__(self, keypath, rewrite=False):
        self.path = keypath
        self._rewrite = rewrite
        self._cprofile = False

    def set_up(self):
        """Set up your applications and the test environment."""
        self.path.state = self.path.gen.joinpath("state")
        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        for script in self.given.get("scripts", []):
            script_path = self.path.state.joinpath(script)

            if not script_path.dirname().exists():
                script_path.dirname().makedirs()

            script_path.write_text(self.given["scripts"][script])
            script_path.chmod("u+x")

        for filename, contents in self.given.get("files", {}).items():
            self.path.state.joinpath(filename).write_text(contents)

        self.python = hitchpylibrarytoolkit.project_build(
            "commandlib", self.path, self.given["python version"]
        ).bin.python

        self.example_py_code = (
            ExamplePythonCode(self.python, self.path.state)
            .with_code(self.given.get("code", ""))
            .with_setup_code(self.given.get("setup", ""))
        )

    def _story_friendly_output(self, text):
        return text.replace(self.path.state, "/path/to")

    @no_stacktrace_for(AssertionError)
    @no_stacktrace_for(HitchRunPyException)
    @validate(
        code=Str(),
        will_output=Str(),
        raises=Map(
            {
                Optional("type"): Map({"in python 2": Str(), "in python 3": Str()})
                | Str(),
                Optional("message"): Map({"in python 2": Str(), "in python 3": Str()})
                | Str(),
            }
        ),
    )
    def run(self, code, will_output=None, raises=None):
        to_run = self.example_py_code.with_code(code)

        if self._cprofile:
            to_run = to_run.with_cprofile(
                self.path.profile.joinpath("{0}.dat".format(self.story.slug))
            )

        result = (
            to_run.expect_exceptions().run() if raises is not None else to_run.run()
        )

        if will_output is not None:
            actual_output = "\n".join(
                [line.rstrip() for line in result.output.split("\n")]
            )
            try:
                Templex(will_output).assert_match(actual_output)
            except AssertionError:
                if self._rewrite:
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        if raises is not None:
            differential = False  # Difference between python 2 and python 3 output?
            exception_type = raises.get("type")
            message = raises.get("message")

            if exception_type is not None:
                if not isinstance(exception_type, str):
                    differential = True
                    exception_type = (
                        exception_type["in python 2"]
                        if self.given["python version"].startswith("2")
                        else exception_type["in python 3"]
                    )

            if message is not None:
                if not isinstance(message, str):
                    differential = True
                    message = (
                        message["in python 2"]
                        if self.given["python version"].startswith("2")
                        else message["in python 3"]
                    )

            try:
                result = self.example_py_code.expect_exceptions().run()
                result.exception_was_raised(exception_type)
                exception_message = self._story_friendly_output(
                    result.exception.message
                )
                Templex(exception_message).assert_match(message)
            except AssertionError:
                if self._rewrite and not differential:
                    new_raises = raises.copy()
                    new_raises["message"] = self._story_friendly_output(
                        result.exception.message
                    )
                    self.current_step.update(raises=new_raises)
                else:
                    raise

    def file_contents_will_be(self, filename, contents):
        file_contents = "\n".join(
            [
                line.rstrip()
                for line in self.path.state.joinpath(filename)
                .bytes()
                .decode("utf8")
                .strip()
                .split("\n")
            ]
        )
        try:
            # Templex(file_contents).assert_match(contents.strip())
            assert file_contents == contents.strip(), "{0} not {1}".format(
                file_contents, contents.strip()
            )
        except AssertionError:
            if self._rewrite:
                self.current_step.update(contents=file_contents)
            else:
                raise

    def pause(self, message="Pause"):
        import IPython

        IPython.embed()

    def on_success(self):
        if self._cprofile:
            self.python(
                self.path.key.joinpath("printstats.py"),
                self.path.profile.joinpath("{0}.dat".format(self.story.slug)),
            ).run()
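
Because of the @validate decorator on run(), the raises argument arrives either as a plain string or as a per-Python-version mapping. Purely as an illustration of the validated shape (exception names and messages are hypothetical), the step parameters could look like:

EXAMPLE_STEP_PARAMS = """\
code: 1 / 0
raises:
  type: ZeroDivisionError
  message:
    in python 2: integer division or modulo by zero
    in python 3: division by zero
"""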
Example #11
class WooSchema:
    """Schema for localization YAML files."""

    # https://github.com/woocart/woocart-defaults/blob/master/src/importers/class-woopage.php#L14
    productMeta = {
        "title": Str(),
        "description": Str(),
        Optional("price"): Str(),
        Optional("category"): Str(),
        "images": Seq(Str()),
    }

    # https://github.com/woocart/woocart-defaults/blob/master/src/importers/class-woopage.php#L14
    pageMeta = {
        "post_title": Str(),
        Optional("post_name"): Str(),
        Optional("post_excerpt"): Str(),
        "post_status": Enum(["draft", "publish"]),
        "post_type": Enum(["page", "post"]),
        Optional("post_category"): Str(),
        Optional("meta_input"): MapPattern(Str(), Str()),
        Optional("woocart_defaults"): MapPattern(Str(), Str()),
    }

    localization = {
        "woo/woocommerce_default_country":
        Enum(COUNTRIES),
        "wp/date_format":
        Enum(["d/m/Y", "Y-m-d", "F j, Y", "m/d/Y"]),
        "wp/time_format":
        Enum(["H:i", "g:i A"]),
        "wp/start_of_week":
        Enum(["1", "2", "3", "4", "5", "6", "7"]),
        "wp/timezone_string":
        Enum(TIMEZONES),
        "wp/blog_charset":
        Enum(["UTF-8"]),
        "wp/DEFAULT_WPLANG":
        Enum(WPLANGS),
        Optional("wp/blogdescription"):
        Str(),
        Optional("wp/woocommerce_demo_store_notice"):
        Str(),
        "woo/woocommerce_weight_unit":
        Enum(["kg", "k", "lbs", "oz"]),
        "woo/woocommerce_dimension_unit":
        Enum(["m", "cm", "mm", "in", "yd"]),
        "woo/woocommerce_currency":
        Enum(CURRENCIES),
        "woo/woocommerce_currency_pos":
        Enum(["right_space", "left_space", "left", "right"]),
        "woo/woocommerce_price_thousand_sep":
        Enum([".", ","]),
        "woo/woocommerce_price_decimal_sep":
        Enum([",", "."]),
        "woo/woocommerce_price_num_decimals":
        Enum(["2"]),
        Optional("woo/woocommerce_tax_classes"):
        Seq(Str()),
        "woo/woocommerce_bacs_settings":
        Map({
            "enabled": Bool(),
            Optional("title"): Str(),
            Optional("description"): Str(),
            Optional("instructions"): Str(),
            Optional("account_name"): Str(),
            Optional("account_number"): Str(),
            Optional("sort_code"): Str(),
            Optional("bank_name"): Str(),
            Optional("iban"): Str(),
            Optional("bic"): Str(),
            Optional("account_details"): Str(),
        }),
        "woo/woocommerce_cod_settings":
        Map({
            "enabled": Bool(),
            Optional("title"): Str(),
            Optional("description"): Str(),
            Optional("instructions"): Str(),
            Optional("enable_for_methods"): Str(),
            Optional("enable_for_virtual"): Bool(),
        }),
        "woo/woocommerce_checkout_privacy_policy_text":
        Str(),
        "woo/woocommerce_registration_privacy_policy_text":
        Str(),
        ".woo/woocommerce_bacs_settings_format":
        Enum(["serialized"]),
        ".woo/woocommerce_cod_settings_format":
        Enum(["serialized"]),
        Optional(".woo/woocommerce_tax_classes_format"):
        Enum(["implode_newline"]),
    }

    @staticmethod
    def load(path: Path, schema_pointer):
        """Load and validate .yaml file."""
        schema = copy.deepcopy(schema_pointer)
        with path.open() as f:
            yaml = f.read()
            data = yaml_load(yaml, Any())
            is_template = path.name == "template.yaml"

            # Replace real Country and Timezone values with fakes
            if is_template:
                schema["woo/woocommerce_default_country"] = Enum(["LL"])
                schema["wp/timezone_string"] = Enum(["Region/Country"])
                schema["wp/DEFAULT_WPLANG"] = Enum(["ll_LL"])
                schema["woo/woocommerce_currency"] = Enum(["LLL"])

            if "woo/woocommerce_tax_classes" in data:
                # Check that the tax classes and the tax definitions match

                # build an Enum of tax names from the defined tax_classes
                tax_classes = [
                    str(tax).lower().replace(" ", "-")
                    for tax in data["woo/woocommerce_tax_classes"]
                ]
                # +1 accounts for the standard rate, which is never listed in tax_classes
                for x in range(len(tax_classes) + 1):
                    # tax rate keys start counting at 1
                    schema[f"wootax/{x+1}"] = Map({
                        "country":
                        Enum(["LL"]) if is_template else Enum(COUNTRIES),
                        "state":
                        Str(),
                        "rate":
                        Decimal(),
                        "name":
                        Str(),
                        "priority":
                        Int(),
                        "compound":
                        Int(),
                        "shipping":
                        Int(),
                        "order":
                        Int(),
                        "class":
                        Enum([""]) if x == 0 else Enum(tax_classes),
                        "locations":
                        Map({}),
                    })
            # Let any YAMLError propagate to the caller.
            return yaml_load(yaml, Map(schema), path)

    @staticmethod
    def load_string(data: bytes, schema, path: str):
        """Load and validate yaml data."""
        # Let any YAMLError propagate to the caller.
        return yaml_load(data, Map(schema), path)
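
A hypothetical call site, validating a localization file against the shared schema (the path is invented):

from pathlib import Path

config = WooSchema.load(Path("localizations/de_DE.yaml"),
                        WooSchema.localization)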
Example #12
class Engine(BaseEngine):
    """Python engine for running tests."""

    given_definition = GivenDefinition(
        python_version=GivenProperty(Str()),
        selenium_version=GivenProperty(Str()),
        website=GivenProperty(MapPattern(Str(), Str())),
        selectors_yml=GivenProperty(Str()),
        javascript=GivenProperty(Str()),
        setup=GivenProperty(Str()),
        code=GivenProperty(Str()),
    )

    info_definition = InfoDefinition(
        status=InfoProperty(schema=Enum(["experimental", "stable"])),
        docs=InfoProperty(schema=Str()),
    )

    def __init__(self, keypath, settings):
        self.path = keypath
        self.settings = settings

    def set_up(self):
        """Set up your applications and the test environment."""
        self.path.state = self.path.gen.joinpath("state")
        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        self.path.profile = self.path.gen.joinpath("profile")
        dirtemplate.DirTemplate(
            "webapp", self.path.key / "htmltemplate", self.path.state
        ).with_vars(javascript=self.given.get("javascript", "")).with_files(
            base_html={
                filename: {
                    "content": content
                }
                for filename, content in self.given.get("website", {}).items()
            }).ensure_built()

        self.path.state.joinpath("selectors.yml").write_text(
            self.given["selectors.yml"])

        self.server = (
            python("-m", "http.server")
            .in_dir(self.path.state / "webapp")
            .pexpect()
        )
        self.server.expect("Serving HTTP on 0.0.0.0")

        if not self.path.profile.exists():
            self.path.profile.mkdir()

        self.python = project_build(self.path, self.given["python version"],
                                    self.given["selenium version"]).bin.python

        self.example_py_code = (
            ExamplePythonCode(self.python, self.path.state)
            .with_setup_code(self.given.get("setup", ""))
            .with_terminal_size(160, 100)
            .with_long_strings()
        )

    @validate(
        code=Str(),
        will_output=Map({
            "in python 2": Str(),
            "in python 3": Str()
        }) | Str(),
        raises=Map({
            Optional("type"):
            Map({
                "in python 2": Str(),
                "in python 3": Str()
            })
            | Str(),
            Optional("message"):
            Map({
                "in python 2": Str(),
                "in python 3": Str()
            })
            | Str(),
        }),
        in_interpreter=Bool(),
    )
    def run(self, code, will_output=None, raises=None, in_interpreter=False):
        if in_interpreter:
            code = "{0}\nprint(repr({1}))".format(
                "\n".join(code.strip().split("\n")[:-1]),
                code.strip().split("\n")[-1])

        to_run = self.example_py_code.with_code(code)

        if self.settings.get("cprofile"):
            to_run = to_run.with_cprofile(
                self.path.profile.joinpath("{0}.dat".format(self.story.slug)))

        result = (to_run.expect_exceptions().run()
                  if raises is not None else to_run.run())

        if will_output is not None:
            actual_output = "\n".join(
                [line.rstrip() for line in result.output.split("\n")])
            try:
                Templex(will_output).assert_match(actual_output)
            except AssertionError:
                if self.settings.get("rewrite"):
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        if raises is not None:
            differential = False  # Difference between python 2 and python 3 output?
            exception_type = raises.get("type")
            message = raises.get("message")

            if exception_type is not None:
                if not isinstance(exception_type, str):
                    differential = True
                    exception_type = (
                        exception_type["in python 2"]
                        if self.given["python version"].startswith("2") else
                        exception_type["in python 3"])

            if message is not None:
                if not isinstance(message, str):
                    differential = True
                    message = (message["in python 2"]
                               if self.given["python version"].startswith("2")
                               else message["in python 3"])

            try:
                result.exception_was_raised(exception_type, message)
            except ExpectedExceptionMessageWasDifferent:
                if self.settings.get("rewrite") and not differential:
                    new_raises = raises.copy()
                    new_raises["message"] = result.exception.message
                    self.current_step.update(raises=new_raises)
                else:
                    raise

    def do_nothing(self):
        pass

    def pause(self, message="Pause"):
        import IPython

        IPython.embed()

    def tear_down(self):
        self.server.kill(signal.SIGTERM)
        self.server.wait()
Example #13
SHARED_FIELDS_KEYS = {
    "install_msg": Str(),
    "author": Seq(Str()),
    **COMMON_KEYS,
}

#: `cogs` metadata keys.
COG_KEYS = {
    "name": Str(),  # Downloader doesn't use this, but it lets us set a friendlier name
    "short": Str(),
    "description": Str(),
    "end_user_data_statement": Str(),
    Optional("class_docstring"): Str(),
    Optional("install_msg"): Str(),
    Optional("author"): Seq(Str()),
    Optional("required_cogs", {}): EmptyDict() | MapPattern(Str(), Url()),
    Optional("requirements", []): EmptyList() | Seq(Str()),
    Optional("tags", []): EmptyList() | Seq(Str()),
    **COMMON_KEYS,
}

#: Root schema of the info.yaml file.
SCHEMA = Map({
    "repo": Map(REPO_KEYS),
    "shared_fields": Map(SHARED_FIELDS_KEYS),
    "cogs": MapPattern(Str(), Map(COG_KEYS)),
})

#: Keys that should be skipped when outputting to info.json.
#: These keys are for infogen's usage.
KEYS_TO_SKIP_IN_COG_INFO = {"class_docstring"}
Example #14
class Engine(BaseEngine):
    """Python engine for running tests."""
    given_definition = GivenDefinition(
        files=GivenProperty(MapPattern(Str(), Str())),
        python_version=GivenProperty(Str()),
        setup=GivenProperty(Str()),
    )

    info_definition = InfoDefinition(
        docs=InfoProperty(schema=Str()),
    )

    def __init__(self, paths, settings):
        self.path = paths
        self.settings = settings
        self._build = hitchpylibrarytoolkit.PyLibraryBuild(
            "hitchqs",
            self.path,
        )

    def set_up(self):
        """Set up the environment ready to run the stories."""
        self._build.ensure_built()
        self.path.state = self.path.gen.joinpath("state")

        if self.path.state.exists():
            self.path.state.rmtree()
        self.path.state.mkdir()

        for filename, content in self.given['files'].items():
            filepath = self.path.state.joinpath(filename)

            if not filepath.dirname().exists():
                filepath.dirname().makedirs()
            filepath.write_text(content)

        self.python = self._build.bin.python

    def _process_exception(self, string):
        return string.replace(self.path.state, "/path/to")

    @no_stacktrace_for(AssertionError)
    @no_stacktrace_for(HitchRunPyException)
    @validate(
        code=Str(),
        will_output=Str(),
        raises=Map({
            Optional("type"): Str(),
            Optional("message"): Str(),
        })
    )
    def run(self, code, will_output=None, raises=None):
        self.example_py_code = ExamplePythonCode(self.python, self.path.state)\
            .with_terminal_size(160, 100)\
            .with_setup_code(self.given['setup'])\
            .in_dir(self.path.state)
        to_run = self.example_py_code.with_code(code)

        if self.settings.get("cprofile"):
            to_run = to_run.with_cprofile(
                self.path.profile.joinpath("{0}.dat".format(self.story.slug))
            )

        result = to_run.expect_exceptions().run() if raises is not None else to_run.run()

        actual_output = result.output

        if will_output is not None:
            try:
                Templex(will_output).assert_match(actual_output)
            except AssertionError:
                if self.settings.get("overwrite artefacts"):
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        if raises is not None:
            exception_type = raises.get('type')
            message = raises.get('message')

            try:
                result.exception_was_raised(exception_type)
                exception_message = self._process_exception(result.exception.message)
                Templex(message).assert_match(exception_message)
            except AssertionError:
                if self.settings.get("overwrite artefacts"):
                    new_raises = raises.copy()
                    new_raises['message'] = exception_message
                    self.current_step.update(raises=new_raises)
                else:
                    raise

    @no_stacktrace_for(AssertionError)
    @validate(files=MapPattern(Str(), Str()))
    def build_output_is(self, files):
        for filename, content in files.items():
            filepath = self.path.state.joinpath(filename)

            assert filepath.exists(), "{0} does not exist".format(filename)

            if content != "":
                try:
                    Templex(content).assert_match(filepath.text())
                except AssertionError as error:
                    raise AssertionError("{0} is nonmatching:\n\n{1}".format(filename, error))

        actual_files = [
            path.replace(self.path.state + "/", "")
            for path in pathquery(self.path.state.joinpath("example")).is_not_dir()
        ]

        assert len(actual_files) == len(files.keys()), \
            "Should be:\n\n{0}\n\nAre actually:\n\n{1}\n".format(
                '\n'.join(files.keys()),
                '\n'.join(actual_files),
            )

    def on_success(self):
        self.new_story.save()
Example #15
 MapPattern(
     Str(
         doc={
             "text":
             "The name of the output, as it will be referenced through the client library from within the function implementation."
         }),
     Map(
         {
             "target":
             Str(
                 doc={
                     "text":
                     "The name of the resource that will receive or be triggered by the messages sent over this output"
                 }),
             Optional("transform"):
             Any(doc={"any_options": ["transform"]}),
             Optional("filter"):
             Any(doc={"any_options": ["filter"]}),
         },
         doc={
             "title":
             "output",
             "text":
             "The target resource and, optionally, any transforms or filters that are applied before it is invoked."
         },
     ),
     doc={
         "title":
         "outputs",
         "text":
         "The (named) outputs of the function, which will receive or be triggered by the messages that are emitted by the function. The marshaling and connectivity will all be handled by the Plausible client library and the deployment",
     },
 ),
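
The documented schema above describes an outputs mapping. A fragment it would accept might look like this; the names are invented, and transform/filter are optional Any() nodes:

SAMPLE_OUTPUTS = """\
notifications:
  target: email-queue
audit:
  target: audit-log
  filter:
    level: error
"""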
Example #16
    def build(self):
        if self._build_path.exists():
            self._build_path.rmtree()
        self._build_path.mkdir()

        if self._src_path.joinpath("dirtemplate.yml").exists():
            config = load(
                self._src_path.joinpath("dirtemplate.yml").text(),
                Map({
                    Optional("base templates"):
                    Str(),
                    "templated":
                    Seq(
                        MapPattern(
                            Str(),
                            Map({
                                Optional("content"): Bool(),
                                Optional("filename"): Bool(),
                            }))),
                })).data
        else:
            config = {"templated": []}

        src_paths = list(pathquery(self._src_path))

        templated_filenames = [
            list(template.keys())[0] for template in config['templated']
        ]

        if "base templates" in config:
            templated_filenames.extend(
                pathquery(self._src_path.joinpath(config['base templates'])))

        non_templated = []

        for srcpath in src_paths:
            relpath = srcpath.relpath(self._src_path)
            add = True

            if relpath in templated_filenames:
                add = False

            if relpath == "dirtemplate.yml":
                add = False

            if srcpath.isdir():
                add = False

            if "base templates" in config:
                if relpath.startswith(config['base templates']):
                    add = False

            if add:
                non_templated.append(relpath)

        for relpath in non_templated:
            dest_path = self._dest.joinpath(relpath)

            if not dest_path.dirname().exists():
                dest_path.dirname().makedirs()

            if str(relpath) not in self._ignore_files:
                self._src_path.joinpath(relpath).copy(dest_path)

        for template_configuration in config['templated']:
            for src_path in src_paths:
                if not src_path.isdir():
                    relpath = src_path.relpath(self._src_path)

                    if relpath in template_configuration.keys():
                        if 'filename' in template_configuration[relpath]:
                            slug = slugify(relpath, separator=u'_')

                            if slug in self._files.keys():
                                for filename, variables in self._files[
                                        slug].items():
                                    dest_path = self._dest.joinpath(filename)

                                    if not dest_path.dirname().exists():
                                        dest_path.dirname().makedirs()

                                    render_vars = {}

                                    for name, var in self._render_vars.items():
                                        render_vars[name] = var

                                    for name, filevar in variables.items():
                                        render_vars[name] = filevar

                                    render_vars['thisdir'] = pathquery(
                                        dest_path.dirname())

                                    dest_path.write_text(
                                        render(
                                            src_path, self._functions,
                                            render_vars,
                                            base_templates(
                                                self._src_path,
                                                config.get("base templates"))))
                            else:
                                raise Exception(
                                    "{0} is a templated filename but was not "
                                    "specified with with_files".format(relpath))
                        else:
                            dest_path = self._dest.joinpath(relpath)

                            if not dest_path.dirname().exists():
                                dest_path.dirname().makedirs()

                            render_vars = copy(self._render_vars)
                            render_vars['thisdir'] = pathquery(
                                dest_path.dirname())

                            if template_configuration[relpath]['content']:
                                dest_path.write_text(
                                    render(
                                        src_path,
                                        self._functions,
                                        render_vars,
                                        base_templates(
                                            self._src_path,
                                            config.get("base templates")),
                                    ))
        self.refingerprint()
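
A dirtemplate.yml that this loader would accept could look like the following (file names invented); each item under templated names a source file and flags whether its content and/or filename is templated:

SAMPLE_DIRTEMPLATE_YML = """\
base templates: base
templated:
- index.html:
    content: yes
- README.md:
    filename: yes
"""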
Example #17
from strictyaml import Any, Map, MapPattern, Optional, Str

addon = Map({
    "spec": Str(),
    Optional("install"): MapPattern(Str(), Any(), minimum_keys=1),
    "meta": Map({
        "name": Str(),
        "version": Str()
    }),
    Optional("config"): MapPattern(Str(), Any(), minimum_keys=1),
})
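
An addon document matching the schema above might look like this (names and versions are invented); install and config are optional but must contain at least one key when present:

SAMPLE_ADDON = """\
spec: v1
meta:
  name: redis
  version: "5.0"
install:
  chart: stable/redis
"""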

application = Map({
    "spec":
    Str(),
    Optional("meta"):
    Map({
        Optional("name"): Str(),
        Optional("version"): Str()
    }),
    Optional("services"):
    MapPattern(Str(), Map({"type": Str()})),
    Optional("addons"):
    MapPattern(
        Str(),
        Map({
            "manager": Str(),
            "hash": Str(),
            Optional("settings"): MapPattern(Str(), Any(), minimum_keys=1),
        }),
        minimum_keys=1,
    ),
})
Example #18
    def __init__(self) -> None:
        from strictyaml import Map, MapPattern, Optional, Str, Seq, Int, Bool, EmptyList, OrValidator

        d_named_object = {
            'name': Str(),
            'description': Str(),
        }

        d_feture_check = {
            Optional('since', default=''): Str(),
            Optional('deprecated', default=''): Str(),
        }

        self.s_posarg = Map({
            **d_feture_check,
            'description': Str(),
            'type': Str(),
            Optional('default', default=''): Str(),
        })

        self.s_varargs = Map({
            **d_named_object,
            **d_feture_check,
            'type': Str(),
            Optional('min_varargs', default=-1): Int(),
            Optional('max_varargs', default=-1): Int(),
        })

        self.s_kwarg = Map({
            **d_feture_check,
            'type': Str(),
            'description': Str(),
            Optional('required', default=False): Bool(),
            Optional('default', default=''): Str(),
        })

        self.s_function = Map({
            **d_named_object,
            **d_feture_check,
            'returns':
            Str(),
            Optional('notes', default=[]):
            OrValidator(Seq(Str()), EmptyList()),
            Optional('warnings', default=[]):
            OrValidator(Seq(Str()), EmptyList()),
            Optional('example', default=''):
            Str(),
            Optional('posargs'):
            MapPattern(Str(), self.s_posarg),
            Optional('optargs'):
            MapPattern(Str(), self.s_posarg),
            Optional('varargs'):
            self.s_varargs,
            Optional('posargs_inherit', default=''):
            Str(),
            Optional('optargs_inherit', default=''):
            Str(),
            Optional('varargs_inherit', default=''):
            Str(),
            Optional('kwargs'):
            MapPattern(Str(), self.s_kwarg),
            Optional('kwargs_inherit', default=[]):
            OrValidator(OrValidator(Seq(Str()), EmptyList()), Str()),
        })

        self.s_object = Map({
            **d_named_object,
            **d_feture_check,
            'long_name':
            Str(),
            Optional('extends', default=''):
            Str(),
            Optional('notes', default=[]):
            OrValidator(Seq(Str()), EmptyList()),
            Optional('warnings', default=[]):
            OrValidator(Seq(Str()), EmptyList()),
            Optional('example', default=''):
            Str(),
            Optional('methods'):
            Seq(self.s_function),
            Optional('is_container', default=False):
            Bool()
        })
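
An invented function entry that would pass s_function; only the required name, description and returns keys plus one positional argument are shown, and the remaining optional keys fall back to their defaults:

SAMPLE_FUNCTION = """\
name: message
description: Print a message at configure time.
returns: void
posargs:
  text:
    description: The text to print.
    type: str
"""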
Example #19
#!/usr/bin/python3
import os
import sys
cur_dir = os.path.dirname(os.path.abspath(__file__))
# subdir = 'strictyaml-1.0.6'
subdir = 'strictyaml'
sys.path.append(os.path.join(cur_dir, subdir))
# Depends on ruamel: python3-ruamel.yaml
# Depends on dateutil: python3-dateutil
from strictyaml import (load, Map, Str, Int, Seq, Any, Bool, Optional,
                        MapPattern, YAMLError)
from strictyaml.exceptions import InconsistentIndentationDisallowed

schema_ver = MapPattern(Str(), Any())
# fname = 'scanner_error.yaml'
# fname = 'indent.yaml'
fname = 'test.yaml'
with open(fname) as f:
    s = f.read()
try:
    parsed = load(s, schema_ver, label=fname)
except InconsistentIndentationDisallowed as e:
    print('Use the same indentation across the file')
    print(e)
    sys.exit(1)
except YAMLError as e:
    print('YAML parsing error:')
    print(e)
    sys.exit(1)

schema = Map({
Example #20
class Engine(BaseEngine):
    """Engine for running tests and inspecting result."""
    given_definition = GivenDefinition(
        files=GivenProperty(MapPattern(Str(), Str())),
        setup=GivenProperty(Str()),
    )

    def __init__(self, paths, rewrite, debug):
        self.path = paths
        self._rewrite = rewrite
        self._debug = debug

    def set_up(self):
        self.path.state = self.path.gen.joinpath("state")

        self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        self.path.example = self.path.project / "example"

        if self.path.example.exists():
            self.path.example.rmtree()
        self.path.example.makedirs()

        for filename, text in self.given.get("files", {}).items():
            filepath = self.path.example.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().makedirs()
            filepath.write_text(text)

        ubuntu = hitchbuildvagrant.Box("hitchkey", "ubuntu-trusty-64")\
                                  .with_download_path(self.path.share)\
                                  .which_syncs(self.path.project, "/hitchkey")

        setup_code = self.given.get("setup", "")

        class PythonSnapshot(hitchbuildvagrant.Snapshot):
            def setup(self):
                self.cmd(setup_code).run()
                self.cmd("sudo apt-get install python-setuptools -y").run()
                self.cmd("sudo apt-get install build-essential -y").run()
                self.cmd("sudo apt-get install python-pip -y").run()
                self.cmd("sudo apt-get install python-virtualenv -y").run()
                self.cmd("sudo apt-get install python3 -y").run()
                self.cmd("cd /hitchkey/ ; sudo pip install .").run()

        setuphash = md5(setup_code.encode('utf8')).hexdigest()

        self.snapshot = PythonSnapshot("hitchkey_{}".format(setuphash),
                                       ubuntu).with_build_path(self.path.gen)
        self.snapshot.ensure_built()
        self.snapshot.box.vagrant("rsync").run()
        self.snapshot.cmd(
            "cd /hitchkey/ ; sudo pip uninstall hitchkey -y ; sudo pip install ."
        ).run()

    @validate(timeout=Int(), exit_code=Int())
    @no_stacktrace_for(ICommandError)
    @no_stacktrace_for(AssertionError)
    def run(self, cmd=None, will_output=None, timeout=240, exit_code=0):
        process = (
            self.snapshot.cmd(cmd.replace("\n", " ; "))
            .interact()
            .screensize(160, 100)
            .run()
        )
        process.wait_for_finish(timeout=timeout)

        actual_output = '\n'.join(process.stripshot().split("\n")[:-1])

        if will_output is not None:
            try:
                Templex(will_output).assert_match(actual_output)
            except AssertionError:
                if self._rewrite:
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        assert process.exit_code == exit_code, "Exit code should be {} was {}, output:\n{}".format(
            exit_code,
            process.exit_code,
            actual_output,
        )

    def pause(self):
        import IPython
        IPython.embed()

    def on_failure(self, reason):
        if self._debug:
            import IPython
            IPython.embed()

    def tear_down(self):
        """Clean out the state directory."""
        if hasattr(self, 'snapshot'):
            self.snapshot.shutdown()
Example #21
from strictyaml import Any, Enum, Int, Map, MapPattern, Optional, Regex, Seq, Str

limits_schema = Map(
    {
        "time": Int(),
        Optional("memory", default=8192): Int(),
        Optional("output"): Int(),
        Optional("cores"): Int(),
    }
)

module_schema = Regex(r"\.?\w+(\.\w+)*")

plugin_schema = Map(
    {"module": module_schema, Optional("config"): MapPattern(Str(), Any())}
)

task_sources = Enum(["local", "url"])

schema = Map(
    {
        "title": Str(),
        Optional("description"): Str(),
        "limits": limits_schema,
        "steps": Map(
            {"run": Seq(plugin_schema), Optional("analysis"): Seq(plugin_schema)}
        ),
        "observers": Seq(plugin_schema),
        "tasks": MapPattern(Str(), MapPattern(Str(), Any())),
        "tools": MapPattern(
            Str(),
Example #22
class Engine(BaseEngine):
    """Python engine for running tests."""

    given_definition = GivenDefinition(
        python_version=GivenProperty(Str()),
        files=GivenProperty(MapPattern(Str(), Str())),
    )

    info_definition = InfoDefinition(
        status=InfoProperty(schema=Enum(["experimental", "stable"])),
        docs=InfoProperty(schema=Str()),
        fails_on_python_2=InfoProperty(schema=Bool()),
        description=InfoProperty(schema=Str()),
        experimental=InfoProperty(schema=Bool()),
    )

    def __init__(self, keypath, rewrite=False, build=False):
        self.path = keypath
        self._rewrite = rewrite
        self._build = hitchpylibrarytoolkit.PyLibraryBuild(
            "hitchqs",
            self.path,
        )

    def set_up(self):
        """Set up your applications and the test environment."""  
        self._build.ensure_built()
        
        self.path.state = self.path.project.parent / "tempqs"
        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        for filename, contents in self.given.get("files", {}).items():
            filepath = self.path.state.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().makedirs()
            filepath.write_text(contents)

        self.path.profile = self.path.gen.joinpath("profile")

        if not self.path.profile.exists():
            self.path.profile.mkdir()

        self.qs = self._build.bin.quickstart

    def _run(self, command, args, will_output=None, exit_code=0, timeout=5):
        process = command(*shlex.split(args)).interact().screensize(160, 80).run()
        process.wait_for_finish()

        actual_output = process.stripshot()\
                               .replace(self.path.state, "/path/to")

        # Replace timings like "35.1" with "n.n" so the output is deterministic
        actual_output = re.sub(r"[0-9]+\.[0-9]", "n.n", actual_output)

        if will_output is not None:
            try:
                Templex(will_output).assert_match(actual_output)
            except AssertionError:
                if self._rewrite:
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        assert process.exit_code == exit_code, "Exit code should be {} was {}, output:\n{}".format(
            exit_code,
            process.exit_code,
            actual_output,
        )

    @validate(timeout=Int(), exit_code=Int())
    @no_stacktrace_for(AssertionError)
    def quickstart(self, args, will_output=None, exit_code=0, timeout=5):
        self._run(self.qs.in_dir(self.path.state), args, will_output, exit_code, timeout=timeout)

    @no_stacktrace_for(AssertionError)
    def hk(self, args, will_output=None, exit_code=0, timeout=5, in_dir=""):
        if self._build:
            self._run(Command("hk").in_dir(self.path.state / in_dir), args, will_output, exit_code, timeout=timeout)

    def initial_hk(self, args="", in_dir=""):
        if self._build:
            Command("hk", *shlex.split(args)).in_dir(self.path.state / in_dir).run()

    @validate(filenames=Seq(Str()))
    def files_appear(self, filenames):
        appeared = set()
        should_appear = set(filenames)
        for existing_file in pathquery(self.path.state):
            if "__pycache__" not in existing_file and not existing_file.isdir():
                appeared.add(str(existing_file.relpath(self.path.state)))

        diff = should_appear.symmetric_difference(appeared)

        assert diff == set(), \
            "Difference in files that appeared:\n{}".format('\n'.join(diff))

    def pause(self, message="Pause"):
        import IPython

        IPython.embed()


    def tear_down(self):
        if self._build:
            if self.path.state.exists():
                Command("hk", "--clean").ignore_errors().in_dir(self.path.state).run()
Example #23
class Experiment:
    """
    This class orchestrates the analysis pipeline for our redox imaging experiments.
    """

    experiment_schema = Map({
        "pipeline":
        Map({
            "strategy": Str(),
            "acquisition_method": Enum(["acquire", "mda"]),
            "trimmed_profile_length": Int(),
            "untrimmed_profile_length": Int(),
            "seg_threshold": Int(),
            "measurement_order": Int(),
            "measure_thickness": Float(),
            "reference_wavelength": Str(),
            "image_register": Int(),
            "channel_register": Int(),
            "population_register": Int(),
            "trimmed_regions": MapPattern(Str(), CommaSeparated(Float())),
            "untrimmed_regions": MapPattern(Str(), CommaSeparated(Float())),
        }),
        "redox":
        Map({
            "ratio_numerator": Str(),
            "ratio_denominator": Str(),
            "r_min": Float(),
            "r_max": Float(),
            "instrument_factor": Float(),
            "midpoint_potential": Float(),
            "z": Int(),
            "temperature": Float(),
        }),
        "registration":
        Map({
            "n_deriv": Float(),
            "warp_n_basis": Float(),
            "warp_order": Float(),
            "warp_lambda": Float(),
            "smooth_lambda": Float(),
            "smooth_n_breaks": Float(),
            "smooth_order": Float(),
            "rough_lambda": Float(),
            "rough_n_breaks": Float(),
            "rough_order": Float(),
        }),
        "output":
        Map({
            "should_save_plots": Bool(),
            "should_save_profile_data": Bool(),
            "should_save_summary_data": Bool(),
        }),
    })

    seg_images: xr.DataArray = None
    rot_fl: xr.DataArray = None
    rot_seg: xr.DataArray = None

    midlines: xr.DataArray = None

    untrimmed_raw_profiles: xr.DataArray = None
    untrimmed_std_profiles: xr.DataArray = None
    untrimmed_reg_profiles: xr.DataArray = None

    trimmed_raw_profiles: xr.DataArray = None
    trimmed_std_profiles: xr.DataArray = None
    trimmed_reg_profiles: xr.DataArray = None

    channel_warps: xr.DataArray = None
    std_warps: xr.DataArray = None

    def __init__(self, exp_dir):
        self.experiment_dir = Path(exp_dir)
        self.settings_path = self.experiment_dir.joinpath("settings.yaml")
        try:
            with open(self.settings_path, "r") as f:
                self.config = load(f.read(), self.experiment_schema).data
        except YAMLError:
            raise ValueError("Incorrectly specified config file.")

        self.experiment_id = self.experiment_dir.stem

        # compute the filenames/paths for this experiment
        self.movement_path = self.experiment_dir.joinpath(self.experiment_id +
                                                          "-mvmt.csv")
        self.frame_map_path = self.experiment_dir.joinpath(self.experiment_id +
                                                           "-frame_map.csv")
        self.processed_images_dir = self.experiment_dir.joinpath(
            "processed_images")
        self.rot_seg_dir = self.processed_images_dir.joinpath("rot_seg")
        self.rot_fl_dir = self.processed_images_dir.joinpath("rot_fl")
        self.fl_imgs_dir = self.processed_images_dir.joinpath(
            "fluorescent_images")
        self.orig_images_path = self.processed_images_dir.joinpath("images.nc")
        self.seg_images_path = self.processed_images_dir.joinpath(
            "seg_images.nc")
        self.aligned_images_path = self.processed_images_dir.joinpath(
            "aligned_images.nc")
        self.aligned_seg_images_path = self.processed_images_dir.joinpath(
            "aligned_seg_images.nc")

        # load images
        self.images = self._load_raw_images()

        # try to load masks
        try:
            self.load_masks()
        except IOError:
            logging.info("No masks found in experiment directory")

    # Computed Filepaths

    @property
    def midlines_path(self) -> Path:
        return self.analysis_dir.joinpath("midlines.pickle")

    @property
    def raw_img_stack_path(self) -> Path:
        # TODO test that this works
        accepted_extensions = [".tif", ".tiff", ".stk"]

        candidate_paths = [
            self.experiment_dir.joinpath(f"{self.experiment_id}{ext}")
            for ext in accepted_extensions
        ]

        for path in candidate_paths:
            if path.exists():
                return path

        raise ValueError(
            f"No image found in experiment directory. Tried the following files: {candidate_paths}"
        )

    @property
    def fig_dir(self):
        return self.analysis_dir.joinpath("figs")

    def untrimmed_profile_data_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-untrimmed_{treatment}_profile_data.nc")

    def trimmed_profile_data_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-trimmed_{treatment}_profile_data.nc")

    @property
    def channel_warp_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id +
                                          "-channel_warps.nc")

    @property
    def std_warp_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id + "-std_warps.nc")

    def untrimmed_profile_data_csv_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-untrimmed_{treatment}_profile_data.csv")

    def trimmed_profile_data_csv_path(self, treatment="raw"):
        return self.analysis_dir.joinpath(
            self.experiment_id + f"-trimmed_{treatment}_profile_data.csv")

    @property
    def untrimmed_region_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id +
                                          "-untrimmed_region_data.csv")

    @property
    def trimmed_region_data_path(self):
        return self.analysis_dir.joinpath(self.experiment_id +
                                          "-trimmed_region_data.csv")

    @property
    def analysis_dir(self) -> Path:
        date_str = datetime.datetime.now().strftime("%Y-%m-%d")
        strategy = self.config["pipeline"]["strategy"]
        if len(strategy) > 0:
            suffix = f"_{strategy}"
        else:
            suffix = ""
        analysis_dir_ = self.experiment_dir.joinpath(
            "analyses",
            utils.get_valid_filename(f"{date_str}{suffix}"),
        )
        # analysis_dir_.mkdir(parents=True, exist_ok=True)
        return analysis_dir_

    def _load_raw_images(self):
        """
        This returns the raw (non-median-subtracted) images
        """
        logging.info(f"Loading image data from {self.raw_img_stack_path}")
        raw_image_data = pio.load_tiff_as_hyperstack(
            img_stack_path=self.raw_img_stack_path,
            manual_metadata=self.frame_map_path,
            mvmt_metadata=self.movement_path,
        )

        raw_image_data = raw_image_data.assign_coords({
            "experiment_id": (
                ("animal", ),
                np.repeat(self.experiment_id, raw_image_data.animal.size),
            )
        })
        raw_image_data = self.add_experiment_metadata_to_data_array(
            raw_image_data)

        return raw_image_data

    def _load_movement(self) -> pd.DataFrame:
        movement_path = self.experiment_dir.joinpath(self.experiment_id +
                                                     "-mvmt.csv")
        try:
            df = pd.read_csv(movement_path)
            df = df.pivot_table(index="animal",
                                columns=["region", "pair"],
                                values="movement")
            df = df.stack("pair")
            return df
        except FileNotFoundError:
            logging.warning(
                f"Tried to access {movement_path}; file was not found")
            return None

    def make_analysis_dir(self) -> None:
        logging.info(f"Making analysis directory at {self.analysis_dir}")
        self.analysis_dir.mkdir(parents=True, exist_ok=True)

    @property
    def trimmed_summary_table(self):
        df = profile_processing.summarize_over_regions(
            self.trimmed_raw_profiles,
            regions=self.config["pipeline"]["trimmed_regions"],
            rescale=False,
            **self.config["redox"],
        )
        return df

    @property
    def untrimmed_summary_table(self):
        df = profile_processing.summarize_over_regions(
            self.untrimmed_raw_profiles,
            regions=self.config["pipeline"]["untrimmed_regions"],
            **self.config["redox"],
        )
        return df

    ####################################################################################
    # PIPELINE
    ####################################################################################

    def full_pipeline(self):
        logging.info(f"Starting full pipeline run for {self.experiment_dir}")

        self.make_analysis_dir()

        logging.info(f"Saving fluorescent images to {self.fl_imgs_dir}")
        pio.save_images_xarray_to_tiffs(self.images,
                                        self.fl_imgs_dir,
                                        prefix=self.experiment_id)

        self.segment_pharynxes()
        self.register_images()
        self.align_and_center()
        self.calculate_midlines()
        self.measure_under_midlines()
        self.register_profiles()
        self.trim_data()
        self.calculate_redox()
        self.do_manual_ap_flips()
        self.persist_to_disk()

        logging.info(f"Finished full pipeline run for {self.experiment_dir}")

        return self

    def run_neuron_pipeline(self):
        logging.info(
            f"Starting full neuron analysis pipeline run for {self.experiment_dir}"
        )
        self.make_analysis_dir()
        df = ip.measure_under_labels(self.images,
                                     self.seg_images).reset_index()

        df.to_csv(self.analysis_dir /
                  (self.experiment_id + "-neuron_analysis.csv"))

    def segment_pharynxes(self):
        if self.seg_images is not None:
            logging.info("Masks have been specified; skipping mask generation")
            self.save_masks()
            return

        logging.info("Generating masks")
        self.seg_images = ip.segment_pharynxes(
            self.images,
            wvl=self.config["pipeline"]["reference_wavelength"])
        self.save_masks()

    def register_images(self):
        if self.config["pipeline"]["image_register"]:
            logging.info("Registering Images")
            self.images = ip.register_all_images(self.images, self.seg_images)

    def align_and_center(self):
        logging.info("Centering and rotating pharynxes")
        self.rot_fl, self.rot_seg = ip.center_and_rotate_pharynxes(
            self.images,
            self.seg_images,
        )

        logging.info(f"Saving rotated FL images to {self.aligned_images_path}")
        pio.save_profile_data(self.rot_fl, self.aligned_images_path)

        logging.info(f"Saving rotated masks to {self.aligned_seg_images_path}")
        pio.save_profile_data(self.rot_seg, self.aligned_seg_images_path)

    def calculate_midlines(self):
        logging.info("Calculating midlines")
        self.midlines = ip.calculate_midlines(self.rot_seg, degree=4)

    def measure_under_midlines(self):
        logging.info("Measuring under midlines")
        self.untrimmed_raw_profiles = ip.measure_under_midlines(
            self.rot_fl,
            self.midlines,
            n_points=self.config["pipeline"]["untrimmed_profile_length"],
            order=self.config["pipeline"]["measurement_order"],
            thickness=float(self.config["pipeline"]["measure_thickness"]),
        )
        self.untrimmed_raw_profiles = profile_processing.align_pa(
            self.untrimmed_raw_profiles)
        self.untrimmed_raw_profiles = self.add_experiment_metadata_to_data_array(
            self.untrimmed_raw_profiles)

        # subtract the image medians from the profile data
        logging.info("Subtracting image medians from profile data")
        self.untrimmed_raw_profiles = ip.subtract_medians(
            self.untrimmed_raw_profiles, self.images)

    def register_profiles(self):
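        # Both registration steps below are optional and controlled by the
        # "population_register" and "channel_register" flags in the pipeline
        # config.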

        if self.config["pipeline"]["population_register"]:
            logging.info("Standardizing profiles")
            (
                self.untrimmed_std_profiles,
                self.std_warps,
            ) = profile_processing.standardize_profiles(
                self.untrimmed_raw_profiles,
                redox_params=self.config["redox"],
                **self.config["registration"],
            )

        if self.config["pipeline"]["channel_register"]:
            logging.info("Channel-Registering profiles")

            if self.untrimmed_std_profiles is not None:
                logging.info(
                    "Using the standardized profiles for channel-registration")
                data_to_register = self.untrimmed_std_profiles
            else:
                logging.info("using the raw profiles for channel-registration")
                data_to_register = self.untrimmed_raw_profiles

            (
                self.untrimmed_reg_profiles,
                self.channel_warps,
            ) = profile_processing.channel_register(
                data_to_register,
                redox_params=self.config["redox"],
                reg_params=self.config["registration"],
            )

    def trim_data(self):
        logging.info("Trimming intensity data")

        self.trimmed_raw_profiles = self.add_experiment_metadata_to_data_array(
            profile_processing.trim_profiles(
                self.untrimmed_raw_profiles,
                self.config["pipeline"]["seg_threshold"],
                ref_wvl=self.config["pipeline"]["reference_wavelength"],
            ))

        if self.untrimmed_std_profiles is not None:
            self.trimmed_std_profiles = self.add_experiment_metadata_to_data_array(
                profile_processing.trim_profiles(
                    self.untrimmed_std_profiles,
                    self.config["pipeline"]["seg_threshold"],
                    ref_wvl=self.config["pipeline"]["reference_wavelength"],
                ))

        if self.untrimmed_reg_profiles is not None:
            self.trimmed_reg_profiles = self.add_experiment_metadata_to_data_array(
                profile_processing.trim_profiles(
                    self.untrimmed_reg_profiles,
                    self.config["pipeline"]["seg_threshold"],
                    ref_wvl=self.config["pipeline"]["reference_wavelength"],
                ))

    def calculate_redox(self):
        logging.info("Calculating redox measurements")

        redox_params = self.config["redox"]

        # Images
        self.images = utils.add_derived_wavelengths(self.images,
                                                    **redox_params)
        self.rot_fl = utils.add_derived_wavelengths(self.rot_fl,
                                                    **redox_params)

        # profiles
        self.trimmed_raw_profiles = utils.add_derived_wavelengths(
            self.trimmed_raw_profiles, **redox_params)

        self.untrimmed_raw_profiles = utils.add_derived_wavelengths(
            self.untrimmed_raw_profiles, **redox_params)

    def do_manual_ap_flips(self):
        # TODO finish implementation
        logging.info("skipping manual AP flips - not implemented")

    def flip_at(self, idx):
        # TODO finish implementation
        raise NotImplementedError

    ####################################################################################
    # PERSISTENCE / IO
    ####################################################################################

    def save_images(self):
        """Save this experiment's images to disk as netCDF4 files"""
        imgs_paths = [
            (self.images, self.orig_images_path),
            (self.rot_fl, self.aligned_images_path),
            (self.seg_images, self.seg_images_path),
            (self.rot_seg, self.aligned_seg_images_path),
        ]
        for img, path in imgs_paths:
            if img is not None:
                logging.info(f"Saving images to {path}")
                img.to_netcdf(path)

    def make_fig_dir(self):
        fig_dir = self.analysis_dir.joinpath("figs")
        fig_dir.mkdir(parents=True, exist_ok=True)
        return fig_dir

    def save_individual_profiles(self, profile_data, treatment: str,
                                 trimmed: bool):
        if profile_data is None:
            return

        fig_dir = self.make_fig_dir()

        profile_data_fig_dir = (fig_dir / "profile_data" / treatment /
                                ("trimmed" if trimmed else "untrimmed"))

        individual_data_fig_dir = profile_data_fig_dir.joinpath("individual")
        individual_data_fig_dir.mkdir(exist_ok=True, parents=True)

        for title, fig in plots.generate_wvl_pair_timepoint_profile_plots(
                profile_data):
            title = title.replace(" ", "")
            fig.savefig(individual_data_fig_dir /
                        f"{self.experiment_id}-{title}-individuals.pdf")
            plt.close(fig)

    def save_avg_profiles(self, profile_data, treatment: str, trimmed: bool):
        if profile_data is None:
            return

        fig_dir = self.make_fig_dir()

        profile_data_fig_dir = (fig_dir / "profile_data" / treatment /
                                ("trimmed" if trimmed else "untrimmed"))

        individual_data_fig_dir = profile_data_fig_dir.joinpath("avg")
        individual_data_fig_dir.mkdir(exist_ok=True, parents=True)

        for title, fig in plots.generate_avg_wvl_pair_profile_plots(
                profile_data):
            title = title.replace(" ", "")
            fig.savefig(individual_data_fig_dir /
                        f"{self.experiment_id}-{title}-avg.pdf")
            plt.close(fig)

    def save_plots(self):
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")

            for data, treatment, trimmed in [
                (self.untrimmed_raw_profiles, "raw", False),
                (self.untrimmed_std_profiles, "standardized", False),
                (self.untrimmed_reg_profiles, "channel-registered", False),
                (self.trimmed_raw_profiles, "raw", True),
                (self.trimmed_std_profiles, "standardized", True),
                (self.trimmed_reg_profiles, "channel-registered", True),
            ]:
                self.save_individual_profiles(data, treatment, trimmed)
                self.save_avg_profiles(data, treatment, trimmed)

            # frame-normed Ratio Images
            mvmt_annotation_img_path = self.fig_dir.joinpath(
                f"{self.experiment_id}-movement_annotation_imgs.pdf")
            imgs = utils.add_derived_wavelengths(self.images,
                                                 **self.config["redox"])
            with PdfPages(mvmt_annotation_img_path) as pdf:
                for i in tqdm(range(self.images.animal.size)):
                    fig = plots.plot_pharynx_R_imgs(imgs[i],
                                                    mask=self.seg_images[i])
                    fig.suptitle(f"animal = {i}")
                    pdf.savefig(fig)
                    if (i % 20) == 0:
                        plt.close("all")

            # Pop-normed ratio images
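            # The display range used below is the mean +/- 1.96 standard
            # deviations (an ~95% interval) of the trimmed ratio ("r")
            # profiles.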
            u = self.trimmed_raw_profiles.sel(wavelength="r").mean()
            std = self.trimmed_raw_profiles.sel(wavelength="r").std()

            for pair in self.rot_fl.pair.values:
                for tp in self.rot_fl.timepoint.values:
                    ratio_img_path = self.fig_dir.joinpath(
                        f"{self.experiment_id}-ratio_images-pair={pair};timepoint={tp}.pdf"
                    )
                    with PdfPages(ratio_img_path) as pdf:
                        logging.info(
                            f"Saving ratio images to {ratio_img_path}")
                        for i in tqdm(range(self.rot_fl.animal.size)):
                            fig, ax = plt.subplots(dpi=300)
                            ratio_img = (self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_numerator"],
                                pair=pair,
                                timepoint=tp,
                            ) / self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_denominator"],
                                pair=pair,
                                timepoint=tp,
                            ))[i]
                            fl_img = self.rot_fl.sel(
                                wavelength=self.config["redox"]
                                ["ratio_numerator"],
                                pair=pair,
                                timepoint=tp,
                            )[i]
                            im, cbar = plots.imshow_ratio_normed(
                                ratio_img,
                                fl_img,
                                r_min=u - (std * 1.96),
                                r_max=u + (std * 1.96),
                                colorbar=True,
                                i_max=5000,
                                i_min=1000,
                                ax=ax,
                            )
                            ax.plot(
                                *self.midlines.sel(
                                    pair=pair,
                                    timepoint=tp,
                                )[i].values[()].linspace(),
                                color="green",
                                alpha=0.3,
                            )
                            strain = self.rot_fl.strain.values[i]
                            ax.set_title(
                                f"Animal={i} ; Pair={pair} ; Strain={strain}")
                            cax = cbar.ax
                            for j in range(len(self.trimmed_raw_profiles)):
                                cax.axhline(
                                    self.trimmed_raw_profiles.sel(
                                        wavelength="r",
                                        pair=pair,
                                        timepoint=tp)[j].mean(),
                                    color="k",
                                    alpha=0.1,
                                )
                            cax.axhline(
                                self.trimmed_raw_profiles.sel(
                                    wavelength="r", pair=pair,
                                    timepoint=tp)[i].mean(),
                                color="k",
                            )
                            pdf.savefig()
                            if (i % 20) == 0:
                                plt.close("all")

    def persist_profile_data(self):
        for treatment, untrimmed_profile_data in (
            ("raw", self.untrimmed_raw_profiles),
            ("std", self.untrimmed_std_profiles),
            ("reg", self.untrimmed_reg_profiles),
        ):
            if untrimmed_profile_data is not None:
                untrimmed_prof_path = self.untrimmed_profile_data_path(
                    treatment)
                logging.info(
                    f"Saving untrimmed {treatment} profile data to {untrimmed_prof_path}"
                )
                pio.save_profile_data(untrimmed_profile_data,
                                      untrimmed_prof_path)

                untrimmed_prof_path_csv = self.untrimmed_profile_data_csv_path(
                    treatment)
                profile_processing.to_dataframe(
                    untrimmed_profile_data,
                    "value").to_csv(untrimmed_prof_path_csv)

        for treatment, trimmed_profile_data in (
            ("raw", self.trimmed_raw_profiles),
            ("std", self.trimmed_std_profiles),
            ("reg", self.trimmed_reg_profiles),
        ):
            if trimmed_profile_data is not None:
                trimmed_prof_path = self.trimmed_profile_data_path(treatment)
                logging.info(
                    f"Saving trimmed {treatment} profile data to {trimmed_prof_path}"
                )
                pio.save_profile_data(trimmed_profile_data, trimmed_prof_path)

                trimmed_prof_path_csv = self.trimmed_profile_data_csv_path(
                    treatment)
                logging.info(
                    f"Saving trimmed {treatment} profile data to {trimmed_prof_path_csv}"
                )
                profile_processing.to_dataframe(
                    trimmed_profile_data,
                    "value").to_csv(trimmed_prof_path_csv)

        # Warps, if necessary
        if self.config["pipeline"]["channel_register"]:
            logging.info(
                f"Saving channel warp data to {self.channel_warp_data_path}")
            self.channel_warps.to_netcdf(self.channel_warp_data_path)

        if self.config["pipeline"]["population_register"]:
            logging.info(
                f"Saving standardization warp data to {self.std_warp_data_path}")
            self.std_warps.to_netcdf(self.std_warp_data_path)

    def save_summary_data(self):
        # Persist the region means
        logging.info(
            f"Saving untrimmed region means to {self.untrimmed_region_data_path}"
        )
        self.untrimmed_summary_table.to_csv(self.untrimmed_region_data_path)
        logging.info(
            f"Saving trimmed region means to {self.trimmed_region_data_path}")
        self.trimmed_summary_table.to_csv(self.trimmed_region_data_path)

    def save_masks(self):
        logging.info(f"saving masks to {self.seg_images_path}")
        pio.save_profile_data(self.seg_images, self.seg_images_path)

    def load_masks(self):
        self.seg_images = pio.load_profile_data(self.seg_images_path)
        logging.info(f"Loaded masks from {self.seg_images_path}")

    def save_midlines(self):
        pio.save_midlines(self.midlines_path, self.midlines)

    def load_midlines(self):
        return pio.load_midlines(self.midlines_path)

    def persist_to_disk(self):
        logging.info(
            f"Saving {self.experiment_id} inside {self.experiment_dir}")

        self.save_midlines()

        if self.config["output"]["should_save_summary_data"]:
            self.save_summary_data()

        if self.config["output"]["should_save_profile_data"]:
            self.persist_profile_data()

        if self.config["output"]["should_save_plots"]:
            self.save_plots()

    ####################################################################################
    # MISC / HELPER
    ####################################################################################
    def add_experiment_metadata_to_data_array(self, data_array: xr.DataArray):
        params = {}
        params.update(self.config["pipeline"])
        params.update(self.config["redox"])
        params.update(self.config["registration"])

        to_remove = ["trimmed_regions", "untrimmed_regions"]
        for k in to_remove:
            del params[k]

        return data_array.assign_attrs(**params)
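
add_experiment_metadata_to_data_array above folds the pipeline, redox and registration parameters into the attributes of whatever xarray object it is given, so every artifact the pipeline saves records the settings it was produced with. A minimal sketch of the same pattern on a toy DataArray (the parameter names and values here are illustrative only, not taken from the pipeline config):

import numpy as np
import xarray as xr

params = {"strategy": "cr_seg", "reference_wavelength": "410"}
da = xr.DataArray(np.zeros((2, 5)), dims=("animal", "position"))
da = da.assign_attrs(**params)            # attributes travel with the array
print(da.attrs["reference_wavelength"])   # -> 410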
Example #24
    'executionTimeout': None,
    'killTimeout': 30,
    'statsd': None,
}


_report_schema = Map({
    Opt("sentry"): Map({
        Opt("dsn"): Map({
            Opt("value"): EmptyNone() | Str(),
            Opt("fromFile"): EmptyNone() | Str(),
            Opt("fromEnvVar"): EmptyNone() | Str(),
        }),
        Opt("fingerprint"): Seq(Str()),
        Opt("level"): Str(),
        Opt("extra"): MapPattern(Str(), Str() | Int() | Bool()),
    }),
    Opt("mail"): Map({
        "from": EmptyNone() | Str(),
        "to": EmptyNone() | Str(),
        Opt("smtpHost"): Str(),
        Opt("smtpPort"): Int(),
        Opt("subject"): Str(),
        Opt("body"): Str(),
    })
})
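
# A minimal usage sketch for the report schema above; the YAML text is
# hypothetical and strictyaml's `load` is assumed to be importable here.
from strictyaml import load as _load_yaml

_example_report = _load_yaml(
    "sentry:\n"
    "  dsn:\n"
    "    fromEnvVar: SENTRY_DSN\n"
    "  level: warning\n",
    _report_schema,
)
assert _example_report["sentry"]["level"].data == "warning"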

_job_defaults_common = {
    Opt("shell"): Str(),
    Opt("concurrencyPolicy"): Enum(['Allow', 'Forbid', 'Replace']),
    Opt("captureStderr"): Bool(),
Example #25
_report_schema = Map({
    Opt("sentry"):
    Map({
        Opt("dsn"):
        Map({
            Opt("value"): EmptyNone() | Str(),
            Opt("fromFile"): EmptyNone() | Str(),
            Opt("fromEnvVar"): EmptyNone() | Str(),
        }),
        Opt("fingerprint"):
        Seq(Str()),
        Opt("level"):
        Str(),
        Opt("extra"):
        MapPattern(Str(),
                   Str() | Int() | Bool()),
        Opt("body"):
        Str(),
    }),
    Opt("mail"):
    Map({
        "from": EmptyNone() | Str(),
        "to": EmptyNone() | Str(),
        Opt("smtpHost"): Str(),
        Opt("smtpPort"): Int(),
        Opt("subject"): Str(),
        Opt("body"): Str(),
    }),
})

_job_defaults_common = {
Example #26
            Map({
                "name": Str(),
                "value": Int() | Float(),
                Optional("nominalValue"): Int() | Float(),
                Optional("linkedValue"): Int() | Float(),
                Optional("rangeMinValue"): Int() | Float(),
                Optional("rangeMaxValue"): Int() | Float(),
                Optional("flags"): Int()
            }))
    }), )

stat_format4_schema = Seq(
    Map({
        "name": Str(),
        Optional("flags"): Int(),
        "location": MapPattern(Str(),
                               Int() | Float()),
    }))

instance_schema = MapPattern(
    Str(),
    Seq(
        Map({
            Optional("familyName"): Str(),
            Optional("styleName"): Str(),
            "coordinates": MapPattern(Str(),
                                      Int() | Float()),
        })))
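
# A quick illustrative check of instance_schema above; the YAML snippet is
# hypothetical and strictyaml's `load` is assumed to be importable here.
from strictyaml import load as _load_yaml

_instances = _load_yaml(
    "Regular:\n"
    "- styleName: Regular\n"
    "  coordinates:\n"
    "    wght: 400\n",
    instance_schema,
)
assert _instances["Regular"][0]["coordinates"]["wght"].data == 400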

schema = Map({
    "sources":
    Seq(Str()),
Example #27
class Engine(BaseEngine):
    """Python engine for running tests."""
    schema = StorySchema(
        preconditions=Map({
            "files": MapPattern(Str(), Str()),
            "variables": MapPattern(Str(), Str()),
            "python version": Str(),
            "ruamel version": Str(),
        }),
        params=Map({
            "python version": Str(),
            "ruamel version": Str(),
        }),
        about={
            "description": Str(),
            Optional("importance"): Int(),
        },
    )

    def __init__(self, keypath, settings):
        self.path = keypath
        self.settings = settings

    def set_up(self):
        """Set up your applications and the test environment."""
        self.doc = hitchdoc.Recorder(
            hitchdoc.HitchStory(self),
            self.path.gen.joinpath('storydb.sqlite'),
        )

        if self.path.gen.joinpath("state").exists():
            self.path.gen.joinpath("state").rmtree(ignore_errors=True)
        self.path.gen.joinpath("state").mkdir()
        self.path.state = self.path.gen.joinpath("state")

        for filename, text in self.preconditions.get("files", {}).items():
            filepath = self.path.state.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().mkdir()
            filepath.write_text(text)

        self.python_package = hitchpython.PythonPackage(
            self.preconditions.get('python_version', '3.5.0'))
        self.python_package.build()

        self.pip = self.python_package.cmd.pip
        self.python = self.python_package.cmd.python

        # Install debugging packages
        with hitchtest.monitor(
            [self.path.key.joinpath("debugrequirements.txt")]) as changed:
            if changed:
                run(
                    self.pip("install", "-r",
                             "debugrequirements.txt").in_dir(self.path.key))

        # Uninstall and reinstall
        run(self.pip("uninstall", "strictyaml", "-y").ignore_errors())
        run(self.pip("install", ".").in_dir(self.path.project))
        run(
            self.pip(
                "install", "ruamel.yaml=={0}".format(
                    self.preconditions["ruamel version"])))

        self.services = hitchserve.ServiceBundle(str(self.path.project),
                                                 startup_timeout=8.0,
                                                 shutdown_timeout=1.0)

        self.services['IPython'] = hitchpython.IPythonKernelService(
            self.python_package)

        self.services.startup(interactive=False)
        self.ipython_kernel_filename = self.services[
            'IPython'].wait_and_get_ipykernel_filename()
        self.ipython_step_library = hitchpython.IPythonStepLibrary()
        self.ipython_step_library.startup_connection(
            self.ipython_kernel_filename)

        self.shutdown_connection = self.ipython_step_library.shutdown_connection
        self.ipython_step_library.run("import os")
        self.ipython_step_library.run("import sure")
        self.ipython_step_library.run("from path import Path")
        self.ipython_step_library.run("os.chdir('{}')".format(self.path.state))

        for filename, text in self.preconditions.get("files", {}).items():
            self.ipython_step_library.run(
                """{} = Path("{}").bytes().decode("utf8")""".format(
                    filename.replace(".yaml", ""), filename))

    def run_command(self, command):
        self.ipython_step_library.run(command)
        self.doc.step("code", command=command)

    def variable(self, name, value):
        self.path.state.joinpath("{}.yaml".format(name)).write_text(value)
        self.ipython_step_library.run(
            """{} = Path("{}").bytes().decode("utf8")""".format(
                name, "{}.yaml".format(name)))
        self.doc.step("variable", var_name=name, value=value)

    def code(self, command):
        self.ipython_step_library.run(command)
        self.doc.step("code", command=command)

    @validate(exception=Str())
    def raises_exception(self, command, exception, why=''):
        """
        Command raises exception.
        """
        import re
        self.error = self.ipython_step_library.run(
            command, swallow_exception=True).error
        if self.error is None:
            raise Exception("Expected exception, but got none")
        full_exception = re.compile("(?:\\x1bs?\[0m)+(?:\n+)+{0}".format(
            re.escape("\x1b[0;31m"))).split(self.error)[-1]
        exception_class_name, exception_text = full_exception.split(
            "\x1b[0m: ")
        if self.settings.get("overwrite"):
            self.current_step.update(exception=str(exception_text))
        else:
            assert exception.strip(
            ) in exception_text, "UNEXPECTED:\n{0}".format(exception_text)
        self.doc.step(
            "exception",
            command=command,
            exception_class_name=exception_class_name,
            exception=exception_text,
            why=why,
        )

    def returns_true(self, command, why=''):
        self.ipython_step_library.assert_true(command)
        self.doc.step("true", command=command, why=why)

    def should_be_equal(self, lhs='', rhs='', why=''):
        command = """({0}).should.be.equal({1})""".format(lhs, rhs)
        self.ipython_step_library.run(command)
        self.doc.step("true", command=command, why=why)

    def assert_true(self, command):
        self.ipython_step_library.assert_true(command)
        self.doc.step("true", command=command)

    def assert_exception(self, command, exception):
        error = self.ipython_step_library.run(command,
                                              swallow_exception=True).error
        assert exception.strip() in error
        self.doc.step("exception", command=command, exception=exception)

    def on_failure(self):
        if self.settings.get("pause_on_failure", True):
            if self.preconditions.get("launch_shell", False):
                self.services.log(message=self.stacktrace.to_template())
                self.shell()

    def shell(self):
        if hasattr(self, 'services'):
            self.services.start_interactive_mode()
            import sys
            import time
            time.sleep(0.5)
            if path.exists(
                    path.join(path.expanduser("~"),
                              ".ipython/profile_default/security/",
                              self.ipython_kernel_filename)):
                call([
                    sys.executable, "-m", "IPython", "console", "--existing",
                    "--no-confirm-exit",
                    path.join(path.expanduser("~"),
                              ".ipython/profile_default/security/",
                              self.ipython_kernel_filename)
                ])
            else:
                call([
                    sys.executable, "-m", "IPython", "console", "--existing",
                    self.ipython_kernel_filename
                ])
            self.services.stop_interactive_mode()

    def assert_file_contains(self, filename, contents):
        assert self.path.state.joinpath(filename).bytes().decode(
            'utf8').strip() == contents.strip()
        self.doc.step("filename contains",
                      filename=filename,
                      contents=contents)

    def pause(self, message="Pause"):
        if hasattr(self, 'services'):
            self.services.start_interactive_mode()
        import IPython
        IPython.embed()
        if hasattr(self, 'services'):
            self.services.stop_interactive_mode()

    def on_success(self):
        if self.settings.get("overwrite"):
            self.new_story.save()

    def tear_down(self):
        try:
            self.shutdown_connection()
        except Exception:
            pass
        if hasattr(self, 'services'):
            self.services.shutdown()
Example #28
class Engine(BaseEngine):
    """Python engine for running tests."""

    schema = StorySchema(
        given={
            Optional("files"): MapPattern(Str(), Str()),
            Optional("symlinks"): MapPattern(Str(), Str()),
            Optional("permissions"): MapPattern(Str(), Str()),
            Optional("setup"): Str(),
            Optional("python version"): Str(),
            Optional("pathpy version"): Str()
        },
        info={},
    )

    def __init__(self, keypath, settings):
        self.path = keypath
        self.settings = settings

    def set_up(self):
        """Set up your applications and the test environment."""
        self.path.state = self.path.gen.joinpath("state")
        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        if self.path.gen.joinpath("q").exists():
            self.path.gen.joinpath("q").remove()

        for filename, text in self.given.get("files", {}).items():
            filepath = self.path.state.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().makedirs()
            filepath.write_text(text)

        for filename, linkto in self.given.get("symlinks", {}).items():
            filepath = self.path.state.joinpath(filename)
            linktopath = self.path.state.joinpath(linkto)
            linktopath.symlink(filepath)

        for filename, permission in self.given.get("permissions", {}).items():
            filepath = self.path.state.joinpath(filename)
            filepath.chmod(int(permission, 8))

        pylibrary = hitchbuildpy.PyLibrary(
            name="py3.5.0",
            base_python=hitchbuildpy.PyenvBuild("3.5.0").with_build_path(
                self.path.share),
            module_name="pathquery",
            library_src=self.path.project,
        ).with_build_path(self.path.gen)

        pylibrary.ensure_built()

        self.python = pylibrary.bin.python


        self.example_py_code = ExamplePythonCode(self.python, self.path.state)\
            .with_code(self.given.get('code', ''))\
            .with_setup_code(self.given.get('setup', ''))\
            .with_terminal_size(160, 100)\
            .with_env(TMPDIR=self.path.gen)\
            .with_long_strings(
                yaml_snippet_1=self.given.get('yaml_snippet_1'),
                yaml_snippet=self.given.get('yaml_snippet'),
                yaml_snippet_2=self.given.get('yaml_snippet_2'),
                modified_yaml_snippet=self.given.get('modified_yaml_snippet'),
            )

    @expected_exception(NonMatching)
    @expected_exception(HitchRunPyException)
    @validate(
        code=Str(),
        will_output=Map({
            "in python 2": Str(),
            "in python 3": Str()
        }) | Str(),
        raises=Map({
            Optional("type"):
            Map({
                "in python 2": Str(),
                "in python 3": Str()
            }) | Str(),
            Optional("message"):
            Map({
                "in python 2": Str(),
                "in python 3": Str()
            }) | Str(),
        }),
        in_interpreter=Bool(),
    )
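    # `Map({...}) | Str()` lets a story supply either a single value or
    # separate "in python 2" / "in python 3" values; run() resolves the
    # difference via the `differential` logic below.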
    def run(self,
            code,
            will_output=None,
            yaml_output=True,
            raises=None,
            in_interpreter=False):
        if in_interpreter:
            code = '{0}\nprint(repr({1}))'.format(
                '\n'.join(code.strip().split('\n')[:-1]),
                code.strip().split('\n')[-1])
        to_run = self.example_py_code.with_code(code)

        if self.settings.get("cprofile"):
            to_run = to_run.with_cprofile(
                self.path.profile.joinpath("{0}.dat".format(self.story.slug)))

        result = to_run.expect_exceptions().run(
        ) if raises is not None else to_run.run()

        if will_output is not None:
            actual_output = '\n'.join(
                [line.rstrip() for line in result.output.split("\n")])
            try:
                Templex(will_output).assert_match(actual_output)
            except NonMatching:
                if self.settings.get("rewrite"):
                    self.current_step.update(**{"will output": actual_output})
                else:
                    raise

        if raises is not None:
            differential = False  # Difference between python 2 and python 3 output?
            exception_type = raises.get('type')
            message = raises.get('message')

            if exception_type is not None:
                if not isinstance(exception_type, str):
                    differential = True
                    exception_type = exception_type['in python 2']\
                        if self.given['python version'].startswith("2")\
                        else exception_type['in python 3']

            if message is not None:
                if not isinstance(message, str):
                    differential = True
                    message = message['in python 2']\
                        if self.given['python version'].startswith("2")\
                        else message['in python 3']

            try:
                result = self.example_py_code.expect_exceptions().run()
                result.exception_was_raised(exception_type, message)
            except ExpectedExceptionMessageWasDifferent as error:
                if self.settings.get("rewrite") and not differential:
                    new_raises = raises.copy()
                    new_raises['message'] = result.exception.message
                    self.current_step.update(raises=new_raises)
                else:
                    raise

    def output_contains(self, expected_contents, but_not=None):
        try:
            output_contents = self.path.state.joinpath(
                "output.txt").text().strip()
        except FileNotFoundError:
            raise AssertionError("Output not found")

        for expected_item in expected_contents:
            found = False
            for output_item in output_contents.split('\n'):
                if output_item.strip() == str(
                        self.path.state.joinpath(
                            expected_item).abspath()).strip():
                    found = True
            if not found:
                raise AssertionError(
                    "Expected:\n{0}\n\nNot found in output:\n{1}".format(
                        expected_item,
                        output_contents,
                    ))

        if but_not is not None:
            for unexpected_item in but_not:
                found = False
                for output_item in output_contents.split('\n'):
                    if output_item.strip() == str(
                            self.path.state.joinpath(
                                unexpected_item).abspath()).strip():
                        found = True
                if found:
                    raise RuntimeError(
                        "Expected NOT to find:\n{0}\n\nBut found in:\n{1}".
                        format(
                            unexpected_item,
                            output_contents,
                        ))

    def pause(self, message="Pause"):
        import IPython
        IPython.embed()

    def on_success(self):
        if self.settings.get("rewrite"):
            self.new_story.save()
        if self.settings.get("cprofile"):
            self.python(
                self.path.key.joinpath("printstats.py"),
                self.path.profile.joinpath("{0}.dat".format(
                    self.story.slug))).run()

    def tear_down(self):
        if self.path.gen.joinpath("q").exists():
            print(self.path.gen.joinpath("q").text())
Example #29
from strictyaml import Map, MapPattern, Optional
from strictyaml import Str, Int, Seq, Enum, Any, as_document

JSONSCHEMA_TYPE_SNIPPET = {
    "type": Enum(["object", "integer", "string", "array"]),
    Optional("required"): Seq(Str()),
    Optional("properties"): MapPattern(Str(), Any()),
    Optional("items"): Any(),
}

JSONSCHEMA_SCHEMA = Map(JSONSCHEMA_TYPE_SNIPPET)


def get_schema(snippet):
    if snippet['type'] == "integer":
        return Int()
    elif snippet['type'] == "string":
        return Str()
    elif snippet['type'] == "array":
        return Seq(get_schema(snippet["items"]))
    elif snippet['type'] == "object":
        map_schema = {}
        for key, subschema in snippet['properties'].items():
            if key in snippet.get('required', []):
                # keys listed under "required" are mandatory; the rest are optional
                map_schema[key] = get_schema(subschema)
            else:
                map_schema[Optional(key)] = get_schema(subschema)
        return Map(map_schema)
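
# A brief illustration of get_schema turning a (hypothetical) JSON-schema
# style snippet into a strictyaml schema; `load` is imported here just for
# the sketch.
from strictyaml import load

_person_snippet = {
    "type": "object",
    "required": ["name"],
    "properties": {
        "name": {"type": "string"},
        "age": {"type": "integer"},
    },
}
_person = load("name: Alice\nage: 30\n", get_schema(_person_snippet))
assert _person["age"].data == 30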


def load_schema(json_schema):
Example #30
class Engine(BaseEngine):
    """Python engine for running tests."""

    given_definition = GivenDefinition(
        setup=GivenProperty(Str()),
        boxname=GivenProperty(Str()),
        vmname=GivenProperty(Str()),
        issue=GivenProperty(Str()),
        files=GivenProperty(MapPattern(Str(), Str())),
        python_version=GivenProperty(Str()),
    )

    info_definition = InfoDefinition(
        status=InfoProperty(schema=Enum(["experimental", "stable"])),
        docs=InfoProperty(schema=Str()),
    )

    def __init__(self, paths, settings):
        self.path = paths
        self.settings = settings

    def set_up(self):
        """Set up your applications and the test environment."""
        self.path.cachestate = self.path.gen.joinpath("cachestate")
        self.path.state = self.path.gen.joinpath("state")
        self.path.working_dir = self.path.gen.joinpath("working")
        self.path.build_path = self.path.gen.joinpath("build_path")
        self.path.localsync = self.path.gen.joinpath("local_sync")

        if self.path.state.exists():
            self.path.state.rmtree(ignore_errors=True)
        self.path.state.mkdir()

        if self.path.localsync.exists():
            self.path.localsync.rmtree(ignore_errors=True)
        self.path.localsync.mkdir()

        if self.path.build_path.exists():
            self.path.build_path.rmtree(ignore_errors=True)
        self.path.build_path.mkdir()

        self.python = hitchpylibrarytoolkit.project_build(
            "hitchbuildvagrant", self.path,
            self.given.get("python_version", "3.7.0")).bin.python

        if not self.path.cachestate.exists():
            self.path.cachestate.mkdir()

        for filename, contents in self.given.get("files", {}).items():
            filepath = self.path.state.joinpath(filename)
            if not filepath.dirname().exists():
                filepath.dirname().makedirs()
            filepath.write_text(contents)

        if self.path.working_dir.exists():
            self.path.working_dir.rmtree(ignore_errors=True)
        self.path.working_dir.mkdir()

        self.example_py_code = (ExamplePythonCode(
            self.python, self.path.state).with_setup_code(
                self.given.get("setup", "").replace(
                    "/path/to/share",
                    self.path.cachestate)).with_terminal_size(
                        160, 100).with_long_strings(
                            share=str(self.path.cachestate),
                            build_path=str(self.path.build_path),
                            issue=str(self.given.get("issue")),
                            boxname=str(self.given.get("boxname")),
                            vmname=str(self.given.get("vmname")),
                            local_sync_path=str(self.path.localsync),
                        ))

    @no_stacktrace_for(HitchRunPyException)
    def run(self, code):
        self.example_py_code.with_code(code).run()

    def write_to_localsync(self, **files):
        for filename, contents in files.items():
            self.path.localsync.joinpath(filename).write_text(contents)

    def delete_localsync_file(self, filename):
        self.path.localsync.joinpath(filename).remove()

    def write_file(self, filename, contents):
        self.path.state.joinpath(filename).write_text(contents)

    def raises_exception(self, message=None, exception_type=None):
        try:
            result = self.example_py_code.expect_exceptions().run()
            result.exception_was_raised(exception_type, message.strip())
        except ExpectedExceptionMessageWasDifferent as error:
            if self.settings.get("rewrite"):
                self.current_step.update(message=error.actual_message)
            else:
                raise

    def file_contains(self, filename, contents):
        assert (self.path.working_dir.joinpath(filename).bytes().decode("utf8")
                == contents)

    @validate(duration=Float())
    def sleep(self, duration):
        import time

        time.sleep(duration)

    def pause(self, message="Pause"):
        import IPython

        IPython.embed()

    def on_failure(self, reason):
        pass

    def tear_down(self):
        for vagrantfile in pathquery(self.path.state).named("Vagrantfile"):
            Command("vagrant", "destroy",
                    "-f").in_dir(vagrantfile.abspath().dirname()).run()