Example #1
def init() -> None:
    '''
    Initialize this submodule.

    Specifically (in order):

    #. Initialize all uninitialized global variables of this submodule.
    #. If the currently installed version of Numpy was linked against an
       unoptimized BLAS implementation and is thus itself unoptimized, log a
       non-fatal warning.
    '''

    # Log this initialization.
    logs.log_debug('Initializing NumPy...')

    # Initialize all uninitialized global variables of this submodule.
    _init_globals()

    # If Numpy linked against an unoptimized BLAS, log a non-fatal warning.
    if not is_blas_optimized():
        logs.log_warning(
            'Numpy unoptimized; scaling down to single-core operation. '
            'Consider installing an optimized multithreaded '
            'CBLAS implementation (e.g., OpenBLAS, ATLAS, ACML, MKL) and '
            'reinstalling Numpy to use this implementation.')
Example #2
def init() -> None:
    '''
    Enable our default logging configuration for the active Python process if
    this configuration has yet to be enabled *or* reduce to a noop otherwise
    (e.g., if this function has already been called).

    Specifically, this function instantiates the private :data:`_log_conf`
    singleton to an instance of the application-specific :class:`LogConf`
    class, publicly accessible via the :func:`get_log_conf` module getter. This
    singleton defines sane default filters, formatters, and handlers for the
    root logger, which callers may customize by setting singleton properties.
    '''

    # Avoid circular import dependencies.
    from betse.util.io.log import logs
    from betse.util.io.log.conf.logconfcls import LogConf

    # Module-scoped variables to be set below.
    global _log_conf

    # If a logging configuration already exists...
    if _log_conf is not None:
        # Log a non-fatal warning.
        logs.log_warning('Logging already configured (e.g., due to '
                         'logconf.init() having been called).')

        # Reduce to a noop.
        return

    # Instantiate this singleton global with the requisite defaults.
    # print('Reinitializing logging.')
    _log_conf = LogConf()

    # Log this initialization *AFTER* guaranteeing logging sanity.
    logs.log_debug('Initialized singleton logging configuration.')
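
# Hypothetical usage sketch (not from the original submodule; the "logconf"
# module path is an assumption): the guard above makes init() idempotent, so
# entry points may call it unconditionally.
def _demo_logconf_reinit() -> None:
    from betse.util.io.log import logconf  # Assumed module path.

    logconf.init()  # First call: instantiates the "_log_conf" singleton.
    logconf.init()  # Second call: logs a non-fatal warning, then noops.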
Example #3
def init() -> None:
    '''
    Validate the active Python interpreter.

    This function does *not* validate this interpreter's version, as the
    top-level :mod:`betse.metadata` submodule already does so at the start of
    application startup. Instead, this function (in order):

    #. Logs a non-fatal warning if this interpreter is *not* 64-bit.
    '''

    # Log this validation.
    logs.log_debug('Validating Python interpreter...')

    # If this Python interpreter is 32- rather than 64-bit, log a non-fatal
    # warning. While technically feasible, running BETSE under 32-bit Python
    # interpreters imposes non-trivial constraints detrimental to sanity.
    if is_wordsize_32():
        logs.log_warning(
            '32-bit Python interpreter detected. '
            '{name} will be confined to low-precision datatypes and '
            'at most 4GB of available RAM, '
            'impeding the reliability and scalability of modelling. '
            'Consider running {name} only under a '
            '64-bit Python interpreter.'.format(name=metadata.NAME))
Example #4
def save(
    # Mandatory parameters.
    container: MappingOrSequenceTypes,
    filename: str,

    # Optional parameters.
    is_overwritable: bool = False,
) -> None:
    '''
    Save (i.e., open and write, serialize) the passed dictionary or list to the
    YAML-formatted file with the passed path via the active YAML
    implementation.

    Parameters
    ----------
    container: MappingOrSequenceTypes
        Dictionary or list to be written as the contents of this file.
    filename : str
        Absolute or relative filename of the YAML-formatted file to be saved.
    is_overwritable : optional[bool]
        Either:

        * ``True`` if this function may silently overwrite this file when this
          file already exists.
        * ``False`` if this function should instead raise an exception when
          this file already exists.

        Defaults to ``False`` for safety.
    '''

    # If this filename has no YAML-compliant filetype, log a warning.
    _warn_unless_filetype_yaml(filename)

    # With this YAML file opened for character-oriented writing...
    with iofiles.writing_chars(
        filename=filename, is_overwritable=is_overwritable) as yaml_file:
        # Fully-qualified name of the module defining this container subclass.
        container_class_module_name = objects.get_class_module_name_qualified(
            container)

        # If this container is *NOT* a "ruamel.yaml"-specific object returned
        # by a prior call to the load() function, log a non-fatal warning.
        # While this edge case does *NOT* constitute a fatal warning, it does
        # disable roundtripped preservation of comments and whitespace
        # contained in the original YAML file -- the principal motivation for
        # preferring "ruamel.yaml" versus PyYAML and friends (e.g., oyaml).
        if not container_class_module_name.startswith('ruamel.'):
            logs.log_warning(
                'Non-"ruamel.yaml" type "%s.%s" not roundtrippable.',
                container_class_module_name, container.__class__.__name__)

        # Safe roundtripping YAML parser.
        ruamel_parser = _make_ruamel_parser()

        # Save this container to this YAML file.
        ruamel_parser.dump(container, yaml_file)
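
# Hedged round-trip sketch using only the public ruamel.yaml API (the
# betse-specific wrappers above are assumed): loading in round-trip mode
# yields "ruamel."-namespaced containers, so re-saving them avoids the
# non-roundtrippable warning and preserves comments and whitespace.
def _demo_yaml_roundtrip(src_filename: str, trg_filename: str) -> None:
    from ruamel.yaml import YAML

    yaml = YAML(typ='rt')  # Round-trip parser, as in _make_ruamel_parser().

    # Load this YAML file into a comment-preserving container.
    with open(src_filename) as src_file:
        container = yaml.load(src_file)  # CommentedMap or CommentedSeq.

    # Save this container back out, comments intact.
    with open(trg_filename, 'w') as trg_file:
        yaml.dump(container, trg_file)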
Example #5
    def _deprecated_inner(*args, **kwargs) -> object:
        # If this callable has *NOT* had a deprecation warning logged, do so.
        if not func.__is_deprecation_logged:
            # Prevent this warning from being logged more than once.
            func.__is_deprecation_logged = True

            # Capitalized human-readable string describing this callable.
            func_name = callables.to_str_capitalized(func)

            # Log this warning.
            logs.log_warning('%s deprecated.', func_name)

        # Call this callable.
        return func(*args, **kwargs)
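
# Minimal sketch of the enclosing decorator implied above (a hypothetical
# reconstruction; the actual betse decorator and its logging differ). The
# flag is stored on the decorated callable so each callable warns only once.
def _deprecated_sketch(func):
    import functools

    func.__is_deprecation_logged = False

    @functools.wraps(func)
    def _deprecated_inner(*args, **kwargs) -> object:
        if not func.__is_deprecation_logged:
            func.__is_deprecation_logged = True
            print('%s() deprecated.' % func.__name__)  # Stand-in for logs.
        return func(*args, **kwargs)

    return _deprecated_inner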
Example #6
def init() -> None:
    '''
    Validate the current platform.

    This function (in order):

    #. Logs a non-fatal warning if this platform is a non-WSL variant
       of Microsoft Windows (e.g., vanilla Windows, Cygwin Windows).
    #. Logs a non-fatal warning if this platform is *not* recognized as
       officially supported by this application (e.g., BSD*, Solaris).
    '''

    # Avoid circular import dependencies.
    from betse.util.os.brand import windows

    # Log this validation.
    logs.log_debug('Validating platform...')

    # Human-readable string describing the set of all officially supported
    # platforms known to interoperate sanely with this application.
    oses_supported_str = (
        'Consider running {name} only under Linux or macOS. '
        'Note that Linux is now freely emulatable under Windows 10 '
        'via the Windows Subsystem for Linux (WSL). '
        'See official installation instructions at:\n'
        '\thttps://docs.microsoft.com/en-us/windows/wsl/install-win10'.format(
            name=metadata.NAME))

    # If this is a non-WSL Windows variant, log a non-fatal warning.
    if windows.is_windows():
        #FIXME: Restore this logging statement to a non-fatal warning *AFTER*
        #resolving the "FIXME:" comment chain in the
        #"betse.util.os.brand.windows" submodule concerning PowerShell. Yikes!
        # logs.log_warning(
        logs.log_info(
            'Windows platform detected. '
            'Python itself and third-party scientific frameworks for Python '
            '(e.g., Numpy, SciPy, Matplotlib) are known to '
            'behave suboptimally on this platform. '
            '%s', oses_supported_str)

    # If this platform is officially unsupported by this application, log a
    # non-fatal warning.
    if not is_supported():
        logs.log_warning('Unsupported platform "%s" detected. %s', get_name(),
                         oses_supported_str)
Example #7
def start_ptpython_repl() -> None:
    '''
    Start a REPL based on the optional third-party :mod:`ptpython` package.

    If this package is unavailable, this function defers to the first available
    REPL with a non-fatal warning.
    '''

    # If "ptpython" is unavailable...
    if not libs.is_runtime_optional('ptpython'):
        # Log a non-fatal warning.
        logs.log_warning(
            '"ptpython" module not found. Deferring to first available REPL.')

        # Defer to the first available REPL.
        start_first_repl()

        # Get us out of here, Great Captain.
        return
    # Else, "ptpython" is available.

    # Log this invocation.
    logs.log_info('Starting "ptpython"-based REPL...')

    # Defer heavyweight imports.
    from ptpython.repl import embed

    # If the "ptpython" key is missing from the dictionary of history
    # filenames, then default to no history file. This prevents the readline
    # history files being corrupted by ptpython's unique format.
    history_filename = appmetaone.get_app_meta().get_repl_history_filename(
        'ptpython')

    # Run this REPL.
    try:
        embed(
            globals=None,
            locals=replenv.repl_env,
            history_filename=history_filename,
        )
    # When this REPL halts with error, reraise this exception.
    except SystemExit as exception:
        if cmdexit.is_failure(exception.code):
            raise
Example #8
def remove_dir(dirname: str) -> None:
    '''
    Recursively remove the directory with the passed dirname.

    Caveats
    ----------
    Since recursive directory removal is an inherently dangerous operation,
    this function (in order):

    #. Notifies the end user with a logged warning.
    #. Waits several seconds, enabling sufficiently aware end users to jam the
       panic button.
    #. Recursively removes this directory.

    Parameters
    ----------
    dirname : str
        Absolute or relative dirname of the directory to be removed.

    Raises
    ----------
    BetseDirException
        If this directory does *not* exist.
    '''

    # Number of seconds to sleep before removing this directory.
    SLEEP_SECONDS = 4

    # Log this removal.
    logs.log_warning('Removing directory in %d seconds: %s',
                     SLEEP_SECONDS, dirname)

    # If this directory does *NOT* exist, raise an exception.
    die_unless_dir(dirname)

    # Sleep this number of seconds.
    time.sleep(SLEEP_SECONDS)

    # Recursively remove this directory.
    shutil.rmtree(dirname)

    # Log this successful completion.
    logs.log_info('Directory removed.')
Example #9
    def __init__(
        self,
        p: 'betse.science.parameters.Parameters',
        time_step: float,
    ) -> None:
        '''
        Initialize this simulation spike event for the passed simulation
        configuration.

        Parameters
        ----------
        p : betse.science.parameters.Parameters
            Current simulation configuration.
        time_step : float
            Time step in seconds (s) at which to trigger this action.

        Notes
        ----------
        If this time step is invalid (i.e., *not* in the range
        ``[0, p.sim_time_total)``), this method logs a non-fatal warning
        rather than raising an exception.
        '''

        # Initialize our superclass.
        super().__init__()

        # If this time step is invalid, log a non-fatal warning.
        #
        # While an invalid time step is questionable, this invalidity is
        # *NOT* fatal for most use cases and subclasses. Ergo, logging a
        # non-fatal warning is saner than raising a fatal exception here.
        if not 0.0 <= time_step < p.sim_time_total:
            logs.log_warning(
                'Event time %f invalid '
                '(i.e., not in range [0.0, %f)).',
                time_step, p.sim_time_total)

        # Classify all passed parameters.
        self._time_step = time_step
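
# Standalone sketch of the half-open range check above: the chained
# comparison accepts 0.0 but rejects the "sim_time_total" endpoint itself.
def _is_time_step_valid(time_step: float, sim_time_total: float) -> bool:
    return 0.0 <= time_step < sim_time_total

assert _is_time_step_valid(0.0, 10.0)
assert not _is_time_step_valid(10.0, 10.0)  # Endpoint excluded.
assert not _is_time_step_valid(-0.1, 10.0)  # Negative time steps rejected.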
Example #10
def get_version_or_none(module: ModuleOrStrTypes) -> StrOrNoneTypes:
    '''
    Version specifier of the passed module if that module defines a version
    specifier *or* ``None`` otherwise.

    Parameters
    ----------
    module : ModuleOrStrTypes
        Either:
        * The fully-qualified name of this module, in which case this function
          dynamically imports this module.
        * A previously imported module object.

    Returns
    ----------
    StrOrNoneTypes
        This module's version specifier if any *or* ``None`` otherwise.
    '''

    # Avoid circular import dependencies.
    from betse.util.py.module.pymodname import MODULE_NAME_TO_VERSION_ATTR_NAME

    # Resolve this module's object.
    module = resolve_module(module)

    # Name of the version specifier attribute defined by that module. For sane
    # modules, this is "__version__". Insane modules, however, exist.
    module_version_attr_name = MODULE_NAME_TO_VERSION_ATTR_NAME[
        module.__name__]

    # This attribute defined by this module if any *OR* "None" otherwise.
    module_version = getattr(module, module_version_attr_name, None)

    # If this version is undefined, log a non-fatal warning.
    if module_version is None:
        logs.log_warning('Module "%s" version not found.', module.__name__)

    # Return this version.
    return module_version
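
# Standard-library sketch of the same lookup for well-behaved modules
# (assuming the version attribute is the conventional "__version__" rather
# than one of the insane alternatives mapped above).
def _demo_get_version_or_none(module_name: str):
    import importlib

    module = importlib.import_module(module_name)
    return getattr(module, '__version__', None)  # e.g., "1.22.4" or None.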
Example #11
def set_headless(is_headless: bool) -> None:
    '''
    Set whether the active Python interpreter is running **headless** (i.e.,
    with *no* access to a GUI display) or not, explicitly overriding the
    implicit detection performed by the :func:`is_headfull` function of whether
    this interpreter actually is running headless or not.

    Parameters
    ----------
    is_headless : bool
        ``True`` only if the active Python interpreter is to be treated as if
        it were effectively running headless - regardless of whether it is.
    '''

    # Enable this global to be locally set.
    global _is_headless_forced

    # If coercing headless operation, log this coercion.
    if is_headless:
        logs.log_debug('Forcing headless operation...')
    # Else, headfull operation is being coerced. In this case...
    else:
        # Log this coercion.
        logs.log_debug('Forcing headfull operation...')

        # If the current environment is detected to be headless, log a
        # non-fatal warning. While an exception could also be raised, our
        # detection heuristic is known to be imperfect.
        if _is_headless():
            logs.log_warning(
                'Headless environment detected! '
                'Forcing headfull operation under a headless environment '
                'typically induces silent segmentation faults '
                'and hence is unsupported.'
            )

    # Set this global to this boolean.
    _is_headless_forced = is_headless
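
# Hypothetical usage sketch (the "displays" module path is an assumption;
# only set_headless() itself appears above): force headless operation early,
# before any GUI toolkit is imported.
#
#     from betse.util.os import displays
#     displays.set_headless(True)   # Logs "Forcing headless operation..."
#     displays.set_headless(False)  # Warns if a headless environment is detected.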
Example #12
def start_repl(repl_type: ReplType = ReplType.first_available) -> None:
    '''
    Start a REPL of the passed type.

    Parameters
    ----------
    repl_type : optional[ReplType]
        Type of REPL to prefer. If :data:`ReplType.first_available`, the set
        of all possible REPLs is iteratively searched for the first available
        REPL; if the passed REPL type is unrecognized, a non-fatal warning is
        logged and the first available REPL is deferred to. Defaults to
        :data:`ReplType.first_available`.
    '''

    if repl_type is ReplType.first_available:
        start_first_repl()
    elif repl_type is ReplType.ptpython:
        start_ptpython_repl()
    elif repl_type is ReplType.code:
        start_code_repl()
    else:
        logs.log_warning(
            'REPL type "%s" unrecognized. '
            'Deferring to first available REPL.', repl_type.name)
        start_first_repl()
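
# Minimal sketch of the "ReplType" enumeration dispatched on above (a
# hypothetical reconstruction; member names follow the branches above).
from enum import Enum

class ReplTypeSketch(Enum):
    first_available = 'first_available'
    ptpython = 'ptpython'
    code = 'code'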
Example #13
def _warn_unless_filetype_yaml(filename: str) -> None:
    '''
    Log a non-fatal warning unless the passed filename has a YAML-compliant
    filetype (i.e., is suffixed by either ``.yaml`` or ``.yml``).

    Parameters
    ----------
    filename : str
        Absolute or relative filename of this file.
    '''

    # Filetype of this file.
    filetype = pathnames.get_filetype_undotted_or_none(filename)

    # If this filetype is *NOT* YAML-compliant...
    if filetype not in YAML_FILETYPES:
        # If this file has *NO* filetype, log an appropriate warning.
        if filetype is None:
            logs.log_warning('YAML file "%s" has no filetype.', filename)
        # Else, this file has a filetype. Log an appropriate warning.
        else:
            logs.log_warning(
                'YAML file "%s" filetype "%s" neither "yaml" nor "yml".',
                filename, filetype)
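
# Standard-library sketch of the filetype extraction assumed above, where
# "YAML_FILETYPES" is presumed to be {'yaml', 'yml'}.
def _get_filetype_undotted_or_none(filename: str):
    import os.path

    _, dotted_filetype = os.path.splitext(filename)
    return dotted_filetype[1:] or None  # "sim.yaml" -> "yaml"; "README" -> None.

assert _get_filetype_undotted_or_none('sim.yaml') == 'yaml'
assert _get_filetype_undotted_or_none('README') is None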
Example #14
def is_pathname(pathname: str) -> bool:
    '''
    ``True`` only if the passed string is a valid pathname (either absolute or
    relative) for the root filesystem of the current platform.

    Under:

    * POSIX-compatible OSes:

      * Valid pathnames are non-empty strings containing:

        * No null byte characters.
        * No ``/``-delimited path component longer than 255 characters.

      * The root filesystem is the filesystem mounted to the root directory
        ``/``.

    * Microsoft OSes:

      * Valid pathnames are non-empty strings satisfying `various constraints
        <https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx>`_
        too numerous to document here.
      * The root filesystem is the filesystem to which this instance of Windows
        was installed, also given by the ``%HOMEDRIVE%`` environment variable.
    '''

    # Avoid circular import dependencies.
    from betse.util.os.brand import windows
    from betse.util.os.brand.windows import WindowsErrorType

    # If this is the empty string, it cannot by definition be a valid pathname.
    if not pathname:
        return False

    # The only cross-platform and -filesystem portable means of validating
    # pathnames is to parse exceptions raised by the kernel-dependent os.stat()
    # or os.lstat() functions for metadata indicating invalid pathname syntax.
    # All other approaches (e.g., regex string matching) fail for common edge
    # cases. See also:
    #     https://stackoverflow.com/a/34102855/2809027
    try:
        # Strip this pathname's Windows drive specifier (e.g., "C:\") if any.
        # Since Windows prohibits path components from containing ":"
        # characters, failing to strip this ":"-suffixed prefix would
        # erroneously invalidate all valid absolute Windows pathnames.
        _, pathname = os_path.splitdrive(pathname)

        # Absolute path of a directory guaranteed to exist.
        #
        # To avoid race conditions with external processes concurrently
        # modifying the filesystem, the passed pathname cannot be tested as is.
        # Only path components split from this pathname are safely testable.
        # Why? Because os.stat() and os.lstat() raise "FileNotFoundError"
        # exceptions when passed pathnames residing in non-existing directories
        # regardless of whether these pathnames are invalid or not. Directory
        # existence takes precedence over pathname invalidity. Hence, the only
        # means of testing whether pathnames are invalid or not is to:
        #
        # 1. Split the passed pathname into path components (e.g.,
        #    "/foo/bar" into "['', 'foo', 'bar']").
        # 2. For each path component:
        #    1. Join the pathname of a directory guaranteed to exist and the
        #       current path component into a new pathname (e.g., "/bar").
        #    2. Pass that pathname to os.stat() or os.lstat(). If that
        #       pathname and hence current path component is invalid, this
        #       call is guaranteed to raise an exception exposing the type
        #       of invalidity rather than a generic "FileNotFoundError"
        #       exception. Why? Because that pathname resides in an
        #       existing directory. Circular logic is circular.
        #
        # Is a directory guaranteed to exist? Yes, but typically only one: the
        # root directory for the current filesystem. Passing pathnames residing
        # in any other directories to os.stat() or os.lstat() invites
        # mind-flaying race conditions, even for directories previously tested
        # to exist. External processes cannot be prevented from concurrently
        # removing those directories after those tests have been performed but
        # before those pathnames are passed to os.stat() or os.lstat().
        #
        # Did we mention this should be shipped with Python already?
        root_dirname = get_root_dirname()
        assert os_path.isdir(root_dirname)   # ...Murphy and her dumb Law

        # Test whether each path component split from this pathname is valid
        # or not. Most path components will *NOT* actually physically exist.
        for pathname_part in pathname.split(os_path.sep):
            try:
                os.lstat(root_dirname + pathname_part)
            # If an OS-specific exception is raised, its error code indicates
            # whether this pathname is valid or not. Unless this is the case,
            # this exception implies an ignorable kernel or filesystem
            # complaint (e.g., path not found or inaccessible).
            #
            # Only the following exceptions indicate invalid pathnames:
            #
            # * Instances of the Windows-specific "WindowsError" class
            #   defining the "winerror" attribute whose value is
            #   "ERROR_INVALID_NAME". Under Windows, "winerror" is more
            #   fine-grained and hence useful than the generic "errno"
            #   attribute. When a too-long pathname is passed, for example,
            #   "errno" is "ENOENT" (i.e., no such file or directory) rather
            #   than "ENAMETOOLONG" (i.e., file name too long).
            # * Instances of the cross-platform "OSError" class defining the
            #   generic "errno" attribute whose value is either:
            #   * Under most POSIX-compatible OSes, "ENAMETOOLONG".
            #   * Under some edge-case OSes (e.g., SunOS, *BSD), "ERANGE".
            except OSError as exc:
                # True only if this pathname is invalid (as detailed above).
                is_pathname_invalid = (
                    windows.is_exception_pathname_invalid(exc)
                    if isinstance(exc, WindowsErrorType)
                    else exc.errno in {errno.ENAMETOOLONG, errno.ERANGE})

                # If this pathname is invalid, log a warning and return False.
                if is_pathname_invalid:
                    logs.log_warning(
                        'Pathname "{}" invalid: {}'.format(
                            pathname, exc.strerror))
                    return False
    # If a "TypeError" exception was raised, it almost certainly has the
    # error message "embedded NUL character" indicating an invalid pathname.
    except TypeError as exc:
        logs.log_warning(
            'Pathname "{}" invalid: {}'.format(pathname, str(exc)))
        return False
    # If no exception was raised, all path components and hence this
    # pathname itself are valid. (Praise be to the curmudgeonly python.)
    else:
        return True
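
# Expected results on a typical POSIX platform (a usage sketch; the
# enclosing module is assumed to be importable as "pathnames"):
#
#     pathnames.is_pathname('/usr/local/lib')  # True
#     pathnames.is_pathname('')                # False (empty string)
#     pathnames.is_pathname('foo\0bar')        # False (embedded null byte)
#     pathnames.is_pathname('a' * 256)         # False (component too long)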
Example #15
def copy_dir(
    # Mandatory parameters.
    src_dirname: str,
    trg_dirname: str,

    # Optional parameters.
    overwrite_policy: DirOverwritePolicy = (
        DirOverwritePolicy.HALT_WITH_EXCEPTION),
    ignore_basename_globs: IterableOrNoneTypes = None,
) -> None:
    '''
    Recursively copy the source directory with the passed dirname into the
    target directory with the passed dirname.

    For generality:

    * All nonexistent parents of the target directory will be recursively
      created, mimicking the action of the ``mkdir -p`` shell command on
      POSIX-compatible platforms in a platform-agnostic manner.
    * All symbolic links in the source directory will be preserved (i.e.,
      copied as is rather than their transitive targets copied instead).

    Parameters
    -----------
    src_dirname : str
        Absolute or relative dirname of the source directory to be copied from.
    trg_dirname : str
        Absolute or relative dirname of the target directory to be copied to.
    overwrite_policy : DirOverwritePolicy
        **Directory overwrite policy** (i.e., strategy for handling existing
        paths to be overwritten by this copy). Defaults to
        :attr:`DirOverwritePolicy.HALT_WITH_EXCEPTION`, raising an exception if
        any target path already exists.
    ignore_basename_globs : optional[IterableTypes]
        Iterable of shell-style globs (e.g., ``('*.tmp', '.keep')``) matching
        the basenames of all paths transitively owned by this source directory
        to be ignored during recursion and hence neither copied nor visited.
        If non-``None`` and the ``overwrite_policy`` parameter is
        :attr:`DirOverwritePolicy.OVERWRITE`, this iterable is ignored and a
        non-fatal warning is logged. Defaults to ``None``, in which case *all*
        paths transitively owned by this source directory are unconditionally
        copied and visited.

    Raises
    -----------
    BetseDirException
        If either:

        * The source directory does *not* exist.
        * The target directory is a subdirectory of the source directory.
          Permitting this edge case induces non-trivial issues, including
          infinite recursion from within the musty entrails of the
          :mod:`distutils` package (e.g., due to relative symbolic links).
        * The passed ``overwrite_policy`` parameter is
          :attr:`DirOverwritePolicy.HALT_WITH_EXCEPTION` *and* one or more
          subdirectories of the target directory already exist that are also
          subdirectories of the source directory. For safety, this function
          always preserves rather than overwrites existing target
          subdirectories.

    See Also
    -----------
    https://stackoverflow.com/a/22588775/2809027
        StackOverflow answer strongly inspiring this function's
        :attr:`DirOverwritePolicy.SKIP_WITH_WARNING` implementation.
    '''

    # Log this copy.
    logs.log_debug('Copying directory: %s -> %s', src_dirname, trg_dirname)

    # If the source directory does *NOT* exist, raise an exception.
    die_unless_dir(src_dirname)

    # If the target directory is a subdirectory of the source directory, raise
    # an exception. Permitting this edge case provokes issues, including
    # infinite recursion from within the musty entrails of the "distutils"
    # codebase (possibly due to relative symbolic links).
    die_if_subdir(parent_dirname=src_dirname, child_dirname=trg_dirname)

    # If passed an iterable of shell-style globs matching ignorable basenames,
    # convert this iterable into a predicate function of the form required by
    # the shutil.copytree() function. Specifically, this function accepts the
    # absolute or relative pathname of an arbitrary directory and an iterable
    # of the basenames of all subdirectories and files directly in this
    # directory; this function returns an iterable of the basenames of all
    # subdirectories and files in this directory to be ignored. This signature
    # resembles:
    #
    #     def ignore_basename_func(
    #         parent_dirname: str,
    #         child_basenames: IterableTypes) -> IterableTypes
    ignore_basename_func = None
    if ignore_basename_globs is not None:
        ignore_basename_func = shutil.ignore_patterns(*ignore_basename_globs)

    # If raising a fatal exception if any target path already exists...
    if overwrite_policy is DirOverwritePolicy.HALT_WITH_EXCEPTION:
        # Dictionary of all keyword arguments to pass to shutil.copytree(),
        # preserving symbolic links as is.
        copytree_kwargs = {
            'symlinks': True,
        }

        # If ignoring basenames, inform shutil.copytree() of these basenames.
        if ignore_basename_func is not None:
            copytree_kwargs['ignore'] = ignore_basename_func

        # Raise an exception if this target directory already exists. While we
        # could defer to the exception raised by the shutil.copytree() function
        # for this case, this exception's message erroneously refers to this
        # directory as a file and is hence best avoided as non-human-readable:
        #
        #     [Errno 17] File exists: 'sample_sim'
        die_if_dir(trg_dirname)

        # Recursively copy this source to target directory. To avoid silently
        # overwriting all conflicting target paths, the shutil.copytree()
        # rather than dir_util.copy_tree() function is called.
        shutil.copytree(src=src_dirname, dst=trg_dirname, **copytree_kwargs)
    # Else if overwriting this target directory with this source directory...
    elif overwrite_policy is DirOverwritePolicy.OVERWRITE:
        # If an iterable of shell-style globs matching ignorable basenames was
        # passed, log a non-fatal warning. Since the dir_util.copy_tree()
        # function fails to support this functionality and we are currently too
        # lazy to do so, a warning is as much as we're willing to give.
        if ignore_basename_globs is not None:
            logs.log_warning(
                'copy_dir() parameter "ignore_basename_globs" ignored when '
                'parameter "overwrite_policy" is "OVERWRITE".')

        # Recursively copy this source to target directory, preserving symbolic
        # links as is. To silently overwrite all conflicting target paths, the
        # dir_util.copy_tree() rather than shutil.copytree() function is
        # called.
        dir_util.copy_tree(src_dirname, trg_dirname, preserve_symlinks=1)

    #FIXME: Given how awesomely flexible the manual approach implemented below
    #is, we should probably consider simply rewriting the above two approaches
    #to reuse the exact same logic. It works. It's preferable. Let's reuse it.
    #FIXME: Actually, this is increasingly critical. Third-party functions
    #called above -- notably, the dir_util.copy_tree() function -- appear to
    #suffer critical edge cases. This can be demonstrated via the BETSEE GUI by
    #attempting to save an opened simulation configuration to a subdirectory of
    #itself, which appears to provoke infinite recursion from within the musty
    #depths of the "distutils" codebase. Of course, the implementation below
    #could conceivably suffer similar issues. If this is the case, this
    #function should explicitly detect attempts to recursively copy a source
    #directory into a subdirectory of itself and raise an exception.
    #FIXME: See the above FIXME comment addressing the infinite recursion issue
    #discussed here.

    # Else if logging a warning for each target path that already exists, do so
    # by manually implementing recursive directory copying. Sadly, Python
    # provides no means of doing so "out of the box."
    elif overwrite_policy is DirOverwritePolicy.SKIP_WITH_WARNING:
        # Avoid circular import dependencies.
        from betse.util.path import files, paths, pathnames
        from betse.util.type.iterable import sequences

        # Passed parameters renamed for disambiguity.
        src_root_dirname = src_dirname
        trg_root_dirname = trg_dirname

        # Basename of the top-level target directory to be copied to.
        trg_root_basename = pathnames.get_basename(src_root_dirname)

        # For the absolute pathname of each recursively visited source
        # directory, an iterable of the basenames of all subdirectories of this
        # directory, and an iterable of the basenames of all files of this
        # directory...
        for src_parent_dirname, subdir_basenames, file_basenames in _walk(
                src_root_dirname):
            # Relative pathname of the currently visited source directory
            # relative to the absolute pathname of this directory.
            parent_dirname_relative = pathnames.relativize(
                src_dirname=src_root_dirname, trg_pathname=src_parent_dirname)

            # If ignoring basenames...
            if ignore_basename_func is not None:
                # Sets of the basenames of all ignorable subdirectories and
                # files of this source directory.
                subdir_basenames_ignored = ignore_basename_func(
                    src_parent_dirname, subdir_basenames)
                file_basenames_ignored = ignore_basename_func(
                    src_parent_dirname, file_basenames)

                # If ignoring one or more subdirectories...
                if subdir_basenames_ignored:
                    # Log the basenames of these subdirectories.
                    logs.log_debug(
                        'Ignoring source "%s/%s" subdirectories: %r',
                        trg_root_basename, parent_dirname_relative,
                        subdir_basenames_ignored)

                    # Remove these subdirectories from the original iterable.
                    # Since the os.walk() function supports in-place changes to
                    # this iterable, this iterable is modified via this less
                    # efficient function rather than efficient alternatives
                    # (e.g., set subtraction).
                    sequences.remove_items(sequence=subdir_basenames,
                                           items=subdir_basenames_ignored)

                # If ignoring one or more files...
                if file_basenames_ignored:
                    # Log the basenames of these files.
                    logs.log_debug('Ignoring source "%s/%s" files: %r',
                                   trg_root_basename, parent_dirname_relative,
                                   file_basenames_ignored)

                    # Remove these files from the original iterable. Unlike
                    # above, we could technically modify this iterable via
                    # set subtraction: e.g.,
                    #
                    #     subdir_basenames -= subdir_basenames_ignored
                    #
                    # For orthogonality, preserve the above approach instead.
                    sequences.remove_items(sequence=file_basenames,
                                           items=file_basenames_ignored)

            # Absolute pathname of the corresponding target directory.
            trg_parent_dirname = pathnames.join(trg_root_dirname,
                                                parent_dirname_relative)

            # Create this target directory if needed.
            make_unless_dir(trg_parent_dirname)

            # For the basename of each non-ignorable file of this source
            # directory...
            for file_basename in file_basenames:
                # Absolute filenames of this source and target file.
                src_filename = pathnames.join(src_parent_dirname,
                                              file_basename)
                trg_filename = pathnames.join(trg_parent_dirname,
                                              file_basename)

                # If this target file already exists...
                if paths.is_path(trg_filename):
                    # Relative filename of this file. The absolute filename of
                    # this source or target file could be logged instead, but
                    # this relative filename is significantly more terse.
                    filename_relative = pathnames.join(
                        trg_root_basename, parent_dirname_relative,
                        file_basename)

                    # Warn of this file being ignored.
                    logs.log_warning('Ignoring existing target file: %s',
                                     filename_relative)

                    # Ignore this file by continuing to the next.
                    continue

                # Copy this source to target file.
                files.copy(src_filename=src_filename,
                           trg_filename=trg_filename)
    # Else, this overwrite policy is unrecognized. Raise an exception.
    else:
        raise BetseDirException(
            'Overwrite policy "{}" unrecognized.'.format(overwrite_policy))
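
# Standalone sketch of the shutil.ignore_patterns() predicate leveraged
# above: given a directory and its child basenames, it returns the subset of
# basenames matching any passed glob, which the copy then skips.
import shutil

_ignore_func = shutil.ignore_patterns('*.tmp', '.keep')
assert _ignore_func('/any/dir', ['a.tmp', 'b.txt', '.keep']) == {
    'a.tmp', '.keep'}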
Example #16
def _is_blas_optimized_opt_info_libraries() -> BoolOrNoneTypes:
    '''
    ``True`` only if the first item of the ``libraries`` list of the global
    :data:`numpy.__config__.blas_opt_info` dictionary heuristically
    corresponds to that of an optimized BLAS implementation, ``False`` if a
    non-fatal error condition arises (e.g., due to this list or dictionary
    being undefined), *or* ``None`` otherwise.

    This function returns ``None`` when unable to deterministically decide this
    boolean, in which case a subsequent heuristic will attempt to do so.

    Numpy does *not* define a public API exposing this boolean to callers.
    Numpy only defines a private API defining a medley of metadata from which
    this boolean is indirectly derivable: the :mod:`numpy.__config__`
    submodule. The :func:`numpy.distutils.misc_util.generate_config_py`
    function programmatically fabricates the contents of the
    :mod:`numpy.__config__` submodule at Numpy installation time. Ergo, this
    function introspectively inspects these contents for uniquely identifying
    metadata in a portable manner.
    '''

    # Global BLAS linkage dictionary for this Numpy installation if any or
    # "None" otherwise. Technically, this dictionary should *ALWAYS* be
    # defined. Reality probably occasionally begs to disagree, however.
    blas_opt_info = getattr(numpy_config, 'blas_opt_info', None)

    # If this dictionary is undefined, log a non-fatal warning and return
    # False. While sad, this is *NOT* worth raising an exception over.
    if blas_opt_info is None:
        logs.log_warning(
            'Numpy installation misconfigured: '
            '"numpy.__config__.blas_opt_info" dictionary not found.')
        return False

    # List of the uniquely identifying substrings of all BLAS library basenames
    # this version of Numpy is linked against in a high-level manner if any or
    # "None" otherwise.
    #
    # Note that this list is *NOT* guaranteed to exist. When this version of
    # Numpy is linked against a BLAS library in a low-level manner (e.g., via
    # "'extra_link_args': ['-Wl,-framework', '-Wl,Accelerate']" on macOS), this
    # list should *NOT* exist. In most other cases, this list should exist. To
    # avoid edge cases, this list is ignored if absent.
    blas_basename_substrs = blas_opt_info.get('libraries', None)

    # If this list is either undefined or empty, silently noop.
    if not blas_basename_substrs:
        return None
    # Else, this list is non-empty.

    # First element of this list. For simplicity, this function assumes the
    # BLAS library identified by this element currently exists. While
    # iteratively testing all listed BLAS libraries for existence would be
    # feasible, doing so is platform-specific and hence non-trivially fragile.
    blas_basename_substr = blas_basename_substrs[0]

    # If the BLAS library identified by this element is optimized, return
    # True. Since this element may be suffixed by non-identifying metadata
    # (e.g., version), a regular expression is leveraged.
    if regexes.is_match(
            text=blas_basename_substr,
            regex=_OPTIMIZED_BLAS_OPT_INFO_LIBRARY_REGEX,
    ):
        return True

    # Else, instruct our caller to continue to the next heuristic.
    return None
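
# Interactive sketch of the private metadata heuristically inspected above
# (assumes an older NumPy still shipping "numpy.__config__.blas_opt_info";
# newer releases no longer define it and expose numpy.show_config() instead).
import numpy

_blas_opt_info = getattr(numpy.__config__, 'blas_opt_info', None)
if _blas_opt_info is not None:
    print(_blas_opt_info.get('libraries'))  # e.g., ['openblas', 'openblas']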
Example #17
def is_writer_command_codec(writer_basename: str,
                            codec_name: StrOrNoneTypes) -> bool:
    '''
    ``True`` only if the matplotlib animation writer class running the external
    command with the passed basename (e.g., ``ffmpeg``) supports the video
    codec with the passed encoder-specific name (e.g., ``libx264``).

    Specifically, this function returns ``True`` only if this basename is:

    * ``ffmpeg`` and the ``ffmpeg -help encoder={codec_name}`` command
      succeeds.
    * ``avconv`` and the ``avconv -help encoder={codec_name}`` command
      succeeds.
    * ``mencoder``, the ``mencoder -ovc help`` command lists the
      Mencoder-specific ``lavc`` video codec, and either:

      * ``ffmpeg`` is in the current ``${PATH}`` and recursively calling this
        function as ``is_writer_codec('ffmpeg', codec_name)`` returns ``True``.
      * ``ffmpeg`` is *not* in the current ``${PATH}``, in which case this
        function assumes the passed codec to be supported and returns ``True``.

    * Any other passed basename (e.g., ``convert``, implying ImageMagick) *and*
      the passed codec is ``None``. These basenames are assumed to *not*
      actually be video encoders and thus support no video codecs.

    Parameters
    ----------
    writer_basename : str
        Basename of the external command of the video encoder to test.
    codec_name : str
        Encoder-specific name of the codec to be tested for.

    Returns
    ----------
    bool
        ``True`` only if this encoder supports this codec.

    Raises
    ----------
    BetseMatplotlibException
        If this basename is either:

        * Unrecognized by this application.
        * Unregistered with :mod:`matplotlib`.
        * Not found as an external command in the current ``${PATH}``.
        * Mencoder and the ``mencoder -ovc help`` command fails to list the
          Mencoder-specific ``lavc`` video codec required by Matplotlib.
    '''

    # Log this detection attempt.
    logs.log_debug('Detecting encoder "%s" codec "%s"...', writer_basename,
                   codec_name)

    # Absolute path of this command.
    writer_filename = get_writer_command_filename(writer_basename)

    # For FFmpeg, detect this codec by capturing help documentation output by
    # the "ffmpeg" command for this codec and grepping this output for a string
    # stating this codec to be unrecognized. Sadly, this command reports
    # success rather than failure when this codec is unrecognized. (wut,
    # FFmpeg?)
    if writer_basename == 'ffmpeg':
        # List of all shell words comprising the command to be tested.
        ffmpeg_command_words = (
            writer_filename,
            '-help',
            'encoder=' + shellstr.shell_quote(codec_name),
        )

        # Help documentation for this codec captured from "ffmpeg".
        ffmpeg_codec_help = cmdrun.get_stdout_or_die(ffmpeg_command_words)

        # Return whether this documentation is suffixed by a string implying
        # this codec to be unrecognized or not. If this codec is unrecognized,
        # this documentation ends with the following line:
        #
        #     Codec '${codec_name}' is not recognized by FFmpeg.
        return not ffmpeg_codec_help.endswith("' is not recognized by FFmpeg.")
    # For Libav, detect this codec in the same exact manner as for FFmpeg.
    elif writer_basename == 'avconv':
        # List of all shell words comprising the command to be tested.
        avconv_command_words = (
            writer_filename,
            '-help',
            'encoder=' + shellstr.shell_quote(codec_name),
        )

        # Help documentation for this codec captured from "avconv".
        avconv_codec_help = cmdrun.get_stdout_or_die(avconv_command_words)
        # print('avconv_command_words: {}'.format(avconv_command_words))
        # print('avconv_codec_help: {}'.format(avconv_codec_help))

        # Return whether this documentation is suffixed by an indicative
        # string.
        return not avconv_codec_help.endswith("' is not recognized by Libav.")
    # For Mencoder, detect this codec by capturing help documentation output by
    # the "mencoder" command for *ALL* video codecs, grepping this output for
    # the "lavc" video codec required by matplotlib, and, if found, repeating
    # the above FFmpeg-specific logic to specifically detect this codec.
    elif writer_basename == 'mencoder':
        # Help documentation for all codecs captured from "mencoder".
        mencoder_codecs_help = cmdrun.get_stdout_or_die(
            (writer_filename, '-ovc', 'help'))
        # print('mencoder codecs help: ' + mencoder_codecs_help)

        # If this output contains a line resembling the following, this
        # installation of Mencoder supports the requisite "lavc" codec:
        #     lavc     - libavcodec codecs - best quality!
        if regexes.is_match_line(text=mencoder_codecs_help,
                                 regex=r'^\s+lavc\s+'):
            # If the "ffmpeg" command is installed on the current system, query
            # that command for whether or not the passed codec is supported.
            # Note that the recursion bottoms out with this call, as the above
            # logic handling the FFmpeg writer does *NOT* recall this function.
            if is_writer_command('ffmpeg'):
                return is_writer_command_codec('ffmpeg', codec_name)
            # Else, "ffmpeg" is *NOT* in the ${PATH}. Since Mencoder implements
            # "lavc" codec support by linking against the "libavcodec" shared
            # library rather than calling the "ffmpeg" command, it's
            # technically permissible (albeit uncommon) for the "mencoder" but
            # not "ffmpeg" command to be in the ${PATH}. Hence, this does *NOT*
            # indicate a fatal error. This does, however, prevent us from
            # querying whether or not the passed codec is supported. In lieu of
            # sensible alternatives...
            else:
                # Log a non-fatal warning.
                logs.log_warning(
                    'Mencoder "libavcodec"-based video codec "{}" '
                    'possibly unavailable. Consider installing FFmpeg to '
                    'resolve this warning.'.format(codec_name))

                # Assume the passed codec to be supported.
                return True
        # Else, Mencoder fails to support the "lavc" codec. Raise an exception.
        else:
            raise BetseMatplotlibException(
                'Mencoder video codec "lavc" unavailable.')

    # For any other writer (e.g., ImageMagick), assume this writer to *NOT* be
    # a video encoder and hence support *NO* video codecs. In this case, return
    # True only if the passed codec is "None" -- signifying "no video codec."
    return codec_name is None
Example #18
def is_aqua() -> bool:
    '''
    ``True`` only if the current process has access to the Aqua display server
    specific to macOS, implying this process to be headfull and hence support
    both CLIs and GUIs.

    See Also
    ----------
    https://developer.apple.com/library/content/technotes/tn2083/_index.html#//apple_ref/doc/uid/DTS10003794-CH1-SUBSECTION19
        "Security Context" subsection of "Technical Note TN2083: Daemons and
        Agents," a psuedo-human-readable discussion of the
        ``sessionHasGraphicAccess`` bit flag returned by the low-level
        ``SessionGetInfo()`` C function.
    '''

    # Avoid circular import dependencies.
    from betse.util.path import files
    from betse.util.os.command.cmdexit import SUCCESS

    # If the current platform is *NOT* macOS, return false.
    if not is_macos():
        return False
    # Else, the current platform is macOS.

    # Attempt all of the following in a safe manner catching, logging, and
    # converting exceptions into a false return value. This tester is *NOT*
    # mission-critical and hence should *NOT* halt the application on
    # library-specific failures.
    try:
        # If the system-wide Macho-O shared library providing the macOS
        # security context for the current process does *NOT* exist (after
        # following symbolic links), raise an exception.
        files.die_unless_file(_SECURITY_FRAMEWORK_DYLIB_FILENAME)

        # Dynamically load this library into the address space of this process.
        security_framework = CDLL(_SECURITY_FRAMEWORK_DYLIB_FILENAME)

        # Possibly non-unique identifier of the security session to request the
        # attributes of, signifying that of the current process.
        session_id = _SECURITY_SESSION_ID_CURRENT

        # Unique identifier of the requested security session, returned
        # by reference from the SessionGetInfo() C function called below. This
        # identifier is useless for our purposes and hence ignored below.
        session_id_real = c_int(0)

        # Attributes bit field of the requested security session, returned by
        # reference from the SessionGetInfo() C function called below.
        session_attributes = c_int(0)

        # C-style error integer returned by calling the SessionGetInfo() C
        # function exported by this Macho-O shared library, passing:
        #
        # * The input non-unique session identifier by value.
        # * The output unique session identifier by reference.
        # * The output session attributes integer by reference.
        session_errno = security_framework.SessionGetInfo(
            session_id, byref(session_id_real), byref(session_attributes))

        # This process has access to the Aqua display server if and only if...
        return (
            # The above function call succeeded *AND*...
            session_errno == SUCCESS and
            # The session attributes bit field returned by this call has the
            # corresponding bit flag enabled.
            session_attributes.value & _SECURITY_SESSION_HAS_GRAPHIC_ACCESS)

    # If the above logic fails with any exception...
    except Exception as exc:
        # Log a non-fatal warning informing users of this failure.
        logs.log_warning(
            'macOS-specific SessionGetInfo() C function failed: {}'.format(
                exc))

        # Assume this process to *NOT* have access to the Aqua display server.
        return False
Example #19
def get_first_codec_name(
    writer_name: str,
    container_filetype: str,
    codec_names: SequenceTypes,
) -> StrOrNoneTypes:
    '''
    Name of the first video codec (e.g., ``libx264``) in the passed list
    supported by both the encoder with the passed matplotlib-specific name
    (e.g., ``ffmpeg``) and the container format with the passed filetype (e.g.,
    ``mkv``, ``mp4``) if that writer supports codecs (as most do), ``None`` if
    this writer supports no codecs (e.g., ``imagemagick``) and the passed list
    contains ``None``, *or* raise an exception otherwise (i.e., if no passed
    codecs are supported by both this writer and container format).

    Algorithm
    ----------
    This function iteratively searches for video codecs in the same order as
    listed in the passed list as follows:

    * If there are no remaining video codecs in this list to be examined, an
      exception is raised.
    * If the current video codec has the application-specific name ``auto``,
      the name of an intelligently selected codec supported by both this
      encoder and container if any is returned *or* an exception is raised
      otherwise (i.e., if no codecs are supported by both this encoder and
      container). Note that this codec's name rather than the
      application-specific name ``auto`` is returned. See this function's body
      for further commentary.
    * Else if the current video codec is supported by both this encoder and
      container, this codec's name is returned.
    * Else the next video codec in this list is examined.

    Parameters
    ----------
    writer_name : str
        Matplotlib-specific alphanumeric lowercase name of the video encoder to
        search for the passed codecs.
    container_filetype: str
        Filetype of the video container format to constrain this search to.
    codec_names: SequenceTypes
        Sequence of the encoder-specific names of all codecs to search for (in
        descending order of preference).

    Returns
    ----------
    str
        Name of the first codec in the passed list supported by both this
        encoder and container.

    Raises
    ----------
    BetseMatplotlibException
        If any of the following errors arise:

        * This writer is either:

          * Unrecognized by this application or :mod:`matplotlib`.
          * Not found as an external command in the current ``${PATH}``.

        * This container format is unsupported by this writer.
        * No codec whose name is in the passed list is supported by both this
          writer and this container format.

    See Also
    ----------
    :func:`is_writer`
        Tester validating this writer.
    '''

    # If this writer is unrecognized, raise an exception.
    die_unless_writer(writer_name)

    # Basename of this writer's command.
    writer_basename = WRITER_NAME_TO_COMMAND_BASENAME[writer_name]

    # Dictionary mapping from the filetype of each video container format to a
    # list of the names of all video codecs supported by this writer.
    container_filetype_to_codec_names = (
        WRITER_BASENAME_TO_CONTAINER_FILETYPE_TO_CODEC_NAMES[writer_basename])

    # If the passed container is unsupported by this writer, raise an exception.
    if container_filetype not in container_filetype_to_codec_names:
        raise BetseMatplotlibException(
            'Video container format "{}" unsupported by '
            'matplotlib animation video writer "{}".'.format(
                container_filetype, writer_name))

    # List of the names of all candidate codecs supported by both this encoder
    # and this container.
    codec_names_supported = container_filetype_to_codec_names[
        container_filetype]

    # List of the names of all candidate codecs to be detected below, with each
    # instance of "auto" in the original list of these names replaced by the
    # list of all codecs supported by both this encoder and container.
    codec_names_candidate = []

    # For the name of each candidate codec...
    for codec_name in codec_names:
        # If this is the BETSE-specific name "auto", append the names of all
        # codecs supported by both this encoder and container.
        if codec_name == 'auto':
            codec_names_candidate.extend(codec_names_supported)
        # Else, append only the name of this codec.
        else:
            codec_names_candidate.append(codec_name)

    # Log this detection attempt.
    logs.log_debug('Detecting encoder "%s" codec from candidates: %r',
                   writer_name, codec_names_candidate)

    # For the name of each preferred codec...
    for codec_name in codec_names_candidate:
        # If this encoder supports this codec...
        if is_writer_command_codec(writer_basename, codec_name):
            # Log this detection result.
            logs.log_debug('Detected encoder "%s" codec "%s".', writer_name,
                           codec_name)

            # If this container is not known to support this codec, log a
            # non-fatal warning. Since what this application thinks it knows
            # and what reality actually is need not coincide, this container
            # could actually still support this codec. Hence, this edge case
            # does *NOT* constitute a hard, fatal error.
            if codec_name not in codec_names_supported:
                logs.log_warning(
                    'Encoder "%s" container "%s" '
                    'not known to support codec "%s".', writer_name,
                    container_filetype, codec_name)

            # Return the name of this codec.
            return codec_name

    # Else, no passed codecs are supported by this combination of writer and
    # container format. Raise an exception.
    raise BetseMatplotlibException(
        'Codec(s) {} unsupported by '
        'encoder "{}" and/or container "{}".'.format(
            strjoin.join_as_conjunction_double_quoted(*codec_names),
            writer_name, container_filetype))
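
# Hypothetical call sketch mirroring the docstring above: "auto" expands to
# every codec supported by this writer/container pair, with any explicit
# fallbacks searched in descending order of preference.
#
#     codec_name = get_first_codec_name(
#         writer_name='ffmpeg',
#         container_filetype='mp4',
#         codec_names=('auto', 'libx264'),
#     )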
Example #20
    def sim_grn(self) -> SimPhase:
        '''
        Initialize and simulate a pure gene regulatory network (GRN) *without*
        bioelectrics with the cell cluster seeded by a prior call to the
        :meth:`seed` method and cache this initialization and simulation to
        output files, specified by the current configuration file.

        This method *must* be called prior to the :meth:`plot_grn` method, which
        consumes this output as input.

        Returns
        ----------
        SimPhase
            High-level simulation phase instance encapsulating all objects
            internally created by this method to run this phase.
        '''

        # Simulation phase object, defaulting to undefined initially.
        phase = None

        # Simulation phase type.
        phase_kind = SimPhaseKind.INIT

        # Simulator objects initialized below.
        cells = None
        sim = None

        # Log this simulation.
        logs.log_info(
            'Running gene regulatory network "%s" '
            'defined in config file "%s"...',
            pathnames.get_basename(self._p.grn_config_filename),
            self._p.conf_basename)

        # If networking an uninitialized, unsimulated cell cluster...
        if self._p.grn_unpickle_phase_type is GrnUnpicklePhaseType.SEED:
            if not files.is_file(self._p.seed_pickle_filename):
                if not self._p.autoInit:
                    raise BetseSimException(
                        'Simulation halted due to missing core seed. '
                        'Please run "betse seed" and try again.')

                # Seed a new cell cluster (i.e., world).
                logs.log_info('Automatically seeding cell cluster...')
                self.seed()

            # Load the seed from cache.
            cells, _ = fh.loadWorld(self._p.seed_pickle_filename)
            logs.log_info('Running gene regulatory network on betse seed...')
            logs.log_info('Now using cell cluster to run initialization.')

            # Simulation phase.
            phase = SimPhase(
                kind=phase_kind,
                callbacks=self._callbacks,
                cells=cells,
                p=self._p,
            )

            # Initialize core simulation data structures.
            phase.sim.init_core(phase)
            phase.sim.init_dynamics(phase)

            #FIXME: Shift the following assignments into a new public
            #"Simulator" method -- say, Simulator.init_core_null().

            # Initialize other fields required to piggyback the GRN on the
            # sim object.
            phase.sim.time = []
            phase.sim.vm = -50e-3 * np.ones(phase.sim.mdl)

            # Initialize key simulator fields required for interfacing
            # (dummy initialization).
            phase.sim.rho_pump = 1.0
            phase.sim.rho_channel = 1.0
            phase.sim.conc_J_x = np.zeros(phase.sim.edl)
            phase.sim.conc_J_y = np.zeros(phase.sim.edl)
            phase.sim.J_env_x  = np.zeros(phase.sim.edl)
            phase.sim.J_env_y  = np.zeros(phase.sim.edl)
            phase.sim.u_env_x  = np.zeros(phase.sim.edl)
            phase.sim.u_env_y  = np.zeros(phase.sim.edl)
        # Else if networking an initialized but unsimulated cell cluster...
        elif self._p.grn_unpickle_phase_type is GrnUnpicklePhaseType.INIT:
            if not files.is_file(self._p.init_pickle_filename):
                if not self._p.autoInit:
                    raise BetseSimException(
                        'Simulation halted due to missing core initialization. '
                        'Please run "betse init" and try again.')

                logs.log_info('Automatically initializing cell cluster...')
                self.init()
                logs.log_info('Now using initialization to run simulation.')

            # Load the initialization from cache.
            logs.log_info('Running gene regulatory network on betse init...')
            sim, cells, _ = fh.loadSim(self._p.init_pickle_filename)
        # Else if networking an initialized, simulated cell cluster...
        elif self._p.grn_unpickle_phase_type is GrnUnpicklePhaseType.SIM:
            if not files.is_file(self._p.sim_pickle_filename):
                raise BetseSimException(
                    'Simulation halted due to missing core simulation. '
                    'Please run "betse sim" and try again.')

            # Load the simulation from cache.
            logs.log_info('Running gene regulatory network on betse sim...')
            sim, cells, _ = fh.loadSim(self._p.sim_pickle_filename)
        # Else, this type of networking is unrecognized. Raise an exception.
        else:
            raise BetseSimConfException(
                'Gene regulatory network (GRN) unpickle simulation phase '
                '"{}" unrecognized.'.format(self._p.grn_unpickle_phase_type))

        # If *NOT* defined above, define this simulation phase.
        if phase is None:
            phase = SimPhase(
                kind=phase_kind,
                callbacks=self._callbacks,
                cells=cells,
                p=self._p,
                sim=sim,
            )

            # Reinitialize all profiles.
            phase.dyna.init_profiles(phase)
            phase.dyna.init_events(phase)

        # If *NOT* restarting from a prior GRN run, start a new GRN.
        if self._p.grn_unpickle_filename is None:
            # Log this start.
            logs.log_info("Initializing the gene regulatory network...")

            # Create and initialize an instance of the master of genes.
            MoG = MasterOfGenes(self._p)
            MoG.read_gene_config(phase)
        # Else, restart from a prior GRN run.
        else:
            # Log this restart.
            logs.log_info(
                'Reinitializing the gene regulatory network from "%s"...',
                pathnames.get_basename(self._p.grn_unpickle_filename))

            # If this file does *NOT* exist, raise an exception.
            _die_unless_file_pickled(
                filename=self._p.grn_unpickle_filename,
                subcommand='sim-grn',
                subcommand_label='Gene regulatory network')

            # Unpickle this file into a high-level "MasterOfGenes" object.
            MoG, _, _ = pickles.load(self._p.grn_unpickle_filename)

            # If running on a sim with a cut event, perform this cut...
            if (
                phase.dyna.event_cut is not None and
                phase.dyna.event_cut.is_fired
            ):
                # Log this cutting.
                logs.log_info(
                    'A cutting event has been run, '
                    'so the GRN object needs to be modified...')

                # If no prior initialization exists, raise an exception.
                if not files.is_file(self._p.sim_pickle_filename):
                    logs.log_warning(
                        'A cutting event has been run, '
                        'making this situation complex. '
                        'A corresponding init file is required '
                        'to run the GRN simulation!')

                    raise BetseSimException(
                        'Simulation terminated due to missing core init. '
                        'Please alter GRN settings and try again.')
                # Else, a prior initialization exists.

                # Log this initialization.
                logs.log_info(
                    'Loading betse init from cache '
                    'for reference to original cells...')

                # Load the initialization from cache.
                sim_old, cells_old, _ = fh.loadSim(self._p.sim_pickle_filename)

                #FIXME: This phase object would ideally be pickled to and
                #from the "self._p.sim_pickle_filename" file loaded above, in
                #which case this local variable would be safely removable.

                # Original simulation phase. To avoid caller confusion, the
                # optional "callbacks" parameter is intentionally *NOT* passed.
                phase_old = SimPhase(
                    kind=phase_kind,
                    p=self._p,
                    cells=cells_old,
                    sim=sim_old,
                )

                # Initialize all tissue profiles on original cells.
                phase_old.dyna.init_profiles(phase_old)

                for cut_profile_name in phase_old.p.event_cut_profile_names:
                    logs.log_info(
                        'Cutting cell cluster via cut profile "%s"...',
                        cut_profile_name)

                    # Object picking the cells removed by this cut profile.
                    tissue_picker = phase_old.dyna.cut_name_to_profile[
                        cut_profile_name].picker

                    # One-dimensional Numpy arrays of the indices of all
                    # cells and cell membranes to be removed.
                    target_inds_cell, target_inds_mems = (
                        tissue_picker.pick_cells_and_mems(
                            cells=cells_old, p=self._p))

                    MoG.core.mod_after_cut_event(
                        phase, target_inds_cell, target_inds_mems)
                    MoG.core.redefine_dynamic_dics(sim, cells, self._p)

                    logs.log_info(
                        'Reinitializing gene regulatory network '
                        'for simulation...')
                    MoG.reinitialize(phase)

            # if self._p.use_microtubules:
            #     sim.mtubes.mtubes_x = MoG.mtubes_x_time[-1]
            #     sim.mtubes.mtubes_y = MoG.mtubes_y_time[-1]
            #
            #     sim.mtubes.uxmt, sim.mtubes.uymt = sim.mtubes.mtubes_to_cell(
            #         cells, self._p)

        logs.log_info('Simulating gene regulatory network...')
        MoG.run_core_sim(phase)

        # Return this phase.
        return phase
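
A minimal usage sketch of the intended call order follows. Only seed(), sim_grn(), and plot_grn() are taken from this example; the runner class name and constructor arguments are assumptions for illustration:

# Hypothetical instantiation; the real constructor signature may differ.
runner = SimRunner(p=Parameters(conf_filename='sim_config.yaml'))

runner.seed()             # seed the cell cluster (skippable when autoInit is on)
phase = runner.sim_grn()  # initialize and simulate the GRN, caching output files
runner.plot_grn()         # consume the cached GRN output as input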
Example #21
def _upgrade_sim_conf_to_0_5_0(p: Parameters) -> None:
    '''
    Upgrade the in-memory contents of the passed simulation configuration to
    reflect the newest structure of these contents expected by version 0.5.0
    (i.e., "Happy Hodgkin") of this application.
    '''

    # Log this upgrade attempt.
    logs.log_debug('Upgrading simulation configuration to 0.5.0 format...')

    # Localize configuration subdictionaries for convenience.
    results_dict = p._conf['results options']

    # For backward compatibility, convert the prior into the current
    # configuration format.
    if not ('while solving' in results_dict and
            'after solving' in results_dict and
            'save' in results_dict):
        # Log a non-fatal warning.
        logs.log_warning(
            'Config file results options '
            '"while solving", "after solving", and/or "save" not found. '
            'Repairing to preserve backward compatibility. '
            'Consider upgrading to the newest config file format!')

        # For convenience, localize configuration subdictionaries.
        anim_save = results_dict['save animations']
        anim_save_frames = anim_save['frames']

        # Convert the prior into the current configuration format.
        results_dict['while solving'] = {
            'animations': {
                'enabled': (results_dict['plot while solving'] or
                            results_dict['save solving plot']),
                'show': results_dict['plot while solving'],
                'save': results_dict['save solving plot'],
            },
        }
        results_dict['after solving'] = {
            'plots': {
                'enabled': (results_dict['display plots'] or
                            results_dict['automatically save plots']),
                'show': results_dict['display plots'],
                'save': results_dict['automatically save plots'],
            },
            'animations': {
                'enabled': results_dict['create all animations'],
                'show': results_dict['display plots'],
                'save': anim_save_frames['enabled'],
            },
        }
        results_dict['save'] = {
            'plots': {
                'filetype': anim_save_frames['filetype'],
                'dpi': anim_save_frames['dpi'],
            },
            'animations': {
                'images': {
                    'enabled': anim_save_frames['enabled'],
                    'filetype': anim_save_frames['filetype'],
                    'dpi': anim_save_frames['dpi'],
                },
                'video': {
                    'enabled': False,
                    'filetype': 'mkv',
                    'dpi': 300,
                    'bitrate': 1500,
                    'framerate': 5,
                    'metadata': {
                        'artist': 'BETSE',
                        'genre': 'Bioinformatics',
                        'subject': 'Bioinformatics',
                        'comment': 'Produced by BETSE.',
                    },
                    'writers': ['ffmpeg', 'avconv', 'mencoder', 'imagemagick'],
                    'codecs': ['auto'],
                },
            },
            'data': {
                'all': {
                    'enabled': results_dict['export data to file'],
                    'filetype': 'csv',
                },
                'vmem': {
                    'enabled': results_dict['export 2D data to file'],
                    'filetype': 'csv',
                },
            }
        }

    after_solving_anims = results_dict['after solving']['animations']
    after_solving_plots = results_dict['after solving']['plots']

    if 'pipeline' not in after_solving_anims:
        # Log a non-fatal warning.
        logs.log_warning(
            'Config file setting "results options" -> "after solving" -> '
            '"animations" -> "pipeline" not found. '
            'Repairing to preserve backward compatibility. '
            'Consider upgrading to the newest config file format!')

        # Default the value for this dictionary key to the empty list.
        after_solving_anims['pipeline'] = []

    while_solving_anims = results_dict['while solving']['animations']

    if 'colorbar' not in while_solving_anims:
        # Log a non-fatal warning.
        logs.log_warning(
            'Config file setting "results options" -> "while solving" -> '
            '"animations" -> "colorbar" not found. '
            'Repairing to preserve backward compatibility. '
            'Consider upgrading to the newest config file format!')

        # Default the value for this dictionary key to the typical settings.
        while_solving_anims['colorbar'] = {
            'autoscale': True,
            'minimum': -70.0,
            'maximum': 10.0,
        }

    if 'single cell pipeline' not in after_solving_plots:
        # If the legacy nested key is also missing, log a non-fatal warning.
        if 'single cell' not in after_solving_plots:
            logs.log_warning(
                'Config file setting "results options" -> "after solving" -> '
                '"plots" -> "single cell pipeline" not found. '
                'Repairing to preserve backward compatibility. '
                'Consider upgrading to the newest config file format!')

        # Default this key to the legacy nested pipeline if present or to the
        # empty list otherwise.
        after_solving_plots['single cell pipeline'] = (
            after_solving_plots['single cell']['pipeline']
            if 'single cell' in after_solving_plots else [])

    if 'cell cluster pipeline' not in after_solving_plots:
        # If the legacy nested key is also missing, log a non-fatal warning.
        if 'cell cluster' not in after_solving_plots:
            logs.log_warning(
                'Config file setting "results options" -> "after solving" -> '
                '"plots" -> "cell cluster pipeline" not found. '
                'Repairing to preserve backward compatibility. '
                'Consider upgrading to the newest config file format!')

        # Default this key to the legacy nested pipeline if present or to the
        # empty list otherwise.
        after_solving_plots['cell cluster pipeline'] = (
            after_solving_plots['cell cluster']['pipeline']
            if 'cell cluster' in after_solving_plots else [])

    if 'plot networks single cell' not in results_dict:
        # Log a non-fatal warning.
        logs.log_warning(
            'Config file setting "results options" -> '
            '"plot networks single cell" not found. '
            'Repairing to preserve backward compatibility. '
            'Consider upgrading to the newest config file format!')

        # Default this key to the legacy "plot single cell graphs" setting.
        results_dict['plot networks single cell'] = results_dict[
            'plot single cell graphs']

    # For each pipelined animation and cell cluster plot...
    for anim_conf in iterables.iter_items(
            results_dict['after solving']['animations']['pipeline'],
            results_dict['after solving']['plots']['cell cluster pipeline'],
    ):
        # Add the "enabled" boolean.
        if 'enabled' not in anim_conf:
            anim_conf['enabled'] = True

        # To disambiguate, rename:
        #
        # * "polarization" to "voltage_polarity".
        # * "junction_state" to "gj_permeability".
        if anim_conf['type'] == 'polarization':
            anim_conf['type'] = 'voltage_polarity'
        elif anim_conf['type'] == 'junction_state':
            anim_conf['type'] = 'gj_permeability'
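
The repair above is plain dictionary surgery: detect a missing modern key, then synthesize it from the legacy keys. The same pattern on a self-contained toy configuration (the dictionary below is fabricated for illustration; its key names mirror the ones above):

legacy = {
    'plot while solving': True,
    'save solving plot': False,
}

# Synthesize the modern nested section only when it is absent.
if 'while solving' not in legacy:
    legacy['while solving'] = {
        'animations': {
            'enabled': (legacy['plot while solving'] or
                        legacy['save solving plot']),
            'show': legacy['plot while solving'],
            'save': legacy['save solving plot'],
        },
    }

assert legacy['while solving']['animations']['enabled'] is True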
Example #22
    def export_tissue_cuts(self, phase: SimPhase,
                           conf: SimConfExportPlotCells) -> None:
        '''
        Plot a **tissue and cut profile tessellation** (i.e., tiled mosaic of
        all cells spatially subdivided into tissue and cut profile regions such
        that all cells in the same region share the same arbitrary color) for
        the cell cluster.

        This plot is irrespective of time step.
        '''

        # Prepare to export the current plot.
        self._export_prep(phase)

        # Localize frequently accessed fields for convenience.
        p = phase.p
        dyna = phase.dyna
        cells = phase.cells
        colormap = phase.p.background_cm

        col_dic = {}
        cb_ticks = []
        cb_tick_labels = []

        # Ordered dictionary mapping from the name of each tissue and cut
        # profile to a one-dimensional Numpy array of the 0-based indices of
        # all cells owned by this profile.
        #
        # Note that order is absolutely significant. The first tissue profile
        # is a pseudo-tissue defining the cell cluster itself. If order were
        # *NOT* preserved here, this tissue would be assigned an arbitrary
        # z-order, in which case all tissues assigned smaller z-orders would be
        # entirely obscured by that pseudo-tissue covering the cell cluster.
        profile_name_to_cells_index = OrderedDict()

        fig = pyplot.figure()
        ax = pyplot.subplot(111)

        # For the name of each tissue profile...
        for tissue_name in dyna.tissue_name_to_profile:
            # One-dimensional Numpy array of the indices of all tissue cells.
            tissue_cells_index = dyna.cell_target_inds[tissue_name]

            # If this tissue contains no cells, skip to the next tissue.
            if not len(tissue_cells_index):
                logs.log_warning('Tissue "%s" contains no cells.', tissue_name)
                continue

            # Else, tissue contains one or more cells. Map this tissue to these
            # indices (for subsequent lookup).
            profile_name_to_cells_index[tissue_name] = tissue_cells_index

        #FIXME: The "p.plot_cutlines" boolean is only ever leveraged here and
        #thus is arguably extraneous. Consider refactoring as follows:
        #
        #* Remove the "p.plot_cutlines" boolean and corresponding
        #  YAML-formatted default configuration option.
        #* Split the existing "tissue_cuts" plot type in the "cell cluster
        #  pipeline" into the following two types:
        #  * "tissue", unconditionally plotting *ONLY* tissue profiles.
        #  * "tissue_cuts", unconditionally plotting both tissue and cut
        #    profiles.
        #* Define a new private _export_profiles() method as follows:
        #      @type_check
        #      def _export_profiles(
        #          self,
        #          conf: SimConfExportPlotCells,
        #          is_tissue: bool,
        #          is_cuts: bool,
        #      ) -> None:
        #* Implement export_tissue() to call _export_profiles() as:
        #      self._export_profiles(
        #          conf=conf,
        #          is_tissue=True,
        #          is_cuts=False,
        #      )
        #* Implement export_tissue_cuts() similarly.

        # If plotting cut profiles as well *AND* the cutting event is
        # enabled...
        if p.plot_cutlines and dyna.event_cut is not None:
            # For the name and object encapsulating each cut profile...
            for cut_name, cut_profile in dyna.cut_name_to_profile.items():
                # Map this cut to the indices of all cells cut by this profile.
                profile_name_to_cells_index[cut_name] = (
                    cut_profile.picker.pick_cells(cells=cells, p=p))

        # Minimum and maximum 0-based integers uniquely identifying the first
        # and last tissue and cut profiles (respectively), localized for
        # ordering purposes in the colorbar legend.
        profile_zorder = 0
        profile_zorder_max = len(profile_name_to_cells_index)

        # For the name and one-dimensional Numpy array of the 0-based indices
        # of all cells in each tissue and/or cut profile...
        for profile_name, profile_cells_index in (
                profile_name_to_cells_index.items()):
            # logs.log_debug('Plotting tissue "%s"...', profile_name)
            profile_zorder += 1

            profile_points = mathunit.upscale_coordinates(
                cells.cell_verts[profile_cells_index])

            z = np.zeros(len(profile_points))
            z[:] = profile_zorder

            col_dic[profile_name] = PolyCollection(profile_points,
                                                   array=z,
                                                   cmap=colormap,
                                                   edgecolors='none')
            col_dic[profile_name].set_clim(0, profile_zorder_max)

            # col_dic[profile_name].set_alpha(0.8)
            col_dic[profile_name].set_zorder(profile_zorder)
            ax.add_collection(col_dic[profile_name])

            # Add this profile name to the colour legend.
            cb_ticks.append(profile_zorder)
            cb_tick_labels.append(profile_name)

        # logs.log_debug('Plotting colorbar ticks: %r', cb_ticks)
        # logs.log_debug('Plotting colorbar tick labels: %r', cb_tick_labels)

        ax_cb = None
        if dyna.tissue_name_to_profile:
            # Name of the first tissue profile.
            tissue_first_name = iterget.get_item_first(
                dyna.tissue_name_to_profile.keys())

            # Color mappable associated with this tissue profile, guaranteed in
            # this case to be a "PolyCollection" instance.
            tissue_first_mappable = col_dic[tissue_first_name]

            ax_cb = fig.colorbar(tissue_first_mappable, ax=ax, ticks=cb_ticks)
            ax_cb.ax.set_yticklabels(cb_tick_labels)

        if p.visual.is_show_cell_indices:
            for i, cll in enumerate(cells.cell_centres):
                ax.text(p.um * cll[0],
                        p.um * cll[1],
                        i,
                        ha='center',
                        va='center',
                        zorder=20)

        ax.set_xlabel('Spatial Distance [um]')
        ax.set_ylabel('Spatial Distance [um]')
        ax.set_title('Cell Cluster')

        ax.axis('equal')
        ax.axis(phase.cache.upscaled.extent)

        # Export this plot to disk and/or display.
        self._export(phase=phase, basename='cluster_mosaic')
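
The layering idiom above (one PolyCollection per region, all sharing one colormap normalization so that a single colorbar can legend every region by its z-order) reduces to the following self-contained sketch; the region geometry is fabricated for illustration:

import numpy as np
from matplotlib import pyplot
from matplotlib.collections import PolyCollection

# Two fabricated triangular "regions", each colored by its 1-based z-order.
regions = {
    'tissue A': [[(0, 0), (1, 0), (0, 1)]],
    'tissue B': [[(1, 1), (1, 0), (0, 1)]],
}

fig = pyplot.figure()
ax = pyplot.subplot(111)

mappable = None
for zorder, verts in enumerate(regions.values(), start=1):
    coll = PolyCollection(
        verts,
        array=np.full(len(verts), zorder),
        cmap='viridis',
        edgecolors='none')

    # Share one normalization across regions so a single colorbar applies.
    coll.set_clim(0, len(regions))
    coll.set_zorder(zorder)
    ax.add_collection(coll)

    # Any one collection suffices as the colorbar's mappable.
    if mappable is None:
        mappable = coll

# Tick the colorbar at each region's z-order, labeled by region name.
colorbar = fig.colorbar(
    mappable, ax=ax, ticks=list(range(1, len(regions) + 1)))
colorbar.ax.set_yticklabels(list(regions))

ax.axis('equal')
ax.axis((0, 1, 0, 1))
pyplot.show()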