Example #1
def get_install_crx2rnx(override=False, verbose=False):
    '''
    Check for the presence of crx2rnx in PATH.
    If absent, download the sources, compile, and place the binary in the
    python environment PATH location.
    If override=True, download and install regardless of presence.
    '''
    if (not _Path(f'{_sys.path[0]}/crx2rnx').is_file()) or (override):
        if verbose:
            logging.info(f'Installing crx2rnx at {_sys.path[0]}')
        tmp_dir = _Path('tmp')
        if not tmp_dir.is_dir():
            tmp_dir.mkdir()

        url = 'https://terras.gsi.go.jp/ja/crx2rnx/RNXCMP_4.0.8_src.tar.gz'
        out_f = _Path('tmp/RNXCMP_4.0.8_src.tar.gz')
        _rqs.urlretrieve(url, out_f)

        _sp.run(['tar', '-xvf', 'tmp/RNXCMP_4.0.8_src.tar.gz', '-C', 'tmp'])
        cp = [
            'gcc', '-ansi', '-O2', '-static',
            'tmp/RNXCMP_4.0.8_src/source/crx2rnx.c', '-o', 'crx2rnx'
        ]
        _sp.run(cp)
        _sp.run(['rm', '-r', 'tmp'])
        _sp.run(['mv', 'crx2rnx', _sys.path[0]])
    else:
        if verbose:
            logging.info(f'crx2rnx already present in {_sys.path[0]}')
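A portable variant of the same download-and-compile flow is possible with the standard library's tarfile and shutil in place of the external tar, rm, and mv commands. This is a minimal sketch; only the URL and compiler flags come from the example above, the rest is illustrative:

import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.request
from pathlib import Path

def install_crx2rnx_portable():
    url = 'https://terras.gsi.go.jp/ja/crx2rnx/RNXCMP_4.0.8_src.tar.gz'
    with tempfile.TemporaryDirectory() as tmp:
        archive = Path(tmp) / 'RNXCMP_4.0.8_src.tar.gz'
        urllib.request.urlretrieve(url, archive)
        with tarfile.open(archive) as tar:
            tar.extractall(tmp)          # sources land in the temp dir
        src = Path(tmp) / 'RNXCMP_4.0.8_src' / 'source' / 'crx2rnx.c'
        subprocess.run(['gcc', '-ansi', '-O2', '-static', str(src),
                        '-o', 'crx2rnx'], check=True)
    shutil.move('crx2rnx', sys.path[0])  # temp dir is cleaned up automatically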
Example #2
def get_output_file(filename, path=None, **kwargs):
    """
    Convert filename to full output path name.
    
    If path is not defined, use workspace path for output directory.
    Otherwise only convert the file name to a Path object.
    
    Parameters
    ----------
    filename: str
        File name (base name or full path).
    path: str
        Output path. If not defined and `filename` is relative, 
        then the default output path is used (see function :func:`.set_path`).
        
    Returns
    -------
    :class:`pathlib.Path`
        Full path specification as a :class:`pathlib.Path` object.
    """
    f = _Path(filename)
    if not f.is_absolute():
        if path:
            p = _Path(path)
        else:
            p = workspace().full_path('output', as_posix=False)
        f = p.joinpath(f)
    return f
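For illustration, assuming the surrounding workspace() helper maps 'output' to a hypothetical /work/output:

get_output_file('result.dat', path='/data/out')  # -> Path('/data/out/result.dat')
get_output_file('/abs/result.dat')               # absolute input: returned as a Path, unchanged
get_output_file('result.dat')                    # -> Path('/work/output/result.dat'), via workspace()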
Example #3
    def __init__(self, args):

        # TODO: Move this out
        _LOGGER.info(f"clang-build {__version__}")

        # Toolchain
        self.toolchain = None
        toolchain_file_str = args.get("toolchain", None)
        _LOGGER.info(f'toolchain_file_str "{toolchain_file_str}"')
        if toolchain_file_str:
            toolchain_file = _Path(toolchain_file_str)
            if toolchain_file.is_file():
                _LOGGER.info(f'Using toolchain file "{toolchain_file.resolve()}"')
                self.toolchain = _get_toolchain(toolchain_file)
                if not isinstance(self.toolchain, _Toolchain):
                    raise RuntimeError(
                        f'Unable to initialize toolchain:\nThe `get_toolchain` method in "{toolchain_file_str}" did not return a valid `clang_build.toolchain.Toolchain`, its type is "{type(self.toolchain)}"'
                    )
            else:
                _LOGGER.error(f'Could not find toolchain file "{toolchain_file_str}"')

        if not self.toolchain:
            _LOGGER.info("Using default LLVM toolchain")
            self.toolchain = _LLVM()

        # Build type (Default, Release, Debug)
        self.build_type = args.get("build_type", _BuildType.Default)
        _LOGGER.info(f"Build type: {self.build_type.name}")

        # Whether to force a rebuild
        self.force_build = args.get("force_build", False)
        if self.force_build:
            _LOGGER.info("Forcing rebuild...")

        # Build directory
        self.build_directory = _Path("build")

        # Whether to create a dotfile for graphing dependencies
        self.create_dependency_dotfile = not args.get("no_graph", False)

        # Whether to recursively clone submodules when cloning with git
        self.clone_recursive = not args.get("no_recursive_clone", False)

        # Whether to bundle binaries
        self.bundle = args.get("bundle", False)
        if self.bundle:
            _LOGGER.info("Bundling of binary dependencies is activated")

        # Whether to create redistributable bundles
        self.redistributable = args.get("redistributable", False)
        if self.redistributable:
            self.bundle = True
            _LOGGER.info("Redistributable bundling of binary dependencies is activated")

        self.compilation_database_file = self.build_directory / "compile_commands.json"
        self.compilation_database = []
        if self.compilation_database_file.exists():
            self.compilation_database = json.loads(
                self.compilation_database_file.read_text()
            )
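Since the constructor reads everything through args.get, a plain dictionary is enough to drive it. A hypothetical invocation (the class name Environment is assumed from context):

environment = Environment({
    "toolchain": "my-toolchain.py",    # hypothetical path to a toolchain file
    "build_type": _BuildType.Default,  # also the fallback when the key is absent
    "force_build": True,
    "bundle": False,
})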
Example #4
def _rmdir(directory):
    """Function modified from one copied from 'mitch' on stackoverflow
       https://stackoverflow.com/questions/13118029/deleting-folders-in-python-recursively
    """
    directory = _Path(directory)

    # first, check for removing important directories such as $HOME or root
    if directory == _Path.home():
        raise FileExistsError(f"We WILL NOT remove your "
                              f"home directory ${directory}")

    if directory == _Path("/"):
        raise FileExistsError(f"We WILL NOT remove the root directory "
                              f"{directory}")

    # also guard against the directory that contains $HOME (e.g. /home)
    if directory == _Path.home().parent:
        raise FileExistsError(f"We WILL NOT remove the users/home "
                              f"directory {directory}")

    if not directory.is_dir():
        directory.unlink()
        return

    from .utils._console import Console

    for item in directory.iterdir():
        if item.is_dir():
            _rmdir(item)
        else:
            item.unlink()

    Console.print(f"removing directory {directory}", style="warning")
    directory.rmdir()
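A short illustration of the behaviour and the guard rails (paths hypothetical):

_rmdir('build/tmp')     # removes the tree recursively, printing each directory removed
_rmdir(_Path.home())    # raises FileExistsError: the home directory is protected
_rmdir('/')             # raises FileExistsError: the root directory is protected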
Example #5
    def bundle(self):
        self.unsuccessful_bundle = False

        ### Gather
        self_bundle_files = [self.outfile]
        if self._environment.toolchain.platform == "windows":
            self_bundle_files.append(_Path(str(self.outfile)[:-3] + "exp"))
            self_bundle_files.append(_Path(str(self.outfile)[:-3] + "lib"))

        bundle_files = []
        for dependency in self.dependencies + self.public_dependencies:
            bundle_files += dependency.bundle()

        ### Copy
        for bundle_file in bundle_files:
            try:
                _shutil.copy(bundle_file, self.output_folder)
            except OSError as error:
                # _shutil.copy raises OSError on failure, not CalledProcessError
                self.unsuccessful_bundle = True
                self.bundle_report = str(error)

        # Catch bundling errors
        if self.unsuccessful_bundle:
            raise _BundleError("Bundling was unsuccessful",
                               {self.identifier: self.bundle_report})

        return self_bundle_files + bundle_files
Example #6
def _guess_tz_dir() -> str:
    """
    :return: path to OS timezone database info
    """
    tz_dir = get_env_setting("PYTZ_TZDATADIR", None)
    if tz_dir:
        return tz_dir

    _candidate_tz_dirs = [
        # modern linux, bsd, osx
        "/usr/share/zoneinfo/",
        # modern aix, solaris, hpux
        "/usr/share/lib/zoneinfo/",
        # libc4, glibc2
        "/usr/lib/zoneinfo/",
        # explicit override via the TZDIR environment variable
        get_env_setting("TZDIR", None),
    ]

    try:
        return next(p for p in filter(None, _candidate_tz_dirs)
                    if _Path(p).exists() and _Path(p).is_dir())
    except StopIteration:
        raise _ImproperlyConfigured(
            "Unable to guess OS timezone data folder. Please specify PYTZ_TZDATADIR."
        )
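Because PYTZ_TZDATADIR is consulted first, deployments with unusual layouts can pin the directory explicitly. A minimal sketch, assuming get_env_setting falls through to the process environment:

import os
os.environ['PYTZ_TZDATADIR'] = '/opt/tzdata'  # hypothetical custom location
assert _guess_tz_dir() == '/opt/tzdata'       # short-circuits the candidate scan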
Example #7
def path_transform(fname, destdir, ext=None):
    """Rename a file path to a different directory.

    Given a filename, and a destination directory, return a new path that
    "moves" the file to the destination directory.  Optionally, if an extension
    is provided, the extension of the filename is also transformed.

    >>> path_transform('/path/to/input/sample.bam', '/path/to/output')
    '/path/to/output/sample.bam'
    >>> path_transform('/path/to/input/sample.bam', '/path/to/output', 'vcf')
    '/path/to/output/sample.vcf'

    """
    try:
        fname, destdir = _Path(fname), _Path(destdir)
    except TypeError as err:
        _logger.error(f"Undefined argument to {_inspect.stack()[0].function}: "
                      f"{repr(fname)}, {repr(destdir)}\n{err}")
        # # alternate implementation: pass on undefined to the next step
        # for i in [fname, destdir]:
        #     if isinstance(i, Undefined):
        #         return i
        return ""

    if ext is None or isinstance(ext, Undefined):
        res = destdir / fname.name
        if isinstance(ext, Undefined):
            _logger.warning(f"Ignoring undefined argument to "
                            f"{_inspect.stack()[0].function}: 'ext'")
    else:
        res = destdir / (fname.stem + f".{ext}")

    return str(res)
Example #8
    def _set_file(self, file):
        """Set new file name.

        1. Define the full name. If `file` is relative, join it with
           `self._path`.
        2. Try to set `self._file` relative to `self._path`. If that is not
           possible, then: if not `self._fileonly`, define a new `self._path`,
           else raise an exception.
        """
        f = _Path(file)
        p = _Path(self._path)
        if f.is_absolute():
            fullname = f
        else:
            fullname = p.joinpath(file)
        try:
            _file = fullname.relative_to(p).as_posix()
        except ValueError:
            # relative_to raises ValueError when fullname lies outside p
            if not self._fileonly:
                p = f.parent
                _file = f.name
            else:
                msg = 'File name must be relative to {}'
                raise ValueError(msg.format(p.as_posix()))
        self._fullname = fullname
        self._path = p.as_posix()
        self._file = _file
        self._value = {'path': self._path, 'file': self._file}
Example #9
def _coerce_path(arg, *args):
    assert arg is not None
    if args:  # *args is always a tuple; truthiness covers the empty case
        return _Path(str(arg), *[str(a) for a in args])
    else:
        if not isinstance(arg, _Path):
            arg = _Path(str(arg))
        return arg
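For illustration:

_coerce_path('data', 'raw', 'run1.csv')  # -> _Path('data/raw/run1.csv')
_coerce_path(_Path('data'))              # already a _Path: returned as-is
_coerce_path('data')                     # str coerced to _Path('data')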
Example #10
def download_most_recent(dest,
                         f_type,
                         ftps=None,
                         ac='any',
                         dwn_src='cddis',
                         f_dict_out=False,
                         gpswkD_out=False,
                         ftps_out=False):
    '''
    Download the most recent version of a product file
    '''
    # Normalise f_type to a list
    if isinstance(f_type, list):
        f_types = f_type
    else:
        f_types = [f_type]

    # Create directory if doesn't exist:
    if not _Path(dest).is_dir():
        _Path(dest).mkdir(parents=True)

    # Create list to hold filenames that will be downloaded:
    if f_dict_out:
        f_dict = {f_typ: [] for f_typ in f_types}
    if gpswkD_out:
        gpswk_dict = {f_typ + '_gpswkD': [] for f_typ in f_types}
    # Connect to ftps if not already:
    if not ftps:
        # Connect to chosen server
        if dwn_src == 'cddis':
            ftps = connect_cddis()

            for f_typ in f_types:
                logging.info(f'\nSearching for most recent {ac} {f_typ}...\n')

                dt = (_np.datetime64('today') - 1).astype(_datetime)
                mr_file, ftps, c_gpswk = find_mr_file(dt, f_typ, ac, ftps)
                check_n_download(mr_file, dwndir=dest, ftps=ftps, uncomp=True)
                ftps.cwd('/')
                if f_dict_out:
                    f_uncomp = gen_uncomp_filename(mr_file)
                    if f_uncomp not in f_dict[f_typ]:
                        f_dict[f_typ].append(f_uncomp)
                c_gpswkD = mr_file[3:8]
                if gpswkD_out:
                    gpswk_dict[f_typ + '_gpswkD'].append(c_gpswkD)

            ret_vars = []
            if f_dict_out:
                ret_vars.append(f_dict)
            if gpswkD_out:
                ret_vars.append(gpswk_dict)
            if ftps_out:
                ret_vars.append(ftps)

            return ret_vars
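A hypothetical call asking for the latest IGS sp3 and clk products, collecting both the filenames and the GPS week/day; this requires network access to CDDIS and the surrounding helpers such as find_mr_file and check_n_download:

f_dict, gpswk_dict = download_most_recent(
    dest='products', f_type=['sp3', 'clk'], ac='igs',
    f_dict_out=True, gpswkD_out=True)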
Example #11
def _get_files_in_patterns(patterns, exclude_patterns=None, recursive=True):
    exclude_patterns = exclude_patterns or []  # avoid a mutable default argument
    included = [
        _Path(f) for pattern in patterns
        for f in _iglob(str(pattern), recursive=recursive)
        if _Path(f).is_file()
    ]
    excluded = [
        _Path(f) for pattern in exclude_patterns
        for f in _iglob(str(pattern), recursive=recursive)
        if _Path(f).is_file()
    ]
    return list(f.resolve() for f in (set(included) - set(excluded)))
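For illustration, collecting C++ sources while excluding tests (patterns hypothetical):

sources = _get_files_in_patterns(
    ['src/**/*.cpp'],                        # include every C++ source under src/
    exclude_patterns=['src/**/test_*.cpp'])  # but drop the test files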
Example #12
    def test_toolchain_emscripten(self):
        clang_build_try_except([
            "-d",
            "test/toolchains/emscripten",
            "--toolchain",
            "test/toolchains/emscripten/emscripten-toolchain.py",
            "-V",
        ])
        self.assertTrue(
            (_Path("build") / "default" / "lib" / "libbackend.js").exists())
        self.assertTrue(
            (_Path("build") / "default" / "lib" / "libbackend.wasm").exists())
Example #13
def create_topography_file(fpath, axs, elevation, options=None, **kwargs):
    """A helper to create a topography DEM file with NetCDF CF convention.

    The key of the elevation is fixed to `elevation` for convenience. By default, the spatial axes
    `x` and `y` use EPSG 3857 system. All length units are in meters (i.e., `m`).

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    axs : a list/tuple of nplike.ndarray
        The coordinates of the gridlines in x (west-east) and y (south-north) direction.
    elevation : nplike.ndarray
        The elevation data with shape (ny, nx)
    options : dict or None
        To overwrite the default attribute values of `x`, `y`, `elevation`, and `root`.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()
    _options = {"elevation": {"units": "m"}}
    _options.update({} if options is None else options)

    with _Dataset(fpath, "w", **kwargs) as dset:
        _write_to_dataset(dset,
                          axs, {"elevation": elevation},
                          options=_options)
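A hypothetical call creating a small flat DEM, assuming numpy as the nplike backend and netCDF4 installed:

import numpy
x = numpy.linspace(0., 1000., 101)    # 101 gridlines west-east
y = numpy.linspace(0., 500., 51)      # 51 gridlines south-north
elevation = numpy.zeros((51, 101))    # shape (ny, nx), all at zero elevation
create_topography_file('topo.nc', (x, y), elevation)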
Example #14
def _update_doc(doc: str) -> str:
    """Add information to doc-string

    Args:
        doc:  The doc-string to update.

    Returns:
        The updated doc-string.
    """
    # Subpackages
    subpackage_paths = _Path(__file__).parent.iterdir()
    subpackage_list = [
        p.name for p in subpackage_paths
        if p.is_dir() and not p.name.startswith("_")
    ]
    subpackages = "\n".join(f"+ {p}" for p in subpackage_list)

    # Maintainers
    maintainer_list = [
        f"+ {a.name} <{a.email}>" for a in _AUTHORS
        if a.start < _date.today() < a.end
    ]
    maintainers = "\n".join(maintainer_list)

    # Add to doc-string
    return doc.format(subpackages=subpackages, maintainers=maintainers)
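The function assumes the incoming doc-string carries str.format placeholders. A minimal sketch (run from inside the package so that _Path(__file__) and _AUTHORS resolve):

doc_template = """My package.

Subpackages:
{subpackages}

Maintainers:
{maintainers}
"""
print(_update_doc(doc_template))  # placeholders replaced with the generated lists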
Example #15
def create_dir(dir):
    debug(2, "creating directory %s" % dir)
    try:
        path = _Path(dir)
        path.mkdir(parents=True)
    except FileExistsError as oserror:
        # mkdir(parents=True) raises FileExistsError when the target exists
        raise OSError("Error: directory %s already exists." % oserror.filename)
Example #16
def log(message='', prefix=''):
    """
    Log entries with prefixes into a log file.

    Parameters:
        `message` - The message to be logged.
        `prefix` - The prefix before the message, and after the timestamp.
    """
    timestamp = f'[{_datetime.now().astimezone().strftime(timestamp_format)}]' if enable_timestamp else ''
    if _os.path.exists(log_path):
        message = _remove_ansi(str(message))
        prefix = _remove_ansi(str(prefix))
        if prefix != '' and prefix[-1] != ' ':
            prefix = prefix + ' '
        with open(log_path, 'a') as log_file:
            log_file.write(f'{timestamp} {prefix}{message}\n')

    elif log_level != 0:
        _os.makedirs(_Path(log_path).parent, exist_ok=True)
        with open(log_path, 'w') as log_file:
            log_file.write(
                f"{timestamp} [ERROR] Log file missing or inaccessible. Creating a new one.\n"
            )

        log(message=message, prefix=prefix)
Example #17
def create_soln_snapshot_file(fpath, grid, soln, **kwargs):
    """Create a NetCDF file with a single snapshot of solutions.

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    grid : torchswe.mpi.data.Gridlines
        The Gridlines instance corresponds to the solutions.
    soln : torchswe.utils.data.WHUHVModel or torchswe.utils.data.HUVModel
        The snapshot of the solution.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()

    try:
        data = {k: soln[k] for k in ["w", "hu", "hv"]}
        options = {"w": {"units": "m"}, "hu": {"units": "m2 s-1"}, "hv": {"units": "m2 s-1"}}
    except AttributeError as err:
        if "has no attribute \'w\'" in str(err):  # a HUVModel
            data = {k: soln[k] for k in ["h", "u", "v"]}
            options = {"h": {"units": "m"}, "u": {"units": "m s-1"}, "v": {"units": "m s-1"}}
        else:
            raise

    with _Dataset(fpath, "w", parallel=True, comm=grid.comm, **kwargs) as dset:
        _write_to_dataset(
            dset, [grid.x.cntr, grid.y.cntr], data, [grid.gnx, grid.gny],
            [grid.ibg, grid.ied, grid.jbg, grid.jed], (grid.gxbg, grid.gyed),
            ((grid.gxed-grid.gxbg)/grid.gnx, (grid.gyed-grid.gybg)/grid.gny), options)
        dset.sync()
Example #18
def importConfigFile(file: str) -> None:
    """
    Read a 'config_admin.json'-style file to retrieve the information used by this module.
    Arguments:
        file: REQUIRED : file name (if in the same folder), or the path to the file including its name if it is in another folder.


    Example of file value.
    "config.json"
    "./config.json"
    """
    global _org_id
    global _api_key
    global _tech_id
    global _pathToKey
    global _secret
    global _endpoint
    global _header
    if file.startswith('/'):
        file = "." + file
    with open(_Path(file), 'r') as cfg_file:  # avoid shadowing the `file` argument
        f = _json.load(cfg_file)
        _org_id = f['org_id']
        _api_key = f['api_key']
        _header["X-Api-Key"] = f['api_key']
        _tech_id = f['tech_id']
        _secret = f['secret']
        _pathToKey = f['pathToKey']
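Typical usage, assuming a config_admin.json-style file with the documented keys:

importConfigFile('config.json')          # file next to the caller
importConfigFile('./conf/config.json')   # or a relative path to it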
Example #19
    def set_paths(self, **kwargs):
        """
        Set paths for data input and output folders.

        Paths with `None` value remain unchanged.

        Parameters
        ----------
        data: str
            Input data (measured strain, intensity, etc.).
        tables: str
            Other auxiliary input such as lookup tables or material data.
        instruments: str
            Instrument configuration files.
        output: str
            Output folder.

        If the given path is a subdirectory of the work path, then it is
        saved as relative.
        """
        for key in kwargs:
            if key == 'work':
                pass
            elif key in self._path_keys and kwargs[key] is not None:
                p = _Path(kwargs[key])
                # make absolute paths relative to work if possible
                if p.is_absolute():
                    try:
                        p = p.relative_to(self._paths['work'])
                    except ValueError:
                        # not under the work path: keep it absolute
                        pass
                self._paths[key] = p
Example #20
def _select_files(path, text):
    files_in_dir = [str(f) for f in _Path(path).glob('**/*')
                    if not _os.path.isdir(str(f))]
    selected_files = []

    print('#####  {}  #####'.format(text))

    for i, f in enumerate(files_in_dir):
        print('-    {}. {}'.format(i, f))

    while True:
        number = get_input('Pick number:  ')

        if not number:
            break

        try:
            choice = int(number)
            filename = files_in_dir[choice]
            selected_files.append(filename)
        except (ValueError, IndexError):
            pass
        print('Selected files: {}'.format([_os.path.basename(f) for f in set(selected_files)]))

    return selected_files
Example #21
def write_soln_to_file(fpath, soln, time, tidx, ngh=0, **kwargs):
    """Write a solution snapshot to an existing NetCDF file.

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    soln : torchswe.utils.data.WHUHVModel or torchswe.utils.data.HUVModel
        The States instance containing solutions.
    time : float
        The simulation time of this snapshot.
    tidx : int
        The index of the snapshot time in the temporal axis.
    ngh : int
        The number of ghost-cell layers outside each boundary.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()

    # determine if it's a WHUHVModel or HUVModel
    if hasattr(soln, "w"):
        keys = ["w", "hu", "hv"]
    else:
        keys = ["h", "u", "v"]

    if ngh == 0:
        data = {k: soln[k] for k in keys}
    else:
        slc = slice(ngh, -ngh)  # alias for convenience; non-ghost domain
        data = {k: soln[k][slc, slc] for k in keys}

    with _Dataset(fpath, "a", **kwargs) as dset:
        _add_time_data_to_dataset(dset, data, time, tidx)
Example #22
    def find_packages(self):
        """Return a list of all Python packages found within the self.src_dir directory

        Package directories must meet the following conditions:

        * Must reside inside the self.src_dir directory, counting itself
        * Must have __init__.py
        * Directories without any *.py files count as namespace packages IF
          one of their descendants contains __init__.py (NOTE: I'm not yet
          clear whether we can have a namespace package as a subpackage of
          another package)
        """

        # scan src_dir for __init__.py
        root = _Path(self.src_dir)
        reg_paths = set(
            [d.parent.relative_to(root) for d in root.rglob("__init__.py")])

        # add all namespace packages that houses regular packages
        pkg_paths = set(reg_paths)
        for dir in reg_paths:
            pkg_paths |= set(dir.parents)

        # convert path to str
        pkg_dirs = [path.as_posix() for path in pkg_paths]

        # convert dir to package notation
        return [_dir_to_pkg(self.package_name, dir) for dir in pkg_dirs]
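For a hypothetical layout under src/ (package name assumed to be 'mypkg'), the resolution would be:

# src/
#   __init__.py        -> regular package 'mypkg'
#   ns/                -> no __init__.py: namespace package 'mypkg.ns'
#     sub/
#       __init__.py    -> regular package 'mypkg.ns.sub'
# find_packages() -> ['mypkg', 'mypkg.ns', 'mypkg.ns.sub']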
Example #23
def _check_remove(outdir, prompt):
    """Function to check if the user wants to remove the directory,
       giving them the option to continue, quit or remove all files
    """
    if prompt is None:
        raise FileExistsError(f"Cannot continue as {outdir} already exists!")

    from .utils._console import Console
    Console.warning(f"{outdir} already exists.")
    y = prompt("Do you want to remove it? (y/n) ")

    y = y.strip().lower()

    if len(y) > 0 and y == "y":
        Console.print(f"Removing all files in {outdir}", style="warning")
        _rmdir(_Path(outdir))
        return

    Console.warning(f"Continuing with this run will mix its output with "
                    f"the files already in {outdir}.")

    y = prompt("Do you want to continue with this run? (y/n) ")

    y = y.strip().lower()

    if len(y) == 0 or y != "y":
        Console.error("Exiting the program as we cannot run any more.")
        import sys
        sys.exit(-1)
Example #24
    def get_source_files(self):
        """Get all the source files"""
        return [
            path.as_posix()
            for path in _Path(self.src_dir).rglob("*") if not path.is_dir()
        ] + [(path / "CMakeLists.txt").as_posix()
             for path in _PurePath(self.src_dir).parents]
Example #26
    def get_package_data(self, prefix=None):
        """get setup package_data dict (expected to run only post-install)"""

        # glob all the files in dist_dir then filter out py & ext files
        root = _Path(self._get_dist_dir(prefix))
        excludes = [".py", sysconfig.get_config_var("EXT_SUFFIX")]
        files = [
            f for f in root.rglob("*")
            if f.is_file() and not any(f.name.endswith(e) for e in excludes)
        ]

        # find the parent package of each file and add to the package_data
        package_data = {}
        for f in files:
            pkg_dir = next(d for d in f.parents
                           if (d / "__init__.py").is_file())
            pkg_name = _dir_to_pkg(self.package_name,
                                   pkg_dir.relative_to(root).as_posix())
            pkg_path = f.relative_to(pkg_dir).as_posix()
            if pkg_name in package_data:
                package_data[pkg_name].append(pkg_path)
            else:
                package_data[pkg_name] = [pkg_path]

        return package_data
Example #27
def download_distribution(additional_distribution: str,
                          destination: _os.PathLike):
    """
    Downloads a remote code distribution and overwrites any local files.
    :param Text additional_distribution: remote path/URI of the distribution archive
    :param _os.PathLike destination: local directory into which to download and extract
    """
    _data_proxy.Data.get_data(additional_distribution, destination)
    tarfile_name = _os.path.basename(additional_distribution)
    file_suffix = _Path(tarfile_name).suffixes
    if len(file_suffix) != 2 or file_suffix[0] != ".tar" or file_suffix[1] != ".gz":
        raise ValueError(
            "Unrecognized additional distribution format for {}".format(
                additional_distribution))

    # This will overwrite the existing user flyte workflow code in the current working code dir.
    result = _subprocess.run(
        [
            "tar", "-xvf",
            _os.path.join(destination, tarfile_name), "-C", destination
        ],
        stdout=_subprocess.PIPE,
    )
    result.check_returncode()
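A hypothetical invocation fetching a packaged workflow into the working directory (the remote URI is illustrative):

download_distribution(
    's3://my-bucket/distributions/workflow.tar.gz',  # hypothetical remote archive
    _os.getcwd())                                    # extracted over the current directory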
Example #28
    def __init__(self, root, link_finder=None):

        # assert isinstance(link_finder, dict)
        if not isinstance(root, _Path):
            root = _Path(root)
        self._root = root  # `root` was already coerced to _Path above
        self._link_finder = link_finder
Example #29
class Path(type(_Path())):
    """
    Extends Python's `Path object`_ interface.

    .. _Path object: https://docs.python.org/3/library/pathlib.html
    """
    def str(self):
        """
        Represent path as string.

        Alias to ``os.fspath(self)``.
        
        Returns
        -------
        str
           ``os.fspath(self)``.
        """
        return os.fspath(self)

    def myparents(self):
        """
        The resolved parent folder of the path.

        Alias to ``pathlib.Path.resolve().parents[0]``.

        Returns
        -------
        pathlib.Path
            Parent path. The file or folder name itself is excluded.
        """
        return self.resolve().parents[0]
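For illustration (the resolved parent shown is hypothetical):

p = Path('data/raw/run1.csv')
p.str()        # 'data/raw/run1.csv', same as os.fspath(p)
p.myparents()  # resolved parent directory, e.g. Path('/work/data/raw')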
Example #30
    def _find(self, executable):
        """Find path of executable.

        Parameters
        ----------
        executable : str
            The executable for which to search the location.

        Returns
        -------
        pathlib.Path
            Path where the executable was found.

        Raises
        ------
        RuntimeError
            If the executable was not found in the systems default
            look-up places.

        """
        try:
            # `path` belongs to shutil.which (it overrides the PATH lookup);
            # passed to _Path it would raise TypeError unconditionally.
            return _Path(
                _shutil.which(executable, path="/usr/local/emsdk/emscripten/1.38.29/")
            )
        except TypeError:
            error_message = f"Couldn't find {executable} executable"
            _LOGGER.error(error_message)
            raise RuntimeError(error_message)
Example #31
    def xml_of_run(self,
                   run,
                   xmldir=None,
                   zip=False,
                   return_path=False) -> str:
        """
        Return xml text of query for datafiles in run
        """
        import gzip

        xmldir = _Path(xmldir or 'xmls')
        xmldir.mkdir(exist_ok=True, parents=True)

        # check cache locations
        xml_path = self.xmlfilename_for_run(run, xmldir)
        if xml_path.exists():
            # honour return_path on cache hits as well
            return xml_path if return_path else xml_path.read_text()

        zxml_path = xml_path.with_suffix(".xml.gz")
        if zxml_path.exists():
            return zxml_path if return_path else gzip.decompress(zxml_path.read_bytes()).decode()

        # not in cache - fetch from server
        xml_data = self.fetch_xml_of_run(run)

        if zip:
            with gzip.open(zxml_path, "wb") as f:
                f.write(xml_data)
        else:
            xml_path.write_bytes(xml_data)

        if return_path:
            return zxml_path if zip else xml_path

        return xml_data.decode()
Example #32
            def _delegate(self, *args, **kwargs):
                from .compat import Str
                from inspect import stack

                name = stack()[1].function
                subject = _Path(self._filename)
                new_obj = getattr(subject, name)(*args, **kwargs)
                return type(self)(Str(new_obj))
Example #33
File: path.py Project: marrow/mongo
	def to_native(self, obj, name, value):  # pylint:disable=unused-argument
		return _Path(value)
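In marrow/mongo, to_native converts a stored value when a document field is read; in isolation it simply wraps the value in a Path (the field and document instances here are hypothetical):

field.to_native(doc, 'path', '/srv/uploads/a.png')  # -> _Path('/srv/uploads/a.png')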
Example #34
# USAGE
# python align_faces.py --shape-predictor shape_predictor_68_face_landmarks.dat --image images/example_01.jpg
# Author credit to Adrian at PyImageSearch
# import the necessary packages
from imutils.face_utils import FaceAligner
from imutils.face_utils import rect_to_bb
import argparse
import imutils
import dlib
import cv2
from pathlib import Path as _Path
import os as _os

_path = _Path(_os.path.dirname(_os.path.abspath(__file__)))

def ff(img):
    """
    :param img:
    :return: faceAligned[0] the aligned face version of the original input image
    """
    # initialize dlib's face detector (HOG-based) and then create
    # the facial landmark predictor and the face aligner
    detector = dlib.get_frontal_face_detector()
    predictor = dlib.shape_predictor(str(_path) + "/shape_predictor_68_face_landmarks.dat")
    fa = FaceAligner(predictor, desiredFaceWidth=256)

    # load the input image, resize it, and convert it to grayscale
    image = img
    image = imutils.resize(image, width=800)
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
Example #35
from .database import Database
from .detection import detect
from .cameras import imgarray
from .profileClass import ProfileClass
from .lols import sym
import matplotlib.pyplot as plt
import matplotlib.patches as patches # for the rectangles we draw in our recognize function
import re
import skimage.io as io # for io.imread()
import sys # for arguments on command line
from pathlib import Path as _Path # because this is a module,
import cv2 # for cv2.imread
import os as _os # because package needs path
from .align_faces import ff # face aligner

_path = _Path(_os.path.dirname(_os.path.abspath(__file__))) # the path of this directory

def main():
    """
    Main function that calls all the other functions and classes when the user inputs commands via terminal
    This program is meant to recognize multiple faces but has some added features such as a symmetry test

    Refer to the conditional statements below to see which commands are acceptable.
    """
    db = Database("profiles.pkl") # loading database

    def loadimgs(directory, showImgs=False):
        folders = [entry for entry in _os.scandir(directory)]  # os is imported as _os
        for i, folder in enumerate(folders):
            print(folder.name, flush=True)
            foldPath = "./images/{}".format(folder.name)