Example #1
def prep_filesystem(fs_name):
    if fs_name == 'OSFS':
        with make_temp_dir() as tmp:
            local = OSFS(tmp)
            yield local
            local.close()
    elif fs_name == 'S3FS':
        m = moto.mock_s3()
        m.start()
        try:
            s3 = S3FS('test-bucket',
                      aws_access_key='MY_KEY',
                      aws_secret_key='MY_SECRET_KEY')
            yield s3
            s3.close()
        finally:
            m.stop()
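
The generator above reads like the body of a parametrized test fixture. Below is a minimal sketch of how it could be driven with pytest; the fixture and test names, and the use of makedir/isdir, are illustrative assumptions rather than part of the original code.

import pytest

@pytest.fixture(params=['OSFS', 'S3FS'])
def filesystem(request):
    # delegate to the generator so its cleanup code runs at teardown
    yield from prep_filesystem(request.param)

def test_makedir(filesystem):
    filesystem.makedir('data')
    assert filesystem.isdir('data')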
Example #2
def copy_wiki():
    ws = OSFS('wiki-small')
    for file in ws.walkfiles():
        print(file)
        copyfile('wiki-small%s' % file,
                 'htmls/%s' % file[file.rfind('/') + 1:])
    ws.close()
Example #3
def dir_sim(dirp, randombits, bits):
    """
     Finds the similarity between documents of the same extension
     contained in the given directory path.
     If we are using random bits i.e md5 length the helper is used.
     For each file contained in the directory:
     get its fingerprint and generate the comparison combinations
     then for each combination who's extension matches
     print the similarity between them
    """
    if bits is not None:
        print('using bit len of %d' % bits)
    dir = OSFS(dirp)
    file_fps = {}
    for fname in dir.listdir(files_only=True):
        file_fps[fname] = file_fingerprint(os.path.join(dirp, fname), randombits, bits)
    dir.close()
    # each two-item combination of (fname, fingerprint) pairs is wrapped in a dict;
    # unpacking such a dict below yields its two keys, i.e. the two file names
    combos = list(map(dict, combinations(file_fps.items(), 2)))
    if bits is None:
        dir_sim_rand_helper(file_fps, combos)
        return
    for k, v in combos:
        kext = os.path.splitext(k)[1]
        vext = os.path.splitext(v)[1]
        if kext == vext:
            print(k, v, cosine_sim(file_fps[k][:bits], file_fps[v][:bits]))
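
file_fingerprint, cosine_sim, and dir_sim_rand_helper are helpers defined elsewhere in the module. Purely as an illustration of the comparison step (not the project's actual implementation), a cosine similarity over two equal-length 0/1 fingerprints could look like this sketch:

import math

def cosine_sim(a, b):
    # Cosine similarity of two equal-length sequences of 0/1 bits
    # (works whether the bits arrive as ints or as '0'/'1' characters).
    a = [int(x) for x in a]
    b = [int(x) for x in b]
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = math.sqrt(sum(x * x for x in a))
    norm_b = math.sqrt(sum(y * y for y in b))
    return dot / (norm_a * norm_b) if norm_a and norm_b else 0.0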
Example #4
def sanitiz():
    base = 'band_to_tm'
    metal_btm = OSFS(base)
    for metal in metal_btm.walkfiles():
        p1 = metal.find('/')
        p2 = metal.find('.')
        b = metal[p1 + 1:p2]
        print(b)
        with open('%s%s' % ('band_to_tm', metal), 'r') as min:
            metal = json.load(min)
            info = metal['info']
            mem_point = []
            for i in info:
                for f, t in map(extract_binfo_date, i['active']):
                    mem_point.append({
                        "f": f.format('YYYYMMDDHHMMSS'),
                        "t": t.format('YYYYMMDDHHMMSS'),
                        "plays": i['plays'],
                        "member": i['member']
                    })
            mtm = metal['tm']
            metal_plot = {
                "member_points": mem_point,
                "mementos": mtm['mementos'],
                "first": mtm['first'],
                "last": mtm['last'],
                "timemap": mtm['self'],
                "timegate": mtm['timegate'],
                "original": mtm['original'],
            }
            with open('plot/%s.json' % b, 'w') as mout:
                json.dump(metal_plot, mout, indent=2)
            print('----------------------------------------')
    metal_btm.close()
Example #5
def pick_file_list():
    ws = OSFS('wiki-small')
    small_list = []
    small_set = set()
    for file in ws.walkfiles():
        small_list.append('wiki-small%s' % file)
        small_set.add(file[file.rfind('/') + 1:])
    dump_pickle((small_list, small_set), 'pickled/wsmall.pickle')
    ws.close()
Example #6
def do_combine():
    with open('band_timeline.json', 'r') as bndin:
        bands = json.load(bndin)
        print(bands)

    mtml = OSFS('timemaps/json')
    for metal_tml in mtml.walkfiles():
        p1 = metal_tml.find('/')
        p2 = metal_tml.find('.')
        b = metal_tml[p1 + 1:p2]
        with open('timemaps/json%s' % metal_tml, 'r') as bndin:
            band_tm = json.load(bndin)
            with open('band_to_tm/%s.json' % b, 'w+') as btm:
                json.dump({"info": bands[b], "tm": band_tm}, btm, indent=2)
            print(band_tm)
    mtml.close()
Example #7
class LocalStorageHook(FileSystemHookInterface):
    conn_type = 'local_storage'

    def __init__(self, conn_params: ConnectionParams):
        self.conn_params = conn_params

    def __enter__(self) -> OSFS:
        self.base_path = self.conn_params.extra.get('base_path', '')
        self.conn = OSFS(root_path=self.base_path,
                         create=self.conn_params.extra.get('create', False))
        return self.conn

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.base_path = None
        self.conn.close()
        self.conn = None
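
A minimal sketch of using the hook as a context manager follows; how ConnectionParams is constructed is an assumption here, since the hook only relies on it exposing an extra mapping.

# Hypothetical wiring; ConnectionParams is assumed to accept an `extra` dict.
params = ConnectionParams(extra={'base_path': '/tmp/local_storage', 'create': True})

with LocalStorageHook(params) as storage:  # storage is the underlying OSFS
    storage.makedir('incoming', recreate=True)
    print(storage.listdir('/'))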
Example #8
def pick_file_list():
    wl = OSFS('wiki-large')
    large_list = []
    large_set = set()
    for file in wl.walkfiles():
        large_list.append('wiki-large%s' % file)
        large_set.add(file[file.rfind('/') + 1:])
    dump_pickle((large_list, large_set), 'pickled/wiki-large2.pickle')
    wl.close()

    ws = OSFS('wiki-small')
    small_list = []
    small_set = set()
    for file in ws.walkfiles():
        small_list.append('wiki-small%s' % file)
        small_set.add(file[file.rfind('/') + 1:])
    dump_pickle((small_list, small_set), 'pickled/wiki-small2.pickle')
    ws.close()
Example #9
def install_encoded(device_class, version, firmware_b64, activate_firmware=True, firmware_path=None):
    """Install firmware from a b64 encoded zip file"""
    # TODO:  implement this in a less memory hungry way
    # decode from b64
    firmware_bin = base64.b64decode(firmware_b64)
    # Make a file-like object
    firmware_file = BytesIO(firmware_bin)
    # Open zip
    firmware_fs = ZipFS(firmware_file)
    # Open firmware dir
    dst_fs = OSFS(firmware_path or constants.FIRMWARE_PATH, create=True, dir_mode=0o755)
    # Install
    install_path = install(device_class, version, firmware_fs, dst_fs)
    # Move symlink to active firmware
    if activate_firmware:
        activate(device_class, version, dst_fs, fw_path=firmware_path)

    # Clean up any temporary files
    firmware_fs.close()
    dst_fs.close()

    # Return install_path
    return install_path
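
A minimal sketch of producing the base64-encoded zip payload that install_encoded expects; the archive contents, device class, version, and target path are illustrative assumptions.

import base64
import io
import zipfile

# Build a zip archive in memory and base64-encode it (illustrative payload).
buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as archive:
    archive.writestr('firmware.bin', b'\x00\x01\x02\x03')
firmware_b64 = base64.b64encode(buf.getvalue())

install_path = install_encoded('sensor-board', '1.0.0', firmware_b64,
                               activate_firmware=False,
                               firmware_path='/tmp/firmware')
print(install_path)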
Example #10
def dl_pages():
    base = 'band_pages'
    bandpages = OSFS(base)
    c = 0
    with requests.session() as session:
        with open('bandpages_original.csv', 'r') as bndin:
            for row in DictReader(bndin):
                band = row['band']
                wlink = row['wlink']
                malink = row['malink']
                if not bandpages.isdir(band):
                    bandpages.makedir(band)
                session.headers.update({'User-Agent': useragents[c]})
                request = session.get(wlink)
                write_page(band, 'wiki', request)
                if malink != 'none':
                    request = session.get(malink)
                    write_page(band, 'ma', request)
                c += 1
                if c == 3:
                    c = 0

    bandpages.close()
Example #11
    def reduce_path(carry_fs, value):
        """ Reduce path by opening or creating each segment and returning the last. 

        Args:
            carry_fs (fs): Opened filesystem
            value (str): Next path segment

        Returns:
            fs
        """
        if not isinstance(carry_fs, FS):
            carry_fs = OSFS(carry_fs + separator)

        if not carry_fs.isdir(value):
            carry_fs.makedirs(value)

        # open next carry_fs
        next_carry_fs = OSFS(carry_fs.getsyspath(value))

        # close carry_fs
        carry_fs.close()

        return next_carry_fs
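
A minimal sketch of folding reduce_path over a list of path segments with functools.reduce; `separator` is assumed to be the module-level path separator used by the function above, and the base directory here is a fresh temporary directory.

import os
import tempfile
from functools import reduce

separator = os.sep
base = tempfile.mkdtemp()

leaf_fs = reduce(reduce_path, ['var', 'data', 'cache'], base)
print(leaf_fs.getsyspath('.'))  # .../var/data/cache
leaf_fs.close()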
Example #12
def install_encoded(device_class, version, firmware_b64, activate_firmware=True):
    """Install firmware from a b64 encoded zip file"""
    # TODO:  implement this in a less memory hungry way
    # decode from b64
    firmware_bin = base64.b64decode(firmware_b64)
    # Make a file-like object
    firmware_file = StringIO(firmware_bin)
    # Open zip
    firmware_fs = ZipFS(firmware_file)
    # Open firmware dir
    dst_fs = OSFS(constants.FIRMWARE_PATH, create=True)
    # Install
    install_path = install(device_class, version, firmware_fs, dst_fs)
    # Move symlink to active firmware
    if activate_firmware:
        activate(device_class, version, dst_fs)

    # Clean up any temporary files
    firmware_fs.close()
    dst_fs.close()

    # Return install_path
    return install_path
Example #13
class OdooAddonManager:
    """
    Class wrapping the OAM behaviour

    Attributes
    ----------
    install_dir: OSFS
        The installation directory
    src_cache: dict
        A dictionary containing, for each source type supporting cache (git), the temporary location of the previously
        downloaded sources
    odoo_version: str
        Version of Odoo using the addons
    desc_version: str
        Version of the description file used to log changes
    modules_to_install: dict
        Modules to install as described in the YAML file
    verbose_level: str
        Level of details to print
    """

    install_dir: OSFS
    modules_to_install: Dict[str, Dict[str, Any]]
    src_cache: Dict[str, Dict[str, Any]]
    odoo_version: str
    desc_version: str = None
    verbose_level: str
    _tmp_dir: TempFS = None
    _hst_file: TextIOWrapper = None
    _chglog_file: TextIOWrapper = None

    def __init__(self,
                 description_file: str = None,
                 install_directory: str = ".",
                 verbose_level: str = VERBOSE_NONE):
        self.install_dir = OSFS(install_directory)
        self.verbose_level = verbose_level
        self.src_cache = {
            "git": {},
        }

        if description_file:
            with open(description_file, "r") as description_file:
                install_data = yaml.load(description_file, Loader=yaml.Loader)
                self.modules_to_install = install_data.get("modules", [])
                self.odoo_version = install_data.get("odoo_version")
                self.desc_version = install_data.get("version")

    def __del__(self):
        self.install_dir.close()
        if self._tmp_dir:
            self._tmp_dir.close()
        if self._hst_file:
            self._hst_file.close()
        if self._chglog_file:
            self._chglog_file.close()

    @property
    def tmp_dir(self) -> TempFS:
        """
        The temporary directory used to download modules before installing them if needed.
        """
        if not self._tmp_dir:
            self._tmp_dir = TempFS(TEMP_DIR_NAME)
        return self._tmp_dir

    @property
    def history_file(self) -> TextIOWrapper:
        """
        The history file where the operations performed in the installation directory are logged
        """
        if not self._hst_file:
            self._hst_file = open(
                self.install_dir.getsyspath(HISTORY_FILE_NAME), 'a+')
        return self._hst_file

    @property
    def changelog_file(self) -> TextIOWrapper:
        """
        The markdown changelog file listing changes in a human-readable format
        """
        if not self._chglog_file:
            self._chglog_file = open(
                self.install_dir.getsyspath(CHANGELOG_FILE_NAME), "a+")
            self._chglog_file.seek(0)
            if not self._chglog_file.read():
                self._chglog_file.write("# CHANGELOG")

        self._chglog_file.seek(0)

        return self._chglog_file

    def install_all(self, force: bool = False):
        """
        Install all modules described in the description file.
        :param force: whether to overwrite installed modules or not
        """
        installed_modules = []

        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(self.modules_to_install) as modules:
                for module in modules:
                    if self.install(module, force):
                        installed_modules.append(module)
        else:
            for module in self.modules_to_install:
                if self.install(module, force):
                    installed_modules.append(module)

        # Modules installed are removed from the list to avoid being processed twice e.g. in case of a refresh
        for module in installed_modules:
            self.modules_to_install.pop(module)

        click.echo("{} module(s) installed.".format(len(installed_modules)))

    def install(self, module_name: str, force: bool = False) -> bool:
        """
        Install a single module from its source.
        :param module_name: Name of the module
        :param force: Whether to overwrite the module if it is already installed
        :return: Whether the module has been installed or not
        """
        success = False
        self.pretty_print(module_name, "Installing...", level=VERBOSE_FULL)

        source = self.modules_to_install[module_name]
        origin_name = source.get("origin_name", module_name)
        installed_version = self.get_module_version(module_name,
                                                    self.install_dir)

        if force or not installed_version:
            try:
                source_fs = self.fetch_module_from_source(module_name)
                self.install_from_fs(origin_name,
                                     source_fs,
                                     output_name=module_name)
                version = self.get_module_version(module_name,
                                                  self.install_dir)
                self.log(module_name,
                         OPERATION_INSTALL,
                         force=force,
                         extra=version)
                if not force:
                    self.log_md(module_name,
                                OPERATION_INSTALL,
                                new_version=version)

                success = True
            except InvalidModuleError as err:
                self.pretty_print(module_name,
                                  err.message,
                                  status=LOG_STATUS_ERROR,
                                  level=VERBOSE_NONE)
            except pygit2.errors.GitError:
                self.pretty_print(
                    module_name,
                    "Installation failed - Could not fetch from Git repository.",
                    status=LOG_STATUS_ERROR,
                    level=VERBOSE_NONE)
            except Exception as e:
                self.pretty_print(module_name,
                                  "Installation failed ({})".format(
                                      type(e).__name__),
                                  status=LOG_STATUS_ERROR,
                                  level=VERBOSE_NONE)
        else:
            self.pretty_print(module_name,
                              "Already installed. Skipping installation.",
                              status=LOG_STATUS_WARNING,
                              level=VERBOSE_NORMAL)

        return success

    def update_all(self, force: bool = False):
        """
        Update all modules
        :param force: Whether to skip version check or not.
            If True, modules are just replaced no matter if they are being downgraded or installed for the first time.
        """
        updated_modules = []

        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(self.modules_to_install) as modules:
                for module in modules:
                    if self.update(module, force):
                        updated_modules.append(module)
        else:
            for module in self.modules_to_install:
                if self.update(module, force):
                    updated_modules.append(module)

        # Modules updated are removed from the list to avoid being processed twice in case of a refresh
        for module in updated_modules:
            self.modules_to_install.pop(module)

        click.echo("{} module(s) updated.".format(len(updated_modules)))

    def update(self, module_name: str, force: bool = False) -> bool:
        """
        Update a single module.
        :param module_name: Name of the module
        :param force: Whether to skip version check or not.
            If True, modules are just replaced no matter if they are being downgraded or installed for the first time.
        :return: Whether the module has been updated or not
        """
        success = False
        self.pretty_print(module_name, "Updating...", level=VERBOSE_FULL)

        installed_version = self.get_module_version(module_name,
                                                    self.install_dir)

        if force or installed_version:
            try:
                source_fs = self.fetch_module_from_source(module_name)
                origin_name = self.modules_to_install[module_name].get(
                    "origin_name", module_name)
                new_version = self.get_module_version(origin_name, source_fs)

                if force or version.parse(new_version) >= version.parse(
                        installed_version):
                    self.pretty_print(module_name,
                                      "Updating from {0} to {1}".format(
                                          installed_version, new_version),
                                      level=VERBOSE_FULL)
                    self.install_from_fs(origin_name,
                                         source_fs,
                                         output_name=module_name)
                    self.log(module_name,
                             OPERATION_UPDATE,
                             force=force,
                             extra="from {0} to {1}".format(
                                 installed_version, new_version))
                    if not force:
                        self.log_md(module_name, OPERATION_UPDATE,
                                    installed_version, new_version)

                    success = True
                else:
                    self.pretty_print(
                        module_name,
                        "Fetched version ({0}) is inferior to current version ({1}). Skipping update."
                        .format(new_version, installed_version),
                        status=LOG_STATUS_ERROR,
                        level=VERBOSE_NORMAL)
            except InvalidModuleError as err:
                self.pretty_print(module_name,
                                  err.message,
                                  status=LOG_STATUS_ERROR,
                                  level=VERBOSE_NONE)
            except pygit2.errors.GitError:
                self.pretty_print(
                    module_name,
                    "Update failed - Could not fetch from Git repository.",
                    status=LOG_STATUS_ERROR,
                    level=VERBOSE_NONE)
            except Exception as e:
                self.pretty_print(module_name,
                                  "Update failed ({})".format(
                                      type(e).__name__),
                                  status=LOG_STATUS_ERROR,
                                  level=VERBOSE_NONE)
        else:
            self.pretty_print(
                module_name,
                "Not installed. Skipping update.".format(module_name),
                status=LOG_STATUS_WARNING,
                level=VERBOSE_NORMAL)

        return success

    def uninstall_all(self, auto_confirm=False):
        """
        Uninstall all modules that are installed but not present in the description file.
        Ask confirmation to the user.
        :param auto_confirm: Do not ask the user to confirm if True
        """
        installed_modules = self.get_installed_modules()
        modules_to_uninstall = set(installed_modules.keys()) - set(
            self.modules_to_install.keys())

        if not auto_confirm:
            click.echo("The following modules will be removed:")
            for module in modules_to_uninstall:
                click.echo(module)
            click.confirm('Do you want to continue?', abort=True)

        count = 0

        if self.verbose_level == VERBOSE_NONE:
            with click.progressbar(modules_to_uninstall) as modules:
                for module in modules:
                    count += self.uninstall(module)
        else:
            for module in modules_to_uninstall:
                count += self.uninstall(module)

        click.echo("{} module(s) removed.".format(count))

    def uninstall(self, module_name: str) -> bool:
        """
        Uninstall a single module if it is installed.
        :param module_name: Name of the module
        :return: Whether the module has been uninstalled or not
        """
        success = False

        if module_name in self.install_dir.listdir("."):
            self.pretty_print(module_name,
                              "Uninstalling...",
                              level=VERBOSE_FULL)
            self.install_dir.removetree(module_name)
            success = True
            self.log(module_name, OPERATION_UNINSTALL)
            self.log_md(module_name, OPERATION_UNINSTALL)
            self.pretty_print(module_name,
                              "Uninstalled.",
                              status=LOG_STATUS_OK,
                              level=VERBOSE_NORMAL)
        else:
            self.pretty_print(module_name,
                              "Not installed. Skipping uninstall.",
                              status=LOG_STATUS_ERROR,
                              level=VERBOSE_NORMAL)

        return success

    def get_installed_modules(self) -> Dict[str, str]:
        """
        Scan installation directory to list currently installed modules
        :return: A dictionary of module names as keys and their currently installed version as values
        """
        modules = {}
        for module in self.install_dir.scandir("."):
            if module.is_dir and "__manifest__.py" in self.install_dir.listdir(
                    module.name):
                manifest_file = self.install_dir.getsyspath(
                    join(module.name, "__manifest__.py"))
                with open(manifest_file, "r") as manifest:
                    modules[module.name] = ast.literal_eval(
                        manifest.read())["version"]

        return modules

    @staticmethod
    def get_module_version(module_name: str, directory: FS) -> str:
        """
        Get the version of the module in the given directory
        :param module_name: name of the module
        :param directory: FS object pointing to the parent directory of the module
        :return: version of the module or None if it is not present in the directory
        """
        version = None
        if module_name in directory.listdir("."):
            manifest = directory.readtext(join(module_name, "__manifest__.py"))
            version = ast.literal_eval(manifest)["version"]

        return version

    def fetch_module_from_source(self, module_name: str) -> FS:
        """
        Download a module from its source if needed and return the directory where it is located.
        :param module_name: Name of the module
        :return: An FS object pointing to the module location
        """
        source = self.modules_to_install[module_name]
        source_fs: FS

        if source["source_type"] == SOURCE_LOCAL_DIR:
            source_fs = OSFS(source["path"])
        elif source["source_type"] == SOURCE_LOCAL_ZIP:
            source_fs = ZipFS(source["path"])
        elif source["source_type"] == SOURCE_GIT:
            source_fs = self.download_from_git(
                module_name, source["url"],
                source.get("branch", self.odoo_version),
                source.get("path", "."))

        return source_fs

    def download_from_git(self,
                          module_name: str,
                          url: str,
                          branch: str,
                          path: str = ".") -> OSFS:
        """
        Clone a git repository or find it in the source cache.
        :param module_name: name of the module being installed
        :param url: URL of the repository
        :param branch: branch of the desired module version
        :param path: path to the module inside the repository (default to '.')
        :return: an OSFS object pointing to the module location inside the repository
        """
        repo_dir_name = urlparse(url).path.replace("/", "_")

        if url in self.src_cache["git"]:
            self.pretty_print(module_name,
                              "Repository found in cache",
                              level=VERBOSE_FULL)
            repo = self.src_cache["git"][url]
            repo.checkout("refs/remotes/origin/{}".format(branch))
        else:
            self.pretty_print(module_name,
                              "Cloning repository",
                              level=VERBOSE_FULL)
            repo = pygit2.clone_repository(
                url,
                self.tmp_dir.getsyspath(repo_dir_name),
                checkout_branch=branch)

        self.src_cache["git"][url] = repo
        return OSFS(join(repo.workdir, path))

    def install_from_fs(self,
                        name: str,
                        source_fs: FS,
                        path: str = ".",
                        output_name: str = None):
        """
        Copy a module directory from where it is located to the installation directory.
        :param name: Name of the module
        :param source_fs: FS object pointing to the source location
        :param path: Path to the module directory from the source location root
        :param output_name: Name to give to the module's directory at installation
        """
        path_to_module = join(path, name)

        if name not in source_fs.listdir(path):
            raise InvalidModuleError(
                name,
                "Module directory not found - Given path should be the parent directory"
            )
        if "__manifest__.py" not in source_fs.listdir(path_to_module):
            raise InvalidModuleError(
                name,
                "Manifest not found - Given path should be the parent directory"
            )

        self.pretty_print(output_name,
                          "Copying from {}".format(
                              source_fs.desc(path_to_module)),
                          level=VERBOSE_FULL)
        copy_dir(source_fs, path_to_module, self.install_dir, output_name
                 or name)

        self.pretty_print(output_name,
                          "Installed and up to date.",
                          status=LOG_STATUS_OK,
                          level=VERBOSE_NORMAL)

    def log(self,
            module_name: str,
            operation: str,
            force=False,
            extra: str = ""):
        """
        Log an operation in the history file.
        :param module_name: Name of the module
        :param operation: Type of the operation
        :param force: Whether the operation was performed with the force option or not
        :param extra: Extra information to log
        """
        log_line = "{0} - {1}{2}: {3} {4}\n".format(
            datetime.now().replace(microsecond=0), operation,
            " (forced)" if force else "", module_name, extra)
        self.history_file.write(log_line)

    def log_md(self,
               module: str,
               operation: str,
               old_version: str = None,
               new_version: str = None):
        """
        Log an operation in the markdown log file in human-readable format.
        :param module: Name of the module
        :param operation: Type of the operation
        :param old_version: Overwritten version of the module, in case of an update
        :param new_version: New version of the module, in case of an installation/update
        """
        current_log_content = self.changelog_file.read()

        # Look for the section concerning the current version, or write a scaffold if not found
        version = self.desc_version or datetime.today().strftime("%Y-%m-%d")
        log_index = current_log_content.find("## {}".format(version))
        if log_index >= 0:
            new_log_content = current_log_content[log_index:]
        else:
            new_log_content = "\n\n## {}\n\n**Added**\n\n\n**Updated**\n\n\n**Removed**\n\n".format(
                version)
            log_index = len(current_log_content)

        # Remove previous log entry concerning the module
        if module in new_log_content:
            new_log_content = re.sub(r"\n.*{}.*".format(module), "",
                                     new_log_content)

        # Append the new log line under the right operation type
        if operation == OPERATION_INSTALL:
            index = new_log_content.find("**Updated**") - 2
            log_line = "\n * {0} ({1})".format(module, new_version)
        elif operation == OPERATION_UPDATE:
            index = new_log_content.find("**Removed**") - 2
            log_line = "\n * {0} ({1} from {2})".format(
                module, new_version, old_version)
        elif operation == OPERATION_UNINSTALL:
            index = len(new_log_content) - 1
            log_line = "\n * {0}".format(module)

        new_log_content = "{0}{1}{2}".format(new_log_content[:index], log_line,
                                             new_log_content[index:])

        # Overwrite file with the updated logs
        old_log_content = current_log_content[:log_index]

        self.changelog_file.truncate()
        self.changelog_file.write(old_log_content + new_log_content)

    def list_external_dependencies(self, raw=False, modules: List[str] = None):
        """
        Show external dependencies of all installed modules.
        :param raw: Whether to print only python dependencies in a 'requirements.txt' format
        :param modules: If given, show dependencies of those modules only
        """
        dependencies = self.get_all_dependencies(modules=modules)

        if raw:
            for dep in dependencies.get("python", []):
                click.echo(dep)
        else:
            for type in dependencies:
                click.echo(type)
                for dep in dependencies[type]:
                    if type == "python":
                        dep_installed = self.check_python_dependency(dep)
                        click.echo("\t{0} {1}".format(
                            dep, "(OK)" if dep_installed else "(missing)"))
                    else:
                        click.echo("\t{}".format(dep))

    def install_missing_dependencies(self, modules: List[str] = None):
        """
        Install all missing dependencies.
        :param modules: If given, install dependencies of those modules only
        """
        dependencies = self.get_all_dependencies(modules=modules)
        self.install_python_dependencies(dependencies.get("python", []))

    def get_all_dependencies(self,
                             modules: List[str] = None
                             ) -> Dict[str, List[str]]:
        """
        Get all missing dependencies from the installed modules.
        :param modules: If given, return dependencies of those modules only
        :return: A dictionary containing a list of dependencies for each type
        """

        # Filter installed modules to keep the ones given
        modules = {mod: self.get_installed_modules()[mod] for mod in modules} if modules \
            else self.get_installed_modules()
        all_deps = {}

        for module in modules:
            module_deps = self.parse_dependencies(module, self.install_dir)
            for type, deps in module_deps.items():
                all_deps.setdefault(type, set()).update(set(deps))

        return all_deps

    @staticmethod
    def parse_dependencies(module_name: str,
                           directory: FS) -> Dict[str, List[str]]:
        """
        Retrieve external dependencies from a module's manifest.
        :param module_name: Name of the module
        :param directory: Location of the module
        :return: A dictionary containing a list of dependencies for each type
        """
        manifest = directory.readtext(join(module_name, "__manifest__.py"))
        manifest_dict = ast.literal_eval(manifest)

        return manifest_dict.get("external_dependencies", {})

    @staticmethod
    def check_python_dependency(dependency: str) -> bool:
        """
        Check if a python dependency is satisfied i.e. if the python module is installed.
        :param dependency: Name of the python module
        :return: True if the module is installed, False otherwise
        """
        try:
            __import__(dependency)
        except ImportError:
            return False
        return True

    @staticmethod
    def install_python_dependencies(dependencies: List[str]):
        """
        Call pip to install the given python dependencies.
        :param dependencies: List of python modules to install
        """
        callable_pip.main("install", *dependencies)

    def pretty_print(self,
                     module_name: str,
                     message: str = "",
                     status: str = LOG_STATUS_PENDING,
                     level: int = 0):
        """
        Format and print a log to the console.
        :param module_name: Name of the module concerned
        :param message: Message to print
        :param status: Status of the log ('pending', 'ok', 'warning', 'error')
        :param level: Minimum verbose level to actually print the log (0, 1, 2)
        """
        if level <= self.verbose_level:
            if status == LOG_STATUS_OK:
                msg_color = "green"
            elif status == LOG_STATUS_WARNING:
                msg_color = "yellow"
            elif status == LOG_STATUS_ERROR:
                msg_color = "red"
            else:
                msg_color = "white"

            click.echo(
                click.style(module_name.ljust(30), fg="blue") +
                click.style(message, fg=msg_color))
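
A minimal sketch of driving the manager from a script; the description file name and target directory are assumptions, and VERBOSE_NORMAL is the module-level constant already referenced above.

# Hypothetical driver for the manager above.
oam = OdooAddonManager(description_file='addons.yaml',
                       install_directory='./addons',
                       verbose_level=VERBOSE_NORMAL)
oam.install_all()
oam.list_external_dependencies()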
Example #14
    with tag('urlset', xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"):
        for name, stats in dir.listdirinfo(files_only=True):
            if accept(name):
                with tag('url'):
                    with tag('loc'):
                        text('http://www.example.com/%s' % quote(name))
                    with tag('lastmod'):
                        text(str(Arrow.utcfromtimestamp(stats['modified_time'].timestamp())))
        for aDir in dir.walkdirs():
            if skipFirst:
                skipFirst = False
                continue
            if accept(aDir[1:]):
                for name, stats in dir.listdirinfo(path=aDir, files_only=True):
                    if accept(name):
                        with tag('url'):
                            with tag('loc'):
                                text('http://www.example.com%s/%s' % (aDir, quote(name)))
                            with tag('lastmod'):
                                text(str(Arrow.utcfromtimestamp(stats['modified_time'].timestamp())))


    dir.close()
    result = indent(
        doc.getvalue(),
        indentation=' ' * 2,
        newline='\r\n'
    )

    print(result)
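
The fragment above starts mid-function. A minimal sketch of the setup it appears to assume follows; the directory path and the accept() filter are assumptions, while the yattag/arrow/fs imports follow from the names used.

from urllib.parse import quote

from arrow import Arrow
from fs.osfs import OSFS
from yattag import Doc, indent

doc, tag, text = Doc().tagtext()
dir = OSFS('site-root')   # hypothetical directory to index
skipFirst = True          # used to skip the first entry yielded by walkdirs()

def accept(name):
    # Hypothetical filter: only publish HTML pages.
    return name.endswith('.html')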
Example #15
#!/usr/bin/env python3
import pendulum as pen
import fs
from fs.osfs import OSFS

home = OSFS("c:")
# print(home.listdir("tom"))

directory = list(home.scandir('tom'))
#print(directory)
# for d in directory:
#     print(d)

dir2 = home.filterdir('tom',files=['*.txt'], namespaces=['details'])
for d in dir2:
    # 2016-03-19 13:33:00.656588+00:00
    modified = pen.instance(d.modified).to_datetime_string()
    print(d.name, d.size, modified)


#print(home.tree())


home.close()


Example #16
from fs.memoryfs import MemoryFS
from fs.expose import fuse
fs = MemoryFS()  # create an in memory file system
fs.createfile('filename.txt')  # creating an empty file
fs.setcontents('filename.txt',
               'contents of file')  # putting content into the file.
from fs.osfs import OSFS
home_fs = OSFS('/')  #
home_fs.makedir(
    '/home/dave/scratch/ramdrive', allow_recreate=True
)  # have to make a directory for us to mount our memory file system on.
mp = fuse.mount(
    fs, '/home/dave/scratch/ramdrive'
)  # exposes fs to everything else on the machine (i.e. other system calls can see these files)
mp.path  # in case you need the path to the files created.
mp.unmount()  # files are no longer being exposed via fuse
home_fs.removedir('/home/dave/scratch/ramdrive/'
                  )  #remove the real file system directory when done.

fs.remove('filename.txt')

home_fs.close()
fs.close()

# Creating a ramdrive like this won't work for my desired task, as other external applications cannot write to the directory; they only have read access.
Example #17
            if path.lower().startswith("_autorun."):
                path = path[1:]
        return path

    def _decode(self,path):
        path = relpath(normpath(path))
        path = path.replace("__colon__",":")
        if not self.allow_autorun:
            if path.lower().startswith("autorun."):
                path = "_" + path
        return path


if __name__ == "__main__":
    import os, os.path
    import tempfile
    from fs.osfs import OSFS
    from fs.memoryfs import MemoryFS
    path = tempfile.mkdtemp()
    try:
        fs = OSFS(path)
        #fs = MemoryFS()
        fs.setcontents("test1.txt","test one")
        flags = DOKAN_OPTION_DEBUG|DOKAN_OPTION_STDERR|DOKAN_OPTION_REMOVABLE
        mount(fs, "Q", foreground=True, numthreads=1, flags=flags)
        fs.close()
    finally:
        OSFS(path).removedir("/",force=True)


Example #18
                save_tm(metal['band'], request)
                c += 1
                if c == 3:
                    c = 0
                print(metal['band'])


if __name__ == '__main__':
    c = 0
    bands = {}
    with open('bands.csv', 'r') as metalIn:
        for metal in DictReader(metalIn):
            bands[metal['band']] = {'site': metal['site']}
    mtml = OSFS('timemaps/link/')
    for metal_tml in mtml.walkfiles():
        p1 = metal_tml.find('/')
        p2 = metal_tml.find('.')
        b = metal_tml[p1 + 1:p2]
        with open('timemaps/link%s' % metal_tml, 'r') as mtmlin:
            metal_tm = Timemap(mtmlin.readlines())
            print(metal_tm)
            bands[b] = metal_tm
            print('----------------------------------------')
    mtml.close()
    for band, tm in bands.items():
        print(band, tm)
        with open('timemaps/json/%s.json' % band, 'w+') as btm:
            json.dump(tm, btm, default=lambda x: x.to_json(), indent=2)
    # with open('band_to_tm.json','w+') as btm:
    #     json.dump(bands,btm,default=lambda x:x.to_json(),indent=2)
Example #19
            if path.lower().startswith("_autorun."):
                path = path[1:]
        return path

    def _decode(self, path):
        path = relpath(normpath(path))
        path = path.replace("__colon__", ":")
        if not self.allow_autorun:
            if path.lower().startswith("autorun."):
                path = "_" + path
        return path


if __name__ == "__main__":
    import os, os.path
    import tempfile
    from fs.osfs import OSFS
    from fs.memoryfs import MemoryFS
    from shutil import rmtree
    from six import b
    path = tempfile.mkdtemp()
    try:
        fs = OSFS(path)
        #fs = MemoryFS()
        fs.setcontents("test1.txt", b("test one"))
        flags = DOKAN_OPTION_DEBUG | DOKAN_OPTION_STDERR | DOKAN_OPTION_REMOVABLE
        mount(fs, "Q", foreground=True, numthreads=1, flags=flags)
        fs.close()
    finally:
        rmtree(path)
Example #20
class TestDatasetManager(unittest.TestCase):

    trash_dir = "./tests/resources/trash_data"

    def setUp(self):
        self.os = OSFS(".")

    def tearDown(self):
        for data in self.os.listdir(self.trash_dir):
            if data != ".keep":
                self.os.remove("{}/{}".format(self.trash_dir, data))
        self.os.close()

    def test_should_read_yaml_from_dir(self):

        expected = {
            "one_test": {
                "source": "http://source/teste",
                "description": "my little dataset"
            }
        }

        data = DatasetManager("./tests/resources/one_data")
        self.assertDictEqual(data.get_datasets(), expected)

    def test_should_read_multiple_yaml_from_dir(self):

        expected = {
            "one_test": {
                "source":
                "https://raw.githubusercontent.com/pcsanwald/kaggle-titanic/master/train.csv",
                "description": "my little dataset"
            },
            "two_test": {
                "source":
                "https://raw.githubusercontent.com/pcsanwald/kaggle-titanic/master/train.csv",
                "description": "my little dataset 2"
            }
        }

        data = DatasetManager("./tests/resources/multiple_data", fs=self.os)
        result = list(data.get_datasets().keys())
        result.sort()
        expected = ["one_test", "two_test"]
        self.assertListEqual(expected, result)

    def test_should_get_dataset(self):

        data = DatasetManager("./tests/resources/local_data")
        dataset = {
            "local_test": {
                "source": "./tests/resources/local_data/train.csv",
                "description": "my little dataset local"
            }
        }
        self.assertDictEqual(data.get_dataset("local_test"),
                             dataset.get("local_test"))

    def test_should_get_dataset_unknown(self):

        data = DatasetManager("./tests/resources/local_data")
        with self.assertRaises(IOError):
            data.get_dataset("unknown_test")

    def test_should_create_dataset(self):
        data = DatasetManager(self.trash_dir, fs=self.os)
        identifier = "data_name"
        dataset = {
            "identifier": identifier,
            "description": "description",
            "source": "/tmp/test.csv",
        }

        data.create_dataset(**dataset)

        loaded_datasets = data.get_datasets()
        dataset_config = loaded_datasets.get(identifier)

        self.assertTrue(
            self.os.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))
        self.assertEqual(len(self.os.listdir(self.trash_dir)), 2)

        self.assertEqual(list(loaded_datasets.keys())[0], identifier)
        self.assertEqual(dataset_config.get("description"),
                         dataset["description"])
        self.assertEqual(dataset_config.get("source"), dataset["source"])

    def test_should_create_dataset_with_custom_data(self):
        data = DatasetManager(self.trash_dir, fs=self.os)
        identifier = "data_name_custom"
        dataset = {
            "identifier": identifier,
            "description": "description",
            "source": "/tmp/test.csv"
        }
        data.create_dataset(**dataset)
        self.assertTrue(
            self.os.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))

        self.assertEqual(len(os.listdir(self.trash_dir)), 2)
        loaded_dataset = data.get_datasets()
        self.assertEqual(list(loaded_dataset.keys()), [identifier])

        datasource_configs = loaded_dataset.get(identifier)
        self.assertEqual(datasource_configs["description"],
                         dataset["description"])
        self.assertEqual(datasource_configs["source"], dataset["source"])

    def test_should_remove_dataset(self):
        data = DatasetManager(self.trash_dir, fs=self.os)
        identifier = "data_name"
        dataset = {
            "identifier": identifier,
            "description": "description",
            "source": "/tmp/test.csv"
        }
        data.create_dataset(**dataset)
        self.assertTrue(
            os.path.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))
        self.assertEqual(len(os.listdir(self.trash_dir)), 2)
        data.remove_dataset(identifier)
        self.assertFalse(
            os.path.isfile("{}/{}.yaml".format(self.trash_dir, identifier)))
        self.assertEqual(len(os.listdir(self.trash_dir)), 1)

    def test_should_remove_unknown_dataset(self):

        data = DatasetManager("./tests/resources/local_data", fs=self.os)
        with self.assertRaises(IOError):
            data.remove_dataset("unknown_dataset")
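
Judging from the expected dictionary in test_should_read_yaml_from_dir, each dataset is described by a small YAML file mapping its identifier to a source and a description. A minimal sketch of generating such a file follows; the file name and the use of PyYAML are assumptions.

import yaml

# Hypothetical dataset descriptor matching the structure the tests expect.
descriptor = {
    'one_test': {
        'source': 'http://source/teste',
        'description': 'my little dataset',
    }
}

with open('tests/resources/one_data/one_test.yaml', 'w') as handle:
    yaml.safe_dump(descriptor, handle, default_flow_style=False)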