Example #1
    def __del__(self):
        """ Decontructor """

        # imports are set to None or deleted on app exit
        try:
            shutil.get_archive_formats()
        except (AttributeError, NameError):
            import shutil

        # remove tempdir
        shutil.rmtree(self.tempdir)
Example #3
    def display(self, info):
        columns, lines = shutil.get_terminal_size()
        shutil.get_archive_formats()

        outputs = []

        def w(msg, **extra):
            outputs.append(msg.format(i=info, **extra))

        def pre(i=0):
            if i == 0:
                return " " * 10
            return "{:-<10}".format((" " * i) + "|")

        def prog(done, current=None, max=100, waiting=False):
            small_done = (int(done * 2 / 10) or 1)
            whitespace = " " * (20 - small_done)
            end = "{current:3}/{max}".format(current=current or done, max=max) if not waiting else "  -/-"
            return "[{mark:=>{small_done}}{whitespace}] {end}".format(mark="*",
                                                                      done=done,
                                                                      end=end,
                                                                      small_done=small_done,
                                                                      whitespace=whitespace)

        w("{i[name]}: {pipe_len} pipes", pipe_len=len(info["pipes"]))
        w("Runtime: {i[runtime]:.0f} seconds")

        for sub_pipe in info["pipes"]:
            errors = "[Errors: {}]".format(len(sub_pipe["task"]["error_list"]))\
                if sub_pipe["task"].get("error_list", None) else ""
            w(pre(1) + " {p[name]} {errors}", p=sub_pipe, errors=errors)
            inp = sub_pipe["input"]
            if "percentage_done" in inp:
                percent = "{inp[percentage_done][percent]:3}% done".format(inp=inp)
                current, max = inp["percentage_done"]["data"]
            elif "read_count" in inp:
                percent, current, max = "", inp["read_count"], "?"
            else:
                percent, current, max = "", "?", "?"

            w(pre() + " Input: {current}/{max} read. {percent}", inp=inp, current=current, max=max, percent=percent)
            if "percentage_done" in inp:
                w(pre() + " " + prog(inp["percentage_done"]["percent"], current, max))

            if sub_pipe["task"]["subtasks"]:
                w(pre() + " Tasks:")

            for subtask in sub_pipe["task"]["subtasks"]:
                if "percentage_done" in subtask:
                    current, max = subtask["percentage_done"]["data"]
                    w(pre(2) + " " + prog(subtask["percentage_done"]["percent"], current=current, max=max))

        yield from self.output.write("\n".join(outputs))
Example #4
    def test_register_archive_format(self):

        self.assertRaises(TypeError, register_archive_format, "xxx", 1)
        self.assertRaises(TypeError, register_archive_format, "xxx", lambda: x, 1)
        self.assertRaises(TypeError, register_archive_format, "xxx", lambda: x, [(1, 2), (1, 2, 3)])

        register_archive_format("xxx", lambda: x, [(1, 2)], "xxx file")
        formats = [name for name, params in get_archive_formats()]
        self.assertIn("xxx", formats)

        unregister_archive_format("xxx")
        formats = [name for name, params in get_archive_formats()]
        self.assertNotIn("xxx", formats)
Example #5
    def test_register_archive_format(self):

        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
                          1)
        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
                          [(1, 2), (1, 2, 3)])

        register_archive_format('xxx', lambda: x, [(1, 2)], 'xxx file')
        formats = [name for name, params in get_archive_formats()]
        self.assertIn('xxx', formats)

        unregister_archive_format('xxx')
        formats = [name for name, params in get_archive_formats()]
        self.assertNotIn('xxx', formats)
Example #6
    def user_interface_creating_arc(self, dir_name_out):
        """Calls the user interface.

        dir_name_out - argument with the path to the folder to be archived.

        """
        if dir_name_out:
            result_type, state_type = QInputDialog.getItem(
                self, "Choose a type", "Select the archive type:",
                tuple(x[0] for x in shutil.get_archive_formats()), 1, False)
            if state_type:
                name_arc, state_arc = QInputDialog.getText(
                    self, "Specify a name",
                    "Specify the archive name\t(without the type)")
                if state_arc:
                    dir_name_in = QFileDialog.getExistingDirectory(
                        self, 'Select directory')
                    if dir_name_in:
                        result, message = ArchiveFunctional().create_arhcive(
                            name_arc, result_type, dir_name_out, dir_name_in)
                        if not result:
                            self.show_report_msgbox(message)
                        else:
                            self.show_success_msgbox("Archiving",
                                                     "Archive created successfully!")
Example #7
def openFile(fileName):
    """
    Opens the file fileName with a suitable compression tool and returns the opened stream.

    @result {Stream | None}
    """
    fileExtension = fileName[fileName.rfind(".") + 1:].lower()
    if fileExtension in UNCOMPRESSED_EXTENSIONS:
        return open(fileName)
    else:
        formats = shutil.get_archive_formats()
        for fmt in formats:
            if fileExtension == fmt[0]:
                tempDir = "tempDir"
                if os.path.exists(tempDir):
                    shutil.rmtree(tempDir)

                shutil.unpack_archive(fileName, tempDir)
                onlyfiles = [ f for f in os.listdir(tempDir) if os.path.isfile(os.path.join(tempDir,f)) ]
                if onlyfiles == []:
                    return None
                else:
                    return open(os.path.join(tempDir, onlyfiles[0]))
    return None
Example #8
def make_test_case(args, npy_savename, sync_was_successful):
    if npy_savename is None:
        raise ValueError('need non-null npy_savename')
    tar_dir = '{}.{}'.format(args.reference,
                             datetime.now().strftime('%Y-%m-%d-%H:%M:%S'))
    logger.info('creating test archive {}.tar.gz...'.format(tar_dir))
    os.mkdir(tar_dir)
    try:
        shutil.move('ffsubsync.log', tar_dir)
        shutil.copy(args.srtin, tar_dir)
        if sync_was_successful:
            shutil.move(args.srtout, tar_dir)
        if _ref_format(args.reference) in SUBTITLE_EXTENSIONS:
            shutil.copy(args.reference, tar_dir)
        elif args.serialize_speech or args.reference == npy_savename:
            shutil.copy(npy_savename, tar_dir)
        else:
            shutil.move(npy_savename, tar_dir)
        supported_formats = set(list(zip(*shutil.get_archive_formats()))[0])
        preferred_formats = ['gztar', 'bztar', 'xztar', 'zip', 'tar']
        for archive_format in preferred_formats:
            if archive_format in supported_formats:
                shutil.make_archive(tar_dir, archive_format, os.curdir, tar_dir)
                break
        else:
            logger.error('failed to create test archive; no formats supported '
                         '(this should not happen)')
            return 1
        logger.info('...done')
    finally:
        shutil.rmtree(tar_dir)
    return 0
Example #9
    def test_show_formats(self):
        __, stdout = captured_stdout(show_formats)

        # the output should be a header line + one line per format
        num_formats = len(get_archive_formats())
        output = [line for line in stdout.split("\n") if line.strip().startswith("--formats=")]
        self.assertEqual(len(output), num_formats)
Example #10
def cli():
    """
    Command line interface
    """

    import argparse

    note = 'The archive can be in one of the following archive formats: '
    for s, t in shutil.get_archive_formats():
        note += s + ' (' + t + '), '
    note = note[:-2] + '.'

    p = argparse.ArgumentParser(description='Unarchive for IPOL demos. ' +
                                note)
    p.add_argument('archive', help="path to the archive")
    p.add_argument('--rename',
                   action='store_true',
                   default=False,
                   help="rename files with spaces etc.")
    p.add_argument('--extract-dir',
                   help="directory in which to extract the archive")

    a = p.parse_args()

    accepted_formats = [
        '.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.tif', '.tiff', '.asc',
        '.pgm'
    ]

    files = get_from_archive(archive=a.archive,
                             accepted_formats=accepted_formats,
                             rename=a.rename)

    print('\n'.join(files))
Example #11
def shutil_make_archive():
    """
        Create a compressed archive file
    """
    # Archive formats supported for compression
    archive_formats = shutil.get_archive_formats()
    print(archive_formats)
    shutil.make_archive("test_archive", "zip", "test_dir/")
Example #12
    def test_register_archive_format(self) -> None:

        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
        self.assertRaises(TypeError, register_archive_format, 'xxx',
                          lambda: 1/0,
                          1)
        self.assertRaises(TypeError, register_archive_format, 'xxx',
                          lambda: 1/0,
                          [(1, 2), (1, 2, 3)])

        register_archive_format('xxx', lambda: 1/0, [('x', 2)], 'xxx file')
        formats = [name for name, params in get_archive_formats()]
        self.assertIn('xxx', formats)

        unregister_archive_format('xxx')
        formats = [name for name, params in get_archive_formats()]
        self.assertNotIn('xxx', formats)
Example #13
def _build_archive_format_list() -> List[ArchiveFormat]:
    formats = []
    extensions = shutil.get_archive_formats()
    for name, description in extensions:
        match_fn_name = f"{name}_match"
        match = getattr(sys.modules[__name__], match_fn_name, None)
        if match is not None:
            formats.append(ArchiveFormat(name, description, match))
    return formats
Example #14
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from packaging.fancy_getopt import FancyGetopt
    formats = sorted(('formats=' + name, None, desc)
                     for name, desc in get_archive_formats())
    FancyGetopt(formats).print_help(
        "List of available source distribution formats:")
Example #15
 def prepare_archive_with_repositories(self):
     available_archive_format_names =\
         [i[0] for i in shutil.get_archive_formats()]
     if self.archive_format not in available_archive_format_names:
         raise Exception("Archive format {} is not supported.".format(
             self.archive_format))
     shutil.make_archive(self.archive_base_name, self.archive_format,
                         self.directory)
     return "{}.{}".format(self.archive_base_name, self.archive_format)
Example #16
 def Archive(cls, src, dst):
     ext = os.path.splitext(dst)[1][1:]
     archive_exts = [f[0] for f in shutil.get_archive_formats()]
     if ext not in archive_exts:
         raise Exception("The extension '{}' is invalid. The archive extension must be one of the following: {}".format(ext, archive_exts))
     head, tail = os.path.split(src)
     base_name = os.path.join(os.path.dirname(dst), tail)
     root_dir = os.path.join(os.path.dirname(dst), head)
     base_dir = tail
     return shutil.make_archive(base_name, ext, root_dir=root_dir, base_dir=base_dir)
Example #17
def test_mk_archive():
    archive_formats = shutil.get_archive_formats()
    print(
        archive_formats
    )  # [('bztar', "bzip2'ed tar-file"), ('gztar', "gzip'ed tar-file"), ('tar', 'uncompressed tar file'), ('xztar', "xz'ed tar-file"), ('zip', 'ZIP file')]

    base = '/Users/huhao/software/idea_proj/data-base/api-test/py-test/Part3_Python_CookBook/'
    shutil.make_archive('aa', 'zip',
                        base + 'aa')  # make_archive(target base name, archive format, directory to archive)  # compress
Example #18
def main(args):
    import logging
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger(__name__)

    parser = argparse.ArgumentParser(description='Generate a distribution archive.', prog=args[0])

    parser.add_argument('build_dir',
        help='the build directory (defaults to CWD)',
        default=Path(os.getcwd()),
        type=Path,
        nargs='?',
    )

    parser.add_argument('output',
        help='the output file path',
        type=Path,
    )

    parser.add_argument('--format',
        help='the archive format',
        default='zip',
        choices=sorted(map(lambda x: x[0], shutil.get_archive_formats())),
    )

    parser.add_argument('--prefix',
        help='add a common prefix to all files and directories in the archive',
        default=None,
    )

    add_common_args(parser)
    args = parser.parse_args(args[1:])

    if args.prefix is not None:
        p = PurePosixPath(args.prefix)

        if p.is_absolute() or p.is_reserved() or '..' in p.parts:
            raise ValueError('Bad prefix: {}'.format(args.prefix))

        args.prefix = str(p)

        if args.prefix == '.':
            args.prefix = None

    with temp_install(args.build_dir) as install_dir:
        if args.prefix is not None:
            os.chdir(str(install_dir.parent))
            install_dir.rename(install_dir.parent / args.prefix)
            archive = shutil.make_archive(str(args.output), args.format, '.', str(args.prefix))
        else:
            archive = shutil.make_archive(str(args.output), args.format, str(install_dir))

        archive = Path(archive)
        archive.rename(args.output)

    print("Generated distribution archive {}".format(str(args.output)))
Example #19
    def test_show_formats(self):
        with captured_stdout() as stdout:
            show_formats()
        stdout = stdout.getvalue()

        # the output should be a header line + one line per format
        num_formats = len(get_archive_formats())
        output = [line for line in stdout.split('\n')
                  if line.strip().startswith('--formats=')]
        self.assertEqual(len(output), num_formats)
Example #20
 def __init__(self,
              templates_dir: str = "./templates",
              dist_dir: str = "./dist"):
     self.templates_dir = Path(templates_dir)
     self.dist_dir = Path(dist_dir)
     self.dist_dir.mkdir(parents=True, exist_ok=True)
     self.rendered_code = {}
     self.available_archive_formats = [
         x[0] for x in shutil.get_archive_formats()[::-1]
     ]
Example #21
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from distutils2.fancy_getopt import FancyGetopt

    formats = []
    for name, desc in get_archive_formats():
        formats.append(("formats=" + name, None, desc))
    formats.sort()
    FancyGetopt(formats).print_help("List of available source distribution formats:")
Example #22
    def __init__(self, *args, **kwargs) -> None:
        """ Class constructor.

        Args:
            *args: Variable length argument list
            **kwargs: Arbitrary keyword arguments

        """
        super(QSettingsDialog, self).__init__(*args, **kwargs)
        self.setWindowModality(QtCore.Qt.ApplicationModal)

        self.layout = QtWidgets.QVBoxLayout()
        self.setLayout(self.layout)

        # Global

        self.globalGroupBox = QtWidgets.QGroupBox('Project\'s Preferences')
        self.globalFormLayout = QtWidgets.QFormLayout()
        self.globalFormLayout.setLabelAlignment(QtCore.Qt.AlignLeft)
        self.globalGroupBox.setMinimumWidth(500)
        self.globalGroupBox.setLayout(self.globalFormLayout)

        self.globalWorkdirInput = QtPtWidgets.QDirectoryPathBox()
        self.globalFormLayout.addRow(QtWidgets.QLabel('Working directory'),
                                     self.globalWorkdirInput)

        self.globalArchiveInput = QtWidgets.QComboBox()
        self.globalArchiveInput.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                              QtWidgets.QSizePolicy.Expanding)
        self.globalArchiveInput.addItems(
            [f[0] for f in shutil.get_archive_formats()])
        self.globalFormLayout.addRow(QtWidgets.QLabel('Archive format'),
                                     self.globalArchiveInput)

        self.globalDelayInput = QtWidgets.QSpinBox(minimum=0,
                                                   maximum=60,
                                                   suffix=' seconds')
        self.globalDelayInput.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                            QtWidgets.QSizePolicy.Expanding)
        self.globalFormLayout.addRow(QtWidgets.QLabel('Screenshot delay'),
                                     self.globalDelayInput)

        # Buttons

        self.buttonBox = QtWidgets.QDialogButtonBox(
            QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)

        # QSettingsDialog

        self.layout.addWidget(self.globalGroupBox)
        self.layout.addWidget(self.buttonBox)
Example #23
def archive_test():
    """
    Archive and compress a directory
    :return:
    """
    format_result = shutil.get_archive_formats()
    print(format_result)
    # Archive the test2 directory into test3.zip
    shutil.make_archive("test3", "zip", "test2")

    # Unpack test3.zip into the test2 directory
    shutil.unpack_archive("test3.zip", "test2")
Example #24
    def export(self, formatArchive: str = 'tar') -> str:
        """ Create an archive file (such as zip or tar).

        Args:
            formatArchive (str): The archive format (default 'tar')

        Returns:
            str: The archive's name

        """
        if formatArchive in [f[0] for f in shutil.get_archive_formats()]:
            return shutil.make_archive(self.absolutePath(), formatArchive,
                                       self.parentPath(), self.dirName())
        return None
Example #25
 def download_list(self, url_list: list):
     for item in url_list:
         t.log_message('Downloading: ' + item)
         try:
             download = Download(item, retries=5)
             download.download()
             path = os.path.abspath(download.download_path)
             _, extension = os.path.splitext(path)
             if (extension[1:] in dict(shutil.get_archive_formats())
                     and self.config.extract_archives):
                 shutil.unpack_archive(path)
         except Exception as e:
             t.log_message("ERROR. Download: " + item + ' FAILED.\n' +
                           str(e))
Example #26
def main():
    if path.exists("textfile.txt"):
        # get the path to the file in the current directory
        src = path.realpath("textfile.txt")
        head, tail = path.split(src)

        print "path: " + head
        print "file: " + tail
        #create the bkup name
        bk = src + ".bkup"
        print bk
        # now copy the file
        shutil.copy(src, bk)
        # copy over the permissions, modification times, and other info
        shutil.copystat(src, bk)
        # rename a file
        os.rename(bk, "newfile.txt")

        # now let's create a zip file
        root_dir, fn = path.split(src)

        # determine which archive formats are available
        print(shutil.get_archive_formats())
        shutil.make_archive("myarchive", 'zip', root_dir)
Example #27
def unzip_archives(path):
    """функция просматривает архивные файлы в папке 'archives' и если встречает архивы обрабатываемых форматов
    то разархивирует их в субпапки с названием файла архива внутри директории 'archives'"""
    available_arch_type = set()
    for elem in shutil.get_archive_formats():
        available_arch_type.add(elem[0])
    # print('available formats: ', available_arch_type)

    for elem in (path / 'archives').iterdir():
        # print(f'inspecting file - {elem.name}')
        # print(elem.suffix)
        if elem.suffix[1:] in available_arch_type:
            print(
                f'found an archive in a supported format: {elem.name}')
            shutil.unpack_archive(elem, path / 'archives' / elem.stem)
            print('archive unpacked')
Example #28
def unpack_arch(filename, extract_dir='.'):
    try:
        shutil.unpack_archive(filename=filename, extract_dir=extract_dir)
    except shutil.ReadError:
        success = False
        for format_ in [x[0] for x in shutil.get_archive_formats()]:
            try:
                shutil.unpack_archive(filename=filename,
                                      extract_dir=extract_dir,
                                      format=format_)
                success = True
            except Exception:
                pass
            if success:
                break
        if not success:
            raise shutil.ReadError
Example #29
 def make_archive(self):
     global listbox, ask_folder, folder_name, lists
     lists = []
     listbox = Listbox(root, selectmode='single')
     lists.append(listbox)
     listbox.grid(row=1, column=1)
     ask_folder = str(filedialog.askdirectory())
     string = StringVar()
     string.set(ask_folder)
     formats = shutil.get_archive_formats()
     for i in range(len(formats)):
         listbox.insert(END, formats[i][0])
     folder_name = Entry(root, textvariable=string)
     lists.append(folder_name)
     folder_name.grid(row=2, column=2)
     make_button = Button(root, text='FINAL', command=self.final_archive)
     lists.append(make_button)
     make_button.grid(row=1, column=3)
     refresh_button = Button(root, text='REFRESH', command=self.refresh)
     lists.append(refresh_button)
     refresh_button.grid(row=1, column=4)
Example #30
def make_test_case(args: argparse.Namespace, npy_savename: Optional[str],
                   sync_was_successful: bool) -> int:
    if npy_savename is None:
        raise ValueError("need non-null npy_savename")
    tar_dir = "{}.{}".format(args.reference,
                             datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))
    logger.info("creating test archive {}.tar.gz...".format(tar_dir))
    os.mkdir(tar_dir)
    try:
        log_path = "ffsubsync.log"
        if args.log_dir_path is not None and os.path.isdir(args.log_dir_path):
            log_path = os.path.join(args.log_dir_path, log_path)
        shutil.copy(log_path, tar_dir)
        shutil.copy(args.srtin[0], tar_dir)
        if sync_was_successful:
            shutil.move(args.srtout, tar_dir)
        if _ref_format(args.reference) in SUBTITLE_EXTENSIONS:
            shutil.copy(args.reference, tar_dir)
        elif args.serialize_speech or args.reference == npy_savename:
            shutil.copy(npy_savename, tar_dir)
        else:
            shutil.move(npy_savename, tar_dir)
        supported_formats = set(list(zip(*shutil.get_archive_formats()))[0])
        preferred_formats = ["gztar", "bztar", "xztar", "zip", "tar"]
        for archive_format in preferred_formats:
            if archive_format in supported_formats:
                shutil.make_archive(tar_dir, archive_format, os.curdir,
                                    tar_dir)
                break
        else:
            logger.error("failed to create test archive; no formats supported "
                         "(this should not happen)")
            return 1
        logger.info("...done")
    finally:
        shutil.rmtree(tar_dir)
    return 0
Example #31
def archive_files(root_folder):
    # Returns the location of the archive containing the selected files

    # Create the archive folder once
    if ARCHIVE_FOLDER not in TEMP_FILES:
        TEMP_FILES[ARCHIVE_FOLDER] = mkdtemp()

    tmp_dir = TEMP_FILES[ARCHIVE_FOLDER]

    filename = get_random_filename()

    archive_base_name = "{}{}{}".format(tmp_dir, sep, filename)

    # Get the last available format for this platform
    file_format = get_archive_formats()[-1][0]

    archive_filename = make_archive(archive_base_name,
                                    format=file_format,
                                    base_dir=root_folder,
                                    root_dir="/")

    TEMP_FILES[archive_base_name] = archive_filename

    return archive_filename
Example #32
def build_drive_parser():
    parser = ArgumentParser(description='Google Drive Backup CLI Parser',
                            parents=[tools.argparser])
    parser.add_argument('--source',
                        type=str,
                        default=None,
                        help='Source folder or file to backup')
    parser.add_argument('--destination',
                        type=str,
                        default="~",
                        help='Destination for backup to be stored')
    parser.add_argument(
        '--graceful',
        action='store_true',
        help=
        'Backup must be graceful, i.e no downloads are allowed to fail. Non-graceful backups will mark corrupted files'
    )
    parser.add_argument('--compression',
                        type=str,
                        default=None,
                        choices=[fmt[0] for fmt in get_archive_formats()],
                        help='Compression format for downloaded files')
    parser.add_argument("--gDocConversion",
                        type=str,
                        choices=['msoffice', 'pdf'],
                        default='pdf',
                        help='Google Doc mime type conversion')
    parser.add_argument(
        '--scope',
        type=str,
        default='READONLY',
        choices=['READONLY', 'FILE', 'DRIVE'],
        help='OAuth 2.0 scope (Must match OAuth Credentials File)')
    parser.add_argument('--logging', type=str, default="Info", help='Log')

    return parser
Example #33
# shutil_example.py
#
# Shutil Example
#
# Provides a high-level interface for working with files or collections of files.
#
# Due to the destructive nature of this module, most of the examples are commented out.

# noinspection PyUnresolvedReferences
from shutil import move, copy, copytree, rmtree, get_archive_formats, make_archive, unpack_archive, disk_usage, which

# Moving / Copying and Deleting
# move("source_file.txt", "target_dir" )
# copy("source_file.txt", "target_file.txt" )
# copytree("source_dir", "target_dir")

# rmtree("source")

# Archive
# Get the archive formats available
print(get_archive_formats())
# make_archive(archive_name, 'gztar', root_dir)
# unpack_archive(archive_name, "source_dir", "gztar" )

# Disk Space
print(disk_usage("/"))

# Which
print(which("python"))
print(which("python3"))
"""
shutil.move moves a file or directory to another location and returns 
destination.
OUTPUT : D:/Automation_New
"""
src = "D:/Automation"
dst = "D:/Automation_New"
print(shutil.move(src,dst))

"""
shutil.disk_usage returns disk usage statistics (total, used, free) in bytes:
OUTPUT : usage(total=365307097088, used=59445481472, free=305861615616)
"""
print(shutil.disk_usage("D:/documents"))

"""
shutil.make_archive creates the archive in the following formats:
        - ('bztar', "bzip2'ed tar-file")
        - ('gztar', "gzip'ed tar-file")
        - ('tar', 'uncompressed tar file')
        - ('xztar', "xz'ed tar-file")
        - ('zip', 'ZIP file')

shutil.get_archive_formats() will show the supported formats as above.
shutil.make_archive(base_name, format, root_dir, base_dir) creates the archive:
base_name - name of the archive file to create, without the format-specific extension.
format - format specifier.
root_dir - path of the directory in which base_dir is located.
base_dir - directory for which the archive needs to be created.
shutil.unpack_archive(filename, extract_dir, format) extracts the given archive into extract_dir.

"""
import shutil

for format, description in shutil.get_archive_formats():
    print('{:<5}:{}'.format(format, description))
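A short, hedged sketch of the make_archive/unpack_archive parameters described in the docstring above (the src/myapp layout and file names are invented for illustration):

import os
import shutil

# Invented layout: src/myapp/hello.txt, so the calls below have something to pack.
os.makedirs("src/myapp", exist_ok=True)
with open("src/myapp/hello.txt", "w") as f:
    f.write("hello")

# base_name: output name without extension; format: archive format specifier;
# root_dir: directory the archive is built relative to; base_dir: what to archive inside root_dir.
archive_path = shutil.make_archive("projects", "gztar", root_dir="src", base_dir="myapp")
print(archive_path)  # an absolute path ending in projects.tar.gz; the archive holds myapp/hello.txt

# unpack_archive(filename, extract_dir, format); the format is inferred from the extension when omitted.
os.makedirs("restored", exist_ok=True)
shutil.unpack_archive(archive_path, extract_dir="restored")
print(os.listdir(os.path.join("restored", "myapp")))  # ['hello.txt']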
Example #36
def comprimir(nombre_portafolio, carpeta_portafolio_org):
    shutil.get_archive_formats()
    shutil.make_archive(nombre_portafolio, "zip", carpeta_portafolio_org)
Example #37
def configure(env, cfg):
	env.debug("archive formats: %s" % [a[0] for a in shutil.get_archive_formats()])
	env.settings.merge(cfg, 'sadmenv', ('target.dir',))
Example #38
import shutil

print(shutil.get_archive_formats())
shutil.unpack_archive('Python-3.3.0.tgz')
shutil.make_archive('py33', 'zip', 'Python-3.3.0')
Example #39
# coding=utf-8
# Compress files

import shutil

print(shutil.get_archive_formats())

# Create a compressed archive
# shutil.make_archive(basename, format, root_dir)
shutil.make_archive("test_archive", "zip", "test_dir/")
Example #40
# shutil_get_archive_formats.py

import shutil

for format, description in shutil.get_archive_formats():
    print('{:<5}: {}'.format(format, description))
Example #41
# Create and unpack archive files

import shutil

shutil.unpack_archive('Python-3.3.0.tgz')
shutil.make_archive('py33','zip','Python-3.3.0')

shutil.get_archive_formats()
# [('bztar', "bzip2'ed tar-file"), ('gztar', "gzip'ed tar-file"),('tar', 'uncompressed tar file'), ('zip', 'ZIP file')]

## tarfile, zipfile, gzip, bz2, etc. can handle many archive formats

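The note above points at the lower-level tarfile/zipfile machinery behind these formats. As a hedged sketch (the "flatzip" name and its maker function are invented for illustration), shutil also lets you register an extra format so it shows up in get_archive_formats(), mirroring the register_archive_format tests earlier on this page:

import shutil
import zipfile

# Invented example format: a plain ZIP written directly with the lower-level zipfile module.
def _make_flat_zip(base_name, base_dir, **kwargs):
    archive_name = base_name + ".flatzip"
    with zipfile.ZipFile(archive_name, "w") as zf:
        zf.write(base_dir)  # store only the top-level entry; a real maker would walk the tree
    return archive_name

shutil.register_archive_format("flatzip", _make_flat_zip, description="flat ZIP file")
print([name for name, desc in shutil.get_archive_formats()])  # now includes 'flatzip'
shutil.unregister_archive_format("flatzip")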
Example #42
def main():
    logging.basicConfig(level=logging.INFO)
    parser = cgal_docker_args.parser()
    args = parser.parse_args()

    # Setup the pidfile handling
    if os.path.isfile(pidfile):
        logging.warning('pidfile {} already exists. Killing other process.'.format(pidfile))
        with open(pidfile, 'r') as pf:
            oldpid = int(pf.read().strip())
        try:
            os.kill(oldpid, signal.SIGTERM)
            # Wait for the process to terminate.
            while pid_exists(oldpid):
                pass
        except OSError:
            logging.warning('pidfile {} did contain invalid pid {}.'.format(pidfile, oldpid))

    with open(pidfile, 'w') as pf:
        pid = str(os.getpid())
        logging.info('Writing pidfile {} with pid {}'.format(pidfile, pid))
        pf.write(pid)

    # If no jobs are specified, use as many as we use cpus per
    # container.
    if not args.jobs:
        args.jobs = args.container_cpus

    client = docker.APIClient(base_url=args.docker_url, version='1.24', timeout=300)

    # Perform a check for existing, running containers.
    existing = client.containers(filters={'status': 'running'})
    generic_name_regex = re.compile('CGAL-.+-testsuite')
    for cont in existing:
        for name in cont[u'Names']:
            if generic_name_regex.match(name):
                logging.error('Testsuite Container {} of previous suite still running. Aborting. NOTE: This could also be a name clash.'.format(name))
                sys.exit(0)


    args.images = images(client, args.images)

    if args.upload_results:
        assert args.tester, 'When uploading a --tester has to be given'
        assert args.tester_name, 'When uploading a --tester-name has to be given'
        assert args.tester_address, 'When uploading a --tester-address has to be given'
        assert 'gztar' in (item[0] for item in shutil.get_archive_formats()), 'When uploading results, gztar needs to be available'

    logging.info('Using images {}'.format(', '.join(args.images)))

    release = Release(args.testsuite, args.use_local, args.user, args.passwd)
    if args.packages:
        release.scrub(args.packages)

    logging.info('Extracted release {} is at {}'.format(release.version, release.path))

    local_dir = os.path.dirname(os.path.realpath(__file__))
    # Copy the entrypoint to the testsuite volume
    subprocess.call(['cp', '--preserve=xattr', os.path.join(local_dir, 'docker-entrypoint.sh'), release.path])
    subprocess.call(['cp', '--preserve=xattr', os.path.join(local_dir, 'run-testsuite.sh'), release.path])

    cpu_sets = calculate_cpu_sets(args.max_cpus, args.container_cpus)
    nb_parallel_containers = len(cpu_sets)

    logging.info('Running a maximum of %i containers in parallel each using %i CPUs and using %i jobs' % (nb_parallel_containers, args.container_cpus, args.jobs))

    runner = ContainerRunner(client, args.tester, args.tester_name, 
                             args.tester_address, args.force_rm, args.jobs,
                             release, args.testresults, args.use_fedora_selinux_policy,
                             args.intel_license, args.mac_address)
    scheduler = ContainerScheduler(runner, args.images, cpu_sets)
    # Translate SIGTERM to SystemExit exception
    signal.signal(signal.SIGTERM, term_handler)
    before_start = int(time.time())
    launch_result = scheduler.launch()
    if not launch_result:
        logging.error('Exiting without starting any containers.')
        sys.exit('Exiting without starting any containers.')

    # Possible events are: create, destroy, die, export, kill, pause,
    # restart, start, stop, unpause.

    # We only care for die events. The problem is that a killing or
    # stopping a container will also result in a die event before
    # emitting a kill/stop event. So, when a container dies, we cannot
    # know if it got stopped, killed or exited regularly. Waiting for
    # the next event with a timeout is very flaky and error
    # prone. This is a known design flaw of the docker event API. To
    # work around it, we parse the ExitCode of the container die event and
    # base our decision on it.

    # Process events since starting our containers, so we don't miss
    # any event that might have occured while we were still starting
    # containers. The decode parameter has been documented as a
    # resolution to this issue
    # https://github.com/docker/docker-py/issues/585
    try:
        for ev in client.events(since=before_start, decode=True):
            assert isinstance(ev, dict)
            if ev[u'Type'] != u'container':
                continue
            event_id = ev[u'id']

            if scheduler.is_ours(event_id): # we care
                if ev[u'status'] == u'die':
                    if ev[u'Actor'][u'Attributes'][u'exitCode'] != '0':
                        logging.warning('Container exited with non-zero status: {}. Assuming dirty death of the container.'
                                        .format(ev[u'Actor'][u'Attributes'][u'exitCode']))
                    else:
                        logging.info('Container died cleanly, handling results.')
                        try:
                            handle_results(client, event_id, args.upload_results, args.testresults,
                                           release, args.tester)
                        except TestsuiteException as e:
                            logging.exception(str(e))
                            # The freed up cpu_set.
                    scheduler.container_finished(event_id)
                    if not scheduler.launch():
                        logging.info('No more images to launch.')
                    if not scheduler.containers_running():
                        logging.info('Handled all images.')
                        break
    except KeyboardInterrupt:
        logging.warning('SIGINT received, cleaning up containers!')
        scheduler.kill_all()
    except SystemExit:
        logging.warning('SIGTERM received, cleaning up containers!')
        scheduler.kill_all()

    if not args.use_local:
        logging.info('Cleaning up {}'.format(release.path))
        shutil.rmtree(release.path)

    remove_pidfile()

    if scheduler.errors_encountered:
        print(scheduler.error_buffer.getvalue())
        exit(33)
Example #43
__author__ = 'PyBeaner'
# using shutil to (un)pack files is easier than the lower-level ways like zipfile, tarfile, etc.
import shutil

# shutil.make_archive("example","zip","some_directory_to_compress")

formats = shutil.get_archive_formats()
# [('bztar', "bzip2'ed tar-file"), ('gztar', "gzip'ed tar-file"), ('tar', 'uncompressed tar file'), ('zip', 'ZIP file')]
print(formats)

# shutil.unpack_archive("file.zip")
Example #44
 def _check_archive_formats(self, formats):
     supported_formats = [name for name, desc in get_archive_formats()]
     for format in formats:
         if format not in supported_formats:
             return format
     return None
Example #45

# Backward compatibility
# ------------------------------------------------
try:
    FileExistsError
except NameError:
    FileExistsError = OSError


# Data
# ------------------------------------------------
PLUGIN = "BackupManager"
# The PLUGIN_VERSION is not related to the EMSM version number.
PLUGIN_VERSION = "2.0.0"
AVLB_ARCHIVE_FORMATS = [name for name, desc in shutil.get_archive_formats()]


# Functions
# ------------------------------------------------
def file_hash(path):
    """
    Returns the sha512 hash sum of the file at *path*.
    """
    with open(path, "rb") as file:
        data = file.read()
        sum_ = hashlib.sha512(data)
    return sum_.hexdigest()


# Classes
Example #46
#!/usr/bin/python3

import shutil
import sys
import os

formats = list(i[0] for i in shutil.get_archive_formats())
form = list(i[0] for i in formats)


def unzip():
    for compressed in sys.argv[3:]:
        try:
            if os.path.exists(compressed):
                shutil.unpack_archive(compressed)
            else:
                print('No file or folder named: ' + compressed)
        except ValueError:
            print(
                'We might get there one day. But for now, Unsupported archive format'
            )


def compress():
    if sys.argv[2] not in form:
        print('Bros small small')
    else:
        archive = formats[form.index(sys.argv[2])]
        for name in sys.argv[3:]:
            if name.endswith('/'):
                name = name[:-1]
Example #47
__author__ = 'Administrator'

"""
    archive bag operation
    implement tar compression
"""

from pathlib import Path
from tempfile import TemporaryFile
import shutil, argparse


parser = argparse.ArgumentParser()
types = dict(shutil.get_archive_formats())  # archive formats supported by the system

parser.add_argument("targz", help="input a targz name")
parser.add_argument("input", help="directory need to compression")
parser.add_argument("output", help="compression file output directory")
parser.add_argument("--type", help="type of compression", choices=types, default="tar")

arguments = parser.parse_args()

pathObj = Path(arguments.input)

if not pathObj.is_absolute():
    pathObj = pathObj.absolute()

pathString = str(pathObj)

destFilePath = shutil.make_archive(base_name=TemporaryFile().name, format=arguments.type, root_dir=arguments.input)
Example #48
 def info():
     print('archive types: ', shutil.get_archive_formats())
Example #49
def main():
    logging.basicConfig(level=logging.INFO)
    parser = cgal_docker_args.parser()
    args = parser.parse_args()

    # If no jobs are specified, use as many as we use cpus per
    # container.
    if not args.jobs:
        args.jobs = args.container_cpus

    client = docker.Client(base_url=args.docker_url)
    args.images = images(client, args.images)

    if args.upload_results:
        assert args.tester, 'When uploading a --tester has to be given'
        assert args.tester_name, 'When uploading a --tester-name has to be given'
        assert args.tester_address, 'When uploading a --tester-address has to be given'
        assert 'gztar' in (item[0] for item in shutil.get_archive_formats()), 'When uploading results, gztar needs to be available'

    logging.info('Using images {}'.format(', '.join(args.images)))

    release = Release(args.testsuite, args.use_local, args.user, args.passwd)
    if args.packages:
        release.scrub(args.packages)

    logging.info('Extracted release {} is at {}'.format(release.version, release.path))

    # Copy the entrypoint to the testsuite volume
    shutil.copy('./docker-entrypoint.sh', release.path)
    shutil.copy('./run-testsuite.sh', release.path)

    cpu_sets = calculate_cpu_sets(args.max_cpus, args.container_cpus)
    nb_parallel_containers = len(cpu_sets)

    logging.info('Running a maximum of %i containers in parallel each using %i CPUs and using %i jobs' % (nb_parallel_containers, args.container_cpus, args.jobs))


    runner = ContainerRunner(client, args.tester, args.tester_name, 
                             args.tester_address, args.force_rm, args.jobs,
                             release, args.testresults, args.use_fedora_selinux_policy)
    scheduler = ContainerScheduler(runner, args.images, cpu_sets)

    before_start = int(time.time())
    launch_result = scheduler.launch()
    if not launch_result:
        logging.error('Exiting without starting any containers.')
        sys.exit('Exiting without starting any containers.')

    # Possible events are: create, destroy, die, export, kill, pause,
    # restart, start, stop, unpause.

    # We only care for die events. The problem is that a killing or
    # stopping a container will also result in a die event before
    # emitting a kill/stop event. So, when a container dies, we cannot
    # know if it got stopped, killed or exited regularly. Waiting for
    # the next event with a timeout is very flaky and error
    # prone. This is a known design flaw of the docker event API. To
    # work around it, we parse the Exit Status of the container and
    # base our decision on the error code.
    status_code_regex = re.compile(r'Exited \((.*)\)')

    # Process events since starting our containers, so we don't miss
    # any event that might have occured while we were still starting
    # containers. The decode parameter has been documented as a
    # resolution to this issue
    # https://github.com/docker/docker-py/issues/585
    for ev in client.events(since=before_start, decode=True):
        assert isinstance(ev, dict)
        event_id = ev[u'id']

        if scheduler.is_ours(event_id): # we care
            container_info = container_by_id(client, event_id)
            if ev[u'status'] == u'die' and status_code_regex.search(container_info[u'Status']):
                res = status_code_regex.search(container_info[u'Status'])
                if not res:
                    logging.warning('Could not parse exit status: {}. Assuming dirty death of the container.'
                                    .format(container_info[u'Status']))
                elif res.group(1) != '0':
                    logging.warning('Container exited with Error Code: {}. Assuming dirty death of the container.'
                                    .format(res.group(1)))
                else:
                    logging.info('Container died cleanly, handling results.')
                    try:
                        handle_results(client, event_id, args.upload_results, args.testresults,
                                       release, args.tester)
                    except TestsuiteException as e:
                        logging.exception(str(e))
                # The freed up cpu_set.
                scheduler.container_finished(event_id)
                if not scheduler.launch():
                    logging.info('No more images to launch.')
                if not scheduler.containers_running():
                    logging.info('Handled all images.')
                    break