Code example #1
def _create_dl_subprocess(abs_cmd, src, dest, timeout):
    """Download `src` to `dest` with curl or wget, showing a progress bar.

    Probes the server for Content-Length first, then launches the download and
    polls the size of the growing file at `dest` to drive the progress
    indicator.  Returns the subprocess's exit code (0 on success).
    """
    if "curl" in os.path.basename(abs_cmd):
        size_cmd = [abs_cmd, '-sI', src, '--location', '--max-time', str(timeout)]
        get_cmd = [abs_cmd, '-s', '-L', src, '-o', dest, '--connect-timeout', str(timeout)]
    elif "wget" in os.path.basename(abs_cmd):
        size_cmd = [abs_cmd, src, '--spider', '--server-response', '--timeout=%d' % timeout, '--tries=1']
        get_cmd = [abs_cmd, '-q', src, '-O', dest, '--timeout=%d' % timeout]
    else:
        raise InternalError("Invalid command parameter: %s" % abs_cmd)
    try:
        proc_output = get_command_output(size_cmd)
    except subprocess.CalledProcessError as err:
        return err.returncode
    _heavy_debug(proc_output)
    try:
        file_size = int(proc_output.partition('Content-Length')[2].split()[1])
    except (ValueError, IndexError):
        LOGGER.warning("Invalid response while retrieving download file size")
        file_size = -1
    with ProgressIndicator(file_size) as progress_bar:
        with open(os.devnull, 'wb') as devnull:
            proc = subprocess.Popen(get_cmd, stdout=devnull, stderr=subprocess.STDOUT)
            while proc.poll() is None:
                try:
                    current_size = os.stat(dest).st_size
                except OSError:
                    pass
                else:
                    progress_bar.update(current_size)
                time.sleep(0.1)
            proc.wait()
            retval = proc.returncode
            LOGGER.debug("%s returned %d", get_cmd, retval)
            return retval
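The size probe above pulls the Content-Length value out of the header dump with str.partition. A minimal, self-contained sketch of just that parsing step; the header text below is made up for illustration (real output comes from "curl -sI" or "wget --server-response"):

headers = "HTTP/1.1 200 OK\r\nContent-Type: application/gzip\r\nContent-Length: 1048576\r\n\r\n"
try:
    file_size = int(headers.partition('Content-Length')[2].split()[1])
except (ValueError, IndexError):
    file_size = -1  # size unknown; the caller above passes -1 to ProgressIndicator
print(file_size)  # prints 1048576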
Code example #2
File: util.py  Project: HPCL/taucmdr
def extract_archive(archive, dest, show_progress=True):
    """Extracts archive file to dest.

    Supports compressed and uncompressed tar archives. Destination folder will
    be created if it doesn't exist.

    Args:
        archive (str): Path to archive file to extract.
        dest (str): Destination folder.

    Returns:
        str: Full path to extracted files.

    Raises:
        IOError: Failed to extract archive.
    """
    topdir = archive_toplevel(archive)
    full_dest = os.path.join(dest, topdir)
    mkdirp(dest)
    with tarfile.open(archive) as fin:
        if show_progress:
            LOGGER.info("Checking contents of '%s'", archive)
            with ProgressIndicator("Extracting archive", show_cpu=False):
                members = fin.getmembers()
            LOGGER.info("Extracting '%s' to create '%s'", archive, full_dest)
            fin.extractall(dest, members=_show_extract_progress(members))
        else:
            LOGGER.info("Extracting '%s' to create '%s'", archive, full_dest)
            fin.extractall(dest)
    if not os.path.isdir(full_dest):
        raise IOError("Extracting '%s' does not create '%s'" %
                      (archive, full_dest))
    return full_dest
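A hypothetical call site for extract_archive. The import path, archive name, and destination below are assumptions for illustration, not taken from the project:

# Hypothetical usage; the import path and the paths shown are assumptions.
from taucmdr.util import extract_archive

src_dir = extract_archive('/tmp/tau-2.29.tar.gz', '/opt/taucmdr/sources')
# Returns the archive's top-level directory on success,
# e.g. '/opt/taucmdr/sources/tau-2.29'; raises IOError if extraction fails.
print(src_dir)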
Code example #3
File: util.py  Project: HPCL/taucmdr
def _show_extract_progress(members):
    """Generator that yields archive members while updating a progress bar."""
    with ProgressIndicator("Extracting",
                           total_size=len(members),
                           show_cpu=False) as progress_bar:
        for i, member in enumerate(members):
            progress_bar.update(i)
            yield member
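_show_extract_progress is a generator: tarfile's extractall consumes it one member at a time, so the bar advances as extraction actually proceeds rather than all at once. A self-contained sketch of the same generator-driven pattern using only the standard library (the print-based reporter is a stand-in, not taucmdr's ProgressIndicator):

import sys

def report_progress(items):
    """Yield items one at a time, printing a crude percentage after each."""
    total = len(items) or 1
    for i, item in enumerate(items, 1):
        yield item
        sys.stdout.write("\r%3d%%" % (100 * i // total))
        sys.stdout.flush()
    sys.stdout.write("\n")

# The consumer drives the generator, so progress tracks the real work:
for item in report_progress(list(range(10))):
    pass  # e.g. tar.extract(item) in the real code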
Code example #4
File: installation.py  Project: eugeneswalker/taucmdr
def set_group(self, gid=None):
    """Sets the group for all files in the installation.

    Args:
        gid (int): Group ID number.  If not given, use the group ID of the folder containing the installation.
    """
    if gid is None:
        parent_stat = os.stat(os.path.dirname(self.install_prefix))
        gid = parent_stat.st_gid
    paths = [self.install_prefix]
    LOGGER.info("Checking installed files...")
    with ProgressIndicator(""):
        for root, dirs, _ in os.walk(self.install_prefix):
            paths.extend((os.path.join(root, x) for x in dirs))
    LOGGER.info("Setting file permissions...")
    with ProgressIndicator("", total_size=len(paths)) as progress_bar:
        for i, path in enumerate(paths):
            try:
                os.chown(path, -1, gid)
            except OSError as err:
                LOGGER.debug("Cannot set group on '%s': %s", path, err)
            progress_bar.update(i)
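A hypothetical call to set_group. The installation object and group name are assumptions; grp is the standard-library group database module, not part of taucmdr:

import grp

# Hypothetical usage: 'installation' is some installation instance and
# 'hpc-users' is an assumed group name on the system.
gid = grp.getgrnam('hpc-users').gr_gid
installation.set_group(gid)   # set an explicit group on every installed path
installation.set_group()      # or inherit the group of the parent directory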
Code example #5
File: util.py  Project: HPCL/taucmdr
def download(src, dest, timeout=8):
    """Downloads or copies files.

    `src` may be a file path or URL.  The destination folder will be created
    if it doesn't exist.  Download is via curl, wget, or Python's urllib as appropriate.

    Args:
        src (str): Path or URL to source file.
        dest (str): Path to file copy or download destination.
        timeout (int): Maximum time in seconds for the connection to the server.  0 for no timeout.

    Raises:
        IOError: File copy or download failed.
    """
    assert isinstance(timeout, int) and timeout >= 0
    if src.startswith('file://'):
        src = src[6:]
    if os.path.isfile(src):
        LOGGER.debug("Copying '%s' to '%s'", src, dest)
        mkdirp(os.path.dirname(dest))
        shutil.copy(src, dest)
    else:
        LOGGER.debug("Downloading '%s' to '%s'", src, dest)
        LOGGER.info("Downloading '%s'", src)
        mkdirp(os.path.dirname(dest))
        for cmd in "curl", "wget":
            abs_cmd = which(cmd)
            if abs_cmd and _create_dl_subprocess(abs_cmd, src, dest,
                                                 timeout) == 0:
                return
            LOGGER.warning(
                "%s failed to download '%s'. Retrying with a different method...",
                cmd, src)
        # Fallback: urllib is usually **much** slower than curl or wget and doesn't support timeout
        if timeout:
            raise IOError("Failed to download '%s'" % src)
        with ProgressIndicator("Downloading") as progress_bar:
            try:
                urllib.urlretrieve(src, dest, reporthook=progress_bar.update)
            except Exception as err:
                LOGGER.warning("urllib failed to download '%s': %s", src, err)
                raise IOError("Failed to download '%s'" % src)
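Hypothetical calls showing the three paths through download: a plain file:// copy, a curl/wget download with the default 8-second timeout, and timeout=0, which permits the slower urllib fallback if curl and wget both fail. The import path, URLs, and destinations are illustrative assumptions:

# Illustrative only; the import path, URLs, and paths are assumptions.
from taucmdr.util import download

download('file:///mnt/mirror/tau.tgz', '/opt/taucmdr/src/tau.tgz')                  # local copy
download('http://tau.uoregon.edu/tau.tgz', '/opt/taucmdr/src/tau.tgz')              # curl or wget, 8 s timeout
download('http://tau.uoregon.edu/tau.tgz', '/opt/taucmdr/src/tau.tgz', timeout=0)   # also allows urllib fallback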
Code example #6
def _postprocess_slog2(self):
    """Convert TAU trace files under self.prefix to a single SLOG2 file.

    Does nothing if tau.slog2 already exists.  Otherwise merges per-rank
    traces into tau.trc/tau.edf if needed, converts them to tau.slog2, then
    removes all .trc/.edf files while showing a progress bar.
    """
    slog2 = os.path.join(self.prefix, 'tau.slog2')
    if os.path.exists(slog2):
        return
    tau = TauInstallation.get_minimal()
    merged_trc = os.path.join(self.prefix, 'tau.trc')
    merged_edf = os.path.join(self.prefix, 'tau.edf')
    if not os.path.exists(merged_trc) or not os.path.exists(merged_edf):
        tau.merge_tau_trace_files(self.prefix)
    tau.tau_trace_to_slog2(merged_trc, merged_edf, slog2)
    trc_files = glob.glob(os.path.join(self.prefix, '*.trc'))
    edf_files = glob.glob(os.path.join(self.prefix, '*.edf'))
    count_trc_edf = len(trc_files) + len(edf_files)
    LOGGER.info('Cleaning up TAU trace files...')
    with ProgressIndicator("", total_size=count_trc_edf) as progress_bar:
        count = 0
        for path in trc_files + edf_files:
            os.remove(path)
            count += 1
            progress_bar.update(count)