Code example #1
def __init__(self, cache_dir, log=None, skip_cache=False):
    # TODO: instead of storing N artifact packages, store M megabytes.
    CacheManager.__init__(self, cache_dir, 'fetch', MAX_CACHED_ARTIFACTS,
                          cache_callback=self.delete_file, log=log,
                          skip_cache=skip_cache)
    self._cache_dir = cache_dir
    size_limit = 1024 * 1024 * 1024  # 1 GiB in bytes.
    file_limit = 4  # But always keep at least 4 old artifacts around.
    persist_limit = PersistLimit(size_limit, file_limit)
    self._download_manager = DownloadManager(self._cache_dir,
                                             persist_limit=persist_limit)
    self._last_dl_update = -1
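
These excerpts omit the imports of the surrounding file. As a rough, unverified header for reading them in context (the dlmanager and cachetools origins are assumptions inferred from the names used; CacheManager, MAX_CACHED_ARTIFACTS and PROCESSED_SUFFIX are expected to be defined elsewhere in artifacts.py):

import hashlib
import logging
import operator
import os

import mozpack.path as mozpath                        # gecko path utilities
import mozversion                                     # used by code example #5
from cachetools import cachedmethod                   # assumption: source of @cachedmethod
from dlmanager import DownloadManager, PersistLimit   # assumption: source of these classes
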
Code example #2
File: artifacts.py  Project: LongyunZhang/gecko-dev
def __init__(self, cache_dir, log=None):
    # TODO: instead of storing N artifact packages, store M megabytes.
    CacheManager.__init__(self, cache_dir, 'fetch', MAX_CACHED_ARTIFACTS,
                          cache_callback=self.delete_file, log=log)
    self._cache_dir = cache_dir
    size_limit = 1024 * 1024 * 1024  # 1 GiB in bytes.
    file_limit = 4  # But always keep at least 4 old artifacts around.
    persist_limit = PersistLimit(size_limit, file_limit)
    self._download_manager = DownloadManager(self._cache_dir,
                                             persist_limit=persist_limit)
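
Together, size_limit and file_limit describe an eviction policy: drop the oldest downloads once the cache passes 1 GiB, but never keep fewer than 4 files. A minimal sketch of that policy, assuming a list of paths ordered oldest first (an illustration only, not dlmanager's actual PersistLimit code):

import os

def apply_persist_limit(paths, size_limit, file_limit):
    # paths: cached artifact files, oldest first.
    total = sum(os.path.getsize(p) for p in paths)
    while total > size_limit and len(paths) > file_limit:
        oldest = paths.pop(0)
        total -= os.path.getsize(oldest)
        os.remove(oldest)  # purge, much as ArtifactCache.delete_file does
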
Code example #3
class ArtifactCache(CacheManager):
    '''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''
    def __init__(self, cache_dir, log=None, skip_cache=False):
        # TODO: instead of storing N artifact packages, store M megabytes.
        CacheManager.__init__(self,
                              cache_dir,
                              'fetch',
                              MAX_CACHED_ARTIFACTS,
                              cache_callback=self.delete_file,
                              log=log,
                              skip_cache=skip_cache)
        self._cache_dir = cache_dir
        size_limit = 1024 * 1024 * 1024  # 1 GiB in bytes.
        file_limit = 4  # But always keep at least 4 old artifacts around.
        persist_limit = PersistLimit(size_limit, file_limit)
        self._download_manager = DownloadManager(self._cache_dir,
                                                 persist_limit=persist_limit)
        self._last_dl_update = -1

    def delete_file(self, key, value):
        try:
            os.remove(value)
            self.log(logging.INFO, 'artifact', {'filename': value},
                     'Purged artifact {filename}')
        except (OSError, IOError):
            pass

        try:
            os.remove(value + PROCESSED_SUFFIX)
            self.log(logging.INFO, 'artifact',
                     {'filename': value + PROCESSED_SUFFIX},
                     'Purged processed artifact {filename}')
        except (OSError, IOError):
            pass

    @cachedmethod(operator.attrgetter('_cache'))
    def fetch(self, url, force=False):
        # We download to a temporary name like HASH[:16]-basename to
        # differentiate among URLs with the same basenames.  We used to then
        # extract the build ID from the downloaded artifact and use it to make a
        # human readable unique name, but extracting build IDs is time consuming
        # (especially on Mac OS X, where we must mount a large DMG file).
        hash = hashlib.sha256(url).hexdigest()[:16]
        fname = hash + '-' + os.path.basename(url)

        path = os.path.abspath(mozpath.join(self._cache_dir, fname))
        if self._skip_cache and os.path.exists(path):
            self.log(
                logging.DEBUG, 'artifact', {'path': path},
                'Skipping cache: removing cached downloaded artifact {path}')
            os.remove(path)

        self.log(logging.INFO, 'artifact', {'path': path},
                 'Downloading to temporary location {path}')
        try:
            dl = self._download_manager.download(url, fname)

            def download_progress(dl, bytes_so_far, total_size):
                percent = (float(bytes_so_far) / total_size) * 100
                now = int(percent / 5)
                if now == self._last_dl_update:
                    return
                self._last_dl_update = now
                self.log(
                    logging.INFO, 'artifact', {
                        'bytes_so_far': bytes_so_far,
                        'total_size': total_size,
                        'percent': percent
                    }, 'Downloading... {percent:02.1f} %')

            if dl:
                dl.set_progress(download_progress)
                dl.wait()
            self.log(logging.INFO, 'artifact', {
                'path':
                os.path.abspath(mozpath.join(self._cache_dir, fname))
            }, 'Downloaded artifact to {path}')
            return os.path.abspath(mozpath.join(self._cache_dir, fname))
        finally:
            # Cancel any background downloads in progress.
            self._download_manager.cancel()

    def print_last_item(self, args, sorted_kwargs, result):
        url, = args
        self.log(logging.INFO, 'artifact', {'url': url},
                 'Last installed binaries from url {url}')
        self.log(logging.INFO, 'artifact', {'filename': result},
                 'Last installed binaries from local file {filename}')
        self.log(
            logging.INFO, 'artifact', {'filename': result + PROCESSED_SUFFIX},
            'Last installed binaries from local processed file {filename}')
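
For orientation, a hypothetical use of this class; the cache directory and URL below are placeholders, and the @cachedmethod decorator means a repeated fetch of the same URL returns the cached local path without re-downloading:

cache = ArtifactCache('/tmp/artifact-cache', log=None, skip_cache=False)
local = cache.fetch('https://example.com/artifacts/target.tar.bz2')        # placeholder URL
local_again = cache.fetch('https://example.com/artifacts/target.tar.bz2')  # served from cache
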
Code example #4
File: artifacts.py  Project: LongyunZhang/gecko-dev
class ArtifactCache(CacheManager):
    '''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''

    def __init__(self, cache_dir, log=None):
        # TODO: instead of storing N artifact packages, store M megabytes.
        CacheManager.__init__(self, cache_dir, 'fetch', MAX_CACHED_ARTIFACTS,
                              cache_callback=self.delete_file, log=log)
        self._cache_dir = cache_dir
        size_limit = 1024 * 1024 * 1024  # 1 GiB in bytes.
        file_limit = 4  # But always keep at least 4 old artifacts around.
        persist_limit = PersistLimit(size_limit, file_limit)
        self._download_manager = DownloadManager(self._cache_dir,
                                                 persist_limit=persist_limit)

    def delete_file(self, key, value):
        try:
            os.remove(value)
            self.log(logging.INFO, 'artifact',
                {'filename': value},
                'Purged artifact {filename}')
        except (OSError, IOError):
            pass

        try:
            os.remove(value + PROCESSED_SUFFIX)
            self.log(logging.INFO, 'artifact',
                {'filename': value + PROCESSED_SUFFIX},
                'Purged processed artifact {filename}')
        except (OSError, IOError):
            pass

    @cachedmethod(operator.attrgetter('_cache'))
    def fetch(self, url, force=False):
        # We download to a temporary name like HASH[:16]-basename to
        # differentiate among URLs with the same basenames.  We used to then
        # extract the build ID from the downloaded artifact and use it to make a
        # human readable unique name, but extracting build IDs is time consuming
        # (especially on Mac OS X, where we must mount a large DMG file).
        hash = hashlib.sha256(url).hexdigest()[:16]
        fname = hash + '-' + os.path.basename(url)
        self.log(logging.INFO, 'artifact',
            {'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
            'Downloading to temporary location {path}')
        try:
            dl = self._download_manager.download(url, fname)
            if dl:
                dl.wait()
            self.log(logging.INFO, 'artifact',
                {'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
                'Downloaded artifact to {path}')
            return os.path.abspath(mozpath.join(self._cache_dir, fname))
        finally:
            # Cancel any background downloads in progress.
            self._download_manager.cancel()

    def print_last_item(self, args, sorted_kwargs, result):
        url, = args
        self.log(logging.INFO, 'artifact',
            {'url': url},
            'Last installed binaries from url {url}')
        self.log(logging.INFO, 'artifact',
            {'filename': result},
            'Last installed binaries from local file {filename}')
        self.log(logging.INFO, 'artifact',
            {'filename': result + PROCESSED_SUFFIX},
            'Last installed binaries from local processed file {filename}')
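
The HASH[:16]-basename naming that fetch uses is easy to reproduce standalone. Note these snippets predate Python 3; there, hashlib.sha256 needs bytes, so the URL must be encoded first:

import hashlib
import os

url = 'https://example.com/artifacts/target.tar.bz2'  # placeholder
fname = hashlib.sha256(url.encode('utf-8')).hexdigest()[:16] \
    + '-' + os.path.basename(url)
# '<16 hex digits>-target.tar.bz2': URLs sharing a basename no longer
# collide in the cache directory.
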
Code example #5
File: artifacts.py  Project: hansman/gecko-dev
class ArtifactCache(CacheManager):
    '''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''
    def __init__(self, cache_dir, log=None):
        # TODO: instead of storing N artifact packages, store M megabytes.
        CacheManager.__init__(self,
                              cache_dir,
                              'fetch',
                              MAX_CACHED_ARTIFACTS,
                              cache_callback=self.delete_file,
                              log=log)
        self._cache_dir = cache_dir
        size_limit = 1024 * 1024 * 1024  # 1 GiB in bytes.
        file_limit = 4  # But always keep at least 4 old artifacts around.
        persist_limit = PersistLimit(size_limit, file_limit)
        self._download_manager = DownloadManager(self._cache_dir,
                                                 persist_limit=persist_limit)

    def delete_file(self, key, value):
        try:
            os.remove(value)
            self.log(logging.INFO, 'artifact', {'filename': value},
                     'Purged artifact {filename}')
        except (OSError, IOError):
            pass

    @cachedmethod(operator.attrgetter('_cache'))
    def fetch(self, url, force=False):
        # We download to a temporary name like HASH[:16]-basename to
        # differentiate among URLs with the same basenames.  We then extract the
        # build ID from the downloaded artifact and use it to make a human
        # readable unique name.
        hash = hashlib.sha256(url).hexdigest()[:16]
        fname = hash + '-' + os.path.basename(url)
        self.log(
            logging.INFO, 'artifact',
            {'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
            'Downloading to temporary location {path}')
        try:
            dl = self._download_manager.download(url, fname)
            if dl:
                dl.wait()
            # Version information is extracted from {application,platform}.ini
            # in the package itself.
            info = mozversion.get_version(mozpath.join(self._cache_dir, fname))
            buildid = info['platform_buildid'] or info['application_buildid']
            if not buildid:
                raise ValueError(
                    'Artifact for {url} existed, but no build ID could be extracted!'
                    .format(url=url))
            newname = buildid + '-' + os.path.basename(url)
            os.rename(mozpath.join(self._cache_dir, fname),
                      mozpath.join(self._cache_dir, newname))
            self.log(logging.INFO, 'artifact', {
                'path':
                os.path.abspath(mozpath.join(self._cache_dir, newname))
            }, 'Downloaded artifact to {path}')
            return os.path.abspath(mozpath.join(self._cache_dir, newname))
        finally:
            # Cancel any background downloads in progress.
            self._download_manager.cancel()

    def print_last_item(self, args, sorted_kwargs, result):
        url, = args
        self.log(logging.INFO, 'artifact', {'url': url},
                 'Last installed binaries from url {url}')
        self.log(logging.INFO, 'artifact', {'filename': result},
                 'Last installed binaries from local file {filename}')
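
The @cachedmethod(operator.attrgetter('_cache')) decorator memoizes fetch per URL in self._cache, and cache_callback=self.delete_file suggests the cache invokes a callback as entries are evicted. A self-contained sketch of that pattern, built on cachetools (an assumption; the project may use a different cache library):

import operator
from cachetools import LRUCache, cachedmethod

class CallbackLRUCache(LRUCache):
    # LRU cache that reports evicted entries, like cache_callback above.
    def __init__(self, maxsize, callback):
        LRUCache.__init__(self, maxsize)
        self._evict = callback

    def popitem(self):
        key, value = LRUCache.popitem(self)
        self._evict(key, value)  # e.g. ArtifactCache.delete_file
        return key, value

class Demo(object):
    def __init__(self):
        self._cache = CallbackLRUCache(2, self.on_evict)

    def on_evict(self, key, value):
        print('evicted %r -> %r' % (key, value))

    @cachedmethod(operator.attrgetter('_cache'))
    def fetch(self, url):
        return 'local-path-for-' + url
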
Code example #6
File: artifacts.py  Project: carriercomm/gecko-dev
class ArtifactCache(CacheManager):
    """Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk."""

    def __init__(self, cache_dir, log=None, skip_cache=False):
        # TODO: instead of storing N artifact packages, store M megabytes.
        CacheManager.__init__(
            self,
            cache_dir,
            "fetch",
            MAX_CACHED_ARTIFACTS,
            cache_callback=self.delete_file,
            log=log,
            skip_cache=skip_cache,
        )
        self._cache_dir = cache_dir
        size_limit = 1024 * 1024 * 1024  # 1 GiB in bytes.
        file_limit = 4  # But always keep at least 4 old artifacts around.
        persist_limit = PersistLimit(size_limit, file_limit)
        self._download_manager = DownloadManager(self._cache_dir, persist_limit=persist_limit)
        self._last_dl_update = -1

    def delete_file(self, key, value):
        try:
            os.remove(value)
            self.log(logging.INFO, "artifact", {"filename": value}, "Purged artifact {filename}")
        except (OSError, IOError):
            pass

        try:
            os.remove(value + PROCESSED_SUFFIX)
            self.log(
                logging.INFO, "artifact", {"filename": value + PROCESSED_SUFFIX}, "Purged processed artifact {filename}"
            )
        except (OSError, IOError):
            pass

    @cachedmethod(operator.attrgetter("_cache"))
    def fetch(self, url, force=False):
        # We download to a temporary name like HASH[:16]-basename to
        # differentiate among URLs with the same basenames.  We used to then
        # extract the build ID from the downloaded artifact and use it to make a
        # human readable unique name, but extracting build IDs is time consuming
        # (especially on Mac OS X, where we must mount a large DMG file).
        hash = hashlib.sha256(url).hexdigest()[:16]
        fname = hash + "-" + os.path.basename(url)

        path = os.path.abspath(mozpath.join(self._cache_dir, fname))
        if self._skip_cache and os.path.exists(path):
            self.log(
                logging.DEBUG, "artifact", {"path": path}, "Skipping cache: removing cached downloaded artifact {path}"
            )
            os.remove(path)

        self.log(logging.INFO, "artifact", {"path": path}, "Downloading to temporary location {path}")
        try:
            dl = self._download_manager.download(url, fname)

            def download_progress(dl, bytes_so_far, total_size):
                percent = (float(bytes_so_far) / total_size) * 100
                now = int(percent / 5)
                if now == self._last_dl_update:
                    return
                self._last_dl_update = now
                self.log(
                    logging.INFO,
                    "artifact",
                    {"bytes_so_far": bytes_so_far, "total_size": total_size, "percent": percent},
                    "Downloading... {percent:02.1f} %",
                )

            if dl:
                dl.set_progress(download_progress)
                dl.wait()
            self.log(
                logging.INFO,
                "artifact",
                {"path": os.path.abspath(mozpath.join(self._cache_dir, fname))},
                "Downloaded artifact to {path}",
            )
            return os.path.abspath(mozpath.join(self._cache_dir, fname))
        finally:
            # Cancel any background downloads in progress.
            self._download_manager.cancel()

    def print_last_item(self, args, sorted_kwargs, result):
        url, = args
        self.log(logging.INFO, "artifact", {"url": url}, "Last installed binaries from url {url}")
        self.log(logging.INFO, "artifact", {"filename": result}, "Last installed binaries from local file {filename}")
        self.log(
            logging.INFO,
            "artifact",
            {"filename": result + PROCESSED_SUFFIX},
            "Last installed binaries from local processed file {filename}",
        )