Example #1
	def getlist(self, parser, url):
		"""
		Uses the supplied parser to get a list of urls.
		Takes a parser object, url, and filtering options.
		"""

		self.output.write('getlist(): fetching ' + url + '\n', 2)

		self.output.print_info('Downloading a list of mirrors...\n')

		# set up the ssl-fetch output map
		connector_output = {
			'info':self.output.write,
			'debug': self.output.write,
			'error': self.output.print_err,
			'kwargs-info': {'level': 2},
			'kwargs-debug': {'level':2},
			'kwargs-error': {'level':0},
			}

		fetcher = Connector(connector_output, self.proxies, USERAGENT)
		success, mirrorlist, timestamp = fetcher.fetch_content(url)
		parser.parse(mirrorlist)

		if (not mirrorlist) or len(parser.tuples()) == 0:
			self.output.print_err('Could not get mirror list. '
				'Check your internet connection.')

		self.output.write(' Got %d mirrors.\n' % len(parser.tuples()))

		return parser.tuples()
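
All nine examples follow the same ssl-fetch pattern: build an output map of logging callables (with optional per-callable kwargs such as a verbosity level), construct a Connector, and call fetch_content(), which returns a (success, content, timestamp) tuple. The minimal sketch below condenses that pattern; the sslfetch.connections import path is the one layman and gkeys use, while the USERAGENT string, the URL, and the print-based logging callables are placeholders rather than anything from the examples.

from sslfetch.connections import Connector

USERAGENT = 'example-fetcher/1.0'  # placeholder user agent string

# Output map: one callable per message class, plus extra kwargs per class.
connector_output = {
    'info': print,
    'debug': print,
    'error': print,
    'kwargs-info': {},
    'kwargs-debug': {},
    'kwargs-error': {},
}

fetcher = Connector(connector_output, None, USERAGENT)  # None = no proxies
# fetch_content() returns (success, content, timestamp)
success, content, timestamp = fetcher.fetch_content('https://example.org/mirrors.xml')
if success:
    print('Fetched %d bytes' % len(content))
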
Example #2
 def fetch_seeds(self, seeds):
     '''Fetch new seed files'''
     # TODO: add support for separated fetching
     # set up the ssl-fetch output map
     connector_output = {
         'info': self.logger.info,
         'error': self.logger.error,
         'kwargs-info': {},
         'kwargs-error': {},
     }
     http_check = re.compile(r'^(http|https)://')
     urls = []
     messages = []
     devseeds = self.config.get_key('developers.seeds')
     relseeds = self.config.get_key('release.seeds')
     if not http_check.match(devseeds) and not http_check.match(relseeds):
         urls.extend([self.config['seedurls']['developers.seeds'], self.config['seedurls']['release.seeds']])
     else:
         urls.extend([devseeds, relseeds])
     fetcher = Connector(connector_output, None, "Gentoo Keys")
     for url in urls:
         seed = url.rsplit('/', 1)[1]
         timestamp_prefix = seed[:3]
         timestamp_path = self.config['%s-timestamp' % timestamp_prefix]
         filename = self.config['%s-seedfile' % timestamp_prefix]
         file_exists = os.path.exists(filename)
         success, seeds, timestamp = fetcher.fetch_content(url, timestamp_path)
         if not timestamp and file_exists:
             messages.append("%s is already up to date." % seed)
         elif success:
             self.logger.debug("SeedHandler: fetch_seed; got results.")
             filename = filename + '.new'
             with open(filename, 'w') as seedfile:
                 seedfile.write(seeds)
             filename = self.config['%s-seedfile' % timestamp_prefix]
             old = filename + '.old'
             try:
                 self.logger.info("Backing up existing file...")
                 if os.path.exists(old):
                     self.logger.debug(
                         "SeedHandler: fetch_seeds; Removing 'old' seed file: %s"
                         % old)
                     os.unlink(old)
                 if os.path.exists(filename):
                     self.logger.debug(
                         "SeedHandler: fetch_seeds; Renaming current seed file to: "
                         "%s" % old)
                     os.rename(filename, old)
                 self.logger.debug("SeedHandler: fetch_seeds; Renaming '.new' seed file to %s"
                                   % filename)
                 os.rename(filename + '.new', filename)
                 with open(timestamp_path, 'w+') as timestampfile:
                     timestampfile.write(str(timestamp) + '\n')
                 messages.append("Successfully fetched %s." % seed)
             except IOError:
                 raise
         else:
             messages.append("Failed to fetch %s." % seed)
     return messages
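
The middle of fetch_seeds() performs a careful file rotation that is easy to miss among the config lookups: the fetched seed data is first written to a '.new' file, the current file is preserved as a '.old' backup, and only then is the '.new' file renamed into place and the timestamp recorded. A standalone sketch of just that step, assuming the same open/rename behaviour; the function name and paths are hypothetical:

import os

def replace_seedfile(filename, new_content, timestamp, timestamp_path):
    '''Swap new_content into filename, keeping the previous copy as .old.'''
    new = filename + '.new'
    old = filename + '.old'
    with open(new, 'w') as seedfile:
        seedfile.write(new_content)
    if os.path.exists(old):
        os.unlink(old)                 # drop the previous backup
    if os.path.exists(filename):
        os.rename(filename, old)       # keep the current file as the backup
    os.rename(new, filename)           # promote the freshly fetched copy
    with open(timestamp_path, 'w+') as timestampfile:
        timestampfile.write(str(timestamp) + '\n')
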
Example #3
    def _fetch(self, base, archive_url, dest_dir):
        '''
        Fetches overlay source archive.

        @params base: string of directory base for installed overlays.
        @params archive_url: string of URL where archive is located.
        @params dest_dir: string of destination of extracted archive.
        @rtype tuple (str of package location, bool to clean_archive)
        '''
        ext = self.get_extension()

        if 'file://' not in archive_url:
            # set up ssl-fetch output map
            connector_output = {
                'info': self.output.debug,
                'error': self.output.error,
                'kwargs-info': {
                    'level': 5
                },
                'kwargs-debug': {
                    'level': 2
                },
                'kwargs-error': {
                    'level': None
                },
            }

            fetcher = Connector(connector_output, self.proxies, USERAGENT)

            success, archive, timestamp = fetcher.fetch_content(archive_url)

            pkg = path([base, self.parent.name + ext])

            try:
                with fileopen(pkg, 'w+b') as out_file:
                    out_file.write(archive)

            except Exception as error:
                raise Exception('Failed to store archive package in '\
                                '%(pkg)s\nError was: %(error)s'\
                                % ({'pkg': pkg, 'error': error}))

        else:
            self.clean_archive = False
            pkg = archive_url.replace('file://', '')

        return pkg
Example #4
File: tar.py  Project: dwfreed/layman
    def _extract(self, base, tar_url, dest_dir):
        ext = '.tar.noidea'
        clean_tar = self.config['clean_tar']
        for i in [('tar.%s' % e) for e in ('bz2', 'gz', 'lzma', 'xz', 'Z')] \
                + ['tgz', 'tbz', 'taz', 'tlz', 'txz']:
            candidate_ext = '.%s' % i
            if self.src.endswith(candidate_ext):
                ext = candidate_ext
                break

        if 'file://' not in tar_url:
            # setup the ssl-fetch output map
            connector_output = {
                'info':  self.output.debug,
                'error': self.output.error,
                'kwargs-info': {'level': 2},
                'kwargs-error': {'level': None},
            }

            fetcher = Connector(connector_output, self.proxies, USERAGENT)

            success, tar, timestamp = fetcher.fetch_content(tar_url)

            pkg = path([base, self.parent.name + ext])

            try:
                with fileopen(pkg, 'w+b') as out_file:
                    out_file.write(tar)

            except Exception as error:
                raise Exception('Failed to store tar package in '
                                + pkg + '\nError was:' + str(error))
        else:
            clean_tar = False
            pkg = tar_url.replace('file://', '')

        # tar -v -x -f SOURCE -C TARGET
        args = ['-v', '-x', '-f', pkg, '-C', dest_dir]
        result = self.run_command(self.command(), args, cmd=self.type)

        if clean_tar:
            os.unlink(pkg)
        return result
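
The extension-detection loop at the top of _extract() builds its candidate list inline, which makes it dense to read. A hypothetical standalone version of the same logic, with the function name and sample filenames added only for illustration:

def detect_tar_extension(src):
    '''Return the archive extension of src, or ".tar.noidea" if unrecognised.'''
    candidates = ['tar.%s' % e for e in ('bz2', 'gz', 'lzma', 'xz', 'Z')] \
        + ['tgz', 'tbz', 'taz', 'tlz', 'txz']
    for i in candidates:
        if src.endswith('.%s' % i):
            return '.%s' % i
    return '.tar.noidea'

print(detect_tar_extension('overlay.tar.xz'))   # '.tar.xz'
print(detect_tar_extension('overlay.tbz'))      # '.tbz'
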
Example #5
File: archive.py  Project: dewey/layman
    def _fetch(self, base, archive_url, dest_dir):
        '''
        Fetches overlay source archive.

        @params base: string of directory base for installed overlays.
        @params archive_url: string of URL where archive is located.
        @params dest_dir: string of destination of extracted archive.
        @rtype tuple (str of package location, bool to clean_archive)
        '''
        ext = self.get_extension()
 
        if 'file://' not in archive_url:
            # set up ssl-fetch output map
            connector_output = {
                'info': self.output.debug,
                'error': self.output.error,
                'kwargs-info': {'level': 5},
                'kwargs-debug': {'level': 2},
                'kwargs-error': {'level': None},
            }

            fetcher = Connector(connector_output, self.proxies, USERAGENT)

            success, archive, timestamp = fetcher.fetch_content(archive_url)

            pkg = path([base, self.parent.name + ext])

            try:
                with fileopen(pkg, 'w+b') as out_file:
                    out_file.write(archive)

            except Exception as error:
                raise Exception('Failed to store archive package in '\
                                '%(pkg)s\nError was: %(error)s'\
                                % ({'pkg': pkg, 'error': error}))
        
        else:
            self.clean_archive = False
            pkg = archive_url.replace('file://', '')

        return pkg
Example #6
    def getlist(self, parser, url):
        """
        Uses the supplied parser to get a list of urls.
        Takes a parser object, url, and filtering options.
        """

        self.output.write('getlist(): fetching ' + url + '\n', 2)

        self.output.print_info('Downloading a list of mirrors...\n')

        # set up the ssl-fetch output map
        connector_output = {
            'info': self.output.write,
            'debug': self.output.write,
            'error': self.output.print_err,
            'kwargs-info': {
                'level': 2
            },
            'kwargs-debug': {
                'level': 2
            },
            'kwargs-error': {
                'level': 0
            },
        }

        fetcher = Connector(connector_output, self.proxies, USERAGENT)
        success, mirrorlist, timestamp = fetcher.fetch_content(url, climit=60)
        parser.parse(mirrorlist)

        if (not mirrorlist) or len(parser.tuples()) == 0:
            self.output.print_err('Could not get mirror list. '
                                  'Check your internet connection.')

        self.output.write(' Got %d mirrors.\n' % len(parser.tuples()))

        return parser.tuples()
Example #7
    def cache(self):
        '''
        Copy the remote overlay list to the local cache.

        >>> import tempfile
        >>> here = os.path.dirname(os.path.realpath(__file__))
        >>> tmpdir = tempfile.mkdtemp(prefix="laymantmp_")
        >>> cache = os.path.join(tmpdir, 'cache')
        >>> myoptions = {'overlays' :
        ...           ['file://' + here + '/tests/testfiles/global-overlays.xml'],
        ...           'cache' : cache,
        ...           'nocheck'    : 'yes',
        ...           'proxy' : None}
        >>> from layman.config import OptionConfig
        >>> config = OptionConfig(myoptions)
        >>> config.set_option('quietness', 3)
        >>> a = RemoteDB(config)
        >>> a.cache()
        (True, True)
        >>> b = fileopen(a.filepath(config['overlays'])+'.xml')
        >>> b.readlines()[24]
        '      A collection of ebuilds from Gunnar Wrobel [[email protected]].\\n'

        >>> b.close()
        >>> os.unlink(a.filepath(config['overlays'])+'.xml')

        >>> a.overlays.keys()
        ['wrobel', 'wrobel-stable']

        >>> import shutil
        >>> shutil.rmtree(tmpdir)
        '''
        has_updates = False
        self._create_storage(self.config['storage'])
        # succeeded reset when a failure is detected
        succeeded = True
        url_lists = [self.urls, self.detached_urls, self.signed_urls]
        need_gpg = [False, True, True]
        # setup the ssl-fetch output map
        connector_output = {
            'info':  self.output.debug,
            'error': self.output.error,
            'kwargs-info': {'level': 2},
            'kwargs-error': {'level': None},
        }
        fetcher = Connector(connector_output, self.proxies, USERAGENT)

        for index in range(0, 3):
            self.output.debug("RemoteDB.cache() index = %s" %str(index), 2)
            urls = url_lists[index]
            if need_gpg[index] and len(urls) and self.gpg is None:
                #initialize our gpg instance
                self.init_gpg()
            # main working loop
            for url in urls:
                sig = ''
                self.output.debug("RemoteDB.cache() url = %s is a tuple=%s"
                    %(str(url), str(isinstance(url, tuple))), 2)
                filepath, mpath, tpath, sig = self._paths(url)
                if 'file://' in url:
                    success, olist, timestamp = self._fetch_file(
                        url, mpath, tpath)
                elif sig:
                    success, olist, timestamp = fetcher.fetch_content(
                        url[0], tpath)
                else:
                    success, olist, timestamp = fetcher.fetch_content(
                        url, tpath)
                if not success:
                    #succeeded = False
                    continue

                self.output.debug("RemoteDB.cache() len(olist) = %s"
                    % str(len(olist)), 2)
                # GPG handling
                if need_gpg[index]:
                    olist, verified = self.verify_gpg(url, sig, olist)
                    if not verified:
                        self.output.debug("RemoteDB.cache() gpg returned "
                            "verified = %s" %str(verified), 2)
                        succeeded = False
                        filename = os.path.join(self.config['storage'],
                                                "Failed-to-verify-sig")
                        self.write_cache(olist, filename)
                        continue

                # Before we overwrite the old cache, check that the downloaded
                # file is intact and can be parsed
                if isinstance(url, tuple):
                    olist = self._check_download(olist, url[0])
                else:
                    olist = self._check_download(olist, url)

                # Ok, now we can overwrite the old cache
                has_updates = max(has_updates,
                    self.write_cache(olist, mpath, tpath, timestamp))

            self.output.debug("RemoteDB.cache() self.urls:  has_updates, "
                "succeeded %s, %s" % (str(has_updates), str(succeeded)), 4)
        return has_updates, succeeded
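
cache() walks the three URL lists in lock-step with a parallel list of GPG flags: a failed fetch is simply skipped, a failed signature check clears the succeeded flag, and has_updates accumulates whether any write_cache() call actually changed the cache. A stripped-down, hypothetical version of that control flow (fetch, verify and write stand in for fetcher.fetch_content(), verify_gpg() and write_cache()):

def cache_flow(url_lists, need_gpg, fetch, verify, write):
    '''Sketch of the cache() loop; returns (has_updates, succeeded).'''
    has_updates, succeeded = False, True
    for urls, gpg_needed in zip(url_lists, need_gpg):
        for url in urls:
            success, content, timestamp = fetch(url)
            if not success:
                continue                      # a failed fetch is skipped, not fatal
            if gpg_needed:
                content, verified = verify(url, content)
                if not verified:
                    succeeded = False         # remember the verification failure
                    continue
            has_updates = max(has_updates, write(content, timestamp))
    return has_updates, succeeded
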
Example #8
    def cache(self):
        '''
        Copy the remote overlay list to the local cache.

        @rtype tuple: reflects whether the cache has updates and whether or not
        the cache retrieval was successful.
        '''
        has_updates = False
        self._create_storage(self.config['storage'])
        # succeeded reset when a failure is detected
        succeeded = True
        url_lists = [self.urls, self.detached_urls, self.signed_urls]
        need_gpg = [False, True, True]
        # setup the ssl-fetch output map
        connector_output = {
            'info': self.output.info,
            'debug': self.output.debug,
            'error': self.output.error,
            'exception': self.output.error,
            # we want any warnings to be printed to the terminal
            # so assign it to output.info with a lower noise level
            'warning': self.output.info,
            'kwargs-exception': {
                'level': None
            },
            'kwargs-info': {
                'level': 5
            },
            'kwargs-debug': {
                'level': 2
            },
            'kwargs-error': {
                'level': None
            },
            'kwargs-warning': {
                'level': 2
            },
        }
        fetcher = Connector(connector_output, self.proxies, USERAGENT)

        for index in range(0, 3):
            self.output.debug("RemoteDB.cache() index = %s" % str(index), 2)
            urls = url_lists[index]
            if need_gpg[index] and len(urls) and self.gpg is None:
                #initialize our gpg instance
                self.init_gpg()
            # main working loop
            for url in urls:
                sig = ''
                self.output.debug(
                    "RemoteDB.cache() url = %s is a tuple=%s" %
                    (str(url), str(isinstance(url, tuple))), 2)
                filepath, mpath, tpath, sig = self._paths(url)
                if 'file://' in url:
                    success, olist, timestamp = self._fetch_file(
                        url, mpath, tpath)
                elif sig:
                    success, olist, timestamp = fetcher.fetch_content(
                        url[0], tpath, climit=60)
                else:
                    success, olist, timestamp = fetcher.fetch_content(
                        url, tpath, climit=60)
                if not success:
                    #succeeded = False
                    continue

                self.output.debug(
                    "RemoteDB.cache() len(olist) = %s" % str(len(olist)), 2)
                # GPG handling
                if need_gpg[index]:
                    olist, verified = self.verify_gpg(url, sig, olist)
                    if not verified:
                        self.output.debug(
                            "RemoteDB.cache() gpg returned "
                            "verified = %s" % str(verified), 2)
                        succeeded = False
                        filename = os.path.join(self.config['storage'],
                                                "Failed-to-verify-sig")
                        self.write_cache(olist, filename)
                        continue

                # Before we overwrite the old cache, check that the downloaded
                # file is intact and can be parsed
                if isinstance(url, tuple):
                    olist = self._check_download(olist, url[0])
                else:
                    olist = self._check_download(olist, url)

                # Ok, now we can overwrite the old cache
                has_updates = max(
                    has_updates,
                    self.write_cache(olist, mpath, tpath, timestamp))

            self.output.debug(
                "RemoteDB.cache() self.urls:  has_updates, "
                "succeeded %s, %s" % (str(has_updates), str(succeeded)), 4)
        return has_updates, succeeded
Example #9
File: remotedb.py  Project: wking/layman
    def cache(self):
        '''
        Copy the remote overlay list to the local cache.

        @rtype tuple: reflects whether the cache has updates and whether or not
        the cache retrieval was successful.
        '''
        has_updates = False
        self._create_storage(self.config['storage'])
        # succeeded reset when a failure is detected
        succeeded = True
        url_lists = [self.urls, self.detached_urls, self.signed_urls]
        need_gpg = [False, True, True]
        # setup the ssl-fetch output map
        connector_output = {
            'info':  self.output.debug,
            'error': self.output.error,
            'kwargs-info': {'level': 2},
            'kwargs-error': {'level': None},
        }
        fetcher = Connector(connector_output, self.proxies, USERAGENT)

        for index in range(0, 3):
            self.output.debug("RemoteDB.cache() index = %s" %str(index), 2)
            urls = url_lists[index]
            if need_gpg[index] and len(urls) and self.gpg is None:
                #initialize our gpg instance
                self.init_gpg()
            # main working loop
            for url in urls:
                sig = ''
                self.output.debug("RemoteDB.cache() url = %s is a tuple=%s"
                    %(str(url), str(isinstance(url, tuple))), 2)
                filepath, mpath, tpath, sig = self._paths(url)
                if 'file://' in url:
                    success, olist, timestamp = self._fetch_file(
                        url, mpath, tpath)
                elif sig:
                    success, olist, timestamp = fetcher.fetch_content(
                        url[0], tpath)
                else:
                    success, olist, timestamp = fetcher.fetch_content(
                        url, tpath)
                if not success:
                    #succeeded = False
                    continue

                self.output.debug("RemoteDB.cache() len(olist) = %s"
                    % str(len(olist)), 2)
                # GPG handling
                if need_gpg[index]:
                    olist, verified = self.verify_gpg(url, sig, olist)
                    if not verified:
                        self.output.debug("RemoteDB.cache() gpg returned "
                            "verified = %s" %str(verified), 2)
                        succeeded = False
                        filename = os.path.join(self.config['storage'],
                                                "Failed-to-verify-sig")
                        self.write_cache(olist, filename)
                        continue

                # Before we overwrite the old cache, check that the downloaded
                # file is intact and can be parsed
                if isinstance(url, tuple):
                    olist = self._check_download(olist, url[0])
                else:
                    olist = self._check_download(olist, url)

                # Ok, now we can overwrite the old cache
                has_updates = max(has_updates,
                    self.write_cache(olist, mpath, tpath, timestamp))

            self.output.debug("RemoteDB.cache() self.urls:  has_updates, "
                "succeeded %s, %s" % (str(has_updates), str(succeeded)), 4)
        return has_updates, succeeded