def getlist(self, parser, url):
		"""
		Uses the supplied parser to get a list of urls.
		Takes a parser object, url, and filtering options.
		"""

		self.output.write('getlist(): fetching ' + url + '\n', 2)

		self.output.print_info('Downloading a list of mirrors...\n')

		# set up the ssl-fetch output map
		connector_output = {
			'info': self.output.write,
			'debug': self.output.write,
			'error': self.output.print_err,
			'kwargs-info': {'level': 2},
			'kwargs-debug': {'level': 2},
			'kwargs-error': {'level': 0},
			}

		fetcher = Connector(connector_output, self.proxies, USERAGENT)
		success, mirrorlist, timestamp = fetcher.fetch_content(url)
		parser.parse(mirrorlist)

		if (not mirrorlist) or len(parser.tuples()) == 0:
			self.output.print_err('Could not get mirror list. '
				'Check your internet connection.')

		self.output.write(' Got %d mirrors.\n' % len(parser.tuples()))

		return parser.tuples()
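
A recurring pattern in these examples is the connector_output map: each
log-level name ('info', 'debug', 'error', ...) is paired with a callable,
and a matching 'kwargs-<level>' entry holds extra keyword arguments for that
callable. A minimal sketch of how such a map could be dispatched (the emit()
helper below is hypothetical; only the map layout comes from the code above):

def emit(output_map, level, message):
    """Call the handler registered for `level` with its stored kwargs."""
    handler = output_map.get(level)
    if handler is None:
        return
    kwargs = output_map.get('kwargs-%s' % level, {})
    handler(message, **kwargs)

# With the map above, emit(connector_output, 'info', 'fetching...')
# would presumably resolve to self.output.write('fetching...', level=2).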
Example #2
 def fetch_seeds(self, seeds):
     '''Fetch new seed files'''
     # TODO: add support for separated fetching
      # set up the ssl-fetch output map
     connector_output = {
         'info': self.logger.info,
         'error': self.logger.error,
         'kwargs-info': {},
         'kwargs-error': {},
     }
     http_check = re.compile(r'^(http|https)://')
     urls = []
     messages = []
     devseeds = self.config.get_key('developers.seeds')
     relseeds = self.config.get_key('release.seeds')
     if not http_check.match(devseeds) and not http_check.match(relseeds):
          urls.extend([self.config['seedurls']['developers.seeds'],
                       self.config['seedurls']['release.seeds']])
     else:
         urls.extend([devseeds, relseeds])
     fetcher = Connector(connector_output, None, "Gentoo Keys")
     for url in urls:
         seed = url.rsplit('/', 1)[1]
          timestamp_prefix = seed[:3]  # 'dev' or 'rel', from the seed file name
         timestamp_path = self.config['%s-timestamp' % timestamp_prefix]
         filename = self.config['%s-seedfile' % timestamp_prefix]
         file_exists = os.path.exists(filename)
         success, seeds, timestamp = fetcher.fetch_content(url, timestamp_path)
         if not timestamp and file_exists:
             messages.append("%s is already up to date." % seed)
         elif success:
             self.logger.debug("SeedHandler: fetch_seed; got results.")
             filename = filename + '.new'
             with open(filename, 'w') as seedfile:
                 seedfile.write(seeds)
             filename = self.config['%s-seedfile' % timestamp_prefix]
             old = filename + '.old'
             try:
                 self.logger.info("Backing up existing file...")
                 if os.path.exists(old):
                     self.logger.debug(
                         "SeedHandler: fetch_seeds; Removing 'old' seed file: %s"
                         % old)
                     os.unlink(old)
                 if os.path.exists(filename):
                     self.logger.debug(
                         "SeedHandler: fetch_seeds; Renaming current seed file to: "
                         "%s" % old)
                     os.rename(filename, old)
                 self.logger.debug("SeedHandler: fetch_seeds; Renaming '.new' seed file to %s"
                                   % filename)
                 os.rename(filename + '.new', filename)
                 with open(timestamp_path, 'w+') as timestampfile:
                     timestampfile.write(str(timestamp) + '\n')
                 messages.append("Successfully fetched %s." % seed)
             except IOError:
                 raise
         else:
             messages.append("Failed to fetch %s." % seed)
     return messages
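
The backup dance above (write the download to '<file>.new', drop any stale
'<file>.old', rotate the current file to '.old', then promote '.new') can be
read as one helper. A sketch assuming POSIX rename semantics; the name
rotate_seedfile is hypothetical, not part of gkeys:

import os

def rotate_seedfile(filename):
    '''Promote filename.new to filename, keeping one .old backup.'''
    old = filename + '.old'
    if os.path.exists(old):
        os.unlink(old)                       # drop the stale backup
    if os.path.exists(filename):
        os.rename(filename, old)             # current file becomes the backup
    os.rename(filename + '.new', filename)   # promote the fresh download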
Example #3
class Fetch(object):

    def __init__(self, logger):
        self.logger = logger
        connector_output = {
             'info': self.logger.info,
             'debug': self.logger.debug,
             'error': self.logger.error,
             'exception': self.logger.exception,
             # we want any warnings to be printed to the terminal
             # so assign it to logging.error
             'warning': self.logger.error,
             'kwargs-info': {},
             'kwargs-debug': {},
             'kwargs-error': {},
             'kwargs-exception': {},
             'kwargs-warning': {},
        }
        self.fetcher = Connector(connector_output, None, "Gentoo Keys")
        self.sig_path = None

    def fetch_url(self, url, filepath, signature=True, timestamp=None, timestamp_path=None, climit=60):
        if not timestamp_path:
            timestamp_path = filepath + ".timestamp"
        messages = []
        self.logger.debug(
            _unicode("FETCH: fetching %s signed file ") % filepath)
        self.logger.debug(
            _unicode("FETCH: timestamp path: %s") % timestamp_path)
        success, signedfile, timestamp = self.fetcher.fetch_file(
            url, filepath, timestamp_path, climit=climit, timestamp=timestamp)
        if timestamp == '':
            self.logger.debug("Fetch.fetch_url; file not downloaded")
            return (False, messages)
        elif not success:
            messages.append(_unicode("File %s cannot be retrieved.") % filepath)
        elif '.' + url.rsplit('.', 1)[1] not in EXTENSIONS:
            self.logger.debug("File %s successfully retrieved.", filepath)
            if signature:
                success_fetch = False
                for ext in EXTENSIONS:
                    sig_path = filepath + ext
                    signature = url + ext
                    self.logger.debug(
                        _unicode("FETCH: fetching %s signature ")
                        % signature)
                    success_fetch, sig, timestamp = self.fetcher.fetch_file(signature, sig_path)
                    if success_fetch:
                        self.sig_path = sig_path
                        break
                    else:
                        signature = None
        return (success, messages)

    def verify_cycle(self, tpath, climit=60):
        return self.fetcher.verify_cycle(tpath, climit=climit)
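
A possible usage sketch for the class above, assuming Connector, EXTENSIONS,
and _unicode are importable from the same module; the URL and file paths are
placeholders:

import logging

logging.basicConfig(level=logging.DEBUG)
fetch = Fetch(logging.getLogger('gkeys'))
# fetch_url() downloads the signed file, then probes the EXTENSIONS list
# for a detached signature; a '<filepath>.timestamp' file suppresses
# re-downloads when the remote copy has not changed.
success, messages = fetch.fetch_url(
    'https://example.org/keys/seeds.gpg', '/tmp/seeds.gpg')
if success:
    print(fetch.sig_path)  # signature path found during probing, if any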
Example #4
    def _fetch(self, base, archive_url, dest_dir):
        '''
        Fetches overlay source archive.

        @param base: string of directory base for installed overlays.
        @param archive_url: string of URL where archive is located.
        @param dest_dir: string of destination of extracted archive.
        @rtype str: location of the fetched archive package.
        '''
        ext = self.get_extension()

        if 'file://' not in archive_url:
            # set up ssl-fetch output map
            connector_output = {
                'info': self.output.debug,
                'error': self.output.error,
                'kwargs-info': {
                    'level': 5
                },
                'kwargs-debug': {
                    'level': 2
                },
                'kwargs-error': {
                    'level': None
                },
            }

            fetcher = Connector(connector_output, self.proxies, USERAGENT)

            success, archive, timestamp = fetcher.fetch_content(archive_url)

            pkg = path([base, self.parent.name + ext])

            try:
                with fileopen(pkg, 'w+b') as out_file:
                    out_file.write(archive)

            except Exception as error:
                raise Exception('Failed to store archive package in '
                                '%(pkg)s\nError was: %(error)s'
                                % {'pkg': pkg, 'error': error})

        else:
            self.clean_archive = False
            pkg = archive_url.replace('file://', '')

        return pkg
Example #5
    def _extract(self, base, tar_url, dest_dir):
        ext = '.tar.noidea'  # fallback used when no known archive extension matches below
        clean_tar = self.config['clean_tar']
        for i in [('tar.%s' % e) for e in ('bz2', 'gz', 'lzma', 'xz', 'Z')] \
                + ['tgz', 'tbz', 'taz', 'tlz', 'txz']:
            candidate_ext = '.%s' % i
            if self.src.endswith(candidate_ext):
                ext = candidate_ext
                break

        if 'file://' not in tar_url:
            # set up the ssl-fetch output map
            connector_output = {
                'info': self.output.debug,
                'error': self.output.error,
                'kwargs-info': {'level': 2},
                'kwargs-error': {'level': None},
            }

            fetcher = Connector(connector_output, self.proxies, USERAGENT)

            success, tar, timestamp = fetcher.fetch_content(tar_url)

            pkg = path([base, self.parent.name + ext])

            try:
                with fileopen(pkg, 'w+b') as out_file:
                    out_file.write(tar)

            except Exception as error:
                raise Exception('Failed to store tar package in '
                                + pkg + '\nError was: ' + str(error))
        else:
            clean_tar = False
            pkg = tar_url.replace('file://', '')

        # tar -v -x -f SOURCE -C TARGET
        args = ['-v', '-x', '-f', pkg, '-C', dest_dir]
        result = self.run_command(self.command(), args, cmd=self.type)

        if clean_tar:
            os.unlink(pkg)
        return result
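
For reference, the run_command() call above amounts to a plain tar
invocation; a rough standard-library equivalent (not how layman actually
spawns the command):

import subprocess

# tar -v -x -f <pkg> -C <dest_dir>; check=True raises CalledProcessError
# on a non-zero exit status.
subprocess.run(['tar', '-v', '-x', '-f', pkg, '-C', dest_dir], check=True)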
Example #6
    def _fetch(self, base, archive_url, dest_dir):
        '''
        Fetches overlay source archive.

        @param base: string of directory base for installed overlays.
        @param archive_url: string of URL where archive is located.
        @param dest_dir: string of destination of extracted archive.
        @rtype str: location of the fetched archive package.
        '''
        ext = self.get_extension()
 
        if 'file://' not in archive_url:
            # set up ssl-fetch output map
            connector_output = {
                'info': self.output.debug,
                'error': self.output.error,
                'kwargs-info': {'level': 5},
                'kwargs-debug': {'level': 2},
                'kwargs-error': {'level': None},
            }

            fetcher = Connector(connector_output, self.proxies, USERAGENT)

            success, archive, timestamp = fetcher.fetch_content(archive_url)

            pkg = path([base, self.parent.name + ext])

            try:
                with fileopen(pkg, 'w+b') as out_file:
                    out_file.write(archive)

            except Exception as error:
                raise Exception('Failed to store archive package in '
                                '%(pkg)s\nError was: %(error)s'
                                % {'pkg': pkg, 'error': error})
        
        else:
            self.clean_archive = False
            pkg = archive_url.replace('file://', '')

        return pkg
Example #7
    def getlist(self, parser, url):
        """
		Uses the supplied parser to get a list of urls.
		Takes a parser object, url, and filering options.
		"""

        self.output.write('getlist(): fetching ' + url + '\n', 2)

        self.output.print_info('Downloading a list of mirrors...\n')

        # set up the ssl-fetch output map
        connector_output = {
            'info': self.output.write,
            'debug': self.output.write,
            'error': self.output.print_err,
            'kwargs-info': {
                'level': 2
            },
            'kwargs-debug': {
                'level': 2
            },
            'kwargs-error': {
                'level': 0
            },
        }

        fetcher = Connector(connector_output, self.proxies, USERAGENT)
        success, mirrorlist, timestamp = fetcher.fetch_content(url, climit=60)
        parser.parse(mirrorlist)

        if (not mirrorlist) or len(parser.tuples()) == 0:
            self.output.print_err('Could not get mirror list. '
                                  'Check your internet connection.')

        self.output.write(' Got %d mirrors.\n' % len(parser.tuples()))

        return parser.tuples()
Example #8
 def __init__(self, logger):
     self.logger = logger
     connector_output = {
          'info': self.logger.info,
          'debug': self.logger.debug,
          'error': self.logger.error,
          'exception': self.logger.exception,
          # we want any warnings to be printed to the terminal
          # so assign it to logging.error
          'warning': self.logger.error,
          'kwargs-info': {},
          'kwargs-debug': {},
          'kwargs-error': {},
          'kwargs-exception': {},
          'kwargs-warning': {},
     }
     self.fetcher = Connector(connector_output, None, "Gentoo Keys")
     self.sig_path = None
Example #9
    def cache(self):
        '''
        Copy the remote overlay list to the local cache.

        >>> import tempfile
        >>> here = os.path.dirname(os.path.realpath(__file__))
        >>> tmpdir = tempfile.mkdtemp(prefix="laymantmp_")
        >>> cache = os.path.join(tmpdir, 'cache')
        >>> myoptions = {'overlays' :
        ...           ['file://' + here + '/tests/testfiles/global-overlays.xml'],
        ...           'cache' : cache,
        ...           'nocheck'    : 'yes',
        ...           'proxy' : None}
        >>> from layman.config import OptionConfig
        >>> config = OptionConfig(myoptions)
        >>> config.set_option('quietness', 3)
        >>> a = RemoteDB(config)
        >>> a.cache()
        (True, True)
        >>> b = fileopen(a.filepath(config['overlays'])+'.xml')
        >>> b.readlines()[24]
        '      A collection of ebuilds from Gunnar Wrobel [wrobel@gentoo.org].\\n'

        >>> b.close()
        >>> os.unlink(a.filepath(config['overlays'])+'.xml')

        >>> a.overlays.keys()
        ['wrobel', 'wrobel-stable']

        >>> import shutil
        >>> shutil.rmtree(tmpdir)
        '''
        has_updates = False
        self._create_storage(self.config['storage'])
        # succeeded is reset to False when a failure is detected
        succeeded = True
        url_lists = [self.urls, self.detached_urls, self.signed_urls]
        need_gpg = [False, True, True]
        # set up the ssl-fetch output map
        connector_output = {
            'info': self.output.debug,
            'error': self.output.error,
            'kwargs-info': {'level': 2},
            'kwargs-error': {'level': None},
        }
        fetcher = Connector(connector_output, self.proxies, USERAGENT)

        for index in range(0, 3):
            self.output.debug("RemoteDB.cache() index = %s" %str(index), 2)
            urls = url_lists[index]
            if need_gpg[index] and len(urls) and self.gpg is None:
                #initialize our gpg instance
                self.init_gpg()
            # main working loop
            for url in urls:
                sig = ''
                self.output.debug("RemoteDB.cache() url = %s is a tuple=%s"
                    %(str(url), str(isinstance(url, tuple))), 2)
                filepath, mpath, tpath, sig = self._paths(url)
                if 'file://' in url:
                    success, olist, timestamp = self._fetch_file(
                        url, mpath, tpath)
                elif sig:
                    success, olist, timestamp = fetcher.fetch_content(
                        url[0], tpath)
                else:
                    success, olist, timestamp = fetcher.fetch_content(
                        url, tpath)
                if not success:
                    #succeeded = False
                    continue

                self.output.debug("RemoteDB.cache() len(olist) = %s"
                    % str(len(olist)), 2)
                # GPG handling
                if need_gpg[index]:
                    olist, verified = self.verify_gpg(url, sig, olist)
                    if not verified:
                        self.output.debug("RemoteDB.cache() gpg returned "
                            "verified = %s" %str(verified), 2)
                        succeeded = False
                        filename = os.path.join(self.config['storage'],
                                                "Failed-to-verify-sig")
                        self.write_cache(olist, filename)
                        continue

                # Before we overwrite the old cache, check that the downloaded
                # file is intact and can be parsed
                if isinstance(url, tuple):
                    olist = self._check_download(olist, url[0])
                else:
                    olist = self._check_download(olist, url)

                # Ok, now we can overwrite the old cache
                has_updates = max(has_updates,
                    self.write_cache(olist, mpath, tpath, timestamp))

            self.output.debug("RemoteDB.cache() self.urls:  has_updates, "
                "succeeded %s, %s" % (str(has_updates), str(succeeded)), 4)
        return has_updates, succeeded
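
Two notes on the loop above: max() over booleans is just a running logical
OR, so has_updates ends up True if any write_cache() call reported an update;
and since url_lists and need_gpg run in lockstep, the index loop could
equally be written with zip(). A behavior-preserving sketch of that skeleton:

for urls, needs_gpg in zip(url_lists, need_gpg):
    if needs_gpg and urls and self.gpg is None:
        self.init_gpg()
    for url in urls:
        ...  # fetch, optionally verify_gpg(), then write_cache() as above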
Example #10
    def cache(self):
        '''
        Copy the remote overlay list to the local cache.

        @rtype tuple: whether the cache has updates, and whether the cache
        retrieval was successful.
        '''
        has_updates = False
        self._create_storage(self.config['storage'])
        # succeeded is reset to False when a failure is detected
        succeeded = True
        url_lists = [self.urls, self.detached_urls, self.signed_urls]
        need_gpg = [False, True, True]
        # setup the ssl-fetch output map
        connector_output = {
            'info': self.output.info,
            'debug': self.output.debug,
            'error': self.output.error,
            'exception': self.output.error,
            # we want any warnings to be printed to the terminal
            # so assign it to output.info with a lower noise level
            'warning': self.output.info,
            'kwargs-exception': {
                'level': None
            },
            'kwargs-info': {
                'level': 5
            },
            'kwargs-debug': {
                'level': 2
            },
            'kwargs-error': {
                'level': None
            },
            'kwargs-warning': {
                'level': 2
            },
        }
        fetcher = Connector(connector_output, self.proxies, USERAGENT)

        for index in range(0, 3):
            self.output.debug("RemoteDB.cache() index = %s" % str(index), 2)
            urls = url_lists[index]
            if need_gpg[index] and len(urls) and self.gpg is None:
                #initialize our gpg instance
                self.init_gpg()
            # main working loop
            for url in urls:
                sig = ''
                self.output.debug(
                    "RemoteDB.cache() url = %s is a tuple=%s" %
                    (str(url), str(isinstance(url, tuple))), 2)
                filepath, mpath, tpath, sig = self._paths(url)
                if 'file://' in url:
                    success, olist, timestamp = self._fetch_file(
                        url, mpath, tpath)
                elif sig:
                    success, olist, timestamp = fetcher.fetch_content(
                        url[0], tpath, climit=60)
                else:
                    success, olist, timestamp = fetcher.fetch_content(
                        url, tpath, climit=60)
                if not success:
                    #succeeded = False
                    continue

                self.output.debug(
                    "RemoteDB.cache() len(olist) = %s" % str(len(olist)), 2)
                # GPG handling
                if need_gpg[index]:
                    olist, verified = self.verify_gpg(url, sig, olist)
                    if not verified:
                        self.output.debug(
                            "RemoteDB.cache() gpg returned "
                            "verified = %s" % str(verified), 2)
                        succeeded = False
                        filename = os.path.join(self.config['storage'],
                                                "Failed-to-verify-sig")
                        self.write_cache(olist, filename)
                        continue

                # Before we overwrite the old cache, check that the downloaded
                # file is intact and can be parsed
                if isinstance(url, tuple):
                    olist = self._check_download(olist, url[0])
                else:
                    olist = self._check_download(olist, url)

                # Ok, now we can overwrite the old cache
                has_updates = max(
                    has_updates,
                    self.write_cache(olist, mpath, tpath, timestamp))

            self.output.debug(
                "RemoteDB.cache() self.urls:  has_updates, "
                "succeeded %s, %s" % (str(has_updates), str(succeeded)), 4)
        return has_updates, succeeded
Example #11
 def _verify(self, args, key, messages=None):
      if messages is None:
          messages = []
      self.category = args.category
      filepath, signature = args.filename, args.signature
     timestamp_path = None
     isurl = success = verified = False
     if filepath.startswith('http'):
         isurl = True
         url = filepath
         filepath = args.destination
         # a bit hackish, but save it to current directory
         # with download file name
         if not filepath:
             filepath = url.split('/')[-1]
             self.logger.debug(_unicode(
                 "ACTIONS: verify; destination filepath was "
                 "not supplied, using current directory ./%s") % filepath)
     if args.timestamp:
         timestamp_path = filepath + ".timestamp"
         climit = 60
     else:
         climit = 0
     sig_path = None
     if isurl:
         from sslfetch.connections import Connector
         connector_output = {
              'info': self.logger.info,
              'debug': self.logger.debug,
              'error': self.logger.error,
              'exception': self.logger.exception,
              # we want any warnings to be printed to the terminal
              # so assign it to logging.error
              'warning': self.logger.error,
              'kwargs-info': {},
              'kwargs-debug': {},
              'kwargs-error': {},
              'kwargs-exception': {},
              'kwargs-warning': {},
         }
         fetcher = Connector(connector_output, None, "Gentoo Keys")
         self.logger.debug(
             _unicode("ACTIONS: verify; fetching %s signed file ") % filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; timestamp path: %s") % timestamp_path)
         success, signedfile, timestamp = fetcher.fetch_file(
             url, filepath, timestamp_path, climit=climit)
         if not success:
             messages.append(_unicode("File %s cannot be retrieved.") % filepath)
         elif '.' + url.rsplit('.', 1)[1] not in EXTENSIONS:
             if not signature:
                 success_fetch = False
                 for ext in EXTENSIONS:
                     sig_path = filepath + ext
                     if isurl:
                         signature = url + ext
                         self.logger.debug(
                             _unicode("ACTIONS: verify; fetching %s signature ")
                             % signature)
                         success_fetch, sig, timestamp = fetcher.fetch_file(signature, sig_path)
                     if success_fetch:
                         break
                     else:
                         signature = None
     else:
         filepath = os.path.abspath(filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; local file %s") % filepath)
         success = os.path.isfile(filepath)
         if (not signature
             and '.' + filepath.rsplit('.', 1)[-1] not in EXTENSIONS):
             success_fetch = False
             for ext in EXTENSIONS:
                 sig_path = filepath + ext
                 sig_path = os.path.abspath(sig_path)
                 self.logger.debug(
                     _unicode("ACTIONS: verify; checking %s signature ")
                     % sig_path)
                 success_sig = os.path.isfile(sig_path)
                 if success_sig:
                     break
                 else:
                     sig_path = None
     self.logger.info("Verifying file...")
     verified = False
     results = self.gpg.verify_file(key, sig_path, filepath)
     keyid = key.keyid[0]
     (valid, trust) = results.verified
     if valid:
         verified = True
         messages.extend(
             [_unicode("Verification succeeded.: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s")
             % ( key.name, key.nick, keyid),
             _unicode("    category, nick.....: %s %s")
             % (args.category, args.nick)])
     else:
         messages.extend(
             [_unicode("Verification failed....: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s")
             % ( key.name, key.nick, keyid)])
         found, args, new_msgs = self.keyhandler.autosearch_key(args, results)
         messages.extend(new_msgs)
         if found:
             return self.verify(args, messages)
     return (verified, messages)
Example #12
 def verify(self, args):
     '''File verification action'''
     connector_output = {
          'info': self.logger.debug,
          'error': self.logger.error,
          'kwargs-info': {},
          'kwargs-error': {},
     }
     if not args.filename:
         return (False, ['Please provide a signed file.'])
     if not args.category:
         args.category = self.config.get_key('verify_keyring')
         self.logger.debug("ACTIONS: verify; keyring category not specified, using default: %s"
             % args.category)
     handler = SeedHandler(self.logger, self.config)
     keys = handler.load_category(args.category)
     if not keys:
         return (False, ['No installed keys found, try installkey action.'])
     keyrings = self.config.get_key('keyring')
     catdir = os.path.join(keyrings, args.category)
     self.logger.debug("ACTIONS: verify; catdir = %s" % catdir)
     self.gpg = GkeysGPG(self.config, catdir)
      filepath, signature = args.filename, args.signature
     timestamp_path = None
     isurl = success = verified = False
     if filepath.startswith('http'):
         isurl = True
         url = filepath
         filepath = args.destination
         # a bit hackish, but save it to current directory
         # with download file name
         if not filepath:
             filepath = url.split('/')[-1]
             self.logger.debug("ACTIONS: verify; destination filepath was "
                 "not supplied, using current directory ./%s" % filepath)
     if args.timestamp:
         timestamp_path = filepath + ".timestamp"
     if isurl:
         from sslfetch.connections import Connector
         connector_output = {
              'info': self.logger.info,
              'debug': self.logger.debug,
              'error': self.logger.error,
              'kwargs-info': {},
              'kwargs-debug': {},
              'kwargs-error': {},
         }
         fetcher = Connector(connector_output, None, "Gentoo Keys")
         self.logger.debug("ACTIONS: verify; fetching %s signed file " % filepath)
         self.logger.debug("ACTIONS: verify; timestamp path: %s" % timestamp_path)
         success, signedfile, timestamp = fetcher.fetch_file(url, filepath, timestamp_path)
     else:
         filepath = os.path.abspath(filepath)
         self.logger.debug("ACTIONS: verify; local file %s" % filepath)
         success = os.path.isfile(filepath)
     if not success:
         messages = ["File %s cannot be retrieved." % filepath]
     else:
         if not signature:
              EXTENSIONS = ['.sig', '.asc', '.gpg', '.gpgsig']
             success_fetch = False
             for ext in EXTENSIONS:
                 sig_path = filepath + ext
                 if isurl:
                     signature = url + ext
                     self.logger.debug("ACTIONS: verify; fetching %s signature " % signature)
                     success_fetch, sig, timestamp = fetcher.fetch_file(signature, sig_path)
                 else:
                     signature = filepath + ext
                     signature = os.path.abspath(signature)
                     self.logger.debug("ACTIONS: verify; checking %s signature " % signature)
                     success_fetch = os.path.isfile(signature)
                 if success_fetch:
                     break
         else:
             sig_path = signature
         messages = []
         self.logger.info("Verifying file...")
         verified = False
         key = keys.nick_search(args.nick)
         if not key:
             messages.append("Failed to find nick: %s in %s category"
                 % (args.nick, args.category))
             return (False, messages)
         results = self.gpg.verify_file(key, sig_path, filepath)
         keyid = key.keyid[0]
         (valid, trust) = results.verified
         if valid:
             verified = True
             messages = ["Verification succeeded.: %s" % (filepath),
                 "Key info...............: %s <%s>, %s"
                 % ( key.name, key.nick, keyid)]
         else:
             messages = ["Verification failed..... %s:" % (filepath),
                 "Key info................: %s <%s>, %s"
                 % ( key.name, key.nick, keyid)]
     return (verified, messages)
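
The extension probe above tries each known detached-signature suffix until a
fetch (or a local stat) succeeds. A condensed, local-only sketch of the same
idea; find_detached_sig is a hypothetical helper, not part of gkeys:

import os

SIG_EXTENSIONS = ['.sig', '.asc', '.gpg', '.gpgsig']

def find_detached_sig(filepath):
    '''Return the first existing detached-signature path, or None.'''
    for ext in SIG_EXTENSIONS:
        candidate = os.path.abspath(filepath + ext)
        if os.path.isfile(candidate):
            return candidate
    return None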
Example #13
 def _verify(self, args, key, messages=None):
      if messages is None:
         messages = []
     self._set_category(args.category)
     filepath, signature = args.filename, args.signature
     timestamp_path = None
     isurl = success = verified = False
     if filepath.startswith('http'):
         isurl = True
         url = filepath
         filepath = args.destination
         # a bit hackish, but save it to current directory
         # with download file name
         if not filepath:
             filepath = url.split('/')[-1]
             self.logger.debug(
                 _unicode("ACTIONS: verify; destination filepath was "
                          "not supplied, using current directory ./%s") %
                 filepath)
     if args.timestamp:
         timestamp_path = filepath + ".timestamp"
         climit = 60
     else:
         climit = 0
     sig_path = None
     if isurl:
         from sslfetch.connections import Connector
         connector_output = {
             'info': self.logger.info,
             'debug': self.logger.debug,
             'error': self.logger.error,
             'exception': self.logger.exception,
             # we want any warnings to be printed to the terminal
             # so assign it to logging.error
             'warning': self.logger.error,
             'kwargs-info': {},
             'kwargs-debug': {},
             'kwargs-error': {},
             'kwargs-exception': {},
             'kwargs-warning': {},
         }
         fetcher = Connector(connector_output, None, "Gentoo Keys")
         self.logger.debug(
             _unicode("ACTIONS: verify; fetching %s signed file ") %
             filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; timestamp path: %s") %
             timestamp_path)
         success, signedfile, timestamp = fetcher.fetch_file(url,
                                                             filepath,
                                                             timestamp_path,
                                                             climit=climit)
         if not success:
             messages.append(
                 _unicode("File %s cannot be retrieved.") % filepath)
         elif '.' + url.rsplit('.', 1)[1] not in EXTENSIONS:
             if not signature:
                 success_fetch = False
                 for ext in EXTENSIONS:
                     sig_path = filepath + ext
                     if isurl:
                         signature = url + ext
                         self.logger.debug(
                             _unicode(
                                 "ACTIONS: verify; fetching %s signature ")
                             % signature)
                         success_fetch, sig, timestamp = fetcher.fetch_file(
                             signature, sig_path)
                     if success_fetch:
                         break
                     else:
                         signature = None
     elif signature is not None and os.path.exists(signature):
         sig_path = signature
     else:
         filepath = os.path.abspath(filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; local file %s") % filepath)
         success = os.path.isfile(filepath)
         if (not signature
                 and '.' + filepath.rsplit('.', 1)[-1] not in EXTENSIONS):
             success_fetch = False
             for ext in EXTENSIONS:
                 sig_path = filepath + ext
                 sig_path = os.path.abspath(sig_path)
                 self.logger.debug(
                     _unicode("ACTIONS: verify; checking %s signature ") %
                     sig_path)
                 success_sig = os.path.isfile(sig_path)
                 if success_sig:
                     break
                 else:
                     sig_path = None
     self.logger.info("Verifying file...")
     verified = False
     results = self.gpg.verify_file(key, sig_path, filepath)
     keyid = key.keyid[0]
     (valid, trust) = results.verified
     if valid:
         verified = True
         messages.extend([
             _unicode("Verification succeeded.: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s") %
             (key.name, key.nick, keyid),
             _unicode("    category, nick.....: %s %s") %
             (args.category, args.nick)
         ])
     else:
         messages.extend([
             _unicode("Verification failed....: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s") %
             (key.name, key.nick, keyid)
         ])
         found, args, new_msgs = self.keyhandler.autosearch_key(
             args, results)
         messages.extend(new_msgs)
         if found:
             return self.verify(args, messages)
     return (verified, messages)
Example #14
    def cache(self):
        '''
        Copy the remote overlay list to the local cache.

        @rtype tuple: whether the cache has updates, and whether the cache
        retrieval was successful.
        '''
        has_updates = False
        self._create_storage(self.config['storage'])
        # succeeded is reset to False when a failure is detected
        succeeded = True
        url_lists = [self.urls, self.detached_urls, self.signed_urls]
        need_gpg = [False, True, True]
        # set up the ssl-fetch output map
        connector_output = {
            'info': self.output.debug,
            'error': self.output.error,
            'kwargs-info': {'level': 2},
            'kwargs-error': {'level': None},
        }
        fetcher = Connector(connector_output, self.proxies, USERAGENT)

        for index in range(0, 3):
            self.output.debug("RemoteDB.cache() index = %s" %str(index), 2)
            urls = url_lists[index]
            if need_gpg[index] and len(urls) and self.gpg is None:
                #initialize our gpg instance
                self.init_gpg()
            # main working loop
            for url in urls:
                sig = ''
                self.output.debug("RemoteDB.cache() url = %s is a tuple=%s"
                    %(str(url), str(isinstance(url, tuple))), 2)
                filepath, mpath, tpath, sig = self._paths(url)
                if 'file://' in url:
                    success, olist, timestamp = self._fetch_file(
                        url, mpath, tpath)
                elif sig:
                    success, olist, timestamp = fetcher.fetch_content(
                        url[0], tpath)
                else:
                    success, olist, timestamp = fetcher.fetch_content(
                        url, tpath)
                if not success:
                    #succeeded = False
                    continue

                self.output.debug("RemoteDB.cache() len(olist) = %s"
                    % str(len(olist)), 2)
                # GPG handling
                if need_gpg[index]:
                    olist, verified = self.verify_gpg(url, sig, olist)
                    if not verified:
                        self.output.debug("RemoteDB.cache() gpg returned "
                            "verified = %s" %str(verified), 2)
                        succeeded = False
                        filename = os.path.join(self.config['storage'],
                                                "Failed-to-verify-sig")
                        self.write_cache(olist, filename)
                        continue

                # Before we overwrite the old cache, check that the downloaded
                # file is intact and can be parsed
                if isinstance(url, tuple):
                    olist = self._check_download(olist, url[0])
                else:
                    olist = self._check_download(olist, url)

                # Ok, now we can overwrite the old cache
                has_updates = max(has_updates,
                    self.write_cache(olist, mpath, tpath, timestamp))

            self.output.debug("RemoteDB.cache() self.urls:  has_updates, "
                "succeeded %s, %s" % (str(has_updates), str(succeeded)), 4)
        return has_updates, succeeded
Example #15
 def _verify(self, args, handler, key, messages=None):
      if messages is None:
          messages = []
      filepath, signature = args.filename, args.signature
     timestamp_path = None
     isurl = success = verified = False
     if filepath.startswith('http'):
         isurl = True
         url = filepath
         filepath = args.destination
         # a bit hackish, but save it to current directory
         # with download file name
         if not filepath:
             filepath = url.split('/')[-1]
             self.logger.debug(_unicode(
                 "ACTIONS: verify; destination filepath was "
                 "not supplied, using current directory ./%s") % filepath)
     if args.timestamp:
         timestamp_path = filepath + ".timestamp"
     sig_path = None
     if isurl:
         from sslfetch.connections import Connector
         connector_output = {
              'info': self.logger.info,
              'debug': self.logger.debug,
              'error': self.logger.error,
              'kwargs-info': {},
              'kwargs-debug': {},
              'kwargs-error': {},
         }
         fetcher = Connector(connector_output, None, "Gentoo Keys")
         self.logger.debug(
             _unicode("ACTIONS: verify; fetching %s signed file ") % filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; timestamp path: %s") % timestamp_path)
         success, signedfile, timestamp = fetcher.fetch_file(
             url, filepath, timestamp_path)
         if not success:
             messages.append(_unicode("File %s cannot be retrieved.") % filepath)
         elif '.' + url.rsplit('.', 1)[1] not in EXTENSIONS:
             if not signature:
                 success_fetch = False
                 for ext in EXTENSIONS:
                     sig_path = filepath + ext
                     if isurl:
                         signature = url + ext
                         self.logger.debug(
                             _unicode("ACTIONS: verify; fetching %s signature ")
                             % signature)
                         success_fetch, sig, timestamp = fetcher.fetch_file(signature, sig_path)
                     if success_fetch:
                         break
                     else:
                         signature = None
     else:
         filepath = os.path.abspath(filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; local file %s") % filepath)
         success = os.path.isfile(filepath)
          if (not signature
              and '.' + filepath.rsplit('.', 1)[-1] not in EXTENSIONS):
             success_fetch = False
             for ext in EXTENSIONS:
                 sig_path = filepath + ext
                 sig_path = os.path.abspath(sig_path)
                 self.logger.debug(
                     _unicode("ACTIONS: verify; checking %s signature ")
                     % sig_path)
                 success_sig = os.path.isfile(sig_path)
                 if success_sig:
                     break
                 else:
                     sig_path = None
     self.logger.info("Verifying file...")
     verified = False
     results = self.gpg.verify_file(key, sig_path, filepath)
     keyid = key.keyid[0]
     (valid, trust) = results.verified
     if valid:
         verified = True
         messages.extend(
             [_unicode("Verification succeeded.: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s")
             % ( key.name, key.nick, keyid),
             _unicode("    category, nick.....: %s %s")
             % (args.category, args.nick)])
     else:
         messages.extend(
             [_unicode("Verification failed....: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s")
             % ( key.name, key.nick, keyid)])
         has_no_pubkey, s_keyid = results.no_pubkey
         if has_no_pubkey:
             messages.append(
                 _unicode("Auto-searching for key.: 0x%s") % s_keyid)
             # reset all but keyid and pass thru data
             args.keyid = s_keyid
             args.keydir = None
             args.fingerprint = None
             args.exact = False
             args.category = None
             args.nick = None
             args.name = None
             args.all = False
             keys = self.key_search(args, data_only=True)
             if keys:
                 args.category = list(keys)[0]
                 args.nick = keys[args.category][0].nick
                 return self.verify(args, messages)
             messages.append(_unicode("Failed to find gpg key.: 0x%s") % s_keyid)
     return (verified, messages)