Code example #1
0
    def _fetch(self, base, archive_url, dest_dir):
        '''
        Fetches overlay source archive.

        @params base: string of directory base for installed overlays.
        @params archive_url: string of URL where archive is located.
        @params dest_dir: string of destination of extracted archive.
        @rtype str: path to the stored (or local) archive package.
        '''
        ext = self.get_extension()

        if 'file://' not in archive_url:
            # Remote archive: download via ssl-fetch and store a copy
            # under ``base`` so it can be extracted later.
            # set up ssl-fetch output map
            connector_output = {
                'info': self.output.debug,
                'error': self.output.error,
                'kwargs-info': {
                    'level': 5
                },
                'kwargs-debug': {
                    'level': 2
                },
                'kwargs-error': {
                    'level': None
                },
            }

            fetcher = Connector(connector_output, self.proxies, USERAGENT)

            # fetch_content() returns (success, content, timestamp)
            # NOTE(review): ``success`` is not checked; a failed fetch
            # would attempt to write a non-bytes payload below -- confirm
            # whether an early, explicit error is wanted here.
            success, archive, timestamp = fetcher.fetch_content(archive_url)

            pkg = path([base, self.parent.name + ext])

            try:
                with fileopen(pkg, 'w+b') as out_file:
                    out_file.write(archive)

            except Exception as error:
                # chain the original exception so the root cause stays
                # visible in the traceback
                raise Exception('Failed to store archive package in '
                                '%(pkg)s\nError was: %(error)s'
                                % {'pkg': pkg, 'error': error}) from error

        else:
            # Local file:// source: use it in place and skip cleanup.
            self.clean_archive = False
            pkg = archive_url.replace('file://', '')

        return pkg
Code example #2
0
File: extractor.py  Project: shuber2/mirrorselect
    def getlist(self, parser, url):
        """
        Uses the supplied parser to get a list of urls.

        @param parser: parser object used to extract mirror tuples.
        @param url: string url to download the mirror list from.
        @rtype list: the parser's extracted mirror tuples.
        """

        self.output.write('getlist(): fetching ' + url + '\n', 2)

        self.output.print_info('Downloading a list of mirrors...\n')

        # setup the ssl-fetch ouptut map
        connector_output = {
            'info': self.output.write,
            'debug': self.output.write,
            'error': self.output.print_err,
            'kwargs-info': {
                'level': 2
            },
            'kwargs-debug': {
                'level': 2
            },
            'kwargs-error': {
                'level': 0
            },
        }

        fetcher = Connector(connector_output, self.proxies, USERAGENT)
        success, mirrorlist, timestamp = fetcher.fetch_content(url, climit=60)

        # Only hand the download to the parser when content was actually
        # retrieved; parsing a None/empty payload could raise before the
        # user-facing error message below gets printed.
        if mirrorlist:
            parser.parse(mirrorlist)

        if (not mirrorlist) or len(parser.tuples()) == 0:
            self.output.print_err('Could not get mirror list. '
                                  'Check your internet connection.')

        self.output.write(' Got %d mirrors.\n' % len(parser.tuples()))

        return parser.tuples()
Code example #3
0
    def cache(self):
        '''
        Copy the remote overlay list to the local cache.

        Iterates over the three url lists (plain, detached-signature,
        signed); the latter two are GPG-verified before the downloaded
        list is validated and written to the cache.

        @rtype tuple: reflects whether the cache has updates and whether or not
        the cache retrieval was successful.
        '''
        has_updates = False
        self._create_storage(self.config['storage'])
        # succeeded reset when a failure is detected
        succeeded = True
        # index-aligned lists: need_gpg[i] tells whether url_lists[i]
        # requires signature verification
        url_lists = [self.urls, self.detached_urls, self.signed_urls]
        need_gpg = [False, True, True]
        # setup the ssl-fetch output map
        connector_output = {
            'info': self.output.info,
            'debug': self.output.debug,
            'error': self.output.error,
            'exception': self.output.error,
            # we want any warnings to be printed to the terminal
            # so assign it to output.info with a lower noise level
            'warning': self.output.info,
            'kwargs-exception': {
                'level': None
            },
            'kwargs-info': {
                'level': 5
            },
            'kwargs-debug': {
                'level': 2
            },
            'kwargs-error': {
                'level': None
            },
            'kwargs-warning': {
                'level': 2
            },
        }
        fetcher = Connector(connector_output, self.proxies, USERAGENT)

        for index in range(0, 3):
            self.output.debug("RemoteDB.cache() index = %s" % str(index), 2)
            urls = url_lists[index]
            # lazily initialize gpg only when a list actually needs it
            if need_gpg[index] and len(urls) and self.gpg is None:
                #initialize our gpg instance
                self.init_gpg()
            # main working loop
            for url in urls:
                sig = ''
                self.output.debug(
                    "RemoteDB.cache() url = %s is a tuple=%s" %
                    (str(url), str(isinstance(url, tuple))), 2)
                # sig is non-empty for detached-signature entries, where
                # url is presumably a (content_url, sig_url) tuple --
                # see the tuple handling below
                filepath, mpath, tpath, sig = self._paths(url)
                if 'file://' in url:
                    # local file source; no network fetch needed
                    success, olist, timestamp = self._fetch_file(
                        url, mpath, tpath)
                elif sig:
                    # detached signature: fetch only the content url
                    success, olist, timestamp = fetcher.fetch_content(
                        url[0], tpath, climit=60)
                else:
                    success, olist, timestamp = fetcher.fetch_content(
                        url, tpath, climit=60)
                if not success:
                    # NOTE(review): a failed fetch is skipped WITHOUT
                    # flipping succeeded to False (line deliberately
                    # commented out) -- confirm this is intentional
                    #succeeded = False
                    continue

                self.output.debug(
                    "RemoteDB.cache() len(olist) = %s" % str(len(olist)), 2)
                # GPG handling
                if need_gpg[index]:
                    olist, verified = self.verify_gpg(url, sig, olist)
                    if not verified:
                        self.output.debug(
                            "RemoteDB.cache() gpg returned "
                            "verified = %s" % str(verified), 2)
                        succeeded = False
                        # keep the unverified payload on disk for
                        # post-mortem inspection
                        filename = os.path.join(self.config['storage'],
                                                "Failed-to-verify-sig")
                        self.write_cache(olist, filename)
                        continue

                # Before we overwrite the old cache, check that the downloaded
                # file is intact and can be parsed
                if isinstance(url, tuple):
                    olist = self._check_download(olist, url[0])
                else:
                    olist = self._check_download(olist, url)

                # Ok, now we can overwrite the old cache
                has_updates = max(
                    has_updates,
                    self.write_cache(olist, mpath, tpath, timestamp))

            self.output.debug(
                "RemoteDB.cache() self.urls:  has_updates, "
                "succeeded %s, %s" % (str(has_updates), str(succeeded)), 4)
        return has_updates, succeeded
Code example #4
0
 def _verify(self, args, key, messages=None):
     '''
     Verify a file (local path or http url) against a GPG signature.

     @param args: parsed argument namespace; reads filename, signature,
         category, destination, timestamp and nick.
     @param key: key object whose keyid[0] is used for verification.
     @param messages: optional list to append status messages to.
     @rtype tuple: (verified boolean, list of status messages)
     '''
     if messages == None:
         messages = []
     self._set_category(args.category)
     filepath, signature = args.filename, args.signature
     timestamp_path = None
     isurl = success = verified = False
     if filepath.startswith('http'):
         isurl = True
         url = filepath
         filepath = args.destination
         # a bit hackish, but save it to current directory
         # with download file name
         if not filepath:
             filepath = url.split('/')[-1]
             self.logger.debug(
                 _unicode("ACTIONS: verify; destination filepath was "
                          "not supplied, using current directory ./%s") %
                 filepath)
     # with --timestamp, track freshness in a sidecar file and pass a
     # cache limit to the fetcher; climit=0 presumably disables it
     if args.timestamp:
         timestamp_path = filepath + ".timestamp"
         climit = 60
     else:
         climit = 0
     sig_path = None
     if isurl:
         from sslfetch.connections import Connector
         connector_output = {
             'info': self.logger.info,
             'debug': self.logger.debug,
             'error': self.logger.error,
             'exception': self.logger.exception,
             # we want any warnings to be printed to the terminal
             # so assign it to logging.error
             'warning': self.logger.error,
             'kwargs-info': {},
             'kwargs-debug': {},
             'kwargs-error': {},
             'kwargs-exception': {},
             'kwargs-warning': {},
         }
         fetcher = Connector(connector_output, None, "Gentoo Keys")
         self.logger.debug(
             _unicode("ACTIONS: verify; fetching %s signed file ") %
             filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; timestamp path: %s") %
             timestamp_path)
         success, signedfile, timestamp = fetcher.fetch_file(url,
                                                             filepath,
                                                             timestamp_path,
                                                             climit=climit)
         if not success:
             messages.append(
                 _unicode("File %s cannot be retrieved.") % filepath)
         # when the url itself is not a signature file, try to locate
         # and download a detached signature next to it
         elif '.' + url.rsplit('.', 1)[1] not in EXTENSIONS:
             if not signature:
                 success_fetch = False
                 for ext in EXTENSIONS:
                     sig_path = filepath + ext
                     if isurl:
                         signature = url + ext
                         self.logger.debug(
                             _unicode(
                                 "ACTIONS: verify; fetching %s signature ")
                             % signature)
                         success_fetch, sig, timestamp = fetcher.fetch_file(
                             signature, sig_path)
                     if success_fetch:
                         break
                     else:
                         signature = None
     elif signature is not None and os.path.exists(signature):
         # explicit local signature file supplied and present
         sig_path = signature
     else:
         # local file: probe for a sibling signature file on disk
         filepath = os.path.abspath(filepath)
         self.logger.debug(
             _unicode("ACTIONS: verify; local file %s") % filepath)
         success = os.path.isfile(filepath)
         if (not signature
                 and '.' + filepath.rsplit('.', 1)[-1] not in EXTENSIONS):
             # NOTE(review): success_fetch is set here but this branch
             # tests success_sig below -- looks vestigial, confirm
             success_fetch = False
             for ext in EXTENSIONS:
                 sig_path = filepath + ext
                 sig_path = os.path.abspath(sig_path)
                 self.logger.debug(
                     _unicode("ACTIONS: verify; checking %s signature ") %
                     sig_path)
                 success_sig = os.path.isfile(sig_path)
                 if success_sig:
                     break
                 else:
                     sig_path = None
     self.logger.info("Verifying file...")
     verified = False
     results = self.gpg.verify_file(key, sig_path, filepath)
     keyid = key.keyid[0]
     (valid, trust) = results.verified
     if valid:
         verified = True
         messages.extend([
             _unicode("Verification succeeded.: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s") %
             (key.name, key.nick, keyid),
             _unicode("    category, nick.....: %s %s") %
             (args.category, args.nick)
         ])
     else:
         messages.extend([
             _unicode("Verification failed....: %s") % (filepath),
             _unicode("Key info...............: %s <%s>, %s") %
             (key.name, key.nick, keyid)
         ])
         # on failure, try to auto-discover the right key and retry
         found, args, new_msgs = self.keyhandler.autosearch_key(
             args, results)
         messages.extend(new_msgs)
         if found:
             # NOTE(review): retries via self.verify (public wrapper),
             # not self._verify -- confirm the wrapper exists and that
             # the differing signature (no key arg) is intended
             return self.verify(args, messages)
     return (verified, messages)