Example no. 1
 def test_interrupt_callback_raises(self):
     "interrupt callback raises an exception"
     ic = (self._interrupt_callback, (),
           {'exception': self.TestException()})
     g = grabber.URLGrabber(progress_obj=self.InterruptProgress(),
                            interrupt_callback=ic)
     self.assertRaises(self.TestException, g.urlgrab, ref_http)
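The tests in this listing rely on a `_interrupt_callback` helper that is not shown. A minimal sketch of what it plausibly looks like, inferred from this example and from Example no. 5 below (the (function, args, kwargs) tuple form and the attribute names are the only parts taken from the snippets; the body itself is an assumption):

def _interrupt_callback(self, obj, *args, **kwargs):
    # obj is urlgrabber's CallbackObject for the interrupted transfer.
    self.interrupt_callback_called = 1  # checked in Example no. 5
    if 'exception' in kwargs:
        # Example no. 1 passes {'exception': self.TestException()} and
        # expects urlgrab() to propagate it to the caller.
        raise kwargs['exception']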
Example no. 2
def myurlgrab(url, filename, proxies, progress_obj=None, ignore_404=False):
    g = grabber.URLGrabber(netrc_optional=True)
    if progress_obj is None:
        progress_obj = TextProgress()

    if url.startswith("file:/"):
        file = url.replace("file:", "")
        if not os.path.exists(file):
            if ignore_404:
                return None
            else:
                raise CreatorError("URLGrabber error: can't find file %s" %
                                   file)
        runner.show(['cp', "-f", file, filename])
    else:
        try:
            filename = g.urlgrab(url=url,
                                 filename=filename,
                                 ssl_verify_host=False,
                                 ssl_verify_peer=False,
                                 proxies=proxies,
                                 http_headers=(('Pragma', 'no-cache'), ),
                                 quote=0,
                                 progress_obj=progress_obj)
        except grabber.URLGrabError as e:
            if e.errno == 14 and e.code in [404, 503] and ignore_404:
                return None
            else:
                raise CreatorError("URLGrabber error: %s - %s" % (url, e))

    return filename
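A hedged usage sketch of the helper above; the URL and file names are invented for illustration, and `TextProgress`, `runner`, and `CreatorError` are assumed to come from the surrounding project exactly as in the snippet:

# With ignore_404=True a missing remote file yields None instead of a
# CreatorError, so optional files can be skipped by the caller.
result = myurlgrab("http://example.com/repodata/repomd.xml",  # hypothetical URL
                   "/tmp/repomd.xml",
                   proxies=None,
                   ignore_404=True)
if result is None:
    print("optional file not found upstream, continuing")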
Example no. 3
def myurlgrab(url, filename, proxies, progress_obj=None):
    g = grabber.URLGrabber()
    if progress_obj is None:
        progress_obj = TextProgress()

    if url.startswith("file:/"):
        filepath = "/%s" % url.replace("file:", "").lstrip('/')
        if not os.path.exists(filepath):
            raise CreatorError("URLGrabber error: can't find file %s" % url)
        if url.endswith('.rpm'):
            return filepath
        else:
            # leave the repo metadata in the source path untouched; copy it instead
            runner.show(['cp', '-f', filepath, filename])

    else:
        try:
            filename = g.urlgrab(url=str(url),
                                 filename=filename,
                                 ssl_verify_host=False,
                                 ssl_verify_peer=False,
                                 proxies=proxies,
                                 http_headers=(('Pragma', 'no-cache'), ),
                                 quote=0,
                                 progress_obj=progress_obj)
        except grabber.URLGrabError as err:
            msg = str(err)
            if msg.find(url) < 0:
                msg += ' on %s' % url
            raise CreatorError(msg)
Example no. 4
 def test_failure_callback_called(self):
     "failure callback is called on retry"
     self.failure_callback_called = 0
     g = grabber.URLGrabber(retry=2, retrycodes=[14],
                            failure_callback=self._failure_callback)
     try: g.urlgrab(ref_404)
     except URLGrabError: pass
     self.assertEqual(self.failure_callback_called, 1)
Example no. 5
 def test_interrupt_callback_called(self):
     "interrupt callback is called on retry"
     self.interrupt_callback_called = 0
     ic = (self._interrupt_callback, (), {})
     g = grabber.URLGrabber(progress_obj=self.InterruptProgress(),
                            interrupt_callback=ic)
     try: g.urlgrab(ref_http)
     except KeyboardInterrupt: pass
     self.assertEqual(self.interrupt_callback_called, 1)
Example no. 6
 def _download_repo(cls, project_name, repo_filename, chroot=None):
     if chroot is None:
         chroot = cls._guess_chroot()
     #http://copr.fedoraproject.org/coprs/larsks/rcm/repo/epel-7-x86_64/
     api_path = "/coprs/{0}/repo/{1}/".format(project_name, chroot)
     ug = grabber.URLGrabber()
     # FIXME: once we are fully on python3, use urllib.parse to build this URL
     try:
         ug.urlgrab(cls.copr_url + api_path, filename=repo_filename)
     except grabber.URLGrabError as e:
         cls._remove_repo(repo_filename)
         raise YError(str(e))
Example no. 7
    def setUp(self):
        self.ref = short_reference_data
        tmp = tempfile.mktemp()
        with open(tmp, 'wb') as tmpfo:
            tmpfo.write(self.ref)
        self.tmp = tmp

        (url, parts) = grabber.default_grabber.opts.urlparser.parse(
            tmp, grabber.default_grabber.opts)
        self.url = url

        self.grabber = grabber.URLGrabber(reget='check_timestamp',
                                          copy_local=1)
        self.filename = tempfile.mktemp()
        self.hl = len(self.ref) // 2
Example no. 8
    def setUp(self):
        self.ref = short_reference_data
        _, tmp = tempfile.mkstemp()
        tmpfo = open(tmp, 'wb' if not six.PY3 else 'w')
        tmpfo.write(self.ref)
        tmpfo.close()
        self.tmp = tmp
        
        (url, parts) = grabber.default_grabber.opts.urlparser.parse(
            tmp, grabber.default_grabber.opts)
        self.url = url

        self.grabber = grabber.URLGrabber(reget='check_timestamp',
                                          copy_local=1)
        _, self.filename = tempfile.mkstemp()
        self.hl = len(self.ref) // 2
Example no. 9
 def test_failure_callback_args(self):
     "failure callback is called with the proper args"
     fc = (self._failure_callback, ('foo',), {'bar': 'baz'})
     g = grabber.URLGrabber(retry=2, retrycodes=[14],
                            failure_callback=fc)
     try: g.urlgrab(ref_404)
     except URLGrabError: pass
     self.assertTrue(hasattr(self, 'obj'))
     self.assertTrue(hasattr(self, 'args'))
     self.assertTrue(hasattr(self, 'kwargs'))
     self.assertEqual(self.args, ('foo',))
     self.assertEqual(self.kwargs, {'bar': 'baz'})
     self.assertTrue(isinstance(self.obj, CallbackObject))
     self.assertEqual(self.obj.url, ref_404)
     self.assertTrue(isinstance(self.obj.exception, URLGrabError))
     del self.obj
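For context, the `_failure_callback` helper this test depends on is not part of the snippet; a minimal sketch consistent with the attributes asserted above (everything beyond recording the arguments is an assumption) could be:

def _failure_callback(self, obj, *args, **kwargs):
    # obj is a CallbackObject carrying .url and .exception (the
    # URLGrabError that triggered this retry), as the assertions above show.
    self.failure_callback_called = getattr(self, 'failure_callback_called', 0) + 1
    self.obj = obj
    self.args = args
    self.kwargs = kwargs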
Example no. 10
def myurlgrab(url, filename, proxies, progress_obj=None):
    g = grabber.URLGrabber()
    if progress_obj is None:
        progress_obj = TextProgress()

    if url.startswith("file:/"):
        file = url.replace("file:", "")
        if not os.path.exists(file):
            raise CreatorError("URLGrabber error: can't find file %s" % file)
        runner.show(['cp', "-f", file, filename])
    else:
        try:
            filename = g.urlgrab(url=url,
                                 filename=filename,
                                 ssl_verify_host=False,
                                 ssl_verify_peer=False,
                                 proxies=proxies,
                                 http_headers=(('Pragma', 'no-cache'), ),
                                 quote=0,
                                 progress_obj=progress_obj)
        except grabber.URLGrabError as e:
            raise CreatorError("URLGrabber error: %s" % url)
Example no. 11
    to_be_built = pkgs
    while try_again:
        failed = []
        for pkg in to_be_built:
            if not pkg.endswith('.rpm'):
                log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
                failed.append(pkg)
                continue

            elif pkg.startswith('http://') or pkg.startswith('https://'):
                url = pkg
                cwd = os.getcwd()
                os.chdir(download_dir)
                try:
                    log(opts.logfile, 'Fetching %s' % url)
                    ug = grabber.URLGrabber()
                    fn = ug.urlgrab(url)
                    pkg = download_dir + '/' + fn
                except Exception as e:
                    log(opts.logfile, 'Error Downloading %s: %s' % (url, str(e)))
                    failed.append(url)
                    os.chdir(cwd)
                    continue
                else:
                    os.chdir(cwd)
                    downloaded_pkgs[pkg] = url
            log(opts.logfile, "Start build: %s" % pkg)
            ret, cmd, out, err = do_build(opts, cfg, pkg)
            log(opts.logfile, "End build: %s" % pkg)
            if ret == 0:
                if opts.recurse:
Example no. 12
 def test_restart_workaround(self):
     inst = grabber.URLGrabber()
     rslt = inst.urlread(self.url, range=(500, 1000))
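A short, hedged illustration of the byte-range form exercised above, assuming `range` follows Python slice semantics (end byte excluded), which is how I read the urlgrabber option; the URL here is hypothetical:

from urlgrabber import grabber

g = grabber.URLGrabber()
# Request only bytes 500-999 of the resource.
data = g.urlread('http://localhost/test/reference', range=(500, 1000))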
Example no. 13
 def setUp(self):
     self.ref = short_reference_data
     self.grabber = grabber.URLGrabber(reget='check_timestamp')
     _, self.filename = tempfile.mkstemp()
     self.hl = len(self.ref) // 2
     self.url = 'OVERRIDE THIS'
Example no. 14
 def setUp(self):
     cf = (self._checkfunc, ('foo',), {'bar': 'baz'})
     self.g = grabber.URLGrabber(checkfunc=cf)
     _, self.filename = tempfile.mkstemp()
     self.data = short_reference_data
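The `_checkfunc` helper itself is not shown. As I understand urlgrabber's checkfunc protocol, the function receives a CallbackObject (carrying the downloaded file name for urlgrab) plus the extra args/kwargs from the tuple, and signals a failed check by raising URLGrabError; a hedged sketch along those lines, with an illustrative size check that is not taken from the test suite:

import os
from urlgrabber.grabber import URLGrabError

def _checkfunc(self, obj, *args, **kwargs):
    # obj.filename should point at the file urlgrab() just wrote
    # (assumption based on the urlgrabber callback documentation).
    if os.path.getsize(obj.filename) == 0:
        # Raising URLGrabError marks the download as failed so the
        # grabber's retry/failure handling can take over.
        raise URLGrabError(-1, 'downloaded file is empty')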
Example no. 15
def main(args):
    opts, args = parse_args(args)

    # take mock config + list of pkgs
    cfg = opts.chroot
    pkgs = args[1:]
    mockcfg = mockconfig_path + '/' + cfg + '.cfg'

    if not os.path.exists(mockcfg):
        print "could not find config: %s" % mockcfg
        sys.exit(1)

    username = os.getlogin()
    pid = os.getpid()
    opts.uniqueext = '%s-%s' % (username, pid)

    # create a tempdir for our local info
    if opts.localrepo:
        local_tmp_dir = os.path.abspath(opts.localrepo)
        if not os.path.exists(local_tmp_dir):
            os.makedirs(local_tmp_dir)
    else:
        pre = 'mock-chain-%s-' % opts.uniqueext
        local_tmp_dir = tempfile.mkdtemp(prefix=pre, dir='/var/tmp')

    os.chmod(local_tmp_dir, 0o755)

    if opts.logfile:
        opts.logfile = os.path.join(local_tmp_dir, opts.logfile)
        if os.path.exists(opts.logfile):
            os.unlink(opts.logfile)

    log(opts.logfile, "starting logfile: %s" % opts.logfile)
    opts.local_repo_dir = os.path.normpath(local_tmp_dir + '/results/' + cfg +
                                           '/')

    if not os.path.exists(opts.local_repo_dir):
        os.makedirs(opts.local_repo_dir, mode=0o755)

    local_baseurl = "file://%s" % opts.local_repo_dir
    log(opts.logfile, "results dir: %s" % opts.local_repo_dir)
    opts.config_path = os.path.normpath(local_tmp_dir + '/configs/' + cfg +
                                        '/')

    if not os.path.exists(opts.config_path):
        os.makedirs(opts.config_path, mode=0o755)

    log(opts.logfile, "config dir: %s" % opts.config_path)

    my_mock_config = opts.config_path + '/' + os.path.basename(mockcfg)

    # modify with localrepo
    res, msg = add_local_repo(mockcfg, my_mock_config, local_baseurl,
                              'local_build_repo')
    if not res:
        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
        sys.exit(1)

    for baseurl in opts.repos:
        res, msg = add_local_repo(my_mock_config, my_mock_config, baseurl)
        if not res:
            log(
                opts.logfile,
                "Error: Could not add: %s to yum config in mock chroot: %s" %
                (baseurl, msg))
            sys.exit(1)

    # these files needed from the mock.config dir to make mock run
    for fn in ['site-defaults.cfg', 'logging.ini']:
        pth = mockconfig_path + '/' + fn
        shutil.copyfile(pth, opts.config_path + '/' + fn)

    # createrepo on it
    out, err = createrepo(opts.local_repo_dir)
    if err.strip():
        log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
        log(opts.logfile, "Err: %s" % err)
        sys.exit(1)

    download_dir = tempfile.mkdtemp()
    downloaded_pkgs = {}
    built_pkgs = []
    try_again = True
    to_be_built = pkgs
    while try_again:
        failed = []
        for pkg in to_be_built:
            if not pkg.endswith('.rpm'):
                log(opts.logfile,
                    "%s doesn't appear to be an rpm - skipping" % pkg)
                failed.append(pkg)
                continue

            elif pkg.startswith('http://') or pkg.startswith('https://'):
                url = pkg
                cwd = os.getcwd()
                os.chdir(download_dir)
                try:
                    log(opts.logfile, 'Fetching %s' % url)
                    ug = grabber.URLGrabber()
                    fn = ug.urlgrab(url)
                    pkg = download_dir + '/' + fn
                except Exception as e:
                    log(opts.logfile,
                        'Error Downloading %s: %s' % (url, str(e)))
                    failed.append(url)
                    os.chdir(cwd)
                    continue
                else:
                    os.chdir(cwd)
                    downloaded_pkgs[pkg] = url
            log(opts.logfile, "Start build: %s" % pkg)
            ret, cmd, out, err = do_build(opts, cfg, pkg)
            log(opts.logfile, "End build: %s" % pkg)
            if ret == 0:
                if opts.recurse:
                    failed.append(pkg)
                    log(
                        opts.logfile, "Error building %s, will try again" %
                        os.path.basename(pkg))
                else:
                    log(opts.logfile,
                        "Error building %s" % os.path.basename(pkg))
                    log(opts.logfile,
                        "See logs/results in %s" % opts.local_repo_dir)
                    if not opts.cont:
                        sys.exit(1)

            elif ret == 1:
                log(opts.logfile,
                    "Success building %s" % os.path.basename(pkg))
                built_pkgs.append(pkg)
                # createrepo with the new pkgs
                out, err = createrepo(opts.local_repo_dir)
                if err.strip():
                    log(opts.logfile,
                        "Error making local repo: %s" % opts.local_repo_dir)
                    log(opts.logfile, "Err: %s" % err)
            elif ret == 2:
                log(opts.logfile,
                    "Skipping already built pkg %s" % os.path.basename(pkg))

        if failed:
            if len(failed) != len(to_be_built):
                to_be_built = failed
                try_again = True
                log(opts.logfile,
                    'Trying to rebuild %s failed pkgs' % len(failed))
            else:
                log(
                    opts.logfile,
                    "Tried twice - following pkgs could not be successfully built:"
                )
                for pkg in failed:
                    msg = pkg
                    if pkg in downloaded_pkgs:
                        msg = downloaded_pkgs[pkg]
                    log(opts.logfile, msg)

                try_again = False
        else:
            try_again = False