Example #1
    def upload(self, load_path, remote_path):

        self.__connect_if_not()
        path = os.path.join(self.__ftpdir, remote_path)

        tmp_path = path + EntropyUriHandler.TMP_TXC_FILE_EXT
        tries = 0

        def updater(buf):
            self._commit_buffer_update(len(buf))
            self._update_speed()
            self._update_progress()
            self._speed_limit_loop()

        while tries < 10:

            tries += 1
            self._init_vars()

            try:

                file_size = get_file_size(load_path)
                self.__filesize = round(float(file_size)/ 1024, 1)
                self.__filekbcount = 0

                with open(load_path, "rb") as f:  # binary mode for storbinary
                    rc = self.__ftpconn.storbinary("STOR " + tmp_path, f,
                        8192, updater)

                self._update_progress(force = True)
                # now we can rename the file with its original name
                self.rename(tmp_path, path)

                done = rc.find("226") != -1
                return done

            except Exception as e: # connection reset by peer

                print_traceback()
                mytxt = red("%s: %s, %s... #%s") % (
                    _("Upload issue"),
                    repr(e),
                    _("retrying"),
                    tries+1,
                )
                self.output(
                    mytxt,
                    importance = 1,
                    level = "warning",
                    header = "  "
                    )
                self._reconnect() # reconnect
                self.delete(tmp_path)
                self.delete(path)
Example #2
    def upload(self, load_path, remote_path):

        self.__connect_if_not()
        path = os.path.join(self.__ftpdir, remote_path)

        tmp_path = path + EntropyUriHandler.TMP_TXC_FILE_EXT
        tries = 0

        def updater(buf):
            self._commit_buffer_update(len(buf))
            self._update_speed()
            self._update_progress()
            self._speed_limit_loop()

        while tries < 10:

            tries += 1
            self._init_vars()

            try:

                file_size = get_file_size(load_path)
                self.__filesize = round(float(file_size) / 1000, 1)
                self.__filekbcount = 0

                with open(load_path, "rb") as f:  # binary mode for storbinary
                    rc = self.__ftpconn.storbinary("STOR " + tmp_path, f, 8192,
                                                   updater)

                self._update_progress(force=True)
                # now we can rename the file with its original name
                self.rename(tmp_path, path)

                done = rc.find("226") != -1
                return done

            except Exception as e:  # connection reset by peer

                print_traceback()
                mytxt = red("%s: %s, %s... #%s") % (
                    _("Upload issue"),
                    repr(e),
                    _("retrying"),
                    tries + 1,
                )
                self.output(mytxt, importance=1, level="warning", header="  ")
                self._reconnect()  # reconnect
                self.delete(tmp_path)
                self.delete(path)
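
Both upload examples above follow the same pattern: stream the file through ftplib's storbinary with a progress callback, upload under a temporary name, and rename it to the final name once the transfer finishes, using the server's 226 reply as the success indicator. A minimal standalone sketch of that pattern, outside the Entropy classes (ftp_upload and its parameters are illustrative names, not part of the API shown above):

    import ftplib
    import os

    def ftp_upload(host, user, password, local_path, remote_path,
                   tmp_suffix=".tmp", blocksize=8192):
        # Upload under a temporary name; rename only after the server
        # confirms completion (reply code 226).
        total = os.path.getsize(local_path)
        sent = 0

        def progress(block):
            # storbinary calls this with every block it has sent
            nonlocal sent
            sent += len(block)
            print("\r%6.1f%%" % (100.0 * sent / max(total, 1)), end="")

        ftp = ftplib.FTP(host)
        ftp.login(user, password)
        try:
            tmp_path = remote_path + tmp_suffix
            with open(local_path, "rb") as f:  # binary mode for storbinary
                reply = ftp.storbinary("STOR " + tmp_path, f, blocksize, progress)
            if reply.startswith("226"):
                ftp.rename(tmp_path, remote_path)
                return True
            ftp.delete(tmp_path)
            return False
        finally:
            ftp.quit()
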
Example #3
    def _urllib_download(self):
        """
        urllib2 based downloader. This is the default for HTTP and FTP URLs.
        """
        self._setup_urllib_proxy()
        self.__setup_urllib_resume_support()
        # we're going to feed the md5 digestor on the way.
        self.__use_md5_checksum = True
        url = self.__encode_url(self.__url)
        url_protocol = UrlFetcher._get_url_protocol(self.__url)
        uname = os.uname()
        user_agent = "Entropy/%s (compatible; %s; %s: %s %s %s)" % (
            etpConst['entropyversion'],
            "Entropy",
            os.path.basename(url),
            uname[0],
            uname[4],
            uname[2],
        )

        if url_protocol in ("http", "https"):
            headers = {'User-Agent': user_agent,}
            req = urlmod.Request(url, headers = headers)
        else:
            req = url

        u_agent_error = False
        do_return = False
        while True:

            # get file size if available
            try:
                self.__remotefile = urlmod.urlopen(req, None, self.__timeout)
            except KeyboardInterrupt:
                self.__urllib_close(False)
                raise
            except httplib.InvalidURL:
                # malformed url!
                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                do_return = True

            except urlmod_error.HTTPError as e:
                if (e.code == 405) and not u_agent_error:
                    # server doesn't like our user agent
                    req = url
                    u_agent_error = True
                    continue
                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                do_return = True

            except urlmod_error.URLError as err: # timeout error
                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                do_return = True

            except httplib.BadStatusLine:
                # obviously, something to cope with
                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                do_return = True

            except socket.timeout:
                # arghv!!
                self.__urllib_close(True)
                self.__status = UrlFetcher.TIMEOUT_FETCH_ERROR
                do_return = True

            except socket.error:
                # connection reset by peer?
                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                do_return = True

            except ValueError: # malformed, unsupported URL? raised by urllib
                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                do_return = True

            except Exception:
                print_traceback()
                raise
            break

        if do_return:
            return self.__status

        try:
            self.__remotesize = int(self.__remotefile.headers.get(
                "content-length", -1))
        except KeyboardInterrupt:
            self.__urllib_close(False)
            raise
        except ValueError:
            pass

        try:
            # i don't remember why this is needed
            # the whole code here is crap and written at
            # scriptkiddie age, but still, it works (kinda).
            request = url
            if ((self.__startingposition > 0) and (self.__remotesize > 0)) \
                and (self.__startingposition < self.__remotesize):

                headers = {
                    "Range" : "bytes=" + \
                        str(self.__startingposition) + "-" + \
                        str(self.__remotesize)
                }
                if url_protocol in ("http", "https"):
                    headers['User-Agent'] = user_agent

                try:
                    request = urlmod.Request(
                        url,
                        headers = headers)
                except KeyboardInterrupt:
                    self.__urllib_close(False)
                    raise
                except:
                    pass

                # this will be replaced, close...
                try:
                    self.__remotefile.close()
                except:
                    pass
                self.__remotefile = urlmod.urlopen(
                    request, None, self.__timeout)

            elif self.__startingposition == self.__remotesize:
                # all fine then!
                self.__urllib_close(False)
                return self.__prepare_return()
            elif (self.__startingposition > self.__remotesize) and \
                self.__resumed:
                # there is something wrong
                # downloaded more than the advertised size
                # the HTTP server is broken or something else happened
                # locally and file cannot be trusted (resumed)
                self.__urllib_open_local_file("wb")

        except KeyboardInterrupt:
            self.__urllib_close(False)
            raise
        except:
            self.__urllib_close(True)
            self.__status = UrlFetcher.GENERIC_FETCH_ERROR
            return self.__status

        if self.__remotesize > 0:
            self.__remotesize = float(int(self.__remotesize))/1000
        else:
            # this means we were not able to get Content-Length
            self.__remotesize = 0

        if url_protocol not in ("file", "ftp", "ftps"):
            if self.__disallow_redirect and \
                (url != self.__remotefile.geturl()):

                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                return self.__status

        while True:
            try:
                rsx = self.__remotefile.read(self.__buffersize)
                if not rsx:
                    break
                if self.__abort_check_func is not None:
                    self.__abort_check_func()
                if self.__thread_stop_func is not None:
                    self.__thread_stop_func()

            except KeyboardInterrupt:
                self.__urllib_close(False)
                raise

            except socket.timeout:
                self.__urllib_close(False)
                self.__status = UrlFetcher.TIMEOUT_FETCH_ERROR
                return self.__status

            except socket.error:
                # connection reset by peer?
                self.__urllib_close(False)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                return self.__status

            except Exception:
                # python 2.4 timeouts go here
                self.__urllib_close(True)
                self.__status = UrlFetcher.GENERIC_FETCH_ERROR
                return self.__status

            self.__urllib_commit(rsx)
            if self.__show_speed:
                self.handle_statistics(self.__th_id, self.__downloadedsize,
                    self.__remotesize, self.__average, self.__oldaverage,
                    self.__updatestep, self.__show_speed, self.__datatransfer,
                    self.__time_remaining, self.__time_remaining_secs
                )
                self.update()
                self.__oldaverage = self.__average
            if self.__speedlimit:
                while self.__datatransfer > self.__speedlimit*1000:
                    time.sleep(0.1)
                    self._update_speed()
                    if self.__show_speed:
                        self.update()
                        self.__oldaverage = self.__average

        # kill thread
        self.__urllib_close(False)
        return self.__prepare_return()
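
The resume logic above reduces to: if some bytes are already on disk and the advertised remote size is larger, re-request the download with a Range header so the server sends only the missing part; if the server ignores the header, start over. A stripped-down sketch of that idea using Python 3's urllib.request (resume_download and its parameters are illustrative; the md5 digesting, proxy setup and progress reporting of the original are omitted):

    import os
    import urllib.request

    def resume_download(url, save_path, blocksize=8192, timeout=30):
        # Resume a partial download via an HTTP Range request.
        start = os.path.getsize(save_path) if os.path.exists(save_path) else 0
        headers = {"User-Agent": "example-fetcher/1.0"}
        if start > 0:
            # ask the server for the remaining bytes only
            headers["Range"] = "bytes=%d-" % start
        req = urllib.request.Request(url, headers=headers)
        with urllib.request.urlopen(req, timeout=timeout) as remote:
            # 206 means the Range was honoured; 200 means the server is
            # sending the whole file again, so truncate and rewrite
            mode = "ab" if remote.getcode() == 206 else "wb"
            with open(save_path, mode) as out:
                while True:
                    chunk = remote.read(blocksize)
                    if not chunk:
                        break
                    out.write(chunk)
        return os.path.getsize(save_path)
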
Example #4
    def download(self, remote_path, save_path):

        self.__connect_if_not()
        path = os.path.join(self.__ftpdir, remote_path)
        tmp_save_path = save_path + EntropyUriHandler.TMP_TXC_FILE_EXT

        def writer(buf):
            # writing file buffer
            f.write(buf)
            self._commit_buffer_update(len(buf))
            self._update_speed()
            self._update_progress()
            self._speed_limit_loop()

        tries = 10
        while tries:

            tries -= 1
            self._init_vars()
            self.__filekbcount = 0
            rc = ''

            try:

                # get the file size
                self.__filesize = self._get_file_size_compat(path)
                if (self.__filesize):
                    self.__filesize = round(float(int(self.__filesize))/1024, 1)
                    if (self.__filesize == 0):
                        self.__filesize = 1
                elif not self.is_path_available(path):
                    return False
                else:
                    self.__filesize = 0

                with open(tmp_save_path, "wb") as f:
                    rc = self.__ftpconn.retrbinary('RETR ' + path, writer, 8192)
                    f.flush()

                self._update_progress(force = True)
                done = rc.find("226") != -1
                if done:
                    # download complete, atomic mv
                    os.rename(tmp_save_path, save_path)

            except (IOError, self.ftplib.error_reply, socket.error) as e:
                # connection reset by peer

                print_traceback()
                mytxt = red("%s: %s, %s... #%s") % (
                    _("Download issue"),
                    repr(e),
                    _("retrying"),
                    tries+1,
                )
                self.output(
                    mytxt,
                    importance = 1,
                    level = "warning",
                    header = "  "
                    )
                self._reconnect() # reconnect
                continue

            finally:
                if os.path.isfile(tmp_save_path):
                    os.remove(tmp_save_path)

            return done
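
The download mirrors the upload examples: retrbinary feeds each received block to a writer callback, the data lands in a temporary file, and only a 226 completion reply triggers the atomic rename to the final path. A minimal sketch of that flow with plain ftplib (ftp_download and its parameters are illustrative, not the Entropy API):

    import ftplib
    import os

    def ftp_download(host, user, password, remote_path, save_path,
                     blocksize=8192):
        # Fetch remote_path into save_path with a crude progress readout.
        ftp = ftplib.FTP(host)
        ftp.login(user, password)
        try:
            ftp.voidcmd("TYPE I")             # binary mode, required for SIZE
            total = ftp.size(remote_path) or 0
            tmp_path = save_path + ".part"
            received = 0

            def writer(block):
                # retrbinary hands us every received block
                nonlocal received
                received += len(block)
                out.write(block)
                if total:
                    print("\r%6.1f%%" % (100.0 * received / total), end="")

            with open(tmp_path, "wb") as out:
                reply = ftp.retrbinary("RETR " + remote_path, writer, blocksize)

            if reply.startswith("226"):
                os.rename(tmp_path, save_path)   # atomic on the same filesystem
                return True
            os.remove(tmp_path)
            return False
        finally:
            ftp.quit()
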
Example #5
    def download(self, remote_path, save_path):

        self.__connect_if_not()
        path = os.path.join(self.__ftpdir, remote_path)
        tmp_save_path = save_path + EntropyUriHandler.TMP_TXC_FILE_EXT

        def writer(buf):
            # writing file buffer
            f.write(buf)
            self._commit_buffer_update(len(buf))
            self._update_speed()
            self._update_progress()
            self._speed_limit_loop()

        tries = 10
        while tries:

            tries -= 1
            self._init_vars()
            self.__filekbcount = 0
            rc = ''

            try:

                # get the file size
                self.__filesize = self._get_file_size_compat(path)
                if (self.__filesize):
                    self.__filesize = round(
                        float(int(self.__filesize)) / 1000, 1)
                    if (self.__filesize == 0):
                        self.__filesize = 1
                elif not self.is_path_available(path):
                    return False
                else:
                    self.__filesize = 0

                with open(tmp_save_path, "wb") as f:
                    rc = self.__ftpconn.retrbinary('RETR ' + path, writer,
                                                   8192)

                self._update_progress(force=True)
                done = rc.find("226") != -1
                if done:
                    # download complete, atomic mv
                    os.rename(tmp_save_path, save_path)

            except (IOError, self.ftplib.error_reply, socket.error) as e:
                # connection reset by peer

                print_traceback()
                mytxt = red("%s: %s, %s... #%s") % (
                    _("Download issue"),
                    repr(e),
                    _("retrying"),
                    tries + 1,
                )
                self.output(mytxt, importance=1, level="warning", header="  ")
                self._reconnect()  # reconnect
                continue

            finally:
                if os.path.isfile(tmp_save_path):
                    os.remove(tmp_save_path)

            return done
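
Both FTP download examples call self._speed_limit_loop() from their writer callback, and the urllib downloader earlier throttles inline: after each block it compares the measured transfer rate against the configured cap and sleeps in 0.1 s steps until the average falls below the limit. A self-contained sketch of that throttling idea (RateLimiter is an illustrative name, not part of the code above):

    import time

    class RateLimiter:
        """Crude limiter: sleep until the average rate falls under the cap."""

        def __init__(self, limit_bytes_per_sec):
            self.limit = limit_bytes_per_sec
            self.start = time.monotonic()
            self.transferred = 0

        def throttle(self, nbytes):
            # account for the block that was just moved
            self.transferred += nbytes
            if not self.limit:
                return
            while True:
                elapsed = time.monotonic() - self.start
                rate = self.transferred / elapsed if elapsed > 0 else float("inf")
                if rate <= self.limit:
                    break
                time.sleep(0.1)

    # usage inside a read/write loop:
    #   limiter = RateLimiter(500 * 1024)        # cap at ~500 KiB/s
    #   for chunk in iter(lambda: remote.read(8192), b""):
    #       out.write(chunk)
    #       limiter.throttle(len(chunk))
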
Example #6
    def _transceive(self, uri):

        fine = set()
        broken = set()
        fail = False
        crippled_uri = EntropyTransceiver.get_uri_name(uri)
        action = 'push'
        if self.download:
            action = 'pull'
        elif self.remove:
            action = 'remove'

        try:
            txc = EntropyTransceiver(uri)
            if const_isnumber(self.speed_limit):
                txc.set_speed_limit(self.speed_limit)
            txc.set_output_interface(self._entropy)
        except TransceiverConnectionError:
            print_traceback()
            return True, fine, broken # issues

        maxcount = len(self.myfiles)
        counter = 0

        with txc as handler:

            for mypath in self.myfiles:

                base_dir = self.txc_basedir

                if isinstance(mypath, tuple):
                    if len(mypath) < 2:
                        continue
                    base_dir, mypath = mypath

                if not handler.is_dir(base_dir):
                    handler.makedirs(base_dir)

                mypath_fn = os.path.basename(mypath)
                remote_path = os.path.join(base_dir, mypath_fn)

                syncer = handler.upload
                myargs = (mypath, remote_path)
                if self.download:
                    syncer = handler.download
                    local_path = os.path.join(self.local_basedir, mypath_fn)
                    myargs = (remote_path, local_path)
                elif self.remove:
                    syncer = handler.delete
                    myargs = (remote_path,)

                fallback_syncer, fallback_args = None, None
                # upload -> remote copy heuristic support
                # if a package file might have been already uploaded
                # to remote mirror, try to look in other repositories'
                # package directories if a file, with the same md5 and name
                # is already available. In this case, use remote copy instead
                # of upload to save bandwidth.
                if self._copy_herustic and (syncer == handler.upload):
                    # copy heuristic support enabled
                    # we are uploading
                    new_syncer, new_args = self._copy_herustic_support(
                        handler, mypath, base_dir, remote_path)
                    if new_syncer is not None:
                        fallback_syncer, fallback_args = syncer, myargs
                        syncer, myargs = new_syncer, new_args
                        action = "copy"

                counter += 1
                tries = 0
                done = False
                lastrc = None

                while tries < 5:
                    tries += 1
                    self._entropy.output(
                        "[%s|#%s|(%s/%s)] %s: %s" % (
                            blue(crippled_uri),
                            darkgreen(str(tries)),
                            blue(str(counter)),
                            bold(str(maxcount)),
                            blue(action),
                            red(os.path.basename(mypath)),
                        ),
                        importance = 0,
                        level = "info",
                        header = red(" @@ ")
                    )
                    rc = syncer(*myargs)
                    if (not rc) and (fallback_syncer is not None):
                        # if we have a fallback syncer, try it first
                        # before giving up.
                        rc = fallback_syncer(*fallback_args)

                    if rc and not (self.download or self.remove):
                        remote_md5 = handler.get_md5(remote_path)
                        rc = self.handler_verify_upload(mypath, uri,
                            counter, maxcount, tries, remote_md5 = remote_md5)
                    if rc:
                        self._entropy.output(
                            "[%s|#%s|(%s/%s)] %s %s: %s" % (
                                        blue(crippled_uri),
                                        darkgreen(str(tries)),
                                        blue(str(counter)),
                                        bold(str(maxcount)),
                                        blue(action),
                                        _("successful"),
                                        red(os.path.basename(mypath)),
                            ),
                            importance = 0,
                            level = "info",
                            header = darkgreen(" @@ ")
                        )
                        done = True
                        fine.add(uri)
                        break
                    else:
                        self._entropy.output(
                            "[%s|#%s|(%s/%s)] %s %s: %s" % (
                                        blue(crippled_uri),
                                        darkgreen(str(tries)),
                                        blue(str(counter)),
                                        bold(str(maxcount)),
                                        blue(action),
                                        brown(_("failed, retrying")),
                                        red(os.path.basename(mypath)),
                                ),
                            importance = 0,
                            level = "warning",
                            header = brown(" @@ ")
                        )
                        lastrc = rc
                        continue

                if not done:

                    self._entropy.output(
                        "[%s|(%s/%s)] %s %s: %s - %s: %s" % (
                                blue(crippled_uri),
                                blue(str(counter)),
                                bold(str(maxcount)),
                                blue(action),
                                darkred("failed, giving up"),
                                red(os.path.basename(mypath)),
                                _("error"),
                                lastrc,
                        ),
                        importance = 1,
                        level = "error",
                        header = darkred(" !!! ")
                    )

                    if mypath not in self.critical_files:
                        self._entropy.output(
                            "[%s|(%s/%s)] %s: %s, %s..." % (
                                blue(crippled_uri),
                                blue(str(counter)),
                                bold(str(maxcount)),
                                blue(_("not critical")),
                                os.path.basename(mypath),
                                blue(_("continuing")),
                            ),
                            importance = 1,
                            level = "warning",
                            header = brown(" @@ ")
                        )
                        continue

                    fail = True
                    broken.add((uri, lastrc))
                    # next mirror
                    break

        return fail, fine, broken
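
Stripped of the output calls, the inner loop above is a bounded retry wrapper: each file gets up to five attempts, a fallback transfer function is tried when the primary one fails, and uploads are only counted as successful after the remote md5 check passes. Roughly (retry_transfer, verify and the callables are illustrative placeholders, not the mirror code's API):

    def retry_transfer(primary, args, fallback=None, fallback_args=None,
                       verify=None, attempts=5):
        """Try a transfer up to `attempts` times; return (success, last_result)."""
        last = None
        for _ in range(attempts):
            result = primary(*args)
            if not result and fallback is not None:
                # primary failed (e.g. the remote copy heuristic): fall back
                result = fallback(*(fallback_args or args))
            if result and verify is not None:
                # e.g. compare local and remote md5 after an upload
                result = verify()
            if result:
                return True, result
            last = result
        return False, last
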
Example #7
    def _transceive(self, uri):

        fine = set()
        broken = set()
        fail = False
        crippled_uri = EntropyTransceiver.get_uri_name(uri)
        action = 'push'
        if self.download:
            action = 'pull'
        elif self.remove:
            action = 'remove'

        try:
            txc = EntropyTransceiver(uri)
            if const_isnumber(self.speed_limit):
                txc.set_speed_limit(self.speed_limit)
            txc.set_output_interface(self._entropy)
        except TransceiverConnectionError:
            print_traceback()
            return True, fine, broken  # issues

        maxcount = len(self.myfiles)
        counter = 0

        with txc as handler:

            for mypath in self.myfiles:

                base_dir = self.txc_basedir

                if isinstance(mypath, tuple):
                    if len(mypath) < 2:
                        continue
                    base_dir, mypath = mypath

                if not handler.is_dir(base_dir):
                    handler.makedirs(base_dir)

                mypath_fn = os.path.basename(mypath)
                remote_path = os.path.join(base_dir, mypath_fn)

                syncer = handler.upload
                myargs = (mypath, remote_path)
                if self.download:
                    syncer = handler.download
                    local_path = os.path.join(self.local_basedir, mypath_fn)
                    myargs = (remote_path, local_path)
                elif self.remove:
                    syncer = handler.delete
                    myargs = (remote_path, )

                fallback_syncer, fallback_args = None, None
                # upload -> remote copy heuristic support
                # if a package file might have been already uploaded
                # to remote mirror, try to look in other repositories'
                # package directories if a file, with the same md5 and name
                # is already available. In this case, use remote copy instead
                # of upload to save bandwidth.
                if self._copy_herustic and (syncer == handler.upload):
                    # copy heuristic support enabled
                    # we are uploading
                    new_syncer, new_args = self._copy_herustic_support(
                        handler, mypath, base_dir, remote_path)
                    if new_syncer is not None:
                        fallback_syncer, fallback_args = syncer, myargs
                        syncer, myargs = new_syncer, new_args
                        action = "copy"

                counter += 1
                tries = 0
                done = False
                lastrc = None

                while tries < 5:
                    tries += 1
                    self._entropy.output("[%s|#%s|(%s/%s)] %s: %s" % (
                        blue(crippled_uri),
                        darkgreen(str(tries)),
                        blue(str(counter)),
                        bold(str(maxcount)),
                        blue(action),
                        red(os.path.basename(mypath)),
                    ),
                                         importance=0,
                                         level="info",
                                         header=red(" @@ "))
                    rc = syncer(*myargs)
                    if (not rc) and (fallback_syncer is not None):
                        # if we have a fallback syncer, try it first
                        # before giving up.
                        rc = fallback_syncer(*fallback_args)

                    if rc and not (self.download or self.remove):
                        remote_md5 = handler.get_md5(remote_path)
                        rc = self.handler_verify_upload(mypath,
                                                        uri,
                                                        counter,
                                                        maxcount,
                                                        tries,
                                                        remote_md5=remote_md5)
                    if rc:
                        self._entropy.output("[%s|#%s|(%s/%s)] %s %s: %s" % (
                            blue(crippled_uri),
                            darkgreen(str(tries)),
                            blue(str(counter)),
                            bold(str(maxcount)),
                            blue(action),
                            _("successful"),
                            red(os.path.basename(mypath)),
                        ),
                                             importance=0,
                                             level="info",
                                             header=darkgreen(" @@ "))
                        done = True
                        fine.add(uri)
                        break
                    else:
                        self._entropy.output("[%s|#%s|(%s/%s)] %s %s: %s" % (
                            blue(crippled_uri),
                            darkgreen(str(tries)),
                            blue(str(counter)),
                            bold(str(maxcount)),
                            blue(action),
                            brown(_("failed, retrying")),
                            red(os.path.basename(mypath)),
                        ),
                                             importance=0,
                                             level="warning",
                                             header=brown(" @@ "))
                        lastrc = rc
                        continue

                if not done:

                    self._entropy.output("[%s|(%s/%s)] %s %s: %s - %s: %s" % (
                        blue(crippled_uri),
                        blue(str(counter)),
                        bold(str(maxcount)),
                        blue(action),
                        darkred("failed, giving up"),
                        red(os.path.basename(mypath)),
                        _("error"),
                        lastrc,
                    ),
                                         importance=1,
                                         level="error",
                                         header=darkred(" !!! "))

                    if mypath not in self.critical_files:
                        self._entropy.output("[%s|(%s/%s)] %s: %s, %s..." % (
                            blue(crippled_uri),
                            blue(str(counter)),
                            bold(str(maxcount)),
                            blue(_("not critical")),
                            os.path.basename(mypath),
                            blue(_("continuing")),
                        ),
                                             importance=1,
                                             level="warning",
                                             header=brown(" @@ "))
                        continue

                    fail = True
                    broken.add((uri, lastrc))
                    # next mirror
                    break

        return fail, fine, broken
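
The upload path above asks the transceiver for the remote file's md5 (handler.get_md5) and passes it to handler_verify_upload; the check amounts to comparing that digest with one computed locally. A minimal sketch of that comparison (local_md5 and verify_upload are illustrative helpers, not part of the code above):

    import hashlib

    def local_md5(path, blocksize=8192):
        """Compute the md5 of a local file without loading it into memory."""
        digest = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(blocksize), b""):
                digest.update(chunk)
        return digest.hexdigest()

    def verify_upload(local_path, remote_md5):
        """Return True when the remote digest matches the local file."""
        return remote_md5 is not None and remote_md5 == local_md5(local_path)
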