def _getfile(self, tmpfile, filename, hash):
    """Copy the local largefile identified by ``hash`` into ``tmpfile``.

    Returns the hash of the copied data; raises basestore.StoreError
    when the file cannot be found in the local remote store.
    """
    storedpath = lfutil.findfile(self.remote, hash)
    if not storedpath:
        raise basestore.StoreError(filename, hash, self.url,
                                   _("can't get file locally"))
    # The context manager guarantees the source file is closed even if
    # copyandhash raises mid-copy.
    with open(storedpath, 'rb') as src:
        return lfutil.copyandhash(src, tmpfile)
def _getfile(self, tmpfile, filename, hash):
    """Fetch the chunks for largefile ``hash`` from the remote store.

    HTTP failures are reported to the caller as basestore.StoreError.
    """
    try:
        chunks = self._get(hash)
    # Py3-compatible ``except ... as`` syntax (the old ``except E, e``
    # form only parses on Python 2).
    except urllib2.HTTPError as e:
        # 401s get converted to util.Aborts; everything else is fine being
        # turned into a StoreError
        raise basestore.StoreError(filename, hash, self.url, str(e))
def _getfile(self, tmpfile, filename, hash):
    """Download largefile ``hash`` into ``tmpfile`` and return its hash.

    Raises basestore.StoreError for per-file failures and error.Abort
    when the connection itself appears broken.
    """
    try:
        chunks = self._get(hash)
    except urlerr.httperror as inst:
        # 401s get converted to error.Aborts; everything else is fine being
        # turned into a StoreError
        raise basestore.StoreError(filename, hash, self.url, str(inst))
    except urlerr.urlerror as inst:
        # This usually indicates a connection problem, so don't
        # keep trying with the other files... they will probably
        # all fail too.
        raise error.Abort('%s: %s'
                          % (util.hidepassword(self.url), inst.reason))
    except IOError as inst:
        raise basestore.StoreError(filename, hash, self.url, str(inst))
    return lfutil.copyandhash(chunks, tmpfile)
def _getfile(self, tmpfile, filename, hash):
    """Open an HTTP connection to the remote bfile identified by ``hash``.

    Raises basestore.StoreError when the server answers with an HTTP
    error.
    """
    (baseurl, authinfo) = url_.getauthinfo(self.url)
    url = bfutil.urljoin(baseurl, hash)
    try:
        request = urllib2.Request(url)
        infile = self.opener.open(request)
    # Py3-compatible ``except ... as`` syntax (the old ``except E, e``
    # form only parses on Python 2).
    except urllib2.HTTPError as err:
        detail = _("HTTP error: %s %s") % (err.code, err.msg)
        raise basestore.StoreError(filename, hash, url, detail)
def _getfile(self, tmpfile, filename, hash):
    """Copy largefile ``hash`` from the local store or the user cache
    into ``tmpfile`` and return the hash of the copied contents.

    Raises basestore.StoreError when the file exists in neither place.
    """
    if lfutil.instore(self.remote, hash):
        path = lfutil.storepath(self.remote, hash)
    elif lfutil.inusercache(self.ui, hash):
        path = lfutil.usercachepath(self.ui, hash)
    else:
        raise basestore.StoreError(filename, hash, '',
                                   _("Can't get file locally"))
    # Prefer a context manager over manual try/finally: the source file
    # is closed even if copyandhash raises.
    with open(path, 'rb') as fd:
        return lfutil.copyandhash(fd, tmpfile)
def _getfile(self, tmpfile, filename, hash):
    """Fetch largefile ``hash`` from the remote store.

    The remote is first asked to stat the file; an invalid (1) or
    missing (2) answer aborts immediately.  HTTP failures during the
    actual fetch become basestore.StoreError.
    """
    # quit if the largefile isn't there
    stat = self._stat(hash)
    if stat == 1:
        raise util.Abort(_('remotestore: largefile %s is invalid') % hash)
    elif stat == 2:
        raise util.Abort(_('remotestore: largefile %s is missing') % hash)
    try:
        length, infile = self._get(hash)
    # Py3-compatible ``except ... as`` syntax (the old ``except E, e``
    # form only parses on Python 2).
    except urllib2.HTTPError as e:
        # 401s get converted to util.Aborts; everything else is fine being
        # turned into a StoreError
        raise basestore.StoreError(filename, hash, self.url, str(e))
def _getfile(self, tmpfile, filename, hash):
    """Download largefile ``hash`` into ``tmpfile`` and return its hash.

    Per-file failures become basestore.StoreError; a URLError aborts the
    whole operation since it usually means the connection is broken.
    """
    try:
        chunks = self._get(hash)
    # Py3-compatible ``except ... as`` syntax (the old ``except E, e``
    # form only parses on Python 2).
    except urllib2.HTTPError as e:
        # 401s get converted to util.Aborts; everything else is fine being
        # turned into a StoreError
        raise basestore.StoreError(filename, hash, self.url, str(e))
    except urllib2.URLError as e:
        # This usually indicates a connection problem, so don't
        # keep trying with the other files... they will probably
        # all fail too.
        raise util.Abort('%s: %s' % (util.hidepassword(self.url), e.reason))
    except IOError as e:
        raise basestore.StoreError(filename, hash, self.url, str(e))
    return lfutil.copyandhash(chunks, tmpfile)

def _verifyfile(self, cctx, cset, contents, standin, verified):
    """Record the (filename, filenode) pair for ``standin`` in ``verified``.

    Returns False for non-standin files and for pairs already verified.
    """
    filename = lfutil.splitstandin(standin)
    if not filename:
        return False
    fctx = cctx[standin]
    key = (filename, fctx.filenode())
    if key in verified:
        return False
    verified.add(key)
    expecthash = fctx.data()[0:40]
    # NOTE(review): this chunk looks truncated — ``expecthash`` is
    # computed but never compared to anything here; confirm against the
    # full source before relying on this function's result.
def _getfile(self, tmpfile, filename, hash):
    """Return the system-cache path of the bfile identified by ``hash``.

    Raises basestore.StoreError when the file is not cached locally.
    """
    # Guard clause: fail first, then return the cached path.
    if not bfutil.in_system_cache(self.ui, hash):
        raise basestore.StoreError(filename, hash, '',
                                   _("Can't get file locally"))
    return bfutil.system_cache_path(self.ui, hash)