def assertEquals(self, l, r):
    """assert equality of l and r, converting bytes to str

    This is so we don't have to adapt the whole .t output.
    """
    ls = pycompat.strurl(l)
    rs = pycompat.strurl(r)
    # Print the str-converted expectation: repr() of a bytes object
    # differs between py2 ("'...'") and py3 ("b'...'"), which would
    # defeat the .t-output stability this conversion exists for.
    print('%% expect %r' % (rs, ))
    print(ls)
    assert ls == rs, (l, r)
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    apipath = b'/'.join([host, b'api', name])
    url, authinfo = util.url(apipath).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))

    # Work on a copy so the caller's dict is not mutated by the token.
    callparams = params.copy()
    callparams[b'api.token'] = token
    data = urlencodenested(callparams)

    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if not curlcmd:
        # Default transport: urllib through Mercurial's authenticating opener.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    else:
        # Escape hatch: pipe the form-encoded payload through a
        # user-configured curl command.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()

    ui.debug(b'Conduit Response: %s\n' % body)

    # Conduit replies in unicode; convert every string back to local bytes.
    def _tolocal(x):
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    parsed = pycompat.rapply(_tolocal, json.loads(body))
    if parsed.get(b'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[b'error_code'], parsed[b'error_info']))
        raise error.Abort(msg)
    return parsed[b'result']
def _buildheader():
    # The spec doesn't mention the Accept header here, but avoid
    # a gratuitous deviation from lfs-test-server in the test
    # output.
    headers = {r'Accept': r'application/vnd.git-lfs'}

    # Carry over the request's Basic Authorization header, if any.
    credentials = req.headers.get(b'Authorization', b'')
    if credentials.startswith(b'Basic '):
        headers[r'Authorization'] = pycompat.strurl(credentials)

    return headers
def set_env():
    """Export HTTPS_PROXY from /etc/environment, when that file defines it."""
    prefix = b'HTTPS_PROXY='
    try:
        with open('/etc/environment', 'rb') as envfile:
            for entry in envfile:
                if entry.startswith(prefix):
                    # Value is everything after the first '=', with
                    # surrounding whitespace and optional double quotes
                    # removed.
                    _junk, _sep, rhs = entry.strip().partition(b'=')
                    os.environ['HTTPS_PROXY'] = pycompat.strurl(
                        rhs.strip(b'"'))
                    break
    except IOError as err:
        # Tolerate a missing file; propagate any other I/O failure.
        if err.errno != errno.ENOENT:
            raise
def __init__(self, ui):
    """Connect to Bugzilla over XML-RPC and log in.

    Reads [bugzilla] config (bzurl, user, password, fixstatus,
    fixresolution), probes the server version, and performs a login,
    caching the API token for later calls.
    """
    bzaccess.__init__(self, ui)

    # Normalize the configured base URL to its XML-RPC endpoint.
    bzweb = self.ui.config(b'bugzilla', b'bzurl')
    bzweb = bzweb.rstrip(b"/") + b"/xmlrpc.cgi"

    user = self.ui.config(b'bugzilla', b'user')
    passwd = self.ui.config(b'bugzilla', b'password')

    # Status/resolution to apply when marking a bug fixed.
    self.fixstatus = self.ui.config(b'bugzilla', b'fixstatus')
    self.fixresolution = self.ui.config(b'bugzilla', b'fixresolution')

    # ServerProxy needs a native-str URL; self.transport(bzweb) supplies
    # the XML-RPC transport (defined elsewhere on this class hierarchy).
    self.bzproxy = xmlrpclib.ServerProxy(
        pycompat.strurl(bzweb), self.transport(bzweb)
    )

    # Cache major/minor server version, e.g. b'4.4.3' -> (4, 4), so other
    # methods can adapt to API differences.
    ver = self.bzproxy.Bugzilla.version()[b'version'].split(b'.')
    self.bzvermajor = int(ver[0])
    self.bzverminor = int(ver[1])

    # Log in once up front; token defaults to b'' when the response has
    # no b'token' key.
    login = self.bzproxy.User.login(
        {b'login': user, b'password': passwd, b'restrict_login': True}
    )
    self.bztoken = login.get(b'token', b'')
def _batchresponseobjects(req, objects, action, store):
    """Yield one dictionary of attributes for the Batch API response for each
    object in the list.

    req: The parsedrequest for the Batch API request
    objects: The list of objects in the Batch API object request list
    action: 'upload' or 'download'
    store: The local blob store for servicing requests"""

    # Successful lfs-test-server response to solict an upload:
    # {
    #    u'objects': [{
    #       u'size': 12,
    #       u'oid': u'31cf...8e5b',
    #       u'actions': {
    #           u'upload': {
    #               u'href': u'http://localhost:$HGPORT/objects/31cf...8e5b',
    #               u'expires_at': u'0001-01-01T00:00:00Z',
    #               u'header': {
    #                   u'Accept': u'application/vnd.git-lfs'
    #               }
    #           }
    #       }
    #    }]
    # }

    # TODO: Sort out the expires_at/expires_in/authenticated keys.

    for obj in objects:
        # Convert unicode to ASCII to create a filesystem path
        soid = obj.get(r'oid')
        oid = soid.encode(r'ascii')
        rsp = {
            r'oid': soid,
            r'size': obj.get(r'size'),  # XXX: should this check the local size?
            # r'authenticated': True,
        }

        exists = True
        verifies = False

        # Verify an existing file on the upload request, so that the client is
        # solicited to re-upload if it corrupt locally.  Download requests are
        # also verified, so the error can be flagged in the Batch API response.
        # (Maybe we can use this to short circuit the download for `hg verify`,
        # IFF the client can assert that the remote end is an hg server.)
        # Otherwise, it's potentially overkill on download, since it is also
        # verified as the file is streamed to the caller.
        try:
            verifies = store.verify(oid)
            if verifies and action == b'upload':
                # The client will skip this upload, but make sure it remains
                # available locally.
                store.linkfromusercache(oid)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                _logexception(req)

                rsp[r'error'] = {
                    r'code': 500,
                    # Standard HTTP 500 reason phrase when strerror is empty.
                    r'message': inst.strerror or r'Internal Server Error',
                }
                yield rsp
                continue

            # ENOENT: the blob simply isn't in the local store yet.
            exists = False

        # Items are always listed for downloads.  They are dropped for uploads
        # IFF they already exist locally.
        if action == b'download':
            if not exists:
                rsp[r'error'] = {
                    r'code': 404,
                    r'message': r"The object does not exist",
                }
                yield rsp
                continue

            elif not verifies:
                rsp[r'error'] = {
                    r'code': 422,  # XXX: is this the right code?
                    r'message': r"The object is corrupt",
                }
                yield rsp
                continue

        elif verifies:
            yield rsp  # Skip 'actions': already uploaded
            continue

        expiresat = datetime.datetime.now() + datetime.timedelta(minutes=10)

        def _buildheader():
            # The spec doesn't mention the Accept header here, but avoid
            # a gratuitous deviation from lfs-test-server in the test
            # output.
            hdr = {r'Accept': r'application/vnd.git-lfs'}

            auth = req.headers.get(b'Authorization', b'')
            if auth.startswith(b'Basic '):
                hdr[r'Authorization'] = pycompat.strurl(auth)

            return hdr

        rsp[r'actions'] = {
            r'%s' % pycompat.strurl(action): {
                r'href': pycompat.strurl(
                    b'%s%s/.hg/lfs/objects/%s'
                    % (req.baseurl, req.apppath, oid)),
                # datetime.isoformat() doesn't include the 'Z' suffix
                r"expires_at": expiresat.strftime(r'%Y-%m-%dT%H:%M:%SZ'),
                r'header': _buildheader(),
            }
        }

        yield rsp
def _basictransfer(self, obj, action, localstore):
    """Download or upload a single object using basic transfer protocol

    obj: dict, an object description returned by batch API
    action: string, one of ['upload', 'download']
    localstore: blobstore.local

    See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
basic-transfers.md
    """
    oid = obj[b'oid']
    # href/header come from the server's batch response for this action.
    href = obj[b'actions'][action].get(b'href')
    headers = obj[b'actions'][action].get(b'header', {}).items()

    request = util.urlreq.request(pycompat.strurl(href))
    if action == b'upload':
        # If uploading blobs, read data from local blobstore.
        if not localstore.verify(oid):
            raise error.Abort(
                _(b'detected corrupt lfs object: %s') % oid,
                hint=_(b'run hg verify'),
            )
        # Stream the blob as the PUT body rather than loading it whole.
        request.data = filewithprogress(localstore.open(oid), None)
        request.get_method = lambda: r'PUT'
        request.add_header(r'Content-Type', r'application/octet-stream')
        request.add_header(r'Content-Length', len(request.data))

    # Apply any extra headers the batch response asked us to send.
    for k, v in headers:
        request.add_header(pycompat.strurl(k), pycompat.strurl(v))

    response = b''
    try:
        with contextlib.closing(self.urlopener.open(request)) as req:
            ui = self.ui  # Shorten debug lines
            if self.ui.debugflag:
                ui.debug(b'Status: %d\n' % req.status)
                # lfs-test-server and hg serve return headers in different
                # order
                headers = pycompat.bytestr(req.info()).strip()
                ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

            if action == b'download':
                # If downloading blobs, store downloaded data to local
                # blobstore
                localstore.download(oid, req)
            else:
                # Upload path: drain the server reply (read in 1 MiB
                # chunks) so it can be surfaced in debug output.
                while True:
                    data = req.read(1048576)
                    if not data:
                        break
                    response += data
                if response:
                    ui.debug(b'lfs %s response: %s' % (action, response))
    except util.urlerr.httperror as ex:
        if self.ui.debugflag:
            self.ui.debug(
                b'%s: %s\n' % (oid, ex.read())
            )  # XXX: also bytes?

        raise LfsRemoteError(
            _(b'LFS HTTP error: %s (oid=%s, action=%s)')
            % (stringutil.forcebytestr(ex), oid, action)
        )
    except util.urlerr.urlerror as ex:
        # Include the full URL we tried to reach as a hint.
        hint = _(b'attempted connection to %s') % pycompat.bytesurl(
            util.urllibcompat.getfullurl(request)
        )
        raise LfsRemoteError(
            _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
        )
def _batchrequest(self, pointers, action):
    """Get metadata about objects pointed by pointers for given action

    Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
    See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
    """
    # Build the JSON request body from the pointer objects.
    objects = [
        {r'oid': pycompat.strurl(p.oid()), r'size': p.size()}
        for p in pointers
    ]
    requestdata = pycompat.bytesurl(
        json.dumps(
            {r'objects': objects, r'operation': pycompat.strurl(action),}
        )
    )
    url = b'%s/objects/batch' % self.baseurl
    batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
    batchreq.add_header(r'Accept', r'application/vnd.git-lfs+json')
    batchreq.add_header(r'Content-Type', r'application/vnd.git-lfs+json')
    try:
        with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
            rawjson = rsp.read()
    except util.urlerr.httperror as ex:
        # Map the common failure codes to actionable hints.
        hints = {
            400: _(
                b'check that lfs serving is enabled on %s and "%s" is '
                b'supported'
            )
            % (self.baseurl, action),
            404: _(b'the "lfs.url" config may be used to override %s')
            % self.baseurl,
        }
        hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
        raise LfsRemoteError(
            _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
            hint=hint,
        )
    except util.urlerr.urlerror as ex:
        hint = (
            _(b'the "lfs.url" config may be used to override %s')
            % self.baseurl
        )
        raise LfsRemoteError(
            _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
        )
    try:
        response = pycompat.json_loads(rawjson)
    except ValueError:
        # NOTE(review): rawjson is bytes from rsp.read(); calling
        # .encode("utf-8") on bytes raises AttributeError on py3 —
        # verify this error path against upstream.
        raise LfsRemoteError(
            _(b'LFS server returns invalid JSON: %s')
            % rawjson.encode("utf-8")
        )

    if self.ui.debugflag:
        # rsp is closed but its status/info remain readable here.
        self.ui.debug(b'Status: %d\n' % rsp.status)
        # lfs-test-server and hg serve return headers in different order
        headers = pycompat.bytestr(rsp.info()).strip()
        self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

        # Sort only for stable debug output; order is otherwise untouched.
        if r'objects' in response:
            response[r'objects'] = sorted(
                response[r'objects'], key=lambda p: p[r'oid']
            )
        self.ui.debug(
            b'%s\n'
            % pycompat.bytesurl(
                json.dumps(
                    response,
                    indent=2,
                    separators=(r'', r': '),
                    sort_keys=True,
                )
            )
        )

    def encodestr(x):
        # Recursively convert unicode strings back to UTF-8 bytes.
        if isinstance(x, pycompat.unicode):
            return x.encode('utf-8')
        return x

    return pycompat.rapply(encodestr, response)
def email(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or --confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -B/--bookmark changesets reachable by the given bookmark are
    selected.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent. Use the ``patchbomb.bundletype`` config option to
    control the bundle type as with :hg:`bundle --type`.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    The default behavior of this command can be customized through
    configuration. (See :hg:`help patchbomb` for details)

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -B feature       # send all ancestors of feature bookmark

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\  # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''
    opts = pycompat.byteskwargs(opts)

    _charsets = mail._charsets(ui)

    bundle = opts.get(b'bundle')
    date = opts.get(b'date')
    mbox = opts.get(b'mbox')
    outgoing = opts.get(b'outgoing')
    rev = opts.get(b'rev')
    bookmark = opts.get(b'bookmark')

    if not (opts.get(b'test') or mbox):
        # really sending
        mail.validateconfig(ui)

    # --- option validation: exactly one changeset-selection mode ---
    if not (revs or rev or outgoing or bundle or bookmark):
        raise error.Abort(
            _(b'specify at least one changeset with -B, -r or -o'))

    if outgoing and bundle:
        raise error.Abort(
            _(b"--outgoing mode always on with --bundle;"
              b" do not re-specify --outgoing"))
    if rev and bookmark:
        raise error.Abort(_(b"-r and -B are mutually exclusive"))

    if outgoing or bundle:
        # In these modes positional args name the destination, not revs.
        if len(revs) > 1:
            raise error.Abort(_(b"too many destinations"))
        if revs:
            dest = revs[0]
        else:
            dest = None
        revs = []

    if rev:
        if revs:
            raise error.Abort(
                _(b'use only one form to specify the revision'))
        revs = rev
    elif bookmark:
        if bookmark not in repo._bookmarks:
            raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
        revs = scmutil.bookmarkrevs(repo, bookmark)

    revs = scmutil.revrange(repo, revs)
    if outgoing:
        revs = _getoutgoing(repo, dest, revs)
    if bundle:
        opts[b'revs'] = [b"%d" % r for r in revs]

    # check if revision exist on the public destination
    publicurl = repo.ui.config(b'patchbomb', b'publicurl')
    if publicurl:
        repo.ui.debug(b'checking that revision exist in the public repo\n')
        try:
            publicpeer = hg.peer(repo, {}, publicurl)
        except error.RepoError:
            repo.ui.write_err(
                _(b'unable to access public repo: %s\n') % publicurl)
            raise
        if not publicpeer.capable(b'known'):
            repo.ui.debug(
                b'skipping existence checks: public repo too old\n')
        else:
            out = [repo[r] for r in revs]
            known = publicpeer.known(h.node() for h in out)
            missing = []
            for idx, h in enumerate(out):
                if not known[idx]:
                    missing.append(h)
            if missing:
                if len(missing) > 1:
                    msg = _(b'public "%s" is missing %s and %i others')
                    msg %= (publicurl, missing[0], len(missing) - 1)
                else:
                    msg = _(b'public url %s is missing %s')
                    msg %= (publicurl, missing[0])
                missingrevs = [ctx.rev() for ctx in missing]
                revhint = b' '.join(
                    b'-r %s' % h
                    for h in repo.set(b'heads(%ld)', missingrevs))
                hint = _(b"use 'hg push %s %s'") % (publicurl, revhint)
                raise error.Abort(msg, hint=hint)

    # start
    if date:
        start_time = dateutil.parsedate(date)
    else:
        start_time = dateutil.makedate()

    def genmsgid(id):
        # Message-Id derived from the node prefix and the series start time.
        return _msgid(id[:20], int(start_time[0]))

    # deprecated config: patchbomb.from
    sender = (opts.get(b'from') or ui.config(b'email', b'from') or
              ui.config(b'patchbomb', b'from') or
              prompt(ui, b'From', ui.username()))

    if bundle:
        stropts = pycompat.strkwargs(opts)
        bundledata = _getbundle(repo, dest, **stropts)
        bundleopts = stropts.copy()
        bundleopts.pop(r'bundle', None)  # already processed
        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
    else:
        msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        # Resolve an address list: command line, then config, then prompt.
        configkey = header.lower()
        opt = header.replace(b'-', b'_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append(b'%s: %s' % (header, b', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets,
                                       opts.get(b'test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config(b'email', configkey) or
                ui.config(b'patchbomb', configkey))
        if not addr:
            specified = (ui.hasconfig(b'email', configkey) or
                         ui.hasconfig(b'patchbomb', configkey))
            if not specified and ask:
                addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append(b'%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets,
                                       opts.get(b'test'))
        elif default:
            return mail.addrlistencode(ui, [default], _charsets,
                                       opts.get(b'test'))
        return []

    to = getaddrs(b'To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise error.Abort(_(b'no recipient addresses provided'))
    cc = getaddrs(b'Cc', ask=True, default=b'')
    bcc = getaddrs(b'Bcc')
    replyto = getaddrs(b'Reply-To')

    confirm = ui.configbool(b'patchbomb', b'confirm')
    confirm |= bool(opts.get(b'diffstat') or opts.get(b'confirm'))

    if confirm:
        # Show a final summary and require an explicit yes before sending.
        ui.write(_(b'\nFinal summary:\n\n'),
                 label=b'patchbomb.finalsummary')
        ui.write((b'From: %s\n' % sender), label=b'patchbomb.from')
        for addr in showaddrs:
            ui.write(b'%s\n' % addr, label=b'patchbomb.to')
        for m, subj, ds in msgs:
            ui.write((b'Subject: %s\n' % subj),
                     label=b'patchbomb.subject')
            if ds:
                ui.write(ds, label=b'patchbomb.diffstats')
        ui.write(b'\n')
        if ui.promptchoice(
                _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')):
            raise error.Abort(_(b'patchbomb canceled'))

    ui.write(b'\n')

    parent = opts.get(b'in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith(b'<'):
            parent = b'<' + parent
        if not parent.endswith(b'>'):
            parent += b'>'

    sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get(b'test'))
    sendmail = None
    firstpatch = None
    progress = ui.makeprogress(_(b'sending'), unit=_(b'emails'),
                               total=len(msgs))
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m[b'Message-Id'] = genmsgid(m[b'X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m[b'Message-Id']
            m[b'X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            # No X-Mercurial-Node header (e.g. intro message): use a
            # generic id.
            m[b'Message-Id'] = genmsgid(b'patchbomb')
        if parent:
            m[b'In-Reply-To'] = parent
            m[b'References'] = parent
        if not parent or b'X-Mercurial-Node' not in m:
            parent = m[b'Message-Id']

        m[b'User-Agent'] = b'Mercurial-patchbomb/%s' % util.version()
        m[b'Date'] = eutil.formatdate(start_time[0], localtime=True)

        # Bump the timestamp so each message sorts after the previous one.
        start_time = (start_time[0] + 1, start_time[1])
        m[b'From'] = sender
        m[b'To'] = b', '.join(to)
        if cc:
            m[b'Cc'] = b', '.join(cc)
        if bcc:
            m[b'Bcc'] = b', '.join(bcc)
        if replyto:
            m[b'Reply-To'] = b', '.join(replyto)
        # Fix up all headers to be native strings.
        # TODO(durin42): this should probably be cleaned up above in the
        # future.
        if pycompat.ispy3:
            for hdr, val in list(m.items()):
                change = False
                if isinstance(hdr, bytes):
                    del m[hdr]
                    hdr = pycompat.strurl(hdr)
                    change = True
                if isinstance(val, bytes):
                    # header value should be ASCII since it's encoded by
                    # mail.headencode(), but -n/--test disables it and raw
                    # value of platform encoding is stored.
                    val = encoding.strfromlocal(val)
                    if not change:
                        # prevent duplicate headers
                        del m[hdr]
                        change = True
                if change:
                    m[hdr] = val
        if opts.get(b'test'):
            # Test mode: display through the pager instead of sending.
            ui.status(_(b'displaying '), subj, b' ...\n')
            ui.pager(b'email')
            generator = mail.Generator(ui, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                ui.write(b'\n')
            except IOError as inst:
                if inst.errno != errno.EPIPE:
                    raise
        else:
            if not sendmail:
                sendmail = mail.connect(ui, mbox=mbox)
            ui.status(_(b'sending '), subj, b' ...\n')
            progress.update(i, item=subj)
            if not mbox:
                # Exim does not remove the Bcc field
                del m[b'Bcc']
            fp = stringio()
            generator = mail.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            alldests = to + bcc + cc
            alldests = [encoding.strfromlocal(d) for d in alldests]
            sendmail(sender_addr, alldests, fp.getvalue())

    progress.complete()