def user_auth(self, username, password):
    """Authenticate a user against crowd. Returns brief information about
    the user."""
    url = ("%s/rest/usermanagement/%s/authentication?username=%s"
           % (self._uri, self._version, urllib.parse.quote(username)))
    body = ascii_bytes(ext_json.dumps({"value": password}))
    return self._request(url, body)
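# Sketch (not Kallithea code): the request user_auth builds, shown standalone
# so the Crowd REST wire format is visible. The URI, version, username, and
# password below are made-up illustration values.
def _crowd_auth_request_demo():
    import json
    import urllib.parse
    uri, version = 'https://crowd.example.com/crowd', '1'  # hypothetical values
    url = ('%s/rest/usermanagement/%s/authentication?username=%s'
           % (uri, version, urllib.parse.quote('john.doe')))
    body = json.dumps({"value": "s3cret"}).encode('ascii')  # password envelope
    return url, body  # POSTing body to url yields brief user info as JSON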
def parse_pub_key(ssh_key):
    r"""Parse SSH public key string, raise SshKeyParseError or return decoded
    keytype, data and comment

    >>> getfixture('doctest_mock_ugettext')
    >>> parse_pub_key('')
    Traceback (most recent call last):
    ...
    kallithea.lib.ssh.SshKeyParseError: SSH key is missing
    >>> parse_pub_key('''AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
    Traceback (most recent call last):
    ...
    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='
    >>> parse_pub_key('''abc AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
    Traceback (most recent call last):
    ...
    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'
    >>> parse_pub_key('''ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
    Traceback (most recent call last):
    ...
    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - failed to decode base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ'
    >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==''')
    Traceback (most recent call last):
    ...
    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - base64 part is not 'ssh-rsa' as claimed but 'csh-rsa'
    >>> parse_pub_key('''ssh-rsa AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ''')
    Traceback (most recent call last):
    ...
    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ"
    >>> parse_pub_key(''' ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ== and a comment
    ... ''')
    ('ssh-rsa', b'\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x0bThis is fake!', 'and a comment\n')
    >>> parse_pub_key('''ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIP1NA2kBQIKe74afUXmIWD9ByDYQJqUwW44Y4gJOBRuo''')
    ('ssh-ed25519', b'\x00\x00\x00\x0bssh-ed25519\x00\x00\x00 \xfdM\x03i\x01@\x82\x9e\xef\x86\x9fQy\x88X?A\xc86\x10&\xa50[\x8e\x18\xe2\x02N\x05\x1b\xa8', '')
    """
    if not ssh_key:
        raise SshKeyParseError(_("SSH key is missing"))

    parts = ssh_key.split(None, 2)
    if len(parts) < 2:
        raise SshKeyParseError(_("Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='"))

    keytype, keyvalue, comment = (parts + [''])[:3]
    if keytype not in ('ssh-rsa', 'ssh-dss', 'ssh-ed25519'):
        raise SshKeyParseError(_("Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"))

    if re.search(r'[^a-zA-Z0-9+/=]', keyvalue):
        raise SshKeyParseError(_("Incorrect SSH key - unexpected characters in base64 part %r") % keyvalue)

    try:
        key_bytes = base64.b64decode(keyvalue)
    except base64.binascii.Error:
        raise SshKeyParseError(_("Incorrect SSH key - failed to decode base64 part %r") % keyvalue)

    if not key_bytes.startswith(b'\x00\x00\x00%c%s\x00' % (len(keytype), ascii_bytes(keytype))):
        raise SshKeyParseError(_("Incorrect SSH key - base64 part is not %r as claimed but %r")
                               % (keytype, ascii_str(key_bytes[4:].split(b'\0', 1)[0])))

    return keytype, key_bytes, comment
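# Sketch (not Kallithea code): the base64 part of an OpenSSH public key is a
# sequence of big-endian length-prefixed fields, and the first field repeats
# the key type - that is what the startswith() check in parse_pub_key relies
# on. This uses the same fake key as the doctests above.
def _ssh_blob_demo():
    import base64
    import struct
    blob = base64.b64decode('AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==')
    (length,) = struct.unpack('>I', blob[:4])  # first field: 4-byte length prefix
    assert length == 7
    assert blob[4:4 + length] == b'ssh-rsa'  # ... followed by the key type itself
    return blob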
def make_ui(repo_path=None):
    """
    Create a Mercurial 'ui' object based on database Ui settings, possibly
    augmenting with content from a hgrc file.
    """
    baseui = mercurial.ui.ui()

    # clean the baseui object
    baseui._ocfg = mercurial.config.config()
    baseui._ucfg = mercurial.config.config()
    baseui._tcfg = mercurial.config.config()

    sa = meta.Session()
    for ui_ in sa.query(Ui).order_by(Ui.ui_section, Ui.ui_key):
        if ui_.ui_active:
            log.debug('config from db: [%s] %s=%r', ui_.ui_section,
                      ui_.ui_key, ui_.ui_value)
            baseui.setconfig(ascii_bytes(ui_.ui_section), ascii_bytes(ui_.ui_key),
                             b'' if ui_.ui_value is None else safe_bytes(ui_.ui_value))

    # force set push_ssl requirement to False, Kallithea handles that
    baseui.setconfig(b'web', b'push_ssl', False)
    baseui.setconfig(b'web', b'allow_push', b'*')

    # prevent interactive questions for ssh password / passphrase
    ssh = baseui.config(b'ui', b'ssh', default=b'ssh')
    baseui.setconfig(b'ui', b'ssh', b'%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)

    # push / pull hooks
    baseui.setconfig(b'hooks', b'changegroup.kallithea_log_push_action',
                     b'python:kallithea.lib.hooks.log_push_action')
    baseui.setconfig(b'hooks', b'outgoing.kallithea_log_pull_action',
                     b'python:kallithea.lib.hooks.log_pull_action')

    if repo_path is not None:
        # Note: MercurialRepository / mercurial.localrepo.instance will do this too,
        # so it will always be possible to override db settings or what is hardcoded above
        baseui.readconfig(repo_path)

    assert baseui.plain()  # set by hgcompat.monkey_do (invoked from import of vcs.backends.hg) to minimize potential impact of loading config files

    return baseui
def _rpc_call(self, action, environ, **rpc_args):
    """
    Call the specified RPC Method
    """
    raw_response = ''
    try:
        raw_response = getattr(self, action)(**rpc_args)
        if isinstance(raw_response, HTTPError):
            self._error = str(raw_response)
    except JSONRPCError as e:
        self._error = str(e)
    except Exception as e:
        log.error('Encountered unhandled exception: %s',
                  traceback.format_exc())
        json_exc = JSONRPCError('Internal server error')
        self._error = str(json_exc)

    if self._error is not None:
        raw_response = None

    response = dict(id=self._req_id, result=raw_response, error=self._error)
    try:
        return ascii_bytes(ext_json.dumps(response))
    except TypeError as e:
        log.error('API FAILED. Error encoding response for %s %s: %s\n%s',
                  action, rpc_args, e, traceback.format_exc())
        return ascii_bytes(ext_json.dumps(
            dict(
                id=self._req_id,
                result=None,
                error="Error encoding response",
            )
        ))
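# Sketch (not Kallithea code): the JSON-RPC envelope _rpc_call serializes.
# Exactly one of "result" and "error" is non-None; field values below are
# illustrative.
def _rpc_envelope_demo():
    ok = {"id": 1, "result": {"msg": "done"}, "error": None}
    failed = {"id": 1, "result": None, "error": "Internal server error"}
    return ok, failed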
def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type, gist_expires):
    """
    Store metadata inside the gist; it can later be used for imports
    or gist identification.
    """
    metadata = {
        'metadata_version': '1',
        'gist_db_id': gist_id,
        'gist_access_id': gist_access_id,
        'gist_owner_id': user_id,
        'gist_type': gist_type,
        'gist_expires': gist_expires,
        'gist_updated': time.time(),
    }
    with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
        f.write(ascii_bytes(ext_json.dumps(metadata)))
def api_call(apikey, apihost, method=None, **kw):
    """
    Api_call wrapper for Kallithea.

    :param apikey:
    :param apihost:
    :param method:
    :returns: json response from server
    """
    def _build_data(random_id):
        """
        Builds API data with given random ID

        :param random_id:
        """
        return {
            "id": random_id,
            "api_key": apikey,
            "method": method,
            "args": kw
        }

    if not method:
        raise Exception('please specify method name !')

    apihost = apihost.rstrip('/')
    id_ = random.randrange(1, 9999)
    req = urllib.request.Request('%s/_admin/api' % apihost,
                                 data=ascii_bytes(ext_json.dumps(_build_data(id_))),
                                 headers={'content-type': 'text/plain'})
    ret = urllib.request.urlopen(req)
    raw_json = ret.read()
    json_data = ext_json.loads(raw_json)
    id_ret = json_data['id']
    if id_ret == id_:
        return json_data
    else:
        _formatted_json = pprint.pformat(json_data)
        raise Exception('something went wrong. '
                        'ID mismatch got %s, expected %s | %s'
                        % (id_ret, id_, _formatted_json))
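# Hypothetical usage of api_call above; the host, key, method, and repoid
# values are made up. Keyword arguments become the JSON-RPC "args" object
# built by _build_data.
def _api_call_demo():
    return api_call('my-api-key', 'https://kallithea.example.com',
                    method='get_repo', repoid='myrepo')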
def inforefs(self, req, environ):
    """
    WSGI Response producer for HTTP GET Git Smart HTTP
    /info/refs request.
    """
    git_command = req.GET.get('service')
    if git_command not in self.commands:
        log.debug('command %s not allowed', git_command)
        return exc.HTTPMethodNotAllowed()

    # From Documentation/technical/http-protocol.txt shipped with Git:
    #
    # Clients MUST verify the first pkt-line is `# service=$servicename`.
    # Servers MUST set $servicename to be the request parameter value.
    # Servers SHOULD include an LF at the end of this line.
    # Clients MUST ignore an LF at the end of the line.
    #
    #  smart_reply     =  PKT-LINE("# service=$servicename" LF)
    #                     ref_list
    #                     "0000"
    server_advert = '# service=%s\n' % git_command
    packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
    _git_path = kallithea.CONFIG.get('git_path', 'git')
    cmd = [_git_path, git_command[4:], '--stateless-rpc',
           '--advertise-refs', self.content_path]
    log.debug('handling cmd %s', cmd)
    try:
        out = subprocessio.SubprocessIOChunker(
            cmd,
            starting_values=[ascii_bytes(packet_len + server_advert + '0000')]
        )
    except EnvironmentError as e:
        log.error(traceback.format_exc())
        raise exc.HTTPExpectationFailed()

    resp = Response()
    resp.content_type = 'application/x-%s-advertisement' % str(git_command)
    resp.charset = None
    resp.app_iter = out

    return resp
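# Sketch (not Kallithea code): how the pkt-line framing computed above works.
# The 4-hex-digit length prefix counts the payload plus the 4 length bytes
# themselves, and '0000' is the flush packet terminating the advertisement.
def _pkt_line(payload):
    return hex(len(payload) + 4)[2:].rjust(4, '0') + payload

assert _pkt_line('# service=git-upload-pack\n') == '001e# service=git-upload-pack\n'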
def check_password(password, hashed):
    """
    Checks password match the hashed value using bcrypt.
    Remains backwards compatible and accepts plain sha256 hashes which used
    to be used on Windows.

    :param password: password
    :param hashed: password in hashed form
    """
    # sha256 hashes will always be 64 hex chars
    # bcrypt hashes will always contain $ (and be shorter)
    if len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
        return hashlib.sha256(safe_bytes(password)).hexdigest() == hashed
    try:
        return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed))
    except ValueError as e:
        # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
        log.error('error from bcrypt checking password: %s', e)
        return False
    log.error('check_password failed - no method found for hash length %s',
              len(hashed))
    return False
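# Usage sketch for check_password, assuming the `bcrypt` package the function
# itself relies on; the password and both hashes below are illustrative.
def _check_password_demo():
    import hashlib
    import bcrypt
    plain = 'secret'
    legacy = hashlib.sha256(plain.encode('utf-8')).hexdigest()  # 64 hex chars
    modern = bcrypt.hashpw(plain.encode('utf-8'), bcrypt.gensalt()).decode('ascii')
    assert check_password(plain, legacy)  # matched by the sha256 branch
    assert check_password(plain, modern)  # matched by the bcrypt branch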
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.
    """
    response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        # A JSON list response is syntactically valid JavaScript and can be
        # loaded and executed as JavaScript by a malicious third-party site
        # using <script>, which can lead to cross-site data leaks.
        # JSON responses should therefore be scalars or objects (i.e. Python
        # dicts), because a JSON object is a syntax error if interpreted as JS.
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "https://web.archive.org/web/20120519231904/http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return ascii_bytes(ext_json.dumps(data))
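# Sketch (not Kallithea code) of the asymmetry the warning above is about:
# a top-level JSON array is also a valid JavaScript expression, while a
# top-level JSON object is a JavaScript syntax error, so wrapping lists in
# an object envelope blocks <script>-based cross-site data leaks.
def _envelope_demo(rows):
    import json
    unsafe = json.dumps(rows)          # e.g. '[{"user": "john"}]' - valid JS
    safe = json.dumps({"data": rows})  # '{"data": [...]}' - JS syntax error
    return unsafe, safe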
def post(self, method, args):
    """Send a generic API post to Kallithea.

    This will generate the UUID for validation check after the
    response is returned. Handle errors and get the result back.
    """
    uid = str(uuid.uuid1())
    data = self.get_api_data(uid, method, args)

    data = ascii_bytes(ext_json.dumps(data))
    headers = {'content-type': 'text/plain'}
    req = urllib.request.Request(self.url, data, headers)
    response = urllib.request.urlopen(req)
    response = ext_json.load(response)

    if uid != response["id"]:
        raise InvalidResponseIDError("UUID does not match.")

    if response["error"] is not None:
        raise ResponseError(response["error"])

    return response["result"]
def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
    """
    Returns lists of changesets that can be merged from org_repo@org_rev
    to other_repo@other_rev
    ... and the other way
    ... and the ancestors that would be used for merge

    :param org_repo: repo object, that is most likely the original repo we forked from
    :param org_rev: the revision we want our compare to be made
    :param other_repo: repo object, most likely the fork of org_repo. It has
        all changesets that we need to obtain
    :param other_rev: revision we want our compare to be made on other_repo
    """
    ancestors = None
    if org_rev == other_rev:
        org_changesets = []
        other_changesets = []

    elif alias == 'hg':
        # case two independent repos
        if org_repo != other_repo:
            hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui,
                                                             safe_bytes(other_repo.path),
                                                             safe_bytes(org_repo.path))
            # all ancestors of other_rev will be in other_repo and
            # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot

        # no remote compare do it on the same repository
        else:
            hgrepo = other_repo._repo

        ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
                     hgrepo.revs(b"id(%s) & ::id(%s)",
                                 ascii_bytes(other_rev), ascii_bytes(org_rev))]
        if ancestors:
            log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
        else:
            log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
            ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
                         hgrepo.revs(b"heads(::id(%s) & ::id(%s))",
                                     ascii_bytes(org_rev), ascii_bytes(other_rev))]  # FIXME: expensive!

        other_changesets = [
            other_repo.get_changeset(rev)
            for rev in hgrepo.revs(
                b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
                ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev))
        ]
        org_changesets = [
            org_repo.get_changeset(ascii_str(hgrepo[rev].hex()))
            for rev in hgrepo.revs(
                b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
                ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev))
        ]

    elif alias == 'git':
        if org_repo != other_repo:
            from dulwich.client import SubprocessGitClient
            from dulwich.repo import Repo

            gitrepo = Repo(org_repo.path)
            SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)

            gitrepo_remote = Repo(other_repo.path)
            SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote)

            revs = [
                ascii_str(x.commit.id)
                for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)],
                                                   exclude=[ascii_bytes(org_rev)])
            ]
            other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
            if other_changesets:
                ancestors = [other_changesets[0].parents[0].raw_id]
            else:
                # no changesets from other repo, ancestor is the other_rev
                ancestors = [other_rev]

            gitrepo.close()
            gitrepo_remote.close()

        else:
            so = org_repo.run_git_command(
                ['log', '--reverse', '--pretty=format:%H',
                 '-s', '%s..%s' % (org_rev, other_rev)]
            )
            other_changesets = [org_repo.get_changeset(cs)
                                for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
            so = org_repo.run_git_command(['merge-base', org_rev, other_rev])
            ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
        org_changesets = []

    else:
        raise Exception('Bad alias, only git and hg are allowed')

    return other_changesets, org_changesets, ancestors
def execute(self):
    created_by = User.get(request.authuser.user_id)

    pr = PullRequest()
    pr.org_repo = self.org_repo
    pr.org_ref = self.org_ref
    pr.other_repo = self.other_repo
    pr.other_ref = self.other_ref
    pr.revisions = self.revisions
    pr.title = self.title
    pr.description = self.description
    pr.owner = self.owner
    Session().add(pr)
    Session().flush()  # make database assign pull_request_id

    if self.org_repo.scm_instance.alias == 'git':
        # create a ref under refs/pull/ so that commits don't get garbage-collected
        self.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pr.pull_request_id] = ascii_bytes(self.org_rev)

    # reset state to under-review
    from kallithea.model.changeset_status import ChangesetStatusModel
    from kallithea.model.comment import ChangesetCommentsModel
    comment = ChangesetCommentsModel().create(
        text='',
        repo=self.org_repo,
        author=created_by,
        pull_request=pr,
        send_email=False,
        status_change=ChangesetStatus.STATUS_UNDER_REVIEW,
    )
    ChangesetStatusModel().set_status(
        self.org_repo,
        ChangesetStatus.STATUS_UNDER_REVIEW,
        created_by,
        comment,
        pull_request=pr,
    )

    mention_recipients = extract_mentioned_users(self.description)
    PullRequestModel().add_reviewers(created_by, pr, self.reviewers, mention_recipients)

    return pr
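# Sketch (not Kallithea code): the ref name written above for, say, pull
# request 7. Anything reachable from a ref is protected from `git gc`, which
# is why the PR head revision is anchored under refs/pull/.
assert b"refs/pull/%d/head" % 7 == b"refs/pull/7/head"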
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    DBS = celerylib.get_session()
    lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
                                      ts_max_y)
    lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir']  # Backward compatibility for TurboGears < 2.4

    log.info('running task with lockkey %s', lockkey)
    try:
        lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey))

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config.get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository) \
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics) \
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision (from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                ext_json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s', parse_limit)

        last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s',
                  last_rev, last_rev + parse_limit)
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s', cs)
            last_cs = cs  # remember last parsed changeset
            tt = cs.date.timetuple()
            k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos is not None and time_pos >= 0:
                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data'] \
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time": k,
                                  "commits": 1,
                                  "added": len(cs.added),
                                  "changed": len(cs.changed),
                                  "removed": len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            # gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
        stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))

        log.debug('last revision %s', last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s', leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0:
            get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
        elif recurse_limit <= 0:
            log.debug('Not recursing - limit has been reached')
        else:
            log.debug('Not recursing')
    except celerylib.LockHeld:
        log.info('Task with key %s already running', lockkey)
        return 'Task with key %s already running' % lockkey
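# Sketch (not Kallithea code): the shape of the per-author structure that
# gets serialized into Statistics.commit_activity above. All values are
# illustrative; the real keys come from akc(cs.author) and mktime().
def _commit_activity_demo():
    return {
        "John Doe <john@example.com>": {
            "label": "John Doe <john@example.com>",
            "data": [{"time": 1577836800.0, "commits": 3,
                      "added": 5, "changed": 2, "removed": 1}],
            "schema": ["commits"],
        },
    }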
def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
    """return a structure with repo's interesting changesets, suitable for
    the selectors in pullrequest.html

    rev: a revision that must be in the list somehow and selected by default
    branch: a branch that must be in the list and selected by default - even if closed
    branch_rev: a revision of which peers should be preferred and available."""
    # list named branches that have been merged to this named branch - it should probably merge back
    peers = []

    if branch_rev:
        # a revset not restricting to merge() would be better
        # (especially because it would get the branch point)
        # ... but is currently too expensive
        # including branches of children could be nice too
        peerbranches = set()
        for i in repo._repo.revs(
            b"sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
            ascii_bytes(branch_rev), ascii_bytes(branch_rev),
        ):
            for abranch in repo.get_changeset(i).branches:
                if abranch not in peerbranches:
                    n = 'branch:%s:%s' % (abranch, repo.get_changeset(abranch).raw_id)
                    peers.append((n, abranch))
                    peerbranches.add(abranch)

    selected = None
    tiprev = repo.tags.get('tip')
    tipbranch = None

    branches = []
    for abranch, branchrev in repo.branches.items():
        n = 'branch:%s:%s' % (abranch, branchrev)
        desc = abranch
        if branchrev == tiprev:
            tipbranch = abranch
            desc = '%s (current tip)' % desc
        branches.append((n, desc))
        if rev == branchrev:
            selected = n
        if branch == abranch:
            if not rev:
                selected = n
            branch = None
    if branch:  # branch not in list - it is probably closed
        branchrev = repo.closed_branches.get(branch)
        if branchrev:
            n = 'branch:%s:%s' % (branch, branchrev)
            branches.append((n, _('%s (closed)') % branch))
            selected = n
            branch = None
        if branch:
            log.debug('branch %r not found in %s', branch, repo)

    bookmarks = []
    for bookmark, bookmarkrev in repo.bookmarks.items():
        n = 'book:%s:%s' % (bookmark, bookmarkrev)
        bookmarks.append((n, bookmark))
        if rev == bookmarkrev:
            selected = n

    tags = []
    for tag, tagrev in repo.tags.items():
        if tag == 'tip':
            continue
        n = 'tag:%s:%s' % (tag, tagrev)
        tags.append((n, tag))
        # note: even if rev == tagrev, don't select the static tag - it must be chosen explicitly

    # prio 1: rev was selected as existing entry above

    # prio 2: create special entry for rev; rev _must_ be used
    specials = []
    if rev and selected is None:
        selected = 'rev:%s:%s' % (rev, rev)
        specials = [(selected, '%s: %s' % (_("Changeset"), rev[:12]))]

    # prio 3: most recent peer branch
    if peers and not selected:
        selected = peers[0][0]

    # prio 4: tip revision
    if not selected:
        if h.is_hg(repo):
            if tipbranch:
                selected = 'branch:%s:%s' % (tipbranch, tiprev)
            else:
                selected = 'tag:null:' + repo.EMPTY_CHANGESET
                tags.append((selected, 'null'))
        else:
            if 'master' in repo.branches:
                selected = 'branch:master:%s' % repo.branches['master']
            else:
                k, v = list(repo.branches.items())[0]
                selected = 'branch:%s:%s' % (k, v)

    groups = [(specials, _("Special")),
              (peers, _("Peer branches")),
              (bookmarks, _("Bookmarks")),
              (branches, _("Branches")),
              (tags, _("Tags")),
              ]
    return [g for g in groups if g[0]], selected
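# Sketch (not Kallithea code): the (groups, selected) shape _get_repo_refs
# returns, with illustrative entries. Each entry is a ('kind:name:rev', label)
# pair, and empty groups are filtered out before returning.
def _repo_refs_demo():
    branches = [('branch:default:' + '0' * 40, 'default (current tip)')]
    tags = [('tag:v1.0:' + '1' * 40, 'v1.0')]
    groups = [(branches, 'Branches'), (tags, 'Tags')]
    selected = branches[0][0]
    return groups, selected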
def test_push_new_repo(self, webserver, vt):
    # Clear the log so we know what is added
    UserLog.query().delete()
    Session().commit()

    # Create an empty server repo using the API
    repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
    usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
    params = {
        "id": 7,
        "api_key": usr.api_key,
        "method": 'create_repo',
        "args": dict(repo_name=repo_name,
                     owner=base.TEST_USER_ADMIN_LOGIN,
                     repo_type=vt.repo_type),
    }
    req = urllib.request.Request(
        'http://%s:%s/_admin/api' % webserver.server_address,
        data=ascii_bytes(json.dumps(params)),
        headers={'content-type': 'application/json'})
    response = urllib.request.urlopen(req)
    result = json.loads(response.read())
    # Expect something like:
    # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
    assert result['result']['success']

    # Create local clone of the empty server repo
    local_clone_dir = _get_tmp_dir()
    clone_url = vt.repo_url_param(webserver, repo_name)
    stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)

    # Make 3 commits and push to the empty server repo.
    # The server repo doesn't have any other heads than the
    # refs/heads/master we are pushing, but the `git log` in the push hook
    # should still list the 3 commits.
    stdout, stderr = _add_files_and_push(webserver, vt, local_clone_dir, clone_url=clone_url)
    if vt.repo_type == 'git':
        _check_proper_git_push(stdout, stderr)
    elif vt.repo_type == 'hg':
        assert 'pushing to ' in stdout
        assert 'remote: added ' in stdout

    # Verify that we got the right events in UserLog. Expect something like:
    # <UserLog('id:new_git_XXX:started_following_repo')>
    # <UserLog('id:new_git_XXX:user_created_repo')>
    # <UserLog('id:new_git_XXX:pull')>
    # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
    action_parts = [ul.action.split(':', 1)
                    for ul in UserLog.query().order_by(UserLog.user_log_id)]
    assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0)
            for t in action_parts] == ([
        ('started_following_repo', 0),
        ('user_created_repo', 0),
        ('pull', 0),
        ('push', 3)]
        if vt.repo_type == 'git' else [
        ('started_following_repo', 0),
        ('user_created_repo', 0),
        # (u'pull', 0),  # Mercurial outgoing hook is not called for empty clones
        ('push', 3)])