def commit_change(self, repo, repo_name, cs, user, author, message, content,
                  f_path):
    """
    Commit a change to a single file.

    :param repo: a db_repo.scm_instance
    :param repo_name: name of the repository (used for cache invalidation
        and push hooks)
    :param cs: parent changeset the commit is based on
    :param user: committing user; resolved to a User object
    :param author: commit author (may differ from committer)
    :param message: commit message
    :param content: new raw content of the file
    :param f_path: path of the file being changed
    :returns: the newly created tip changeset
    :raises IMCCommitError: if the backend commit fails
    """
    user = self._get_user(user)
    IMC = self._get_IMC_module(repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = IMC(repo)
    imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
    try:
        tip = imc.commit(message=message, author=author,
                         parents=[cs], branch=cs.branch)
    except Exception as e:
        log.error(traceback.format_exc())
        raise IMCCommitError(str(e))
    finally:
        # always clear caches, if commit fails we want fresh object also
        self.mark_for_invalidation(repo_name)
    # only reached on success: notify hooks about the local push
    self._handle_push(repo,
                      username=user.username,
                      action='push_local',
                      repo_name=repo_name,
                      revisions=[tip.raw_id])
    return tip
def commit_change(self, repo, repo_name, cs, user, author, message, content,
                  f_path):
    """
    Commit a change to a single file.

    :param repo: SCM instance
    :param repo_name: name of the repository
    :param cs: parent changeset the commit is based on
    :param user: committing user; resolved to a User object
    :param author: commit author (may differ from committer)
    :param message: commit message
    :param content: new raw content of the file
    :param f_path: path of the file being changed
    :raises IMCCommitError: if the backend commit fails
    """
    user = self._get_user(user)
    IMC = self._get_IMC_module(repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = IMC(repo)
    imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
    try:
        tip = imc.commit(message=message, author=author,
                         parents=[cs], branch=cs.branch)
    # `except Exception, e` is Python-2-only syntax (removed in Python 3);
    # use `as`, consistent with the other commit_change implementation
    except Exception as e:
        log.error(traceback.format_exc())
        raise IMCCommitError(str(e))
def index(self):
    """Render the branches page: open branches plus, for hg, closed ones."""
    def _branchtags(localrepo):
        # collect closed branches: map branch name -> tip node for every
        # branch whose tip changeset carries the 'close' extra flag
        bt_closed = {}
        for bn, heads in localrepo.branchmap().iteritems():
            tip = heads[-1]
            if 'close' in localrepo.changelog.read(tip)[5]:
                bt_closed[bn] = tip
        return bt_closed

    cs_g = c.db_repo_scm_instance.get_changeset

    c.repo_closed_branches = {}
    # closed branches are a Mercurial-only concept
    if c.db_repo.repo_type == 'hg':
        bt_closed = _branchtags(c.db_repo_scm_instance._repo)
        _closed_branches = [(safe_unicode(n), cs_g(binascii.hexlify(h)),)
                            for n, h in bt_closed.items()]
        c.repo_closed_branches = OrderedDict(
            sorted(_closed_branches, key=lambda ctx: ctx[0], reverse=False))

    # open branches, sorted by name for stable display
    _branches = [(safe_unicode(n), cs_g(h))
                 for n, h in c.db_repo_scm_instance.branches.items()]
    c.repo_branches = OrderedDict(
        sorted(_branches, key=lambda ctx: ctx[0], reverse=False))

    return render('branches/branches.html')
def settings_mapping(self): """GET /admin/settings/mapping: All items in the collection""" # url('admin_settings_mapping') c.active = 'mapping' if request.POST: rm_obsolete = request.POST.get('destroy', False) install_git_hooks = request.POST.get('hooks', False) invalidate_cache = request.POST.get('invalidate', False) log.debug('rescanning repo location with destroy obsolete=%s and ' 'install git hooks=%s' % (rm_obsolete,install_git_hooks)) if invalidate_cache: log.debug('invalidating all repositories cache') for repo in Repository.get_all(): ScmModel().mark_for_invalidation(repo.repo_name, delete=True) filesystem_repos = ScmModel().repo_scan() added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, install_git_hook=install_git_hooks, user=c.authuser.username) h.flash(h.literal(_('Repositories successfully rescanned. Added: %s. Removed: %s.') % (', '.join(h.link_to(safe_unicode(repo_name), h.url('summary_home', repo_name=repo_name)) for repo_name in added) or '-', ', '.join(h.escape(safe_unicode(repo_name)) for repo_name in removed) or '-')), category='success') return redirect(url('admin_settings_mapping')) defaults = Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) return htmlfill.render( render('admin/settings/settings.html'), defaults=defaults, encoding="UTF-8", force_defaults=False)
def index(self):
    """Render the branches page: open branches plus, for hg, closed ones."""
    def _branchtags(localrepo):
        # collect closed branches: map branch name -> tip node for every
        # branch whose tip changeset carries the 'close' extra flag
        bt_closed = {}
        for bn, heads in localrepo.branchmap().iteritems():
            tip = heads[-1]
            if 'close' in localrepo.changelog.read(tip)[5]:
                bt_closed[bn] = tip
        return bt_closed

    cs_g = c.db_repo_scm_instance.get_changeset

    c.repo_closed_branches = {}
    # closed branches are a Mercurial-only concept
    if c.db_repo.repo_type == 'hg':
        bt_closed = _branchtags(c.db_repo_scm_instance._repo)
        _closed_branches = [(safe_unicode(n), cs_g(binascii.hexlify(h)),)
                            for n, h in bt_closed.items()]
        c.repo_closed_branches = OrderedDict(sorted(_closed_branches,
                                                    key=lambda ctx: ctx[0],
                                                    reverse=False))

    # open branches, sorted by name for stable display
    _branches = [(safe_unicode(n), cs_g(h))
                 for n, h in c.db_repo_scm_instance.branches.items()]
    c.repo_branches = OrderedDict(sorted(_branches,
                                         key=lambda ctx: ctx[0],
                                         reverse=False))

    return render('branches/branches.html')
def add_doc(self, writer, path, repo, repo_name, index_rev=None):
    """
    Adding doc to writer
    this function itself fetches data from the instance of vcs backend

    :param writer: whoosh index writer
    :param path: path of the file to index
    :param repo: vcs repository instance to fetch the file from
    :param repo_name: repository name stored in the index document
    :param index_rev: revision to read the file at (default: tip)
    :returns: tuple (indexed, indexed_w_content) — counts of docs added
        without and with content, respectively (each is 0 or 1)
    """
    node = self.get_node(repo, path, index_rev)
    indexed = indexed_w_content = 0
    # we just index the content of chosen files, and skip binary files
    if node.extension in INDEX_EXTENSIONS and not node.is_binary:
        u_content = node.content
        if not isinstance(u_content, unicode):
            log.warning(' >> %s Could not get this content as unicode '
                        'replacing with empty content' % path)
            u_content = u''
        else:
            log.debug(' >> %s [WITH CONTENT]' % path)
            indexed_w_content += 1
    else:
        log.debug(' >> %s' % path)
        # just index file name without it's content
        u_content = u''
        indexed += 1

    p = safe_unicode(path)

    writer.add_document(fileid=p,
                        owner=unicode(repo.contact),
                        repository=safe_unicode(repo_name),
                        path=p,
                        content=u_content,
                        modtime=self.get_node_mtime(node),
                        extension=node.extension)
    return indexed, indexed_w_content
def delete_nodes(self, user, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Deletes specified nodes from repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_cs: parent changeset; if empty, this is the initial commit
    :param author: author of commit, can be different from committer
        (only for git)
    :param trigger_push_hook: trigger push hooks
    :returns: new committed changeset after deletion
    """
    user = self._get_user(user)
    scm_instance = repo.scm_instance_no_cache()

    processed_nodes = []
    for f_path in nodes:
        f_path = self._sanitize_path(f_path)
        # content can be empty but for compatibility it allows same dicts
        # structure as add_nodes
        content = nodes[f_path].get('content')
        processed_nodes.append((f_path, content))

    message = safe_unicode(message)
    committer = user.full_contact
    author = safe_unicode(author) if author else committer

    IMC = self._get_IMC_module(scm_instance.alias)
    imc = IMC(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # add multiple nodes
    for path, content in processed_nodes:
        imc.remove(FileNode(path, content=content))

    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    return tip
def update_nodes(self, user, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Commits specified nodes (add/delete/modify operations) to repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename: {'op': op, 'filename': new_name,
        'content': content}, ...} where op is 'add', 'del' or 'mod'
    :param parent_cs: parent changeset; if empty, this is the initial commit
    :param author: author of commit, defaults to the committer
    :param trigger_push_hook: trigger push hooks
    :returns: new committed changeset
    """
    user = self._get_user(user)
    scm_instance = repo.scm_instance_no_cache()

    message = safe_unicode(message)
    committer = user.full_contact
    author = safe_unicode(author) if author else committer

    imc_class = self._get_IMC_module(scm_instance.alias)
    imc = imc_class(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # add multiple nodes
    for _filename, data in nodes.items():
        # new filename, can be renamed from the old one
        filename = self._sanitize_path(data['filename'])
        old_filename = self._sanitize_path(_filename)
        content = data['content']

        filenode = FileNode(old_filename, content=content)
        op = data['op']
        if op == 'add':
            imc.add(filenode)
        elif op == 'del':
            imc.remove(filenode)
        elif op == 'mod':
            if filename != old_filename:
                #TODO: handle renames, needs vcs lib changes
                imc.remove(filenode)
                imc.add(FileNode(filename, content=content))
            else:
                imc.change(filenode)

    # commit changes
    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    # return the new tip, consistent with delete_nodes()
    return tip
def auth(self, userobj, username, password, settings, **kwargs):
    """Authenticate request.

    Container-based auth: the username is taken from the WSGI environ
    (not from the supplied username/password). Returns a dict of user
    attributes on success, or None if this plugin cannot authenticate.
    """
    environ = kwargs.get('environ')
    if not environ:
        log.debug('Empty environ data skipping...')
        return None

    if not userobj:
        userobj = self.get_user('', environ=environ, settings=settings)

    # we don't care passed username/password for container auth plugins.
    # only way to log in is using environ
    username = None
    identity = {}
    if userobj:
        username = getattr(userobj, 'username')

    if not username:
        # we don't have any objects in DB, user doesn't exist; extract
        # username from environ based on the settings
        identity = self._get_user_info(environ, settings)
        if identity:
            username = identity['username']

    # if cannot fetch username, it's a no-go for this plugin to proceed
    if not username:
        return None

    # session identity (if any) may carry extra profile fields
    identity = pylons.session.get(self.session_key)
    admin = getattr(userobj, 'admin', False)
    active = getattr(userobj, 'active', self.user_activation_state())
    if identity:
        email = identity.get('email', getattr(userobj, 'email', ''))
        firstname = identity.get('given_name', getattr(userobj, 'firstname', ''))
        lastname = identity.get('family_name', getattr(userobj, 'lastname', ''))
    else:
        email = getattr(userobj, 'email', '')
        firstname = getattr(userobj, 'firstname', '')
        lastname = getattr(userobj, 'lastname', '')
    extern_type = getattr(userobj, 'extern_type', '')

    user_attrs = {
        'username': username,
        'firstname': safe_unicode(firstname or username),
        'lastname': safe_unicode(lastname or ''),
        'groups': [],
        'email': email or '',
        'admin': admin or False,
        'active': active,
        'active_from_extern': True,
        'extern_name': username,
        'extern_type': extern_type,
    }

    log.info('user `%s` authenticated correctly' % user_attrs['username'])
    return user_attrs
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be one of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional IP address from what the action was made
    :param sa: optional sqlalchemy session
    :param commit: commit the session after adding the log entry
    :raises Exception: if `user` is neither a user object nor a username string
    """

    if not sa:
        sa = meta.Session()
    # if we don't get explicit IP address try to get one from registered user
    # in tmpl context var
    if not ipaddr:
        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')

    if getattr(user, 'user_id', None):
        user_obj = User.get(user.user_id)
    elif isinstance(user, basestring):
        user_obj = User.get_by_username(user)
    else:
        raise Exception('You have to provide a user object or a username')

    if getattr(repo, 'repo_id', None):
        repo_obj = Repository.get(repo.repo_id)
        repo_name = repo_obj.repo_name
    elif isinstance(repo, basestring):
        repo_name = repo.lstrip('/')
        repo_obj = Repository.get_by_repo_name(repo_name)
    else:
        repo_obj = None
        # repo_name column stores unicode; keep the empty value unicode too
        repo_name = u''

    user_log = UserLog()
    user_log.user_id = user_obj.user_id
    user_log.username = user_obj.username
    user_log.action = safe_unicode(action)

    user_log.repository = repo_obj
    user_log.repository_name = repo_name

    user_log.action_date = datetime.datetime.now()
    user_log.user_ip = ipaddr
    sa.add(user_log)

    # pass args lazily so the logging module only formats when the
    # message is actually emitted
    log.info('Logging action:%s on %s by user:%s ip:%s',
             action, safe_unicode(repo), user_obj, ipaddr)
    if commit:
        sa.commit()
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be one of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional IP address from what the action was made
    :param sa: optional sqlalchemy session
    :param commit: commit the session after adding the log entry
    :raises Exception: if `user` is neither a user object nor a username string
    """

    if not sa:
        sa = meta.Session()
    # if we don't get explicit IP address try to get one from registered user
    # in tmpl context var
    if not ipaddr:
        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')

    if getattr(user, 'user_id', None):
        user_obj = User.get(user.user_id)
    elif isinstance(user, basestring):
        user_obj = User.get_by_username(user)
    else:
        raise Exception('You have to provide a user object or a username')

    if getattr(repo, 'repo_id', None):
        repo_obj = Repository.get(repo.repo_id)
        repo_name = repo_obj.repo_name
    elif isinstance(repo, basestring):
        repo_name = repo.lstrip('/')
        repo_obj = Repository.get_by_repo_name(repo_name)
    else:
        repo_obj = None
        repo_name = u''

    user_log = UserLog()
    user_log.user_id = user_obj.user_id
    user_log.username = user_obj.username
    user_log.action = safe_unicode(action)

    user_log.repository = repo_obj
    user_log.repository_name = repo_name

    user_log.action_date = datetime.datetime.now()
    user_log.user_ip = ipaddr
    sa.add(user_log)

    log.info('Logging action:%s on %s by user:%s ip:%s',
             action, safe_unicode(repo), user_obj, ipaddr)
    if commit:
        sa.commit()
def test_delete_non_ascii(self):
    """Create a repo with a non-ascii name, verify it exists in DB,
    web UI and filesystem, then delete it and verify full removal."""
    self.log_user()
    non_ascii = "ąęł"
    repo_name = "%s%s" % (safe_str(self.NEW_REPO), non_ascii)
    repo_name_unicode = safe_unicode(repo_name)
    description = 'description for newly created repo' + non_ascii
    description_unicode = safe_unicode(description)
    response = self.app.post(url('repos'),
                    fixture._get_repo_create_params(repo_private=False,
                                                    repo_name=repo_name,
                                                    repo_type=self.REPO_TYPE,
                                                    repo_description=description,
                                                    _authentication_token=self.authentication_token()))
    ## run the check page that triggers the flash message
    response = self.app.get(url('repo_check_home', repo_name=repo_name))
    self.assertEqual(response.json, {u'result': True})
    self.checkSessionFlash(response,
                           u'Created repository <a href="/%s">%s</a>'
                           % (urllib.quote(repo_name), repo_name_unicode))
    # test if the repo was created in the database
    new_repo = Session().query(Repository) \
        .filter(Repository.repo_name == repo_name_unicode).one()

    self.assertEqual(new_repo.repo_name, repo_name_unicode)
    self.assertEqual(new_repo.description, description_unicode)

    # test if the repository is visible in the list ?
    response = self.app.get(url('summary_home', repo_name=repo_name))
    response.mustcontain(repo_name)
    response.mustcontain(self.REPO_TYPE)

    # test if the repository was created on filesystem
    try:
        vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_unicode)))
    except vcs.exceptions.VCSError:
        pytest.fail('no repo %s in filesystem' % repo_name)

    response = self.app.post(url('delete_repo', repo_name=repo_name),
                             params={'_method': 'delete',
                                     '_authentication_token': self.authentication_token()})
    self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name_unicode))
    response.follow()

    #check if repo was deleted from db
    deleted_repo = Session().query(Repository) \
        .filter(Repository.repo_name == repo_name_unicode).scalar()

    self.assertEqual(deleted_repo, None)
    self.assertEqual(os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_unicode)),
                     False)
def test_delete_non_ascii(self):
    """Create a repo with a non-ascii name, verify it exists in DB,
    web UI and filesystem, then delete it and verify full removal."""
    self.log_user()
    non_ascii = "ąęł"
    repo_name = "%s%s" % (safe_str(self.NEW_REPO), non_ascii)
    repo_name_unicode = safe_unicode(repo_name)
    description = 'description for newly created repo' + non_ascii
    description_unicode = safe_unicode(description)
    response = self.app.post(url('repos'),
                    fixture._get_repo_create_params(repo_private=False,
                                                    repo_name=repo_name,
                                                    repo_type=self.REPO_TYPE,
                                                    repo_description=description,
                                                    _authentication_token=self.authentication_token()))
    ## run the check page that triggers the flash message
    response = self.app.get(url('repo_check_home', repo_name=repo_name))
    assert response.json == {u'result': True}
    self.checkSessionFlash(response,
                           u'Created repository <a href="/%s">%s</a>'
                           % (urllib.quote(repo_name), repo_name_unicode))
    # test if the repo was created in the database
    new_repo = Session().query(Repository) \
        .filter(Repository.repo_name == repo_name_unicode).one()

    assert new_repo.repo_name == repo_name_unicode
    assert new_repo.description == description_unicode

    # test if the repository is visible in the list ?
    response = self.app.get(url('summary_home', repo_name=repo_name))
    response.mustcontain(repo_name)
    response.mustcontain(self.REPO_TYPE)

    # test if the repository was created on filesystem
    try:
        vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_unicode)))
    except vcs.exceptions.VCSError:
        pytest.fail('no repo %s in filesystem' % repo_name)

    response = self.app.post(url('delete_repo', repo_name=repo_name),
                             params={'_authentication_token': self.authentication_token()})
    self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name_unicode))
    response.follow()

    #check if repo was deleted from db
    deleted_repo = Session().query(Repository) \
        .filter(Repository.repo_name == repo_name_unicode).scalar()

    # use identity / truthiness tests instead of == None / == False (PEP 8)
    assert deleted_repo is None
    assert not os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_unicode))
def app_settings_value(self, val):
    """
    Setter that will always make sure we use unicode in app_settings_value

    :param val: value to store; coerced to unicode first
    """
    coerced = safe_unicode(val)
    self._app_settings_value = coerced
def _escaper(self, string):
    """
    Escaper for diff: escapes HTML-special chars, marks whitespace
    characters with markup, and enforces the diff size limit.

    :param string: a single raw diff line
    :returns: the escaped unicode line
    :raises DiffLimitExceeded: when the accumulated diff size exceeds
        ``self.diff_limit``
    """

    self.cur_diff_size += len(string)

    # escaper gets iterated on each .next() call and it checks if each
    # parsed line doesn't exceed the diff limit
    if self.diff_limit is not None and self.cur_diff_size > self.diff_limit:
        raise DiffLimitExceeded('Diff Limit Exceeded')

    def substitute(m):
        groups = m.groups()
        # HTML-special characters must be replaced by their entities —
        # returning the raw character would make this escaper a no-op
        if groups[0]:
            return '&amp;'
        if groups[1]:
            return '&lt;'
        if groups[2]:
            return '&gt;'
        # whitespace characters are wrapped in markup so they render visibly
        if groups[3]:
            return '<u>\t</u>'
        if groups[4]:
            return '<u class="cr"></u>'
        if groups[5]:
            return ' <i></i>'
        assert False

    return self._escape_re.sub(substitute, safe_unicode(string))
def setup_class(cls):
    """Load the journal fixture CSV into a fresh UserLog table."""
    UserLog.query().delete()
    Session().commit()

    def strptime(val):
        # parse timestamps with an optional fractional-seconds part
        fmt = '%Y-%m-%d %H:%M:%S'
        if '.' not in val:
            return datetime.datetime.strptime(val, fmt)

        nofrag, frag = val.split(".")
        date = datetime.datetime.strptime(nofrag, fmt)

        frag = frag[:6]  # truncate to microseconds
        frag += (6 - len(frag)) * '0'  # add 0s
        return date.replace(microsecond=int(frag))

    with open(os.path.join(FIXTURES, 'journal_dump.csv')) as f:
        for row in csv.DictReader(f):
            ul = UserLog()
            for k, v in row.iteritems():
                v = safe_unicode(v)
                if k == 'action_date':
                    v = strptime(v)
                if k in ['user_id', 'repository_id']:
                    # nullable due to FK problems
                    v = None
                setattr(ul, k, v)
            Session().add(ul)

    Session().commit()
def __call__(self, environ, context):
    """Handle a request: resolve the authenticated user (API key,
    bearer token or session), log the access, and dispatch to the
    controller. HTTP exceptions are returned as the response."""
    try:
        request.ip_addr = _get_ip_addr(environ)
        # make sure that we update permissions each time we call controller
        self._basic_security_checks()

        #set globals for auth user
        bearer_token = None
        try:
            # Request.authorization may raise ValueError on invalid input
            type, params = request.authorization
        except (ValueError, TypeError):
            pass
        else:
            if type.lower() == 'bearer':
                bearer_token = params

        request.authuser = request.user = self._determine_auth_user(
            request.GET.get('api_key'),
            bearer_token,
            session.get('authuser'),
        )

        log.info('IP: %s User: %s accessed %s',
            request.ip_addr, request.authuser,
            safe_unicode(_get_access_path(environ)),
        )
        return super(BaseController, self).__call__(environ, context)
    except webob.exc.HTTPException as e:
        return e
def rst(cls, source, safe=True):
    """Render reStructuredText *source* to an HTML fragment.

    Dangerous directives are disabled by registering None for each
    name in RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES. Falls back to
    plain rendering when docutils is missing or (if *safe*) when
    rendering fails; otherwise re-raises.
    """
    source = safe_unicode(source)
    try:
        from docutils.core import publish_parts
        from docutils.parsers.rst import directives
        docutils_settings = dict([(alias, None) for alias in
                            cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES])

        docutils_settings.update({'input_encoding': 'unicode',
                                  'report_level': 4})

        # registering None disables the disallowed directives
        for k, v in docutils_settings.iteritems():
            directives.register_directive(k, v)

        parts = publish_parts(source=source,
                              writer_name="html4css1",
                              settings_overrides=docutils_settings)

        return parts['html_title'] + parts["fragment"]
    except ImportError:
        log.warning('Install docutils to use this function')
        return cls.plain(source)
    except Exception:
        log.error(traceback.format_exc())
        if safe:
            log.debug('Falling back to render in plain mode')
            return cls.plain(source)
        else:
            raise
def index_changesets(self, writer, repo_name, repo, start_rev=None):
    """
    Add all changeset in the vcs repo starting at start_rev
    to the index writer

    :param writer: the whoosh index writer to add to
    :param repo_name: name of the repository from whence the
      changeset originates including the repository group
    :param repo: the vcs repository instance to index changesets for,
      the presumption is the repo has changesets to index
    :param start_rev=None: the full sha id to start indexing from
      if start_rev is None then index from the first changeset in
      the repo
    :returns: number of changesets indexed
    """

    if start_rev is None:
        start_rev = repo[0].raw_id

    log.debug('indexing changesets in %s starting at rev: %s',
              repo_name, start_rev)

    indexed = 0
    cs_iter = repo.get_changesets(start=start_rev)
    total = len(cs_iter)
    for cs in cs_iter:
        log.debug('    >> %s/%s', cs, total)
        writer.add_document(
            raw_id=unicode(cs.raw_id),
            owner=unicode(repo.contact),
            date=cs._timestamp,
            repository_rawname=safe_unicode(repo_name),
            repository=safe_unicode(repo_name),
            author=cs.author,
            message=cs.message,
            last=cs.last,
            added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
            removed=u' '.join(
                [safe_unicode(node.path) for node in cs.removed]).lower(),
            changed=u' '.join(
                [safe_unicode(node.path) for node in cs.changed]).lower(),
            parents=u' '.join([cs.raw_id for cs in cs.parents]),
        )
        indexed += 1

    log.debug('indexed %d changesets for repo %s', indexed, repo_name)
    return indexed
def render_w_mentions(source, repo_name=None):
    """
    Render plain text with revision hashes and issue references urlified
    and with @mention highlighting.

    :param source: text to render
    :param repo_name: optional repository used to resolve revision links
    :returns: HTML literal wrapped in a formatted-fixed div
    """
    urlified = urlify_text(safe_unicode(source), repo_name=repo_name)
    return literal('<div class="formatted-fixed">%s</div>' % urlified)
def _get_fixedpath(self, path):
    """
    Small fix for repo_path: strip the leading '/<repo_name>/' prefix
    and any surrounding slashes from *path*.

    :param path: raw request path, expected to start with the repo prefix
    """
    prefix = '/' + self.repo_name + '/'
    path = safe_unicode(path)
    assert path.startswith(prefix)
    return path[len(prefix):].strip('/')
def __add_reviewers(self, user, pr, reviewers, mention_recipients=None):
    """Add reviewers to a pull request and notify them (and any
    additional @mentioned users) by email.

    :param user: user performing the action (notification sender)
    :param pr: the PullRequest
    :param reviewers: iterable of users/user_ids to add as reviewers
    :param mention_recipients: optional set of additionally mentioned users
    :raises UserInvalidException: if a reviewer cannot be resolved
    """
    #members
    for member in set(reviewers):
        _usr = self._get_user(member)
        if _usr is None:
            raise UserInvalidException(member)
        reviewer = PullRequestReviewers(_usr, pr)
        Session().add(reviewer)

    revision_data = [(x.raw_id, x.message)
                     for x in map(pr.org_repo.get_changeset, pr.revisions)]

    #notification to reviewers
    pr_url = pr.url(canonical=True)
    threading = ['%s-pr-%s@%s' % (pr.other_repo.repo_name,
                                  pr.pull_request_id,
                                  h.canonical_hostname())]
    subject = safe_unicode(
        h.link_to(
          _('%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s') % \
            {'user': user.username,
             'pr_title': pr.title,
             'pr_nice_id': pr.nice_id()},
            pr_url)
        )
    body = pr.description
    _org_ref_type, org_ref_name, _org_rev = pr.org_ref.split(':')
    email_kwargs = {
        'pr_title': pr.title,
        'pr_user_created': user.full_name_and_username,
        'pr_repo_url': h.canonical_url('summary_home', repo_name=pr.other_repo.repo_name),
        'pr_url': pr_url,
        'pr_revisions': revision_data,
        'repo_name': pr.other_repo.repo_name,
        'org_repo_name': pr.org_repo.repo_name,
        'pr_nice_id': pr.nice_id(),
        'ref': org_ref_name,
        'pr_username': user.username,
        'threading': threading,
        'is_mention': False,
        }
    if reviewers:
        NotificationModel().create(created_by=user, subject=subject, body=body,
                                   recipients=reviewers,
                                   type_=Notification.TYPE_PULL_REQUEST,
                                   email_kwargs=email_kwargs)

    if mention_recipients:
        mention_recipients.discard(None)
        # avoid double-notifying users who are already reviewers
        mention_recipients.difference_update(reviewers)
    if mention_recipients:
        email_kwargs['is_mention'] = True
        subject = _('[Mention]') + ' ' + subject

        NotificationModel().create(created_by=user, subject=subject, body=body,
                                   recipients=mention_recipients,
                                   type_=Notification.TYPE_PULL_REQUEST,
                                   email_kwargs=email_kwargs)
def __call__(self, user, repo_name):
    """Check *user*'s permission on *repo_name*.

    :returns: result of self.check_permissions() against the user's
        permission set for that repository
    """
    # repo_name MUST be unicode, since we handle keys in permission
    # dict by unicode
    repo_name = safe_unicode(repo_name)
    usr = AuthUser(user.user_id)
    self.user_perms = set([usr.permissions['repositories'][repo_name]])
    self.username = user.username
    self.repo_name = repo_name
    return self.check_permissions()
def index_changesets(self, writer, repo_name, repo, start_rev=None):
    """
    Add all changeset in the vcs repo starting at start_rev
    to the index writer

    :param writer: the whoosh index writer to add to
    :param repo_name: name of the repository from whence the
      changeset originates including the repository group
    :param repo: the vcs repository instance to index changesets for,
      the presumption is the repo has changesets to index
    :param start_rev=None: the full sha id to start indexing from
      if start_rev is None then index from the first changeset in
      the repo
    :returns: number of changesets indexed
    """

    if start_rev is None:
        start_rev = repo[0].raw_id

    log.debug('indexing changesets in %s starting at rev: %s',
              repo_name, start_rev)

    indexed = 0
    cs_iter = repo.get_changesets(start=start_rev)
    total = len(cs_iter)
    for cs in cs_iter:
        log.debug('    >> %s/%s', cs, total)
        writer.add_document(
            raw_id=unicode(cs.raw_id),
            owner=unicode(repo.contact),
            date=cs._timestamp,
            repository_rawname=safe_unicode(repo_name),
            repository=safe_unicode(repo_name),
            author=cs.author,
            message=cs.message,
            last=cs.last,
            added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
            removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(),
            changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(),
            parents=u' '.join([cs.raw_id for cs in cs.parents]),
        )
        indexed += 1

    log.debug('indexed %d changesets for repo %s', indexed, repo_name)
    return indexed
def settings_mapping(self):
    """Admin settings: repository mapping.

    On POST: rescan the filesystem repository location (optionally
    destroying obsolete DB entries and installing/overwriting git
    hooks), optionally invalidate all repository caches, then redirect
    back. On GET: render the settings form with current defaults.
    """
    c.active = 'mapping'
    if request.POST:
        rm_obsolete = request.POST.get('destroy', False)
        install_git_hooks = request.POST.get('hooks', False)
        # fixed: stray trailing semicolon removed
        overwrite_git_hooks = request.POST.get('hooks_overwrite', False)
        invalidate_cache = request.POST.get('invalidate', False)
        # lazy logging args: formatting only happens if the record is emitted
        log.debug('rescanning repo location with destroy obsolete=%s, '
                  'install git hooks=%s and '
                  'overwrite git hooks=%s',
                  rm_obsolete, install_git_hooks, overwrite_git_hooks)

        filesystem_repos = ScmModel().repo_scan()
        added, removed = repo2db_mapper(filesystem_repos, rm_obsolete,
                                        install_git_hooks=install_git_hooks,
                                        user=request.authuser.username,
                                        overwrite_git_hooks=overwrite_git_hooks)
        # flash a summary with links to the added repos and names of removed ones
        h.flash(h.literal(_('Repositories successfully rescanned. Added: %s. Removed: %s.') %
                (', '.join(h.link_to(safe_unicode(repo_name),
                 h.url('summary_home', repo_name=repo_name))
                 for repo_name in added) or '-',
                 ', '.join(h.escape(safe_unicode(repo_name)) for repo_name in removed) or '-')),
                category='success')

        if invalidate_cache:
            log.debug('invalidating all repositories cache')
            i = 0
            for repo in Repository.query():
                try:
                    ScmModel().mark_for_invalidation(repo.repo_name)
                    i += 1
                except VCSError as e:
                    # best effort: log and continue with the remaining repos
                    log.warning('VCS error invalidating %s: %s', repo.repo_name, e)
            h.flash(_('Invalidated %s repositories') % i, category='success')

        raise HTTPFound(location=url('admin_settings_mapping'))

    defaults = Setting.get_app_settings()
    defaults.update(self._get_hg_ui_settings())

    return htmlfill.render(
        render('admin/settings/settings.html'),
        defaults=defaults,
        encoding="UTF-8",
        force_defaults=False)
def __call__(self, environ, start_response):
    """WSGI middleware: delegate to the wrapped application and log
    the client IP, access path and wall-clock duration of the request."""
    start = time.time()
    try:
        return self.application(environ, start_response)
    finally:
        log = logging.getLogger('kallithea.' + self.__class__.__name__)
        # pass args lazily instead of eager %-formatting so the message
        # is only built if this log level is enabled
        log.info('IP: %s Request to %s time: %.3fs',
                 _get_ip_addr(environ),
                 safe_unicode(_get_access_path(environ)),
                 time.time() - start)
def _line_counter(self, l):
    """
    Checks each line and bumps total adds/removes for this diff

    :param l: a single raw diff line
    :returns: the line as unicode
    """
    # '+++'/'---' are file headers, not content changes
    is_add = l.startswith('+') and not l.startswith('+++')
    is_remove = l.startswith('-') and not l.startswith('---')
    if is_add:
        self.adds += 1
    elif is_remove:
        self.removes += 1
    return safe_unicode(l)
def plain(cls, source, universal_newline=True):
    """Render *source* as plain HTML text.

    Optionally normalizes line endings, turns URLs into anchors, and
    converts newlines to ``<br />`` tags.
    """
    source = safe_unicode(source)
    if universal_newline:
        # normalize all line-ending styles to '\n'
        source = '\n'.join(source.splitlines())

    def _linkify(match_obj):
        url_full = match_obj.groups()[0]
        return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})

    linked = url_re.sub(_linkify, source)
    return '<br />' + linked.replace("\n", '<br />')
def __add_reviewers(self, pr, reviewers, mention_recipients=None):
    """Add reviewers to a pull request and notify them (and any
    additional @mentioned users) by email, with the PR author as sender.

    :param pr: the PullRequest
    :param reviewers: iterable of users/user_ids to add as reviewers
    :param mention_recipients: optional set of additionally mentioned users
    """
    #members
    for member in set(reviewers):
        _usr = self._get_user(member)
        reviewer = PullRequestReviewers(_usr, pr)
        Session().add(reviewer)

    revision_data = [(x.raw_id, x.message)
                     for x in map(pr.org_repo.get_changeset, pr.revisions)]

    #notification to reviewers
    pr_url = pr.url(canonical=True)
    threading = [h.canonical_url('pullrequest_show', repo_name=pr.other_repo.repo_name,
                                 pull_request_id=pr.pull_request_id)]
    subject = safe_unicode(
        h.link_to(
          _('%(user)s wants you to review pull request #%(pr_id)s: %(pr_title)s') % \
            {'user': pr.author.username,
             'pr_title': pr.title,
             'pr_id': pr.pull_request_id},
            pr_url)
        )
    body = pr.description
    _org_ref_type, org_ref_name, _org_rev = pr.org_ref.split(':')
    email_kwargs = {
        'pr_title': pr.title,
        'pr_user_created': h.person(pr.author),
        'pr_repo_url': h.canonical_url('summary_home', repo_name=pr.other_repo.repo_name),
        'pr_url': pr_url,
        'pr_revisions': revision_data,
        'repo_name': pr.other_repo.repo_name,
        'pr_id': pr.pull_request_id,
        'ref': org_ref_name,
        'pr_username': pr.author.username,
        'threading': threading,
        'is_mention': False,
        }
    if reviewers:
        NotificationModel().create(created_by=pr.author, subject=subject, body=body,
                                   recipients=reviewers,
                                   type_=Notification.TYPE_PULL_REQUEST,
                                   email_kwargs=email_kwargs)

    if mention_recipients:
        mention_recipients.discard(None)
        # avoid double-notifying users who are already reviewers
        mention_recipients.difference_update(reviewers)
    if mention_recipients:
        email_kwargs['is_mention'] = True
        subject = _('[Mention]') + ' ' + subject

        NotificationModel().create(created_by=pr.author, subject=subject, body=body,
                                   recipients=mention_recipients,
                                   type_=Notification.TYPE_PULL_REQUEST,
                                   email_kwargs=email_kwargs)
def test_fork_unicode(self): self.log_user() # create a fork repo_name = self.REPO org_repo = Repository.get_by_repo_name(repo_name) fork_name = safe_str(self.REPO_FORK + u'-rødgrød') creation_args = { 'repo_name': fork_name, 'repo_group': u'-1', 'fork_parent_id': org_repo.repo_id, 'repo_type': self.REPO_TYPE, 'description': 'unicode repo 1', 'private': 'False', 'landing_rev': 'rev:tip', '_authentication_token': self.authentication_token() } self.app.post( url(controller='forks', action='fork_create', repo_name=repo_name), creation_args) response = self.app.get( url(controller='forks', action='forks', repo_name=repo_name)) response.mustcontain("""<a href="/%s">%s</a>""" % (urllib.quote(fork_name), fork_name)) fork_repo = Repository.get_by_repo_name(safe_unicode(fork_name)) assert fork_repo # fork the fork fork_name_2 = safe_str(self.REPO_FORK + u'-blåbærgrød') creation_args = { 'repo_name': fork_name_2, 'repo_group': u'-1', 'fork_parent_id': fork_repo.repo_id, 'repo_type': self.REPO_TYPE, 'description': 'unicode repo 2', 'private': 'False', 'landing_rev': 'rev:tip', '_authentication_token': self.authentication_token() } self.app.post( url(controller='forks', action='fork_create', repo_name=fork_name), creation_args) response = self.app.get( url(controller='forks', action='forks', repo_name=fork_name)) response.mustcontain("""<a href="/%s">%s</a>""" % (urllib.quote(fork_name_2), fork_name_2)) # remove these forks response = self.app.post( url('delete_repo', repo_name=fork_name_2), params={'_authentication_token': self.authentication_token()}) response = self.app.post( url('delete_repo', repo_name=fork_name), params={'_authentication_token': self.authentication_token()})
def add_doc(self, writer, path, repo, repo_name, index_rev=None):
    """
    Add one file's document to the Whoosh *writer*.

    Fetches the node from the vcs backend itself; files whose content
    cannot be obtained as unicode are indexed by name only.

    :param writer: Whoosh index writer
    :param path: file path inside the repository
    :param repo: vcs repository instance
    :param repo_name: repository name used for the repository fields
    :param index_rev: revision to read the node at (backend default if None)
    :return: tuple ``(indexed, indexed_w_content)`` — counts of documents
        added without and with content (each 0 or 1)
    """
    try:
        node = self.get_node(repo, path, index_rev)
    except (ChangesetError, NodeDoesNotExistError):
        # path vanished or revision unknown: nothing indexed, not an error
        log.debug("couldn't add doc - %s did not have %r at %s",
                  repo, path, index_rev)
        return 0, 0

    indexed = indexed_w_content = 0
    if self.is_indexable_node(node):
        u_content = node.content
        if not isinstance(u_content, unicode):
            log.warning(' >> %s Could not get this content as unicode '
                        'replacing with empty content' % path)
            u_content = u''
        else:
            log.debug(' >> %s [WITH CONTENT]', path)
            indexed_w_content += 1

    else:
        log.debug(' >> %s', path)
        # just index file name without it's content
        u_content = u''
        indexed += 1

    p = safe_unicode(path)
    writer.add_document(
        fileid=p,
        owner=unicode(repo.contact),
        repository_rawname=safe_unicode(repo_name),
        repository=safe_unicode(repo_name),
        path=p,
        content=u_content,
        modtime=self.get_node_mtime(node),
        extension=node.extension
    )
    return indexed, indexed_w_content
def add_doc(self, writer, path, repo, repo_name, index_rev=None):
    """
    Index a single file into the Whoosh *writer*.

    The node is fetched from the vcs backend here; when its content cannot
    be represented as unicode, only the file name is indexed.

    :return: ``(indexed, indexed_w_content)`` counts (each 0 or 1)
    """
    try:
        node = self.get_node(repo, path, index_rev)
    except (ChangesetError, NodeDoesNotExistError):
        # missing path/revision is expected churn — skip quietly
        log.debug("couldn't add doc - %s did not have %r at %s",
                  repo, path, index_rev)
        return 0, 0

    name_only = with_content = 0
    text = u''
    if not self.is_indexable_node(node):
        log.debug(' >> %s', path)
        # just index file name without it's content
        name_only += 1
    else:
        text = node.content
        if isinstance(text, unicode):
            log.debug(' >> %s [WITH CONTENT]', path)
            with_content += 1
        else:
            log.warning(' >> %s Could not get this content as unicode '
                        'replacing with empty content' % path)
            text = u''

    upath = safe_unicode(path)
    writer.add_document(fileid=upath,
                        owner=unicode(repo.contact),
                        repository_rawname=safe_unicode(repo_name),
                        repository=safe_unicode(repo_name),
                        path=upath,
                        content=text,
                        modtime=self.get_node_mtime(node),
                        extension=node.extension)
    return name_only, with_content
def __call__(self, user, repo_name, purpose=None):
    """
    Check whether *user* holds one of ``self.required_perms`` on *repo_name*.

    :param user: object exposing a ``user_id`` attribute
    :param repo_name: repository name; coerced to unicode for the lookup
    :param purpose: optional label, used only in the debug log line
    :return: True when permitted, False otherwise (unknown repo -> False)
    """
    # repo_name MUST be unicode, since we handle keys in ok
    # dict by unicode
    repo_name = safe_unicode(repo_name)
    user = AuthUser(user.user_id)
    try:
        ok = user.permissions['repositories'][repo_name] in self.required_perms
    except KeyError:
        # repository absent from the permission map -> deny
        ok = False
    # lazy %-args: only formatted when DEBUG logging is enabled,
    # consistent with logging style elsewhere in this codebase
    log.debug('Middleware check %s for %s for repo %s (%s): %s',
              user.username, self.required_perms, repo_name, purpose, ok)
    return ok
def __before__(self):
    """
    Per-request setup for repository controllers.

    When ``c.repo_name`` is routed: loads the Repository from the database,
    redirects to the "creating" page while the repo is STATE_PENDING,
    flashes an error and raises 404 when the repo exists in the database
    but cannot be instantiated on the filesystem, and finally populates
    the follower/fork/pull-request counters used by the menu.
    """
    super(BaseRepoController, self).__before__()
    if c.repo_name:  # extracted from routes
        _dbr = Repository.get_by_repo_name(c.repo_name)
        if not _dbr:
            return

        # lazy %-args (was eager '%'): matches the lazy style of the
        # log.error call below and of logging elsewhere in the codebase
        log.debug('Found repository in database %s with state `%s`',
                  safe_unicode(_dbr), safe_unicode(_dbr.repo_state))
        route = getattr(request.environ.get('routes.route'), 'name', '')

        # allow to delete repos that are somehow damages in filesystem
        if route in ['delete_repo']:
            return

        if _dbr.repo_state in [Repository.STATE_PENDING]:
            if route in ['repo_creating_home']:
                return
            check_url = url('repo_creating_home', repo_name=c.repo_name)
            return redirect(check_url)

        dbr = c.db_repo = _dbr
        c.db_repo_scm_instance = c.db_repo.scm_instance
        if c.db_repo_scm_instance is None:
            log.error(
                '%s this repository is present in database but it '
                'cannot be created as an scm instance', c.repo_name)
            from kallithea.lib import helpers as h
            h.flash(h.literal(_('Repository not found in the filesystem')),
                    category='error')
            raise paste.httpexceptions.HTTPNotFound()

        # some globals counter for menu
        c.repository_followers = self.scm_model.get_followers(dbr)
        c.repository_forks = self.scm_model.get_forks(dbr)
        c.repository_pull_requests = self.scm_model.get_pull_requests(dbr)
        c.repository_following = self.scm_model.is_following_repo(
            c.repo_name, self.authuser.user_id)
def test_fork_unicode(self): self.log_user() # create a fork repo_name = self.REPO org_repo = Repository.get_by_repo_name(repo_name) fork_name = safe_str(self.REPO_FORK + u'-rødgrød') creation_args = { 'repo_name': fork_name, 'repo_group': u'-1', 'fork_parent_id': org_repo.repo_id, 'repo_type': self.REPO_TYPE, 'description': 'unicode repo 1', 'private': 'False', 'landing_rev': 'rev:tip', '_authentication_token': self.authentication_token()} self.app.post(url(controller='forks', action='fork_create', repo_name=repo_name), creation_args) response = self.app.get(url(controller='forks', action='forks', repo_name=repo_name)) response.mustcontain( """<a href="/%s">%s</a>""" % (urllib.quote(fork_name), fork_name) ) fork_repo = Repository.get_by_repo_name(safe_unicode(fork_name)) assert fork_repo # fork the fork fork_name_2 = safe_str(self.REPO_FORK + u'-blåbærgrød') creation_args = { 'repo_name': fork_name_2, 'repo_group': u'-1', 'fork_parent_id': fork_repo.repo_id, 'repo_type': self.REPO_TYPE, 'description': 'unicode repo 2', 'private': 'False', 'landing_rev': 'rev:tip', '_authentication_token': self.authentication_token()} self.app.post(url(controller='forks', action='fork_create', repo_name=fork_name), creation_args) response = self.app.get(url(controller='forks', action='forks', repo_name=fork_name)) response.mustcontain( """<a href="/%s">%s</a>""" % (urllib.quote(fork_name_2), fork_name_2) ) # remove these forks response = self.app.post(url('delete_repo', repo_name=fork_name_2), params={'_authentication_token': self.authentication_token()}) response = self.app.post(url('delete_repo', repo_name=fork_name), params={'_authentication_token': self.authentication_token()})
def commit_change(self, repo, repo_name, cs, user, author, message, content, f_path):
    """
    Commit a change to a single file

    :param repo: a db_repo.scm_instance
    :param repo_name: repository name, used for cache invalidation and hooks
    :param cs: parent changeset the new commit is based on
    :param user: committing user (resolved via User.guess_instance)
    :param author: commit author, coerced to unicode
    :param message: commit message, coerced to unicode
    :param content: new file content, coerced to a byte string
    :param f_path: path of the file to change
    :return: the new tip changeset
    :raises IMCCommitError: when the backend commit fails
    """
    user = User.guess_instance(user)
    IMC = self._get_IMC_module(repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = IMC(repo)
    imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
    try:
        tip = imc.commit(message=message, author=author,
                         parents=[cs], branch=cs.branch)
    except Exception as e:
        log.error(traceback.format_exc())
        # wrap backend failure; IMCCommitError propagates past the finally
        raise IMCCommitError(str(e))
    finally:
        # always clear caches, if commit fails we want fresh object also
        self.mark_for_invalidation(repo_name)
    # only reached on success — 'tip' is guaranteed to be bound here
    self._handle_push(repo,
                      username=user.username,
                      action='push_local',
                      repo_name=repo_name,
                      revisions=[tip.raw_id])
    return tip
def __before__(self):
    """
    Per-request setup for repository controllers.

    When ``c.repo_name`` is routed: loads the Repository from the database,
    redirects (HTTPFound) to the "creating" page while the repository is
    still STATE_PENDING, raises 404 when the repository is in the database
    but missing on the filesystem, and populates the follower/fork/
    pull-request counters used by the menu.
    """
    super(BaseRepoController, self).__before__()
    if c.repo_name:  # extracted from routes
        _dbr = Repository.get_by_repo_name(c.repo_name)
        if not _dbr:
            return

        log.debug('Found repository in database %s with state `%s`',
                  safe_unicode(_dbr), safe_unicode(_dbr.repo_state))
        route = getattr(request.environ.get('routes.route'), 'name', '')

        # allow to delete repos that are somehow damages in filesystem
        if route in ['delete_repo']:
            return

        if _dbr.repo_state in [Repository.STATE_PENDING]:
            # the "creating" page itself must stay reachable to avoid a loop
            if route in ['repo_creating_home']:
                return
            check_url = url('repo_creating_home', repo_name=c.repo_name)
            raise webob.exc.HTTPFound(location=check_url)

        dbr = c.db_repo = _dbr
        c.db_repo_scm_instance = c.db_repo.scm_instance
        if c.db_repo_scm_instance is None:
            log.error('%s this repository is present in database but it '
                      'cannot be created as an scm instance', c.repo_name)
            # local import to avoid a circular import at module load time
            from kallithea.lib import helpers as h
            h.flash(h.literal(_('Repository not found in the filesystem')),
                    category='error')
            raise paste.httpexceptions.HTTPNotFound()

        # some globals counter for menu
        c.repository_followers = self.scm_model.get_followers(dbr)
        c.repository_forks = self.scm_model.get_forks(dbr)
        c.repository_pull_requests = self.scm_model.get_pull_requests(dbr)
        c.repository_following = self.scm_model.is_following_repo(
            c.repo_name, self.authuser.user_id)
def add_doc(self, writer, path, repo, repo_name, index_rev=None):
    """
    Index one file into the Whoosh *writer*, fetching the node from the
    vcs backend. Files outside INDEX_EXTENSIONS, binary files, and files
    whose content is not unicode are indexed by name only.

    :return: ``(indexed, indexed_w_content)`` counts (each 0 or 1)
    """
    node = self.get_node(repo, path, index_rev)
    plain_count = content_count = 0
    text = u''

    # we just index the content of chosen files, and skip binary files
    if node.extension not in INDEX_EXTENSIONS or node.is_binary:
        log.debug(' >> %s' % path)
        # just index file name without it's content
        plain_count += 1
    else:
        text = node.content
        if isinstance(text, unicode):
            log.debug(' >> %s [WITH CONTENT]' % path)
            content_count += 1
        else:
            log.warning(' >> %s Could not get this content as unicode '
                        'replacing with empty content' % path)
            text = u''

    upath = safe_unicode(path)
    writer.add_document(fileid=upath,
                        owner=unicode(repo.contact),
                        repository=safe_unicode(repo_name),
                        path=upath,
                        content=text,
                        modtime=self.get_node_mtime(node),
                        extension=node.extension)
    return plain_count, content_count
def __call__(self, user, repo_name, purpose=None):
    """
    Check whether *user* holds one of ``self.required_perms`` on *repo_name*.

    :param user: object exposing a ``user_id`` attribute
    :param repo_name: repository name; coerced to unicode for the lookup
    :param purpose: optional label, used only in the debug log line
    :return: True when permitted, False otherwise (unknown repo -> False)
    """
    # repo_name MUST be unicode, since we handle keys in ok
    # dict by unicode
    repo_name = safe_unicode(repo_name)
    user = AuthUser(user.user_id)
    try:
        ok = user.permissions['repositories'][
            repo_name] in self.required_perms
    except KeyError:
        # repository absent from the permission map -> deny
        ok = False
    # lazy %-args: only formatted when DEBUG logging is enabled,
    # consistent with logging style elsewhere in this codebase
    log.debug('Middleware check %s for %s for repo %s (%s): %s',
              user.username, self.required_perms, repo_name, purpose, ok)
    return ok
def __make_app(self, repo_name, repo_path, extras):
    """
    Make an wsgi application using dulserver

    :param repo_name: name of the repository
    :param repo_path: full path to the repository
    """
    # imported lazily so the middleware module loads without pygrack
    from kallithea.lib.middleware.pygrack import make_wsgi_app
    return make_wsgi_app(repo_root=safe_str(self.basepath),
                         repo_name=safe_unicode(repo_name),
                         extras=extras)
def changed_tooltip(nodes):
    """
    Generates a html string for changed nodes in changeset page.
    It limits the output to 30 entries

    :param nodes: LazyNodesGenerator
    """
    if not nodes:
        return ': ' + _('No files')

    overflow = len(nodes) - 30
    tail = ('<br/>' + _(' and %s more') % overflow) if overflow > 0 else ''
    names = [safe_unicode(x.path) for x in nodes[:30]]
    return literal(': <br/> ' + '<br/> '.join(names) + tail)
def update(self, id):
    """PUT /user_groups/id: Update an existing item"""
    # Forms posted to this method should contain a hidden field:
    #    <input type="hidden" name="_method" value="PUT" />
    # Or using helpers:
    #    h.form(url('users_group', id=ID),
    #           method='put')
    # url('users_group', id=ID)
    c.user_group = UserGroup.get_or_404(id)
    c.active = 'settings'
    self.__load_data(id)

    available_members = [safe_unicode(x[0]) for x in c.available_members]

    users_group_form = UserGroupForm(edit=True,
                                     old_data=c.user_group.get_dict(),
                                     available_members=available_members)()

    try:
        form_result = users_group_form.to_python(request.POST)
        UserGroupModel().update(c.user_group, form_result)
        gr = form_result['users_group_name']
        action_logger(self.authuser,
                      'admin_updated_users_group:%s' % gr,
                      None, self.ip_addr, self.sa)
        h.flash(_('Updated user group %s') % gr, category='success')
        Session().commit()
    # 'as' form (PEP 3110) instead of the legacy comma syntax, matching the
    # style already used elsewhere in the codebase
    except formencode.Invalid as errors:
        ug_model = UserGroupModel()
        defaults = errors.value
        e = errors.error_dict or {}
        # re-display the form with current perms and the PUT method marker
        defaults.update({
            'create_repo_perm': ug_model.has_perm(id, 'hg.create.repository'),
            'fork_repo_perm': ug_model.has_perm(id, 'hg.fork.repository'),
            '_method': 'put'
        })

        return htmlfill.render(
            render('admin/user_groups/user_group_edit.html'),
            defaults=defaults,
            errors=e,
            prefix_error=False,
            encoding="UTF-8",
            force_defaults=False)
def changed_tooltip(nodes):
    """
    Generates a html string for changed nodes in changeset page.
    It limits the output to 30 entries

    :param nodes: LazyNodesGenerator
    :return: literal HTML fragment, or a plain 'No files' string
    """
    if nodes:
        pref = ': <br/> '
        suf = ''
        # more than 30 entries: append an "and N more" suffix
        if len(nodes) > 30:
            suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
        return literal(pref +
                       '<br/> '.join([safe_unicode(x.path)
                                      for x in nodes[:30]]) + suf)
    else:
        return ': ' + _('No files')
def markdown(cls, source, safe=True, flavored=False):
    """
    Render *source* as Markdown HTML.

    :param source: text to render; coerced to unicode first
    :param safe: when True, any rendering error falls back to plain
        rendering instead of propagating
    :param flavored: when True, preprocess with cls._flavored_markdown
    :return: rendered HTML, or the plain rendering when the markdown
        package is not installed
    """
    source = safe_unicode(source)
    try:
        # alias renamed from '__markdown': a double-underscore name inside
        # a class body is name-mangled (to _Cls__markdown), which worked
        # only by accident and confuses readers
        import markdown as markdown_mod

        if flavored:
            source = cls._flavored_markdown(source)
        return markdown_mod.markdown(source, ['codehilite', 'extra'])
    except ImportError:
        log.warning('Install markdown to use this function')
        return cls.plain(source)
    except Exception:
        log.error(traceback.format_exc())
        if safe:
            log.debug('Falling back to render in plain mode')
            return cls.plain(source)
        else:
            raise
def update(self, id):
    """
    Update user group *id* from the posted form.

    On validation failure re-renders the edit form with errors; on any
    other failure flashes an error. Both success and unexpected-error
    paths end with a redirect (HTTPFound) back to the edit page.
    """
    c.user_group = UserGroup.get_or_404(id)
    c.active = 'settings'
    self.__load_data(id)

    available_members = [safe_unicode(x[0]) for x in c.available_members]

    users_group_form = UserGroupForm(edit=True,
                                     old_data=c.user_group.get_dict(),
                                     available_members=available_members)()

    try:
        form_result = users_group_form.to_python(request.POST)
        UserGroupModel().update(c.user_group, form_result)
        gr = form_result['users_group_name']
        action_logger(request.authuser,
                      'admin_updated_users_group:%s' % gr,
                      None, request.ip_addr)
        h.flash(_('Updated user group %s') % gr, category='success')
        Session().commit()
    except formencode.Invalid as errors:
        # validation failed: re-render the form with the submitted values
        ug_model = UserGroupModel()
        defaults = errors.value
        e = errors.error_dict or {}
        defaults.update({
            'create_repo_perm': ug_model.has_perm(id, 'hg.create.repository'),
            'fork_repo_perm': ug_model.has_perm(id, 'hg.fork.repository'),
        })

        return htmlfill.render(
            render('admin/user_groups/user_group_edit.html'),
            defaults=defaults,
            errors=e,
            prefix_error=False,
            encoding="UTF-8",
            force_defaults=False)
    except Exception:
        log.error(traceback.format_exc())
        h.flash(_('Error occurred during update of user group %s') \
                % request.POST.get('users_group_name'), category='error')

    # NOTE(review): redirect placement reconstructed as dedented (shared by
    # the success and generic-error paths) — confirm against the original file
    raise HTTPFound(location=url('edit_users_group', id=id))
def __call__(self, environ, start_response): """Invoke the Controller""" # WSGIController.__call__ dispatches to the Controller method # the request is routed to. This routing information is # available in environ['pylons.routes_dict'] try: self.ip_addr = _get_ip_addr(environ) # make sure that we update permissions each time we call controller api_key = request.GET.get('api_key') if api_key: # when using API_KEY we are sure user exists. auth_user = AuthUser(api_key=api_key, ip_addr=self.ip_addr) authenticated = False else: cookie_store = CookieStoreWrapper(session.get('authuser')) try: auth_user = AuthUser(user_id=cookie_store.get( 'user_id', None), ip_addr=self.ip_addr) except UserCreationError, e: from kallithea.lib import helpers as h h.flash(e, 'error') # container auth or other auth functions that create users on # the fly can throw this exception signaling that there's issue # with user creation, explanation should be provided in # Exception itself auth_user = AuthUser(ip_addr=self.ip_addr) authenticated = cookie_store.get('is_authenticated') if not auth_user.is_authenticated and auth_user.user_id is not None: # user is not authenticated and not empty auth_user.set_authenticated(authenticated) request.user = auth_user #set globals for auth user self.authuser = c.authuser = auth_user log.info('IP: %s User: %s accessed %s' % (self.ip_addr, auth_user, safe_unicode(_get_access_path(environ)))) return WSGIController.__call__(self, environ, start_response)
def take_action(self, args):
    """
    Refresh the changeset cache for all repositories, or only those named
    in the comma-separated ``args.repo_update_list``; optionally invalidate
    their caches afterwards when ``args.invalidate_cache`` is set.
    """
    if args.repo_update_list is None:
        repo_list = Repository.query().all()
    else:
        # names arrive comma-separated; strip whitespace and force unicode
        repo_names = [safe_unicode(n.strip())
                      for n in args.repo_update_list.split(',')]
        repo_list = list(Repository.query().filter(
            Repository.repo_name.in_(repo_names)))
    for repo in repo_list:
        repo.update_changeset_cache()
    Session().commit()

    if args.invalidate_cache:
        for r in repo_list:
            r.set_invalidate()
        print 'Updated repo info and invalidated cache for %s repositories' % (
            len(repo_list))
    else:
        print 'Updated repo info for %s repositories' % (len(repo_list))