def clone(self, repo_type=None, source_url=None, mount_point=None, mount_label=None, **kw):
    require_access(c.project, 'admin')
    if repo_type is None:
        return (
            '<form method="get">'
            '<input name="repo_type" value="Git">'
            '<input name="source_url">'
            '<input type="submit">'
            '</form>')
    ep = None  # stays None if no matching entry point is found
    for ep in pkg_resources.iter_entry_points('allura', repo_type):
        break
    if ep is None or source_url is None:
        raise exc.HTTPNotFound
    h.log_action(log, 'install tool').info(
        'clone repo from %s', source_url,
        meta=dict(tool_type=repo_type, mount_point=mount_point, mount_label=mount_label))
    c.project.install_app(
        repo_type,
        mount_point=mount_point,
        mount_label=mount_label,
        init_from_url=source_url)
    M.AuditLog.log('Create repo as clone')
    redirect('tools')

def commit(self):
    VersionedArtifact.commit(self)
    monitoring_email = self.app.config.options.get('TicketMonitoringEmail')
    if self.version > 1:
        hist = TicketHistory.query.get(artifact_id=self._id, version=self.version - 1)
        old = hist.data
        changes = ['Ticket %s has been modified: %s' % (self.ticket_num, self.summary),
                   'Edited By: %s (%s)' % (c.user.get_pref('display_name'), c.user.username)]
        fields = [
            ('Summary', old.summary, self.summary),
            ('Status', old.status, self.status)]
        if old.status != self.status and self.status in c.app.globals.set_of_closed_status_names:
            h.log_action(log, 'closed').info('')
            g.statsUpdater.ticketEvent("closed", self, self.project, self.assigned_to)
        for key in self.custom_fields:
            fields.append((key, old.custom_fields.get(key, ''), self.custom_fields[key]))
        for title, o, n in fields:
            if o != n:
                changes.append('%s updated: %r => %r' % (title, o, n))
        o = hist.assigned_to
        n = self.assigned_to
        if o != n:
            changes.append('Owner updated: %r => %r' % (o and o.username, n and n.username))
            self.subscribe(user=n)
            g.statsUpdater.ticketEvent("assigned", self, self.project, n)
            if o:
                g.statsUpdater.ticketEvent("revoked", self, self.project, o)
        if old.description != self.description:
            changes.append('Description updated:')
            changes.append('\n'.join(
                difflib.unified_diff(
                    a=old.description.split('\n'),
                    b=self.description.split('\n'),
                    fromfile='description-old',
                    tofile='description-new')))
        description = '\n'.join(changes)
    else:
        self.subscribe()
        if self.assigned_to_id:
            user = User.query.get(_id=self.assigned_to_id)
            g.statsUpdater.ticketEvent("assigned", self, self.project, user)
            self.subscribe(user=user)
        description = ''
    subject = self.email_subject
    Thread.new(discussion_id=self.app_config.discussion_id, ref_id=self.index_id())
    n = Notification.post(artifact=self, topic='metadata', text=description, subject=subject)
    if monitoring_email and n and (
            not self.private or
            self.app.config.options.get('TicketMonitoringType') in (
                'NewTicketsOnly', 'AllTicketChanges')):
        n.send_simple(monitoring_email)
    Feed.post(
        self,
        title=self.summary,
        description=description if description else self.description,
        author=self.reported_by,
        pubdate=self.created_date)

def post(self, subject, text, message_id=None, parent_id=None, **kw):
    post = super(ForumThread, self).post(text, message_id=message_id, parent_id=parent_id, **kw)
    if not self.first_post_id:
        self.first_post_id = post._id
        self.num_replies = 1
    h.log_action(log, 'posted').info('')
    return post

def move(self, app_config, notify=True):
    """Move ticket from current tickets app to tickets app with given app_config"""
    app = app_config.project.app_instance(app_config)
    prior_url = self.url()
    prior_app = self.app
    prior_ticket_num = self.ticket_num
    attachments = self.attachments
    attach_metadata = BaseAttachment.metadata_for(self)
    prior_cfs = [(cf["name"], cf["type"], cf["label"])
                 for cf in prior_app.globals.custom_fields or []]
    new_cfs = [(cf["name"], cf["type"], cf["label"])
               for cf in app.globals.custom_fields or []]
    skipped_fields = []
    user_fields = []
    for cf in prior_cfs:
        if cf not in new_cfs:  # can't convert
            skipped_fields.append(cf)
        elif cf[1] == "user":  # can convert and field type == user
            user_fields.append(cf)
    messages = []
    for cf in skipped_fields:
        name = cf[0]
        messages.append("- **%s**: %s" % (name, self.custom_fields.get(name, "")))
    for cf in user_fields:
        name = cf[0]
        username = self.custom_fields.get(name, None)
        user = app_config.project.user_in_project(username)
        if not user or user == User.anonymous():
            messages.append("- **%s**: %s (user not in project)" % (name, username))
            self.custom_fields[name] = ""
    # special case: not custom user field (assigned_to_id)
    user = self.assigned_to
    if user and not app_config.project.user_in_project(user.username):
        messages.append("- **assigned_to**: %s (user not in project)" % user.username)
        self.assigned_to_id = None
    custom_fields = {}
    for cf in new_cfs:
        fn, ft, fl = cf
        old_val = self.custom_fields.get(fn, None)
        if old_val is None:
            custom_fields[fn] = None if ft == "user" else ""
        custom_fields[fn] = old_val
    self.custom_fields = custom_fields
    # move ticket. ensure unique ticket_num
    while True:
        with h.push_context(app_config.project_id, app_config_id=app_config._id):
            ticket_num = app.globals.next_ticket_num()
        self.ticket_num = ticket_num
        self.app_config_id = app_config._id
        new_url = app_config.url() + str(self.ticket_num) + "/"
        try:
            session(self).flush(self)
            h.log_action(log, "moved").info("Ticket %s moved to %s" % (prior_url, new_url))
            break
        except OperationFailure, err:
            if "duplicate" in err.args[0]:
                log.warning("Try to create duplicate ticket %s when moving from %s" %
                            (new_url, prior_url))
                session(self).expunge(self)
                continue

def commit(self):
    VersionedArtifact.commit(self)
    monitoring_email = self.app.config.options.get('TicketMonitoringEmail')
    if self.version > 1:
        hist = TicketHistory.query.get(artifact_id=self._id, version=self.version - 1)
        old = hist.data
        changes = [
            'Ticket %s has been modified: %s' % (self.ticket_num, self.summary),
            'Edited By: %s (%s)' % (c.user.get_pref('display_name'), c.user.username)
        ]
        fields = [('Summary', old.summary, self.summary),
                  ('Status', old.status, self.status)]
        if old.status != self.status and self.status in c.app.globals.set_of_closed_status_names:
            h.log_action(log, 'closed').info('')
        for key in self.custom_fields:
            fields.append(
                (key, old.custom_fields.get(key, ''), self.custom_fields[key]))
        for title, o, n in fields:
            if o != n:
                changes.append('%s updated: %r => %r' % (title, o, n))
        o = hist.assigned_to
        n = self.assigned_to
        if o != n:
            changes.append('Owner updated: %r => %r' %
                           (o and o.username, n and n.username))
            self.subscribe(user=n)
        if old.description != self.description:
            changes.append('Description updated:')
            changes.append('\n'.join(
                difflib.unified_diff(a=old.description.split('\n'),
                                     b=self.description.split('\n'),
                                     fromfile='description-old',
                                     tofile='description-new')))
        description = '\n'.join(changes)
    else:
        self.subscribe()
        if self.assigned_to_id:
            self.subscribe(user=User.query.get(_id=self.assigned_to_id))
        description = ''
    subject = self.email_subject
    Thread(discussion_id=self.app_config.discussion_id,
           ref_id=self.index_id())
    n = Notification.post(artifact=self, topic='metadata',
                          text=description, subject=subject)
    if monitoring_email and n:
        n.send_simple(monitoring_email)
    Feed.post(self, description)

def _update_mounts(self, subproject=None, tool=None, new=None, **kw):
    if subproject is None:
        subproject = []
    if tool is None:
        tool = []
    for sp in subproject:
        p = M.Project.query.get(shortname=sp['shortname'],
                                neighborhood_id=c.project.neighborhood_id)
        if sp.get('delete'):
            require_access(c.project, 'admin')
            M.AuditLog.log('delete subproject %s', sp['shortname'])
            h.log_action(log, 'delete subproject').info(
                'delete subproject %s', sp['shortname'],
                meta=dict(name=sp['shortname']))
            p.removal = 'deleted'
            plugin.ProjectRegistrationProvider.get().delete_project(p, c.user)
        elif not new:
            M.AuditLog.log('update subproject %s', sp['shortname'])
            p.name = sp['name']
            p.ordinal = int(sp['ordinal'])
    for p in tool:
        if p.get('delete'):
            require_access(c.project, 'admin')
            M.AuditLog.log('uninstall tool %s', p['mount_point'])
            h.log_action(log, 'uninstall tool').info(
                'uninstall tool %s', p['mount_point'],
                meta=dict(mount_point=p['mount_point']))
            c.project.uninstall_app(p['mount_point'])
        elif not new:
            M.AuditLog.log('update tool %s', p['mount_point'])
            options = c.project.app_config(p['mount_point']).options
            options.mount_label = p['mount_label']
            options.ordinal = int(p['ordinal'])
    if new and new.get('install'):
        ep_name = new.get('ep_name', None)
        if not ep_name:
            require_access(c.project, 'create')
            mount_point = new['mount_point'].lower() or h.nonce()
            M.AuditLog.log('create subproject %s', mount_point)
            h.log_action(log, 'create subproject').info(
                'create subproject %s', mount_point,
                meta=dict(mount_point=mount_point, name=new['mount_label']))
            sp = c.project.new_subproject(mount_point)
            sp.name = new['mount_label']
            sp.ordinal = int(new['ordinal'])
        else:
            require_access(c.project, 'admin')
            installable_tools = AdminApp.installable_tools_for(c.project)
            if not ep_name.lower() in [t['name'].lower() for t in installable_tools]:
                flash('Installation limit exceeded.', 'error')
                return
            mount_point = new['mount_point'] or ep_name
            M.AuditLog.log('install tool %s', mount_point)
            h.log_action(log, 'install tool').info(
                'install tool %s', mount_point,
                meta=dict(tool_type=ep_name, mount_point=mount_point,
                          mount_label=new['mount_label']))
            c.project.install_app(
                ep_name, mount_point,
                mount_label=new['mount_label'],
                ordinal=new['ordinal'])
    g.post_event('project_updated')

def new(cls):
    """Create a new ticket, safely (ensuring a unique ticket_num)"""
    while True:
        ticket_num = c.app.globals.next_ticket_num()
        ticket = cls(app_config_id=c.app.config._id,
                     custom_fields=dict(),
                     ticket_num=ticket_num)
        try:
            session(ticket).flush(ticket)
            h.log_action(log, "opened").info("")
            return ticket
        except OperationFailure, err:
            if "duplicate" in err.args[0]:
                log.warning("Try to create duplicate ticket %s", ticket.url())
                session(ticket).expunge(ticket)
                continue
            raise

def update_mounts(self, subproject=None, tool=None, new=None, **kw):
    if subproject is None:
        subproject = []
    if tool is None:
        tool = []
    for sp in subproject:
        p = M.Project.query.get(shortname=sp["shortname"],
                                neighborhood_id=c.project.neighborhood_id)
        if sp.get("delete"):
            require_access(c.project, "admin")
            M.AuditLog.log("delete subproject %s", sp["shortname"])
            h.log_action(log, "delete subproject").info(
                "delete subproject %s", sp["shortname"],
                meta=dict(name=sp["shortname"]))
            p.removal = "deleted"
            plugin.ProjectRegistrationProvider.get().delete_project(p, c.user)
        elif not new:
            M.AuditLog.log("update subproject %s", sp["shortname"])
            p.name = sp["name"]
            p.ordinal = int(sp["ordinal"])
    for p in tool:
        if p.get("delete"):
            require_access(c.project, "admin")
            M.AuditLog.log("uninstall tool %s", p["mount_point"])
            h.log_action(log, "uninstall tool").info(
                "uninstall tool %s", p["mount_point"],
                meta=dict(mount_point=p["mount_point"]))
            c.project.uninstall_app(p["mount_point"])
        elif not new:
            M.AuditLog.log("update tool %s", p["mount_point"])
            options = c.project.app_config(p["mount_point"]).options
            options.mount_label = p["mount_label"]
            options.ordinal = int(p["ordinal"])
    try:
        if new and new.get("install"):
            ep_name = new.get("ep_name", None)
            if not ep_name:
                require_access(c.project, "create")
                mount_point = new["mount_point"].lower() or h.nonce()
                M.AuditLog.log("create subproject %s", mount_point)
                h.log_action(log, "create subproject").info(
                    "create subproject %s", mount_point,
                    meta=dict(mount_point=mount_point, name=new["mount_label"]))
                sp = c.project.new_subproject(mount_point)
                sp.name = new["mount_label"]
                sp.ordinal = int(new["ordinal"])
            else:
                require_access(c.project, "admin")
                mount_point = new["mount_point"].lower() or ep_name.lower()
                M.AuditLog.log("install tool %s", mount_point)
                h.log_action(log, "install tool").info(
                    "install tool %s", mount_point,
                    meta=dict(tool_type=ep_name, mount_point=mount_point,
                              mount_label=new["mount_label"]))
                c.project.install_app(
                    ep_name, mount_point,
                    mount_label=new["mount_label"],
                    ordinal=new["ordinal"])
    except forge_exc.ForgeError, exc:
        flash("%s: %s" % (exc.__class__.__name__, exc.args[0]), "error")

def update_mounts(self, subproject=None, tool=None, new=None, **kw):
    if subproject is None:
        subproject = []
    if tool is None:
        tool = []
    for sp in subproject:
        p = M.Project.query.get(shortname=sp['shortname'],
                                neighborhood_id=c.project.neighborhood_id)
        if sp.get('delete'):
            require_access(c.project, 'admin')
            M.AuditLog.log('delete subproject %s', sp['shortname'])
            h.log_action(log, 'delete subproject').info(
                'delete subproject %s', sp['shortname'],
                meta=dict(name=sp['shortname']))
            p.removal = 'deleted'
            plugin.ProjectRegistrationProvider.get().delete_project(p, c.user)
        elif not new:
            M.AuditLog.log('update subproject %s', sp['shortname'])
            p.name = sp['name']
            p.ordinal = int(sp['ordinal'])
    for p in tool:
        if p.get('delete'):
            require_access(c.project, 'admin')
            M.AuditLog.log('uninstall tool %s', p['mount_point'])
            h.log_action(log, 'uninstall tool').info(
                'uninstall tool %s', p['mount_point'],
                meta=dict(mount_point=p['mount_point']))
            c.project.uninstall_app(p['mount_point'])
        elif not new:
            M.AuditLog.log('update tool %s', p['mount_point'])
            options = c.project.app_config(p['mount_point']).options
            options.mount_label = p['mount_label']
            options.ordinal = int(p['ordinal'])
    try:
        if new and new.get('install'):
            ep_name = new.get('ep_name', None)
            if not ep_name:
                require_access(c.project, 'create')
                mount_point = new['mount_point'].lower() or h.nonce()
                M.AuditLog.log('create subproject %s', mount_point)
                h.log_action(log, 'create subproject').info(
                    'create subproject %s', mount_point,
                    meta=dict(mount_point=mount_point, name=new['mount_label']))
                sp = c.project.new_subproject(mount_point)
                sp.name = new['mount_label']
                sp.ordinal = int(new['ordinal'])
            else:
                require_access(c.project, 'admin')
                mount_point = new['mount_point'] or ep_name
                M.AuditLog.log('install tool %s', mount_point)
                h.log_action(log, 'install tool').info(
                    'install tool %s', mount_point,
                    meta=dict(tool_type=ep_name, mount_point=mount_point,
                              mount_label=new['mount_label']))
                c.project.install_app(
                    ep_name, mount_point,
                    mount_label=new['mount_label'],
                    ordinal=new['ordinal'])
    except forge_exc.ForgeError, exc:
        flash('%s: %s' % (exc.__class__.__name__, exc.args[0]), 'error')

def new(cls):
    '''Create a new ticket, safely (ensuring a unique ticket_num)'''
    while True:
        ticket_num = c.app.globals.next_ticket_num()
        ticket = cls(app_config_id=c.app.config._id,
                     custom_fields=dict(),
                     ticket_num=ticket_num)
        try:
            session(ticket).flush(ticket)
            h.log_action(log, 'opened').info('')
            return ticket
        except OperationFailure, err:
            if 'duplicate' in err.args[0]:
                log.warning('Try to create duplicate ticket %s', ticket.url())
                session(ticket).expunge(ticket)
                continue
            raise

def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info('', meta=dict(module='scm-%s' % repo.repo_id, read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)
    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)
    refresh_commit_repos(all_commit_ids, repo)
    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1), ci._id)
    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info('Last known commit id: %s', last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info('CommitRun incomplete, rebuilding with all commits')
                commit_run_ids = all_commit_ids
        log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)
    # Refresh trees
    # Like diffs below, pre-computing trees for some SCMs is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo._refresh_precompute:
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            cache = refresh_commit_trees(ci, cache)
            if (i + 1) % 100 == 0:
                log.info('Refresh commit trees %d: %s', (i + 1), ci._id)
    # Compute diffs
    cache = {}
    # For some SCMs, we don't want to pre-compute the diffs because that
    # would be too expensive, so we skip them here and do them on-demand
    # with caching.
    if repo._refresh_precompute:
        for i, oid in enumerate(commit_ids):
            cid = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            ci = mapper(Commit).create(cid, dict(instrument=False))
            ci.set_context(repo)
            compute_diffs(repo._id, cache, ci)
            if (i + 1) % 100 == 0:
                log.info('Compute diffs %d: %s', (i + 1), ci._id)
    if repo._refresh_precompute:
        model_cache = ModelCache()
        lcid_cache = {}
        for i, oid in enumerate(reversed(commit_ids)):
            ci = model_cache.get(Commit, dict(_id=oid))
            ci.set_context(repo)
            compute_lcds(ci, model_cache, lcid_cache)
            ThreadLocalORMSession.flush_all()
            if (i + 1) % 100 == 0:
                log.info('Compute last commit info %d: %s', (i + 1), ci._id)
    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)
    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)

def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info(
            '',
            meta=dict(
                module='scm-%s' % repo.repo_id,
                read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)
    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)
    refresh_commit_repos(all_commit_ids, repo)
    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1), ci._id)
    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them
        # entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info('Last known commit id: %s', last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info('CommitRun incomplete, rebuilding with all commits')
                commit_run_ids = all_commit_ids
        log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)
    # Refresh trees
    # Like diffs below, pre-computing trees for some SCMs is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo._refresh_precompute:
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            cache = refresh_commit_trees(ci, cache)
            if (i + 1) % 100 == 0:
                log.info('Refresh commit trees %d: %s', (i + 1), ci._id)
    # Compute diffs
    cache = {}
    # For some SCMs, we don't want to pre-compute the LCDs because that
    # would be too expensive, so we skip them here and do them on-demand
    # with caching.
    if repo._refresh_precompute:
        model_cache = ModelCache()
        lcid_cache = {}
        for i, oid in enumerate(reversed(commit_ids)):
            ci = model_cache.get(Commit, dict(_id=oid))
            ci.set_context(repo)
            compute_lcds(ci, model_cache, lcid_cache)
            ThreadLocalORMSession.flush_all()
            if (i + 1) % 100 == 0:
                log.info('Compute last commit info %d: %s', (i + 1), ci._id)
    # Clear any existing caches for branches/tags
    if repo.cached_branches:
        repo.cached_branches = []
        session(repo).flush()
    if repo.cached_tags:
        repo.cached_tags = []
        session(repo).flush()
    # The first view can be expensive to cache,
    # so we want to do it here instead of on the first view.
    repo.get_branches()
    repo.get_tags()
    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
            actor = user or TransientActor(
                activity_name=new.committed.name or new.committed.email)
            g.director.create_activity(actor, 'committed', new,
                                       related_nodes=[repo.app_config.project],
                                       tags=['commit', repo.tool.lower()])
        from allura.webhooks import RepoPushWebhookSender
        by_branches, by_tags = _group_commits(repo, commit_ids)
        params = []
        for b, commits in by_branches.iteritems():
            ref = u'refs/heads/{}'.format(b) if b != '__default__' else None
            params.append(dict(commit_ids=commits, ref=ref))
        for t, commits in by_tags.iteritems():
            ref = u'refs/tags/{}'.format(t)
            params.append(dict(commit_ids=commits, ref=ref))
        if params:
            RepoPushWebhookSender().send(params)
    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)
    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)

def refresh_repo(repo, all_commits=False, notify=True):
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info('', meta=dict(module='scm-%s' % repo.repo_id, read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)
    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)
    refresh_commit_repos(all_commit_ids, repo)
    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1), ci._id)
    # Refresh commit runs
    commit_run_ids = commit_ids
    # Check if the CommitRuns for the repo are in a good state by checking for
    # a CommitRunDoc that contains the last known commit. If there isn't one,
    # the CommitRuns for this repo are in a bad state - rebuild them entirely.
    if commit_run_ids != all_commit_ids:
        last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
        log.info('Last known commit id: %s', last_commit)
        if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
            log.info('CommitRun incomplete, rebuilding with all commits')
            commit_run_ids = all_commit_ids
    log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
    rb = CommitRunBuilder(commit_run_ids)
    rb.run()
    rb.cleanup()
    log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)
    # Refresh trees
    # Like diffs below, pre-computing trees for SVN repos is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo.tool.lower() != 'svn':
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            cache = refresh_commit_trees(ci, cache)
            if (i + 1) % 100 == 0:
                log.info('Refresh commit trees %d: %s', (i + 1), ci._id)
    # Compute diffs
    cache = {}
    # Have to compute_diffs() for all commits to ensure that LastCommitDocs
    # are set properly for forked repos. For SVN, compute_diffs() we don't
    # want to pre-compute the diffs because that would be too expensive, so
    # we skip them here and do them on-demand with caching.
    if repo.tool.lower() != 'svn':
        for i, oid in enumerate(reversed(all_commit_ids)):
            ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            compute_diffs(repo._id, cache, ci)
            if (i + 1) % 100 == 0:
                log.info('Compute diffs %d: %s', (i + 1), ci._id)
    log.info('Refresh complete for %s', repo.full_fs_path)
    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)

import logging

from pylons import tmpl_context as c, app_globals as g
from pylons import request, response
from ming.orm import session
from ming.utils import LazyProperty

from allura import model as M
from allura.lib import helpers as h
from allura.lib import security
from allura.lib import plugin
from allura.lib.exceptions import Invalid
from allura.lib.decorators import require_post
from allura.lib.security import has_access

log = logging.getLogger(__name__)
action_logger = h.log_action(log, "API:")


class RestController(object):

    def __init__(self):
        self.oauth = OAuthNegotiator()

    def _authenticate_request(self):
        "Based on request.params or oauth, authenticate the request"
        headers_auth = "Authorization" in request.headers
        params_auth = "oauth_token" in request.params
        params_auth = params_auth or "access_token" in request.params
        if headers_auth or params_auth:
            return self.oauth._authenticate()
        else:
            return None

import logging

from pylons import tmpl_context as c, app_globals as g
from pylons import request, response
from ming.orm import session
from ming.utils import LazyProperty

from allura import model as M
from allura.lib import helpers as h
from allura.lib import security
from allura.lib import plugin
from allura.lib.exceptions import Invalid
from allura.lib.decorators import require_post
from allura.lib.security import has_access

log = logging.getLogger(__name__)
action_logger = h.log_action(log, 'API:')


class RestController(object):

    def __init__(self):
        self.oauth = OAuthNegotiator()

    def _authenticate_request(self):
        'Based on request.params or oauth, authenticate the request'
        headers_auth = 'Authorization' in request.headers
        params_auth = 'oauth_token' in request.params
        params_auth = params_auth or 'access_token' in request.params
        if headers_auth or params_auth:
            return self.oauth._authenticate()
        else:
            return None

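# --- Illustrative usage sketch (not part of the original sources) ---
# The snippets above and below share one pattern: h.log_action(log, <action>) wraps a
# standard logger so that .info() can carry a structured `meta` dict alongside the
# message, which is what feeds the audit/stats log. The action name, message, and meta
# keys below are made-up examples, assuming only the call pattern already visible here.
example_log = h.log_action(log, 'example action')
example_log.info(
    'example event on %s', 'example-mount-point',
    meta=dict(tool_type='Git', mount_point='example-mount-point'))
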
def move(self, app_config, notify=True):
    '''Move ticket from current tickets app to tickets app with given app_config'''
    app = app_config.project.app_instance(app_config)
    prior_url = self.url()
    prior_app = self.app
    prior_ticket_num = self.ticket_num
    attachments = self.attachments
    attach_metadata = BaseAttachment.metadata_for(self)
    prior_cfs = [(cf['name'], cf['type'], cf['label'])
                 for cf in prior_app.globals.custom_fields or []]
    new_cfs = [(cf['name'], cf['type'], cf['label'])
               for cf in app.globals.custom_fields or []]
    skipped_fields = []
    user_fields = []
    for cf in prior_cfs:
        if cf not in new_cfs:  # can't convert
            skipped_fields.append(cf)
        elif cf[1] == 'user':  # can convert and field type == user
            user_fields.append(cf)
    messages = []
    for cf in skipped_fields:
        name = cf[0]
        messages.append('- **%s**: %s' % (name, self.custom_fields.get(name, '')))
    for cf in user_fields:
        name = cf[0]
        username = self.custom_fields.get(name, None)
        user = app_config.project.user_in_project(username)
        if not user or user == User.anonymous():
            messages.append('- **%s**: %s (user not in project)' % (name, username))
            self.custom_fields[name] = ''
    # special case: not custom user field (assigned_to_id)
    user = self.assigned_to
    if user and not app_config.project.user_in_project(user.username):
        messages.append('- **assigned_to**: %s (user not in project)' % user.username)
        self.assigned_to_id = None
    custom_fields = {}
    for cf in new_cfs:
        fn, ft, fl = cf
        old_val = self.custom_fields.get(fn, None)
        if old_val is None:
            custom_fields[fn] = None if ft == 'user' else ''
        custom_fields[fn] = old_val
    self.custom_fields = custom_fields
    # move ticket. ensure unique ticket_num
    while True:
        with h.push_context(app_config.project_id, app_config_id=app_config._id):
            ticket_num = app.globals.next_ticket_num()
        self.ticket_num = ticket_num
        self.app_config_id = app_config._id
        new_url = app_config.url() + str(self.ticket_num) + '/'
        try:
            session(self).flush(self)
            h.log_action(log, 'moved').info('Ticket %s moved to %s' % (prior_url, new_url))
            break
        except OperationFailure, err:
            if 'duplicate' in err.args[0]:
                log.warning(
                    'Try to create duplicate ticket %s when moving from %s' %
                    (new_url, prior_url))
                session(self).expunge(self)
                continue

def refresh_repo(repo, all_commits=False, notify=True, new_clone=False, commits_are_new=None):
    if commits_are_new is None:
        commits_are_new = not all_commits and not new_clone
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info(
            '',
            meta=dict(
                module='scm-%s' % repo.repo_id,
                read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)
    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)
    refresh_commit_repos(all_commit_ids, repo)
    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1), ci._id)
    # Clear any existing caches for branches/tags
    if repo.cached_branches:
        repo.cached_branches = []
        session(repo).flush()
    if repo.cached_tags:
        repo.cached_tags = []
        session(repo).flush()
    # The first view can be expensive to cache,
    # so we want to do it here instead of on the first view.
    repo.get_branches()
    repo.get_tags()
    if commits_are_new:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
            actor = user or TransientActor(
                activity_name=new.committed.name or new.committed.email)
            g.director.create_activity(actor, 'committed', new,
                                       related_nodes=[repo.app_config.project],
                                       tags=['commit', repo.tool.lower()])
        from allura.webhooks import RepoPushWebhookSender
        by_branches, by_tags = _group_commits(repo, commit_ids)
        params = []
        for b, commits in by_branches.iteritems():
            ref = u'refs/heads/{}'.format(b) if b != '__default__' else None
            params.append(dict(commit_ids=commits, ref=ref))
        for t, commits in by_tags.iteritems():
            ref = u'refs/tags/{}'.format(t)
            params.append(dict(commit_ids=commits, ref=ref))
        if params:
            RepoPushWebhookSender().send(params)
    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)
    # Send notifications
    if notify:
        send_notifications(repo, reversed(commit_ids))

def refresh_repo(repo, all_commits=False, notify=True):
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info(
            '',
            meta=dict(
                module='scm-%s' % repo.repo_id,
                read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)
    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i+1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i+1), oid)
    refresh_commit_repos(all_commit_ids, repo)
    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
        refresh_children(ci)
        if (i+1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i+1), ci._id)
    # Refresh commit runs
    commit_run_ids = commit_ids
    # Check if the CommitRuns for the repo are in a good state by checking for
    # a CommitRunDoc that contains the last known commit. If there isn't one,
    # the CommitRuns for this repo are in a bad state - rebuild them entirely.
    if commit_run_ids != all_commit_ids:
        last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
        log.info('Last known commit id: %s', last_commit)
        if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
            log.info('CommitRun incomplete, rebuilding with all commits')
            commit_run_ids = all_commit_ids
    log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
    rb = CommitRunBuilder(commit_run_ids)
    rb.run()
    rb.cleanup()
    log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)
    # Refresh trees
    # Like diffs below, pre-computing trees for SVN repos is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo.tool.lower() != 'svn':
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            cache = refresh_commit_trees(ci, cache)
            if (i+1) % 100 == 0:
                log.info('Refresh commit trees %d: %s', (i+1), ci._id)
    # Compute diffs
    cache = {}
    # Have to compute_diffs() for all commits to ensure that LastCommitDocs
    # are set properly for forked repos. For SVN, compute_diffs() we don't
    # want to pre-compute the diffs because that would be too expensive, so
    # we skip them here and do them on-demand with caching.
    if repo.tool.lower() != 'svn':
        for i, oid in enumerate(reversed(all_commit_ids)):
            ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            compute_diffs(repo._id, cache, ci)
            if (i+1) % 100 == 0:
                log.info('Compute diffs %d: %s', (i+1), ci._id)
    log.info('Refresh complete for %s', repo.full_fs_path)
    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)

def commit(self, **kwargs):
    VersionedArtifact.commit(self)
    monitoring_email = self.app.config.options.get("TicketMonitoringEmail")
    if self.version > 1:
        hist = TicketHistory.query.get(artifact_id=self._id, version=self.version - 1)
        old = hist.data
        changes = [
            "Ticket %s has been modified: %s" % (self.ticket_num, self.summary),
            "Edited By: %s (%s)" % (c.user.get_pref("display_name"), c.user.username),
        ]
        fields = [("Summary", old.summary, self.summary), ("Status", old.status, self.status)]
        if old.status != self.status and self.status in c.app.globals.set_of_closed_status_names:
            h.log_action(log, "closed").info("")
            g.statsUpdater.ticketEvent("closed", self, self.project, self.assigned_to)
        for key in self.custom_fields:
            fields.append((key, old.custom_fields.get(key, ""), self.custom_fields[key]))
        for title, o, n in fields:
            if o != n:
                changes.append("%s updated: %r => %r" % (title, o, n))
        o = hist.assigned_to
        n = self.assigned_to
        if o != n:
            changes.append("Owner updated: %r => %r" % (o and o.username, n and n.username))
            self.subscribe(user=n)
            g.statsUpdater.ticketEvent("assigned", self, self.project, n)
            if o:
                g.statsUpdater.ticketEvent("revoked", self, self.project, o)
        if old.description != self.description:
            changes.append("Description updated:")
            changes.append(
                "\n".join(
                    difflib.unified_diff(
                        a=old.description.split("\n"),
                        b=self.description.split("\n"),
                        fromfile="description-old",
                        tofile="description-new",
                    )
                )
            )
        description = "\n".join(changes)
    else:
        self.subscribe()
        if self.assigned_to_id:
            user = User.query.get(_id=self.assigned_to_id)
            g.statsUpdater.ticketEvent("assigned", self, self.project, user)
            self.subscribe(user=user)
        description = ""
    subject = self.email_subject
    Thread.new(discussion_id=self.app_config.discussion_id, ref_id=self.index_id())
    # First ticket notification. Use persistent Message-ID (self.message_id()).
    # Thus we can group notification emails in one thread later.
    n = Notification.post(
        message_id=self.message_id(), artifact=self, topic="metadata", text=description, subject=subject
    )
    if monitoring_email and n and (
        not self.private
        or self.app.config.options.get("TicketMonitoringType") in ("NewTicketsOnly", "AllTicketChanges")
    ):
        n.send_simple(monitoring_email)
    Feed.post(
        self,
        title=self.summary,
        description=description if description else self.description,
        author=self.reported_by,
        pubdate=self.created_date,
    )

import logging

from tg import expose, flash, redirect
from pylons import tmpl_context as c, app_globals as g
from pylons import request
from ming.orm import session
from ming.utils import LazyProperty

from allura import model as M
from allura.lib import helpers as h
from allura.lib import security
from allura.lib import plugin
from allura.lib.exceptions import Invalid
from allura.lib.decorators import require_post

log = logging.getLogger(__name__)
action_logger = h.log_action(log, 'API:')


class RestController(object):

    def __init__(self):
        self.oauth = OAuthNegotiator()

    def _authenticate_request(self):
        'Based on request.params or oauth, authenticate the request'
        if 'oauth_token' in request.params or 'access_token' in request.params:
            return self.oauth._authenticate()
        elif 'api_key' in request.params:
            api_key = request.params.get('api_key')
            token = M.ApiTicket.get(api_key)
            if not token:

def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, "commit")
    for ci in new_commit_ids:
        stats_log.info("", meta=dict(module="scm-%s" % repo.repo_id, read="0"))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info("Refreshing %d commits on %s", len(commit_ids), repo.full_fs_path)
    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info("Refresh commit info %d: %s", (i + 1), oid)
    refresh_commit_repos(all_commit_ids, repo)
    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info("Refresh child info %d for parents of %s", (i + 1), ci._id)
    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info("Last known commit id: %s", last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info("CommitRun incomplete, rebuilding with all commits")
                commit_run_ids = all_commit_ids
        log.info("Starting CommitRunBuilder for %s", repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info("Finished CommitRunBuilder for %s", repo.full_fs_path)
    # Refresh trees
    # Like diffs below, pre-computing trees for some SCMs is too expensive,
    # so we skip it here, then do it on-demand later.
    if repo._refresh_precompute:
        cache = {}
        for i, oid in enumerate(commit_ids):
            ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            cache = refresh_commit_trees(ci, cache)
            if (i + 1) % 100 == 0:
                log.info("Refresh commit trees %d: %s", (i + 1), ci._id)
    # Compute diffs
    cache = {}
    # For some SCMs, we don't want to pre-compute the diffs because that
    # would be too expensive, so we skip them here and do them on-demand
    # with caching.
    if repo._refresh_precompute:
        for i, oid in enumerate(commit_ids):
            cid = CommitDoc.m.find(dict(_id=oid), validate=False).next()
            ci = mapper(Commit).create(cid, dict(instrument=False))
            ci.set_context(repo)
            compute_diffs(repo._id, cache, ci)
            if (i + 1) % 100 == 0:
                log.info("Compute diffs %d: %s", (i + 1), ci._id)
    if repo._refresh_precompute:
        model_cache = ModelCache()
        lcid_cache = {}
        for i, oid in enumerate(reversed(commit_ids)):
            ci = model_cache.get(Commit, dict(_id=oid))
            ci.set_context(repo)
            compute_lcds(ci, model_cache, lcid_cache)
            ThreadLocalORMSession.flush_all()
            if (i + 1) % 100 == 0:
                log.info("Compute last commit info %d: %s", (i + 1), ci._id)
    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
    log.info("Refresh complete for %s", repo.full_fs_path)
    g.post_event("repo_refreshed", len(commit_ids), all_commits, new_clone)
    # Send notifications
    if notify:
        send_notifications(repo, commit_ids)

def update(self, name=None, short_description=None, summary='', icon=None,
           category=None, external_homepage='', support_page='',
           support_page_url='', twitter_handle='', facebook_page='', removal='',
           moved_to_url='', export_controlled=False, export_control_type=None,
           tracking_id='', **kw):
    require_access(c.project, 'update')
    if removal != c.project.removal:
        M.AuditLog.log('change project removal status to %s', removal)
        h.log_action(log, 'change project removal status').info('')
        c.project.removal = removal
        c.project.removal_changed_date = datetime.utcnow()
    if 'delete_icon' in kw:
        M.ProjectFile.query.remove(dict(project_id=c.project._id, category='icon'))
        M.AuditLog.log('remove project icon')
        h.log_action(log, 'remove project icon').info('')
        g.post_event('project_updated')
        redirect('overview')
    elif 'delete' in kw:
        allow_project_delete = asbool(config.get('allow_project_delete', True))
        if allow_project_delete or not c.project.is_root:
            M.AuditLog.log('delete project')
            h.log_action(log, 'delete project').info('')
            plugin.ProjectRegistrationProvider.get().delete_project(c.project, c.user)
        redirect('overview')
    elif 'undelete' in kw:
        h.log_action(log, 'undelete project').info('')
        M.AuditLog.log('undelete project')
        plugin.ProjectRegistrationProvider.get().undelete_project(c.project, c.user)
        redirect('overview')
    if name != c.project.name:
        h.log_action(log, 'change project name').info('')
        M.AuditLog.log('change project name to %s', name)
        c.project.name = name
    if short_description != c.project.short_description:
        h.log_action(log, 'change project short description').info('')
        M.AuditLog.log('change short description to %s', short_description)
        c.project.short_description = short_description
    if summary != c.project.summary:
        h.log_action(log, 'change project summary').info('')
        M.AuditLog.log('change summary to %s', summary)
        c.project.summary = summary
    category = category and ObjectId(category) or None
    if category != c.project.category_id:
        h.log_action(log, 'change project category').info('')
        M.AuditLog.log('change category to %s', category)
        c.project.category_id = category
    if external_homepage != c.project.external_homepage:
        h.log_action(log, 'change external home page').info('')
        M.AuditLog.log('change external home page to %s', external_homepage)
        c.project.external_homepage = external_homepage
    if support_page != c.project.support_page:
        h.log_action(log, 'change project support page').info('')
        M.AuditLog.log('change project support page to %s', support_page)
        c.project.support_page = support_page
    if twitter_handle != c.project.social_account('Twitter'):
        h.log_action(log, 'change project twitter handle').info('')
        M.AuditLog.log('change project twitter handle to %s', twitter_handle)
        c.project.set_social_account('Twitter', twitter_handle)
    if facebook_page != c.project.social_account('Facebook'):
        parsed = urlparse(facebook_page)
        if 'facebook.com' in parsed.netloc:
            h.log_action(log, 'change project facebook page').info('')
            M.AuditLog.log('change project facebook page to %s', facebook_page)
            c.project.set_social_account('Facebook', facebook_page)
    if support_page_url != c.project.support_page_url:
        h.log_action(log, 'change project support page url').info('')
        M.AuditLog.log('change project support page url to %s', support_page_url)
        c.project.support_page_url = support_page_url
    if moved_to_url != c.project.moved_to_url:
        h.log_action(log, 'change project moved to url').info('')
        M.AuditLog.log('change project moved to url to %s', moved_to_url)
        c.project.moved_to_url = moved_to_url
    if export_controlled != c.project.export_controlled:
        h.log_action(log, 'change project export controlled status').info('')
        M.AuditLog.log('change project export controlled status to %s', export_controlled)
        c.project.export_controlled = not not export_controlled
        if not export_controlled:
            export_control_type = None
    if export_control_type != c.project.export_control_type:
        h.log_action(log, 'change project export control type').info('')
        M.AuditLog.log('change project export control type to %s', export_control_type)
        c.project.export_control_type = export_control_type
    if tracking_id != c.project.tracking_id:
        h.log_action(log, 'change project tracking ID').info('')
        M.AuditLog.log('change project tracking ID to %s', tracking_id)
        c.project.tracking_id = tracking_id
    if icon is not None and icon != '':
        if c.project.icon:
            M.ProjectFile.remove(dict(project_id=c.project._id, category='icon'))
        M.AuditLog.log('update project icon')
        M.ProjectFile.save_image(
            icon.filename, icon.file, content_type=icon.type,
            square=True, thumbnail_size=(48, 48),
            thumbnail_meta=dict(project_id=c.project._id, category='icon'))
    g.post_event('project_updated')
    flash('Saved', 'success')
    redirect('overview')

def commit(self):
    VersionedArtifact.commit(self)
    monitoring_email = self.app.config.options.get('TicketMonitoringEmail')
    if self.version > 1:
        hist = TicketHistory.query.get(artifact_id=self._id, version=self.version - 1)
        old = hist.data
        changes = [
            'Ticket %s has been modified: %s' % (self.ticket_num, self.summary),
            'Edited By: %s (%s)' % (c.user.get_pref('display_name'), c.user.username)
        ]
        fields = [('Summary', old.summary, self.summary),
                  ('Status', old.status, self.status)]
        if old.status != self.status and self.status in c.app.globals.set_of_closed_status_names:
            h.log_action(log, 'closed').info('')
            g.statsUpdater.ticketEvent("closed", self, self.project, self.assigned_to)
        for key in self.custom_fields:
            fields.append(
                (key, old.custom_fields.get(key, ''), self.custom_fields[key]))
        for title, o, n in fields:
            if o != n:
                changes.append('%s updated: %r => %r' % (title, o, n))
        o = hist.assigned_to
        n = self.assigned_to
        if o != n:
            changes.append('Owner updated: %r => %r' %
                           (o and o.username, n and n.username))
            self.subscribe(user=n)
            g.statsUpdater.ticketEvent("assigned", self, self.project, n)
            if o:
                g.statsUpdater.ticketEvent("revoked", self, self.project, o)
        if old.description != self.description:
            changes.append('Description updated:')
            changes.append('\n'.join(
                difflib.unified_diff(a=old.description.split('\n'),
                                     b=self.description.split('\n'),
                                     fromfile='description-old',
                                     tofile='description-new')))
        description = '\n'.join(changes)
    else:
        self.subscribe()
        if self.assigned_to_id:
            user = User.query.get(_id=self.assigned_to_id)
            g.statsUpdater.ticketEvent("assigned", self, self.project, user)
            self.subscribe(user=user)
        description = ''
    subject = self.email_subject
    Thread.new(discussion_id=self.app_config.discussion_id,
               ref_id=self.index_id())
    # First ticket notification. Use persistent Message-ID (self.message_id()).
    # Thus we can group notification emails in one thread later.
    n = Notification.post(message_id=self.message_id(), artifact=self,
                          topic='metadata', text=description, subject=subject)
    if monitoring_email and n and (
            not self.private or
            self.app.config.options.get('TicketMonitoringType') in (
                'NewTicketsOnly', 'AllTicketChanges')):
        n.send_simple(monitoring_email)
    Feed.post(self,
              title=self.summary,
              description=description if description else self.description,
              author=self.reported_by,
              pubdate=self.created_date)

def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
    all_commit_ids = commit_ids = list(repo.all_commit_ids())
    if not commit_ids:
        # the repo is empty, no need to continue
        return
    new_commit_ids = unknown_commit_ids(commit_ids)
    stats_log = h.log_action(log, 'commit')
    for ci in new_commit_ids:
        stats_log.info('', meta=dict(module='scm-%s' % repo.repo_id, read='0'))
    if not all_commits:
        # Skip commits that are already in the DB
        commit_ids = new_commit_ids
    log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)
    # Refresh commits
    seen = set()
    for i, oid in enumerate(commit_ids):
        repo.refresh_commit_info(oid, seen, not all_commits)
        if (i + 1) % 100 == 0:
            log.info('Refresh commit info %d: %s', (i + 1), oid)
    refresh_commit_repos(all_commit_ids, repo)
    # Refresh child references
    for i, oid in enumerate(commit_ids):
        ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
        refresh_children(ci)
        if (i + 1) % 100 == 0:
            log.info('Refresh child info %d for parents of %s', (i + 1), ci._id)
    if repo._refresh_precompute:
        # Refresh commit runs
        commit_run_ids = commit_ids
        # Check if the CommitRuns for the repo are in a good state by checking for
        # a CommitRunDoc that contains the last known commit. If there isn't one,
        # the CommitRuns for this repo are in a bad state - rebuild them
        # entirely.
        if commit_run_ids != all_commit_ids:
            last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
            log.info('Last known commit id: %s', last_commit)
            if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
                log.info('CommitRun incomplete, rebuilding with all commits')
                commit_run_ids = all_commit_ids
        log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
        rb = CommitRunBuilder(commit_run_ids)
        rb.run()
        rb.cleanup()
        log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)
    # Clear any existing caches for branches/tags
    if repo.cached_branches:
        repo.cached_branches = []
        session(repo).flush()
    if repo.cached_tags:
        repo.cached_tags = []
        session(repo).flush()
    # The first view can be expensive to cache,
    # so we want to do it here instead of on the first view.
    repo.get_branches()
    repo.get_tags()
    if not all_commits and not new_clone:
        for commit in commit_ids:
            new = repo.commit(commit)
            user = User.by_email_address(new.committed.email)
            if user is None:
                user = User.by_username(new.committed.name)
            if user is not None:
                g.statsUpdater.newCommit(new, repo.app_config.project, user)
            actor = user or TransientActor(
                activity_name=new.committed.name or new.committed.email)
            g.director.create_activity(actor, 'committed', new,
                                       related_nodes=[repo.app_config.project],
                                       tags=['commit', repo.tool.lower()])
        from allura.webhooks import RepoPushWebhookSender
        by_branches, by_tags = _group_commits(repo, commit_ids)
        params = []
        for b, commits in by_branches.iteritems():
            ref = u'refs/heads/{}'.format(b) if b != '__default__' else None
            params.append(dict(commit_ids=commits, ref=ref))
        for t, commits in by_tags.iteritems():
            ref = u'refs/tags/{}'.format(t)
            params.append(dict(commit_ids=commits, ref=ref))
        if params:
            RepoPushWebhookSender().send(params)
    log.info('Refresh complete for %s', repo.full_fs_path)
    g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)
    # Send notifications
    if notify:
        send_notifications(repo, reversed(commit_ids))

def update(self, name=None, short_description=None, summary="", icon=None,
           category=None, external_homepage="", support_page="",
           support_page_url="", removal="", moved_to_url="",
           export_controlled=False, export_control_type=None,
           tracking_id="", **kw):
    require_access(c.project, "update")
    if removal != c.project.removal:
        M.AuditLog.log("change project removal status to %s", removal)
        h.log_action(log, "change project removal status").info("")
        c.project.removal = removal
        c.project.removal_changed_date = datetime.utcnow()
    if "delete_icon" in kw:
        M.ProjectFile.query.remove(dict(project_id=c.project._id, category="icon"))
        M.AuditLog.log("remove project icon")
        h.log_action(log, "remove project icon").info("")
        g.post_event("project_updated")
        redirect("overview")
    elif "delete" in kw:
        allow_project_delete = asbool(config.get("allow_project_delete", True))
        if allow_project_delete or not c.project.is_root:
            M.AuditLog.log("delete project")
            h.log_action(log, "delete project").info("")
            plugin.ProjectRegistrationProvider.get().delete_project(c.project, c.user)
        redirect("overview")
    elif "undelete" in kw:
        h.log_action(log, "undelete project").info("")
        M.AuditLog.log("undelete project")
        plugin.ProjectRegistrationProvider.get().undelete_project(c.project, c.user)
        redirect("overview")
    if name != c.project.name:
        h.log_action(log, "change project name").info("")
        M.AuditLog.log("change project name to %s", name)
        c.project.name = name
    if short_description != c.project.short_description:
        h.log_action(log, "change project short description").info("")
        M.AuditLog.log("change short description to %s", short_description)
        c.project.short_description = short_description
    if summary != c.project.summary:
        h.log_action(log, "change project summary").info("")
        M.AuditLog.log("change summary to %s", summary)
        c.project.summary = summary
    category = category and ObjectId(category) or None
    if category != c.project.category_id:
        h.log_action(log, "change project category").info("")
        M.AuditLog.log("change category to %s", category)
        c.project.category_id = category
    if external_homepage != c.project.external_homepage:
        h.log_action(log, "change external home page").info("")
        M.AuditLog.log("change external home page to %s", external_homepage)
        c.project.external_homepage = external_homepage
    if support_page != c.project.support_page:
        h.log_action(log, "change project support page").info("")
        M.AuditLog.log("change project support page to %s", support_page)
        c.project.support_page = support_page
    if support_page_url != c.project.support_page_url:
        h.log_action(log, "change project support page url").info("")
        M.AuditLog.log("change project support page url to %s", support_page_url)
        c.project.support_page_url = support_page_url
    if moved_to_url != c.project.moved_to_url:
        h.log_action(log, "change project moved to url").info("")
        M.AuditLog.log("change project moved to url to %s", moved_to_url)
        c.project.moved_to_url = moved_to_url
    if export_controlled != c.project.export_controlled:
        h.log_action(log, "change project export controlled status").info("")
        M.AuditLog.log("change project export controlled status to %s", export_controlled)
        c.project.export_controlled = not not export_controlled
        if not export_controlled:
            export_control_type = None
    if export_control_type != c.project.export_control_type:
        h.log_action(log, "change project export control type").info("")
        M.AuditLog.log("change project export control type to %s", export_control_type)
        c.project.export_control_type = export_control_type
    if tracking_id != c.project.tracking_id:
        h.log_action(log, "change project tracking ID").info("")
        M.AuditLog.log("change project tracking ID to %s", tracking_id)
        c.project.tracking_id = tracking_id
    if icon is not None and icon != "":
        if c.project.icon:
            M.ProjectFile.remove(dict(project_id=c.project._id, category="icon"))
        M.AuditLog.log("update project icon")
        M.ProjectFile.save_image(
            icon.filename, icon.file, content_type=icon.type,
            square=True, thumbnail_size=(48, 48),
            thumbnail_meta=dict(project_id=c.project._id, category="icon"))
    g.post_event("project_updated")
    redirect("overview")