def testGetOwnerId(self):
  """GetOwnerId() prefers an explicit owner over a derived owner.

  With neither field set it returns NO_USER_SPECIFIED; a derived owner is
  used when no explicit owner exists; an explicit owner wins over derived.
  """
  issue = tracker_pb2.Issue()
  # assertEquals is a deprecated alias; use assertEqual.
  self.assertEqual(
      tracker_bizobj.GetOwnerId(issue),
      framework_constants.NO_USER_SPECIFIED)
  issue.derived_owner_id = 123
  self.assertEqual(tracker_bizobj.GetOwnerId(issue), 123)
  issue.owner_id = 456
  self.assertEqual(tracker_bizobj.GetOwnerId(issue), 456)
def EvaluateSubscriptions( cnxn, issue, users_to_queries, services, config): """Determine subscribers who have subs that match the given issue.""" # Note: unlike filter rule, subscriptions see explicit & derived values. lower_labels = [lab.lower() for lab in tracker_bizobj.GetLabels(issue)] label_set = set(lower_labels) subscribers_to_notify = [] for uid, saved_queries in users_to_queries.items(): for sq in saved_queries: if sq.subscription_mode != 'immediate': continue if issue.project_id not in sq.executes_in_project_ids: continue cond = savedqueries_helpers.SavedQueryToCond(sq) # TODO(jrobbins): Support linked accounts me_user_ids. cond, _warnings = searchpipeline.ReplaceKeywordsWithUserIDs([uid], cond) cond_ast = query2ast.ParseUserQuery( cond, '', query2ast.BUILTIN_ISSUE_FIELDS, config) if filterrules_helpers.EvalPredicate( cnxn, services, cond_ast, issue, label_set, config, tracker_bizobj.GetOwnerId(issue), tracker_bizobj.GetCcIds(issue), tracker_bizobj.GetStatus(issue)): subscribers_to_notify.append(uid) break # Don't bother looking at the user's other saved quereies. return subscribers_to_notify
def CanEditIssue(effective_ids, perms, project, issue, granted_perms=None):
  """Return True if a user can edit an issue.

  Args:
    effective_ids: set of user IDs for the logged in user and any user
        group memberships.  Should be an empty set for anon users.
    perms: PermissionSet for the user.
    project: Project PB for the project that contains this issue.
    issue: Issue PB for the issue being viewed.
    granted_perms: optional list of strings of permissions that the user is
        granted only within the scope of one issue, e.g., by being named in
        a user-type custom field that grants permissions.

  Returns:
    True iff the user can edit the specified issue.
  """
  # A user who cannot even view the issue certainly cannot edit it.
  # TODO(jrobbins): We need to actually grant View+EditIssue in most cases.
  # So, always grant View whenever there is any granted perm.
  viewable = CanViewIssue(
      effective_ids, perms, project, issue, granted_perms=granted_perms)
  if not viewable:
    return False

  # The issue owner can always edit the issue.
  if effective_ids and tracker_bizobj.GetOwnerId(issue) in effective_ids:
    return True

  # Otherwise, fall back to the usual permission checking.
  return perms.CanUsePerm(
      EDIT_ISSUE, effective_ids, project, GetRestrictions(issue),
      granted_perms=granted_perms)
def CanViewRestrictedIssueInVisibleProject(
    effective_ids, perms, project, issue, granted_perms=None):
  """Return True if the user can view this issue.  Assumes project is OK."""
  # The reporter, owner, and CC'd users can always see the issue; in effect,
  # these fields override artifact restriction labels.
  if effective_ids:
    is_reporter = issue.reporter_id in effective_ids
    is_owner = tracker_bizobj.GetOwnerId(issue) in effective_ids
    is_ccd = not effective_ids.isdisjoint(tracker_bizobj.GetCcIds(issue))
    if is_reporter or is_owner or is_ccd:
      return True

  # Otherwise, apply the usual permission checking.
  return CanView(
      effective_ids, perms, project, GetRestrictions(issue),
      granted_perms=granted_perms)
def ExtractUniqueValues(columns, artifact_list, users_by_id, config,
                        related_issues, hotlist_context_dict=None):
  """Build a nested list of unique values so the user can auto-filter.

  Args:
    columns: a list of lowercase column name strings, which may contain
        combined columns like "priority/pri".
    artifact_list: a list of artifacts in the complete set of search results.
    users_by_id: dict mapping user_ids to UserViews.
    config: ProjectIssueConfig PB for the current project.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    hotlist_context_dict: dict for building a hotlist grid table

  Returns:
    [EZTItem(col1, colname1, [val11, val12,...]), ...]
    A list of EZTItems, each of which has a col_index, column_name,
    and a list of unique values that appear in that column.
  """
  # Maps column name -> {lowercased value: original-case value}, so each
  # distinct value is kept once regardless of capitalization.
  column_values = {col_name: {} for col_name in columns}

  # For each combined column "a/b/c", add entries that point from "a" back
  # to "a/b/c", from "b" back to "a/b/c", and from "c" back to "a/b/c".
  combined_column_parts = collections.defaultdict(list)
  for col in columns:
    if '/' in col:
      for col_part in col.split('/'):
        combined_column_parts[col_part].append(col)

  unique_labels = set()
  for art in artifact_list:
    unique_labels.update(tracker_bizobj.GetLabels(art))

  for label in unique_labels:
    if '-' in label:
      # Key-value label: the prefix acts as a column name.
      col, val = label.split('-', 1)
      col = col.lower()
      if col in column_values:
        column_values[col][val.lower()] = val
      if col in combined_column_parts:
        for combined_column in combined_column_parts[col]:
          column_values[combined_column][val.lower()] = val
    else:
      # One-word labels are shown in the summary column.
      if 'summary' in column_values:
        column_values['summary'][label.lower()] = label

  # TODO(jrobbins): Consider refactoring some of this to tracker_bizobj
  # or a new builtins.py to reduce duplication.
  if 'reporter' in column_values:
    for art in artifact_list:
      reporter_id = art.reporter_id
      if reporter_id and reporter_id in users_by_id:
        reporter_username = users_by_id[reporter_id].display_name
        column_values['reporter'][reporter_username] = reporter_username

  if 'owner' in column_values:
    for art in artifact_list:
      owner_id = tracker_bizobj.GetOwnerId(art)
      if owner_id and owner_id in users_by_id:
        owner_username = users_by_id[owner_id].display_name
        column_values['owner'][owner_username] = owner_username

  if 'cc' in column_values:
    for art in artifact_list:
      cc_ids = tracker_bizobj.GetCcIds(art)
      for cc_id in cc_ids:
        if cc_id and cc_id in users_by_id:
          cc_username = users_by_id[cc_id].display_name
          column_values['cc'][cc_username] = cc_username

  if 'component' in column_values:
    for art in artifact_list:
      # Both explicit and derived components are shown.
      all_comp_ids = list(art.component_ids) + list(art.derived_component_ids)
      for component_id in all_comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(component_id, config)
        if cd:
          column_values['component'][cd.path] = cd.path

  if 'stars' in column_values:
    for art in artifact_list:
      star_count = art.star_count
      column_values['stars'][star_count] = star_count

  if 'status' in column_values:
    for art in artifact_list:
      status = tracker_bizobj.GetStatus(art)
      if status:
        column_values['status'][status.lower()] = status

  if 'project' in column_values:
    for art in artifact_list:
      project_name = art.project_name
      column_values['project'][project_name] = project_name

  if 'mergedinto' in column_values:
    for art in artifact_list:
      if art.merged_into and art.merged_into != 0:
        merged_issue = related_issues[art.merged_into]
        merged_issue_ref = tracker_bizobj.FormatIssueRef(
            (merged_issue.project_name, merged_issue.local_id))
        column_values['mergedinto'][merged_issue_ref] = merged_issue_ref

  if 'blocked' in column_values:
    for art in artifact_list:
      if art.blocked_on_iids:
        column_values['blocked']['is_blocked'] = 'Yes'
      else:
        column_values['blocked']['is_not_blocked'] = 'No'

  if 'blockedon' in column_values:
    for art in artifact_list:
      if art.blocked_on_iids:
        for blocked_on_iid in art.blocked_on_iids:
          blocked_on_issue = related_issues[blocked_on_iid]
          blocked_on_ref = tracker_bizobj.FormatIssueRef(
              (blocked_on_issue.project_name, blocked_on_issue.local_id))
          column_values['blockedon'][blocked_on_ref] = blocked_on_ref

  if 'blocking' in column_values:
    for art in artifact_list:
      if art.blocking_iids:
        for blocking_iid in art.blocking_iids:
          blocking_issue = related_issues[blocking_iid]
          blocking_ref = tracker_bizobj.FormatIssueRef(
              (blocking_issue.project_name, blocking_issue.local_id))
          column_values['blocking'][blocking_ref] = blocking_ref

  # Hotlist-only columns are populated from hotlist_context_dict.
  if 'added' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        date_added = issue_dict['date_added']
        column_values['added'][date_added] = date_added

  if 'adder' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        adder_id = issue_dict['adder_id']
        adder = users_by_id[adder_id].display_name
        column_values['adder'][adder] = adder

  if 'note' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        note = issue_dict['note']
        if issue_dict['note']:
          column_values['note'][note] = note

  if 'attachments' in column_values:
    for art in artifact_list:
      attachment_count = art.attachment_count
      column_values['attachments'][attachment_count] = attachment_count

  # Add all custom field values if the custom field name is a shown column.
  # field_id_to_col caches field_id -> (column name, field type), with the
  # sentinel 'NOT_SHOWN' marking fields that are unknown or not displayed.
  field_id_to_col = {}
  for art in artifact_list:
    for fv in art.field_values:
      field_col, field_type = field_id_to_col.get(
          fv.field_id, (None, None))
      if field_col == 'NOT_SHOWN':
        continue
      if field_col is None:
        fd = tracker_bizobj.FindFieldDefByID(fv.field_id, config)
        if not fd:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_col = fd.field_name.lower()
        field_type = fd.field_type
        if field_col not in column_values:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_id_to_col[fv.field_id] = field_col, field_type
      # NOTE(review): any field type not covered by the branches below
      # would fall through with `val` unbound or stale from a previous
      # iteration — confirm that all FieldTypes values are handled.
      if field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
        continue  # Already handled by label parsing
      elif field_type == tracker_pb2.FieldTypes.INT_TYPE:
        val = fv.int_value
      elif field_type == tracker_pb2.FieldTypes.STR_TYPE:
        val = fv.str_value
      elif field_type == tracker_pb2.FieldTypes.USER_TYPE:
        user = users_by_id.get(fv.user_id)
        val = user.email if user else framework_constants.NO_USER_NAME
      elif field_type == tracker_pb2.FieldTypes.DATE_TYPE:
        val = fv.int_value  # TODO(jrobbins): convert to date
      elif field_type == tracker_pb2.FieldTypes.BOOL_TYPE:
        val = 'Yes' if fv.int_value else 'No'
      column_values[field_col][val] = val

  # TODO(jrobbins): make the capitalization of well-known unique label and
  # status values match the way it is written in the issue config.

  # Return EZTItems for each column in left-to-right display order.
  result = []
  for i, col_name in enumerate(columns):
    # TODO(jrobbins): sort each set of column values top-to-bottom, by the
    # order specified in the project artifact config.  For now, just sort
    # lexicographically to make expected output defined.
    sorted_col_values = sorted(column_values[col_name].values())
    result.append(
        template_helpers.EZTItem(col_index=i, column_name=col_name,
                                 filter_values=sorted_col_values))

  return result
def GetArtifactAttr(
    art, attribute_name, users_by_id, label_attr_values_dict,
    config, related_issues, hotlist_issue_context=None):
  """Return the requested attribute values of the given artifact.

  Args:
    art: a tracked artifact with labels, local_id, summary, stars, and owner.
    attribute_name: lowercase string name of attribute to get.
    users_by_id: dictionary of UserViews already created.
    label_attr_values_dict: dictionary {'key': [value, ...], }.
    config: ProjectIssueConfig PB for the current project.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    hotlist_issue_context: dict of {hotlist_issue_field: field_value,..}

  Returns:
    A list of string attribute values, or [framework_constants.NO_VALUES]
    if the artifact has no value for that attribute.
  """
  # Built-in attributes are handled by explicit checks, then custom fields,
  # then key-value labels as a final fallback.
  if attribute_name == '--':
    return []
  if attribute_name == 'id':
    return [art.local_id]
  if attribute_name == 'summary':
    return [art.summary]
  if attribute_name == 'status':
    return [tracker_bizobj.GetStatus(art)]
  if attribute_name == 'stars':
    return [art.star_count]
  if attribute_name == 'attachments':
    return [art.attachment_count]
  # TODO(jrobbins): support blocking
  if attribute_name == 'project':
    return [art.project_name]
  if attribute_name == 'mergedinto':
    if art.merged_into and art.merged_into != 0:
      return [tracker_bizobj.FormatIssueRef((
          related_issues[art.merged_into].project_name,
          related_issues[art.merged_into].local_id))]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'blocked':
    return ['Yes' if art.blocked_on_iids else 'No']
  if attribute_name == 'blockedon':
    if not art.blocked_on_iids:
      return [framework_constants.NO_VALUES]
    else:
      return [
          tracker_bizobj.FormatIssueRef((
              related_issues[blocked_on_iid].project_name,
              related_issues[blocked_on_iid].local_id))
          for blocked_on_iid in art.blocked_on_iids]
  if attribute_name == 'adder':
    # Hotlist-only attribute; meaningful only with hotlist_issue_context.
    if hotlist_issue_context:
      adder_id = hotlist_issue_context['adder_id']
      return [users_by_id[adder_id].display_name]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'added':
    if hotlist_issue_context:
      return [hotlist_issue_context['date_added']]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'reporter':
    return [users_by_id[art.reporter_id].display_name]
  if attribute_name == 'owner':
    owner_id = tracker_bizobj.GetOwnerId(art)
    if not owner_id:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[owner_id].display_name]
  if attribute_name == 'cc':
    cc_ids = tracker_bizobj.GetCcIds(art)
    if not cc_ids:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[cc_id].display_name for cc_id in cc_ids]
  if attribute_name == 'component':
    comp_ids = list(art.component_ids) + list(art.derived_component_ids)
    if not comp_ids:
      return [framework_constants.NO_VALUES]
    else:
      paths = []
      for comp_id in comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(comp_id, config)
        if cd:
          paths.append(cd.path)
      return paths

  # Check to see if it is a field. Process as field only if it is not an enum
  # type because enum types are stored as key-value labels.
  fd = tracker_bizobj.FindFieldDef(attribute_name, config)
  if fd and fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE:
    values = []
    for fv in art.field_values:
      if fv.field_id == fd.field_id:
        value = tracker_bizobj.GetFieldValueWithRawValue(
            fd.field_type, fv, users_by_id, None)
        values.append(value)
    return values

  # Since it is not a built-in attribute or a field, it must be a key-value
  # label.
  return label_attr_values_dict.get(
      attribute_name, [framework_constants.NO_VALUES])
def _CreateIssueSearchDocuments(
    issues, comments_dict, users_by_id, config_dict):
  """Make the GAE search index documents for the given issue batch.

  Args:
    issues: list of issues to index.
    comments_dict: prefetched dictionary of comments on those issues.
    users_by_id: dictionary {user_id: UserView} so that the email
        addresses of users who left comments can be found via search.
    config_dict: dict {project_id: config} for all the projects that
        the given issues are in.
  """
  # Bucket the documents by logical shard so they can be indexed in parallel.
  documents_by_shard = collections.defaultdict(list)
  for issue in issues:
    summary = issue.summary
    # TODO(jrobbins): allow search specifically on explicit vs derived
    # fields.
    owner_id = tracker_bizobj.GetOwnerId(issue)
    owner_email = users_by_id[owner_id].email
    config = config_dict[issue.project_id]
    component_paths = []
    for component_id in issue.component_ids:
      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
      if cd:
        component_paths.append(cd.path)

    field_values = [tracker_bizobj.GetFieldValue(fv, users_by_id)
                    for fv in issue.field_values]
    # Convert to string only the values that are not strings already.
    # This is done because the default encoding in appengine seems to be
    # 'ascii' and string values might contain unicode characters, so str
    # will fail to encode them.
    field_values = [value if isinstance(value, string_types) else str(value)
                    for value in field_values]

    # Flatten status, owner, CCs, components, fields, and labels into one
    # free-text metadata field.
    metadata = '%s %s %s %s %s %s' % (
        tracker_bizobj.GetStatus(issue),
        owner_email,
        [users_by_id[cc_id].email
         for cc_id in tracker_bizobj.GetCcIds(issue)],
        ' '.join(component_paths),
        ' '.join(field_values),
        ' '.join(tracker_bizobj.GetLabels(issue)))
    custom_fields = _BuildCustomFTSFields(issue)

    # Trim comments so the total document stays under the FTS field limit.
    comments = comments_dict.get(issue.issue_id, [])
    room_for_comments = (framework_constants.MAX_FTS_FIELD_SIZE -
                         len(summary) -
                         len(metadata) -
                         sum(len(cf.value) for cf in custom_fields))
    comments = _IndexableComments(
        comments, users_by_id, remaining_chars=room_for_comments)
    logging.info('len(comments) is %r', len(comments))
    if comments:
      # The first comment is the issue description; the rest are comments.
      description = _ExtractCommentText(comments[0], users_by_id)
      description = description[:framework_constants.MAX_FTS_FIELD_SIZE]
      all_comments = ' '.join(
          _ExtractCommentText(c, users_by_id) for c in comments[1:])
      all_comments = all_comments[:framework_constants.MAX_FTS_FIELD_SIZE]
    else:
      description = ''
      all_comments = ''
      logging.info(
          'Issue %s:%r has zero indexable comments',
          issue.project_name, issue.local_id)

    logging.info('Building document for %s:%d',
                 issue.project_name, issue.local_id)
    logging.info('len(summary) = %d', len(summary))
    logging.info('len(metadata) = %d', len(metadata))
    logging.info('len(description) = %d', len(description))
    logging.info('len(comment) = %d', len(all_comments))
    for cf in custom_fields:
      logging.info('len(%s) = %d', cf.name, len(cf.value))

    doc = search.Document(
        doc_id=str(issue.issue_id),
        fields=[
            search.NumberField(name='project_id', value=issue.project_id),
            search.TextField(name='summary', value=summary),
            search.TextField(name='metadata', value=metadata),
            search.TextField(name='description', value=description),
            search.TextField(name='comment', value=all_comments),
            ] + custom_fields)

    shard_id = issue.issue_id % settings.num_logical_shards
    documents_by_shard[shard_id].append(doc)

  # Index each shard's documents concurrently via Promises, then wait.
  start_time = time.time()
  promises = []
  for shard_id, documents in documents_by_shard.items():
    if documents:
      promises.append(framework_helpers.Promise(
          _IndexDocsInShard, shard_id, documents))

  for promise in promises:
    promise.WaitAndGetValue()

  logging.info('Finished %d indexing in shards in %d ms',
               len(documents_by_shard),
               int((time.time() - start_time) * 1000))
def ProcessFormData(self, mr, post_data):
  """Process the posted issue update form.

  Args:
    mr: commonly used info parsed from the request.
    post_data: HTML form data from the request.

  Returns:
    String URL to redirect the user to, or None if response was already sent.
  """
  cmd = post_data.get('cmd', '')
  send_email = 'send_email' in post_data
  comment = post_data.get('comment', '')
  slot_used = int(post_data.get('slot_used', 1))
  page_generation_time = int(post_data['pagegen'])
  # NOTE(review): the exact extent of this `with` block could not be
  # recovered from the mangled source; assumed to cover only the fetches
  # through `we` — confirm against version history.
  with work_env.WorkEnv(mr, self.services) as we:
    issue = we.GetIssueByLocalID(mr.project_id, mr.local_id, use_cache=False)
    old_owner_id = tracker_bizobj.GetOwnerId(issue)
    config = we.GetProjectConfig(mr.project_id)

  summary, status, owner_id, cc_ids, labels = commands.ParseQuickEditCommand(
      mr.cnxn, cmd, issue, config, mr.auth.user_id, self.services)
  component_ids = issue.component_ids  # TODO(jrobbins): component commands
  field_values = issue.field_values  # TODO(jrobbins): edit custom fields

  permit_edit = permissions.CanEditIssue(
      mr.auth.effective_ids, mr.perms, mr.project, issue)
  if not permit_edit:
    raise permissions.PermissionException(
        'User is not allowed to edit this issue')

  amendments, comment_pb = self.services.issue.ApplyIssueComment(
      mr.cnxn, self.services, mr.auth.user_id,
      mr.project_id, mr.local_id, summary, status, owner_id, cc_ids,
      labels, field_values, component_ids, issue.blocked_on_iids,
      issue.blocking_iids, issue.dangling_blocked_on_refs,
      issue.dangling_blocking_refs, issue.merged_into,
      page_gen_ts=page_generation_time, comment=comment)
  self.services.project.UpdateRecentActivity(
      mr.cnxn, mr.project.project_id)

  # Only send a notification when something actually changed or a
  # non-blank comment was entered.
  if send_email:
    if amendments or comment.strip():
      send_notifications.PrepareAndSendIssueChangeNotification(
          issue.issue_id, mr.request.host, mr.auth.user_id,
          send_email=send_email, old_owner_id=old_owner_id,
          comment_id=comment_pb.id)

  # TODO(jrobbins): allow issue merge via quick-edit.

  self.services.features.StoreRecentCommand(
      mr.cnxn, mr.auth.user_id, mr.project_id, slot_used, cmd, comment)

  # TODO(jrobbins): this is very similar to a block of code in issuebulkedit.
  mr.can = int(post_data['can'])
  mr.query = post_data.get('q', '')
  mr.col_spec = post_data.get('colspec', '')
  mr.sort_spec = post_data.get('sort', '')
  mr.group_by_spec = post_data.get('groupby', '')
  mr.start = int(post_data['start'])
  mr.num = int(post_data['num'])
  preview_issue_ref_str = '%s:%d' % (issue.project_name, issue.local_id)
  return tracker_helpers.FormatIssueListURL(
      mr, config, preview=preview_issue_ref_str, updated=mr.local_id,
      ts=int(time.time()))
def UpdateIssuePermissions(
    perms, project, issue, effective_ids, granted_perms=None, config=None):
  """Update the PermissionSet for a specific issue.

  Take into account granted permissions and label restrictions to filter
  the permissions, and update the VIEW and EDIT_ISSUE permissions
  depending on the role of the user in the issue (i.e. owner, reporter,
  cc or approver).

  Args:
    perms: The PermissionSet to update.
    project: The Project PB for the issue project.
    issue: The Issue PB.
    effective_ids: Set of int user IDs for the current user and all user
        groups that s/he is a member of.  This will be an empty set for
        anonymous users.
    granted_perms: optional list of strings of permissions that the user is
        granted only within the scope of one issue, e.g., by being named in
        a user-type custom field that grants permissions.
    config: optional ProjectIssueConfig PB where granted perms should be
        extracted from, if granted_perms is not given.
  """
  if config:
    granted_perms = tracker_bizobj.GetGrantedPerms(
        issue, effective_ids, config)
  elif granted_perms is None:
    granted_perms = []

  # If the user has no permission to view the project, it has no permissions
  # on this issue.
  if not perms.HasPerm(VIEW, None, None):
    return EMPTY_PERMISSIONSET

  # Compute the restrictions for the given issue and store them in a
  # dictionary of {perm: set(needed_perms)}.
  restrictions = collections.defaultdict(set)
  if perms.consider_restrictions:
    for label in GetRestrictions(issue):
      label = label.lower()
      # format: Restrict-Action-ToThisPerm
      _, requested_perm, needed_perm = label.split('-', 2)
      restrictions[requested_perm.lower()].add(needed_perm.lower())

  # Store the user permissions, and the extra permissions of all effective
  # IDs in the given project.
  all_perms = set(perms.perm_names)
  for effective_id in effective_ids:
    all_perms.update(p.lower() for p in GetExtraPerms(project, effective_id))

  # And filter them applying the restriction labels.
  filtered_perms = set()
  for perm_name in all_perms:
    perm_name = perm_name.lower()
    # A perm is restricted only if the user lacks every needed perm.
    restricted = any(
        restriction not in all_perms and restriction not in granted_perms
        for restriction in restrictions.get(perm_name, []))
    if not restricted:
      filtered_perms.add(perm_name)

  # Add any granted permissions.
  filtered_perms.update(granted_perms)

  # The VIEW perm might have been removed due to restrictions, but the issue
  # owner, reporter, cc'd users, and approvers can always view the issue.
  allowed_ids = set(
      tracker_bizobj.GetCcIds(issue)
      + tracker_bizobj.GetApproverIds(issue)
      + [issue.reporter_id, tracker_bizobj.GetOwnerId(issue)])
  if effective_ids and not allowed_ids.isdisjoint(effective_ids):
    filtered_perms.add(VIEW.lower())

  # If the issue is deleted, only the VIEW and DELETE_ISSUE permissions are
  # relevant.
  if issue.deleted:
    if VIEW.lower() not in filtered_perms:
      return EMPTY_PERMISSIONSET
    if DELETE_ISSUE.lower() in filtered_perms:
      return PermissionSet([VIEW, DELETE_ISSUE], perms.consider_restrictions)
    return PermissionSet([VIEW], perms.consider_restrictions)

  # The EDIT_ISSUE permission might have been removed due to restrictions,
  # but the owner always has permission to edit it.
  if effective_ids and tracker_bizobj.GetOwnerId(issue) in effective_ids:
    filtered_perms.add(EDIT_ISSUE.lower())

  return PermissionSet(filtered_perms, perms.consider_restrictions)
def ProcessFormData(self, mr, post_data):
  """Process the posted issue update form.

  Args:
    mr: commonly used info parsed from the request.
    post_data: HTML form data from the request.

  Returns:
    String URL to redirect the user to after processing.
  """
  # --- Request validation: reject tampered or unauthorized requests. ---
  if not mr.local_id_list:
    logging.info('missing issue local IDs, probably tampered')
    self.response.status = httplib.BAD_REQUEST
    return

  # Check that the user is logged in; anon users cannot update issues.
  if not mr.auth.user_id:
    logging.info('user was not logged in, cannot update issue')
    self.response.status = httplib.BAD_REQUEST  # xxx should raise except
    return

  # Check that the user has permission to add a comment, and to enter
  # metadata if they are trying to do that.
  if not self.CheckPerm(mr, permissions.ADD_ISSUE_COMMENT):
    logging.info('user has no permission to add issue comment')
    self.response.status = httplib.BAD_REQUEST
    return

  if not self.CheckPerm(mr, permissions.EDIT_ISSUE):
    logging.info('user has no permission to edit issue metadata')
    self.response.status = httplib.BAD_REQUEST
    return

  move_to = post_data.get('move_to', '').lower()
  if move_to and not self.CheckPerm(mr, permissions.DELETE_ISSUE):
    logging.info('user has no permission to move issue')
    self.response.status = httplib.BAD_REQUEST
    return

  # --- Parse the form into field/label/user/component deltas. ---
  config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)

  parsed = tracker_helpers.ParseIssueRequest(
      mr.cnxn, post_data, self.services, mr.errors, mr.project_name)
  bounce_labels = (
      parsed.labels[:] +
      ['-%s' % lr for lr in parsed.labels_remove])
  bounce_fields = tracker_views.MakeBounceFieldValueViews(
      parsed.fields.vals, parsed.fields.phase_vals, config)
  field_helpers.ShiftEnumFieldsIntoLabels(
      parsed.labels, parsed.labels_remove,
      parsed.fields.vals, parsed.fields.vals_remove, config)
  issue_list = self.services.issue.GetIssuesByLocalIDs(
      mr.cnxn, mr.project_id, mr.local_id_list)
  issue_phases = list(
      itertools.chain.from_iterable(issue.phases for issue in issue_list))
  phase_ids_by_name = collections.defaultdict(set)
  for phase in issue_phases:
    phase_ids_by_name[phase.name.lower()].add(phase.phase_id)
  # Note: Not all parsed phase field values will be applicable to every issue.
  # tracker_bizobj.ApplyFieldValueChanges will take care of not adding
  # phase field values to issues that don't contain the correct phase.
  field_vals = field_helpers.ParseFieldValues(
      mr.cnxn, self.services.user, parsed.fields.vals,
      parsed.fields.phase_vals, config,
      phase_ids_by_name=phase_ids_by_name)
  field_vals_remove = field_helpers.ParseFieldValues(
      mr.cnxn, self.services.user, parsed.fields.vals_remove,
      parsed.fields.phase_vals_remove, config,
      phase_ids_by_name=phase_ids_by_name)

  field_helpers.ValidateCustomFields(
      mr, self.services, field_vals, config, mr.errors)

  # Treat status '' as no change and explicit 'clear' as clearing the status.
  status = parsed.status
  if status == '':
    status = None
  if post_data.get('op_statusenter') == 'clear':
    status = ''

  reporter_id = mr.auth.user_id
  logging.info('bulk edit request by %s', reporter_id)

  if parsed.users.owner_id is None:
    mr.errors.owner = 'Invalid owner username'
  else:
    valid, msg = tracker_helpers.IsValidIssueOwner(
        mr.cnxn, mr.project, parsed.users.owner_id, self.services)
    if not valid:
      mr.errors.owner = msg

  if (status in config.statuses_offer_merge
      and not post_data.get('merge_into')):
    mr.errors.merge_into_id = 'Please enter a valid issue ID'

  move_to_project = None
  if move_to:
    if mr.project_name == move_to:
      mr.errors.move_to = 'The issues are already in project ' + move_to
    else:
      move_to_project = self.services.project.GetProjectByName(
          mr.cnxn, move_to)
      if not move_to_project:
        mr.errors.move_to = 'No such project: ' + move_to

  # Treat owner '' as no change, and explicit 'clear' as NO_USER_SPECIFIED
  owner_id = parsed.users.owner_id
  if parsed.users.owner_username == '':
    owner_id = None
  if post_data.get('op_ownerenter') == 'clear':
    owner_id = framework_constants.NO_USER_SPECIFIED

  comp_ids = tracker_helpers.LookupComponentIDs(
      parsed.components.paths, config, mr.errors)
  comp_ids_remove = tracker_helpers.LookupComponentIDs(
      parsed.components.paths_remove, config, mr.errors)
  if post_data.get('op_componententer') == 'remove':
    comp_ids, comp_ids_remove = comp_ids_remove, comp_ids

  cc_ids, cc_ids_remove = parsed.users.cc_ids, parsed.users.cc_ids_remove
  if post_data.get('op_memberenter') == 'remove':
    cc_ids, cc_ids_remove = parsed.users.cc_ids_remove, parsed.users.cc_ids

  issue_list_iids = {issue.issue_id for issue in issue_list}
  if post_data.get('op_blockedonenter') == 'append':
    if issue_list_iids.intersection(parsed.blocked_on.iids):
      mr.errors.blocked_on = 'Cannot block an issue on itself.'
    blocked_on_add = parsed.blocked_on.iids
    blocked_on_remove = []
  else:
    blocked_on_add = []
    blocked_on_remove = parsed.blocked_on.iids
  if post_data.get('op_blockingenter') == 'append':
    if issue_list_iids.intersection(parsed.blocking.iids):
      mr.errors.blocking = 'Cannot block an issue on itself.'
    blocking_add = parsed.blocking.iids
    blocking_remove = []
  else:
    blocking_add = []
    blocking_remove = parsed.blocking.iids

  iids_actually_changed = []
  old_owner_ids = []
  combined_amendments = []
  merge_into_issue = None
  new_starrers = set()

  # --- Apply the delta to each editable issue (only if no parse errors). ---
  if not mr.errors.AnyErrors():
    # Because we will modify issues, load from DB rather than cache.
    issue_list = self.services.issue.GetIssuesByLocalIDs(
        mr.cnxn, mr.project_id, mr.local_id_list, use_cache=False)

    # Skip any individual issues that the user is not allowed to edit.
    editable_issues = [
        issue for issue in issue_list
        if permissions.CanEditIssue(
            mr.auth.effective_ids, mr.perms, mr.project, issue)]

    # Skip any restrict issues that cannot be moved
    if move_to:
      editable_issues = [
          issue for issue in editable_issues
          if not permissions.GetRestrictions(issue)]

    # If 'Duplicate' status is specified ensure there are no permission issues
    # with the issue we want to merge with.
    if post_data.get('merge_into'):
      for issue in editable_issues:
        _, merge_into_issue = tracker_helpers.ParseMergeFields(
            mr.cnxn, self.services, mr.project_name, post_data,
            parsed.status, config, issue, mr.errors)
        if merge_into_issue:
          merge_allowed = tracker_helpers.IsMergeAllowed(
              merge_into_issue, mr, self.services)
          if not merge_allowed:
            mr.errors.merge_into_id = 'Target issue %s cannot be modified' % (
                merge_into_issue.local_id)
            break

          # Update the new_starrers set.
          new_starrers.update(tracker_helpers.GetNewIssueStarrers(
              mr.cnxn, self.services, issue.issue_id,
              merge_into_issue.issue_id))

    # Proceed with amendments only if there are no reported errors.
    if not mr.errors.AnyErrors():
      # Sort the issues: we want them in this order so that the
      # corresponding old_owner_id are found in the same order.
      editable_issues.sort(lambda i1, i2: cmp(i1.local_id, i2.local_id))

      iids_to_invalidate = set()
      rules = self.services.features.GetFilterRules(
          mr.cnxn, config.project_id)
      predicate_asts = filterrules_helpers.ParsePredicateASTs(
          rules, config, [])
      for issue in editable_issues:
        old_owner_id = tracker_bizobj.GetOwnerId(issue)
        merge_into_iid = (
            merge_into_issue.issue_id if merge_into_issue else None)

        delta = tracker_bizobj.MakeIssueDelta(
            status, owner_id, cc_ids, cc_ids_remove, comp_ids,
            comp_ids_remove, parsed.labels, parsed.labels_remove,
            field_vals, field_vals_remove, parsed.fields.fields_clear,
            blocked_on_add, blocked_on_remove, blocking_add,
            blocking_remove, merge_into_iid, None)
        amendments, _ = self.services.issue.DeltaUpdateIssue(
            mr.cnxn, self.services, mr.auth.user_id, mr.project_id,
            config, issue, delta, comment=parsed.comment,
            iids_to_invalidate=iids_to_invalidate, rules=rules,
            predicate_asts=predicate_asts)

        if amendments or parsed.comment:  # Avoid empty comments.
          iids_actually_changed.append(issue.issue_id)
          old_owner_ids.append(old_owner_id)
          combined_amendments.extend(amendments)

      self.services.issue.InvalidateIIDs(mr.cnxn, iids_to_invalidate)
      self.services.project.UpdateRecentActivity(
          mr.cnxn, mr.project.project_id)

      # Add new_starrers and new CCs to merge_into_issue.
      if merge_into_issue:
        merge_into_project = self.services.project.GetProjectByName(
            mr.cnxn, merge_into_issue.project_name)
        tracker_helpers.AddIssueStarrers(
            mr.cnxn, self.services, mr, merge_into_issue.issue_id,
            merge_into_project, new_starrers)
        tracker_helpers.MergeCCsAndAddCommentMultipleIssues(
            self.services, mr, editable_issues, merge_into_issue)

      # --- Optionally move the issues to another project. ---
      if move_to and editable_issues:
        tracker_fulltext.UnindexIssues(
            [issue.issue_id for issue in editable_issues])
        for issue in editable_issues:
          old_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
          moved_back_iids = self.services.issue.MoveIssues(
              mr.cnxn, move_to_project, [issue], self.services.user)
          new_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
          if issue.issue_id in moved_back_iids:
            content = 'Moved %s back to %s again.' % (
                old_text_ref, new_text_ref)
          else:
            content = 'Moved %s to now be %s.' % (old_text_ref, new_text_ref)
          self.services.issue.CreateIssueComment(
              mr.cnxn, issue, mr.auth.user_id, content, amendments=[
                  tracker_bizobj.MakeProjectAmendment(
                      move_to_project.project_name)])

      # --- Notify interested users of the bulk change. ---
      send_email = 'send_email' in post_data

      users_by_id = framework_views.MakeAllUserViews(
          mr.cnxn, self.services.user,
          [owner_id], cc_ids, cc_ids_remove, old_owner_ids,
          tracker_bizobj.UsersInvolvedInAmendments(combined_amendments))
      if move_to and editable_issues:
        iids_actually_changed = [
            issue.issue_id for issue in editable_issues]

      send_notifications.SendIssueBulkChangeNotification(
          iids_actually_changed, mr.request.host,
          old_owner_ids, parsed.comment, reporter_id,
          combined_amendments, send_email, users_by_id)

  # On any error, re-render the form with the user's entries preserved.
  if mr.errors.AnyErrors():
    bounce_cc_parts = (
        parsed.users.cc_usernames +
        ['-%s' % ccur for ccur in parsed.users.cc_usernames_remove])
    self.PleaseCorrect(
        mr, initial_status=parsed.status,
        initial_owner=parsed.users.owner_username,
        initial_merge_into=post_data.get('merge_into', 0),
        initial_cc=', '.join(bounce_cc_parts),
        initial_comment=parsed.comment,
        initial_components=parsed.components.entered_str,
        labels=bounce_labels, fields=bounce_fields)
    return

  with mr.profiler.Phase('reindexing issues'):
    logging.info('starting reindexing')
    start = time.time()
    # Get the updated issues and index them
    issue_list = self.services.issue.GetIssuesByLocalIDs(
        mr.cnxn, mr.project_id, mr.local_id_list)
    tracker_fulltext.IndexIssues(
        mr.cnxn, issue_list, self.services.user, self.services.issue,
        self.services.config)
    logging.info('reindexing %d issues took %s sec',
                 len(issue_list), time.time() - start)

  # TODO(jrobbins): These could be put into the form action attribute.
  mr.can = int(post_data['can'])
  mr.query = post_data['q']
  mr.col_spec = post_data['colspec']
  mr.sort_spec = post_data['sort']
  mr.group_by_spec = post_data['groupby']
  mr.start = int(post_data['start'])
  mr.num = int(post_data['num'])

  # TODO(jrobbins): implement bulk=N param for a better confirmation alert.
  return tracker_helpers.FormatIssueListURL(
      mr, config, saved=len(mr.local_id_list), ts=int(time.time()))
def _BulkEditEmailTasks(
    self, cnxn, issues, old_owner_ids, omit_addrs, project,
    non_private_issues, users_by_id, ids_in_issues, starrers,
    commenter_view, hostport, comment_text, amendments, config):
  """Generate Email PBs to notify interested users after a bulk edit.

  Args:
    cnxn: connection to SQL database.
    issues: list of issues that were edited in this bulk change.
    old_owner_ids: list of owner IDs parallel to issues, giving each
        issue's owner before the edit (used for participant matching).
    omit_addrs: set of email addresses that must not be notified; this
        method also adds every address it decides to notify, so callers
        can avoid duplicate emails.
    project: Project PB for the project containing the issues.
    non_private_issues: issues that are visible enough to mention in
        emails sent to configured notify addresses.
    users_by_id: dict {user_id: UserView}.
        NOTE(review): this parameter is immediately clobbered by the
        `users_by_id = {}` assignment below, so the caller's value is
        never used — confirm whether that reassignment is intentional.
    ids_in_issues: dict {local_id: [user_id, ...]} of users somehow
        involved in each issue.
    starrers: dict {local_id: [user_id, ...]} of users who starred
        each issue.
    commenter_view: UserView of the user who made the bulk change.
    hostport: string host:port for building links in the emails.
    comment_text: text of the bulk-change comment.
    amendments: list of Amendment PBs describing the changes made.
    config: ProjectIssueConfig for the project (re-fetched below).

  Returns:
    A list of email items produced by self._FormatBulkIssuesEmail,
    one per recipient address.
  """
  # 1. Get the user IDs of everyone who could be notified,
  # and make all their user proxies. Also, build a dictionary
  # of all the users to notify and the issues that they are
  # interested in. Also, build a dictionary of additional email
  # addresses to notify and the issues to notify them of.
  users_by_id = {}
  ids_to_notify_of_issue = {}
  additional_addrs_to_notify_of_issue = collections.defaultdict(list)

  users_to_queries = notify_reasons.GetNonOmittedSubscriptions(
      cnxn, self.services, [project.project_id], {})
  # Re-fetch the config so subscription evaluation sees current state.
  config = self.services.config.GetProjectConfig(
      cnxn, project.project_id)
  for issue, old_owner_id in zip(issues, old_owner_ids):
    # Participants are the current owner, the pre-edit owner, and CCs.
    issue_participants = set(
        [tracker_bizobj.GetOwnerId(issue), old_owner_id] +
        tracker_bizobj.GetCcIds(issue))
    # users named in user-value fields that notify.
    for fd in config.field_defs:
      issue_participants.update(
          notify_reasons.ComputeNamedUserIDsToNotify(issue.field_values, fd))
    for user_id in ids_in_issues[issue.local_id]:
      # TODO(jrobbins): implement batch GetUser() for speed.
      if not user_id:
        continue
      auth = authdata.AuthData.FromUserID(
          cnxn, user_id, self.services)
      if (auth.user_pb.notify_issue_change and
          not auth.effective_ids.isdisjoint(issue_participants)):
        ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
      elif (auth.user_pb.notify_starred_issue_change and
            user_id in starrers[issue.local_id]):
        # Skip users who have starred issues that they can no longer view.
        starrer_perms = permissions.GetPermissions(
            auth.user_pb, auth.effective_ids, project)
        granted_perms = tracker_bizobj.GetGrantedPerms(
            issue, auth.effective_ids, config)
        starrer_can_view = permissions.CanViewIssue(
            auth.effective_ids, starrer_perms, project, issue,
            granted_perms=granted_perms)
        if starrer_can_view:
          ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
      logging.info(
          'ids_to_notify_of_issue[%s] = %s',
          user_id,
          [i.local_id for i in ids_to_notify_of_issue.get(user_id, [])])

    # Find all subscribers that should be notified.
    subscribers_to_consider = notify_reasons.EvaluateSubscriptions(
        cnxn, issue, users_to_queries, self.services, config)
    for sub_id in subscribers_to_consider:
      auth = authdata.AuthData.FromUserID(cnxn, sub_id, self.services)
      sub_perms = permissions.GetPermissions(
          auth.user_pb, auth.effective_ids, project)
      granted_perms = tracker_bizobj.GetGrantedPerms(
          issue, auth.effective_ids, config)
      sub_can_view = permissions.CanViewIssue(
          auth.effective_ids, sub_perms, project, issue,
          granted_perms=granted_perms)
      if sub_can_view:
        ids_to_notify_of_issue.setdefault(sub_id, [])
        # Avoid listing the same issue twice for one subscriber.
        if issue not in ids_to_notify_of_issue[sub_id]:
          ids_to_notify_of_issue[sub_id].append(issue)

    if issue in non_private_issues:
      for notify_addr in issue.derived_notify_addrs:
        additional_addrs_to_notify_of_issue[notify_addr].append(issue)

  # 2. Compose an email specifically for each user, and one email to each
  # notify_addr with all the issues that it should be notified of.
  # Start from non-members first, then members to reveal email addresses.
  email_tasks = []
  needed_user_view_ids = [uid for uid in ids_to_notify_of_issue
                          if uid not in users_by_id]
  users_by_id.update(framework_views.MakeAllUserViews(
      cnxn, self.services.user, needed_user_view_ids))
  member_ids_to_notify_of_issue = {}
  non_member_ids_to_notify_of_issue = {}
  member_additional_addrs = {}
  non_member_additional_addrs = {}
  addr_to_addrperm = {}  # {email_address: AddrPerm object}
  all_user_prefs = self.services.user.GetUsersPrefs(
      cnxn, ids_to_notify_of_issue)

  # TODO(jrobbins): Merge ids_to_notify_of_issue entries for linked accounts.

  # Bucket each user into member vs. non-member so emails can be
  # composed with the appropriate email-address visibility.
  for user_id in ids_to_notify_of_issue:
    if not user_id:
      continue  # Don't try to notify NO_USER_SPECIFIED
    if users_by_id[user_id].email in omit_addrs:
      logging.info('Omitting %s', user_id)
      continue
    user_issues = ids_to_notify_of_issue[user_id]
    if not user_issues:
      continue  # user's prefs indicate they don't want these notifications
    auth = authdata.AuthData.FromUserID(
        cnxn, user_id, self.services)
    is_member = bool(framework_bizobj.UserIsInProject(
        project, auth.effective_ids))
    if is_member:
      member_ids_to_notify_of_issue[user_id] = user_issues
    else:
      non_member_ids_to_notify_of_issue[user_id] = user_issues
    addr = users_by_id[user_id].email
    omit_addrs.add(addr)
    addr_to_addrperm[addr] = notify_reasons.AddrPerm(
        is_member, addr, users_by_id[user_id].user,
        notify_reasons.REPLY_NOT_ALLOWED, all_user_prefs[user_id])

  # Bucket the derived notify-addresses the same way.  An address with
  # no corresponding account is treated as a non-member.
  for addr, addr_issues in additional_addrs_to_notify_of_issue.items():
    auth = None
    try:
      auth = authdata.AuthData.FromEmail(cnxn, addr, self.services)
    except:  # pylint: disable=bare-except
      logging.warning('Cannot find user of email %s ', addr)
    if auth:
      is_member = bool(framework_bizobj.UserIsInProject(
          project, auth.effective_ids))
    else:
      is_member = False
    if is_member:
      member_additional_addrs[addr] = addr_issues
    else:
      non_member_additional_addrs[addr] = addr_issues
    omit_addrs.add(addr)
    addr_to_addrperm[addr] = notify_reasons.AddrPerm(
        is_member, addr, None, notify_reasons.REPLY_NOT_ALLOWED, None)

  # Compose non-member emails BEFORE revealing email addresses below,
  # so non-members never see obscured addresses un-obscured.
  for user_id, user_issues in non_member_ids_to_notify_of_issue.items():
    addr = users_by_id[user_id].email
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], user_issues, users_by_id,
        commenter_view, hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify non-member %s (%s) of %s',
                 users_by_id[user_id].email, user_id,
                 [issue.local_id for issue in user_issues])

  for addr, addr_issues in non_member_additional_addrs.items():
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], addr_issues, users_by_id, commenter_view,
        hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify non-member additional addr %s of %s',
                 addr, [addr_issue.local_id for addr_issue in addr_issues])

  # Members are allowed to see full email addresses.
  framework_views.RevealAllEmails(users_by_id)
  commenter_view.RevealEmail()

  for user_id, user_issues in member_ids_to_notify_of_issue.items():
    addr = users_by_id[user_id].email
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], user_issues, users_by_id,
        commenter_view, hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify member %s (%s) of %s',
                 addr, user_id, [issue.local_id for issue in user_issues])

  for addr, addr_issues in member_additional_addrs.items():
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], addr_issues, users_by_id, commenter_view,
        hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify member additional addr %s of %s',
                 addr, [addr_issue.local_id for addr_issue in addr_issues])

  # 4. Add in the project's issue_notify_address. This happens even if it
  # is the same as the commenter's email address (which would be an unusual
  # but valid project configuration). Only issues that any contributor could
  # view are included in emails to the all-issue-activity mailing lists.
  if (project.issue_notify_address
      and project.issue_notify_address not in omit_addrs):
    non_private_issues_live = []
    for issue in issues:
      contributor_could_view = permissions.CanViewIssue(
          set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
          project, issue)
      if contributor_could_view:
        non_private_issues_live.append(issue)

    if non_private_issues_live:
      project_notify_addrperm = notify_reasons.AddrPerm(
          True, project.issue_notify_address, None,
          notify_reasons.REPLY_NOT_ALLOWED, None)
      email = self._FormatBulkIssuesEmail(
          project_notify_addrperm, non_private_issues_live, users_by_id,
          commenter_view, hostport, comment_text, amendments, config, project)
      email_tasks.append(email)
      omit_addrs.add(project.issue_notify_address)
      logging.info('about to bulk notify all-issues %s of %s',
                   project.issue_notify_address,
                   [issue.local_id for issue in non_private_issues])

  return email_tasks
def _CreateIssueSearchDocuments(
    issues, comments_dict, users_by_id, config_dict):
  """Make the GAE search index documents for the given issue batch.

  Args:
    issues: list of issues to index.
    comments_dict: prefetched dictionary of comments on those issues.
    users_by_id: dictionary {user_id: UserView} so that the email
        addresses of users who left comments can be found via search.
    config_dict: dict {project_id: config} for all the projects that
        the given issues are in.
  """
  documents_by_shard = collections.defaultdict(list)
  for issue in issues:
    comments = comments_dict.get(issue.issue_id, [])
    comments = _IndexableComments(comments, users_by_id)
    summary = issue.summary
    # TODO(jrobbins): allow search specifically on explicit vs derived
    # fields.
    owner_id = tracker_bizobj.GetOwnerId(issue)
    owner_email = users_by_id[owner_id].email
    config = config_dict[issue.project_id]
    component_paths = []
    for component_id in issue.component_ids:
      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
      if cd:
        component_paths.append(cd.path)

    field_values = [
        str(tracker_bizobj.GetFieldValue(fv, users_by_id))
        for fv in issue.field_values]
    # All searchable metadata is mashed into one free-text field.
    metadata = '%s %s %s %s %s %s' % (
        tracker_bizobj.GetStatus(issue),
        owner_email,
        [users_by_id[cc_id].email
         for cc_id in tracker_bizobj.GetCcIds(issue)],
        ' '.join(component_paths),
        ' '.join(field_values),
        ' '.join(tracker_bizobj.GetLabels(issue)))
    assert comments, 'issues should always have at least the description'
    # comments[0] is the issue description; the rest are plain comments.
    description = _ExtractCommentText(comments[0], users_by_id)
    description = description[:framework_constants.MAX_FTS_FIELD_SIZE]
    all_comments = ' '.join(
        _ExtractCommentText(c, users_by_id) for c in comments[1:])
    all_comments = all_comments[:framework_constants.MAX_FTS_FIELD_SIZE]

    custom_fields = _BuildCustomFTSFields(issue)
    doc = search.Document(
        doc_id=str(issue.issue_id),
        fields=[
            search.NumberField(name='project_id', value=issue.project_id),
            search.TextField(name='summary', value=summary),
            search.TextField(name='metadata', value=metadata),
            search.TextField(name='description', value=description),
            search.TextField(name='comment', value=all_comments),
        ] + custom_fields)

    shard_id = issue.issue_id % settings.num_logical_shards
    documents_by_shard[shard_id].append(doc)

  start_time = time.time()
  promises = []
  # Use items() rather than the Python 2-only iteritems() for consistency
  # with the rest of this file and Python 3 compatibility; the number of
  # shards is small, so materializing the items is cheap.
  for shard_id, documents in documents_by_shard.items():
    if documents:
      promises.append(
          framework_helpers.Promise(_IndexDocsInShard, shard_id, documents))

  # Index the shards in parallel, then wait for all of them to finish.
  for promise in promises:
    promise.WaitAndGetValue()

  logging.info('Finished %d indexing in shards in %d ms',
               len(documents_by_shard),
               int((time.time() - start_time) * 1000))
def issues_comments_insert(self, request):
  """Add a comment, and optionally apply issue updates, via the API.

  Args:
    request: API request with issueId, projectId, content, sendEmail,
        and an optional `updates` message carrying field changes
        (summary, status, owner, cc, labels, blockedOn, blocking,
        components, mergedInto, moveToProject, fieldValues).

  Returns:
    An api_pb2_v1.IssuesCommentsInsertResponse describing the new
    comment, or an empty response if no comment was created.

  Raises:
    permissions.PermissionException: the caller may not comment on the
        issue, or may not merge into the target issue.
    endpoints.BadRequestException: the move target or field values are
        invalid.
  """
  mar = self.mar_factory(request)
  issue = self._services.issue.GetIssueByLocalID(
      mar.cnxn, mar.project_id, request.issueId)
  # Remember the pre-edit owner so the notification can mention it.
  old_owner_id = tracker_bizobj.GetOwnerId(issue)
  if not permissions.CanCommentIssue(
      mar.auth.effective_ids, mar.perms, mar.project, issue,
      mar.granted_perms):
    raise permissions.PermissionException(
        'User is not allowed to comment this issue (%s, %d)' %
        (request.projectId, request.issueId))

  # Translate the request's `updates` message into the keyword values
  # that DeltaUpdateIssue() expects.
  updates_dict = {}
  if request.updates:
    if request.updates.moveToProject:
      move_to = request.updates.moveToProject.lower()
      move_to_project = issuedetail.CheckMoveIssueRequest(
          self._services, mar, issue, True, move_to, mar.errors)
      if mar.errors.AnyErrors():
        raise endpoints.BadRequestException(mar.errors.move_to)
      updates_dict['move_to_project'] = move_to_project

    updates_dict['summary'] = request.updates.summary
    updates_dict['status'] = request.updates.status
    if request.updates.owner:
      if request.updates.owner == framework_constants.NO_USER_NAME:
        updates_dict['owner'] = framework_constants.NO_USER_SPECIFIED
      else:
        updates_dict['owner'] = self._services.user.LookupUserID(
            mar.cnxn, request.updates.owner)
    # CC changes arrive as one list with "-" prefixes marking removals.
    updates_dict['cc_add'], updates_dict['cc_remove'] = (
        api_pb2_v1_helpers.split_remove_add(request.updates.cc))
    # autocreate=True only for additions: new accounts may be created
    # for added CCs, but removals must reference existing accounts.
    updates_dict['cc_add'] = self._services.user.LookupUserIDs(
        mar.cnxn, updates_dict['cc_add'], autocreate=True).values()
    updates_dict['cc_remove'] = self._services.user.LookupUserIDs(
        mar.cnxn, updates_dict['cc_remove']).values()
    updates_dict['labels_add'], updates_dict['labels_remove'] = (
        api_pb2_v1_helpers.split_remove_add(request.updates.labels))
    blocked_on_add_strs, blocked_on_remove_strs = (
        api_pb2_v1_helpers.split_remove_add(request.updates.blockedOn))
    updates_dict['blocked_on_add'] = api_pb2_v1_helpers.issue_global_ids(
        blocked_on_add_strs, issue.project_id, mar, self._services)
    updates_dict['blocked_on_remove'] = api_pb2_v1_helpers.issue_global_ids(
        blocked_on_remove_strs, issue.project_id, mar, self._services)
    blocking_add_strs, blocking_remove_strs = (
        api_pb2_v1_helpers.split_remove_add(request.updates.blocking))
    updates_dict['blocking_add'] = api_pb2_v1_helpers.issue_global_ids(
        blocking_add_strs, issue.project_id, mar, self._services)
    updates_dict['blocking_remove'] = api_pb2_v1_helpers.issue_global_ids(
        blocking_remove_strs, issue.project_id, mar, self._services)
    components_add_strs, components_remove_strs = (
        api_pb2_v1_helpers.split_remove_add(request.updates.components))
    updates_dict['components_add'] = (
        api_pb2_v1_helpers.convert_component_ids(
            mar.config, components_add_strs))
    updates_dict['components_remove'] = (
        api_pb2_v1_helpers.convert_component_ids(
            mar.config, components_remove_strs))
    if request.updates.mergedInto:
      # mergedInto may name an issue in another project; default to the
      # current project when no project name is given in the ref.
      merge_project_name, merge_local_id = tracker_bizobj.ParseIssueRef(
          request.updates.mergedInto)
      merge_into_project = self._services.project.GetProjectByName(
          mar.cnxn, merge_project_name or issue.project_name)
      merge_into_issue = self._services.issue.GetIssueByLocalID(
          mar.cnxn, merge_into_project.project_id, merge_local_id)
      merge_allowed = tracker_helpers.IsMergeAllowed(
          merge_into_issue, mar, self._services)
      if not merge_allowed:
        raise permissions.PermissionException(
          'User is not allowed to merge into issue %s:%s' %
          (merge_into_issue.project_name, merge_into_issue.local_id))
      updates_dict['merged_into'] = merge_into_issue.issue_id
    (updates_dict['field_vals_add'], updates_dict['field_vals_remove'],
     updates_dict['fields_clear'], updates_dict['fields_labels_add'],
     updates_dict['fields_labels_remove']) = (
        api_pb2_v1_helpers.convert_field_values(
            request.updates.fieldValues, mar, self._services))

  field_helpers.ValidateCustomFields(
      mar, self._services,
      (updates_dict.get('field_vals_add', []) +
       updates_dict.get('field_vals_remove', [])),
      mar.config, mar.errors)
  if mar.errors.AnyErrors():
    raise endpoints.BadRequestException(
        'Invalid field values: %s' % mar.errors.custom_fields)

  # Apply all edits and create the comment in one delta update.
  # index_now=False: the issue is re-indexed explicitly further below,
  # after any move/merge side effects.
  _, comment = self._services.issue.DeltaUpdateIssue(
      cnxn=mar.cnxn, services=self._services,
      reporter_id=mar.auth.user_id,
      project_id=mar.project_id, config=mar.config, issue=issue,
      status=updates_dict.get('status'),
      owner_id=updates_dict.get('owner'),
      cc_add=updates_dict.get('cc_add', []),
      cc_remove=updates_dict.get('cc_remove', []),
      comp_ids_add=updates_dict.get('components_add', []),
      comp_ids_remove=updates_dict.get('components_remove', []),
      labels_add=(updates_dict.get('labels_add', []) +
                  updates_dict.get('fields_labels_add', [])),
      labels_remove=(updates_dict.get('labels_remove', []) +
                     updates_dict.get('fields_labels_remove', [])),
      field_vals_add=updates_dict.get('field_vals_add', []),
      field_vals_remove=updates_dict.get('field_vals_remove', []),
      fields_clear=updates_dict.get('fields_clear', []),
      blocked_on_add=updates_dict.get('blocked_on_add', []),
      blocked_on_remove=updates_dict.get('blocked_on_remove', []),
      blocking_add=updates_dict.get('blocking_add', []),
      blocking_remove=updates_dict.get('blocking_remove', []),
      merged_into=updates_dict.get('merged_into'),
      index_now=False,
      comment=request.content,
      summary=updates_dict.get('summary'),
      )

  move_comment = None
  if 'move_to_project' in updates_dict:
    move_to_project = updates_dict['move_to_project']
    # Capture the old ref before MoveIssues() rewrites the issue's
    # project_name/local_id, then compute the new ref afterwards.
    old_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
    tracker_fulltext.UnindexIssues([issue.issue_id])
    moved_back_iids = self._services.issue.MoveIssues(
        mar.cnxn, move_to_project, [issue], self._services.user)
    new_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
    if issue.issue_id in moved_back_iids:
      content = 'Moved %s back to %s again.' % (old_text_ref, new_text_ref)
    else:
      content = 'Moved %s to now be %s.' % (old_text_ref, new_text_ref)
    move_comment = self._services.issue.CreateIssueComment(
        mar.cnxn, move_to_project.project_id, issue.local_id,
        mar.auth.user_id, content, amendments=[
            tracker_bizobj.MakeProjectAmendment(
                move_to_project.project_name)])

  if 'merged_into' in updates_dict:
    # merge_into_issue/merge_into_project were bound in the mergedInto
    # branch above; this block only runs when that branch did.
    new_starrers = tracker_helpers.GetNewIssueStarrers(
        mar.cnxn, self._services, issue.issue_id, merge_into_issue.issue_id)
    tracker_helpers.AddIssueStarrers(
        mar.cnxn, self._services, mar,
        merge_into_issue.issue_id, merge_into_project, new_starrers)
    _merge_comment = tracker_helpers.MergeCCsAndAddComment(
        self._services, mar, issue, merge_into_project, merge_into_issue)
    merge_into_issue_cmnts = self._services.issue.GetCommentsForIssue(
        mar.cnxn, merge_into_issue.issue_id)
    notify.PrepareAndSendIssueChangeNotification(
        merge_into_issue.issue_id, framework_helpers.GetHostPort(),
        mar.auth.user_id, len(merge_into_issue_cmnts) - 1, send_email=True)

  tracker_fulltext.IndexIssues(
      mar.cnxn, [issue], self._services.user, self._services.issue,
      self._services.config)

  # DeltaUpdateIssue may return no comment (no-op edit); in that case
  # fall back to the move comment, if any.
  comment = comment or move_comment
  if comment is None:
    return api_pb2_v1.IssuesCommentsInsertResponse()
  cmnts = self._services.issue.GetCommentsForIssue(mar.cnxn, issue.issue_id)
  seq = len(cmnts) - 1  # sequence number of the newest comment

  if request.sendEmail:
    notify.PrepareAndSendIssueChangeNotification(
        issue.issue_id, framework_helpers.GetHostPort(), comment.user_id,
        seq, send_email=True, old_owner_id=old_owner_id)

  can_delete = permissions.CanDelete(
      mar.auth.user_id, mar.auth.effective_ids, mar.perms,
      comment.deleted_by, comment.user_id, mar.project,
      permissions.GetRestrictions(issue), granted_perms=mar.granted_perms)
  return api_pb2_v1.IssuesCommentsInsertResponse(
      id=seq,
      kind='monorail#issueComment',
      author=api_pb2_v1_helpers.convert_person(
          comment.user_id, mar.cnxn, self._services),
      content=comment.content,
      published=datetime.datetime.fromtimestamp(comment.timestamp),
      updates=api_pb2_v1_helpers.convert_amendments(
          issue, comment.amendments, mar, self._services),
      canDelete=can_delete)
def StoreIssueSnapshots(self, cnxn, issues, commit=True):
  """Adds an IssueSnapshot and updates the previous one for each issue.

  For each issue, the latest existing snapshot (if any) is closed by
  setting its period_end to now, then a new snapshot row is inserted
  along with its label, CC, component, and hotlist join rows.

  Args:
    cnxn: connection to SQL database.
    issues: list of Issue PBs to snapshot.
    commit: whether to commit each statement immediately; pass False to
        let the caller commit the whole batch.
  """
  for issue in issues:
    right_now = self._currentTime()

    # Look for an existing (latest) IssueSnapshot with this issue_id.
    previous_snapshots = self.issuesnapshot_tbl.Select(
        cnxn, cols=ISSUESNAPSHOT_COLS,
        issue_id=issue.issue_id,
        limit=1,
        order_by=[('period_start DESC', [])])

    if len(previous_snapshots) > 0:
      previous_snapshot_id = previous_snapshots[0][0]
      logging.info('Found previous IssueSnapshot with id: %s',
                   previous_snapshot_id)

      # Update previous snapshot's end time to right now.
      delta = {'period_end': right_now}
      where = [('IssueSnapshot.id = %s', [previous_snapshot_id])]
      self.issuesnapshot_tbl.Update(cnxn, delta, commit=commit, where=where)

    config = self.config_service.GetProjectConfig(cnxn, issue.project_id)
    # The new snapshot stays "open-ended" until the next snapshot
    # closes it, so use the sentinel maximum period_end.
    period_end = settings.maximum_snapshot_period_end
    is_open = tracker_helpers.MeansOpenInProject(
        tracker_bizobj.GetStatus(issue), config)
    shard = issue.issue_id % settings.num_logical_shards
    status = tracker_bizobj.GetStatus(issue)
    # `or None` maps 0 / empty lookups to SQL NULL.
    status_id = self.config_service.LookupStatusID(
        cnxn, issue.project_id, status) or None
    owner_id = tracker_bizobj.GetOwnerId(issue) or None

    issuesnapshot_rows = [(issue.issue_id, shard, issue.project_id,
        issue.local_id, issue.reporter_id, owner_id, status_id, right_now,
        period_end, is_open)]

    # ISSUESNAPSHOT_COLS[1:] skips the auto-generated id column.
    ids = self.issuesnapshot_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT_COLS[1:],
        issuesnapshot_rows,
        replace=True, commit=commit,
        return_generated_ids=True)
    issuesnapshot_id = ids[0]

    # Add all labels to IssueSnapshot2Label.
    label_rows = [
        (issuesnapshot_id,
         self.config_service.LookupLabelID(cnxn, issue.project_id, label))
        for label in tracker_bizobj.GetLabels(issue)
    ]
    self.issuesnapshot2label_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2LABEL_COLS,
        label_rows, replace=True, commit=commit)

    # Add all CCs to IssueSnapshot2Cc.
    cc_rows = [
        (issuesnapshot_id, cc_id)
        for cc_id in tracker_bizobj.GetCcIds(issue)
    ]
    self.issuesnapshot2cc_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2CC_COLS,
        cc_rows,
        replace=True, commit=commit)

    # Add all components to IssueSnapshot2Component.
    component_rows = [
        (issuesnapshot_id, component_id)
        for component_id in issue.component_ids
    ]
    self.issuesnapshot2component_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2COMPONENT_COLS,
        component_rows,
        replace=True, commit=commit)

    # Add all hotlists containing this issue to IssueSnapshot2Hotlist.
    # This is raw SQL to obviate passing FeaturesService down through
    #   the call stack wherever this function is called.
    # TODO(jrobbins): sort out dependencies between service classes.
    cnxn.Execute('''
        INSERT INTO IssueSnapshot2Hotlist (issuesnapshot_id, hotlist_id)
        SELECT %s, hotlist_id FROM Hotlist2Issue WHERE issue_id = %s
    ''', [issuesnapshot_id, issue.issue_id])