def testGetCcIds(self):
  """GetCcIds lists explicit CC user IDs first, then derived CC IDs."""
  issue = tracker_pb2.Issue()
  # An issue with no CCs at all yields an empty list.
  # Note: assertEquals is a deprecated alias (removed in Python 3.12);
  # use assertEqual instead.
  self.assertEqual(tracker_bizobj.GetCcIds(issue), [])
  issue.derived_cc_ids.extend([1, 2, 3])
  self.assertEqual(tracker_bizobj.GetCcIds(issue), [1, 2, 3])
  issue.cc_ids.extend([4, 5, 6])
  # Explicit CCs come before derived CCs in the combined list.
  self.assertEqual(tracker_bizobj.GetCcIds(issue), [4, 5, 6, 1, 2, 3])
def EvaluateSubscriptions( cnxn, issue, users_to_queries, services, config): """Determine subscribers who have subs that match the given issue.""" # Note: unlike filter rule, subscriptions see explicit & derived values. lower_labels = [lab.lower() for lab in tracker_bizobj.GetLabels(issue)] label_set = set(lower_labels) subscribers_to_notify = [] for uid, saved_queries in users_to_queries.items(): for sq in saved_queries: if sq.subscription_mode != 'immediate': continue if issue.project_id not in sq.executes_in_project_ids: continue cond = savedqueries_helpers.SavedQueryToCond(sq) # TODO(jrobbins): Support linked accounts me_user_ids. cond, _warnings = searchpipeline.ReplaceKeywordsWithUserIDs([uid], cond) cond_ast = query2ast.ParseUserQuery( cond, '', query2ast.BUILTIN_ISSUE_FIELDS, config) if filterrules_helpers.EvalPredicate( cnxn, services, cond_ast, issue, label_set, config, tracker_bizobj.GetOwnerId(issue), tracker_bizobj.GetCcIds(issue), tracker_bizobj.GetStatus(issue)): subscribers_to_notify.append(uid) break # Don't bother looking at the user's other saved quereies. return subscribers_to_notify
def CanViewRestrictedIssueInVisibleProject(
    effective_ids, perms, project, issue, granted_perms=None):
  """Return True if the user can view this issue. Assumes project is OK."""
  # Issue participants (reporter, owner, and CC'd users) can always see the
  # issue; in effect, these fields override artifact restriction labels.
  if effective_ids:
    participant_ids = set(tracker_bizobj.GetCcIds(issue))
    participant_ids.add(issue.reporter_id)
    participant_ids.add(tracker_bizobj.GetOwnerId(issue))
    if not participant_ids.isdisjoint(effective_ids):
      return True

  # Fall back to the normal restriction-label permission check.
  return CanView(
      effective_ids, perms, project, GetRestrictions(issue),
      granted_perms=granted_perms)
def ExtractUniqueValues(columns, artifact_list, users_by_id,
                        config, related_issues, hotlist_context_dict=None):
  """Build a nested list of unique values so the user can auto-filter.

  Args:
    columns: a list of lowercase column name strings, which may contain
        combined columns like "priority/pri".
    artifact_list: a list of artifacts in the complete set of search results.
    users_by_id: dict mapping user_ids to UserViews.
    config: ProjectIssueConfig PB for the current project.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    hotlist_context_dict: dict for building a hotlist grid table

  Returns:
    [EZTItem(col1, colname1, [val11, val12,...]), ...]
    A list of EZTItems, each of which has a col_index, column_name,
    and a list of unique values that appear in that column.
  """
  # Each column maps {lowercased_value: original_value} to dedupe while
  # preserving one representative spelling for display.
  column_values = {col_name: {} for col_name in columns}

  # For each combined column "a/b/c", add entries that point from "a" back
  # to "a/b/c", from "b" back to "a/b/c", and from "c" back to "a/b/c".
  combined_column_parts = collections.defaultdict(list)
  for col in columns:
    if '/' in col:
      for col_part in col.split('/'):
        combined_column_parts[col_part].append(col)

  unique_labels = set()
  for art in artifact_list:
    unique_labels.update(tracker_bizobj.GetLabels(art))

  for label in unique_labels:
    if '-' in label:
      # Key-value label, e.g. "Priority-High" contributes "High" to the
      # "priority" column (and to any combined column containing it).
      col, val = label.split('-', 1)
      col = col.lower()
      if col in column_values:
        column_values[col][val.lower()] = val
      if col in combined_column_parts:
        for combined_column in combined_column_parts[col]:
          column_values[combined_column][val.lower()] = val
    else:
      # One-word labels are shown in the Summary column.
      if 'summary' in column_values:
        column_values['summary'][label.lower()] = label

  # TODO(jrobbins): Consider refactoring some of this to tracker_bizobj
  # or a new builtins.py to reduce duplication.
  if 'reporter' in column_values:
    for art in artifact_list:
      reporter_id = art.reporter_id
      if reporter_id and reporter_id in users_by_id:
        reporter_username = users_by_id[reporter_id].display_name
        column_values['reporter'][reporter_username] = reporter_username

  if 'owner' in column_values:
    for art in artifact_list:
      owner_id = tracker_bizobj.GetOwnerId(art)
      if owner_id and owner_id in users_by_id:
        owner_username = users_by_id[owner_id].display_name
        column_values['owner'][owner_username] = owner_username

  if 'cc' in column_values:
    for art in artifact_list:
      cc_ids = tracker_bizobj.GetCcIds(art)
      for cc_id in cc_ids:
        if cc_id and cc_id in users_by_id:
          cc_username = users_by_id[cc_id].display_name
          column_values['cc'][cc_username] = cc_username

  if 'component' in column_values:
    for art in artifact_list:
      # Both explicit and derived components are shown.
      all_comp_ids = list(art.component_ids) + list(art.derived_component_ids)
      for component_id in all_comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(component_id, config)
        if cd:
          column_values['component'][cd.path] = cd.path

  if 'stars' in column_values:
    for art in artifact_list:
      star_count = art.star_count
      column_values['stars'][star_count] = star_count

  if 'status' in column_values:
    for art in artifact_list:
      status = tracker_bizobj.GetStatus(art)
      if status:
        column_values['status'][status.lower()] = status

  if 'project' in column_values:
    for art in artifact_list:
      project_name = art.project_name
      column_values['project'][project_name] = project_name

  if 'mergedinto' in column_values:
    for art in artifact_list:
      if art.merged_into and art.merged_into != 0:
        merged_issue = related_issues[art.merged_into]
        merged_issue_ref = tracker_bizobj.FormatIssueRef(
            (merged_issue.project_name, merged_issue.local_id))
        column_values['mergedinto'][merged_issue_ref] = merged_issue_ref

  if 'blocked' in column_values:
    for art in artifact_list:
      if art.blocked_on_iids:
        column_values['blocked']['is_blocked'] = 'Yes'
      else:
        column_values['blocked']['is_not_blocked'] = 'No'

  if 'blockedon' in column_values:
    for art in artifact_list:
      if art.blocked_on_iids:
        for blocked_on_iid in art.blocked_on_iids:
          blocked_on_issue = related_issues[blocked_on_iid]
          blocked_on_ref = tracker_bizobj.FormatIssueRef(
              (blocked_on_issue.project_name, blocked_on_issue.local_id))
          column_values['blockedon'][blocked_on_ref] = blocked_on_ref

  if 'blocking' in column_values:
    for art in artifact_list:
      if art.blocking_iids:
        for blocking_iid in art.blocking_iids:
          blocking_issue = related_issues[blocking_iid]
          blocking_ref = tracker_bizobj.FormatIssueRef(
              (blocking_issue.project_name, blocking_issue.local_id))
          column_values['blocking'][blocking_ref] = blocking_ref

  # The following columns come from the hotlist context, not the issue.
  if 'added' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        date_added = issue_dict['date_added']
        column_values['added'][date_added] = date_added

  if 'adder' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        adder_id = issue_dict['adder_id']
        adder = users_by_id[adder_id].display_name
        column_values['adder'][adder] = adder

  if 'note' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        note = issue_dict['note']
        if issue_dict['note']:
          column_values['note'][note] = note

  if 'attachments' in column_values:
    for art in artifact_list:
      attachment_count = art.attachment_count
      column_values['attachments'][attachment_count] = attachment_count

  # Add all custom field values if the custom field name is a shown column.
  # field_id_to_col caches {field_id: (column_name, field_type)}, with the
  # sentinel 'NOT_SHOWN' marking fields that are unknown or not displayed.
  field_id_to_col = {}
  for art in artifact_list:
    for fv in art.field_values:
      field_col, field_type = field_id_to_col.get(
          fv.field_id, (None, None))
      if field_col == 'NOT_SHOWN':
        continue
      if field_col is None:
        # First time we see this field_id: resolve and cache it.
        fd = tracker_bizobj.FindFieldDefByID(fv.field_id, config)
        if not fd:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_col = fd.field_name.lower()
        field_type = fd.field_type
        if field_col not in column_values:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_id_to_col[fv.field_id] = field_col, field_type
      if field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
        continue  # Already handled by label parsing
      elif field_type == tracker_pb2.FieldTypes.INT_TYPE:
        val = fv.int_value
      elif field_type == tracker_pb2.FieldTypes.STR_TYPE:
        val = fv.str_value
      elif field_type == tracker_pb2.FieldTypes.USER_TYPE:
        user = users_by_id.get(fv.user_id)
        val = user.email if user else framework_constants.NO_USER_NAME
      elif field_type == tracker_pb2.FieldTypes.DATE_TYPE:
        val = fv.int_value  # TODO(jrobbins): convert to date
      elif field_type == tracker_pb2.FieldTypes.BOOL_TYPE:
        val = 'Yes' if fv.int_value else 'No'
      column_values[field_col][val] = val

  # TODO(jrobbins): make the capitalization of well-known unique label and
  # status values match the way it is written in the issue config.

  # Return EZTItems for each column in left-to-right display order.
  result = []
  for i, col_name in enumerate(columns):
    # TODO(jrobbins): sort each set of column values top-to-bottom, by the
    # order specified in the project artifact config. For now, just sort
    # lexicographically to make expected output defined.
    sorted_col_values = sorted(column_values[col_name].values())
    result.append(template_helpers.EZTItem(
        col_index=i, column_name=col_name, filter_values=sorted_col_values))

  return result
def GetArtifactAttr(
    art, attribute_name, users_by_id, label_attr_values_dict,
    config, related_issues, hotlist_issue_context=None):
  """Return the requested attribute values of the given artifact.

  Args:
    art: a tracked artifact with labels, local_id, summary, stars, and owner.
    attribute_name: lowercase string name of attribute to get.
    users_by_id: dictionary of UserViews already created.
    label_attr_values_dict: dictionary {'key': [value, ...], }.
    config: ProjectIssueConfig PB for the current project.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    hotlist_issue_context: dict of {hotlist_issue_field: field_value,..}

  Returns:
    A list of string attribute values, or [framework_constants.NO_VALUES]
    if the artifact has no value for that attribute.
  """
  # '--' is the placeholder column name: no values.
  if attribute_name == '--':
    return []
  # Simple built-in attributes read directly from the artifact.
  if attribute_name == 'id':
    return [art.local_id]
  if attribute_name == 'summary':
    return [art.summary]
  if attribute_name == 'status':
    return [tracker_bizobj.GetStatus(art)]
  if attribute_name == 'stars':
    return [art.star_count]
  if attribute_name == 'attachments':
    return [art.attachment_count]
  # TODO(jrobbins): support blocking
  if attribute_name == 'project':
    return [art.project_name]
  if attribute_name == 'mergedinto':
    if art.merged_into and art.merged_into != 0:
      # merged_into is an issue_id; format it as "project:local_id".
      return [tracker_bizobj.FormatIssueRef((
          related_issues[art.merged_into].project_name,
          related_issues[art.merged_into].local_id))]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'blocked':
    return ['Yes' if art.blocked_on_iids else 'No']
  if attribute_name == 'blockedon':
    if not art.blocked_on_iids:
      return [framework_constants.NO_VALUES]
    else:
      return [
          tracker_bizobj.FormatIssueRef((
              related_issues[blocked_on_iid].project_name,
              related_issues[blocked_on_iid].local_id))
          for blocked_on_iid in art.blocked_on_iids]
  # 'adder' and 'added' only exist in a hotlist context.
  if attribute_name == 'adder':
    if hotlist_issue_context:
      adder_id = hotlist_issue_context['adder_id']
      return [users_by_id[adder_id].display_name]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'added':
    if hotlist_issue_context:
      return [hotlist_issue_context['date_added']]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'reporter':
    return [users_by_id[art.reporter_id].display_name]
  if attribute_name == 'owner':
    owner_id = tracker_bizobj.GetOwnerId(art)
    if not owner_id:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[owner_id].display_name]
  if attribute_name == 'cc':
    cc_ids = tracker_bizobj.GetCcIds(art)
    if not cc_ids:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[cc_id].display_name for cc_id in cc_ids]
  if attribute_name == 'component':
    # Both explicit and derived components are included.
    comp_ids = list(art.component_ids) + list(art.derived_component_ids)
    if not comp_ids:
      return [framework_constants.NO_VALUES]
    else:
      paths = []
      for comp_id in comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(comp_id, config)
        if cd:
          paths.append(cd.path)
      return paths

  # Check to see if it is a field. Process as field only if it is not an enum
  # type because enum types are stored as key-value labels.
  fd = tracker_bizobj.FindFieldDef(attribute_name, config)
  if fd and fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE:
    values = []
    for fv in art.field_values:
      if fv.field_id == fd.field_id:
        value = tracker_bizobj.GetFieldValueWithRawValue(
            fd.field_type, fv, users_by_id, None)
        values.append(value)
    return values

  # Since it is not a built-in attribute or a field, it must be a key-value
  # label.
  return label_attr_values_dict.get(
      attribute_name, [framework_constants.NO_VALUES])
def _CreateIssueSearchDocuments(
    issues, comments_dict, users_by_id, config_dict):
  """Make the GAE search index documents for the given issue batch.

  Args:
    issues: list of issues to index.
    comments_dict: prefetched dictionary of comments on those issues.
    users_by_id: dictionary {user_id: UserView} so that the email
        addresses of users who left comments can be found via search.
    config_dict: dict {project_id: config} for all the projects that
        the given issues are in.
  """
  documents_by_shard = collections.defaultdict(list)
  for issue in issues:
    summary = issue.summary
    # TODO(jrobbins): allow search specifically on explicit vs derived
    # fields.
    owner_id = tracker_bizobj.GetOwnerId(issue)
    owner_email = users_by_id[owner_id].email
    config = config_dict[issue.project_id]
    component_paths = []
    for component_id in issue.component_ids:
      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
      if cd:
        component_paths.append(cd.path)

    field_values = [tracker_bizobj.GetFieldValue(fv, users_by_id)
                    for fv in issue.field_values]
    # Convert to string only the values that are not strings already.
    # This is done because the default encoding in appengine seems to be
    # 'ascii' and string values might contain unicode characters, so str
    # will fail to encode them.
    field_values = [value if isinstance(value, string_types) else str(value)
                    for value in field_values]

    # One flat text field holding status, owner, CC emails, components,
    # field values, and labels, so a single query can match any of them.
    metadata = '%s %s %s %s %s %s' % (
        tracker_bizobj.GetStatus(issue),
        owner_email,
        [users_by_id[cc_id].email
         for cc_id in tracker_bizobj.GetCcIds(issue)],
        ' '.join(component_paths),
        ' '.join(field_values),
        ' '.join(tracker_bizobj.GetLabels(issue)))

    custom_fields = _BuildCustomFTSFields(issue)

    # Budget the remaining indexable characters for comment text.
    comments = comments_dict.get(issue.issue_id, [])
    room_for_comments = (framework_constants.MAX_FTS_FIELD_SIZE -
                         len(summary) -
                         len(metadata) -
                         sum(len(cf.value) for cf in custom_fields))
    comments = _IndexableComments(
        comments, users_by_id, remaining_chars=room_for_comments)
    logging.info('len(comments) is %r', len(comments))
    if comments:
      # The first comment is the issue description; the rest are comments.
      description = _ExtractCommentText(comments[0], users_by_id)
      description = description[:framework_constants.MAX_FTS_FIELD_SIZE]
      all_comments = ' '.join(
          _ExtractCommentText(c, users_by_id) for c in comments[1:])
      all_comments = all_comments[:framework_constants.MAX_FTS_FIELD_SIZE]
    else:
      description = ''
      all_comments = ''
      logging.info(
          'Issue %s:%r has zero indexable comments',
          issue.project_name, issue.local_id)

    logging.info('Building document for %s:%d',
                 issue.project_name, issue.local_id)
    logging.info('len(summary) = %d', len(summary))
    logging.info('len(metadata) = %d', len(metadata))
    logging.info('len(description) = %d', len(description))
    logging.info('len(comment) = %d', len(all_comments))
    for cf in custom_fields:
      logging.info('len(%s) = %d', cf.name, len(cf.value))

    doc = search.Document(
        doc_id=str(issue.issue_id),
        fields=[
            search.NumberField(name='project_id', value=issue.project_id),
            search.TextField(name='summary', value=summary),
            search.TextField(name='metadata', value=metadata),
            search.TextField(name='description', value=description),
            search.TextField(name='comment', value=all_comments),
        ] + custom_fields)
    shard_id = issue.issue_id % settings.num_logical_shards
    documents_by_shard[shard_id].append(doc)

  # Index each shard's documents in parallel via Promises.
  start_time = time.time()
  promises = []
  for shard_id, documents in documents_by_shard.items():
    if documents:
      promises.append(
          framework_helpers.Promise(_IndexDocsInShard, shard_id, documents))
  for promise in promises:
    promise.WaitAndGetValue()

  logging.info('Finished %d indexing in shards in %d ms',
               len(documents_by_shard),
               int((time.time() - start_time) * 1000))
def UpdateIssuePermissions(
    perms, project, issue, effective_ids, granted_perms=None, config=None):
  """Update the PermissionSet for a specific issue.

  Take into account granted permissions and label restrictions to filter
  the permissions, and updates the VIEW and EDIT_ISSUE permissions
  depending on the role of the user in the issue (i.e. owner, reporter, cc
  or approver).

  Args:
    perms: The PermissionSet to update.
    project: The Project PB for the issue project.
    issue: The Issue PB.
    effective_ids: Set of int user IDs for the current user and all user
        groups that s/he is a member of.  This will be an empty set for
        anonymous users.
    granted_perms: optional list of strings of permissions that the user is
        granted only within the scope of one issue, e.g., by being named in
        a user-type custom field that grants permissions.
    config: optional ProjectIssueConfig PB where granted perms should be
        extracted from, if granted_perms is not given.
  """
  if config:
    granted_perms = tracker_bizobj.GetGrantedPerms(
        issue, effective_ids, config)
  elif granted_perms is None:
    granted_perms = []

  # If the user has no permission to view the project, it has no permissions
  # on this issue.
  if not perms.HasPerm(VIEW, None, None):
    return EMPTY_PERMISSIONSET

  # Compute the restrictions for the given issue and store them in a
  # dictionary of {perm: set(needed_perms)}.
  restrictions = collections.defaultdict(set)
  if perms.consider_restrictions:
    for label in GetRestrictions(issue):
      label = label.lower()
      # format: Restrict-Action-ToThisPerm
      _, requested_perm, needed_perm = label.split('-', 2)
      restrictions[requested_perm.lower()].add(needed_perm.lower())

  # Store the user permissions, and the extra permissions of all effective
  # IDs in the given project.
  all_perms = set(perms.perm_names)
  for effective_id in effective_ids:
    all_perms.update(p.lower() for p in GetExtraPerms(project, effective_id))

  # And filter them applying the restriction labels.  A permission is
  # restricted away only if none of its needed perms are held or granted.
  filtered_perms = set()
  for perm_name in all_perms:
    perm_name = perm_name.lower()
    restricted = any(
        restriction not in all_perms and restriction not in granted_perms
        for restriction in restrictions.get(perm_name, []))
    if not restricted:
      filtered_perms.add(perm_name)

  # Add any granted permissions.
  filtered_perms.update(granted_perms)

  # The VIEW perm might have been removed due to restrictions, but the issue
  # owner, reporter, cc'd users, and approvers can always view the issue.
  allowed_ids = set(
      tracker_bizobj.GetCcIds(issue) +
      tracker_bizobj.GetApproverIds(issue) +
      [issue.reporter_id, tracker_bizobj.GetOwnerId(issue)])
  if effective_ids and not allowed_ids.isdisjoint(effective_ids):
    filtered_perms.add(VIEW.lower())

  # If the issue is deleted, only the VIEW and DELETE_ISSUE permissions are
  # relevant.
  if issue.deleted:
    if VIEW.lower() not in filtered_perms:
      return EMPTY_PERMISSIONSET
    if DELETE_ISSUE.lower() in filtered_perms:
      return PermissionSet([VIEW, DELETE_ISSUE], perms.consider_restrictions)
    return PermissionSet([VIEW], perms.consider_restrictions)

  # The EDIT_ISSUE permission might have been removed due to restrictions,
  # but the owner has always permission to edit it.
  if effective_ids and tracker_bizobj.GetOwnerId(issue) in effective_ids:
    filtered_perms.add(EDIT_ISSUE.lower())

  return PermissionSet(filtered_perms, perms.consider_restrictions)
def _BulkEditEmailTasks(
    self, cnxn, issues, old_owner_ids, omit_addrs, project,
    non_private_issues, users_by_id, ids_in_issues, starrers,
    commenter_view, hostport, comment_text, amendments, config):
  """Generate Email PBs to notify interested users after a bulk edit."""
  # 1. Get the user IDs of everyone who could be notified,
  # and make all their user proxies. Also, build a dictionary
  # of all the users to notify and the issues that they are
  # interested in.  Also, build a dictionary of additional email
  # addresses to notify and the issues to notify them of.
  # NOTE(review): the users_by_id and config parameters are immediately
  # rebound here, so the passed-in values are never read — confirm intended.
  users_by_id = {}
  ids_to_notify_of_issue = {}
  additional_addrs_to_notify_of_issue = collections.defaultdict(list)

  users_to_queries = notify_reasons.GetNonOmittedSubscriptions(
      cnxn, self.services, [project.project_id], {})
  config = self.services.config.GetProjectConfig(
      cnxn, project.project_id)
  for issue, old_owner_id in zip(issues, old_owner_ids):
    # Participants: current owner, previous owner, and all CC'd users.
    issue_participants = set(
        [tracker_bizobj.GetOwnerId(issue), old_owner_id] +
        tracker_bizobj.GetCcIds(issue))
    # users named in user-value fields that notify.
    for fd in config.field_defs:
      issue_participants.update(
          notify_reasons.ComputeNamedUserIDsToNotify(issue.field_values, fd))
    for user_id in ids_in_issues[issue.local_id]:
      # TODO(jrobbins): implement batch GetUser() for speed.
      if not user_id:
        continue
      auth = authdata.AuthData.FromUserID(cnxn, user_id, self.services)
      if (auth.user_pb.notify_issue_change and
          not auth.effective_ids.isdisjoint(issue_participants)):
        ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
      elif (auth.user_pb.notify_starred_issue_change and
            user_id in starrers[issue.local_id]):
        # Skip users who have starred issues that they can no longer view.
        starrer_perms = permissions.GetPermissions(
            auth.user_pb, auth.effective_ids, project)
        granted_perms = tracker_bizobj.GetGrantedPerms(
            issue, auth.effective_ids, config)
        starrer_can_view = permissions.CanViewIssue(
            auth.effective_ids, starrer_perms, project, issue,
            granted_perms=granted_perms)
        if starrer_can_view:
          ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
      logging.info(
          'ids_to_notify_of_issue[%s] = %s', user_id,
          [i.local_id for i in ids_to_notify_of_issue.get(user_id, [])])

    # Find all subscribers that should be notified.
    subscribers_to_consider = notify_reasons.EvaluateSubscriptions(
        cnxn, issue, users_to_queries, self.services, config)
    for sub_id in subscribers_to_consider:
      auth = authdata.AuthData.FromUserID(cnxn, sub_id, self.services)
      sub_perms = permissions.GetPermissions(
          auth.user_pb, auth.effective_ids, project)
      granted_perms = tracker_bizobj.GetGrantedPerms(
          issue, auth.effective_ids, config)
      sub_can_view = permissions.CanViewIssue(
          auth.effective_ids, sub_perms, project, issue,
          granted_perms=granted_perms)
      if sub_can_view:
        ids_to_notify_of_issue.setdefault(sub_id, [])
        if issue not in ids_to_notify_of_issue[sub_id]:
          ids_to_notify_of_issue[sub_id].append(issue)

    if issue in non_private_issues:
      for notify_addr in issue.derived_notify_addrs:
        additional_addrs_to_notify_of_issue[notify_addr].append(issue)

  # 2. Compose an email specifically for each user, and one email to each
  # notify_addr with all the issues that it should be notified of.
  # Start from non-members first, then members to reveal email addresses.
  email_tasks = []
  needed_user_view_ids = [uid for uid in ids_to_notify_of_issue
                          if uid not in users_by_id]
  users_by_id.update(framework_views.MakeAllUserViews(
      cnxn, self.services.user, needed_user_view_ids))
  member_ids_to_notify_of_issue = {}
  non_member_ids_to_notify_of_issue = {}
  member_additional_addrs = {}
  non_member_additional_addrs = {}
  addr_to_addrperm = {}  # {email_address: AddrPerm object}
  all_user_prefs = self.services.user.GetUsersPrefs(
      cnxn, ids_to_notify_of_issue)

  # TODO(jrobbins): Merge ids_to_notify_of_issue entries for linked accounts.
  for user_id in ids_to_notify_of_issue:
    if not user_id:
      continue  # Don't try to notify NO_USER_SPECIFIED
    if users_by_id[user_id].email in omit_addrs:
      logging.info('Omitting %s', user_id)
      continue
    user_issues = ids_to_notify_of_issue[user_id]
    if not user_issues:
      continue  # user's prefs indicate they don't want these notifications
    auth = authdata.AuthData.FromUserID(cnxn, user_id, self.services)
    is_member = bool(framework_bizobj.UserIsInProject(
        project, auth.effective_ids))
    if is_member:
      member_ids_to_notify_of_issue[user_id] = user_issues
    else:
      non_member_ids_to_notify_of_issue[user_id] = user_issues
    addr = users_by_id[user_id].email
    omit_addrs.add(addr)
    addr_to_addrperm[addr] = notify_reasons.AddrPerm(
        is_member, addr, users_by_id[user_id].user,
        notify_reasons.REPLY_NOT_ALLOWED, all_user_prefs[user_id])

  # Classify each additional notify address as member or non-member.
  for addr, addr_issues in additional_addrs_to_notify_of_issue.items():
    auth = None
    try:
      auth = authdata.AuthData.FromEmail(cnxn, addr, self.services)
    except:  # pylint: disable=bare-except
      logging.warning('Cannot find user of email %s ', addr)
    if auth:
      is_member = bool(framework_bizobj.UserIsInProject(
          project, auth.effective_ids))
    else:
      is_member = False
    if is_member:
      member_additional_addrs[addr] = addr_issues
    else:
      non_member_additional_addrs[addr] = addr_issues
    omit_addrs.add(addr)
    addr_to_addrperm[addr] = notify_reasons.AddrPerm(
        is_member, addr, None, notify_reasons.REPLY_NOT_ALLOWED, None)

  # Non-member emails are built before member emails are revealed below.
  for user_id, user_issues in non_member_ids_to_notify_of_issue.items():
    addr = users_by_id[user_id].email
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], user_issues, users_by_id,
        commenter_view, hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify non-member %s (%s) of %s',
                 users_by_id[user_id].email, user_id,
                 [issue.local_id for issue in user_issues])

  for addr, addr_issues in non_member_additional_addrs.items():
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], addr_issues, users_by_id, commenter_view,
        hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify non-member additional addr %s of %s',
                 addr, [addr_issue.local_id for addr_issue in addr_issues])

  # Reveal addresses only for emails that go to project members.
  framework_views.RevealAllEmails(users_by_id)
  commenter_view.RevealEmail()

  for user_id, user_issues in member_ids_to_notify_of_issue.items():
    addr = users_by_id[user_id].email
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], user_issues, users_by_id,
        commenter_view, hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify member %s (%s) of %s',
                 addr, user_id, [issue.local_id for issue in user_issues])

  for addr, addr_issues in member_additional_addrs.items():
    email = self._FormatBulkIssuesEmail(
        addr_to_addrperm[addr], addr_issues, users_by_id, commenter_view,
        hostport, comment_text, amendments, config, project)
    email_tasks.append(email)
    logging.info('about to bulk notify member additional addr %s of %s',
                 addr, [addr_issue.local_id for addr_issue in addr_issues])

  # 4. Add in the project's issue_notify_address.  This happens even if it
  # is the same as the commenter's email address (which would be an unusual
  # but valid project configuration).  Only issues that any contributor could
  # view are included in emails to the all-issue-activity mailing lists.
  if (project.issue_notify_address
      and project.issue_notify_address not in omit_addrs):
    non_private_issues_live = []
    for issue in issues:
      contributor_could_view = permissions.CanViewIssue(
          set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
          project, issue)
      if contributor_could_view:
        non_private_issues_live.append(issue)
    if non_private_issues_live:
      project_notify_addrperm = notify_reasons.AddrPerm(
          True, project.issue_notify_address, None,
          notify_reasons.REPLY_NOT_ALLOWED, None)
      email = self._FormatBulkIssuesEmail(
          project_notify_addrperm, non_private_issues_live, users_by_id,
          commenter_view, hostport, comment_text, amendments, config,
          project)
      email_tasks.append(email)
      omit_addrs.add(project.issue_notify_address)
      logging.info('about to bulk notify all-issues %s of %s',
                   project.issue_notify_address,
                   [issue.local_id for issue in non_private_issues])

  return email_tasks
def _CreateIssueSearchDocuments(
    issues, comments_dict, users_by_id, config_dict):
  """Make the GAE search index documents for the given issue batch.

  Args:
    issues: list of issues to index.
    comments_dict: prefetched dictionary of comments on those issues.
    users_by_id: dictionary {user_id: UserView} so that the email
        addresses of users who left comments can be found via search.
    config_dict: dict {project_id: config} for all the projects that
        the given issues are in.
  """
  documents_by_shard = collections.defaultdict(list)
  for issue in issues:
    comments = comments_dict.get(issue.issue_id, [])
    comments = _IndexableComments(comments, users_by_id)
    summary = issue.summary
    # TODO(jrobbins): allow search specifically on explicit vs derived
    # fields.
    owner_id = tracker_bizobj.GetOwnerId(issue)
    owner_email = users_by_id[owner_id].email
    config = config_dict[issue.project_id]
    component_paths = []
    for component_id in issue.component_ids:
      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
      if cd:
        component_paths.append(cd.path)

    field_values = [str(tracker_bizobj.GetFieldValue(fv, users_by_id))
                    for fv in issue.field_values]
    # One flat text field holding status, owner, CC emails, components,
    # field values, and labels, so a single query can match any of them.
    metadata = '%s %s %s %s %s %s' % (
        tracker_bizobj.GetStatus(issue),
        owner_email,
        [users_by_id[cc_id].email
         for cc_id in tracker_bizobj.GetCcIds(issue)],
        ' '.join(component_paths),
        ' '.join(field_values),
        ' '.join(tracker_bizobj.GetLabels(issue)))

    assert comments, 'issues should always have at least the description'
    # The first comment is the issue description; the rest are comments.
    description = _ExtractCommentText(comments[0], users_by_id)
    description = description[:framework_constants.MAX_FTS_FIELD_SIZE]
    all_comments = ' '.join(
        _ExtractCommentText(c, users_by_id) for c in comments[1:])
    all_comments = all_comments[:framework_constants.MAX_FTS_FIELD_SIZE]

    custom_fields = _BuildCustomFTSFields(issue)
    doc = search.Document(
        doc_id=str(issue.issue_id),
        fields=[
            search.NumberField(name='project_id', value=issue.project_id),
            search.TextField(name='summary', value=summary),
            search.TextField(name='metadata', value=metadata),
            search.TextField(name='description', value=description),
            search.TextField(name='comment', value=all_comments),
        ] + custom_fields)
    shard_id = issue.issue_id % settings.num_logical_shards
    documents_by_shard[shard_id].append(doc)

  # Index each shard's documents in parallel via Promises.
  start_time = time.time()
  promises = []
  # Fixed: dict.iteritems() is Python-2-only (raises AttributeError on
  # Python 3); use items(), matching the other copy of this function.
  for shard_id, documents in documents_by_shard.items():
    if documents:
      promises.append(
          framework_helpers.Promise(_IndexDocsInShard, shard_id, documents))
  for promise in promises:
    promise.WaitAndGetValue()

  logging.info('Finished %d indexing in shards in %d ms',
               len(documents_by_shard),
               int((time.time() - start_time) * 1000))
def StoreIssueSnapshots(self, cnxn, issues, commit=True):
  """Adds an IssueSnapshot and updates the previous one for each issue."""
  for issue in issues:
    right_now = self._currentTime()

    # Look for an existing (latest) IssueSnapshot with this issue_id.
    previous_snapshots = self.issuesnapshot_tbl.Select(
        cnxn, cols=ISSUESNAPSHOT_COLS,
        issue_id=issue.issue_id,
        limit=1,
        order_by=[('period_start DESC', [])])

    if len(previous_snapshots) > 0:
      previous_snapshot_id = previous_snapshots[0][0]
      logging.info('Found previous IssueSnapshot with id: %s',
                   previous_snapshot_id)

      # Update previous snapshot's end time to right now.
      delta = {'period_end': right_now}
      where = [('IssueSnapshot.id = %s', [previous_snapshot_id])]
      self.issuesnapshot_tbl.Update(cnxn, delta, commit=commit, where=where)

    config = self.config_service.GetProjectConfig(cnxn, issue.project_id)
    period_end = settings.maximum_snapshot_period_end
    is_open = tracker_helpers.MeansOpenInProject(
        tracker_bizobj.GetStatus(issue), config)
    shard = issue.issue_id % settings.num_logical_shards
    status = tracker_bizobj.GetStatus(issue)
    # Normalize falsy lookup results to NULL rather than 0/''.
    status_id = self.config_service.LookupStatusID(
        cnxn, issue.project_id, status) or None
    owner_id = tracker_bizobj.GetOwnerId(issue) or None

    issuesnapshot_rows = [(issue.issue_id, shard, issue.project_id,
                           issue.local_id, issue.reporter_id, owner_id,
                           status_id, right_now, period_end, is_open)]

    # Skip the first column (the autogenerated snapshot id).
    ids = self.issuesnapshot_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT_COLS[1:],
        issuesnapshot_rows,
        replace=True, commit=commit,
        return_generated_ids=True)
    issuesnapshot_id = ids[0]

    # Add all labels to IssueSnapshot2Label.
    label_rows = [
        (issuesnapshot_id,
         self.config_service.LookupLabelID(cnxn, issue.project_id, label))
        for label in tracker_bizobj.GetLabels(issue)
    ]
    self.issuesnapshot2label_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2LABEL_COLS,
        label_rows, replace=True, commit=commit)

    # Add all CCs to IssueSnapshot2Cc.
    cc_rows = [
        (issuesnapshot_id, cc_id)
        for cc_id in tracker_bizobj.GetCcIds(issue)
    ]
    self.issuesnapshot2cc_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2CC_COLS,
        cc_rows,
        replace=True, commit=commit)

    # Add all components to IssueSnapshot2Component.
    component_rows = [
        (issuesnapshot_id, component_id)
        for component_id in issue.component_ids
    ]
    self.issuesnapshot2component_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2COMPONENT_COLS,
        component_rows,
        replace=True, commit=commit)

    # Add all hotlists containing this issue to IssueSnapshot2Hotlist.
    # This is raw SQL to obviate passing FeaturesService down through
    # the call stack wherever this function is called.
    # TODO(jrobbins): sort out dependencies between service classes.
    cnxn.Execute('''
        INSERT INTO IssueSnapshot2Hotlist (issuesnapshot_id, hotlist_id)
        SELECT %s, hotlist_id FROM Hotlist2Issue WHERE issue_id = %s
    ''', [issuesnapshot_id, issue.issue_id])