def testGetStatus(self):
  issue = tracker_pb2.Issue()
  self.assertEquals(tracker_bizobj.GetStatus(issue), '')

  issue.derived_status = 'InReview'
  self.assertEquals(tracker_bizobj.GetStatus(issue), 'InReview')

  issue.status = 'Forgotten'
  self.assertEquals(tracker_bizobj.GetStatus(issue), 'Forgotten')
def EvaluateSubscriptions(cnxn, issue, users_to_queries, services, config):
  """Determine subscribers who have subs that match the given issue."""
  # Note: unlike filter rules, subscriptions see explicit & derived values.
  lower_labels = [lab.lower() for lab in tracker_bizobj.GetLabels(issue)]
  label_set = set(lower_labels)

  subscribers_to_notify = []
  for uid, saved_queries in users_to_queries.items():
    for sq in saved_queries:
      if sq.subscription_mode != 'immediate':
        continue
      if issue.project_id not in sq.executes_in_project_ids:
        continue
      cond = savedqueries_helpers.SavedQueryToCond(sq)
      # TODO(jrobbins): Support linked accounts me_user_ids.
      cond, _warnings = searchpipeline.ReplaceKeywordsWithUserIDs([uid], cond)
      cond_ast = query2ast.ParseUserQuery(
          cond, '', query2ast.BUILTIN_ISSUE_FIELDS, config)

      if filterrules_helpers.EvalPredicate(
          cnxn, services, cond_ast, issue, label_set, config,
          tracker_bizobj.GetOwnerId(issue), tracker_bizobj.GetCcIds(issue),
          tracker_bizobj.GetStatus(issue)):
        subscribers_to_notify.append(uid)
        break  # Don't bother looking at the user's other saved queries.

  return subscribers_to_notify
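# A minimal, self-contained sketch (not part of the original module) of the
# control flow above: each user is notified at most once, at the first
# 'immediate' saved query that matches.  FakeQuery and matches() are
# hypothetical stand-ins for SavedQuery and filterrules_helpers.EvalPredicate.
import collections

FakeQuery = collections.namedtuple(
    'FakeQuery', ['subscription_mode', 'executes_in_project_ids', 'query'])


def _SketchEvaluateSubscriptions(project_id, issue_labels, users_to_queries):
  """Return user_ids whose first matching immediate saved query fires."""
  def matches(query, labels):
    # Stand-in for query parsing and predicate evaluation.
    return query.lower() in [lab.lower() for lab in labels]

  subscribers_to_notify = []
  for uid, saved_queries in users_to_queries.items():
    for sq in saved_queries:
      if sq.subscription_mode != 'immediate':
        continue
      if project_id not in sq.executes_in_project_ids:
        continue
      if matches(sq.query, issue_labels):
        subscribers_to_notify.append(uid)
        break  # Skip the user's remaining saved queries.
  return subscribers_to_notify


if __name__ == '__main__':
  queries = {
      111: [FakeQuery('immediate', [789], 'Pri-1')],
      222: [FakeQuery('daily', [789], 'Pri-1')]}
  print(_SketchEvaluateSubscriptions(789, ['Pri-1', 'Type-Bug'], queries))
  # -> [111]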
def _CalculateIssuePings(self, issue, config):
  """Return a list of (field, timestamp) pairs for dates that should ping."""
  timestamp_min, timestamp_max = _GetTimestampRange(int(time.time()))
  arrived_dates_by_field_id = {
      fv.field_id: fv.date_value
      for fv in issue.field_values
      if timestamp_min <= fv.date_value < timestamp_max}
  logging.info('arrived_dates_by_field_id = %r', arrived_dates_by_field_id)

  # TODO(jrobbins): Lookup field defs regardless of project_id to better
  # handle foreign fields in issues that have been moved between projects.
  pings = [
      (field, arrived_dates_by_field_id[field.field_id])
      for field in config.field_defs
      if (field.field_id in arrived_dates_by_field_id and
          field.date_action in (tracker_pb2.DateAction.PING_OWNER_ONLY,
                                tracker_pb2.DateAction.PING_PARTICIPANTS))]

  # TODO(jrobbins): For now, assume all pings apply only to open issues.
  # Later, allow each date action to specify whether it applies to open
  # issues or all issues.
  means_open = tracker_helpers.MeansOpenInProject(
      tracker_bizobj.GetStatus(issue), config)
  pings = [ping for ping in pings if means_open]

  pings = sorted(pings, key=lambda ping: ping[0].field_name)
  return pings
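# Illustrative sketch (assumptions noted): _GetTimestampRange() is not shown
# here, so this example hard-codes a one-day window and uses namedtuples in
# place of the FieldValue and FieldDef PBs to show how date values arriving
# inside the window are paired with their field definitions.
import collections

FakeFieldValue = collections.namedtuple('FakeFieldValue',
                                        ['field_id', 'date_value'])
FakeFieldDef = collections.namedtuple('FakeFieldDef',
                                      ['field_id', 'field_name'])

timestamp_min, timestamp_max = 1500000000, 1500086400  # assumed 24h window
field_values = [FakeFieldValue(1, 1500000100),   # inside the window: pings
                FakeFieldValue(2, 1400000000)]   # outside the window: ignored
field_defs = [FakeFieldDef(1, 'EOL-date'), FakeFieldDef(2, 'M-target')]

arrived = {fv.field_id: fv.date_value
           for fv in field_values
           if timestamp_min <= fv.date_value < timestamp_max}
pings = [(fd.field_name, arrived[fd.field_id])
         for fd in field_defs if fd.field_id in arrived]
print(pings)  # -> [('EOL-date', 1500000100)]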
def ExtractUniqueValues(columns, artifact_list, users_by_id,
                        config, related_issues, hotlist_context_dict=None):
  """Build a nested list of unique values so the user can auto-filter.

  Args:
    columns: a list of lowercase column name strings, which may contain
        combined columns like "priority/pri".
    artifact_list: a list of artifacts in the complete set of search results.
    users_by_id: dict mapping user_ids to UserViews.
    config: ProjectIssueConfig PB for the current project.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    hotlist_context_dict: dict for building a hotlist grid table.

  Returns:
    [EZTItem(col1, colname1, [val11, val12, ...]), ...]

    A list of EZTItems, each of which has a col_index, column_name,
    and a list of unique values that appear in that column.
  """
  column_values = {col_name: {} for col_name in columns}

  # For each combined column "a/b/c", add entries that point from "a" back
  # to "a/b/c", from "b" back to "a/b/c", and from "c" back to "a/b/c".
  combined_column_parts = collections.defaultdict(list)
  for col in columns:
    if '/' in col:
      for col_part in col.split('/'):
        combined_column_parts[col_part].append(col)

  unique_labels = set()
  for art in artifact_list:
    unique_labels.update(tracker_bizobj.GetLabels(art))

  for label in unique_labels:
    if '-' in label:
      col, val = label.split('-', 1)
      col = col.lower()
      if col in column_values:
        column_values[col][val.lower()] = val
      if col in combined_column_parts:
        for combined_column in combined_column_parts[col]:
          column_values[combined_column][val.lower()] = val
    else:
      if 'summary' in column_values:
        column_values['summary'][label.lower()] = label

  # TODO(jrobbins): Consider refactoring some of this to tracker_bizobj
  # or a new builtins.py to reduce duplication.
  if 'reporter' in column_values:
    for art in artifact_list:
      reporter_id = art.reporter_id
      if reporter_id and reporter_id in users_by_id:
        reporter_username = users_by_id[reporter_id].display_name
        column_values['reporter'][reporter_username] = reporter_username

  if 'owner' in column_values:
    for art in artifact_list:
      owner_id = tracker_bizobj.GetOwnerId(art)
      if owner_id and owner_id in users_by_id:
        owner_username = users_by_id[owner_id].display_name
        column_values['owner'][owner_username] = owner_username

  if 'cc' in column_values:
    for art in artifact_list:
      cc_ids = tracker_bizobj.GetCcIds(art)
      for cc_id in cc_ids:
        if cc_id and cc_id in users_by_id:
          cc_username = users_by_id[cc_id].display_name
          column_values['cc'][cc_username] = cc_username

  if 'component' in column_values:
    for art in artifact_list:
      all_comp_ids = list(art.component_ids) + list(art.derived_component_ids)
      for component_id in all_comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(component_id, config)
        if cd:
          column_values['component'][cd.path] = cd.path

  if 'stars' in column_values:
    for art in artifact_list:
      star_count = art.star_count
      column_values['stars'][star_count] = star_count

  if 'status' in column_values:
    for art in artifact_list:
      status = tracker_bizobj.GetStatus(art)
      if status:
        column_values['status'][status.lower()] = status

  if 'project' in column_values:
    for art in artifact_list:
      project_name = art.project_name
      column_values['project'][project_name] = project_name

  if 'mergedinto' in column_values:
    for art in artifact_list:
      if art.merged_into and art.merged_into != 0:
        merged_issue = related_issues[art.merged_into]
        merged_issue_ref = tracker_bizobj.FormatIssueRef(
            (merged_issue.project_name, merged_issue.local_id))
        column_values['mergedinto'][merged_issue_ref] = merged_issue_ref

  if 'blocked' in column_values:
    for art in artifact_list:
      if art.blocked_on_iids:
        column_values['blocked']['is_blocked'] = 'Yes'
      else:
        column_values['blocked']['is_not_blocked'] = 'No'

  if 'blockedon' in column_values:
    for art in artifact_list:
      if art.blocked_on_iids:
        for blocked_on_iid in art.blocked_on_iids:
          blocked_on_issue = related_issues[blocked_on_iid]
          blocked_on_ref = tracker_bizobj.FormatIssueRef(
              (blocked_on_issue.project_name, blocked_on_issue.local_id))
          column_values['blockedon'][blocked_on_ref] = blocked_on_ref

  if 'blocking' in column_values:
    for art in artifact_list:
      if art.blocking_iids:
        for blocking_iid in art.blocking_iids:
          blocking_issue = related_issues[blocking_iid]
          blocking_ref = tracker_bizobj.FormatIssueRef(
              (blocking_issue.project_name, blocking_issue.local_id))
          column_values['blocking'][blocking_ref] = blocking_ref

  if 'added' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        date_added = issue_dict['date_added']
        column_values['added'][date_added] = date_added

  if 'adder' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        adder_id = issue_dict['adder_id']
        adder = users_by_id[adder_id].display_name
        column_values['adder'][adder] = adder

  if 'note' in column_values:
    for art in artifact_list:
      if hotlist_context_dict and hotlist_context_dict[art.issue_id]:
        issue_dict = hotlist_context_dict[art.issue_id]
        note = issue_dict['note']
        if issue_dict['note']:
          column_values['note'][note] = note

  if 'attachments' in column_values:
    for art in artifact_list:
      attachment_count = art.attachment_count
      column_values['attachments'][attachment_count] = attachment_count

  # Add all custom field values if the custom field name is a shown column.
  field_id_to_col = {}
  for art in artifact_list:
    for fv in art.field_values:
      field_col, field_type = field_id_to_col.get(fv.field_id, (None, None))
      if field_col == 'NOT_SHOWN':
        continue
      if field_col is None:
        fd = tracker_bizobj.FindFieldDefByID(fv.field_id, config)
        if not fd:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_col = fd.field_name.lower()
        field_type = fd.field_type
        if field_col not in column_values:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_id_to_col[fv.field_id] = field_col, field_type
      if field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
        continue  # Already handled by label parsing
      elif field_type == tracker_pb2.FieldTypes.INT_TYPE:
        val = fv.int_value
      elif field_type == tracker_pb2.FieldTypes.STR_TYPE:
        val = fv.str_value
      elif field_type == tracker_pb2.FieldTypes.USER_TYPE:
        user = users_by_id.get(fv.user_id)
        val = user.email if user else framework_constants.NO_USER_NAME
      elif field_type == tracker_pb2.FieldTypes.DATE_TYPE:
        val = fv.int_value  # TODO(jrobbins): convert to date
      elif field_type == tracker_pb2.FieldTypes.BOOL_TYPE:
        val = 'Yes' if fv.int_value else 'No'
      column_values[field_col][val] = val

  # TODO(jrobbins): make the capitalization of well-known unique label and
  # status values match the way it is written in the issue config.

  # Return EZTItems for each column in left-to-right display order.
  result = []
  for i, col_name in enumerate(columns):
    # TODO(jrobbins): sort each set of column values top-to-bottom, by the
    # order specified in the project artifact config. For now, just sort
    # lexicographically to make expected output defined.
    sorted_col_values = sorted(column_values[col_name].values())
    result.append(template_helpers.EZTItem(
        col_index=i, column_name=col_name, filter_values=sorted_col_values))

  return result
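# Small standalone illustration (hypothetical data) of the combined-column
# mapping used above: a label such as "Priority-High" should populate any
# combined column like "priority/pri" in addition to a plain "priority"
# column, if one is shown.
import collections

columns = ['priority/pri', 'type']
column_values = {col: {} for col in columns}
combined_column_parts = collections.defaultdict(list)
for col in columns:
  if '/' in col:
    for col_part in col.split('/'):
      combined_column_parts[col_part].append(col)

for label in ['Priority-High', 'Pri-1', 'Type-Bug']:
  col, val = label.split('-', 1)
  col = col.lower()
  if col in column_values:
    column_values[col][val.lower()] = val
  for combined in combined_column_parts.get(col, []):
    column_values[combined][val.lower()] = val

print(column_values)
# -> {'priority/pri': {'high': 'High', '1': '1'}, 'type': {'bug': 'Bug'}}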
def GetArtifactAttr(
    art, attribute_name, users_by_id, label_attr_values_dict,
    config, related_issues, hotlist_issue_context=None):
  """Return the requested attribute values of the given artifact.

  Args:
    art: a tracked artifact with labels, local_id, summary, stars, and owner.
    attribute_name: lowercase string name of attribute to get.
    users_by_id: dictionary of UserViews already created.
    label_attr_values_dict: dictionary {'key': [value, ...], }.
    config: ProjectIssueConfig PB for the current project.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    hotlist_issue_context: dict of {hotlist_issue_field: field_value, ...}.

  Returns:
    A list of string attribute values, or [framework_constants.NO_VALUES]
    if the artifact has no value for that attribute.
  """
  if attribute_name == '--':
    return []
  if attribute_name == 'id':
    return [art.local_id]
  if attribute_name == 'summary':
    return [art.summary]
  if attribute_name == 'status':
    return [tracker_bizobj.GetStatus(art)]
  if attribute_name == 'stars':
    return [art.star_count]
  if attribute_name == 'attachments':
    return [art.attachment_count]
  # TODO(jrobbins): support blocking
  if attribute_name == 'project':
    return [art.project_name]
  if attribute_name == 'mergedinto':
    if art.merged_into and art.merged_into != 0:
      return [tracker_bizobj.FormatIssueRef((
          related_issues[art.merged_into].project_name,
          related_issues[art.merged_into].local_id))]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'blocked':
    return ['Yes' if art.blocked_on_iids else 'No']
  if attribute_name == 'blockedon':
    if not art.blocked_on_iids:
      return [framework_constants.NO_VALUES]
    else:
      return [tracker_bizobj.FormatIssueRef((
          related_issues[blocked_on_iid].project_name,
          related_issues[blocked_on_iid].local_id))
              for blocked_on_iid in art.blocked_on_iids]
  if attribute_name == 'adder':
    if hotlist_issue_context:
      adder_id = hotlist_issue_context['adder_id']
      return [users_by_id[adder_id].display_name]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'added':
    if hotlist_issue_context:
      return [hotlist_issue_context['date_added']]
    else:
      return [framework_constants.NO_VALUES]
  if attribute_name == 'reporter':
    return [users_by_id[art.reporter_id].display_name]
  if attribute_name == 'owner':
    owner_id = tracker_bizobj.GetOwnerId(art)
    if not owner_id:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[owner_id].display_name]
  if attribute_name == 'cc':
    cc_ids = tracker_bizobj.GetCcIds(art)
    if not cc_ids:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[cc_id].display_name for cc_id in cc_ids]
  if attribute_name == 'component':
    comp_ids = list(art.component_ids) + list(art.derived_component_ids)
    if not comp_ids:
      return [framework_constants.NO_VALUES]
    else:
      paths = []
      for comp_id in comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(comp_id, config)
        if cd:
          paths.append(cd.path)
      return paths

  # Check to see if it is a field. Process as field only if it is not an enum
  # type because enum types are stored as key-value labels.
  fd = tracker_bizobj.FindFieldDef(attribute_name, config)
  if fd and fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE:
    values = []
    for fv in art.field_values:
      if fv.field_id == fd.field_id:
        value = tracker_bizobj.GetFieldValueWithRawValue(
            fd.field_type, fv, users_by_id, None)
        values.append(value)
    return values

  # Since it is not a built-in attribute or a field, it must be a key-value
  # label.
  return label_attr_values_dict.get(
      attribute_name, [framework_constants.NO_VALUES])
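# Hedged sketch: GetArtifactAttr() falls back to label_attr_values_dict for
# key-value labels.  The real dict is built elsewhere; one plausible shape,
# assuming labels split on the first dash, is shown here with hypothetical
# data so the final return statement above has a concrete example.
import collections

def _SketchLabelAttrValues(labels):
  label_attr_values_dict = collections.defaultdict(list)
  for label in labels:
    if '-' in label:
      key, value = label.split('-', 1)
      label_attr_values_dict[key.lower()].append(value)
  return dict(label_attr_values_dict)

print(_SketchLabelAttrValues(['Priority-High', 'OS-Linux', 'OS-Mac']))
# -> {'priority': ['High'], 'os': ['Linux', 'Mac']}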
def _CreateIssueSearchDocuments(
    issues, comments_dict, users_by_id, config_dict):
  """Make the GAE search index documents for the given issue batch.

  Args:
    issues: list of issues to index.
    comments_dict: prefetched dictionary of comments on those issues.
    users_by_id: dictionary {user_id: UserView} so that the email
        addresses of users who left comments can be found via search.
    config_dict: dict {project_id: config} for all the projects that
        the given issues are in.
  """
  documents_by_shard = collections.defaultdict(list)
  for issue in issues:
    summary = issue.summary
    # TODO(jrobbins): allow search specifically on explicit vs derived
    # fields.
    owner_id = tracker_bizobj.GetOwnerId(issue)
    owner_email = users_by_id[owner_id].email
    config = config_dict[issue.project_id]
    component_paths = []
    for component_id in issue.component_ids:
      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
      if cd:
        component_paths.append(cd.path)

    field_values = [
        tracker_bizobj.GetFieldValue(fv, users_by_id)
        for fv in issue.field_values]
    # Convert to string only the values that are not strings already.
    # This is done because the default encoding in App Engine seems to be
    # 'ascii', and string values might contain unicode characters, so str()
    # would fail to encode them.
    field_values = [
        value if isinstance(value, string_types) else str(value)
        for value in field_values]

    metadata = '%s %s %s %s %s %s' % (
        tracker_bizobj.GetStatus(issue),
        owner_email,
        [users_by_id[cc_id].email
         for cc_id in tracker_bizobj.GetCcIds(issue)],
        ' '.join(component_paths),
        ' '.join(field_values),
        ' '.join(tracker_bizobj.GetLabels(issue)))
    custom_fields = _BuildCustomFTSFields(issue)

    comments = comments_dict.get(issue.issue_id, [])
    room_for_comments = (
        framework_constants.MAX_FTS_FIELD_SIZE - len(summary) -
        len(metadata) - sum(len(cf.value) for cf in custom_fields))
    comments = _IndexableComments(
        comments, users_by_id, remaining_chars=room_for_comments)
    logging.info('len(comments) is %r', len(comments))
    if comments:
      description = _ExtractCommentText(comments[0], users_by_id)
      description = description[:framework_constants.MAX_FTS_FIELD_SIZE]
      all_comments = ' '.join(
          _ExtractCommentText(c, users_by_id) for c in comments[1:])
      all_comments = all_comments[:framework_constants.MAX_FTS_FIELD_SIZE]
    else:
      description = ''
      all_comments = ''
      logging.info('Issue %s:%r has zero indexable comments',
                   issue.project_name, issue.local_id)

    logging.info('Building document for %s:%d',
                 issue.project_name, issue.local_id)
    logging.info('len(summary) = %d', len(summary))
    logging.info('len(metadata) = %d', len(metadata))
    logging.info('len(description) = %d', len(description))
    logging.info('len(comment) = %d', len(all_comments))
    for cf in custom_fields:
      logging.info('len(%s) = %d', cf.name, len(cf.value))

    doc = search.Document(
        doc_id=str(issue.issue_id),
        fields=[
            search.NumberField(name='project_id', value=issue.project_id),
            search.TextField(name='summary', value=summary),
            search.TextField(name='metadata', value=metadata),
            search.TextField(name='description', value=description),
            search.TextField(name='comment', value=all_comments),
        ] + custom_fields)

    shard_id = issue.issue_id % settings.num_logical_shards
    documents_by_shard[shard_id].append(doc)

  start_time = time.time()
  promises = []
  for shard_id, documents in documents_by_shard.items():
    if documents:
      promises.append(framework_helpers.Promise(
          _IndexDocsInShard, shard_id, documents))

  for promise in promises:
    promise.WaitAndGetValue()

  logging.info('Finished indexing %d shards in %d ms',
               len(documents_by_shard),
               int((time.time() - start_time) * 1000))
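# Minimal sketch of the sharding step above, with a hypothetical shard count
# standing in for settings.num_logical_shards: documents are bucketed by
# issue_id modulo the number of logical shards, so each shard can then be
# indexed in parallel via one Promise.
import collections

NUM_LOGICAL_SHARDS = 10  # assumed value; the real one comes from settings.

def _SketchShardDocuments(issue_ids):
  documents_by_shard = collections.defaultdict(list)
  for issue_id in issue_ids:
    shard_id = issue_id % NUM_LOGICAL_SHARDS
    documents_by_shard[shard_id].append('doc-%d' % issue_id)
  return dict(documents_by_shard)

print(_SketchShardDocuments([100001, 100002, 100011]))
# -> {1: ['doc-100001', 'doc-100011'], 2: ['doc-100002']}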
def __init__(self, issue, users_by_id, config):
  """Store relevant values for later display by EZT.

  Args:
    issue: An Issue protocol buffer.
    users_by_id: dict {user_id: UserViews} for all users mentioned in issue.
    config: ProjectIssueConfig for this issue.
  """
  super(IssueView, self).__init__(issue)

  # The users involved in this issue must be present in users_by_id if
  # this IssueView is to be used on the issue detail or peek pages. But,
  # they can be absent from users_by_id if the IssueView is used as a
  # tile in the grid view.
  self.owner = users_by_id.get(issue.owner_id)
  self.derived_owner = users_by_id.get(issue.derived_owner_id)
  self.cc = [users_by_id.get(cc_id) for cc_id in issue.cc_ids if cc_id]
  self.derived_cc = [
      users_by_id.get(cc_id) for cc_id in issue.derived_cc_ids if cc_id]
  self.status = framework_views.StatusView(issue.status, config)
  self.derived_status = framework_views.StatusView(
      issue.derived_status, config)
  # If we don't have a config available, we don't need to access is_open, so
  # let it be True.
  self.is_open = ezt.boolean(
      not config or tracker_helpers.MeansOpenInProject(
          tracker_bizobj.GetStatus(issue), config))

  self.components = sorted(
      [ComponentValueView(component_id, config, False)
       for component_id in issue.component_ids
       if tracker_bizobj.FindComponentDefByID(component_id, config)] +
      [ComponentValueView(component_id, config, True)
       for component_id in issue.derived_component_ids
       if tracker_bizobj.FindComponentDefByID(component_id, config)],
      key=lambda cvv: cvv.path)

  self.fields = MakeAllFieldValueViews(
      config, issue.labels, issue.derived_labels, issue.field_values,
      users_by_id)

  labels, derived_labels = tracker_bizobj.ExplicitAndDerivedNonMaskedLabels(
      issue, config)
  self.labels = [
      framework_views.LabelView(label, config) for label in labels]
  self.derived_labels = [
      framework_views.LabelView(label, config) for label in derived_labels]
  self.restrictions = _RestrictionsView(issue)

  # TODO(jrobbins): sort by order of labels in project config

  self.short_summary = issue.summary[:tracker_constants.SHORT_SUMMARY_LENGTH]

  if issue.closed_timestamp:
    self.closed = timestr.FormatAbsoluteDate(issue.closed_timestamp)
  else:
    self.closed = ''

  self.blocked_on = []
  self.has_dangling = ezt.boolean(self.dangling_blocked_on_refs)
  self.blocking = []

  self.detail_relative_url = tracker_helpers.FormatRelativeIssueURL(
      issue.project_name, urls.ISSUE_DETAIL, id=issue.local_id)
  self.crbug_url = tracker_helpers.FormatCrBugURL(
      issue.project_name, issue.local_id)
def convert_issue(cls, issue, mar, services):
  """Convert Monorail Issue PB to API IssuesGetInsertResponse."""

  config = services.config.GetProjectConfig(mar.cnxn, issue.project_id)
  granted_perms = tracker_bizobj.GetGrantedPerms(
      issue, mar.auth.effective_ids, config)
  issue_project = services.project.GetProject(mar.cnxn, issue.project_id)
  component_list = []
  for cd in config.component_defs:
    cid = cd.component_id
    if cid in issue.component_ids:
      component_list.append(cd.path)
  cc_list = [convert_person(p, mar.cnxn, services) for p in issue.cc_ids]
  cc_list = [p for p in cc_list if p is not None]
  field_values_list = []
  fds_by_id = {fd.field_id: fd for fd in config.field_defs}
  phases_by_id = {phase.phase_id: phase for phase in issue.phases}
  for fv in issue.field_values:
    fd = fds_by_id.get(fv.field_id)
    if not fd:
      logging.warning('Custom field %d of project %s does not exist',
                      fv.field_id, issue_project.project_name)
      continue
    val = None
    if fv.user_id:
      val = _get_user_email(services.user, mar.cnxn, fv.user_id)
    else:
      val = tracker_bizobj.GetFieldValue(fv, {})
      if not isinstance(val, string_types):
        val = str(val)
    new_fv = api_pb2_v1.FieldValue(
        fieldName=fd.field_name, fieldValue=val, derived=fv.derived)
    if fd.approval_id:  # Attach parent approval name
      approval_fd = fds_by_id.get(fd.approval_id)
      if not approval_fd:
        logging.warning('Parent approval field %d of field %s does not exist',
                        fd.approval_id, fd.field_name)
      else:
        new_fv.approvalName = approval_fd.field_name
    elif fv.phase_id:  # Attach phase name
      phase = phases_by_id.get(fv.phase_id)
      if not phase:
        logging.warning('Phase %d for field %s does not exist',
                        fv.phase_id, fd.field_name)
      else:
        new_fv.phaseName = phase.name
    field_values_list.append(new_fv)
  approval_values_list = convert_approvals(
      mar.cnxn, issue.approval_values, services, config, issue.phases)
  phases_list = convert_phases(issue.phases)
  with work_env.WorkEnv(mar, services) as we:
    starred = we.IsIssueStarred(issue)
  resp = cls(
      kind='monorail#issue',
      id=issue.local_id,
      title=issue.summary,
      summary=issue.summary,
      projectId=issue_project.project_name,
      stars=issue.star_count,
      starred=starred,
      status=issue.status,
      state=(api_pb2_v1.IssueState.open
             if tracker_helpers.MeansOpenInProject(
                 tracker_bizobj.GetStatus(issue), config)
             else api_pb2_v1.IssueState.closed),
      labels=issue.labels,
      components=component_list,
      author=convert_person(issue.reporter_id, mar.cnxn, services),
      owner=convert_person(issue.owner_id, mar.cnxn, services),
      cc=cc_list,
      updated=datetime.datetime.fromtimestamp(issue.modified_timestamp),
      published=datetime.datetime.fromtimestamp(issue.opened_timestamp),
      blockedOn=convert_issue_ids(issue.blocked_on_iids, mar, services),
      blocking=convert_issue_ids(issue.blocking_iids, mar, services),
      canComment=permissions.CanCommentIssue(
          mar.auth.effective_ids, mar.perms, issue_project, issue,
          granted_perms=granted_perms),
      canEdit=permissions.CanEditIssue(
          mar.auth.effective_ids, mar.perms, issue_project, issue,
          granted_perms=granted_perms),
      fieldValues=field_values_list,
      approvalValues=approval_values_list,
      phases=phases_list)
  if issue.closed_timestamp > 0:
    resp.closed = datetime.datetime.fromtimestamp(issue.closed_timestamp)
  if issue.merged_into:
    resp.mergedInto = convert_issue_ids([issue.merged_into], mar, services)[0]
  if issue.owner_modified_timestamp:
    resp.owner_modified = datetime.datetime.fromtimestamp(
        issue.owner_modified_timestamp)
  if issue.status_modified_timestamp:
    resp.status_modified = datetime.datetime.fromtimestamp(
        issue.status_modified_timestamp)
  if issue.component_modified_timestamp:
    resp.component_modified = datetime.datetime.fromtimestamp(
        issue.component_modified_timestamp)
  return resp
def _CreateIssueSearchDocuments(
    issues, comments_dict, users_by_id, config_dict):
  """Make the GAE search index documents for the given issue batch.

  Args:
    issues: list of issues to index.
    comments_dict: prefetched dictionary of comments on those issues.
    users_by_id: dictionary {user_id: UserView} so that the email
        addresses of users who left comments can be found via search.
    config_dict: dict {project_id: config} for all the projects that
        the given issues are in.
  """
  documents_by_shard = collections.defaultdict(list)
  for issue in issues:
    comments = comments_dict.get(issue.issue_id, [])
    comments = _IndexableComments(comments, users_by_id)
    summary = issue.summary
    # TODO(jrobbins): allow search specifically on explicit vs derived
    # fields.
    owner_id = tracker_bizobj.GetOwnerId(issue)
    owner_email = users_by_id[owner_id].email
    config = config_dict[issue.project_id]
    component_paths = []
    for component_id in issue.component_ids:
      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
      if cd:
        component_paths.append(cd.path)

    field_values = [
        str(tracker_bizobj.GetFieldValue(fv, users_by_id))
        for fv in issue.field_values]
    metadata = '%s %s %s %s %s %s' % (
        tracker_bizobj.GetStatus(issue),
        owner_email,
        [users_by_id[cc_id].email
         for cc_id in tracker_bizobj.GetCcIds(issue)],
        ' '.join(component_paths),
        ' '.join(field_values),
        ' '.join(tracker_bizobj.GetLabels(issue)))

    assert comments, 'issues should always have at least the description'
    description = _ExtractCommentText(comments[0], users_by_id)
    description = description[:framework_constants.MAX_FTS_FIELD_SIZE]
    all_comments = ' '.join(
        _ExtractCommentText(c, users_by_id) for c in comments[1:])
    all_comments = all_comments[:framework_constants.MAX_FTS_FIELD_SIZE]

    custom_fields = _BuildCustomFTSFields(issue)
    doc = search.Document(
        doc_id=str(issue.issue_id),
        fields=[
            search.NumberField(name='project_id', value=issue.project_id),
            search.TextField(name='summary', value=summary),
            search.TextField(name='metadata', value=metadata),
            search.TextField(name='description', value=description),
            search.TextField(name='comment', value=all_comments),
        ] + custom_fields)

    shard_id = issue.issue_id % settings.num_logical_shards
    documents_by_shard[shard_id].append(doc)

  start_time = time.time()
  promises = []
  for shard_id, documents in documents_by_shard.iteritems():
    if documents:
      promises.append(framework_helpers.Promise(
          _IndexDocsInShard, shard_id, documents))

  for promise in promises:
    promise.WaitAndGetValue()

  logging.info('Finished indexing %d shards in %d ms',
               len(documents_by_shard),
               int((time.time() - start_time) * 1000))
def convert_issue(cls, issue, mar, services):
  """Convert Monorail Issue PB to API IssuesGetInsertResponse."""

  config = services.config.GetProjectConfig(mar.cnxn, issue.project_id)
  granted_perms = tracker_bizobj.GetGrantedPerms(
      issue, mar.auth.effective_ids, config)
  issue_project = services.project.GetProject(mar.cnxn, issue.project_id)
  component_list = []
  for cd in config.component_defs:
    cid = cd.component_id
    if cid in issue.component_ids:
      component_list.append(cd.path)
  cc_list = [convert_person(p, mar.cnxn, services) for p in issue.cc_ids]
  cc_list = [p for p in cc_list if p is not None]
  field_values_list = []
  field_id_dict = {
      fd.field_id: fd.field_name for fd in config.field_defs}
  for fv in issue.field_values:
    field_name = field_id_dict.get(fv.field_id)
    if not field_name:
      logging.warning('Custom field %d of project %s does not exist',
                      fv.field_id, issue_project.project_name)
      continue
    val = None
    if fv.user_id:
      val = _get_user_email(services.user, mar.cnxn, fv.user_id)
    elif fv.str_value:
      val = fv.str_value
    elif fv.int_value:
      val = str(fv.int_value)
    new_fv = api_pb2_v1.FieldValue(
        fieldName=field_name, fieldValue=val, derived=fv.derived)
    field_values_list.append(new_fv)
  resp = cls(
      kind='monorail#issue',
      id=issue.local_id,
      title=issue.summary,
      summary=issue.summary,
      projectId=issue_project.project_name,
      stars=issue.star_count,
      starred=services.issue_star.IsItemStarredBy(
          mar.cnxn, issue.issue_id, mar.auth.user_id),
      status=issue.status,
      state=(api_pb2_v1.IssueState.open
             if tracker_helpers.MeansOpenInProject(
                 tracker_bizobj.GetStatus(issue), config)
             else api_pb2_v1.IssueState.closed),
      labels=issue.labels,
      components=component_list,
      author=convert_person(issue.reporter_id, mar.cnxn, services),
      owner=convert_person(issue.owner_id, mar.cnxn, services),
      cc=cc_list,
      updated=datetime.datetime.fromtimestamp(issue.modified_timestamp),
      published=datetime.datetime.fromtimestamp(issue.opened_timestamp),
      blockedOn=convert_issue_ids(issue.blocked_on_iids, mar, services),
      blocking=convert_issue_ids(issue.blocking_iids, mar, services),
      canComment=permissions.CanCommentIssue(
          mar.auth.effective_ids, mar.perms, issue_project, issue,
          granted_perms=granted_perms),
      canEdit=permissions.CanEditIssue(
          mar.auth.effective_ids, mar.perms, issue_project, issue,
          granted_perms=granted_perms),
      fieldValues=field_values_list)
  if issue.closed_timestamp > 0:
    resp.closed = datetime.datetime.fromtimestamp(issue.closed_timestamp)
  if issue.merged_into:
    resp.mergedInto = convert_issue_ids([issue.merged_into], mar, services)[0]
  if issue.owner_modified_timestamp:
    resp.owner_modified = datetime.datetime.fromtimestamp(
        issue.owner_modified_timestamp)
  if issue.status_modified_timestamp:
    resp.status_modified = datetime.datetime.fromtimestamp(
        issue.status_modified_timestamp)
  if issue.component_modified_timestamp:
    resp.component_modified = datetime.datetime.fromtimestamp(
        issue.component_modified_timestamp)
  return resp
def __init__(
    self, issue, users_by_id, config, open_related=None,
    closed_related=None, all_related=None):
  """Store relevant values for later display by EZT.

  Args:
    issue: An Issue protocol buffer.
    users_by_id: dict {user_id: UserViews} for all users mentioned in issue.
    config: ProjectIssueConfig for this issue.
    open_related: dict of visible open issues that are related to this issue.
    closed_related: dict {issue_id: issue} of visible closed issues that
        are related to this issue.
    all_related: dict {issue_id: issue} of all blocked-on, blocking, or
        merged-into issues referenced from this issue, regardless of perms.
  """
  super(IssueView, self).__init__(issue)

  # The users involved in this issue must be present in users_by_id if
  # this IssueView is to be used on the issue detail or peek pages. But,
  # they can be absent from users_by_id if the IssueView is used as a
  # tile in the grid view.
  self.owner = users_by_id.get(issue.owner_id)
  self.derived_owner = users_by_id.get(issue.derived_owner_id)
  self.cc = [users_by_id.get(cc_id) for cc_id in issue.cc_ids
             if cc_id]
  self.derived_cc = [users_by_id.get(cc_id)
                     for cc_id in issue.derived_cc_ids
                     if cc_id]
  self.status = framework_views.StatusView(issue.status, config)
  self.derived_status = framework_views.StatusView(
      issue.derived_status, config)
  # If we don't have a config available, we don't need to access is_open, so
  # let it be True.
  self.is_open = ezt.boolean(
      not config or tracker_helpers.MeansOpenInProject(
          tracker_bizobj.GetStatus(issue), config))

  self.components = sorted(
      [ComponentValueView(component_id, config, False)
       for component_id in issue.component_ids
       if tracker_bizobj.FindComponentDefByID(component_id, config)] +
      [ComponentValueView(component_id, config, True)
       for component_id in issue.derived_component_ids
       if tracker_bizobj.FindComponentDefByID(component_id, config)],
      key=lambda cvv: cvv.path)

  self.fields = [
      MakeFieldValueView(
          fd, config, issue.labels, issue.derived_labels, issue.field_values,
          users_by_id)
      # TODO(jrobbins): field-level view restrictions, display options
      for fd in config.field_defs
      if not fd.is_deleted]
  self.fields = sorted(
      self.fields, key=lambda f: (f.applicable_type, f.field_name))

  field_names = [fd.field_name.lower() for fd in config.field_defs
                 if not fd.is_deleted]  # TODO(jrobbins): restricts
  self.labels = [
      framework_views.LabelView(label, config)
      for label in tracker_bizobj.NonMaskedLabels(issue.labels, field_names)]
  self.derived_labels = [
      framework_views.LabelView(label, config)
      for label in issue.derived_labels
      if not tracker_bizobj.LabelIsMaskedByField(label, field_names)]
  self.restrictions = _RestrictionsView(issue)

  # TODO(jrobbins): sort by order of labels in project config

  self.short_summary = issue.summary[:tracker_constants.SHORT_SUMMARY_LENGTH]

  if issue.closed_timestamp:
    self.closed = timestr.FormatAbsoluteDate(issue.closed_timestamp)
  else:
    self.closed = ''

  blocked_on_iids = issue.blocked_on_iids
  blocking_iids = issue.blocking_iids

  # Note that merged_into_str and blocked_on_str include all issue
  # references, even those referring to issues that the user can't view,
  # so open_related and closed_related cannot be used.
  if all_related is not None:
    all_blocked_on_refs = [
        (all_related[ref_iid].project_name, all_related[ref_iid].local_id)
        for ref_iid in issue.blocked_on_iids]
    all_blocked_on_refs.extend([
        (r.project, r.issue_id) for r in issue.dangling_blocked_on_refs])
    self.blocked_on_str = ', '.join(
        tracker_bizobj.FormatIssueRef(
            ref, default_project_name=issue.project_name)
        for ref in all_blocked_on_refs)
    all_blocking_refs = [
        (all_related[ref_iid].project_name, all_related[ref_iid].local_id)
        for ref_iid in issue.blocking_iids]
    all_blocking_refs.extend([
        (r.project, r.issue_id) for r in issue.dangling_blocking_refs])
    self.blocking_str = ', '.join(
        tracker_bizobj.FormatIssueRef(
            ref, default_project_name=issue.project_name)
        for ref in all_blocking_refs)
    if issue.merged_into:
      merged_issue = all_related[issue.merged_into]
      merged_into_ref = merged_issue.project_name, merged_issue.local_id
    else:
      merged_into_ref = None
    self.merged_into_str = tracker_bizobj.FormatIssueRef(
        merged_into_ref, default_project_name=issue.project_name)

  self.blocked_on = []
  self.has_dangling = ezt.boolean(self.dangling_blocked_on_refs)
  self.blocking = []
  current_project_name = issue.project_name

  if open_related is not None and closed_related is not None:
    self.merged_into = IssueRefView(
        current_project_name, issue.merged_into, open_related, closed_related)

    self.blocked_on = [
        IssueRefView(current_project_name, iid, open_related, closed_related)
        for iid in blocked_on_iids]
    self.blocked_on.extend(
        [DanglingIssueRefView(ref.project, ref.issue_id)
         for ref in issue.dangling_blocked_on_refs])
    self.blocked_on = [irv for irv in self.blocked_on if irv.visible]
    # TODO(jrobbins): sort by irv project_name and local_id

    self.blocking = [
        IssueRefView(current_project_name, iid, open_related, closed_related)
        for iid in blocking_iids]
    self.blocking.extend(
        [DanglingIssueRefView(ref.project, ref.issue_id)
         for ref in issue.dangling_blocking_refs])
    self.blocking = [irv for irv in self.blocking if irv.visible]
    # TODO(jrobbins): sort by irv project_name and local_id

  self.multiple_blocked_on = ezt.boolean(len(self.blocked_on) >= 2)

  self.detail_relative_url = tracker_helpers.FormatRelativeIssueURL(
      issue.project_name, urls.ISSUE_DETAIL, id=issue.local_id)
def StoreIssueSnapshots(self, cnxn, issues, commit=True):
  """Adds an IssueSnapshot and updates the previous one for each issue."""
  for issue in issues:
    right_now = self._currentTime()

    # Look for an existing (latest) IssueSnapshot with this issue_id.
    previous_snapshots = self.issuesnapshot_tbl.Select(
        cnxn, cols=ISSUESNAPSHOT_COLS,
        issue_id=issue.issue_id,
        limit=1,
        order_by=[('period_start DESC', [])])

    if len(previous_snapshots) > 0:
      previous_snapshot_id = previous_snapshots[0][0]
      logging.info('Found previous IssueSnapshot with id: %s',
                   previous_snapshot_id)

      # Update previous snapshot's end time to right now.
      delta = {'period_end': right_now}
      where = [('IssueSnapshot.id = %s', [previous_snapshot_id])]
      self.issuesnapshot_tbl.Update(cnxn, delta, commit=commit, where=where)

    config = self.config_service.GetProjectConfig(cnxn, issue.project_id)
    period_end = settings.maximum_snapshot_period_end
    is_open = tracker_helpers.MeansOpenInProject(
        tracker_bizobj.GetStatus(issue), config)
    shard = issue.issue_id % settings.num_logical_shards
    status = tracker_bizobj.GetStatus(issue)
    status_id = self.config_service.LookupStatusID(
        cnxn, issue.project_id, status) or None
    owner_id = tracker_bizobj.GetOwnerId(issue) or None

    issuesnapshot_rows = [(
        issue.issue_id, shard, issue.project_id, issue.local_id,
        issue.reporter_id, owner_id, status_id, right_now, period_end,
        is_open)]

    ids = self.issuesnapshot_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT_COLS[1:],
        issuesnapshot_rows,
        replace=True, commit=commit,
        return_generated_ids=True)
    issuesnapshot_id = ids[0]

    # Add all labels to IssueSnapshot2Label.
    label_rows = [
        (issuesnapshot_id,
         self.config_service.LookupLabelID(cnxn, issue.project_id, label))
        for label in tracker_bizobj.GetLabels(issue)]
    self.issuesnapshot2label_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2LABEL_COLS,
        label_rows, replace=True, commit=commit)

    # Add all CCs to IssueSnapshot2Cc.
    cc_rows = [
        (issuesnapshot_id, cc_id)
        for cc_id in tracker_bizobj.GetCcIds(issue)]
    self.issuesnapshot2cc_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2CC_COLS,
        cc_rows, replace=True, commit=commit)

    # Add all components to IssueSnapshot2Component.
    component_rows = [
        (issuesnapshot_id, component_id)
        for component_id in issue.component_ids]
    self.issuesnapshot2component_tbl.InsertRows(
        cnxn, ISSUESNAPSHOT2COMPONENT_COLS,
        component_rows, replace=True, commit=commit)

    # Add all hotlists that contain this issue to IssueSnapshot2Hotlist.
    # This is raw SQL to obviate passing FeaturesService down through
    # the call stack wherever this function is called.
    # TODO(jrobbins): sort out dependencies between service classes.
    cnxn.Execute('''
        INSERT INTO IssueSnapshot2Hotlist (issuesnapshot_id, hotlist_id)
        SELECT %s, hotlist_id FROM Hotlist2Issue WHERE issue_id = %s
        ''', [issuesnapshot_id, issue.issue_id])
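# Rough sketch (in-memory stand-in for the IssueSnapshot table) of the period
# chaining above: the latest snapshot's period_end is closed off at right_now,
# and the new row runs from right_now to an assumed sentinel standing in for
# settings.maximum_snapshot_period_end.
MAX_PERIOD_END = 4294967295  # hypothetical sentinel value.

def _SketchStoreSnapshot(snapshot_rows, issue_id, right_now):
  # Find the latest existing snapshot for this issue and close it off.
  previous = [row for row in snapshot_rows if row['issue_id'] == issue_id]
  if previous:
    latest = max(previous, key=lambda row: row['period_start'])
    latest['period_end'] = right_now
  snapshot_rows.append({'issue_id': issue_id,
                        'period_start': right_now,
                        'period_end': MAX_PERIOD_END})

rows = []
_SketchStoreSnapshot(rows, 78901, right_now=1000)
_SketchStoreSnapshot(rows, 78901, right_now=2000)
print(rows)
# -> [{'issue_id': 78901, 'period_start': 1000, 'period_end': 2000},
#     {'issue_id': 78901, 'period_start': 2000, 'period_end': 4294967295}]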