def handle_timeline_events(self, session, resource, author, subscribers):
    """Record a SubscriptionEvents row for each subscriber of a timeline event.

    Subscribers who opted out of worklist notifications are skipped for
    worklist-related events, as are subscribers who are not permitted to
    view the underlying event.
    """
    for subscriber_id in subscribers:
        subscriber = db_api.entity_get(models.User, subscriber_id,
                                       session=session)
        worklist_pref = get_preference(
            'receive_notifications_worklists', subscriber)
        if resource.get('worklist_id') is not None and worklist_pref != 'true':
            continue

        if resource['event_type'] == 'user_comment':
            info = json.dumps(
                self.resolve_comments(session=session, event=resource))
        else:
            info = resource['event_info']

        # Don't send a notification if the user isn't allowed to see the
        # thing this event is about.
        visibility_event = events_api.event_get(
            resource['id'], current_user=subscriber_id, session=session)
        if not events_api.is_visible(visibility_event, subscriber_id,
                                     session=session):
            continue

        db_api.entity_create(models.SubscriptionEvents, {
            "author_id": author.id,
            "subscriber_id": subscriber_id,
            "event_type": resource['event_type'],
            "event_info": info
        }, session=session)
def handle_resources(self, session, method, resource_id, sub_resource_id,
                     author, subscribers):
    """Record SubscriptionEvents rows for project_group resource changes.

    With a sub-resource, the event describes a project being added to or
    removed from the group; without one, only DELETE (group removal) is
    recorded.
    """
    if sub_resource_id:
        if method == 'DELETE':
            event_type = 'project removed from project_group'
        else:
            event_type = 'project added to project_group'
        # The payload is identical for both event types.
        event_info = json.dumps({'project_group_id': resource_id,
                                 'project_id': sub_resource_id})
        for subscriber_id in subscribers:
            db_api.entity_create(models.SubscriptionEvents, {
                "author_id": author.id,
                "subscriber_id": subscriber_id,
                "event_type": event_type,
                "event_info": event_info
            }, session=session)
    elif method == 'DELETE':
        # Handling project_group targeted.
        group_info = json.dumps({'project_group_id': resource_id})
        for subscriber_id in subscribers:
            db_api.entity_create(models.SubscriptionEvents, {
                "author_id": author.id,
                "subscriber_id": subscriber_id,
                "event_type": 'project_group deleted',
                "event_info": group_info
            }, session=session)
def handle_timeline_events(self, session, resource, author, subscribers):
    """Fan a timeline event out to its subscribers as SubscriptionEvents."""
    is_comment = resource['event_type'] == 'user_comment'
    for user_id in subscribers:
        recipient = db_api.entity_get(models.User, user_id, session=session)
        opted_in = get_preference(
            'receive_notifications_worklists', recipient) == 'true'
        # Worklist-related events respect the per-user notification pref.
        if not opted_in and resource.get('worklist_id') is not None:
            continue

        if is_comment:
            event_info = json.dumps(
                self.resolve_comments(session=session, event=resource))
        else:
            event_info = resource['event_info']

        # Skip subscribers who may not view the event in question.
        event = events_api.event_get(resource['id'], current_user=user_id,
                                     session=session)
        if not events_api.is_visible(event, user_id, session=session):
            continue

        db_api.entity_create(models.SubscriptionEvents, {
            "author_id": author.id,
            "subscriber_id": user_id,
            "event_type": resource['event_type'],
            "event_info": event_info
        }, session=session)
def user_update_preferences(user_id, preferences):
    """Apply a dict of preference changes for a user.

    A value of None deletes the stored preference; a changed value updates
    it; a value for a missing key creates it.

    :param user_id: ID of the user whose preferences are updated.
    :param preferences: Mapping of preference key to new value (or None).
    :return: The user's full preference mapping after the changes.
    """
    for key, value in preferences.items():
        prefs = api_base.entity_get_all(models.UserPreference,
                                        user_id=user_id,
                                        key=key)
        pref = prefs[0] if prefs else None

        # If the preference exists and it's null.
        if pref and value is None:
            api_base.entity_hard_delete(models.UserPreference, pref.id)
            continue

        # If the preference exists and has a new value.
        # BUGFIX: compare against None (not truthiness) so falsy values
        # such as False, 0 or "" are still written through.
        if pref and value is not None and pref.cast_value != value:
            pref.cast_value = value
            api_base.entity_update(models.UserPreference, pref.id,
                                   dict(pref))
            continue

        # If the preference does not exist and a new value exists.
        if not pref and value is not None:
            api_base.entity_create(models.UserPreference, {
                'user_id': user_id,
                'key': key,
                'cast_value': value
            })

    return user_get_preferences(user_id)
def comment_update(comment_id, values):
    """Update a comment, archiving its previous content first.

    :param comment_id: ID of the comment to change.
    :param values: New field values for the comment.
    :return: The updated Comment entity.
    """
    current = api_base.entity_get(models.Comment, comment_id)
    # Preserve the pre-edit content so the edit history can be shown.
    api_base.entity_create(models.HistoricalComment, {
        'comment_id': comment_id,
        'content': current.content
    })
    return api_base.entity_update(models.Comment, comment_id, values)
def add_lane(board, lane_dict):
    """Attach a new lane (a worklist) to the given board.

    :param board: The board entity the lane is added to.
    :param lane_dict: Field values for the new BoardWorklist; must contain
                      a 'list_id'. 'position' defaults to the end of the
                      board's existing lanes.
    :return: The board the lane was added to.
    :raises ClientSideError: If no worklist ('list_id') is given.
    """
    # Make sure we're adding the lane to the right board
    lane_dict['board_id'] = board.id

    if lane_dict.get('list_id') is None:
        raise ClientSideError(_("A lane must have a worklist_id."))

    if lane_dict.get('position') is None:
        # No position given: append after the board's current lanes.
        lane_dict['position'] = len(board.lanes)

    api_base.entity_create(models.BoardWorklist, lane_dict)

    return board
def create_filter(worklist_id, filter_dict):
    """Create a WorklistFilter plus its FilterCriterion children.

    :param worklist_id: ID of the worklist the filter belongs to.
    :param filter_dict: Filter fields, including a 'filter_criteria' list.
    :return: The created WorklistFilter with its criteria attached.
    """
    criteria = filter_dict.pop('filter_criteria')
    filter_dict['list_id'] = worklist_id

    new_filter = api_base.entity_create(models.WorklistFilter, filter_dict)
    # Re-fetch so we hold a fully-populated entity to hang criteria off.
    new_filter = api_base.entity_get(models.WorklistFilter, new_filter.id)

    new_filter.criteria = []
    for criterion in criteria:
        criterion_fields = criterion.as_dict()
        criterion_fields['filter_id'] = new_filter.id
        created = api_base.entity_create(models.FilterCriterion,
                                         criterion_fields)
        new_filter.criteria.append(created)
    return new_filter
def access_token_create(values):
    """Create an AccessToken, stamping its creation and expiry times.

    :param values: Token fields; must contain 'expires_in' (seconds).
    :return: The created AccessToken entity.
    """
    # Capture a single timestamp so created_at and expires_at are based on
    # exactly the same instant (previously now() was called twice).
    now = datetime.datetime.now(pytz.utc)
    values['created_at'] = now
    values['expires_at'] = now + datetime.timedelta(
        seconds=values['expires_in'])
    return api_base.entity_create(models.AccessToken, values)
def event_create(values):
    """Create a TimeLineEvent, touch related timestamps, and optionally
    publish a notification about it.

    :param values: Field values for the new TimeLineEvent.
    :return: The created TimeLineEvent entity.
    """
    new_event = api_base.entity_create(models.TimeLineEvent, values)

    if new_event:
        if new_event.story_id is not None:
            # Keep the parent story's updated_at in step with its events.
            stories_api.story_update_updated_at(new_event.story_id)
    # TODO(SotK): Update board and worklist updated_at when they get events

    if CONF.enable_notifications:
        # Build the payload. Use of None is included to ensure that we don't
        # accidentally blow up the API call, but we don't anticipate it
        # happening.
        event_dict = tojson(TimeLineEvent,
                            TimeLineEvent.from_db_model(new_event))

        publish(author_id=request.current_user_id or None,
                method="POST",
                url=request.headers.get('Referer') or None,
                path=request.path or None,
                query_string=request.query_string or None,
                status=response.status_code or None,
                resource="timeline_event",
                resource_id=new_event.id or None,
                resource_after=event_dict or None)

    return new_event
def build_tag(self, tag_name):
    """Retrieve the SQLAlchemy record for the given tag name, creating it
    if necessary.

    :param tag_name: Name of the tag to retrieve and/or create.
    :return: The SQLAlchemy entity corresponding to the tag name.
    """
    if tag_name in self._tag_map:
        return self._tag_map[tag_name]

    # Not cached yet - does it exist in the database?
    tag = db_api.model_query(StoryTag, self.session) \
        .filter_by(name=tag_name) \
        .first()
    if tag is None:
        # Go ahead and create it.
        print("Importing tag '%s'" % tag_name)
        tag = db_api.entity_create(StoryTag, {
            'name': tag_name
        }, session=self.session)

    # Add it to our memory cache
    self._tag_map[tag_name] = tag
    return self._tag_map[tag_name]
def update_filter(filter_id, update):
    """Update a WorklistFilter and reconcile its set of criteria.

    Criteria in the update that carry an id replace the matching stored
    criterion; criteria without an id are created and attached. Stored
    criteria absent from the update are removed.

    :param filter_id: ID of the WorklistFilter to update.
    :param update: New filter fields, optionally with 'filter_criteria'.
    :return: The updated WorklistFilter entity.
    """
    old_filter = api_base.entity_get(models.WorklistFilter, filter_id)
    if 'filter_criteria' in update:
        # Change the criteria for this filter. If an ID is provided, change
        # the criterion to match the provided criterion. If no ID is provided,
        # create a new criterion and add it to the filter.
        for criterion in update['filter_criteria']:
            criterion_dict = criterion.as_dict(omit_unset=True)
            if 'id' in criterion_dict:
                criterion_id = criterion_dict.pop('id')
                api_base.entity_update(models.FilterCriterion,
                                       criterion_id,
                                       criterion_dict)
            else:
                created = api_base.entity_create(models.FilterCriterion,
                                                 criterion_dict)
                # BUGFIX: record the new criterion's id (not the entity
                # itself) so the pruning pass below compares ids to ids and
                # doesn't delete the criterion we just created.
                criterion.id = created.id
                old_filter.criteria.append(created)

        # Remove criteria which aren't in the provided set.
        new_ids = [criterion.id for criterion in update['filter_criteria']]
        # BUGFIX: iterate over a copy - removing from the list while
        # iterating it skips elements.
        for criterion in list(old_filter.criteria):
            if criterion.id not in new_ids:
                old_filter.criteria.remove(criterion)

        del update['filter_criteria']
    return api_base.entity_update(models.WorklistFilter, filter_id, update)
def handle_timeline_events(self, session, event, author, subscribers):
    """Record a SubscriptionEvents row for every subscriber of `event`."""
    is_comment = event.event_type == 'user_comment'
    for subscriber_id in subscribers:
        if is_comment:
            info = json.dumps(
                self.resolve_comments(session=session, event=event))
        else:
            info = event.event_info

        db_api.entity_create(models.SubscriptionEvents, {
            "author_id": author.id,
            "subscriber_id": subscriber_id,
            "event_type": event.event_type,
            "event_info": info
        }, session=session)
def task_create(values):
    """Create a task and bump updated_at on its story and project.

    :param values: Field values for the new Task.
    :return: The created Task entity (falsy if creation failed).
    """
    new_task = api_base.entity_create(models.Task, values)
    if not new_task:
        return new_task
    stories_api.story_update_updated_at(new_task.story_id)
    # Update updated_at in projects when task is created
    projects_api.project_update_updated_at(new_task.project_id)
    return new_task
def write_user(self, lp_user):
    """Writes a launchpad user record into our user cache, resolving the
    openid if necessary.

    :param lp_user: The launchpad user record.
    :return: The SQLAlchemy entity for the user record.
    """
    try:
        display_name = lp_user.display_name
        user_link = lp_user.web_link
    except errors.HTTPError:
        # Attribute access on a disabled/suspended Launchpad account raises
        # an HTTP error; substitute placeholder identity values so the
        # import can continue.
        display_name = "Disabled Launchpad User"
        user_link = "000000000000000000000"

    # Resolve the openid.
    if user_link not in self._openid_map:
        try:
            openid_consumer = consumer.Consumer(
                dict(id=cryptutil.randomString(16, '0123456789abcdef')),
                None)
            openid_request = openid_consumer.begin(user_link)
            openid = openid_request.endpoint.getLocalID()
            # Launchpad logins moved to login.ubuntu.com; normalize so the
            # stored openid matches what users actually log in with.
            openid = openid.replace(
                'login.launchpad.net', 'login.ubuntu.com')
            self._openid_map[user_link] = openid
        except DiscoveryFailure:
            # If we encounter a launchpad maintenance user,
            # give it an invalid openid.
            print("WARNING: Invalid OpenID for user \'%s\'"
                  % (display_name,))
            self._openid_map[user_link] = \
                'http://example.com/invalid/~%s' % (display_name,)

    openid = self._openid_map[user_link]

    # Resolve the user record from the openid.
    if openid not in self._user_map:
        # Check for the user, create if new.
        user = db_api.model_query(User, self.session) \
            .filter_by(openid=openid) \
            .first()
        if not user:
            print("Importing user '%s'" % (user_link))
            # Use a temporary email address, since LP won't give this to
            # us and it'll be updated on first login anyway.
            # NOTE(review): the email template below appears redacted in
            # this copy of the source; confirm it interpolates both the
            # display name and the uuid.
            user = db_api.entity_create(User, {
                'openid': openid,
                'full_name': display_name,
                'email': "*****@*****.**" % (display_name, uuid.uuid4())
            }, session=self.session)
        self._user_map[openid] = user

    return self._user_map[openid]
def add_item(worklist_id, item_id, item_type, list_position,
             current_user=None):
    """Add a story or task card to a worklist.

    If the item already has an archived card in this worklist (or, for a
    board lane, anywhere on the board), that card is restored instead of a
    new one being created.

    :param worklist_id: ID of the worklist to add the item to.
    :param item_id: ID of the story or task being added.
    :param item_type: Either 'story' or 'task'.
    :param list_position: Position of the new card within the worklist.
    :param current_user: User on whose behalf visibility checks are made.
    :return: The worklist the item was added to.
    :raises exc.NotFound: If the worklist or the item does not exist (or is
                          not visible to current_user).
    :raises ClientSideError: If item_type is not 'story' or 'task'.
    """
    worklist = _worklist_get(worklist_id)
    if worklist is None:
        raise exc.NotFound(_("Worklist %s not found") % worklist_id)

    # Check if this item has an archived card in this worklist to restore
    archived = get_item_by_item_id(
        worklist, item_type, item_id, archived=True)
    if archived:
        update = {
            'archived': False,
            'list_position': list_position
        }
        api_base.entity_update(models.WorklistItem, archived.id, update)
        return worklist

    # If this worklist is a lane, check if the item has an archived card
    # somewhere in the board to restore
    if is_lane(worklist):
        board = boards.get_from_lane(worklist)
        archived = boards.get_card(board, item_type, item_id, archived=True)
        if archived:
            # Restoring also moves the card into this lane.
            update = {
                'archived': False,
                'list_id': worklist_id,
                'list_position': list_position
            }
            api_base.entity_update(models.WorklistItem, archived.id, update)
            return worklist

    # Create a new card
    if item_type == 'story':
        item = stories_api.story_get(item_id, current_user=current_user)
    elif item_type == 'task':
        item = tasks_api.task_get(item_id, current_user=current_user)
    else:
        raise ClientSideError(_("An item in a worklist must be either a "
                                "story or a task"))

    if item is None:
        raise exc.NotFound(_("%(type)s %(id)s not found") %
                           {'type': item_type, 'id': item_id})

    item_dict = {
        'list_id': worklist_id,
        'item_id': item_id,
        'item_type': item_type,
        'list_position': list_position
    }
    worklist_item = api_base.entity_create(models.WorklistItem, item_dict)

    if worklist.items is None:
        worklist.items = [worklist_item]
    else:
        worklist.items.append(worklist_item)

    return worklist
def write_user(self, lp_user):
    """Writes a launchpad user record into our user cache, resolving the
    openid if necessary.

    :param lp_user: The launchpad user record (may be None).
    :return: The SQLAlchemy entity for the user record, or None if lp_user
             is None.
    """
    if lp_user is None:
        return lp_user

    display_name = lp_user.display_name
    user_link = lp_user.web_link

    # Resolve the openid.
    if user_link not in self._openid_map:
        try:
            openid_consumer = consumer.Consumer(
                dict(id=cryptutil.randomString(16, '0123456789abcdef')),
                None)
            openid_request = openid_consumer.begin(user_link)
            openid = openid_request.endpoint.getLocalID()
            self._openid_map[user_link] = openid
        except DiscoveryFailure:
            # If we encounter a launchpad maintenance user,
            # give it an invalid openid.
            # BUGFIX: converted Python 2 print statements to print()
            # calls, consistent with the rest of the file and valid
            # under Python 3.
            print("WARNING: Invalid OpenID for user \'%s\'"
                  % (display_name,))
            self._openid_map[user_link] = \
                'http://example.com/invalid/~%s' % (display_name,)

    openid = self._openid_map[user_link]

    # Resolve the user record from the openid.
    if openid not in self._user_map:
        # Check for the user, create if new.
        user = db_api.model_query(User, self.session) \
            .filter_by(openid=openid) \
            .first()
        if not user:
            print("Importing user '%s'" % (user_link))
            # Use a temporary email address, since LP won't give this to
            # us and it'll be updated on first login anyway.
            user = db_api.entity_create(
                User, {
                    'openid': openid,
                    'full_name': display_name,
                    'email': "*****@*****.**" % (display_name)
                }, session=self.session)
        self._user_map[openid] = user

    return self._user_map[openid]
def create_permission(board_id, permission_dict, session=None):
    """Create a Permission for a board and grant it to the listed users.

    :param board_id: ID of the board the permission applies to.
    :param permission_dict: Permission fields plus a 'users' id list.
    :param session: Optional database session.
    :return: The created Permission entity.
    """
    board = _board_get(board_id, session=session)
    user_ids = permission_dict.pop('users')
    new_permission = api_base.entity_create(
        models.Permission, permission_dict, session=session)
    board.permissions.append(new_permission)
    for uid in user_ids:
        grantee = users_api.user_get(uid, session=session)
        grantee.permissions.append(new_permission)
    return new_permission
def refresh_token_create(values):
    """Create a RefreshToken with a computed expiry timestamp.

    :param values: Token fields; must contain 'expires_in' (seconds).
    :return: The created RefreshToken entity.
    """
    session = api_base.get_session()
    with session.begin(subtransactions=True):
        lifetime = datetime.timedelta(seconds=values['expires_in'])
        values['expires_at'] = datetime.datetime.now(pytz.utc) + lifetime
        refresh_token = api_base.entity_create(models.RefreshToken, values)
    return refresh_token
def create_permission(worklist_id, permission_dict, session=None):
    """Create a Permission for a worklist and grant it to the listed users.

    :param worklist_id: ID of the worklist the permission applies to.
    :param permission_dict: Permission fields plus a 'users' id list.
    :param session: Optional database session.
    :return: The created Permission entity.
    """
    worklist = _worklist_get(worklist_id, session=session)
    user_ids = permission_dict.pop('users')
    new_permission = api_base.entity_create(
        models.Permission, permission_dict, session=session)
    worklist.permissions.append(new_permission)
    for uid in user_ids:
        grantee = users_api.user_get(uid, session=session)
        grantee.permissions.append(new_permission)
    return new_permission
def create_permission(due_date_id, permission_dict, session=None):
    """Create a Permission for a due date and grant it to the listed users.

    :param due_date_id: ID of the due date the permission applies to.
    :param permission_dict: Permission fields plus a 'users' id list.
    :param session: Optional database session.
    :return: The created Permission entity.
    """
    due_date = _due_date_get(due_date_id, session=session)
    user_ids = permission_dict.pop('users')
    new_permission = api_base.entity_create(
        models.Permission, permission_dict, session=session)
    due_date.permissions.append(new_permission)
    for uid in user_ids:
        grantee = users_api.user_get(uid, session=session)
        grantee.permissions.append(new_permission)
    return new_permission
def user_update_preferences(user_id, preferences):
    """Apply a dict of preference changes for a user.

    A value of None deletes the stored preference; a changed value updates
    it; a value for a missing key creates it.

    :param user_id: ID of the user whose preferences are updated.
    :param preferences: Mapping of preference key to new value (or None).
    :return: The user's full preference mapping after the changes.
    """
    for key in preferences:
        value = preferences[key]
        prefs = api_base.entity_get_all(models.UserPreference,
                                        user_id=user_id,
                                        key=key)
        matching_prefs = []
        if prefs:
            for p in prefs:
                if p.key == key:
                    # FIXME: We create a list here because there appears to
                    # currently be a bug which means that each preference may
                    # appear more than once per-user. We should fix that once
                    # we discover the cause.
                    matching_prefs.append(p)
        else:
            pref = None

        for pref in matching_prefs:
            # If the preference exists and it's null.
            if pref and value is None:
                api_base.entity_hard_delete(models.UserPreference, pref.id)
                continue

            # If the preference exists and has a new value.
            if pref and value is not None and pref.cast_value != value:
                pref.cast_value = value
                api_base.entity_update(
                    models.UserPreference, pref.id, dict(pref))
                continue

        # If the preference does not exist and a new value exists.
        if not matching_prefs and value is not None:
            api_base.entity_create(models.UserPreference, {
                'user_id': user_id,
                'key': key,
                'cast_value': value
            })

    return user_get_preferences(user_id)
def create_permission(story, users, session=None):
    """Create the view permission for a story and grant it to `users`.

    :param story: The story the permission protects.
    :param users: Users who may view the story.
    :param session: Optional database session.
    :return: The created Permission entity.
    """
    # Re-fetch the story with its tags eagerly loaded.
    story = api_base.model_query(models.Story, session) \
        .options(subqueryload(models.Story.tags)) \
        .filter_by(id=story.id).first()

    new_permission = api_base.entity_create(models.Permission, {
        'name': 'view_story_%d' % story.id,
        'codename': 'view_story'
    })
    story.permissions.append(new_permission)
    for user in users:
        grantee = users_api.user_get(user.id)
        grantee.permissions.append(new_permission)

    return new_permission
def event_create(values):
    """Create a TimeLineEvent and publish a notification if enabled.

    :param values: Field values for the new TimeLineEvent.
    :return: The created TimeLineEvent entity.
    """
    created = api_base.entity_create(models.TimeLineEvent, values)

    if CONF.enable_notifications:
        # Build the payload. Use of None is included to ensure that we don't
        # accidentally blow up the API call, but we don't anticipate it
        # happening.
        payload = tojson(TimeLineEvent,
                         TimeLineEvent.from_db_model(created))
        publish(author_id=request.current_user_id or None,
                method="POST",
                path=request.path or None,
                status=response.status_code or None,
                resource="timeline_event",
                resource_id=created.id or None,
                resource_after=payload or None)

    return created
def update_filter(filter_id, update):
    """Update a WorklistFilter and reconcile its set of criteria.

    Criteria in the update that carry an id replace the matching stored
    criterion (when changed); criteria without an id are created and
    attached. Stored criteria absent from the update are removed.

    :param filter_id: ID of the WorklistFilter to update.
    :param update: New filter fields, optionally with 'filter_criteria'.
    :return: The updated WorklistFilter entity.
    """
    old_filter = api_base.entity_get(models.WorklistFilter, filter_id)
    if 'filter_criteria' in update:
        kept_ids = []
        for criterion in update['filter_criteria']:
            criterion_dict = criterion.as_dict(omit_unset=True)
            if 'id' in criterion_dict:
                # BUGFIX: read the id from the serialized dict; indexing
                # the wsme object itself (criterion['id']) is not valid.
                criterion_id = criterion_dict['id']
                existing = api_base.entity_get(models.FilterCriterion,
                                               criterion_id)
                if existing.as_dict() != criterion_dict:
                    api_base.entity_update(models.FilterCriterion,
                                           criterion_id,
                                           criterion_dict)
                kept_ids.append(criterion_id)
            else:
                created = api_base.entity_create(models.FilterCriterion,
                                                 criterion_dict)
                old_filter.criteria.append(created)
                # BUGFIX: track the freshly created criterion's id so the
                # pruning pass below doesn't immediately delete it.
                kept_ids.append(created.id)

        # Remove criteria which aren't in the provided set. Iterate a copy
        # since we mutate the list while walking it.
        for criterion in list(old_filter.criteria):
            if criterion.id not in kept_ids:
                old_filter.criteria.remove(criterion)

        del update['filter_criteria']
    return api_base.entity_update(models.WorklistFilter, filter_id, update)
def build_tag(self, tag_name):
    """Retrieve the SQLAlchemy record for the given tag name, creating it
    if necessary.

    :param tag_name: Name of the tag to retrieve and/or create.
    :return: The SQLAlchemy entity corresponding to the tag name.
    """
    if tag_name not in self._tag_map:
        # Does it exist in the database?
        tag = db_api.model_query(StoryTag, self.session) \
            .filter_by(name=tag_name) \
            .first()
        if not tag:
            # Go ahead and create it.
            # BUGFIX: converted the Python 2 print statement to a print()
            # call, consistent with the rest of the file and valid under
            # Python 3.
            print("Importing tag '%s'" % (tag_name))
            tag = db_api.entity_create(StoryTag, {'name': tag_name},
                                       session=self.session)

        # Add it to our memory cache
        self._tag_map[tag_name] = tag

    return self._tag_map[tag_name]
def project_create(values):
    """Create a project together with its default 'master' branch.

    :param values: Field values for the new Project.
    :return: The created Project entity.
    """
    new_project = api_base.entity_create(models.Project, values)
    # Every project starts out with a master branch.
    master = MasterBranchHelper(new_project["id"])
    branches_api.branch_create(master.as_dict())
    return new_project
def story_create(values):
    """Persist and return a new Story built from ``values``."""
    new_story = api_base.entity_create(models.Story, values)
    return new_story
def create(values):
    """Persist and return a new DueDate built from ``values``."""
    new_due_date = api_base.entity_create(models.DueDate, values)
    return new_due_date
def create(values):
    """Persist and return a new Worklist built from ``values``."""
    new_worklist = api_base.entity_create(models.Worklist, values)
    return new_worklist
def comment_create(values):
    """Persist and return a new Comment built from ``values``."""
    new_comment = api_base.entity_create(models.Comment, values)
    return new_comment
def write_bug(self, owner, assignee, priority, status, tags, bug):
    """Writes the story, task, task history, and conversation.

    BUGFIX: converted all Python 2 print statements to print() calls,
    consistent with the rest of the file and valid under Python 3.

    :param owner: The bug owner SQLAlchemy entity.
    :param tags: The tag SQLAlchemy entities.
    :param bug: The Launchpad Bug record.
    """
    if hasattr(bug, 'date_created'):
        created_at = bug.date_created
    else:
        created_at = None

    if hasattr(bug, 'date_last_updated'):
        updated_at = bug.date_last_updated
    else:
        updated_at = None

    # Extract the launchpad ID from the self link.
    # example url: https://api.launchpad.net/1.0/bugs/1057477
    url_match = re.search("([0-9]+)$", str(bug.self_link))
    if not url_match:
        print('ERROR: Unable to extract launchpad ID from %s.'
              % (bug.self_link,))
        print('ERROR: Please file a ticket.')
        return
    launchpad_id = int(url_match.groups()[0])

    # If the title is too long, prepend it to the description and
    # truncate it.
    title = bug.title
    description = bug.description
    if len(title) > 100:
        title = title[:97] + '...'
        description = bug.title + '\n\n' + description

    # Sanity check.
    story = {
        'id': launchpad_id,
        'description': description,
        'created_at': created_at,
        'creator': owner,
        'is_bug': True,
        'title': title,
        'updated_at': updated_at,
        'tags': tags
    }

    duplicate = db_api.entity_get(Story, launchpad_id,
                                  session=self.session)
    if not duplicate:
        print("Importing Story: %s" % (bug.self_link,))
        story = db_api.entity_create(Story, story, session=self.session)
    else:
        print("Existing Story: %s" % (bug.self_link,))
        story = duplicate

    # Duplicate check- launchpad import creates one task per story,
    # so if we already have a project task on this story, skip it. This
    # is to properly replay imports in the case where errors occurred
    # during import.
    existing_task = db_api.model_query(Task, session=self.session) \
        .filter(Task.story_id == launchpad_id) \
        .filter(Task.project_id == self.project.id) \
        .first()
    if not existing_task:
        print("- Adding task in project %s" % (self.project.name,))
        task = db_api.entity_create(
            Task, {
                'title': title,
                'assignee_id': assignee.id if assignee else None,
                'project_id': self.project.id,
                'story_id': launchpad_id,
                'created_at': created_at,
                'updated_at': updated_at,
                'priority': priority,
                'status': status
            }, session=self.session)
    else:
        print("- Existing task in %s" % (self.project.name,))
        task = existing_task

    # Duplication Check - If this story already has a creation event,
    # we don't need to create a new one. Otherwise, create it manually so
    # we don't trigger event notifications.
    story_created_event = db_api \
        .model_query(TimeLineEvent, session=self.session) \
        .filter(TimeLineEvent.story_id == launchpad_id) \
        .filter(TimeLineEvent.event_type == event_types.STORY_CREATED) \
        .first()
    if not story_created_event:
        print("- Generating story creation event")
        db_api.entity_create(TimeLineEvent, {
            'story_id': launchpad_id,
            'author_id': owner.id,
            'event_type': event_types.STORY_CREATED,
            'created_at': created_at
        }, session=self.session)

    # Create the creation event for the task, but only if we just created
    # a new task.
    if not existing_task:
        print("- Generating task creation event")
        db_api.entity_create(TimeLineEvent, {
            'story_id': launchpad_id,
            'author_id': owner.id,
            'event_type': event_types.TASK_CREATED,
            'created_at': created_at,
            'event_info': json.dumps({
                'task_id': task.id,
                'task_title': title
            })
        }, session=self.session)

    # Create the discussion, loading any existing comments first.
    current_count = db_api \
        .model_query(TimeLineEvent, session=self.session) \
        .filter(TimeLineEvent.story_id == launchpad_id) \
        .filter(TimeLineEvent.event_type == event_types.USER_COMMENT) \
        .count()
    desired_count = len(bug.messages)
    print("- %s of %s comments already imported."
          % (current_count, desired_count))
    for i in range(current_count, desired_count):
        print('- Importing comment %s of %s' % (i + 1, desired_count))
        message = bug.messages[i]
        message_created_at = message.date_created
        message_owner = self.write_user(message.owner)

        comment = db_api.entity_create(Comment, {
            'content': message.content,
            'created_at': message_created_at
        }, session=self.session)
        db_api.entity_create(TimeLineEvent, {
            'story_id': launchpad_id,
            'author_id': message_owner.id,
            'event_type': event_types.USER_COMMENT,
            'comment_id': comment.id,
            'created_at': message_created_at
        }, session=self.session)
def user_create(values):
    """Persist and return a new User built from ``values``."""
    new_user = api_base.entity_create(models.User, values)
    return new_user
def task_create(values):
    """Persist and return a new Task built from ``values``."""
    new_task = api_base.entity_create(models.Task, values)
    return new_task
def team_create(values):
    """Persist and return a new Team built from ``values``."""
    new_team = api_base.entity_create(models.Team, values)
    return new_team
def write_bug(self, owner, assignee, priority, status, tags, bug,
              branches):
    """Writes the story, task, task history, and conversation.

    :param owner: The bug owner SQLAlchemy entity.
    :param tags: The tag SQLAlchemy entities.
    :param bug: The Launchpad Bug record.
    """
    # Checks to make sure that the branch for the bug exists
    for branch in branches:
        if not self.check_branch(branch):
            print('No %s branch found for %s project. Creating one now.'
                  % (branch, self.project.name))
            db_api.entity_create(Branch, {
                'name': branch,
                'project_id': self.project.id
            }, session=self.session)

    if hasattr(bug, 'date_created'):
        created_at = bug.date_created
    else:
        created_at = None

    if hasattr(bug, 'date_last_updated'):
        updated_at = bug.date_last_updated
    else:
        updated_at = None

    # Extract the launchpad ID from the self link.
    # example url: https://api.launchpad.net/1.0/bugs/1057477
    url_match = re.search("([0-9]+)$", str(bug.self_link))
    if not url_match:
        print('ERROR: Unable to extract launchpad ID from %s.'
              % (bug.self_link,))
        print('ERROR: Please file a ticket.')
        return
    launchpad_id = int(url_match.groups()[0])

    # If the title is too long, prepend it to the description and
    # truncate it.
    title = bug.title
    description = bug.description
    if len(title) > 100:
        title = title[:97] + '...'
        description = bug.title + '\n\n' + description

    # Create priority tag
    tags.append(self.build_priority_tag(priority))

    # Sanity check.
    story = {
        'id': launchpad_id,
        'description': description,
        'created_at': created_at,
        'creator': owner,
        'is_bug': True,
        'title': title,
        'updated_at': updated_at,
        'tags': tags
    }

    duplicate = db_api.entity_get(Story, launchpad_id,
                                  session=self.session)
    if not duplicate:
        print("Importing Story: %s" % (bug.self_link,))
        story = db_api.entity_create(Story, story, session=self.session)
    else:
        print("Existing Story: %s" % (bug.self_link,))
        story = duplicate

    # Duplicate check- launchpad import creates one task per story,
    # so if we already have a project task on this story, skip it. This
    # is to properly replay imports in the case where errors occurred
    # during import.
    existing_task = db_api.model_query(Task, session=self.session) \
        .filter(Task.story_id == launchpad_id) \
        .filter(Task.project_id == self.project.id) \
        .first()
    if not existing_task:
        print("- Adding task in project %s" % (self.project.name,))
        # NOTE(review): one task is created per branch, but only the
        # last one created remains bound to `task` for the creation
        # event below - confirm that is the intended behavior.
        for branch in branches:
            task = db_api.entity_create(Task, {
                'title': title,
                'assignee_id': assignee.id if assignee else None,
                'project_id': self.project.id,
                'branch_id': self.get_branch(branch).id,
                'story_id': launchpad_id,
                'created_at': created_at,
                'updated_at': updated_at,
                'priority': priority,
                'status': status
            }, session=self.session)
    else:
        print("- Existing task in %s" % (self.project.name,))
        task = existing_task

    # Duplication Check - If this story already has a creation event,
    # we don't need to create a new one. Otherwise, create it manually so
    # we don't trigger event notifications.
    story_created_event = db_api \
        .model_query(TimeLineEvent, session=self.session) \
        .filter(TimeLineEvent.story_id == launchpad_id) \
        .filter(TimeLineEvent.event_type == event_types.STORY_CREATED) \
        .first()
    if not story_created_event:
        print("- Generating story creation event")
        db_api.entity_create(TimeLineEvent, {
            'story_id': launchpad_id,
            'author_id': owner.id,
            'event_type': event_types.STORY_CREATED,
            'created_at': created_at
        }, session=self.session)

    # Create the creation event for the task, but only if we just created
    # a new task.
    if not existing_task:
        print("- Generating task creation event")
        db_api.entity_create(TimeLineEvent, {
            'story_id': launchpad_id,
            'author_id': owner.id,
            'event_type': event_types.TASK_CREATED,
            'created_at': created_at,
            'event_info': json.dumps({
                'task_id': task.id,
                'task_title': title
            })
        }, session=self.session)

    # Create the discussion, loading any existing comments first.
    current_count = db_api \
        .model_query(TimeLineEvent, session=self.session) \
        .filter(TimeLineEvent.story_id == launchpad_id) \
        .filter(TimeLineEvent.event_type == event_types.USER_COMMENT) \
        .count()
    desired_count = len(bug.messages)
    print("- %s of %s comments already imported."
          % (current_count, desired_count))
    for i in range(current_count, desired_count):
        print('- Importing comment %s of %s' % (i + 1, desired_count))
        message = bug.messages[i]
        message_created_at = message.date_created
        message_owner = self.write_user(message.owner)

        comment = db_api.entity_create(Comment, {
            'content': message.content,
            'created_at': message_created_at
        }, session=self.session)
        db_api.entity_create(TimeLineEvent, {
            'story_id': launchpad_id,
            'author_id': message_owner.id,
            'event_type': event_types.USER_COMMENT,
            'comment_id': comment.id,
            'created_at': message_created_at
        }, session=self.session)
def tag_create(values):
    """Persist and return a new StoryTag built from ``values``."""
    new_tag = api_base.entity_create(models.StoryTag, values)
    return new_tag
def project_group_create(values):
    """Persist and return a new ProjectGroup built from ``values``."""
    new_group = api_base.entity_create(models.ProjectGroup, values)
    return new_group
def milestone_create(values):
    """Persist and return a new Milestone built from ``values``."""
    new_milestone = api_base.entity_create(models.Milestone, values)
    return new_milestone