def update_tasks_order_in_bulk(bulk_data: list, field: str, project: object,
                               user_story: object = None, status: object = None,
                               milestone: object = None):
    """
    Updates the order of the tasks specified adding the extra updates
    needed to keep consistency.

    `bulk_data` should be a list of dicts with the following format:

    [{'task_id': <value>, 'order': <value>}, ...]
    """
    tasks = project.tasks.all()
    if user_story is not None:
        tasks = tasks.filter(user_story=user_story)
    if status is not None:
        tasks = tasks.filter(status=status)
    if milestone is not None:
        tasks = tasks.filter(milestone=milestone)

    task_orders = {task.id: getattr(task, field) for task in tasks}
    new_task_orders = {e["task_id"]: e["order"] for e in bulk_data}
    apply_order_updates(task_orders, new_task_orders)

    task_ids = task_orders.keys()
    events.emit_event_for_ids(ids=task_ids,
                              content_type="tasks.task",
                              projectid=project.pk)

    db.update_attr_in_bulk_for_ids(task_orders, field, models.Task)

    return task_orders

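# Illustrative usage sketch (not part of the original module): assuming the
# caller already has a `project` instance and wants to reorder two tasks by an
# order field such as "taskboard_order". The ids shown are hypothetical.
#
#   bulk_data = [{"task_id": 12, "order": 1},
#                {"task_id": 15, "order": 2}]
#   update_tasks_order_in_bulk(bulk_data, field="taskboard_order", project=project)
#
# The returned dict maps every affected task id to its final order value,
# including tasks that were only shifted to keep the ordering consistent.
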
def update_userstories_order_in_bulk(bulk_data: list, field: str, project: object,
                                     status: object = None, milestone: object = None):
    """
    Updates the order of the userstories specified adding the extra updates
    needed to keep consistency.

    `field` is the order field used.
    `bulk_data` should be a list of dicts with the following format:

    [{'us_id': <value>, 'order': <value>}, ...]
    """
    user_stories = project.user_stories.all()
    if status is not None:
        user_stories = user_stories.filter(status=status)
    if milestone is not None:
        user_stories = user_stories.filter(milestone=milestone)

    us_orders = {us.id: getattr(us, field) for us in user_stories}
    new_us_orders = {e["us_id"]: e["order"] for e in bulk_data}
    apply_order_updates(us_orders, new_us_orders, remove_equal_original=True)

    user_story_ids = us_orders.keys()
    events.emit_event_for_ids(ids=user_story_ids,
                              content_type="userstories.userstory",
                              projectid=project.pk)

    db.update_attr_in_bulk_for_ids(us_orders, field, models.UserStory)

    return us_orders

def reset_userstories_kanban_order_in_bulk(project: Project,
                                           bulk_userstories: List[int]):
    """
    Reset the order of the userstories specified adding the extra updates
    needed to keep consistency.

    - `bulk_userstories` should be a list of user story IDs
    """
    base_order = models.UserStory.NEW_KANBAN_ORDER()
    data = ((id, base_order + index) for index, id in enumerate(bulk_userstories))

    sql = """
    UPDATE userstories_userstory
       SET kanban_order = tmp.new_kanban_order::BIGINT
      FROM (VALUES %s) AS tmp (id, new_kanban_order)
     WHERE tmp.id = userstories_userstory.id
    """
    with connection.cursor() as cursor:
        execute_values(cursor, sql, data)

    # Send events of updated stories
    events.emit_event_for_ids(ids=bulk_userstories,
                              content_type="userstories.userstory",
                              projectid=project.id)

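# Illustrative usage sketch (not part of the original module): resetting the
# kanban order of a handful of stories so they start at NEW_KANBAN_ORDER() and
# increase sequentially. The ids shown are hypothetical.
#
#   reset_userstories_kanban_order_in_bulk(project, bulk_userstories=[101, 102, 103])
#
# Each story receives base_order plus its position in the list, so the relative
# order of the ids passed in is preserved; the UPDATE joins against the VALUES
# list expanded by psycopg2's execute_values in a single statement.
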
def update_epic_related_userstories_order_in_bulk(bulk_data: list, epic: object):
    """
    Updates the order of the related userstories of a specific epic.

    `epic` is the epic with the related stories.
    `bulk_data` should be a list of dicts with the following format:

    [{'us_id': <value>, 'order': <value>}, ...]
    """
    related_user_stories = epic.relateduserstory_set.all()  # select_related
    rus_orders = {rus.id: rus.order for rus in related_user_stories}

    rus_conversion = {rus.user_story_id: rus.id for rus in related_user_stories}
    new_rus_orders = {rus_conversion[e["us_id"]]: e["order"]
                      for e in bulk_data if e["us_id"] in rus_conversion}

    apply_order_updates(rus_orders, new_rus_orders)

    if rus_orders:
        related_user_story_ids = rus_orders.keys()
        events.emit_event_for_ids(ids=related_user_story_ids,
                                  content_type="epics.relateduserstory",
                                  projectid=epic.project_id)

        db.update_attr_in_bulk_for_ids(rus_orders, "order", models.RelatedUserStory)

    return rus_orders

def update_tasks_milestone_in_bulk(bulk_data: list, milestone: object):
    """
    Update the milestone and the milestone order of some tasks adding
    the extra orders needed to keep consistency.

    `bulk_data` should be a list of dicts with the following format:

    [{'task_id': <value>, 'order': <value>}, ...]
    """
    tasks = milestone.tasks.all()
    task_orders = {task.id: getattr(task, "taskboard_order") for task in tasks}
    new_task_orders = {}
    for e in bulk_data:
        new_task_orders[e["task_id"]] = e["order"]
        # The base orders where we apply the new orders must contain all
        # the values
        task_orders[e["task_id"]] = e["order"]

    apply_order_updates(task_orders, new_task_orders)

    task_milestones = {e["task_id"]: milestone.id for e in bulk_data}
    task_ids = task_milestones.keys()
    events.emit_event_for_ids(ids=task_ids,
                              content_type="tasks.task",
                              projectid=milestone.project.pk)

    db.update_attr_in_bulk_for_ids(task_milestones, "milestone_id", model=models.Task)
    db.update_attr_in_bulk_for_ids(task_orders, "taskboard_order", models.Task)

    return task_milestones

def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
    """
    Update the milestone and the milestone order of some user stories adding
    the extra orders needed to keep consistency.

    `bulk_data` should be a list of dicts with the following format:

    [{'us_id': <value>, 'order': <value>}, ...]
    """
    user_stories = milestone.user_stories.all()
    us_orders = {us.id: getattr(us, "sprint_order") for us in user_stories}
    new_us_orders = {}
    for e in bulk_data:
        new_us_orders[e["us_id"]] = e["order"]
        # The base orders where we apply the new orders must contain all
        # the values
        us_orders[e["us_id"]] = e["order"]

    apply_order_updates(us_orders, new_us_orders)

    us_milestones = {e["us_id"]: milestone.id for e in bulk_data}
    user_story_ids = us_milestones.keys()
    events.emit_event_for_ids(ids=user_story_ids,
                              content_type="userstories.userstory",
                              projectid=milestone.project.pk)

    db.update_attr_in_bulk_for_ids(us_milestones, "milestone_id", model=models.UserStory)
    db.update_attr_in_bulk_for_ids(us_orders, "sprint_order", models.UserStory)

    # Update the milestone for the tasks
    Task.objects.filter(
        user_story_id__in=[e["us_id"] for e in bulk_data]).update(
            milestone=milestone)

    return us_orders

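# Illustrative usage sketch (not part of the original module): moving two user
# stories into `milestone` while giving them explicit sprint order positions.
# The ids shown are hypothetical.
#
#   update_userstories_milestone_in_bulk(
#       bulk_data=[{"us_id": 7, "order": 1}, {"us_id": 9, "order": 2}],
#       milestone=milestone)
#
# Besides setting `milestone_id` and `sprint_order` on the stories, the function
# also moves the stories' tasks to the same milestone.
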
def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
    """
    Update the milestone of some user stories.

    `bulk_data` should be a list of dicts with the following format:

    [{'us_id': <value>}, ...]
    """
    user_story_ids = [us_data["us_id"] for us_data in bulk_data]
    new_milestone_values = [{"milestone": milestone.id}] * len(user_story_ids)

    events.emit_event_for_ids(ids=user_story_ids,
                              content_type="userstories.userstory",
                              projectid=milestone.project.pk)

    db.update_in_bulk_with_ids(user_story_ids, new_milestone_values,
                               model=models.UserStory)

def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
    """
    Update the milestone of some user stories.

    `bulk_data` should be a list of dicts with the following format:

    [{'us_id': <value>}, ...]
    """
    us_milestones = {e["us_id"]: milestone.id for e in bulk_data}
    user_story_ids = us_milestones.keys()

    events.emit_event_for_ids(ids=user_story_ids,
                              content_type="userstories.userstory",
                              projectid=milestone.project.pk)

    db.update_attr_in_bulk_for_ids(us_milestones, "milestone_id", model=models.UserStory)

def bulk_update_swimlane_order(project, user, data):
    with connection.cursor() as curs:
        execute_values(
            curs,
            """
            UPDATE projects_swimlane
               SET "order" = tmp.new_order
              FROM (VALUES %s) AS tmp (id, new_order)
             WHERE tmp.id = projects_swimlane.id
            """,
            data)

    # Send event related to swimlane changes
    swimlane_ids = tuple(map(itemgetter(0), data))
    events.emit_event_for_ids(ids=swimlane_ids,
                              content_type="projects.swimlane",
                              projectid=project.pk)

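# Illustrative usage sketch (not part of the original module): `data` is a
# sequence of (swimlane_id, new_order) pairs, which execute_values expands into
# the VALUES clause above and which itemgetter(0) uses to collect the ids for
# the event. The ids shown are hypothetical.
#
#   bulk_update_swimlane_order(project, user, data=[(3, 1), (1, 2), (2, 3)])
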
def update_tasks_order_in_bulk(bulk_data: list, field: str, project: object):
    """
    Update the order of some tasks.

    `bulk_data` should be a list of dicts with the following format:

    [{'task_id': <value>, 'order': <value>}, ...]
    """
    task_ids = []
    new_order_values = []
    for task_data in bulk_data:
        task_ids.append(task_data["task_id"])
        new_order_values.append({field: task_data["order"]})

    events.emit_event_for_ids(ids=task_ids,
                              content_type="tasks.task",
                              projectid=project.pk)

    db.update_in_bulk_with_ids(task_ids, new_order_values, model=models.Task)

def update_userstories_order_in_bulk(bulk_data: list, field: str, project: object):
    """
    Update the order of some user stories.

    `bulk_data` should be a list of dicts with the following format:

    [{'us_id': <value>, 'order': <value>}, ...]
    """
    user_story_ids = []
    new_order_values = []
    for us_data in bulk_data:
        user_story_ids.append(us_data["us_id"])
        new_order_values.append({field: us_data["order"]})

    events.emit_event_for_ids(ids=user_story_ids,
                              content_type="userstories.userstory",
                              projectid=project.pk)

    db.update_in_bulk_with_ids(user_story_ids, new_order_values,
                               model=models.UserStory)

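# Illustrative usage sketch (not part of the original module): unlike the
# `apply_order_updates` based variant above, this older function writes exactly
# the orders it is given and does not shift neighbouring stories. The ids and
# the order field used here are hypothetical.
#
#   update_userstories_order_in_bulk(
#       bulk_data=[{"us_id": 4, "order": 10}, {"us_id": 8, "order": 20}],
#       field="backlog_order",
#       project=project)
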
def update_issues_milestone_in_bulk(bulk_data: list, milestone: object):
    """
    Update the milestone of some issues.

    `bulk_data` should be a list of dicts with the following format:

    [{'issue_id': <value>}, ...]
    """
    issue_milestones = {e["issue_id"]: milestone.id for e in bulk_data}
    issue_ids = issue_milestones.keys()

    events.emit_event_for_ids(ids=issue_ids,
                              content_type="issues.issues",
                              projectid=milestone.project.pk)

    db.update_attr_in_bulk_for_ids(issue_milestones, "milestone_id", model=models.Issue)

    return issue_milestones

def update_epics_order_in_bulk(bulk_data: list, field: str, project: object):
    """
    Update the order of some epics.

    `bulk_data` should be a list of dicts with the following format:

    [{'epic_id': <value>, 'order': <value>}, ...]
    """
    epics = project.epics.all()

    epic_orders = {e.id: getattr(e, field) for e in epics}
    new_epic_orders = {d["epic_id"]: d["order"] for d in bulk_data}
    apply_order_updates(epic_orders, new_epic_orders)

    epic_ids = epic_orders.keys()
    events.emit_event_for_ids(ids=epic_ids,
                              content_type="epics.epic",
                              projectid=project.pk)

    db.update_attr_in_bulk_for_ids(epic_orders, field, models.Epic)

    return epic_orders

def update_issues_milestone_in_bulk(bulk_data: list, milestone: object):
    """
    Update the milestone of some issues.

    `bulk_data` should be a list of dicts with the following format:

    [{'issue_id': <value>}, ...]
    """
    issue_milestones = {e["issue_id"]: milestone.id for e in bulk_data}
    issue_ids = issue_milestones.keys()

    events.emit_event_for_ids(ids=issue_ids,
                              content_type="issues.issues",
                              projectid=milestone.project.pk)

    issues_instance_list = []
    issues_values = []
    for issue_id in issue_ids:
        issue = Issue.objects.get(pk=issue_id)
        issues_instance_list.append(issue)
        issues_values.append({'milestone_id': milestone.id})

    db.update_in_bulk(issues_instance_list, issues_values)

    return issue_milestones

def update_userstories_kanban_order_in_bulk(user: User,
                                            project: Project,
                                            status: UserStoryStatus,
                                            bulk_userstories: List[int],
                                            before_userstory: Optional[models.UserStory] = None,
                                            after_userstory: Optional[models.UserStory] = None,
                                            swimlane: Optional[Swimlane] = None):
    """
    Updates the order of the userstories specified adding the extra updates
    needed to keep consistency.

    Note: `after_userstory_id` and `before_userstory_id` are mutually exclusive;
          you can only use one in a given request. They can both be None, which
          means "at the beginning of its cell".

    - `bulk_userstories` should be a list of user story IDs
    """
    # filter user stories from status and swimlane
    user_stories = project.user_stories.filter(status=status)
    if swimlane is not None:
        user_stories = user_stories.filter(swimlane=swimlane)
    else:
        user_stories = user_stories.filter(swimlane__isnull=True)

    # exclude moved user stories
    user_stories = user_stories.exclude(id__in=bulk_userstories)

    # if before_userstory, get it and all elements that follow it:
    if before_userstory:
        user_stories = (user_stories.filter(kanban_order__gte=before_userstory.kanban_order))
    # if after_userstory, exclude it and get only elements after it:
    elif after_userstory:
        user_stories = (user_stories.exclude(id=after_userstory.id)
                                    .filter(kanban_order__gte=after_userstory.kanban_order))

    # sort and get only ids
    user_story_ids = (user_stories.order_by("kanban_order", "id")
                                  .values_list('id', flat=True))

    # append moved user stories
    user_story_ids = bulk_userstories + list(user_story_ids)

    # calculate the start order
    if before_userstory:
        # order starts with the before_userstory order
        start_order = before_userstory.kanban_order
    elif after_userstory:
        # order starts after the after_userstory order
        start_order = after_userstory.kanban_order + 1
    else:
        # move to the beginning of the column if there is no after or before
        start_order = 1

    # prepare rest of data
    total_user_stories = len(user_story_ids)
    user_story_kanban_orders = range(start_order, start_order + total_user_stories)

    data = tuple(zip(user_story_ids, user_story_kanban_orders))

    # execute query to update kanban_order
    sql = """
    UPDATE userstories_userstory
       SET kanban_order = tmp.new_kanban_order::BIGINT
      FROM (VALUES %s) AS tmp (id, new_kanban_order)
     WHERE tmp.id = userstories_userstory.id
    """
    with connection.cursor() as cursor:
        execute_values(cursor, sql, data)

    # execute query to update status and swimlane
    bulk_userstories_objects = project.user_stories.filter(id__in=bulk_userstories)
    bulk_userstories_objects.update(status=status, swimlane=swimlane)

    # Update is_closed attr for user stories and related milestones
    if settings.CELERY_ENABLED:
        _async_tasks_after_kanban_order_change.delay(bulk_userstories, user.id)
    else:
        _async_tasks_after_kanban_order_change(bulk_userstories, user.id)

    # Send events of updated stories
    events.emit_event_for_ids(ids=user_story_ids,
                              content_type="userstories.userstory",
                              projectid=project.pk)

    # Generate response with modified info
    res = ({
        "id": id,
        "swimlane": swimlane.id if swimlane else None,
        "status": status.id,
        "kanban_order": kanban_order
    } for (id, kanban_order) in data)
    return res

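# Illustrative usage sketch (not part of the original module): dropping two
# stories right before another story inside a status/swimlane cell. Only one of
# `before_userstory` / `after_userstory` may be passed; with neither, the stories
# go to the top of the cell. The objects and ids here are hypothetical.
#
#   update_userstories_kanban_order_in_bulk(
#       user=request_user,
#       project=project,
#       status=status,
#       bulk_userstories=[21, 22],
#       before_userstory=target_story,   # stories end up just before this one
#       swimlane=None)                   # None targets the "no swimlane" cell
#
# The generator returned yields one dict per moved story with its new swimlane,
# status and kanban_order.
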
def update_userstories_backlog_or_sprint_order_in_bulk(user: User,
                                                       project: Project,
                                                       bulk_userstories: List[int],
                                                       before_userstory: Optional[models.UserStory] = None,
                                                       after_userstory: Optional[models.UserStory] = None,
                                                       milestone: Optional[Milestone] = None):
    """
    Updates the order of the userstories specified adding the extra updates
    needed to keep consistency.

    Note: `after_userstory_id` and `before_userstory_id` are mutually exclusive;
          you can only use one in a given request. They can both be None, which
          means "at the beginning of its cell".

    - `bulk_userstories` should be a list of user story IDs
    """
    # Get ids of the affected milestones
    milestones_ids = set(project.milestones.filter(user_stories__in=bulk_userstories)
                                           .values_list('id', flat=True))
    if milestone:
        milestones_ids.add(milestone.id)

    order_param = "backlog_order"

    # filter user stories from milestone
    user_stories = project.user_stories.all()
    if milestone is not None:
        user_stories = user_stories.filter(milestone=milestone)
        order_param = "sprint_order"
    else:
        user_stories = user_stories.filter(milestone__isnull=True)

    # exclude moved user stories
    user_stories = user_stories.exclude(id__in=bulk_userstories)

    # if before_userstory, get it and all elements that follow it:
    if before_userstory:
        user_stories = (user_stories.filter(**{f"{order_param}__gte": getattr(before_userstory, order_param)}))
    # if after_userstory, exclude it and get only elements after it:
    elif after_userstory:
        user_stories = (user_stories.exclude(id=after_userstory.id)
                                    .filter(**{f"{order_param}__gte": getattr(after_userstory, order_param)}))

    # sort and get only ids
    user_story_ids = (user_stories.order_by(order_param, "id")
                                  .values_list('id', flat=True))

    # append moved user stories
    user_story_ids = bulk_userstories + list(user_story_ids)

    # calculate the start order
    if before_userstory:
        # order starts with the before_userstory order
        start_order = getattr(before_userstory, order_param)
    elif after_userstory:
        # order starts after the after_userstory order
        start_order = getattr(after_userstory, order_param) + 1
    else:
        # move to the beginning of the column if there is no after or before
        start_order = 1

    # prepare rest of data
    total_user_stories = len(user_story_ids)
    user_story_orders = range(start_order, start_order + total_user_stories)

    data = tuple(zip(user_story_ids, user_story_orders))

    # execute query to update the backlog or sprint order
    sql = f"""
    UPDATE userstories_userstory
       SET {order_param} = tmp.new_order::BIGINT
      FROM (VALUES %s) AS tmp (id, new_order)
     WHERE tmp.id = userstories_userstory.id
    """
    with connection.cursor() as cursor:
        execute_values(cursor, sql, data)

    # execute query to update the milestone for the user stories and their tasks
    bulk_userstories_objects = project.user_stories.filter(id__in=bulk_userstories)
    bulk_userstories_objects.update(milestone=milestone)
    project.tasks.filter(user_story__in=bulk_userstories).update(milestone=milestone)

    # Generate snapshots for user stories and tasks and calculate whether the
    # affected milestones are closed or open now.
    if settings.CELERY_ENABLED:
        _async_tasks_after_backlog_or_sprint_order_change.delay(bulk_userstories,
                                                                milestones_ids,
                                                                user.id)
    else:
        _async_tasks_after_backlog_or_sprint_order_change(bulk_userstories,
                                                          milestones_ids,
                                                          user.id)

    # Send events of updated stories
    events.emit_event_for_ids(ids=user_story_ids,
                              content_type="userstories.userstory",
                              projectid=project.pk)

    # Generate response with modified info
    res = ({
        "id": id,
        "milestone": milestone.id if milestone else None,
        order_param: order
    } for (id, order) in data)
    return res