def test_status_to_wip_twice(self):
    """Starting a task twice must not overwrite its original start date."""
    tasks_service.start_task(self.task.id)
    started_task = Task.get(self.task.id)
    first_start_date = started_task.real_start_date
    # Move the task back to its previous status, then start it again.
    started_task.update({"task_status_id": self.task_status.id})
    tasks_service.start_task(self.task.id)
    started_task = Task.get(self.task.id)
    self.assertEqual(started_task.real_start_date, first_start_date)
def test_new_working_file(self):
    """
    Posting to the working-files/new route must create successive revisions,
    build the expected file path and accept an explicit revision number.
    """
    task = Task.get(self.task_id)
    # assertEquals/assertNotEquals are deprecated aliases (removed in
    # Python 3.12); use assertEqual/assertNotEqual.
    self.assertEqual(len(task.assignees), 1)
    self.assertNotEqual(str(self.user), str(task.assignees[0]))
    path = "/data/tasks/%s/working-files/new" % self.task_id
    working_file = self.post(path, {
        "name": "main",
        "description": "description test",
        "comment": "comment test"
    })
    self.assertEqual(working_file["revision"], 1)

    task = Task.get(self.task_id)
    assignees = [person.serialize() for person in task.assignees]
    assignees = sorted(assignees, key=lambda x: x["last_name"])
    self.assertEqual(str(self.user.id), assignees[0]["id"])

    task = Task.get(self.task_id)
    path = "/data/tasks/%s/working-files/new" % self.task_id
    working_file = self.post(path, {
        "name": "main",
        "description": "description test",
        "comment": "comment test"
    })
    self.assertEqual(working_file["revision"], 2)

    working_file = self.post(path, {
        "name": "main",
        "description": "description test",
        "comment": "comment test"
    })
    self.assertEqual(working_file["revision"], 3)
    self.assertEqual(
        working_file["path"],
        "/simple/productions/cosmos_landromat/assets/props/tree/shaders/"
        "3ds_max/cosmos_landromat_props_tree_shaders_main_v003")

    # An explicit revision in the payload overrides the auto-increment.
    working_file = self.post(path, {
        "name": "main",
        "description": "description test",
        "comment": "comment test",
        "revision": 66
    })
    self.assertEqual(working_file["revision"], 66)
    self.assertEqual(
        working_file["path"],
        "/simple/productions/cosmos_landromat/assets/props/tree/shaders/"
        "3ds_max/cosmos_landromat_props_tree_shaders_main_v066")
def test_delete_all_task_types(self):
    """Removing tasks by project + task type must leave other tasks intact."""
    self.generate_fixture_project_standard()
    self.generate_fixture_asset_standard()
    removed_ids = [
        str(self.task.id),
        str(self.generate_fixture_task(name="second task").id),
    ]
    kept_ids = [
        str(self.shot_task.id),
        str(self.generate_fixture_task_standard().id),
    ]
    deletion_service.remove_tasks_for_project_and_task_type(
        self.project.id, self.task_type.id)
    for task_id in removed_ids:
        self.assertIsNone(Task.get(task_id))
    for task_id in kept_ids:
        self.assertIsNotNone(Task.get(task_id))
def post(self):
    """
    Create a new working file revision plus a matching output file revision
    for the requested task, then move the task to the "to review" state.

    Returns (payload, 201) where the payload holds the serialized working
    file, output file and the computed preview path.
    """
    (task_id, comment, person_id, separator,
     working_file_revision) = self.get_arguments()
    # NOTE(review): the separator from the request is discarded and forced
    # to "/" — confirm this override is intentional.
    separator = "/"
    task = Task.get(task_id)
    entity_id = task.entity_id
    working_file = file_info.create_new_working_revision(
        entity_id, task.id, person_id, comment, working_file_revision)
    working_file_dict = self.add_path_info(working_file, "working", task,
                                           comment, separator)
    # The output revision is linked back to the freshly created working file.
    output_file = file_info.create_new_output_revision(
        entity_id, task.id, person_id, comment)
    output_file.source_file_id = working_file.id
    output_file_dict = self.add_path_info(output_file, "output", task,
                                          comment, separator)
    output_file_dict["preview_path"] = self.get_preview_path(
        task, output_file.revision, separator)
    task_info.to_review_task(
        task,
        output_file_dict,
    )
    return {
        "working_file": working_file_dict,
        "output_file": output_file_dict,
        "preview_path": output_file_dict["preview_path"]
    }, 201
def create_or_update_time_spent(task_id, person_id, date, duration, add=False):
    """
    Create a new time spent if it doesn't exist. If it exists, update it
    with the new duration (replace, add when *add* is True, delete when
    *duration* is 0) and return it from the database. The task duration is
    then recomputed from all remaining time spent entries.

    Raises WrongDateFormatException when *date* cannot be parsed.
    Returns the serialized time spent entry.
    """
    try:
        time_spent = TimeSpent.get_by(task_id=task_id,
                                      person_id=person_id,
                                      date=date)
    except DataError:
        raise WrongDateFormatException

    if time_spent is not None:
        if duration == 0:
            time_spent.delete()
        elif add:
            time_spent.update({"duration": time_spent.duration + duration})
        else:
            time_spent.update({"duration": duration})
    else:
        time_spent = TimeSpent.create(task_id=task_id,
                                      person_id=person_id,
                                      date=date,
                                      duration=duration)

    # Recompute the task's total duration from scratch. Bug fix: the loop
    # previously reused the name `time_spent`, clobbering the entry created
    # or updated above, so the wrong object was serialized and returned.
    task = Task.get(task_id)
    task.duration = 0
    for entry in TimeSpent.get_all_by(task_id=task_id):
        task.duration += entry.duration
    task.save()
    events.emit("task:update", {"task_id": task_id})
    return time_spent.serialize()
def test_status_to_wip_again(self):
    """Restarting an already started task must keep the first start date."""
    self.task.real_start_date = None
    self.put("/actions/tasks/%s/start" % self.task.id, {})
    real_start_date = Task.get(self.task.id).real_start_date
    self.put("/actions/tasks/%s/start" % self.task.id, {})
    task = self.get("data/tasks/%s" % self.task.id)
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(real_start_date.isoformat(), task["real_start_date"])
def test_task(self):
    """Importing a serialized task dict must create a matching Task row."""
    task_dict = {
        "assignees": [str(self.person.id)],
        "id": "3629fc9f-355f-420e-b3c7-5d69f02888e6",
        "created_at": "2019-06-03T10:05:57",
        "updated_at": "2019-06-05T10:05:57",
        "name": "main",
        "priority": 2,
        "duration": 3,
        "estimation": 2,
        "completion_rate": 50,
        "retake_count": 1,
        "sort_order": 1,
        "real_start_date": "2019-06-04T10:05:57",
        "last_comment_date": "2019-06-05T10:05:57",
        "project_id": str(self.project.id),
        "task_type_id": str(self.task_type.id),
        "task_status_id": str(self.task_status.id),
        "entity_id": str(self.asset.id),
        "assigner_id": str(self.user["id"]),
        "type": "Task",
    }
    Task.create_from_import(task_dict)
    imported_task = Task.get(task_dict["id"])
    self.assertEqual(imported_task.name, task_dict["name"])
def create_new_working_revision(task_id, person_id, software_id,
                                name="main", path="", comment="",
                                revision=0):
    """
    Create a working file revision for the given task, author and software.

    When *revision* is 0 the next revision number for (task, name) is used.
    Raises EntryAlreadyExistsException when the revision already exists.
    """
    task = Task.get(task_id)
    if revision == 0:
        revision = get_next_working_revision(task_id, name)
    try:
        new_working_file = WorkingFile.create(
            comment=comment,
            name=name,
            revision=revision,
            path=path,
            task_id=task.id,
            software_id=software_id,
            entity_id=task.entity_id,
            person_id=person_id,
        )
        events.emit(
            "working_file:new",
            {"working_file_id": new_working_file.id},
        )
    except IntegrityError:
        raise EntryAlreadyExistsException
    return new_working_file.serialize()
def remove_task(task_id, force=False):
    """
    Delete the task matching *task_id*. With *force*, also delete its
    working files (and the output files built from them), its comments
    (and their notifications) and its preview files.

    Emits a "task:deletion" event and returns the serialized task.
    """
    task = Task.get(task_id)
    if force:
        for working_file in WorkingFile.query.filter_by(task_id=task_id):
            for output_file in OutputFile.query.filter_by(
                    source_file_id=working_file.id):
                output_file.delete()
            working_file.delete()
        for comment in Comment.query.filter_by(object_id=task_id):
            for notification in Notification.query.filter_by(
                    comment_id=comment.id):
                notification.delete()
            comment.delete()
        for preview_file in PreviewFile.query.filter_by(task_id=task_id):
            preview_file.delete()
    task.delete()
    events.emit("task:deletion", {"task_id": task_id})
    return task.serialize()
def remove_task(task_id, force=False):
    """
    Remove given task. Force deletion if the task has some comments
    and files related. This will lead to the deletion of all of them.
    """
    # Local import, presumably to avoid a circular import — TODO confirm.
    from zou.app.services import tasks_service
    task = Task.get(task_id)
    if force:
        # Delete working files together with the output files built from
        # them.
        working_files = WorkingFile.query.filter_by(task_id=task_id)
        for working_file in working_files:
            output_files = OutputFile.query.filter_by(
                source_file_id=working_file.id)
            for output_file in output_files:
                output_file.delete()
            working_file.delete()
        # Delete comments along with their notifications and news entries.
        comments = Comment.query.filter_by(object_id=task_id)
        for comment in comments:
            notifications = Notification.query.filter_by(comment_id=comment.id)
            for notification in notifications:
                notification.delete()
            news_list = News.query.filter_by(comment_id=comment.id)
            for news in news_list:
                news.delete()
            comment.delete()
        subscriptions = Subscription.query.filter_by(task_id=task_id)
        for subscription in subscriptions:
            subscription.delete()
        # remove_preview_file also clears the stored media files.
        preview_files = PreviewFile.query.filter_by(task_id=task_id)
        for preview_file in preview_files:
            remove_preview_file(preview_file)
        time_spents = TimeSpent.query.filter_by(task_id=task_id)
        for time_spent in time_spents:
            time_spent.delete()
    # Notifications and news attached directly to the task are removed even
    # without force.
    notifications = Notification.query.filter_by(task_id=task_id)
    for notification in notifications:
        notification.delete()
    news_list = News.query.filter_by(task_id=task.id)
    for news in news_list:
        news.delete()
    task.delete()
    tasks_service.clear_task_cache(task_id)
    # Serialize before the session forgets the deleted row so the event
    # payload stays complete.
    task_serialized = task.serialize()
    events.emit(
        "task:delete",
        {
            "task_id": task_id,
            "entity_id": task_serialized["entity_id"],
            "task_type_id": task_serialized["task_type_id"],
        },
        project_id=task_serialized["project_id"],
    )
    return task_serialized
def test_publish_task(self):
    """
    Publishing an output must flag the task "to review" and fire the
    task:to-review event with full before/after payloads.
    """
    handler = ToReviewHandler(self.open_status_id, self.to_review_status_id)
    events.register("task:to-review", "mark_event_as_fired", handler)
    task_info.to_review_task(self.task, self.output_file.serialize())
    self.is_event_fired = handler.is_event_fired
    data = handler.data
    task = Task.get(self.task.id)
    self.assertEqual(task.task_status_id, self.to_review_status_id)
    self.assert_event_is_fired()
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(data["task_before"]["task_status_id"],
                     str(self.open_status_id))
    self.assertEqual(data["task_after"]["task_status_id"],
                     str(self.to_review_status_id))
    self.assertEqual(data["task_after"]["project"]["id"],
                     str(self.project.id))
    self.assertEqual(data["task_after"]["entity"]["id"],
                     str(self.entity.id))
    self.assertEqual(data["task_after"]["entity_type"]["id"],
                     str(self.entity_type.id))
    self.assertEqual(data["task_after"]["person"]["id"],
                     str(self.person.id))
    # Bug fix: assertTrue(a, b) treated the second argument as a failure
    # message and never compared the values; assertEqual was intended.
    self.assertEqual(data["task_after"]["output_file"]["id"],
                     str(self.output_file.id))
def add_preview_file_to_comment(comment_id, person_id, task_id, revision=0):
    """
    Add a preview to comment preview list. Auto set the revision field
    (add 1 if it's a new preview, keep the preview revision in other cases).
    """
    comment = get_comment_raw(comment_id)
    news = News.get_by(comment_id=comment_id)
    task = Task.get(comment.object_id)
    project_id = str(task.project_id)
    position = 1
    if revision == 0 and len(comment.previews) == 0:
        # First preview of the comment: open a brand new revision.
        revision = get_next_preview_revision(task_id)
    elif revision == 0:
        # Additional preview: reuse the revision of the comment's existing
        # previews and append at the next free position.
        revision = comment.previews[0].revision
        position = get_next_position(task_id, revision)
    else:
        # Explicit revision given by the caller.
        position = get_next_position(task_id, revision)
    # The first 13 characters of a uuid4 serve as the new preview file name.
    preview_file = files_service.create_preview_file_raw(str(
        uuid.uuid4())[:13], revision, task_id, person_id, position=position)
    events.emit("preview-file:new", {
        "preview_file_id": preview_file.id,
        "comment_id": comment_id,
    }, project_id=project_id)
    comment.previews.append(preview_file)
    comment.save()
    if news is not None:
        news.update({"preview_file_id": preview_file.id})
    events.emit("comment:update", {"comment_id": comment.id},
                project_id=project_id)
    return preview_file.serialize()
def remove_preview_file(preview_file_id):
    """
    Delete the preview file matching *preview_file_id* and emit a deletion
    event scoped to the project of its task.

    Returns the serialized preview file.
    """
    preview_file = get_preview_file_raw(preview_file_id)
    # Bug fix: resolve the related task BEFORE deleting the record —
    # reading attributes of a deleted ORM instance is unreliable once the
    # session expires it.
    task = Task.get(preview_file.task_id)
    preview_file.delete()
    events.emit(
        "preview-file:delete",
        {"preview_file_id": preview_file_id},
        project_id=str(task.project_id),
    )
    return preview_file.serialize()
def remove_preview_file(preview_file):
    """
    Remove all files related to given preview file, then remove the
    preview file entry from the database.
    """
    task = Task.get(preview_file.task_id)
    entity = Entity.get(task.entity_id)
    news = News.get_by(preview_file_id=preview_file.id)
    # Detach the preview from the entity thumbnail and from the news feed
    # before deleting it, so no dangling references remain.
    if entity.preview_file_id == preview_file.id:
        entity.update({"preview_file_id": None})
    if news is not None:
        news.update({"preview_file_id": None})
    # Stored media are cleared according to the preview type.
    if preview_file.extension == "png":
        clear_picture_files(preview_file.id)
    elif preview_file.extension == "mp4":
        clear_movie_files(preview_file.id)
    else:
        clear_generic_files(preview_file.id)
    # Empty the comment link collection before deletion.
    preview_file.comments = []
    preview_file.save()
    preview_file.delete()
    return preview_file.serialize()
def test_publish_task(self):
    """
    Moving a task to review must fire the task:to-review event carrying
    the previous status and the review comment.
    """
    handler = ToReviewHandler(self.open_status_id, self.to_review_status_id)
    events.register("task:to-review", "mark_event_as_fired", handler)
    tasks_service.task_to_review(
        self.task.id, self.person.serialize(), "my comment")
    self.is_event_fired = handler.is_event_fired
    payload = handler.data
    reviewed_task = Task.get(self.task.id)
    self.assertEqual(reviewed_task.task_status_id, self.to_review_status_id)
    self.assert_event_is_fired()
    self.assertEqual(
        payload["previous_task_status_id"], str(self.open_status_id))
    self.assertEqual(payload["comment"], "my comment")
def remove_comment(comment_id):
    """
    Remove given comment together with its notifications, news entries and
    attached preview files. Emit a "comment:delete" event scoped to the
    related task's project.

    Raises CommentNotFoundException when the comment does not exist.
    Returns the serialized comment.
    """
    comment = Comment.get(comment_id)
    # Bug fix: the task lookup used to run before this None check, which
    # raised AttributeError on comment.object_id instead of the documented
    # CommentNotFoundException.
    if comment is None:
        raise CommentNotFoundException
    task = Task.get(comment.object_id)
    notifications = Notification.query.filter_by(comment_id=comment.id)
    for notification in notifications:
        notification.delete()
    news_list = News.query.filter_by(comment_id=comment.id)
    for news in news_list:
        news.delete()
    if comment.preview_file_id is not None:
        # Detach the main preview first so remove_preview_file can delete it.
        preview_file = PreviewFile.get(comment.preview_file_id)
        comment.preview_file_id = None
        comment.save()
        remove_preview_file(preview_file)
    # Snapshot the preview list before the comment row disappears.
    previews = [preview for preview in comment.previews]
    comment.delete()
    for preview in previews:
        remove_preview_file(preview)
    if task is not None:
        events.emit("comment:delete", {"comment_id": comment.id},
                    project_id=str(task.project_id))
    return comment.serialize()
def update_preview_file(preview_file_id, data):
    """
    Apply *data* to the preview file, clear its cache entry and emit a
    project-scoped "preview-file:update" event.

    Returns the serialized preview file.
    """
    preview_file = files_service.get_preview_file_raw(preview_file_id)
    preview_file.update(data)
    files_service.clear_preview_file_cache(preview_file_id)
    owner_task = Task.get(preview_file.task_id)
    events.emit(
        "preview-file:update",
        {"preview_file_id": preview_file_id},
        project_id=str(owner_task.project_id),
    )
    return preview_file.serialize()
def get_entity_from_preview_file(preview_file_id):
    """
    Get entity dict of related preview file.
    """
    preview_file = files_service.get_preview_file_raw(preview_file_id)
    owner_task = Task.get(preview_file.task_id)
    return Entity.get(owner_task.entity_id).serialize()
def update_task(task_id, data):
    """
    Update the task with *data*; stamp end_date with the current time when
    the update moves the task to a finished state.

    Returns the serialized task.
    """
    task = Task.get(task_id)
    moved_to_done = is_finished(task, data)
    if moved_to_done:
        data["end_date"] = datetime.datetime.now()
    task.update(data)
    return task.serialize()
def get_project_from_preview_file(preview_file_id):
    """
    Get project dict of related preview file.
    """
    preview_file = files_service.get_preview_file_raw(preview_file_id)
    owner_task = Task.get(preview_file.task_id)
    return Project.get(owner_task.project_id).serialize()
def reset_task_data(task_id):
    """
    Recompute the derived fields of a task (duration, retake count, real
    start date, end date, last comment date and status) from its comment
    history and time spent entries, then emit a "task:update" event.
    """
    clear_task_cache(task_id)
    task = Task.get(task_id)
    retake_count = 0
    real_start_date = None
    last_comment_date = None
    end_date = None
    # Default status when the task has no comment at all.
    task_status_id = TaskStatus.get_by(short_name="todo").id
    # Walk the comments in chronological order together with the status
    # flags their task status carried.
    comments = (
        Comment.query.join(TaskStatus)
        .filter(Comment.object_id == task_id)
        .order_by(Comment.created_at)
        .add_columns(
            TaskStatus.is_retake,
            TaskStatus.is_done,
            TaskStatus.short_name
        )
        .all()
    )
    previous_is_retake = False
    for (
        comment,
        task_status_is_retake,
        task_status_is_done,
        task_status_short_name,
    ) in comments:
        # Count only transitions INTO retake, not consecutive retake
        # comments.
        if task_status_is_retake and not previous_is_retake:
            retake_count += 1
        previous_is_retake = task_status_is_retake
        # The first move to WIP fixes the real start date.
        if task_status_short_name.lower() == "wip" and real_start_date is None:
            real_start_date = comment.created_at
        # Only the latest comment decides whether the task is finished:
        # a later non-done comment resets end_date.
        if task_status_is_done:
            end_date = comment.created_at
        else:
            end_date = None
        task_status_id = comment.task_status_id
        last_comment_date = comment.created_at
    # Duration is the sum of all time spent entries.
    duration = 0
    time_spents = TimeSpent.get_all_by(task_id=task.id)
    for time_spent in time_spents:
        duration += time_spent.duration
    task.update(
        {
            "duration": duration,
            "retake_count": retake_count,
            "real_start_date": real_start_date,
            "last_comment_date": last_comment_date,
            "end_date": end_date,
            "task_status_id": task_status_id,
        }
    )
    project_id = str(task.project_id)
    events.emit("task:update", {"task_id": task.id}, project_id)
    return task.serialize()
def get_task_raw(task_id):
    """
    Fetch the Task model for *task_id*.

    Raises TaskNotFoundException when the id is malformed or unknown.
    """
    try:
        result = Task.get(task_id)
    except StatementError:
        raise TaskNotFoundException()
    if result is None:
        raise TaskNotFoundException()
    return result
def test_status_to_wip_again(self):
    """A second start request must not change the recorded start date."""
    self.task.real_start_date = None
    task_id = str(self.task.id)
    start_route = "/actions/tasks/%s/start" % task_id
    self.put(start_route, {})
    first_start = Task.get(task_id).real_start_date
    self.put(start_route, {})
    task = self.get("data/tasks/%s" % task_id)
    # Microseconds are dropped because the API serializes without them.
    self.assertEqual(
        first_start.replace(microsecond=0).isoformat(),
        task["real_start_date"])
def generate_fixture_subscription(self, task_id=None):
    """
    Create a subscription fixture linking the current user to *task_id*
    (the default fixture task when omitted) and return it serialized.
    """
    target_task = self.task if task_id is None else Task.get(task_id)
    self.subscription = Subscription.create(
        person_id=self.user["id"],
        task_id=target_task.id,
        entity_id=target_task.entity_id,
        task_type_id=target_task.task_type_id,
    )
    return self.subscription.serialize()
def test_status_to_wip(self):
    """
    Starting a task must set the WIP status, stamp real_start_date and
    fire the task:start event.
    """
    events.register("task:start", "mark_event_as_fired", self)
    before = datetime.datetime.now()
    self.task.update({"real_start_date": None})
    tasks_service.start_task(self.task.id)
    started_task = Task.get(self.task.id)
    self.assertEqual(started_task.task_status_id, self.wip_status_id)
    # ISO-8601 strings compare chronologically, so string comparison works.
    self.assertGreater(
        started_task.real_start_date.isoformat(), before.isoformat())
    self.assert_event_is_fired()
def get_task_raw(task_id):
    """
    Get task matching given id as an active record.

    Raises TaskNotFoundException when the id is malformed or unknown.
    """
    task = None
    try:
        task = Task.get(task_id)
    except StatementError:
        # A malformed id is reported the same way as a missing task.
        pass
    if task is None:
        raise TaskNotFoundException()
    return task
def test_get_preview_files_for_vendor(self):
    """
    Test route data/preview-files for vendor. The vendor can only access
    the tasks he's working on.
    """
    route = "data/preview-files"
    self.log_in_vendor()
    preview_files_vendor = self.get(route)
    for preview_file in preview_files_vendor:
        # tasks_service.get_task doesn't contain assignees, thus we use
        # the Task model directly.
        task = Task.get(preview_file["task_id"])
        self.assertIn(
            self.user_vendor_id,
            [str(assignee.id) for assignee in task.assignees],
        )
    self.assertEqual(len(preview_files_vendor), 3)
def test_get_preview_file_for_vendor(self):
    """
    Test route data/preview-files/<preview_file_id> for vendor.
    The vendor can only access the tasks he's working on.
    """
    forbidden_route = "data/preview-files/%s" % str(self.preview_file2_1.id)
    allowed_route = "data/preview-files/%s" % str(self.preview_file2_2.id)
    self.log_in_vendor()
    preview_file_vendor = self.get(allowed_route)
    # tasks_service.get_task doesn't contain assignees, thus we use the
    # Task model directly.
    task = Task.get(preview_file_vendor["task_id"])
    self.assertIn(
        self.user_vendor_id,
        [str(assignee.id) for assignee in task.assignees],
    )
    self.get(forbidden_route, code=403)
def create_new_working_revision(
    person_id,
    software_id,
    entity_id=None,
    task_id=None,
    name="main",
    path="",
    comment="",
    revision=0,
):
    """
    Create a new working file revision for given task (or bare entity).
    An author (user) and a software are required.

    When *revision* is 0 the next available revision is used. If *path*
    matches an existing working file, that file is returned instead of
    creating a duplicate. Raises EntryAlreadyExistsException on a revision
    collision.
    """
    task = None
    if task_id:
        task = Task.get(task_id)
        entity_id = task.entity_id
    if revision == 0:
        revision = get_next_working_revision(name,
                                             task_id=task_id,
                                             entity_id=entity_id)
    if path:
        previous_working_file = get_working_file_by_path(path)
        if previous_working_file:
            return previous_working_file.serialize()
    try:
        working_file = WorkingFile.create(
            comment=comment,
            name=name,
            revision=revision,
            path=path,
            task_id=task_id,
            software_id=software_id,
            entity_id=entity_id,
            person_id=person_id,
        )
        # Bug fix: `task` was unbound when no task_id was given, so the
        # emit below raised NameError. Scope the event to the project only
        # when a task is available.
        project_id = str(task.project_id) if task is not None else None
        events.emit(
            "working_file:new",
            {"working_file_id": working_file.id},
            project_id=project_id,
        )
    except IntegrityError:
        raise EntryAlreadyExistsException
    return working_file.serialize()
def create_or_update_time_spent(task_id, person_id, date, duration, add=False):
    """
    Create a new time spent if it doesn't exist. If it exists, update it
    with the new duration (replace, add when *add* is True, delete when
    *duration* is 0) and return it from the database. The task duration is
    then recomputed from all remaining time spent entries.

    Raises WrongDateFormatException when *date* cannot be parsed.
    """
    try:
        time_spent = TimeSpent.get_by(task_id=task_id,
                                      person_id=person_id,
                                      date=date)
    except DataError:
        raise WrongDateFormatException
    task = Task.get(task_id)
    project_id = str(task.project_id)
    if time_spent is not None:
        # An entry already exists: delete, add to, or replace its duration.
        if duration == 0:
            time_spent.delete()
        elif add:
            time_spent.update({"duration": time_spent.duration + duration})
        else:
            time_spent.update({"duration": duration})
        events.emit(
            "time-spent:update",
            {"time_spent_id": str(time_spent.id)},
            project_id=project_id,
        )
    else:
        time_spent = TimeSpent.create(task_id=task_id,
                                      person_id=person_id,
                                      date=date,
                                      duration=duration)
        # Recording time counts as presence for the author.
        persons_service.update_person_last_presence(person_id)
        events.emit(
            "time-spent:new",
            {"time_spent_id": str(time_spent.id)},
            project_id=project_id,
        )
    # Recompute the task's total duration from scratch. The distinct loop
    # variable keeps `time_spent` intact for the return below.
    task.duration = 0
    time_spents = TimeSpent.get_all_by(task_id=task_id)
    for task_time_spent in time_spents:
        task.duration += task_time_spent.duration
    task.save()
    clear_task_cache(task_id)
    events.emit("task:update", {"task_id": task_id}, project_id=project_id)
    return time_spent.serialize()