def emit(event, data=None, persist=True, project_id=None):
    """
    Emit an event which leads to the execution of all event handlers
    registered for that event name. It publishes too the event to other
    services (like the realtime event daemon).

    :param event: Name of the event to emit.
    :param data: Payload attached to the event (empty dict by default).
    :param persist: When True, the event is stored via ``save_event``.
    :param project_id: Optional project id merged into the payload.
    """
    if not data:
        data = {}
    if project_id is not None:
        data["project_id"] = project_id
    data = fields.serialize_dict(data)
    publisher_store.publish(event, data)
    if persist:
        save_event(event, data, project_id=project_id)
    # Handlers run from the listen_event task (moved there to avoid
    # refreshing Zou after an update). A handler failure must not break
    # the caller, so it is only logged.
    try:
        event_handlers.listen_event_task.delay(event, data)
    except Exception:
        # exc_info=True (idiomatic boolean, was exc_info=1) attaches the
        # full traceback to the log record.
        current_app.logger.error("Error handling event", exc_info=True)
def emit(event, data=None, persist=True):
    """
    Emit an event which leads to the execution of all event handlers
    registered for that event name. It publishes too the event to other
    services (like the realtime event daemon).

    :param event: Name of the event to emit.
    :param data: Payload attached to the event (empty dict by default).
    :param persist: When True, the event is stored via ``save_event``.
    """
    # Fix: the original used a mutable default argument (data={}), which
    # is shared across calls and could leak payload data between events.
    if data is None:
        data = {}
    event_handlers = handlers.get(event, {})
    data = fields.serialize_dict(data)
    publisher_store.publish(event, data)
    if persist:
        save_event(event, data)
    # Local import — presumably to avoid a circular import at module
    # load time; TODO confirm.
    from zou.app.config import ENABLE_JOB_QUEUE
    for func in event_handlers.values():
        if ENABLE_JOB_QUEUE:
            from zou.app.stores.queue_store import job_queue
            job_queue.enqueue(func.handle_event, data)
        else:
            try:
                func.handle_event(data)
            except Exception:
                # exc_info=True (idiomatic boolean, was exc_info=1).
                current_app.logger.error(
                    "Error handling event", exc_info=True
                )
def present(self):
    """Return this record's public fields serialized as a plain dict."""
    field_names = (
        "id",
        "name",
        "has_avatar",
        "hours_by_day",
    )
    return fields.serialize_dict(
        {name: getattr(self, name) for name in field_names}
    )
def present(self):
    """Return this record's public fields serialized as a plain dict."""
    field_names = (
        "id",
        "name",
        "has_avatar",
        "hours_by_day",
        "use_original_file_name",
    )
    return fields.serialize_dict(
        {name: getattr(self, name) for name in field_names}
    )
def present(self):
    """Return this record's public fields serialized as a plain dict."""
    field_names = (
        "id",
        "status",
        "created_at",
    )
    return fields.serialize_dict(
        {name: getattr(self, name) for name in field_names}
    )
def present(self):
    """Return this record's public fields serialized as a plain dict."""
    field_names = (
        "id",
        "start_date",
        "end_date",
        "man_days",
        "project_id",
        "task_type_id",
    )
    return fields.serialize_dict(
        {name: getattr(self, name) for name in field_names}
    )
def get_last_notifications(notification_id=None):
    """
    Return last 100 user notifications.

    Each entry aggregates the notification row with related project, task
    and comment columns so the client can display it directly.
    """
    current_user = persons_service.get_current_user_raw()
    result = []
    # Fix: order newest-first so that limit(100) really returns the *last*
    # notifications; the original ascending order returned the oldest ones,
    # contradicting the docstring (and the other versions of this helper).
    query = Notification.query \
        .filter_by(person_id=current_user.id) \
        .order_by(Notification.created_at.desc()) \
        .join(Task, Project, Comment) \
        .add_columns(
            Project.id,
            Project.name,
            Task.task_type_id,
            Comment.preview_file_id,
            Comment.task_status_id,
            Comment.text,
            Task.entity_id
        )
    if notification_id is not None:
        query = query.filter(Notification.id == notification_id)
    notifications = query.limit(100).all()
    for (
        notification,
        project_id,
        project_name,
        task_type_id,
        preview_file_id,
        task_status_id,
        comment_text,
        task_entity_id
    ) in notifications:
        full_entity_name = notifications_service.get_full_entity_name(
            task_entity_id
        )
        result.append(fields.serialize_dict({
            "id": notification.id,
            "author_id": notification.author_id,
            "comment_id": notification.comment_id,
            "task_id": notification.task_id,
            "task_type_id": task_type_id,
            "task_status_id": task_status_id,
            "preview_file_id": preview_file_id,
            "project_id": project_id,
            "project_name": project_name,
            "comment_text": comment_text,
            "created_at": notification.created_at,
            "read": notification.read,
            "change": notification.change,
            "full_entity_name": full_entity_name
        }))
    return result
def present(self):
    """Return this record's public fields serialized as a plain dict."""
    field_names = (
        "id",
        "date",
        "name",
        "project_id",
        "task_type_id",
    )
    return fields.serialize_dict(
        {name: getattr(self, name) for name in field_names}
    )
def present(self):
    """Return this record's public fields serialized as a plain dict."""
    field_names = (
        "id",
        "name",
        "has_avatar",
        "hours_by_day",
        "use_original_file_name",
        "timesheets_locked",
        "chat_token_slack",
    )
    return fields.serialize_dict(
        {name: getattr(self, name) for name in field_names}
    )
def get_preview_files_for_entity(entity_id):
    """
    Get all preview files available for given shot.

    Returns a dict mapping task type ids to lists of serialized preview
    files (revisions grouped via ``mix_preview_file_revisions``).
    """
    previews = {}
    # One row per (task, preview file); ordered so that, per task type,
    # highest revision comes first, then oldest creation date.
    query = (Task.query.filter_by(entity_id=entity_id).add_columns(
        PreviewFile.id, PreviewFile.revision, PreviewFile.position,
        PreviewFile.original_name, PreviewFile.extension,
        PreviewFile.status, PreviewFile.annotations,
        PreviewFile.created_at, PreviewFile.task_id).join(
            PreviewFile).join(TaskType).order_by(
                TaskType.priority.desc()).order_by(TaskType.name).order_by(
                    PreviewFile.revision.desc()).order_by(
                        PreviewFile.created_at))
    # Group serialized preview files by task id first.
    task_previews = {}
    for (task, preview_file_id, preview_file_revision,
         preview_file_position, preview_file_original_name,
         preview_file_extension, preview_file_status,
         preview_file_annotations, preview_file_created_at,
         preview_file_task_id) in query.all():
        task_id = str(task.id)
        if task_id not in task_previews:
            task_previews[task_id] = []
        task_previews[task_id].append(
            fields.serialize_dict({
                "id": preview_file_id,
                "revision": preview_file_revision,
                "position": preview_file_position,
                "original_name": preview_file_original_name,
                "extension": preview_file_extension,
                "status": preview_file_status,
                "annotations": preview_file_annotations,
                "created_at": preview_file_created_at,
                "task_id": task_id,
                "task_type_id": str(task.task_type_id),
            }))
    # Then re-key the result by task type id, mixing revisions together.
    # NOTE(review): if two tasks of the same type exist for the entity,
    # the later one overwrites the earlier entry — presumably one task
    # per task type is expected here; confirm with callers.
    for task_id in task_previews.keys():
        preview_files = task_previews[task_id]
        task_type_id = task_previews[task_id][0]["task_type_id"]
        if len(preview_files) > 0:
            preview_files = mix_preview_file_revisions(preview_files)
            previews[task_type_id] = [
                {
                    "id": preview_file["id"],
                    "revision": preview_file["revision"],
                    "original_name": preview_file["original_name"],
                    "extension": preview_file["extension"],
                    "status": preview_file["status"],
                    "annotations": preview_file["annotations"],
                    "previews": preview_file["previews"],
                    "created_at": preview_file["created_at"],
                    "task_id": preview_file["task_id"]
                }
                for preview_file in preview_files
            ]  # Do not add too much field to avoid building too big responses
    return previews
def test_serialize_dict(self):
    """serialize_dict/serialize_value turn datetimes and UUIDs into strings."""
    timestamp = datetime.datetime.now()
    identifier = uuid.uuid4()
    payload = {
        "now": timestamp,
        "unique_id": identifier,
        "string": "test",
    }
    expected = {
        "now": timestamp.isoformat(),
        "unique_id": str(identifier),
        "string": "test",
    }
    self.assertEqual(fields.serialize_dict(payload), expected)
    self.assertEqual(fields.serialize_value(payload), expected)
def present(self):
    """Return this record's public fields serialized as a plain dict."""
    field_names = (
        "id",
        "chat_token_slack",
        "chat_webhook_mattermost",
        "chat_token_discord",
        "name",
        "has_avatar",
        "hours_by_day",
        "hd_by_default",
        "use_original_file_name",
        "timesheets_locked",
    )
    return fields.serialize_dict(
        {name: getattr(self, name) for name in field_names}
    )
def emit(event, data=None):
    """
    Emit an event which leads to the execution of all event handlers
    registered for that event name. It publishes too the event to other
    services (like the realtime event daemon).

    :param event: Name of the event to emit.
    :param data: Payload attached to the event (empty dict by default).
    """
    # Fix: the original used a mutable default argument (data={}), which
    # is shared across calls and could leak payload data between events.
    if data is None:
        data = {}
    event_handlers = handlers.get(event, {})
    data = fields.serialize_dict(data)
    publisher_store.publish(event, data)
    save_event(event, data)
    # Local import — presumably to avoid a circular import at module
    # load time; TODO confirm.
    from zou.app.config import ENABLE_JOB_QUEUE
    for func in event_handlers.values():
        if ENABLE_JOB_QUEUE:
            from zou.app.stores.queue_store.job_queue import enqueue
            enqueue(func.handle_event, data)
        else:
            func.handle_event(data)
def get_playlist_with_preview_file_revisions(playlist_id):
    """
    Return given playlist. Shot list is augmented with all previews
    available for a given shot.

    :raises PlaylistNotFoundException: when no playlist matches the id.
    """
    playlist = Playlist.get(playlist_id)
    if playlist is None:
        raise PlaylistNotFoundException()
    playlist_dict = playlist.serialize()
    # Build jobs are exposed newest-first (reversed relation order).
    playlist_dict["build_jobs"] = []
    for build_job in reversed(playlist.build_jobs):
        playlist_dict["build_jobs"].append(fields.serialize_dict({
            "id": build_job.id,
            "status": build_job.status,
            "created_at": build_job.created_at
        }))
    if playlist_dict["shots"] is None:
        playlist_dict["shots"] = []
    (
        playlist_dict,
        preview_file_map
    ) = set_preview_files_for_shots(playlist_dict)
    # Attach preview file details to each shot; shots whose preview file
    # is unknown lose their dangling preview_file_id.
    for shot in playlist_dict["shots"]:
        try:
            preview_file = preview_file_map.get(shot["preview_file_id"], None)
            if preview_file is not None:
                shot["preview_file_id"] = str(preview_file.id)
                shot["extension"] = preview_file.extension
                shot["annotations"] = preview_file.annotations
                shot["task_id"] = fields.serialize_value(preview_file.task_id)
            else:
                del shot["preview_file_id"]
        except Exception as e:
            # NOTE(review): errors are swallowed and printed to stdout —
            # a proper logger call would be preferable; confirm whether a
            # logger is available in this module before changing.
            print(e)
    return playlist_dict
def get_last_events(
    after=None, before=None, page_size=100, only_files=False, project_id=None
):
    """
    Return last 100 events published. If before parameter is set, it
    returns last 100 events before this date.
    """
    # Event names considered as "file" events when only_files is set.
    file_event_names = (
        "preview-file:add-file",
        "organisation:set-thumbnail",
        "person:set-thumbnail",
        "project:set-thumbnail",
    )
    query = ApiEvent.query.order_by(ApiEvent.created_at.desc())
    if after is not None:
        query = query.filter(ApiEvent.created_at > after)
    if before is not None:
        query = query.filter(ApiEvent.created_at < before)
    if only_files:
        query = query.filter(ApiEvent.name.in_(file_event_names))
    if project_id is not None:
        query = query.filter(ApiEvent.project_id == project_id)
    result = []
    for event in query.limit(page_size).all():
        result.append(
            fields.serialize_dict({
                "id": event.id,
                "created_at": event.created_at,
                "name": event.name,
                "user_id": event.user_id,
                "data": event.data,
            })
        )
    return result
def get_shots_and_tasks(criterions=None):
    """
    Get all shots for given criterions with related tasks for each shot.

    :param criterions: Optional filter dict; supported keys are ``id``,
        ``project_id``, ``episode_id`` and ``assigned_to``.
    :returns: List of serialized shot dicts, each carrying a ``tasks``
        list with assignee person ids.
    """
    # Fix: the original used a mutable default argument (criterions={}).
    if criterions is None:
        criterions = {}
    shot_type = get_shot_type()
    shot_map = {}
    task_map = {}
    # Sequence/episode are both Entity rows, reached through parent links.
    Sequence = aliased(Entity, name="sequence")
    Episode = aliased(Entity, name="episode")
    query = (
        Entity.query.join(Project)
        .join(Sequence, Sequence.id == Entity.parent_id)
        .outerjoin(Episode, Episode.id == Sequence.parent_id)
        .outerjoin(Task, Task.entity_id == Entity.id)
        .outerjoin(assignees_table)
        .add_columns(
            Episode.name,
            Episode.id,
            Sequence.name,
            Sequence.id,
            Task.id,
            Task.task_type_id,
            Task.task_status_id,
            Task.priority,
            Task.estimation,
            Task.duration,
            Task.retake_count,
            Task.real_start_date,
            Task.end_date,
            Task.start_date,
            Task.due_date,
            Task.last_comment_date,
            assignees_table.columns.person,
            Project.id,
            Project.name,
        )
        .filter(Entity.entity_type_id == shot_type["id"])
    )
    if "id" in criterions:
        query = query.filter(Entity.id == criterions["id"])
    if "project_id" in criterions:
        query = query.filter(Entity.project_id == criterions["project_id"])
    if "episode_id" in criterions:
        query = query.filter(Sequence.parent_id == criterions["episode_id"])
    if "assigned_to" in criterions:
        query = query.filter(user_service.build_assignee_filter())
        # NOTE(review): this mutates the caller's dict — kept for
        # backward compatibility, but callers should not rely on it.
        del criterions["assigned_to"]
    # One row per (shot, task, assignee); fold rows into unique shots and
    # tasks while accumulating assignees.
    for (
        shot,
        episode_name,
        episode_id,
        sequence_name,
        sequence_id,
        task_id,
        task_type_id,
        task_status_id,
        task_priority,
        task_estimation,
        task_duration,
        task_retake_count,
        task_real_start_date,
        task_end_date,
        task_start_date,
        task_due_date,
        task_last_comment_date,
        person_id,
        project_id,
        project_name,
    ) in query.all():
        shot_id = str(shot.id)
        shot.data = shot.data or {}
        if shot_id not in shot_map:
            shot_map[shot_id] = fields.serialize_dict(
                {
                    "canceled": shot.canceled,
                    "data": shot.data,
                    "description": shot.description,
                    "entity_type_id": shot.entity_type_id,
                    "episode_id": episode_id,
                    "episode_name": episode_name or "",
                    "fps": shot.data.get("fps", None),
                    "frame_in": shot.data.get("frame_in", None),
                    "frame_out": shot.data.get("frame_out", None),
                    "id": shot.id,
                    "name": shot.name,
                    "nb_frames": shot.nb_frames,
                    "parent_id": shot.parent_id,
                    "preview_file_id": shot.preview_file_id or None,
                    "project_id": project_id,
                    "project_name": project_name,
                    "sequence_id": sequence_id,
                    "sequence_name": sequence_name,
                    "source_id": shot.source_id,
                    "tasks": [],
                    "type": "Shot",
                }
            )
        if task_id is not None:
            if task_id not in task_map:
                task_dict = fields.serialize_dict(
                    {
                        "id": task_id,
                        "entity_id": shot_id,
                        "task_status_id": task_status_id,
                        "task_type_id": task_type_id,
                        "priority": task_priority or 0,
                        "estimation": task_estimation,
                        "duration": task_duration,
                        "retake_count": task_retake_count,
                        "real_start_date": task_real_start_date,
                        "end_date": task_end_date,
                        "start_date": task_start_date,
                        "due_date": task_due_date,
                        "last_comment_date": task_last_comment_date,
                        "assignees": [],
                    }
                )
                task_map[task_id] = task_dict
                shot_dict = shot_map[shot_id]
                shot_dict["tasks"].append(task_dict)
            if person_id:
                task_map[task_id]["assignees"].append(str(person_id))
    return list(shot_map.values())
def get_last_news_for_project(
    project_id,
    news_id=None,
    only_preview=False,
    task_type_id=None,
    task_status_id=None,
    author_id=None,
    page=1,
    page_size=50,
    before=None,
    after=None,
):
    """
    Return last 50 news for given project. Add related information to
    make it displayable.

    :returns: Dict with ``data`` (serialized news entries), ``total``,
        ``nb_pages``, ``limit``, ``offset`` and ``page``.
    """
    offset = (page - 1) * page_size
    # Newest first; Comment and PreviewFile are outer-joined because a
    # news entry may have neither.
    query = (News.query.order_by(News.created_at.desc()).join(
        Task, News.task_id == Task.id).join(Project).join(
            Entity, Task.entity_id == Entity.id).outerjoin(
                Comment, News.comment_id == Comment.id).outerjoin(
                    PreviewFile,
                    News.preview_file_id == PreviewFile.id).filter(
                        Task.project_id == project_id))
    if news_id is not None:
        query = query.filter(News.id == news_id)
    if task_status_id is not None:
        query = query.filter(Comment.task_status_id == task_status_id)
        # Status filtering only makes sense for status-change news.
        query = query.filter(News.change == True)
    if task_type_id is not None:
        query = query.filter(Task.task_type_id == task_type_id)
    if author_id is not None:
        query = query.filter(News.author_id == author_id)
    if only_preview:
        query = query.filter(News.preview_file_id != None)
    if after is not None:
        query = query.filter(News.created_at > after)
    if before is not None:
        query = query.filter(News.created_at < before)
    # Totals are computed before pagination is applied.
    (total, nb_pages) = _get_news_total(query, page_size)
    query = query.add_columns(
        Project.id,
        Project.name,
        Task.task_type_id,
        Comment.id,
        Comment.task_status_id,
        Task.entity_id,
        PreviewFile.extension,
        Entity.preview_file_id,
    )
    query = query.limit(page_size)
    query = query.offset(offset)
    news_list = query.all()
    result = []
    for (
        news,
        project_id,
        project_name,
        task_type_id,
        comment_id,
        task_status_id,
        task_entity_id,
        preview_file_extension,
        entity_preview_file_id,
    ) in news_list:
        (full_entity_name,
         episode_id) = names_service.get_full_entity_name(task_entity_id)
        result.append(
            fields.serialize_dict({
                "id": news.id,
                "type": "News",
                "author_id": news.author_id,
                "comment_id": news.comment_id,
                "task_id": news.task_id,
                "task_type_id": task_type_id,
                "task_status_id": task_status_id,
                "task_entity_id": task_entity_id,
                "preview_file_id": news.preview_file_id,
                "preview_file_extension": preview_file_extension,
                "project_id": project_id,
                "project_name": project_name,
                "created_at": news.created_at,
                "change": news.change,
                "full_entity_name": full_entity_name,
                "episode_id": episode_id,
                "entity_preview_file_id": entity_preview_file_id,
            }))
    return {
        "data": result,
        "total": total,
        "nb_pages": nb_pages,
        "limit": page_size,
        "offset": offset,
        "page": page,
    }
def get_last_notifications(notification_id=None, after=None, before=None):
    """
    Return last 100 user notifications.

    Each entry aggregates the notification with related project, task,
    comment and author data. Comment/reply text from client authors is
    hidden from artist users.
    """
    current_user = persons_service.get_current_user()
    Author = aliased(Person, name="author")
    is_current_user_artist = current_user["role"] == "user"
    result = []
    # Newest first; Comment is outer-joined because some notifications
    # carry no comment.
    query = (
        Notification.query.filter_by(person_id=current_user["id"])
        .order_by(Notification.created_at.desc())
        .join(Author, Author.id == Notification.author_id)
        .join(Task, Task.id == Notification.task_id)
        .join(Project, Project.id == Task.project_id)
        .outerjoin(Comment, Comment.id == Notification.comment_id)
        .add_columns(
            Project.id,
            Project.name,
            Task.task_type_id,
            Comment.id,
            Comment.task_status_id,
            Comment.text,
            Comment.replies,
            Task.entity_id,
            Author.role,
        )
    )
    if notification_id is not None:
        query = query.filter(Notification.id == notification_id)
    if after is not None:
        query = query.filter(Notification.created_at > after)
    if before is not None:
        query = query.filter(Notification.created_at < before)
    notifications = query.limit(100).all()
    for (
        notification,
        project_id,
        project_name,
        task_type_id,
        comment_id,
        task_status_id,
        comment_text,
        comment_replies,
        task_entity_id,
        role,
    ) in notifications:
        (full_entity_name, episode_id) = names_service.get_full_entity_name(
            task_entity_id
        )
        preview_file_id = None
        mentions = []
        if comment_id is not None:
            # Reload the comment to access its previews/mentions relations.
            comment = Comment.get(comment_id)
            if len(comment.previews) > 0:
                preview_file_id = comment.previews[0].id
            mentions = comment.mentions or []
        reply_text = ""
        if notification.type == "reply":
            # Find the reply this notification points at inside the
            # comment's replies JSON list.
            reply = next(
                (
                    reply
                    for reply in comment_replies
                    if reply["id"] == str(notification.reply_id)
                ),
                None,
            )
            if reply is not None:
                reply_text = reply["text"]
        # Hide client-authored text from artist users.
        if role == "client" and is_current_user_artist:
            comment_text = ""
            reply_text = ""
        result.append(
            fields.serialize_dict(
                {
                    "id": notification.id,
                    "type": "Notification",
                    "notification_type": notification.type,
                    "author_id": notification.author_id,
                    "comment_id": notification.comment_id,
                    "task_id": notification.task_id,
                    "task_type_id": task_type_id,
                    "task_status_id": task_status_id,
                    "mentions": mentions,
                    "preview_file_id": preview_file_id,
                    "project_id": project_id,
                    "project_name": project_name,
                    "comment_text": comment_text,
                    "reply_text": reply_text,
                    "created_at": notification.created_at,
                    "read": notification.read,
                    "change": notification.change,
                    "full_entity_name": full_entity_name,
                    "episode_id": episode_id,
                }
            )
        )
    return result
def get_last_news_for_project(
    project_id,
    filters=None,
    news_id=None,
    only_preview=False,
    task_type_id=None,
    task_status_id=None,
    page=1,
    page_size=50
):
    """
    Return last news for given project (``page_size`` entries per page).
    Add related information to make it displayable.

    :param filters: Currently unused; kept for interface compatibility.
    """
    # Fix: the original used a mutable default argument (filters={}) and
    # its docstring wrongly described the result as user notifications.
    if filters is None:
        filters = {}
    offset = (page - 1) * page_size
    # Newest first; Comment and PreviewFile are outer-joined because a
    # news entry may have neither.
    query = News.query \
        .order_by(News.created_at.desc()) \
        .join(Task, News.task_id == Task.id) \
        .join(Project) \
        .join(Entity, Task.entity_id == Entity.id) \
        .outerjoin(Comment, News.comment_id == Comment.id) \
        .outerjoin(PreviewFile, News.preview_file_id == PreviewFile.id) \
        .filter(Task.project_id == project_id) \
        .add_columns(
            Project.id,
            Project.name,
            Task.task_type_id,
            Comment.id,
            Comment.task_status_id,
            Task.entity_id,
            PreviewFile.extension,
            Entity.preview_file_id
        )
    if news_id is not None:
        query = query.filter(News.id == news_id)
    if task_status_id is not None:
        query = query.filter(Comment.task_status_id == task_status_id)
    if task_type_id is not None:
        query = query.filter(Task.task_type_id == task_type_id)
    if only_preview:
        query = query.filter(News.preview_file_id != None)
    query = query.limit(page_size)
    query = query.offset(offset)
    news_list = query.all()
    result = []
    for (
        news,
        project_id,
        project_name,
        task_type_id,
        comment_id,
        task_status_id,
        task_entity_id,
        preview_file_extension,
        entity_preview_file_id
    ) in news_list:
        (full_entity_name, episode_id) = \
            names_service.get_full_entity_name(task_entity_id)
        result.append(fields.serialize_dict({
            "id": news.id,
            "author_id": news.author_id,
            "comment_id": news.comment_id,
            "task_id": news.task_id,
            "task_type_id": task_type_id,
            "task_status_id": task_status_id,
            "task_entity_id": task_entity_id,
            "preview_file_id": news.preview_file_id,
            "preview_file_extension": preview_file_extension,
            "project_id": project_id,
            "project_name": project_name,
            "created_at": news.created_at,
            "change": news.change,
            "full_entity_name": full_entity_name,
            "episode_id": episode_id,
            "entity_preview_file_id": entity_preview_file_id
        }))
    return result
def get_last_notifications(notification_id=None):
    """
    Return last 100 user notifications.

    Each entry aggregates the notification with related project, task and
    (optional) comment data, including preview file and mentions when a
    comment is attached.
    """
    current_user = persons_service.get_current_user_raw()
    result = []
    # Newest first; Comment is outer-joined because some notifications
    # carry no comment.
    query = (Notification.query.filter_by(
        person_id=current_user.id).order_by(
            Notification.created_at.desc()).join(
                Task, Project).outerjoin(Comment).add_columns(
                    Project.id,
                    Project.name,
                    Task.task_type_id,
                    Comment.id,
                    Comment.task_status_id,
                    Comment.text,
                    Task.entity_id,
                ))
    if notification_id is not None:
        query = query.filter(Notification.id == notification_id)
    notifications = query.limit(100).all()
    for (
        notification,
        project_id,
        project_name,
        task_type_id,
        comment_id,
        task_status_id,
        comment_text,
        task_entity_id,
    ) in notifications:
        (full_entity_name,
         episode_id) = names_service.get_full_entity_name(task_entity_id)
        preview_file_id = None
        mentions = []
        if comment_id is not None:
            # Reload the comment to access its previews/mentions relations.
            comment = Comment.get(comment_id)
            if len(comment.previews) > 0:
                preview_file_id = comment.previews[0].id
            mentions = comment.mentions or []
        result.append(
            fields.serialize_dict({
                "id": notification.id,
                "type": "Notification",
                "notification_type": notification.type,
                "author_id": notification.author_id,
                "comment_id": notification.comment_id,
                "task_id": notification.task_id,
                "task_type_id": task_type_id,
                "task_status_id": task_status_id,
                "mentions": mentions,
                "preview_file_id": preview_file_id,
                "project_id": project_id,
                "project_name": project_name,
                "comment_text": comment_text,
                "created_at": notification.created_at,
                "read": notification.read,
                "change": notification.change,
                "full_entity_name": full_entity_name,
                "episode_id": episode_id,
            }))
    return result