def get_new_file_history_info(ent, avatar_size):
    """Build a JSON-serializable dict describing one file-history event.

    `ent` is a file-history event entity; `avatar_size` is the pixel size
    requested for the creator's avatar.
    """
    op_user = ent.op_user
    avatar_url, is_default, date_uploaded = api_avatar_url(op_user, avatar_size)
    return {
        'creator_avatar_url': avatar_url,
        'creator_email': op_user,
        'creator_name': email2nickname(op_user),
        'creator_contact_email': email2contact_email(op_user),
        'op_type': ent.op_type,
        'ctime': utc_datetime_to_isoformat_timestr(ent.timestamp),
        'commit_id': ent.commit_id,
        'size': ent.size,
        'rev_file_id': ent.file_id,
        # old_path only exists on rename/move style events.
        'old_path': getattr(ent, 'old_path', ''),
        'path': ent.path,
    }
def to_dict(self, include_deleted=False):
    """Return a JSON-serializable representation of this dtable.

    When include_deleted is True, the soft-delete flag and the deletion
    time (empty string if never deleted) are included as well.
    """
    data = {
        'id': self.pk,
        'workspace_id': self.workspace_id,
        'uuid': self.uuid,
        'name': self.dtable_name,
        'creator': email2nickname(self.creator),
        'modifier': email2nickname(self.modifier),
        'created_at': datetime_to_isoformat_timestr(self.created_at),
        'updated_at': datetime_to_isoformat_timestr(self.updated_at),
    }
    if include_deleted:
        data['deleted'] = self.deleted
        if self.delete_time:
            data['delete_time'] = utc_datetime_to_isoformat_timestr(self.delete_time)
        else:
            data['delete_time'] = ''
    return data
def get_new_file_history_info(ent, avatar_size):
    """Convert a file-history event entity into a response dict.

    NOTE(review): this function also appears earlier in the file;
    consider consolidating the duplicates.
    """
    info = {}
    email = ent.op_user
    avatar, _is_default, _date_uploaded = api_avatar_url(email, avatar_size)
    info['creator_avatar_url'] = avatar
    info['creator_email'] = email
    info['creator_name'] = email2nickname(email)
    info['creator_contact_email'] = email2contact_email(email)
    info['op_type'] = ent.op_type
    info['ctime'] = utc_datetime_to_isoformat_timestr(ent.timestamp)
    info['commit_id'] = ent.commit_id
    info['size'] = ent.size
    info['rev_file_id'] = ent.file_id
    # Not every event type carries an old_path; default to ''.
    info['old_path'] = getattr(ent, 'old_path', '')
    info['path'] = ent.path
    return info
def get(self, request):
    """Get all file access logs, paginated.

    Query params: page, per_page, email (filter by user),
    repo_id (filter by library).

    Permission checking:
    1. only admin can perform this action.
    """
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    user_selected = request.GET.get('email', None)
    if user_selected and not is_valid_email(user_selected):
        error_msg = 'email %s invalid.' % user_selected
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    repo_id_selected = request.GET.get('repo_id', None)
    if repo_id_selected and not is_valid_repo_id_format(repo_id_selected):
        error_msg = 'repo_id %s invalid.' % repo_id_selected
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    start = per_page * (current_page - 1)
    limit = per_page

    # org_id = 0, show all file audit
    events = get_file_audit_events(user_selected, 0, repo_id_selected,
                                   start, limit) or []
    # A full page suggests more results may follow.
    has_next_page = len(events) == per_page

    # Resolve each distinct user/repo exactly once, outside the per-event
    # loop, to reduce memcache/RPC fetch cost (sets dedup for us, so the
    # original per-key membership checks were redundant).
    user_email_set = {e.user for e in events}
    repo_id_set = {e.repo_id for e in events}
    nickname_dict = {e: email2nickname(e) for e in user_email_set}
    contact_email_dict = {e: email2contact_email(e) for e in user_email_set}
    repo_dict = {e: seafile_api.get_repo(e) for e in repo_id_set}

    events_info = []
    for ev in events:
        data = {}
        user_email = ev.user
        data['email'] = user_email
        data['name'] = nickname_dict.get(user_email, '')
        data['contact_email'] = contact_email_dict.get(user_email, '')
        data['ip'] = ev.ip
        data['event_type'], data['device'] = generate_file_audit_event_type(ev)
        data['time'] = utc_datetime_to_isoformat_timestr(ev.timestamp)

        repo_id = ev.repo_id
        data['repo_id'] = repo_id
        repo = repo_dict.get(repo_id, None)
        # Repo may have been deleted since the event was logged.
        data['repo_name'] = repo.name if repo else ''

        if ev.file_path.endswith('/'):
            # Directory access: '/' for the library root, otherwise the
            # directory's own basename.
            data['file_or_dir_name'] = '/' if ev.file_path == '/' else \
                os.path.basename(ev.file_path.rstrip('/'))
        else:
            data['file_or_dir_name'] = os.path.basename(ev.file_path)
        events_info.append(data)

    resp = {
        'file_access_log_list': events_info,
        'has_next_page': has_next_page,
    }
    return Response(resp)
def get(self, request):
    """Get all share permissions logs, paginated.

    Query params: page, per_page, email (filter by sharer),
    repo_id (filter by library).

    Permission checking:
    1. only admin can perform this action.
    """
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    user_selected = request.GET.get('email', None)
    if user_selected and not is_valid_email(user_selected):
        error_msg = 'email %s invalid.' % user_selected
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    repo_id_selected = request.GET.get('repo_id', None)
    if repo_id_selected and not is_valid_repo_id_format(repo_id_selected):
        error_msg = 'repo_id %s invalid.' % repo_id_selected
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    start = per_page * (current_page - 1)
    limit = per_page

    # org_id = 0, show all file audit
    events = get_perm_audit_events(user_selected, 0, repo_id_selected,
                                   start, limit) or []
    # A full page suggests more results may follow.
    has_next_page = len(events) == per_page

    # Resolve each distinct from-user/to-user/repo exactly once, outside
    # the per-event loop, to reduce memcache/RPC fetch cost (sets dedup
    # for us, so the original per-key membership checks were redundant).
    from_user_email_set = {e.from_user for e in events}
    to_user_email_set = {e.to for e in events}
    repo_id_set = {e.repo_id for e in events}

    from_nickname_dict = {e: email2nickname(e) for e in from_user_email_set}
    from_contact_email_dict = {e: email2contact_email(e) for e in from_user_email_set}
    to_nickname_dict = {e: email2nickname(e) for e in to_user_email_set}
    to_contact_email_dict = {e: email2contact_email(e) for e in to_user_email_set}
    repo_dict = {e: seafile_api.get_repo(e) for e in repo_id_set}

    events_info = []
    for ev in events:
        data = {}
        from_user_email = ev.from_user
        to_user_email = ev.to
        data['from_user_email'] = from_user_email
        data['from_user_name'] = from_nickname_dict.get(from_user_email, '')
        data['from_user_contact_email'] = from_contact_email_dict.get(from_user_email, '')
        data['to_user_email'] = to_user_email
        data['to_user_name'] = to_nickname_dict.get(to_user_email, '')
        data['to_user_contact_email'] = to_contact_email_dict.get(to_user_email, '')
        data['etype'] = ev.etype
        data['permission'] = ev.permission

        repo_id = ev.repo_id
        data['repo_id'] = repo_id
        repo = repo_dict.get(repo_id, None)
        # Repo may have been deleted since the event was logged.
        data['repo_name'] = repo.name if repo else ''

        # '/' means the whole library; otherwise show the folder's name.
        data['folder'] = '/' if ev.file_path == '/' else \
            os.path.basename(ev.file_path.rstrip('/'))
        data['date'] = utc_datetime_to_isoformat_timestr(ev.timestamp)
        events_info.append(data)

    resp = {
        'share_permission_log_list': events_info,
        'has_next_page': has_next_page,
    }
    return Response(resp)
def get(self, request):
    """Get all file update logs, paginated.

    Query params: page, per_page, email (filter by user),
    repo_id (filter by library).

    Permission checking:
    1. only admin can perform this action.
    """
    if not request.user.admin_permissions.can_view_user_log():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    user_selected = request.GET.get('email', None)
    if user_selected and not is_valid_email(user_selected):
        error_msg = 'email %s invalid.' % user_selected
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    repo_id_selected = request.GET.get('repo_id', None)
    if repo_id_selected and not is_valid_repo_id_format(repo_id_selected):
        error_msg = 'repo_id %s invalid.' % repo_id_selected
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    start = per_page * (current_page - 1)
    limit = per_page

    # org_id = 0, show all file audit
    events = get_file_update_events(user_selected, 0, repo_id_selected,
                                    start, limit) or []
    # A full page suggests more results may follow.
    has_next_page = len(events) == per_page

    # Resolve each distinct user/repo exactly once, outside the per-event
    # loop, to reduce memcache/RPC fetch cost (sets dedup for us, so the
    # original per-key membership checks were redundant).
    user_email_set = {e.user for e in events}
    repo_id_set = {e.repo_id for e in events}
    nickname_dict = {e: email2nickname(e) for e in user_email_set}
    contact_email_dict = {e: email2contact_email(e) for e in user_email_set}
    repo_dict = {e: seafile_api.get_repo(e) for e in repo_id_set}

    events_info = []
    for ev in events:
        data = {}
        user_email = ev.user
        data['email'] = user_email
        data['name'] = nickname_dict.get(user_email, '')
        data['contact_email'] = contact_email_dict.get(user_email, '')
        data['time'] = utc_datetime_to_isoformat_timestr(ev.timestamp)

        repo_id = ev.repo_id
        data['repo_id'] = repo_id
        repo = repo_dict.get(repo_id, None)
        # Repo may have been deleted since the event was logged.
        data['repo_name'] = repo.name if repo else ''
        data['repo_encrypted'] = repo.encrypted if repo else None
        data['file_operation'] = ev.file_oper
        data['commit_id'] = ev.commit_id
        events_info.append(data)

    resp = {
        'file_update_log_list': events_info,
        'has_next_page': has_next_page,
    }
    return Response(resp)
def get(self, request, format=None):
    """List recent activities for the current user, paginated.

    Query params: page (default 1), per_page (default 25),
    avatar_size (default 72).
    """
    if not EVENTS_ENABLED:
        return api_error(status.HTTP_404_NOT_FOUND, 'Events not enabled.')

    try:
        page = int(request.GET.get('page', ''))
    except ValueError:
        page = 1
    try:
        per_page = int(request.GET.get('per_page', ''))
    except ValueError:
        per_page = 25
    start = (page - 1) * per_page
    count = per_page

    # Parse avatar_size once, before the loop. The original parsed it
    # inside the loop with `except ValueError as e`, which clobbered the
    # loop variable `e` and — since Python 3 deletes the except target
    # when the handler exits — raised NameError on the next `e.op_user`
    # whenever avatar_size was invalid.
    try:
        avatar_size = int(request.GET.get('avatar_size', 72))
    except ValueError:
        avatar_size = 72

    email = request.user.username
    try:
        events = get_user_activities(email, start, count)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    events_list = []
    for e in events:
        d = dict(op_type=e.op_type)
        d['repo_id'] = e.repo_id
        d['repo_name'] = e.repo_name
        d['obj_type'] = e.obj_type
        d['commit_id'] = e.commit_id
        d['path'] = e.path
        d['name'] = '' if e.path == '/' else os.path.basename(e.path)
        d['author_email'] = e.op_user
        d['author_name'] = email2nickname(e.op_user)
        d['author_contact_email'] = email2contact_email(e.op_user)
        url, is_default, date_uploaded = api_avatar_url(e.op_user, avatar_size)
        d['avatar_url'] = request.build_absolute_uri(url)
        d['time'] = utc_datetime_to_isoformat_timestr(e.timestamp)

        # Op-type specific extras.
        if e.op_type == 'clean-up-trash':
            d['days'] = e.days
        elif e.op_type == 'rename' and e.obj_type == 'repo':
            d['old_repo_name'] = e.old_repo_name
        elif e.op_type == 'move' and e.obj_type in ['dir', 'file']:
            d['old_path'] = e.old_path
        elif e.op_type == 'rename' and e.obj_type in ['dir', 'file']:
            d['old_path'] = e.old_path
            d['old_name'] = os.path.basename(e.old_path)
        elif e.op_type == 'publish':
            d['old_path'] = e.old_path
        elif d['name'].endswith('(draft).md'):
            if e.op_type in ('create', 'edit') and e.obj_type == 'file':
                # The original had a no-op bare `Draft.DoesNotExist`
                # expression in the no-match branch; use .first() so a
                # missing draft is simply skipped.
                draft = Draft.objects.filter(username=e.op_user,
                                             origin_repo_id=e.repo_id,
                                             draft_file_path=e.path).first()
                if draft:
                    d['draft_id'] = draft.id

        events_list.append(d)

    ret = {
        'events': events_list
    }
    return Response(ret)
def do_action(self):
    """Send periodic "dtable updates" digest emails.

    For every user who has opted in (KEY_DTABLE_UPDATES_EMAIL_INTERVAL),
    collect activities on all tables they can see (owned, group-owned,
    shared-to) since their last emailed time, render them in the user's
    language, and send an HTML digest. On success the last-emailed time
    is advanced so the next run starts from now.
    """
    # Collect (email, interval-in-seconds) for every opted-in user.
    emails = []
    user_dtable_updates_email_intervals = []
    for ele in UserOptions.objects.filter(
            option_key=KEY_DTABLE_UPDATES_EMAIL_INTERVAL):
        try:
            user_dtable_updates_email_intervals.append(
                (ele.email, int(ele.option_val)))
            emails.append(ele.email)
        except Exception as e:
            # Malformed option value: log and skip this user.
            logger.error(e)
            self.stderr.write('[%s]: %s' % (str(datetime.now()), e))
            continue

    # Map user -> datetime of the last digest actually sent.
    user_last_emailed_time_dict = {}
    for ele in UserOptions.objects.filter(
            option_key=KEY_DTABLE_UPDATES_LAST_EMAILED_TIME).filter(
            email__in=emails):
        try:
            user_last_emailed_time_dict[ele.email] = datetime.strptime(
                ele.option_val, "%Y-%m-%d %H:%M:%S")
        except Exception as e:
            # Unparseable timestamp: log and fall back to the default below.
            logger.error(e)
            self.stderr.write('[%s]: %s' % (str(datetime.now()), e))
            continue

    for (username, interval_val) in user_dtable_updates_email_intervals:
        # save current language
        cur_language = translation.get_language()

        # get and activate the user's language so the email renders localized
        user_language = self.get_user_language(username)
        translation.activate(user_language)
        logger.debug('Set language code to %s for user: %s' %
                     (user_language, username))
        self.stdout.write('[%s] Set language code to %s for user: %s' %
                          (str(datetime.now()), user_language, username))

        # get last_emailed_time if any, defaults to today 00:00:00.0
        last_emailed_time = user_last_emailed_time_dict.get(username, None)
        now = datetime.utcnow().replace(microsecond=0)
        if not last_emailed_time:
            last_emailed_time = datetime.utcnow().replace(hour=0).replace(
                minute=0).replace(second=0).replace(microsecond=0)
        else:
            # Interval not yet elapsed: skip this user for now.
            # NOTE(review): skips without restoring cur_language; the next
            # iteration overwrites it, so only the final activation matters.
            if (now - last_emailed_time).total_seconds() < interval_val:
                continue

        # find all the user's tables and groups' tables
        groups = ccnet_api.get_groups(username, return_ancestors=True)
        owner_list = [username] + [
            '%s@seafile_group' % group.id for group in groups
        ]
        dtables = list(
            DTables.objects.filter(workspace__owner__in=owner_list))
        # find all tables shared to user
        shared_tables = list(DTableShare.objects.list_by_to_user(username))
        # combine tables
        dtables.extend([item.dtable for item in shared_tables])
        # dtable uuid (hex) -> dtable object, for name/link lookups below
        dtables_uuid_map = {dtable.uuid.hex: dtable for dtable in dtables}

        # query all activities about above dtables with DB SQL
        cursor = connection.cursor()
        sql = "SELECT a.* FROM activities a JOIN user_activities ua ON a.id=ua.activity_id WHERE ua.timestamp > %s AND ua.username=%s ORDER BY ua.timestamp DESC"
        cursor.execute(sql, (last_emailed_time, username))  # time and username
        col_names = [desc[0] for desc in cursor.description]

        activities, activities_count = [], 0
        for activity in cursor.fetchall():
            # Turn the raw row tuple into a column-name keyed dict.
            activity = dict(zip(col_names, activity))
            # Drop activities on tables this user cannot see.
            if activity['dtable_uuid'] not in dtables_uuid_map:
                continue
            activity_detail = json.loads(activity['detail'])
            activity_dict = dict(dtable_uuid=activity['dtable_uuid'])
            activity_dict['dtable_name'] = dtables_uuid_map[
                activity['dtable_uuid']].name if activity[
                'dtable_uuid'] in dtables_uuid_map else ''
            activity_dict['row_id'] = activity['row_id']
            activity_dict['op_type'] = activity['op_type']
            activity_dict['author_email'] = activity['op_user']
            activity_dict['author_name'] = email2nickname(
                activity['op_user'])
            activity_dict['author_contact_email'] = email2contact_email(
                activity['op_user'])
            activity_dict['op_time'] = utc_datetime_to_isoformat_timestr(
                activity['op_time'])
            activity_dict['table_id'] = activity_detail['table_id']
            activity_dict['table_name'] = activity_detail['table_name']
            activity_dict['row_data'] = activity_detail['row_data']
            activity_dict['row_name'] = self.get_row_name(
                activity_dict['row_data']) or activity_detail.get(
                'row_name', '')  # compatible with previous data
            avatar_size = 72  # todo: size
            url, is_default, date_uploaded = api_avatar_url(
                activity['op_user'], avatar_size)
            activity_dict['avatar_url'] = url
            # fields for html-display
            activity_dict['op_user_link'] = a_tag(
                activity_dict['author_name'],
                user_info_url(activity['op_user']))
            activity_dict['dtable_link'] = a_tag(
                activity_dict['dtable_name'],
                dtable_url(dtables_uuid_map[activity_dict['dtable_uuid']]))
            activity_dict['details'] = self.format_modify_operation(
                activity_dict)
            activity_dict['local_timestamp'] = utc_to_local(
                activity['op_time'])
            activities_count += 1
            # Cap the emailed list while still counting everything.
            # NOTE(review): `<= 100` allows up to 101 entries — confirm
            # whether the intended cap is 100.
            if len(activities) <= 100:
                activities.append(activity_dict)

        if not activities:
            # Nothing to send: restore language and move on.
            translation.activate(cur_language)
            continue

        # Template context for the digest email.
        c = {
            'name': email2nickname(username),
            'updates_count': activities_count,
            'updates': activities,
        }
        contact_email = email2contact_email(username)
        try:
            send_html_email(
                _('New table updates on %s') % get_site_name(),
                'notifications/dtable_updates_email.html', c, None,
                [contact_email])
            # Only advance the last-emailed time after a successful send.
            now = datetime.utcnow().replace(microsecond=0)
            UserOptions.objects.set_dtable_updates_last_emailed_time(
                username, now)
        except Exception as e:
            logger.error('Failed to send email to %s, error detail: %s' %
                         (contact_email, e))
            self.stderr.write('[%s] Failed to send email to %s, error '
                              'detail: %s' %
                              (str(datetime.now()), contact_email, e))
        finally:
            # reset lang
            translation.activate(cur_language)
def get(self, request):
    """List recent dtable activities for the current user, paginated.

    Query params: page (default 1), per_page (default 25),
    avatar_size (default 72).
    """
    if not DTABLE_EVENTS_ENABLED:
        return api_error(status.HTTP_400_BAD_REQUEST, 'Events not enabled.')

    try:
        page = int(request.GET.get('page', ''))
    except ValueError:
        page = 1
    try:
        per_page = int(request.GET.get('per_page', ''))
    except ValueError:
        per_page = 25
    start = (page - 1) * per_page
    count = per_page

    # Parse avatar_size once; the original re-parsed it on every loop
    # iteration.
    try:
        avatar_size = int(request.GET.get('avatar_size', 72))
    except ValueError:
        avatar_size = 72

    username = request.user.username
    try:
        activity_list = get_user_activities(username, start, count)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    # Resolve dtable names with a single query. The original deduped
    # against the still-empty name map, so dtable_uuid_list collected
    # one entry per activity (duplicates included); dedup properly here.
    dtable_uuid_name_map = dict()
    dtable_uuid_list = list()
    for activity in activity_list:
        if activity.dtable_uuid not in dtable_uuid_list:
            dtable_uuid_list.append(activity.dtable_uuid)
    if dtable_uuid_list:
        dtables = DTables.objects.filter(uuid__in=dtable_uuid_list,
                                         deleted=False)
        for dtable in dtables:
            dtable_uuid_name_map[dtable.uuid.hex] = dtable.name

    activities = []
    for activity in activity_list:
        activity_dict = dict(dtable_uuid=activity.dtable_uuid)
        # Deleted tables are absent from the map -> empty name.
        activity_dict['dtable_name'] = dtable_uuid_name_map[activity.dtable_uuid] \
            if activity.dtable_uuid in dtable_uuid_name_map else ''
        activity_dict['row_id'] = activity.row_id
        activity_dict['op_type'] = activity.op_type
        activity_dict['author_email'] = activity.op_user
        activity_dict['author_name'] = email2nickname(activity.op_user)
        activity_dict['author_contact_email'] = email2contact_email(activity.op_user)
        activity_dict['op_time'] = utc_datetime_to_isoformat_timestr(activity.op_time)
        activity_dict['table_id'] = activity.table_id
        activity_dict['table_name'] = activity.table_name
        # compatible with previous data
        activity_dict['row_name'] = getattr(activity, "row_name", "")
        activity_dict['row_data'] = activity.row_data
        activity_dict['op_app'] = activity.op_app

        url, is_default, date_uploaded = api_avatar_url(activity.op_user,
                                                        avatar_size)
        activity_dict['avatar_url'] = url
        if activity_dict['op_app']:
            activity_dict['app_avatar_url'] = api_app_avatar_url()[0]
        activities.append(activity_dict)

    return Response({'activities': activities})
def get(self, request, format=None):
    """List recent activities for the current user, paginated.

    Query params: page (default 1), per_page (default 25),
    avatar_size (default 72).
    """
    if not EVENTS_ENABLED:
        return api_error(status.HTTP_404_NOT_FOUND, 'Events not enabled.')

    try:
        page = int(request.GET.get('page', ''))
    except ValueError:
        page = 1
    try:
        per_page = int(request.GET.get('per_page', ''))
    except ValueError:
        per_page = 25
    start = (page - 1) * per_page
    count = per_page

    # Parse avatar_size once, before the loop. The original parsed it
    # inside the loop with `except ValueError as e`, which clobbered the
    # loop variable `e` and — since Python 3 deletes the except target
    # when the handler exits — raised NameError on the next `e.op_user`
    # whenever avatar_size was invalid.
    try:
        avatar_size = int(request.GET.get('avatar_size', 72))
    except ValueError:
        avatar_size = 72

    email = request.user.username
    try:
        events = get_user_activities(email, start, count)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    events_list = []
    for e in events:
        d = dict(op_type=e.op_type)
        d['repo_id'] = e.repo_id
        d['repo_name'] = e.repo_name
        d['obj_type'] = e.obj_type
        d['commit_id'] = e.commit_id
        d['path'] = e.path
        d['name'] = '' if e.path == '/' else os.path.basename(e.path)
        d['author_email'] = e.op_user
        d['author_name'] = email2nickname(e.op_user)
        d['author_contact_email'] = email2contact_email(e.op_user)
        url, is_default, date_uploaded = api_avatar_url(e.op_user, avatar_size)
        d['avatar_url'] = request.build_absolute_uri(url)
        d['time'] = utc_datetime_to_isoformat_timestr(e.timestamp)

        # Op-type specific extras.
        if e.op_type == 'clean-up-trash':
            d['days'] = e.days
        elif e.op_type == 'rename' and e.obj_type == 'repo':
            d['old_repo_name'] = e.old_repo_name
        elif e.op_type == 'move' and e.obj_type in ['dir', 'file']:
            d['old_path'] = e.old_path
        elif e.op_type == 'rename' and e.obj_type in ['dir', 'file']:
            d['old_path'] = e.old_path
            d['old_name'] = os.path.basename(e.old_path)
        elif e.op_type == 'publish':
            d['old_path'] = e.old_path
        elif d['name'].endswith('(draft).md'):
            if e.op_type in ('create', 'edit') and e.obj_type == 'file':
                try:
                    draft = Draft.objects.get(username=e.op_user,
                                              origin_repo_id=e.repo_id,
                                              draft_file_path=e.path)
                    d['draft_id'] = draft.id
                except Draft.DoesNotExist:
                    pass

        events_list.append(d)

    ret = {
        'events': events_list
    }
    return Response(ret)