def get(self, request):
    """Return user login logs inside a day range; admin-log viewers only."""
    if not request.user.admin_permissions.can_view_admin_log():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    # Dates arrive as plain strings, expected to look like '2015-10-10'.
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        return api_error(status.HTTP_400_BAD_REQUEST, 'start or end date invalid.')

    # Filtering a DateTimeField with bare dates misses the last day because
    # the bounds are read as '0am on the given date'; extend to end of day.
    end = end + ' 23:59:59'

    from seahub_extra.sysadmin_extra.models import UserLoginLog
    login_logs = UserLoginLog.objects.filter(login_date__range=(start, end))

    payload = [{
        'login_time': datetime_to_isoformat_timestr(entry.login_date),
        'login_ip': entry.login_ip,
        'name': email2nickname(entry.username),
        'email': entry.username
    } for entry in login_logs]

    return Response(payload)
def get(self, request):
    """Return user login logs that fall inside the requested day range."""
    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        error_msg = 'start or end date invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # Filtering a DateTimeField with dates won't include items on the last
    # day (the bounds are interpreted as '0am on the given date'), so push
    # the upper bound to the last second of that day.
    end = end + ' 23:59:59'

    from seahub_extra.sysadmin_extra.models import UserLoginLog
    records = UserLoginLog.objects.filter(login_date__range=(start, end))

    result = []
    for record in records:
        item = {
            'login_time': datetime_to_isoformat_timestr(record.login_date),
            'login_ip': record.login_ip,
            'name': email2nickname(record.username),
            'email': record.username
        }
        result.append(item)

    return Response(result)
def get(self, request):
    """Return permission-audit events in a day range (pro edition only)."""
    if not is_pro_version():
        return api_error(status.HTTP_403_FORBIDDEN, 'Feature disabled.')

    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        return api_error(status.HTTP_400_BAD_REQUEST, 'start or end date invalid.')

    result = []
    # get_log_events_by_type_and_time may return a falsy value when there
    # are no events; iterate over an empty list in that case.
    for ev in (get_log_events_by_type_and_time('perm_audit', start, end) or []):
        repo = seafile_api.get_repo(ev.repo_id)
        result.append({
            'etype': ev.etype,
            'repo_id': ev.repo_id,
            'repo_name': repo.name if repo else '',
            'permission': ev.permission,
            'time': datetime_to_isoformat_timestr(ev.timestamp),
            'file_path': ev.file_path,
            'from_name': email2nickname(ev.from_user),
            'from_email': ev.from_user,
            'to': ev.to
        })

    return Response(result)
def handle(self, *args, **options):
    """ Export file access logs to excel.

    Reads 'path', 'start_date' and 'end_date' from the command options,
    fetches the 'file_audit' events for that period and writes them to
    an .xlsx workbook (in `path` when given, else the working directory).
    """
    if not is_pro_version():
        self.stdout.write("Failed to export excel, this feature is only in professional version.")
        return

    path = options['path']
    start = str(options['start_date'])
    end = str(options['end_date'])
    if not check_time_period_valid(start, end):
        self.stdout.write("Failed to export excel, invalid start or end date.")
        return

    events = get_log_events_by_type_and_time('file_audit', start, end)

    head = [_("User"), _("Type"), _("IP"), _("Device"),
            _("Date"), _("Library Name"), _("Library ID"),
            _("Library Owner"), _("File Path"),]
    data_list = []

    # Cache repo objects and owners so each library is only looked up once
    # even when it appears in many events.
    repo_obj_dict = {}
    repo_owner_dict = {}

    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        event_type, ev.show_device = generate_file_audit_event_type(ev)

        repo_id = ev.repo_id
        if repo_id not in repo_obj_dict:
            repo = seafile_api.get_repo(repo_id)
            repo_obj_dict[repo_id] = repo
        else:
            repo = repo_obj_dict[repo_id]

        if repo:
            repo_name = repo.name
            if repo_id not in repo_owner_dict:
                repo_owner = seafile_api.get_repo_owner(repo_id) or \
                        seafile_api.get_org_repo_owner(repo_id)
                repo_owner_dict[repo_id] = repo_owner
            else:
                repo_owner = repo_owner_dict[repo_id]
        else:
            repo_name = _('Deleted')
            repo_owner = '--'

        username = ev.user if ev.user else _('Anonymous User')
        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
            ev.timestamp else ''

        row = [username, event_type, ev.ip, ev.show_device, date,
               repo_name, ev.repo_id, repo_owner, ev.file_path]
        data_list.append(row)

    excel_name = 'file-access-logs.xlsx'
    wb = write_xls(_('file-access-logs'), head, data_list)
    # An if/else statement instead of a conditional expression used only
    # for its side effect: the save destination is now explicit.
    if path:
        wb.save(posixpath.join(path, excel_name))
    else:
        wb.save(excel_name)
def sys_login_admin_export_excel(request):
    """ Export user login logs to excel.

    On any failure the user is redirected back to the referring page (or
    SITE_ROOT) with an error message; on success an .xlsx attachment is
    streamed in the response.
    """
    # `next_page` instead of `next`: avoids shadowing the builtin and
    # matches the naming used by the other export views in this module.
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request, _(u'Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    # Filtering a DateTimeField with dates won't include items on the last day,
    # because the bounds are interpreted as '0am on the given date'.
    end = end + ' 23:59:59'

    try:
        user_login_logs = UserLoginLog.objects.filter(login_date__range=(start, end))
    except ValidationError as e:
        logger.error(e)
        messages.error(request, _(u'Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    logs = list(user_login_logs)
    head = [
        _("Name"),
        _("IP"),
        _("Status"),
        _("Time"),
    ]
    data_list = []
    for log in logs:
        login_time = log.login_date.strftime("%Y-%m-%d %H:%M:%S")
        status = _('Success') if log.login_success else _('Failed')
        row = [
            log.username,
            log.login_ip,
            status,
            login_time,
        ]
        data_list.append(row)

    wb = write_xls(_('login-logs'), head, data_list)
    if not wb:
        messages.error(request, _(u'Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=login-logs.xlsx'
    wb.save(response)
    return response
def sys_log_file_audit_export_excel(request):
    """ Export file access logs to excel.

    Streams an .xlsx attachment on success; on any failure the user is
    redirected back to the referrer (or SITE_ROOT) with an error message.
    """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    if not is_pro_version():
        messages.error(
            request,
            _('Failed to export excel, this feature is only in professional version.'
              ))
        return HttpResponseRedirect(next_page)

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    events = get_log_events_by_type_and_time('file_audit', start, end)
    events.sort(key=lambda ev: ev.timestamp, reverse=True)

    head = [
        _("User"),
        _("Type"),
        _("IP"),
        _("Device"),
        _("Date"),
        _("Library Name"),
        _("Library ID"),
        _("Library Owner"),
        _("File Path")
    ]

    rows = []
    for ev in events:
        event_type, ev.show_device = generate_file_audit_event_type(ev)

        library_id = ev.repo_id
        library = seafile_api.get_repo(library_id)
        if library:
            library_name = library.name
            library_owner = seafile_api.get_repo_owner(library_id) or \
                seafile_api.get_org_repo_owner(library_id)
        else:
            library_name = _('Deleted')
            library_owner = '--'

        actor = ev.user if ev.user else _('Anonymous User')
        if ev.timestamp:
            when = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S')
        else:
            when = ''

        rows.append([
            actor, event_type, ev.ip, ev.show_device, when,
            library_name, ev.repo_id, library_owner, ev.file_path
        ])

    wb = write_xls('file-access-logs', head, rows)
    if not wb:
        messages.error(request, _('Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response[
        'Content-Disposition'] = 'attachment; filename=file-access-logs.xlsx'
    wb.save(response)
    return response
def sys_log_perm_audit_export_excel(request):
    """ Export permission audit logs to excel.

    Streams an .xlsx attachment on success; on any failure the user is
    redirected back to the referrer (or SITE_ROOT) with an error message.
    """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    if not is_pro_version():
        messages.error(
            request,
            _('Failed to export excel, this feature is only in professional version.'
              ))
        return HttpResponseRedirect(next_page)

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    events = get_log_events_by_type_and_time('perm_audit', start, end)

    head = [
        _("From"),
        _("To"),
        _("Action"),
        _("Permission"),
        _("Library"),
        _("Folder Path"),
        _("Date")
    ]
    data_list = []

    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        repo = seafile_api.get_repo(ev.repo_id)
        # NOTE(review): sibling views read `repo.name`; confirm the repo
        # object really exposes `repo_name` as well.
        repo_name = repo.repo_name if repo else _('Deleted')

        # ev.to is an email, a group id, or the 'all' marker.
        if '@' in ev.to:
            to = ev.to
        elif ev.to.isdigit():
            group = ccnet_api.get_group(int(ev.to))
            to = group.group_name if group else _('Deleted')
        elif 'all' in ev.to:
            to = _('Organization')
        else:
            to = '--'

        if 'add' in ev.etype:
            action = _('Add')
        elif 'modify' in ev.etype:
            action = _('Modify')
        elif 'delete' in ev.etype:
            action = _('Delete')
        else:
            action = '--'

        if ev.permission == 'rw':
            permission = _('Read-Write')
        elif ev.permission == 'r':
            permission = _('Read-Only')
        else:
            permission = '--'

        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
            ev.timestamp else ''

        row = [
            ev.from_user, to, action, permission, repo_name, ev.file_path, date
        ]
        data_list.append(row)

    wb = write_xls('perm-audit-logs', head, data_list)
    if not wb:
        # next_page was already resolved at function entry; no need to
        # recompute it here as the original did.
        messages.error(request, _('Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response[
        'Content-Disposition'] = 'attachment; filename=perm-audit-logs.xlsx'
    wb.save(response)
    return response
def get(self, request):
    """Return file-audit events enriched with repo/owner/user display data."""
    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        return api_error(status.HTTP_400_BAD_REQUEST, 'start or end date invalid.')

    try:
        events = get_log_events_by_type_and_time('file_audit', start, end)
    except Exception as e:
        logger.error(e)
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

    result = []
    if events:
        # First pass: attach repo info to each event and collect the users
        # and owners so their display names can be resolved in bulk.
        users = []
        owners = []
        for ev in events:
            repo = seafile_api.get_repo(ev.repo_id)
            if repo:
                ev.repo_name = repo.name
                ev.repo_owner = seafile_api.get_repo_owner(ev.repo_id) or \
                        seafile_api.get_org_repo_owner(ev.repo_id)
            else:
                ev.repo_name = ''
                ev.repo_owner = ''
            users.append(ev.user)
            owners.append(ev.repo_owner)

        user_names = get_user_name_dict(users)
        user_contacts = get_user_contact_email_dict(users)
        owner_names = get_user_name_dict(owners)
        owner_contacts = get_user_contact_email_dict(owners)

        result = [{
            'repo_id': ev.repo_id,
            'repo_name': ev.repo_name,
            'repo_owner_email': ev.repo_owner,
            'repo_owner_name': owner_names[ev.repo_owner],
            'repo_owner_contact_email': owner_contacts[ev.repo_owner],
            'time': datetime_to_isoformat_timestr(ev.timestamp),
            'ip': ev.ip,
            'file_path': ev.file_path,
            'etype': ev.etype,
            'user_email': ev.user,
            'user_name': user_names[ev.user],
            'user_contact_email': user_contacts[ev.user],
        } for ev in events]

    return Response(result)
def get(self, request):
    """Return file-audit events enriched with repo/owner/user display data."""
    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        error_msg = 'start or end date invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    try:
        events = get_log_events_by_type_and_time('file_audit', start, end)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    if not events:
        return Response([])

    # Attach repo name/owner to every event, gathering all user and owner
    # emails so display names and contact emails can be fetched in bulk.
    ev_user_list = []
    ev_repo_owner_list = []
    for ev in events:
        repo_id = ev.repo_id
        repo = seafile_api.get_repo(repo_id)
        if repo:
            ev.repo_name = repo.name
            ev.repo_owner = seafile_api.get_repo_owner(repo_id) or \
                    seafile_api.get_org_repo_owner(repo_id)
        else:
            ev.repo_name = ''
            ev.repo_owner = ''
        ev_user_list.append(ev.user)
        ev_repo_owner_list.append(ev.repo_owner)

    ev_user_name_dict = get_user_name_dict(ev_user_list)
    ev_user_contact_email_dict = get_user_contact_email_dict(ev_user_list)
    ev_repo_owner_name_dict = get_user_name_dict(ev_repo_owner_list)
    ev_repo_owner_contact_email_dict = get_user_contact_email_dict(ev_repo_owner_list)

    result = []
    for ev in events:
        owner = ev.repo_owner
        result.append({
            'repo_id': ev.repo_id,
            'repo_name': ev.repo_name,
            'repo_owner_email': owner,
            'repo_owner_name': ev_repo_owner_name_dict[owner],
            'repo_owner_contact_email': ev_repo_owner_contact_email_dict[owner],
            'time': datetime_to_isoformat_timestr(ev.timestamp),
            'ip': ev.ip,
            'file_path': ev.file_path,
            'etype': ev.etype,
            'user_email': ev.user,
            'user_name': ev_user_name_dict[ev.user],
            'user_contact_email': ev_user_contact_email_dict[ev.user],
        })

    return Response(result)