def batch_add_user_example(request):
    """Serve a sample Excel file showing the batch-add-user import format.

    On failure to build the workbook, redirect back to the referring page
    (or SITE_ROOT) with an error message.
    """
    redirect_to = request.META.get('HTTP_REFERER', None)
    if not redirect_to:
        redirect_to = SITE_ROOT

    head = [
        _('Email'),
        _('Password'),
        _('Name') + '(' + _('Optional') + ')',
        _('Role') + '(' + _('Optional') + ')',
        _('Space Quota') + '(MB, ' + _('Optional') + ')',
    ]

    # Five placeholder rows illustrating each column.
    data_list = []
    for idx in range(5):
        sample_email = "******" + str(idx) + "@example.com"
        sample_name = "test" + str(idx)
        data_list.append([sample_email, "******", sample_name,
                          "default", "1000"])

    wb = write_xls('sample', head, data_list)
    if not wb:
        messages.error(request, _('Failed to export Excel'))
        return HttpResponseRedirect(redirect_to)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=users.xlsx'
    wb.save(response)
    return response
def handle(self, *args, **options):
    """Export every user's storage usage and quota to User-Storage.xlsx.

    If ``options['path']`` is set, the file is written there; otherwise it
    is saved in the current working directory.
    """
    path = options['path']

    all_users = (ccnet_api.get_emailusers('DB', -1, -1) +
                 ccnet_api.get_emailusers('LDAPImport', -1, -1))

    head = [_("Email"), _("Name"), _("Contact Email"),
            _("Space Usage") + "(MB)", _("Space Quota") + "(MB)"]

    data_list = []
    for user in all_users:
        email = user.email
        # Fills user.space_usage / user.space_quota in place.
        _populate_user_quota_usage(user)
        data_list.append([
            email,
            email2nickname(email),
            email2contact_email(email),
            byte_to_mb(user.space_usage),
            byte_to_mb(user.space_quota),
        ])

    excel_name = "User-Storage.xlsx"
    wb = write_xls('users', head, data_list)
    if path:
        wb.save(posixpath.join(path, excel_name))
    else:
        wb.save(excel_name)
def sys_group_admin_export_excel(request):
    """Export all groups to an Excel attachment (groups.xlsx).

    Redirects back to the referrer (or SITE_ROOT) with an error message if
    the group listing or the workbook creation fails.
    """
    redirect_to = request.META.get('HTTP_REFERER', None)
    if not redirect_to:
        redirect_to = SITE_ROOT

    try:
        groups = ccnet_threaded_rpc.get_all_groups(-1, -1)
    except Exception as e:
        logger.error(e)
        messages.error(request, _('Failed to export Excel'))
        return HttpResponseRedirect(redirect_to)

    head = [_("Name"), _("Creator"), _("Create At")]
    data_list = [
        [grp.group_name,
         grp.creator_name,
         tsstr_sec(grp.timestamp) if grp.timestamp else '']
        for grp in groups
    ]

    wb = write_xls('groups', head, data_list)
    if not wb:
        messages.error(request, _('Failed to export Excel'))
        return HttpResponseRedirect(redirect_to)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=groups.xlsx'
    wb.save(response)
    return response
def handle(self, *args, **options):
    """Export per-user traffic for a month (``options['date']``, YYYYMM).

    Saves User-Traffic-<month>.xlsx either under ``options['path']`` or in
    the current directory.
    """
    path = options['path']
    month = str(options['date'])
    if not month:
        self.stdout.write("month invalid.")
        return

    month_obj = datetime.datetime.strptime(month, "%Y%m")
    res_data = seafevents_api.get_all_users_traffic_by_month(
        month_obj, -1, -1)

    head = [_("Time"), _("User"),
            _("Web Download") + ('(MB)'),
            _("Sync Download") + ('(MB)'),
            _("Link Download") + ('(MB)'),
            _("Web Upload") + ('(MB)'),
            _("Sync Upload") + ('(MB)'),
            _("Link Upload") + ('(MB)')]

    data_list = []
    for record in res_data:
        data_list.append([
            month,
            record['user'],
            byte_to_mb(record['web_file_download']),
            byte_to_mb(record['sync_file_download']),
            byte_to_mb(record['link_file_download']),
            byte_to_mb(record['web_file_upload']),
            byte_to_mb(record['sync_file_upload']),
            byte_to_mb(record['link_file_upload']),
        ])

    excel_name = "User-Traffic-%s" % month
    wb = write_xls(excel_name, head, data_list)
    if path:
        wb.save(posixpath.join(path, '%s.xlsx' % excel_name))
    else:
        wb.save('%s.xlsx' % excel_name)
def get(self, request):
    """Download a per-user traffic report for a month as an Excel file.

    Query params:
        month: required, "YYYYMM".

    Fixed: the bare ``except:`` around ``strptime`` (which would also
    swallow ``KeyboardInterrupt``/``SystemExit``) is narrowed to
    ``except ValueError``, the only exception a bad month string raises.
    """
    if not request.user.admin_permissions.can_view_statistic():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    month = request.GET.get("month", "")
    if not month:
        error_msg = "month invalid."
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    try:
        month_obj = datetime.datetime.strptime(month, "%Y%m")
    except ValueError:
        error_msg = "Month %s invalid" % month
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    try:
        res_data = seafevents_api.get_all_users_traffic_by_month(
            month_obj, -1, -1)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    head = [_("Time"), _("User"),
            _("Web Download") + ('(MB)'),
            _("Sync Download") + ('(MB)'),
            _("Link Download") + ('(MB)'),
            _("Web Upload") + ('(MB)'),
            _("Sync Upload") + ('(MB)'),
            _("Link Upload") + ('(MB)')]

    data_list = []
    for data in res_data:
        data_list.append([
            month,
            data['user'],
            byte_to_mb(data['web_file_download']),
            byte_to_mb(data['sync_file_download']),
            byte_to_mb(data['link_file_download']),
            byte_to_mb(data['web_file_upload']),
            byte_to_mb(data['sync_file_upload']),
            byte_to_mb(data['link_file_upload']),
        ])

    excel_name = "User Traffic %s" % month
    try:
        wb = write_xls(excel_name, head, data_list)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = \
        'attachment; filename="%s.xlsx"' % excel_name
    wb.save(response)
    return response
def handle(self, *args, **options):
    """Export file access logs to excel (pro edition only).

    Repo objects and repo owners are cached per repo_id so each library is
    looked up at most once regardless of how many events reference it.
    """
    if not is_pro_version():
        self.stdout.write("Failed to export excel, this feature is only in professional version.")
        return

    path = options['path']
    start = str(options['start_date'])
    end = str(options['end_date'])
    if not check_time_period_valid(start, end):
        self.stdout.write("Failed to export excel, invalid start or end date.")
        return

    events = get_log_events_by_type_and_time('file_audit', start, end)

    head = [_("User"), _("Type"), _("IP"), _("Device"), _("Date"),
            _("Library Name"), _("Library ID"), _("Library Owner"),
            _("File Path")]

    data_list = []
    repo_cache = {}
    owner_cache = {}

    # Newest events first.
    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        event_type, ev.show_device = generate_file_audit_event_type(ev)

        repo_id = ev.repo_id
        if repo_id in repo_cache:
            repo = repo_cache[repo_id]
        else:
            repo = seafile_api.get_repo(repo_id)
            repo_cache[repo_id] = repo

        if repo:
            repo_name = repo.name
            if repo_id in owner_cache:
                repo_owner = owner_cache[repo_id]
            else:
                repo_owner = seafile_api.get_repo_owner(repo_id) or \
                        seafile_api.get_org_repo_owner(repo_id)
                owner_cache[repo_id] = repo_owner
        else:
            repo_name = _('Deleted')
            repo_owner = '--'

        username = ev.user if ev.user else _('Anonymous User')
        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
                ev.timestamp else ''

        data_list.append([username, event_type, ev.ip, ev.show_device, date,
                          repo_name, ev.repo_id, repo_owner, ev.file_path])

    excel_name = 'file-access-logs.xlsx'
    wb = write_xls(_('file-access-logs'), head, data_list)
    if path:
        wb.save(posixpath.join(path, excel_name))
    else:
        wb.save(excel_name)
def sys_login_admin_export_excel(request):
    """Export user login logs to excel (login-logs.xlsx).

    Query params:
        start, end: date strings bounding the log range (inclusive).

    Fixed: the local variable ``next`` shadowed the builtin ``next()``;
    renamed to ``next_page``, matching every sibling export view.
    """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    # Filtering a DateTimeField with dates won't include items on the last
    # day, because the bounds are interpreted as '0am on the given date'.
    end = end + ' 23:59:59'

    try:
        user_login_logs = UserLoginLog.objects.filter(
            login_date__range=(start, end))
    except ValidationError as e:
        logger.error(e)
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    head = [_("Name"), _("IP"), _("Status"), _("Time")]
    data_list = []
    for log in user_login_logs:
        login_time = log.login_date.strftime("%Y-%m-%d %H:%M:%S")
        status = _('Success') if log.login_success else _('Failed')
        data_list.append([log.username, log.login_ip, status, login_time])

    wb = write_xls(_('login-logs'), head, data_list)
    if not wb:
        messages.error(request, _('Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=login-logs.xlsx'
    wb.save(response)
    return response
def get(self, request):
    """Download a per-user traffic report for a month as an Excel file.

    Query params:
        month: required, "YYYYMM".

    Fixed: the bare ``except:`` around ``strptime`` is narrowed to
    ``except ValueError`` so it no longer swallows unrelated exceptions
    (e.g. ``KeyboardInterrupt``).
    """
    month = request.GET.get("month", "")
    if not month:
        error_msg = "month invalid."
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    try:
        month_obj = datetime.datetime.strptime(month, "%Y%m")
    except ValueError:
        error_msg = "Month %s invalid" % month
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    try:
        res_data = seafevents_api.get_all_users_traffic_by_month(
            month_obj, -1, -1)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    head = [_("Time"), _("User"),
            _("Web Download") + ('(MB)'),
            _("Sync Download") + ('(MB)'),
            _("Link Download") + ('(MB)'),
            _("Web Upload") + ('(MB)'),
            _("Sync Upload") + ('(MB)'),
            _("Link Upload") + ('(MB)')]

    data_list = []
    for data in res_data:
        data_list.append([
            month,
            data['user'],
            byte_to_mb(data['web_file_download']),
            byte_to_mb(data['sync_file_download']),
            byte_to_mb(data['link_file_download']),
            byte_to_mb(data['web_file_upload']),
            byte_to_mb(data['sync_file_upload']),
            byte_to_mb(data['link_file_upload']),
        ])

    excel_name = "User Traffic %s" % month
    try:
        wb = write_xls(excel_name, head, data_list)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=%s.xlsx' % excel_name
    wb.save(response)
    return response
def get(self, request):
    """Download per-user storage usage and quota as an Excel attachment.

    Requires the statistics-view admin permission.
    """
    if not request.user.admin_permissions.can_view_statistic():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    all_users = (ccnet_api.get_emailusers('DB', -1, -1) +
                 ccnet_api.get_emailusers('LDAPImport', -1, -1))

    head = [_("Email"), _("Name"), _("Contact Email"),
            _("Space Usage") + "(MB)", _("Space Quota") + "(MB)"]

    data_list = []
    for user in all_users:
        email = user.email
        # Fills user.space_usage / user.space_quota in place.
        _populate_user_quota_usage(user)
        data_list.append([
            email,
            email2nickname(email),
            email2contact_email(email),
            byte_to_mb(user.space_usage),
            byte_to_mb(user.space_quota),
        ])

    excel_name = 'User Storage'
    try:
        wb = write_xls('users', head, data_list)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = \
        'attachment; filename="%s.xlsx"' % excel_name
    wb.save(response)
    return response
def get(self, request):
    """Serve a sample Excel file for importing group members (one Email
    column with placeholder addresses)."""
    head = [_('Email')]
    data_list = [["******" + str(i) + "@example.com"] for i in range(5)]

    wb = write_xls('sample', head, data_list)
    if not wb:
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                         _('Failed to export Excel'))

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=members.xlsx'
    wb.save(response)
    return response
def get(self, request):
    """Download per-user storage usage and quota as an Excel attachment."""
    all_users = (ccnet_api.get_emailusers('DB', -1, -1) +
                 ccnet_api.get_emailusers('LDAPImport', -1, -1))

    head = [_("Email"), _("Name"), _("Contact Email"),
            _("Space Usage") + "(MB)", _("Space Quota") + "(MB)"]

    data_list = []
    for user in all_users:
        email = user.email
        # Fills user.space_usage / user.space_quota in place.
        _populate_user_quota_usage(user)
        data_list.append([
            email,
            email2nickname(email),
            email2contact_email(email),
            byte_to_mb(user.space_usage),
            byte_to_mb(user.space_quota),
        ])

    excel_name = 'User Storage'
    try:
        wb = write_xls('users', head, data_list)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=%s.xlsx' % excel_name
    wb.save(response)
    return response
def handle(self, *args, **options):
    """Export all users (DB + LDAP-imported) to '../users.xlsx'.

    Pro edition adds a Role column. Users are processed in batches of 100
    so the related last-login and profile queries stay bounded.
    """
    self.stdout.write("Export users to '../users.xlsx'.")
    try:
        users = ccnet_api.get_emailusers('DB', -1, -1) + \
                ccnet_api.get_emailusers('LDAPImport', -1, -1)
    except Exception as e:
        self.stdout.write('Error: ' + str(e))
        return

    if is_pro_version():
        is_pro = True
    else:
        is_pro = False

    # Pro edition includes a Role column; community edition does not.
    if is_pro:
        head = [
            _("Email"),
            _("Name"),
            _("Contact Email"),
            _("Status"),
            _("Role"),
            _("Space Usage") + "(MB)",
            _("Space Quota") + "(MB)",
            _("Create At"),
            _("Last Login"),
            _("Admin"),
            _("LDAP(imported)"),
        ]
    else:
        head = [
            _("Email"),
            _("Name"),
            _("Contact Email"),
            _("Status"),
            _("Space Usage") + "(MB)",
            _("Space Quota") + "(MB)",
            _("Create At"),
            _("Last Login"),
            _("Admin"),
            _("LDAP(imported)"),
        ]

    # only operate 100 users for every `for` loop
    looped = 0
    limit = 100
    data_list = []
    while looped < len(users):

        current_users = users[looped:looped + limit]
        # Bulk-fetch related rows for this batch to avoid per-user queries.
        last_logins = UserLastLogin.objects.filter(username__in=[x.email \
                for x in current_users])
        user_profiles = Profile.objects.filter(user__in=[x.email \
                for x in current_users])

        for user in current_users:
            # populate name and contact email
            user.contact_email = ''
            user.name = ''
            for profile in user_profiles:
                if profile.user == user.email:
                    user.contact_email = profile.contact_email
                    user.name = profile.nickname

            # populate space usage and quota
            MB = get_file_size_unit('MB')

            # populate user quota usage; org users are billed against their
            # organization's quota, others against their personal quota
            orgs = ccnet_api.get_orgs_by_user(user.email)
            try:
                if orgs:
                    user.org = orgs[0]
                    org_id = user.org.org_id
                    user.space_usage = seafile_api.get_org_user_quota_usage(
                        org_id, user.email)
                    user.space_quota = seafile_api.get_org_user_quota(
                        org_id, user.email)
                else:
                    user.space_usage = seafile_api.get_user_self_usage(
                        user.email)
                    user.space_quota = seafile_api.get_user_quota(
                        user.email)
            except Exception as e:
                self.stdout.write('Debug: ' + str(e))
                # Sentinel values: treated as "unknown" by the checks below.
                user.space_usage = -1
                user.space_quota = -1

            if user.space_usage > 0:
                try:
                    space_usage_MB = round(float(user.space_usage) / MB, 2)
                except Exception as e:
                    self.stdout.write('Debug: ' + str(e))
                    space_usage_MB = '--'
            else:
                space_usage_MB = ''

            if user.space_quota > 0:
                try:
                    space_quota_MB = round(float(user.space_quota) / MB, 2)
                except Exception as e:
                    self.stdout.write('Debug: ' + str(e))
                    space_quota_MB = '--'
            else:
                space_quota_MB = ''

            # populate user last login time
            user.last_login = None
            for last_login in last_logins:
                if last_login.username == user.email:
                    user.last_login = last_login.last_login

            if user.is_active:
                status = _('Active')
            else:
                status = _('Inactive')

            create_at = tsstr_sec(user.ctime) if user.ctime else ''
            last_login = user.last_login.strftime("%Y-%m-%d %H:%M:%S") if \
                user.last_login else ''

            is_admin = _('Yes') if user.is_staff else ''
            ldap_import = _('Yes') if user.source == 'LDAPImport' else ''

            if is_pro:
                # Map known role codes to translated labels; pass unknown
                # role strings through unchanged.
                if user.role:
                    if user.role == GUEST_USER:
                        role = _('Guest')
                    elif user.role == DEFAULT_USER:
                        role = _('Default')
                    else:
                        role = user.role
                else:
                    role = _('Default')

                row = [
                    user.email, user.name, user.contact_email, status, role,
                    space_usage_MB, space_quota_MB, create_at, last_login,
                    is_admin, ldap_import
                ]
            else:
                row = [
                    user.email, user.name, user.contact_email, status,
                    space_usage_MB, space_quota_MB, create_at, last_login,
                    is_admin, ldap_import
                ]

            data_list.append(row)

        # update `looped` value when `for` loop finished
        looped += limit

    wb = write_xls('users', head, data_list)
    if not wb:
        self.stdout.write('Error: please check the log.')
        return

    wb.save('../users.xlsx')
    self.stdout.write('Done.\n')
def sys_useradmin_export_excel(request):
    """Export all users from database to excel.

    Streams users.xlsx as an attachment; on any failure, redirects back to
    the referrer (or SITE_ROOT) with an error message. Pro edition adds a
    Role column. Users are processed in batches of 100 so the related
    last-login and profile queries stay bounded.
    """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    try:
        users = ccnet_api.get_emailusers('DB', -1, -1) + \
                ccnet_api.get_emailusers('LDAPImport', -1, -1)
    except Exception as e:
        logger.error(e)
        messages.error(request, _('Failed to export Excel'))
        return HttpResponseRedirect(next_page)

    if is_pro_version():
        is_pro = True
    else:
        is_pro = False

    # Pro edition includes a Role column; community edition does not.
    if is_pro:
        head = [
            _("Email"),
            _("Name"),
            _("Contact Email"),
            _("Status"),
            _("Role"),
            _("Space Usage") + "(MB)",
            _("Space Quota") + "(MB)",
            _("Create At"),
            _("Last Login"),
            _("Admin"),
            _("LDAP(imported)"),
        ]
    else:
        head = [
            _("Email"),
            _("Name"),
            _("Contact Email"),
            _("Status"),
            _("Space Usage") + "(MB)",
            _("Space Quota") + "(MB)",
            _("Create At"),
            _("Last Login"),
            _("Admin"),
            _("LDAP(imported)"),
        ]

    # only operate 100 users for every `for` loop
    looped = 0
    limit = 100
    data_list = []
    while looped < len(users):

        current_users = users[looped:looped + limit]
        # Bulk-fetch related rows for this batch to avoid per-user queries.
        last_logins = UserLastLogin.objects.filter(username__in=[x.email \
                for x in current_users])
        user_profiles = Profile.objects.filter(user__in=[x.email \
                for x in current_users])

        for user in current_users:
            # populate name and contact email
            user.contact_email = ''
            user.name = ''
            for profile in user_profiles:
                if profile.user == user.email:
                    user.contact_email = profile.contact_email
                    user.name = profile.nickname

            # populate space usage and quota
            MB = get_file_size_unit('MB')
            # Fills user.space_usage / user.space_quota in place.
            _populate_user_quota_usage(user)

            if user.space_usage > 0:
                try:
                    space_usage_MB = round(float(user.space_usage) / MB, 2)
                except Exception as e:
                    logger.error(e)
                    space_usage_MB = '--'
            else:
                space_usage_MB = ''

            if user.space_quota > 0:
                try:
                    space_quota_MB = round(float(user.space_quota) / MB, 2)
                except Exception as e:
                    logger.error(e)
                    space_quota_MB = '--'
            else:
                space_quota_MB = ''

            # populate user last login time
            user.last_login = None
            for last_login in last_logins:
                if last_login.username == user.email:
                    user.last_login = last_login.last_login

            if user.is_active:
                status = _('Active')
            else:
                status = _('Inactive')

            create_at = tsstr_sec(user.ctime) if user.ctime else ''
            last_login = user.last_login.strftime("%Y-%m-%d %H:%M:%S") if \
                user.last_login else ''

            is_admin = _('Yes') if user.is_staff else ''
            ldap_import = _('Yes') if user.source == 'LDAPImport' else ''

            if is_pro:
                # Map known role codes to translated labels; pass unknown
                # role strings through unchanged.
                if user.role:
                    if user.role == GUEST_USER:
                        role = _('Guest')
                    elif user.role == DEFAULT_USER:
                        role = _('Default')
                    else:
                        role = user.role
                else:
                    role = _('Default')

                row = [
                    user.email, user.name, user.contact_email, status, role,
                    space_usage_MB, space_quota_MB, create_at, last_login,
                    is_admin, ldap_import
                ]
            else:
                row = [
                    user.email, user.name, user.contact_email, status,
                    space_usage_MB, space_quota_MB, create_at, last_login,
                    is_admin, ldap_import
                ]

            data_list.append(row)

        # update `looped` value when `for` loop finished
        looped += limit

    wb = write_xls('users', head, data_list)
    if not wb:
        messages.error(request, _('Failed to export Excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=users.xlsx'
    wb.save(response)
    return response
def sys_log_file_audit_export_excel(request):
    """Export file access logs to excel (pro edition only).

    Streams file-access-logs.xlsx; on any failure, redirects back to the
    referrer (or SITE_ROOT) with an error message.
    """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    if not is_pro_version():
        messages.error(request, _('Failed to export excel, this feature is only in professional version.'))
        return HttpResponseRedirect(next_page)

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    events = get_log_events_by_type_and_time('file_audit', start, end)

    head = [_("User"), _("Type"), _("IP"), _("Device"), _("Date"),
            _("Library Name"), _("Library ID"), _("Library Owner"),
            _("File Path")]

    data_list = []
    # Newest events first.
    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        event_type, ev.show_device = generate_file_audit_event_type(ev)

        repo = seafile_api.get_repo(ev.repo_id)
        if repo:
            repo_name = repo.name
            repo_owner = seafile_api.get_repo_owner(ev.repo_id) or \
                    seafile_api.get_org_repo_owner(ev.repo_id)
        else:
            repo_name = _('Deleted')
            repo_owner = '--'

        username = ev.user if ev.user else _('Anonymous User')
        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
                ev.timestamp else ''

        data_list.append([username, event_type, ev.ip, ev.show_device, date,
                          repo_name, ev.repo_id, repo_owner, ev.file_path])

    wb = write_xls('file-access-logs', head, data_list)
    if not wb:
        messages.error(request, _('Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = \
        'attachment; filename=file-access-logs.xlsx'
    wb.save(response)
    return response
def sys_log_perm_audit_export_excel(request):
    """Export permission audit logs to excel (pro edition only).

    Streams perm-audit-logs.xlsx; on any failure, redirects back to the
    referrer (or SITE_ROOT) with an error message.

    Fixed: the ``if not wb`` failure branch redundantly recomputed
    ``next_page`` from the request even though it is already resolved at
    the top of the view; the duplicate lookup was removed.
    """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    if not is_pro_version():
        messages.error(request, _('Failed to export excel, this feature is only in professional version.'))
        return HttpResponseRedirect(next_page)

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    events = get_log_events_by_type_and_time('perm_audit', start, end)

    head = [_("From"), _("To"), _("Action"), _("Permission"), _("Library"),
            _("Folder Path"), _("Date")]

    data_list = []
    # Newest events first.
    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        repo = seafile_api.get_repo(ev.repo_id)
        repo_name = repo.repo_name if repo else _('Deleted')

        # `to` may be an email address, a numeric group id, or 'all'
        # (meaning the whole organization).
        if '@' in ev.to:
            to = ev.to
        elif ev.to.isdigit():
            group = ccnet_api.get_group(int(ev.to))
            to = group.group_name if group else _('Deleted')
        elif 'all' in ev.to:
            to = _('Organization')
        else:
            to = '--'

        if 'add' in ev.etype:
            action = _('Add')
        elif 'modify' in ev.etype:
            action = _('Modify')
        elif 'delete' in ev.etype:
            action = _('Delete')
        else:
            action = '--'

        if ev.permission == 'rw':
            permission = _('Read-Write')
        elif ev.permission == 'r':
            permission = _('Read-Only')
        else:
            permission = '--'

        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
                ev.timestamp else ''

        data_list.append([ev.from_user, to, action, permission, repo_name,
                          ev.file_path, date])

    wb = write_xls('perm-audit-logs', head, data_list)
    if not wb:
        messages.error(request, _('Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = \
        'attachment; filename=perm-audit-logs.xlsx'
    wb.save(response)
    return response
def handle(self, *args, **options):
    """Export all users (DB + LDAP-imported) to '../users.xlsx'.

    Pro edition adds a Role column. Users are processed in batches of 100
    so the related last-login and profile queries stay bounded.
    """
    self.stdout.write("Export users to '../users.xlsx'.")
    try:
        users = ccnet_api.get_emailusers('DB', -1, -1) + \
                ccnet_api.get_emailusers('LDAPImport', -1, -1)
    except Exception as e:
        self.stdout.write('Error: ' + str(e))
        return

    if is_pro_version():
        is_pro = True
    else:
        is_pro = False

    # Pro edition includes a Role column; community edition does not.
    if is_pro:
        head = [_("Email"), _("Name"), _("Contact Email"), _("Status"),
                _("Role"), _("Space Usage") + "(MB)",
                _("Space Quota") + "(MB)", _("Create At"), _("Last Login"),
                _("Admin"), _("LDAP(imported)"),]
    else:
        head = [_("Email"), _("Name"), _("Contact Email"), _("Status"),
                _("Space Usage") + "(MB)", _("Space Quota") + "(MB)",
                _("Create At"), _("Last Login"), _("Admin"),
                _("LDAP(imported)"),]

    # only operate 100 users for every `for` loop
    looped = 0
    limit = 100
    data_list = []
    while looped < len(users):

        current_users = users[looped:looped+limit]
        # Bulk-fetch related rows for this batch to avoid per-user queries.
        last_logins = UserLastLogin.objects.filter(username__in=[x.email \
                for x in current_users])
        user_profiles = Profile.objects.filter(user__in=[x.email \
                for x in current_users])

        for user in current_users:
            # populate name and contact email
            user.contact_email = ''
            user.name = ''
            for profile in user_profiles:
                if profile.user == user.email:
                    user.contact_email = profile.contact_email
                    user.name = profile.nickname

            # populate space usage and quota
            MB = get_file_size_unit('MB')

            # populate user quota usage; org users are billed against their
            # organization's quota, others against their personal quota
            orgs = ccnet_api.get_orgs_by_user(user.email)
            try:
                if orgs:
                    user.org = orgs[0]
                    org_id = user.org.org_id
                    user.space_usage = seafile_api.get_org_user_quota_usage(org_id, user.email)
                    user.space_quota = seafile_api.get_org_user_quota(org_id, user.email)
                else:
                    user.space_usage = seafile_api.get_user_self_usage(user.email)
                    user.space_quota = seafile_api.get_user_quota(user.email)
            except Exception as e:
                self.stdout.write('Debug: ' + str(e))
                # Sentinel values: treated as "unknown" by the checks below.
                user.space_usage = -1
                user.space_quota = -1

            if user.space_usage > 0:
                try:
                    space_usage_MB = round(float(user.space_usage) / MB, 2)
                except Exception as e:
                    self.stdout.write('Debug: ' + str(e))
                    space_usage_MB = '--'
            else:
                space_usage_MB = ''

            if user.space_quota > 0:
                try:
                    space_quota_MB = round(float(user.space_quota) / MB, 2)
                except Exception as e:
                    self.stdout.write('Debug: ' + str(e))
                    space_quota_MB = '--'
            else:
                space_quota_MB = ''

            # populate user last login time
            user.last_login = None
            for last_login in last_logins:
                if last_login.username == user.email:
                    user.last_login = last_login.last_login

            if user.is_active:
                status = _('Active')
            else:
                status = _('Inactive')

            create_at = tsstr_sec(user.ctime) if user.ctime else ''
            last_login = user.last_login.strftime("%Y-%m-%d %H:%M:%S") if \
                user.last_login else ''

            is_admin = _('Yes') if user.is_staff else ''
            ldap_import = _('Yes') if user.source == 'LDAPImport' else ''

            if is_pro:
                # Map known role codes to translated labels; pass unknown
                # role strings through unchanged.
                if user.role:
                    if user.role == GUEST_USER:
                        role = _('Guest')
                    elif user.role == DEFAULT_USER:
                        role = _('Default')
                    else:
                        role = user.role
                else:
                    role = _('Default')

                row = [user.email, user.name, user.contact_email, status,
                       role, space_usage_MB, space_quota_MB, create_at,
                       last_login, is_admin, ldap_import]
            else:
                row = [user.email, user.name, user.contact_email, status,
                       space_usage_MB, space_quota_MB, create_at,
                       last_login, is_admin, ldap_import]

            data_list.append(row)

        # update `looped` value when `for` loop finished
        looped += limit

    wb = write_xls('users', head, data_list)
    if not wb:
        self.stdout.write('Error: please check the log.')
        return

    wb.save('../users.xlsx')
    self.stdout.write('Done.\n')