def get_group_info(request, group_id, avatar_size=GROUP_AVATAR_DEFAULT_SIZE):
    group = seaserv.get_group(group_id)
    try:
        avatar_url, is_default, date_uploaded = api_grp_avatar_url(
            group.id, avatar_size)
    except Exception as e:
        logger.error(e)
        avatar_url = get_default_group_avatar_url()

    val = utc_to_local(dt(group.timestamp))
    group_info = {
        "id": group.id,
        "name": group.group_name,
        "owner": group.creator_name,
        "created_at": val.strftime("%Y-%m-%dT%H:%M:%S") +
                      DateFormat(val).format('O'),
        "avatar_url": request.build_absolute_uri(avatar_url),
        "admins": get_group_admins(group.id),
    }

    return group_info
def group_events_data(events):
    """ Group events according to the date. """
    event_groups = []
    for e in events:
        e.time = utc_to_local(e.timestamp)
        e.date = e.time.strftime("%Y-%m-%d")
        if e.etype == 'repo-update':
            e.author = e.commit.creator_name
        elif e.etype == 'repo-create':
            e.author = e.creator
        else:
            e.author = e.repo_owner

        # start a new group whenever the date changes
        if not event_groups or e.date != event_groups[-1]['date']:
            event_group = {}
            event_group['date'] = e.date
            event_group['events'] = [e]
            event_groups.append(event_group)
        else:
            event_groups[-1]['events'].append(e)

    return event_groups
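# A minimal, self-contained sketch of how group_events_data buckets
# consecutive events by local date. The Event class and the no-op
# utc_to_local stub are stand-ins for illustration only; the real event
# objects and timezone helper come from seafevents and seahub.utils,
# and the stub assumes the server runs in UTC.
from datetime import datetime

class Event(object):
    def __init__(self, etype, timestamp, creator):
        self.etype = etype
        self.timestamp = timestamp
        self.creator = creator

def utc_to_local(ts):
    return ts  # stand-in: assume server timezone is UTC

demo_events = [
    Event('repo-create', datetime(2019, 1, 2, 9, 0), 'alice'),
    Event('repo-create', datetime(2019, 1, 2, 17, 30), 'bob'),
    Event('repo-create', datetime(2019, 1, 3, 8, 15), 'carol'),
]
for grp in group_events_data(demo_events):
    print(grp['date'], [e.author for e in grp['events']])
# expected output:
# 2019-01-02 ['alice', 'bob']
# 2019-01-03 ['carol']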
def get(self, request, format=None):
    if not EVENTS_ENABLED:
        return api_error(status.HTTP_404_NOT_FOUND, 'Events not enabled.')

    try:
        page = int(request.GET.get('page', ''))
    except ValueError:
        page = 1

    try:
        per_page = int(request.GET.get('per_page', ''))
    except ValueError:
        per_page = 25

    # Parse the avatar size once, before the loop; binding the exception
    # to `e` inside the loop would shadow the event variable.
    try:
        size = int(request.GET.get('size', 36))
    except ValueError:
        size = 36

    start = (page - 1) * per_page
    count = per_page

    email = request.user.username
    events = get_user_activities(email, start, count)

    events_list = []
    for e in events:
        d = dict(op_type=e.op_type)
        d['repo_id'] = e.repo_id
        d['repo_name'] = e.repo_name
        d['obj_type'] = e.obj_type
        d['commit_id'] = e.commit_id
        d['path'] = e.path
        d['name'] = '' if e.path == '/' else os.path.basename(e.path)
        d['author_email'] = e.op_user
        d['author_name'] = email2nickname(e.op_user)
        d['author_contact_email'] = email2contact_email(e.op_user)

        url, is_default, date_uploaded = api_avatar_url(e.op_user, size)
        d['avatar_url'] = request.build_absolute_uri(url)
        d['time_relative'] = translate_seahub_time(
            utc_to_local(e.timestamp))

        if e.op_type == 'clean-up-trash':
            d['days'] = e.days
        elif e.op_type == 'rename' and e.obj_type == 'repo':
            d['old_repo_name'] = e.old_repo_name
        elif e.op_type == 'move' and e.obj_type in ['dir', 'file']:
            d['old_path'] = e.old_path
        elif e.op_type == 'rename' and e.obj_type in ['dir', 'file']:
            d['old_path'] = e.old_path
            d['old_name'] = os.path.basename(e.old_path)

        events_list.append(d)

    ret = {'events': events_list}
    return Response(ret)
def sys_log_perm_audit(request):
    """ System admin: list permission audit logs. """
    if not EVENTS_ENABLED:
        raise Http404

    # Make sure page request is an int. If not, deliver first page.
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    user_selected = request.GET.get('email', None)
    repo_selected = request.GET.get('repo_id', None)

    start = per_page * (current_page - 1)
    limit = per_page

    # org_id = 0, show all perm audit events
    events = get_perm_audit_events(user_selected, 0, repo_selected,
                                   start, limit)
    if events:
        for ev in events:
            if ev.to.isdigit():
                group = ccnet_api.get_group(int(ev.to))
                ev.perm_group_name = group.group_name if group else None
            ev.repo = seafile_api.get_repo(ev.repo_id)
            ev.folder_name = os.path.basename(ev.file_path)
            ev.time = utc_to_local(ev.timestamp)
        page_next = len(events) == per_page
    else:
        page_next = False

    extra_href = ''
    if user_selected:
        extra_href += "&email=%s" % user_selected
    if repo_selected:
        extra_href += "&repo_id=%s" % repo_selected

    return render(request, 'sys_perm_audit.html', {
        'events': events,
        'user_selected': user_selected,
        'repo_selected': repo_selected,
        'extra_href': extra_href,
        'current_page': current_page,
        'prev_page': current_page - 1,
        'next_page': current_page + 1,
        'per_page': per_page,
        'page_next': page_next,
    })
def handle(self, *args, **options):
    """ Export file access logs to excel. """
    if not is_pro_version():
        self.stdout.write("Failed to export excel, this feature is only in professional version.")
        return

    path = options['path']
    start = str(options['start_date'])
    end = str(options['end_date'])
    if not check_time_period_valid(start, end):
        self.stdout.write("Failed to export excel, invalid start or end date.")
        return

    events = get_log_events_by_type_and_time('file_audit', start, end)

    head = [_("User"), _("Type"), _("IP"), _("Device"), _("Date"),
            _("Library Name"), _("Library ID"), _("Library Owner"),
            _("File Path")]
    data_list = []

    # cache repo objects and owners so each repo is looked up only once
    repo_obj_dict = {}
    repo_owner_dict = {}

    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        event_type, ev.show_device = generate_file_audit_event_type(ev)

        repo_id = ev.repo_id
        if repo_id not in repo_obj_dict:
            repo = seafile_api.get_repo(repo_id)
            repo_obj_dict[repo_id] = repo
        else:
            repo = repo_obj_dict[repo_id]

        if repo:
            repo_name = repo.name
            if repo_id not in repo_owner_dict:
                repo_owner = seafile_api.get_repo_owner(repo_id) or \
                             seafile_api.get_org_repo_owner(repo_id)
                repo_owner_dict[repo_id] = repo_owner
            else:
                repo_owner = repo_owner_dict[repo_id]
        else:
            repo_name = _('Deleted')
            repo_owner = '--'

        username = ev.user if ev.user else _('Anonymous User')
        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
            ev.timestamp else ''

        row = [username, event_type, ev.ip, ev.show_device, date,
               repo_name, ev.repo_id, repo_owner, ev.file_path]
        data_list.append(row)

    excel_name = 'file-access-logs.xlsx'
    wb = write_xls(_('file-access-logs'), head, data_list)
    if path:
        wb.save(posixpath.join(path, excel_name))
    else:
        wb.save(excel_name)
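# write_xls above is seahub's own helper; the sketch below is only an
# assumed, openpyxl-based stand-in (write_xls_sketch is a hypothetical
# name, not seahub's implementation) to illustrate the contract the
# export views rely on: build a workbook from a sheet name, a header
# row and data rows, and return it (or None on failure) so the caller
# can wb.save(path_or_stream).
from openpyxl import Workbook

def write_xls_sketch(sheet_name, head, data_list):
    try:
        wb = Workbook()
        ws = wb.active
        ws.title = sheet_name   # openpyxl caps sheet names at 31 chars
        ws.append(head)         # header row
        for row in data_list:   # one row per log event
            ws.append(row)
        return wb
    except Exception:
        return None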
def org_log_file_update(request):
    """ Org admin: list file update logs. """
    if not EVENTS_ENABLED:
        raise Http404

    # Make sure page request is an int. If not, deliver first page.
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    user_selected = request.GET.get('email', None)
    repo_selected = request.GET.get('repo_id', None)

    start = per_page * (current_page - 1)
    limit = per_page
    org_id = request.user.org.org_id

    events = get_file_update_events(user_selected, org_id, repo_selected,
                                    start, limit)
    if events:
        for ev in events:
            ev.repo = get_repo(ev.repo_id)
            ev.local_time = utc_to_local(ev.timestamp)
            ev.time = int(ev.local_time.strftime('%s'))
            if org_user_exists(org_id, ev.user):
                ev.is_org_user = True
        page_next = len(events) == per_page
    else:
        page_next = False

    extra_href = ''
    if user_selected:
        extra_href += "&email=%s" % user_selected
    if repo_selected:
        extra_href += "&repo_id=%s" % repo_selected

    return render(request, 'organizations/org_file_update.html', {
        'events': events,
        'user_selected': user_selected,
        'repo_selected': repo_selected,
        'extra_href': extra_href,
        'current_page': current_page,
        'prev_page': current_page - 1,
        'next_page': current_page + 1,
        'per_page': per_page,
        'page_next': page_next,
    })
def post(self, request):
    """ Create a group """
    if not self._can_add_group(request):
        error_msg = _(u'You do not have permission to create group.')
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    username = request.user.username
    group_name = request.data.get('group_name', '')
    group_name = group_name.strip()

    # Check whether group name is valid.
    if not validate_group_name(group_name):
        error_msg = _(u'Group name can only contain letters, numbers, blank, hyphen or underscore')
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # Check whether group name is duplicated.
    if check_group_name_conflict(request, group_name):
        error_msg = _(u'There is already a group with that name.')
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # Group name is valid, create that group.
    try:
        group_id = seaserv.ccnet_threaded_rpc.create_group(group_name,
                                                           username)
    except SearpcError as e:
        logger.error(e)
        error_msg = _(u'Failed')
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    try:
        size = int(request.data.get('avatar_size',
                                    GROUP_AVATAR_DEFAULT_SIZE))
    except ValueError:
        size = GROUP_AVATAR_DEFAULT_SIZE

    g = seaserv.get_group(group_id)
    try:
        avatar_url, is_default, date_uploaded = api_grp_avatar_url(g.id, size)
    except Exception as e:
        logger.error(e)
        avatar_url = get_default_group_avatar_url()

    val = utc_to_local(dt(g.timestamp))
    new_group = {
        "id": g.id,
        "name": g.group_name,
        "creator": g.creator_name,
        "created_at": val.strftime("%Y-%m-%dT%H:%M:%S") +
                      DateFormat(val).format('O'),
        "avatar_url": request.build_absolute_uri(avatar_url),
        "admins": self._get_group_admins(g.id),
    }
    return Response(new_group, status=status.HTTP_201_CREATED)
def get_group_info(request, group_id, avatar_size=GROUP_AVATAR_DEFAULT_SIZE):
    group = seaserv.get_group(group_id)
    try:
        avatar_url, is_default, date_uploaded = api_grp_avatar_url(
            group.id, avatar_size)
    except Exception as e:
        logger.error(e)
        avatar_url = get_default_group_avatar_url()

    val = utc_to_local(dt(group.timestamp))
    group_info = {
        "id": group.id,
        "name": group.group_name,
        "creator": group.creator_name,
        "created_at": val.strftime("%Y-%m-%dT%H:%M:%S") +
                      DateFormat(val).format('O'),
        "avatar_url": request.build_absolute_uri(avatar_url),
        "admins": get_group_admins(group.id),
    }

    return group_info
def sys_log_file_audit_export_excel(request):
    """ Export file access logs to excel. """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    if not is_pro_version():
        messages.error(request, _('Failed to export excel, this feature is only in professional version.'))
        return HttpResponseRedirect(next_page)

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    events = get_log_events_by_type_and_time('file_audit', start, end)

    head = [_("User"), _("Type"), _("IP"), _("Device"), _("Date"),
            _("Library Name"), _("Library ID"), _("Library Owner"),
            _("File Path")]
    data_list = []

    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        event_type, ev.show_device = generate_file_audit_event_type(ev)

        repo_id = ev.repo_id
        repo = seafile_api.get_repo(repo_id)
        if repo:
            repo_name = repo.name
            repo_owner = seafile_api.get_repo_owner(repo_id) or \
                         seafile_api.get_org_repo_owner(repo_id)
        else:
            repo_name = _('Deleted')
            repo_owner = '--'

        username = ev.user if ev.user else _('Anonymous User')
        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
            ev.timestamp else ''

        row = [username, event_type, ev.ip, ev.show_device, date,
               repo_name, ev.repo_id, repo_owner, ev.file_path]
        data_list.append(row)

    wb = write_xls('file-access-logs', head, data_list)
    if not wb:
        messages.error(request, _('Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = \
        'attachment; filename=file-access-logs.xlsx'
    wb.save(response)
    return response
def do_action(self):
    today = datetime.utcnow().replace(hour=0, minute=0, second=0,
                                      microsecond=0)

    emails = []
    user_file_updates_email_intervals = []
    for ele in UserOptions.objects.filter(
            option_key=KEY_FILE_UPDATES_EMAIL_INTERVAL):
        try:
            user_file_updates_email_intervals.append(
                (ele.email, int(ele.option_val))
            )
            emails.append(ele.email)
        except Exception as e:
            logger.error(e)
            continue

    user_last_emailed_time_dict = {}
    for ele in UserOptions.objects.filter(
            option_key=KEY_FILE_UPDATES_LAST_EMAILED_TIME).filter(
            email__in=emails):
        try:
            user_last_emailed_time_dict[ele.email] = datetime.strptime(
                ele.option_val, "%Y-%m-%d %H:%M:%S")
        except Exception as e:
            logger.error(e)
            continue

    for (username, interval_val) in user_file_updates_email_intervals:
        # save current language
        cur_language = translation.get_language()

        # get and activate user language
        user_language = self.get_user_language(username)
        translation.activate(user_language)
        logger.debug('Set language code to %s for user: %s' % (
            user_language, username))
        self.stdout.write('[%s] Set language code to %s' % (
            str(datetime.now()), user_language))

        # get last_emailed_time if any, defaults to today
        last_emailed_time = user_last_emailed_time_dict.get(username, today)
        now = datetime.utcnow().replace(microsecond=0)
        # use total_seconds(), not .seconds, so intervals longer than
        # one day are compared correctly
        if (now - last_emailed_time).total_seconds() < interval_val:
            continue

        # get file updates(from: last_emailed_time, to: now) for repos
        # user can access
        res = seafevents_api.get_user_activities_by_timestamp(
            username, last_emailed_time, now)
        if not res:
            continue

        # remove my activities; build a list (not a lazy filter object)
        # so the emptiness check and len() below work
        res = [x for x in res if x.op_user != username]
        if not res:
            continue

        # format mail content & send file updates email to user
        try:
            for ele in res:
                ele.user_avatar = self.get_avatar_src(ele.op_user)
                ele.local_timestamp = utc_to_local(ele.timestamp)
                ele.op_user_link = a_tag(email2nickname(ele.op_user),
                                         user_info_url(ele.op_user))
                ele.operation, ele.op_details = self.format_file_operation(ele)
        except Exception as e:
            logger.error('Failed to format mail content for user: %s' %
                         username)
            logger.error(e, exc_info=True)
            continue

        nickname = email2nickname(username)
        contact_email = Profile.objects.get_contact_email_by_user(username)
        c = {
            'name': nickname,
            'updates_count': len(res),
            'updates': res,
        }

        try:
            send_html_email(_('New file updates on %s') % get_site_name(),
                            'notifications/file_updates_email.html', c,
                            None, [contact_email])
            # set new last_emailed_time
            UserOptions.objects.set_file_updates_last_emailed_time(
                username, now)
        except Exception as e:
            logger.error('Failed to send email to %s, error detail: %s' %
                         (contact_email, e))
            self.stderr.write('[%s] Failed to send email to %s, error '
                              'detail: %s' % (str(now), contact_email, e))
        finally:
            # reset lang
            translation.activate(cur_language)
def do_action(self):
    emails = []
    user_dtable_updates_email_intervals = []
    for ele in UserOptions.objects.filter(
            option_key=KEY_DTABLE_UPDATES_EMAIL_INTERVAL):
        try:
            user_dtable_updates_email_intervals.append(
                (ele.email, int(ele.option_val)))
            emails.append(ele.email)
        except Exception as e:
            logger.error(e)
            self.stderr.write('[%s]: %s' % (str(datetime.now()), e))
            continue

    user_last_emailed_time_dict = {}
    for ele in UserOptions.objects.filter(
            option_key=KEY_DTABLE_UPDATES_LAST_EMAILED_TIME).filter(
            email__in=emails):
        try:
            user_last_emailed_time_dict[ele.email] = datetime.strptime(
                ele.option_val, "%Y-%m-%d %H:%M:%S")
        except Exception as e:
            logger.error(e)
            self.stderr.write('[%s]: %s' % (str(datetime.now()), e))
            continue

    for (username, interval_val) in user_dtable_updates_email_intervals:
        # save current language
        cur_language = translation.get_language()

        # get and activate user language
        user_language = self.get_user_language(username)
        translation.activate(user_language)
        logger.debug('Set language code to %s for user: %s' %
                     (user_language, username))
        self.stdout.write('[%s] Set language code to %s for user: %s' %
                          (str(datetime.now()), user_language, username))

        # get last_emailed_time if any, defaults to today 00:00:00.0
        last_emailed_time = user_last_emailed_time_dict.get(username, None)
        now = datetime.utcnow().replace(microsecond=0)
        if not last_emailed_time:
            last_emailed_time = datetime.utcnow().replace(
                hour=0, minute=0, second=0, microsecond=0)
        else:
            if (now - last_emailed_time).total_seconds() < interval_val:
                continue

        # find all the user's tables and groups' tables
        groups = ccnet_api.get_groups(username, return_ancestors=True)
        owner_list = [username] + [
            '%s@seafile_group' % group.id for group in groups
        ]
        dtables = list(
            DTables.objects.filter(workspace__owner__in=owner_list))
        # find all tables shared to user
        shared_tables = list(DTableShare.objects.list_by_to_user(username))
        # combine tables
        dtables.extend([item.dtable for item in shared_tables])
        # dtable uuid map
        dtables_uuid_map = {dtable.uuid.hex: dtable for dtable in dtables}

        # query all activities about above dtables with DB SQL
        cursor = connection.cursor()
        sql = "SELECT a.* FROM activities a " \
              "JOIN user_activities ua ON a.id=ua.activity_id " \
              "WHERE ua.timestamp > %s AND ua.username=%s " \
              "ORDER BY ua.timestamp DESC"
        cursor.execute(sql, (last_emailed_time, username))  # time and username

        col_names = [desc[0] for desc in cursor.description]
        activities, activities_count = [], 0
        for activity in cursor.fetchall():
            activity = dict(zip(col_names, activity))
            if activity['dtable_uuid'] not in dtables_uuid_map:
                continue

            activity_detail = json.loads(activity['detail'])
            activity_dict = dict(dtable_uuid=activity['dtable_uuid'])
            activity_dict['dtable_name'] = dtables_uuid_map[
                activity['dtable_uuid']].name
            activity_dict['row_id'] = activity['row_id']
            activity_dict['op_type'] = activity['op_type']
            activity_dict['author_email'] = activity['op_user']
            activity_dict['author_name'] = email2nickname(
                activity['op_user'])
            activity_dict['author_contact_email'] = email2contact_email(
                activity['op_user'])
            activity_dict['op_time'] = utc_datetime_to_isoformat_timestr(
                activity['op_time'])
            activity_dict['table_id'] = activity_detail['table_id']
            activity_dict['table_name'] = activity_detail['table_name']
            activity_dict['row_data'] = activity_detail['row_data']
            # fall back to 'row_name' to stay compatible with previous data
            activity_dict['row_name'] = self.get_row_name(
                activity_dict['row_data']) or activity_detail.get(
                'row_name', '')

            avatar_size = 72  # todo: size
            url, is_default, date_uploaded = api_avatar_url(
                activity['op_user'], avatar_size)
            activity_dict['avatar_url'] = url

            # fields for html-display
            activity_dict['op_user_link'] = a_tag(
                activity_dict['author_name'],
                user_info_url(activity['op_user']))
            activity_dict['dtable_link'] = a_tag(
                activity_dict['dtable_name'],
                dtable_url(dtables_uuid_map[activity_dict['dtable_uuid']]))
            activity_dict['details'] = self.format_modify_operation(
                activity_dict)
            activity_dict['local_timestamp'] = utc_to_local(
                activity['op_time'])

            activities_count += 1
            if len(activities) <= 100:
                activities.append(activity_dict)

        if not activities:
            translation.activate(cur_language)
            continue

        c = {
            'name': email2nickname(username),
            'updates_count': activities_count,
            'updates': activities,
        }
        contact_email = email2contact_email(username)
        try:
            send_html_email(
                _('New table updates on %s') % get_site_name(),
                'notifications/dtable_updates_email.html', c, None,
                [contact_email])
            now = datetime.utcnow().replace(microsecond=0)
            UserOptions.objects.set_dtable_updates_last_emailed_time(
                username, now)
        except Exception as e:
            logger.error('Failed to send email to %s, error detail: %s' %
                         (contact_email, e))
            self.stderr.write('[%s] Failed to send email to %s, error '
                              'detail: %s' %
                              (str(datetime.now()), contact_email, e))
        finally:
            # reset lang
            translation.activate(cur_language)
def do_action(self):
    emails = []
    user_file_updates_email_intervals = []
    for ele in UserOptions.objects.filter(
            option_key=KEY_FILE_UPDATES_EMAIL_INTERVAL):
        try:
            user_file_updates_email_intervals.append(
                (ele.email, int(ele.option_val)))
            emails.append(ele.email)
        except Exception as e:
            logger.error(e)
            self.stderr.write('[%s]: %s' % (str(datetime.now()), e))
            continue

    user_last_emailed_time_dict = {}
    for ele in UserOptions.objects.filter(
            option_key=KEY_FILE_UPDATES_LAST_EMAILED_TIME).filter(
            email__in=emails):
        try:
            user_last_emailed_time_dict[ele.email] = datetime.strptime(
                ele.option_val, "%Y-%m-%d %H:%M:%S")
        except Exception as e:
            logger.error(e)
            self.stderr.write('[%s]: %s' % (str(datetime.now()), e))
            continue

    for (username, interval_val) in user_file_updates_email_intervals:
        # save current language
        cur_language = translation.get_language()

        # get and activate user language
        user_language = self.get_user_language(username)
        translation.activate(user_language)
        logger.debug('Set language code to %s for user: %s' %
                     (user_language, username))
        self.stdout.write('[%s] Set language code to %s for user: %s' %
                          (str(datetime.now()), user_language, username))

        # get last_emailed_time if any, defaults to today 00:00:00.0
        last_emailed_time = user_last_emailed_time_dict.get(username, None)
        now = datetime.utcnow().replace(microsecond=0)
        if not last_emailed_time:
            last_emailed_time = datetime.utcnow().replace(
                hour=0, minute=0, second=0, microsecond=0)
        else:
            if (now - last_emailed_time).total_seconds() < interval_val:
                continue

        # get file updates(from: last_emailed_time, to: now) for repos
        # user can access
        res = seafevents_api.get_user_activities_by_timestamp(
            username, last_emailed_time, now)
        if not res:
            continue

        # remove my activities
        res = [x for x in res if x.op_user != username]
        if not res:
            continue

        # format mail content & send file updates email to user
        try:
            for ele in res:
                ele.user_avatar = self.get_avatar_src(ele.op_user)
                ele.local_timestamp = utc_to_local(ele.timestamp)
                ele.op_user_link = a_tag(email2nickname(ele.op_user),
                                         user_info_url(ele.op_user))
                ele.operation, ele.op_details = self.format_file_operation(
                    ele)
        except Exception as e:
            logger.error('Failed to format mail content for user: %s' %
                         username)
            logger.error(e, exc_info=True)
            self.stderr.write(
                '[%s] Failed to format mail content for user: %s' %
                (str(datetime.now()), username))
            self.stderr.write('[%s]: %s' % (str(datetime.now()), e))
            continue

        nickname = email2nickname(username)
        contact_email = Profile.objects.get_contact_email_by_user(username)
        c = {
            'name': nickname,
            'updates_count': len(res),
            'updates': res,
        }

        try:
            send_html_email(
                _('New file updates on %s') % get_site_name(),
                'notifications/file_updates_email.html', c, None,
                [contact_email])
            # set new last_emailed_time
            UserOptions.objects.set_file_updates_last_emailed_time(
                username, now)
            self.stdout.write('[%s] Successfully sent email to %s' %
                              (str(datetime.now()), contact_email))
        except Exception as e:
            logger.error('Failed to send email to %s, error detail: %s' %
                         (contact_email, e))
            self.stderr.write('[%s] Failed to send email to %s, error '
                              'detail: %s' %
                              (str(datetime.now()), contact_email, e))
        finally:
            # reset lang
            translation.activate(cur_language)
def sys_log_file_update(request):
    """ System admin: list file update logs. """
    if not EVENTS_ENABLED:
        raise Http404

    # Make sure page request is an int. If not, deliver first page.
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    user_selected = request.GET.get('email', None)
    repo_selected = request.GET.get('repo_id', None)

    start = per_page * (current_page - 1)
    limit = per_page

    # org_id = 0, show all file update events
    events = get_file_update_events(user_selected, 0, repo_selected,
                                    start, limit)
    if events:
        for ev in events:
            repo_id = ev.repo_id
            repo = seafile_api.get_repo(repo_id)
            if repo:
                ev.repo = repo
                ev.repo_name = repo.name
                ev.repo_owner = seafile_api.get_repo_owner(repo_id) or \
                                seafile_api.get_org_repo_owner(repo_id)
                ev.repo_encrypted = repo.encrypted
            else:
                ev.repo_name = _('Deleted')
                ev.repo_owner = '--'
            ev.local_time = utc_to_local(ev.timestamp)
            ev.time = int(ev.local_time.strftime('%s'))
        page_next = len(events) == per_page
    else:
        page_next = False

    extra_href = ''
    if user_selected:
        extra_href += "&email=%s" % user_selected
    if repo_selected:
        extra_href += "&repo_id=%s" % repo_selected

    return render(request, 'sys_file_update.html', {
        'events': events,
        'user_selected': user_selected,
        'repo_selected': repo_selected,
        'extra_href': extra_href,
        'current_page': current_page,
        'prev_page': current_page - 1,
        'next_page': current_page + 1,
        'per_page': per_page,
        'page_next': page_next,
    })
def org_log_perm_audit(request):
    """ Org admin: list permission audit logs. """
    if not EVENTS_ENABLED:
        raise Http404

    # Make sure page request is an int. If not, deliver first page.
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    user_selected = request.GET.get('email', None)
    repo_selected = request.GET.get('repo_id', None)

    start = per_page * (current_page - 1)
    limit = per_page
    org_id = request.user.org.org_id

    events = get_perm_audit_events(user_selected, org_id, repo_selected,
                                   start, limit)
    if events:
        for ev in events:
            if ev.to.isdigit():
                group = get_group(ev.to)
                ev.perm_group_name = group.group_name if \
                    group is not None else None
            ev.repo = get_repo(ev.repo_id)
            ev.folder_name = os.path.basename(ev.file_path)
            ev.time = utc_to_local(ev.timestamp)
            if org_user_exists(org_id, ev.from_user):
                ev.is_org_from_user = True
            if org_user_exists(org_id, ev.to):
                ev.is_org_to_user = True
        page_next = len(events) == per_page
    else:
        page_next = False

    extra_href = ''
    if user_selected:
        extra_href += "&email=%s" % user_selected
    if repo_selected:
        extra_href += "&repo_id=%s" % repo_selected

    return render(request, 'organizations/org_perm_audit.html', {
        'events': events,
        'user_selected': user_selected,
        'repo_selected': repo_selected,
        'extra_href': extra_href,
        'current_page': current_page,
        'prev_page': current_page - 1,
        'next_page': current_page + 1,
        'per_page': per_page,
        'page_next': page_next,
    })
def recover_data():
    # open DB session
    session = SeafEventsSession()

    # QUERY CONDITIONS
    # Search in the FileUpdate table,
    # timestamp between '2018-02-02 05:05' and '2018-02-02 16:00'
    # q = session.query(FileUpdate).filter(FileUpdate.timestamp.between('2018-02-02 05:05','2018-02-02 16:00'))
    q = session.query(FileUpdate).filter(
        FileUpdate.timestamp.between('2018-03-31 05:05:08',
                                     '2018-03-31 05:05:42'))
    # order by creation, desc
    q = q.order_by(desc(FileUpdate.eid))
    events = q.all()

    # Generate common data structure as dict, will be used later
    # by reports, tasks, etc.
    users = defaultdict(list)
    if events:
        for ev in events:
            ev.repo = get_repo(ev.repo_id)
            ev.local_time = utc_to_local(ev.timestamp)
            ev.time = int(ev.local_time.strftime('%s'))
            changes = get_diff(ev.repo.repo_id, '', ev.commit_id)
            c = get_commit(ev.repo.repo_id, ev.repo.version, ev.commit_id)

            # number of changes in event
            c_num = 0
            for k in changes:
                c_num += len(changes[k])

            if c.parent_id is None:
                # A commit is a first commit only if its parent id is None.
                changes['cmt_desc'] = ev.repo.desc
            elif c.second_parent_id is None:
                # Normal commit only has one parent.
                if c.desc.startswith('Changed library'):
                    changes['cmt_desc'] = 'Changed library name or description'
            else:
                # A commit is a merge only if it has two parents.
                changes['cmt_desc'] = 'No conflict in the merge.'
            changes['date_time'] = str(c.ctime)

            # ev.repo is saved in the dict to make seafobj manipulation
            # possible
            users[ev.user].append({
                'event': {
                    'repo': ev.repo,
                    'time': str(ev.local_time),
                    'library': ev.repo.name,
                    'encrypted': ev.repo.encrypted,
                    'file_oper': ev.file_oper
                },
                'details': changes,
                'changes_num': c_num
            })

    changed_files = defaultdict(dict)
    # for the moment we save modified and new files in separate dirs
    change = 'modified'
    # change = 'new'
    for key, events in users.items():
        for e in events:
            if change in e['details']:
                ev = e['event']
                # encrypted libs will not be recovered
                if not ev['encrypted']:
                    lib = ev['library']
                    if lib not in changed_files[key]:
                        changed_files[key].update({
                            lib: {
                                'repo': ev['repo'],
                                'files': set(e['details'][change])
                            }
                        })
                    else:
                        changed_files[key][lib]['files'].update(
                            e['details'][change])

    # convert sets to lists to be serialized in json
    for u in changed_files:
        for l in changed_files[u]:
            changed_files[u][l]['files'] = list(
                changed_files[u][l]['files'])

    # path in filesystem where the recovered files will be stored
    STORAGE_PATH = '/keeper/tmp/recovery/' + change + '/'

    # generate packages of changed files
    for u in changed_files:
        path = STORAGE_PATH + u
        for lib in changed_files[u]:
            dir = get_root_dir(changed_files[u][lib]['repo'])
            dest_path = path + "/" + lib
            for fn in changed_files[u][lib]['files']:
                copy_file(dir, fn, dest_path)
def get(self, request):
    """ List all groups. """
    org_id = None
    username = request.user.username
    if is_org_context(request):
        org_id = request.user.org.org_id
        user_groups = seaserv.get_org_groups_by_user(org_id, username)
    else:
        user_groups = seaserv.get_personal_groups_by_user(username)

    try:
        size = int(request.GET.get('avatar_size',
                                   GROUP_AVATAR_DEFAULT_SIZE))
    except ValueError:
        size = GROUP_AVATAR_DEFAULT_SIZE

    with_repos = request.GET.get('with_repos') == '1'

    groups = []
    for g in user_groups:
        try:
            avatar_url, is_default, date_uploaded = api_grp_avatar_url(
                g.id, size)
        except Exception as e:
            logger.error(e)
            avatar_url = get_default_group_avatar_url()

        val = utc_to_local(dt(g.timestamp))
        group = {
            "id": g.id,
            "name": g.group_name,
            "creator": g.creator_name,
            "created_at": val.strftime("%Y-%m-%dT%H:%M:%S") +
                          DateFormat(val).format('O'),
            "avatar_url": request.build_absolute_uri(avatar_url),
            "admins": self._get_group_admins(g.id),
        }

        if with_repos:
            if org_id:
                group_repos = seafile_api.get_org_group_repos(org_id, g.id)
            else:
                group_repos = seafile_api.get_repos_by_group(g.id)

            repos = []
            for r in group_repos:
                repo = {
                    "id": r.id,
                    "name": r.name,
                    "desc": r.desc,
                    "size": r.size,
                    "size_formatted": filesizeformat(r.size),
                    "mtime": r.last_modified,
                    "mtime_relative": translate_seahub_time(r.last_modified),
                    "encrypted": r.encrypted,
                    "permission": r.permission,
                    "owner": r.user,
                    "owner_nickname": email2nickname(r.user),
                    "share_from_me": username == r.user,
                }
                repos.append(repo)
            group['repos'] = repos

        groups.append(group)

    return Response(groups)
def sys_log_perm_audit_export_excel(request):
    """ Export permission audit logs to excel. """
    next_page = request.META.get('HTTP_REFERER', None)
    if not next_page:
        next_page = SITE_ROOT

    if not is_pro_version():
        messages.error(request, _('Failed to export excel, this feature is only in professional version.'))
        return HttpResponseRedirect(next_page)

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        messages.error(request,
                       _('Failed to export excel, invalid start or end date'))
        return HttpResponseRedirect(next_page)

    events = get_log_events_by_type_and_time('perm_audit', start, end)

    head = [_("From"), _("To"), _("Action"), _("Permission"), _("Library"),
            _("Folder Path"), _("Date")]
    data_list = []

    events.sort(key=lambda x: x.timestamp, reverse=True)
    for ev in events:
        repo = seafile_api.get_repo(ev.repo_id)
        repo_name = repo.repo_name if repo else _('Deleted')

        if '@' in ev.to:
            to = ev.to
        elif ev.to.isdigit():
            group = ccnet_api.get_group(int(ev.to))
            to = group.group_name if group else _('Deleted')
        elif 'all' in ev.to:
            to = _('Organization')
        else:
            to = '--'

        if 'add' in ev.etype:
            action = _('Add')
        elif 'modify' in ev.etype:
            action = _('Modify')
        elif 'delete' in ev.etype:
            action = _('Delete')
        else:
            action = '--'

        if ev.permission == 'rw':
            permission = _('Read-Write')
        elif ev.permission == 'r':
            permission = _('Read-Only')
        else:
            permission = '--'

        date = utc_to_local(ev.timestamp).strftime('%Y-%m-%d %H:%M:%S') if \
            ev.timestamp else ''

        row = [ev.from_user, to, action, permission, repo_name,
               ev.file_path, date]
        data_list.append(row)

    wb = write_xls('perm-audit-logs', head, data_list)
    if not wb:
        messages.error(request, _('Failed to export excel'))
        return HttpResponseRedirect(next_page)

    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = \
        'attachment; filename=perm-audit-logs.xlsx'
    wb.save(response)
    return response