def _api_token_obj_to_dict(api_token_obj):
    return {
        'app_name': api_token_obj.app_name,
        'api_token': api_token_obj.token,
        'generated_by': api_token_obj.generated_by,
        'generated_at': datetime_to_isoformat_timestr(api_token_obj.generated_at),
        'last_access': datetime_to_isoformat_timestr(api_token_obj.last_access),
        'permission': api_token_obj.permission,
    }
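# Every serializer in this section leans on datetime_to_isoformat_timestr,
# which is imported from elsewhere and not shown here. Below is a minimal
# sketch of what it presumably does, based on its call sites -- an assumption,
# not the actual seahub helper; the _sketch suffix marks that.

from django.utils import timezone


def datetime_to_isoformat_timestr_sketch(dt):
    """Render a datetime as an ISO 8601 string, e.g. '2017-06-02T04:02:00+08:00'."""
    # Treat naive datetimes as server-local time before serializing.
    if timezone.is_naive(dt):
        dt = timezone.make_aware(dt, timezone.get_current_timezone())
    return dt.replace(microsecond=0).isoformat()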
def to_dict(self):
    return {
        'id': self.pk,
        'workspace_id': self.workspace_id,
        'uuid': self.uuid,
        'name': self.name,
        'creator': email2nickname(self.creator),
        'modifier': email2nickname(self.modifier),
        'created_at': datetime_to_isoformat_timestr(self.created_at),
        'updated_at': datetime_to_isoformat_timestr(self.updated_at),
    }
def get_share_link_info(fileshare):
    data = {}
    token = fileshare.token

    repo_id = fileshare.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = fileshare.path
    if path:
        obj_name = '/' if path == '/' else os.path.basename(path.rstrip('/'))
    else:
        obj_name = ''

    if fileshare.expire_date:
        expire_date = datetime_to_isoformat_timestr(fileshare.expire_date)
    else:
        expire_date = ''

    if fileshare.ctime:
        ctime = datetime_to_isoformat_timestr(fileshare.ctime)
    else:
        ctime = ''

    data['username'] = fileshare.username
    data['repo_id'] = repo_id
    data['repo_name'] = repo.repo_name if repo else ''
    data['path'] = path
    data['obj_name'] = obj_name
    data['is_dir'] = fileshare.s_type == 'd'
    data['token'] = token
    data['link'] = gen_shared_link(token, fileshare.s_type)
    data['view_cnt'] = fileshare.view_cnt
    data['ctime'] = ctime
    data['expire_date'] = expire_date
    data['is_expired'] = fileshare.is_expired()
    data['permissions'] = fileshare.get_permissions()

    data['can_edit'] = False
    if repo and path != '/' and not data['is_dir']:
        try:
            # get_dirent_by_path may return None, so keep it inside the try.
            dirent = seafile_api.get_dirent_by_path(repo_id, path)
            can_edit, error_msg = can_edit_file(obj_name, dirent.size, repo)
            data['can_edit'] = can_edit
        except Exception as e:
            logger.error(e)

    return data
def get_share_link_info(fileshare):
    data = {}
    token = fileshare.token

    repo_id = fileshare.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = fileshare.path
    if path:
        obj_name = '/' if path == '/' else os.path.basename(path.rstrip('/'))
    else:
        obj_name = ''

    if fileshare.expire_date:
        expire_date = datetime_to_isoformat_timestr(fileshare.expire_date)
    else:
        expire_date = ''

    if fileshare.ctime:
        ctime = datetime_to_isoformat_timestr(fileshare.ctime)
    else:
        ctime = ''

    ccnet_email = fileshare.username
    data['creator_email'] = ccnet_email
    data['creator_name'] = email2nickname(ccnet_email)
    data['creator_contact_email'] = email2contact_email(ccnet_email)

    data['repo_id'] = repo_id
    data['repo_name'] = repo.repo_name if repo else ''
    data['path'] = path
    data['obj_name'] = obj_name
    data['is_dir'] = fileshare.s_type == 'd'
    data['token'] = token
    data['link'] = gen_shared_link(token, fileshare.s_type)
    data['view_cnt'] = fileshare.view_cnt
    data['ctime'] = ctime
    data['expire_date'] = expire_date
    data['is_expired'] = fileshare.is_expired()
    data['permissions'] = fileshare.get_permissions()

    if fileshare.s_type == 'f' and repo:
        # guard on repo: get_file_size needs repo.store_id and repo.version
        obj_id = seafile_api.get_file_id_by_path(repo_id, path)
        data['size'] = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)

    return data
def get(self, request):
    """ Get all share links.

    Permission checking:
    1. only admin can perform this action.
    """
    if not request.user.admin_permissions.other_permission():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    start = (current_page - 1) * per_page
    end = start + per_page

    share_links = FileShare.objects.all().order_by('-ctime')[start:end]
    count = FileShare.objects.all().count()

    # Use dict to reduce memcache fetch cost in large for-loop.
    nickname_dict = {}
    owner_email_set = {link.username for link in share_links}
    for e in owner_email_set:
        if e not in nickname_dict:
            nickname_dict[e] = email2nickname(e)

    share_links_info = []
    for link in share_links:
        if link.expire_date:
            expire_date = datetime_to_isoformat_timestr(link.expire_date)
        else:
            expire_date = ''

        link_info = {}
        link_info['obj_name'] = link.get_obj_name()
        link_info['token'] = link.token

        owner_email = link.username
        link_info['creator_email'] = owner_email
        link_info['creator_name'] = nickname_dict.get(owner_email, '')

        link_info['ctime'] = datetime_to_isoformat_timestr(link.ctime)
        link_info['view_cnt'] = link.view_cnt
        link_info['expire_date'] = expire_date
        link_info['is_expired'] = link.is_expired()
        share_links_info.append(link_info)

    return Response({"share_link_list": share_links_info, "count": count})
def to_dict(self):
    accept_time = datetime_to_isoformat_timestr(self.accept_time) \
        if self.accept_time else ""
    return {
        "id": self.pk,
        "token": self.token,
        "inviter": self.inviter,
        "accepter": self.accepter,
        "type": self.invite_type,
        "invite_time": datetime_to_isoformat_timestr(self.invite_time),
        "accept_time": accept_time,
        "expire_time": datetime_to_isoformat_timestr(self.expire_time),
    }
def to_dict(self):
    uuid = FileUUIDMap.objects.get_fileuuidmap_by_uuid(self.origin_file_uuid)
    file_path = posixpath.join(uuid.parent_path, uuid.filename)

    return {
        'id': self.pk,
        'owner': self.username,
        'owner_nickname': email2nickname(self.username),
        'origin_repo_id': self.origin_repo_id,
        'origin_file_path': file_path,
        'origin_file_version': self.origin_file_version,
        'draft_file_path': self.draft_file_path,
        'created_at': datetime_to_isoformat_timestr(self.created_at),
        'updated_at': datetime_to_isoformat_timestr(self.updated_at),
    }
def get(self, request, format=None):
    username = request.user.username

    uploadlinks = UploadLinkShare.objects.filter(username=username)
    p_uploadlinks = []
    for link in uploadlinks:
        r = seafile_api.get_repo(link.repo_id)
        if not r:
            link.delete()
            continue

        if seafile_api.get_dir_id_by_path(r.id, link.path) is None:
            link.delete()
            continue

        if link.path != '/':
            link.dir_name = os.path.basename(link.path.rstrip('/'))
        else:
            link.dir_name = link.path

        link.shared_link = gen_shared_upload_link(link.token)
        link.repo = r

        if link.expire_date:
            expire_date = datetime_to_isoformat_timestr(link.expire_date)
        else:
            expire_date = ""

        p_uploadlinks.append({
            "username": link.username,
            "repo_id": link.repo_id,
            "path": link.path,
            "token": link.token,
            "ctime": datetime_to_isoformat_timestr(link.ctime),
            "view_cnt": link.view_cnt,
            "expire_date": expire_date,
        })

    return HttpResponse(json.dumps(p_uploadlinks), status=200,
                        content_type=json_content_type)
def get(self, request):
    """List all Institutions
    """
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    start = per_page * (current_page - 1)
    institutions = Institution.objects.all()[start:start + per_page]
    count = Institution.objects.count()

    institutions_info = []
    for institution in institutions:
        data = {}
        data['id'] = institution.id
        data['name'] = institution.name
        data['ctime'] = datetime_to_isoformat_timestr(institution.create_time)
        institutions_info.append(data)

    resp = {
        'institution_list': institutions_info,
        'total_count': count,
    }
    return Response(resp)
def _get_upload_link_info(self, uls):
    data = {}
    token = uls.token

    repo_id = uls.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = uls.path
    if path:
        obj_name = '/' if path == '/' else os.path.basename(path.rstrip('/'))
    else:
        obj_name = ''

    if uls.ctime:
        ctime = datetime_to_isoformat_timestr(uls.ctime)
    else:
        ctime = ''

    data['repo_id'] = repo_id
    data['repo_name'] = repo.repo_name if repo else ''
    data['path'] = path
    data['obj_name'] = obj_name
    data['view_cnt'] = uls.view_cnt
    data['ctime'] = ctime
    data['link'] = gen_shared_upload_link(token)
    data['token'] = token
    data['username'] = uls.username

    return data
def post(self, request, group_id, format=None):
    """Post a group discussion. Only group members can perform this op.
    """
    content = request.data.get('content', '')
    if not content:
        return api_error(status.HTTP_400_BAD_REQUEST,
                         'Content can not be empty.')

    try:
        avatar_size = int(request.data.get('avatar_size',
                                           AVATAR_DEFAULT_SIZE))
    except ValueError:
        avatar_size = AVATAR_DEFAULT_SIZE

    username = request.user.username
    msg = GroupMessage.objects.create(group_id=group_id,
                                      from_email=username,
                                      message=content)

    # send signal
    grpmsg_added.send(sender=GroupMessage, group_id=group_id,
                      from_email=username, message=content)

    info = get_user_common_info(username, avatar_size)
    isoformat_timestr = datetime_to_isoformat_timestr(msg.timestamp)
    return Response({
        "id": msg.pk,
        "group_id": group_id,
        "user_name": info["name"],
        "user_email": info["email"],
        "user_contact_email": info["contact_email"],
        "avatar_url": request.build_absolute_uri(info["avatar_url"]),
        "content": msg.message,
        "created_at": isoformat_timestr
    }, status=201)
def get_upload_link_info(uls):
    data = {}
    token = uls.token

    repo_id = uls.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = uls.path
    if path:
        obj_name = '/' if path == '/' else os.path.basename(path.rstrip('/'))
    else:
        obj_name = ''

    if uls.ctime:
        ctime = datetime_to_isoformat_timestr(uls.ctime)
    else:
        ctime = ''

    data['repo_id'] = repo_id
    data['repo_name'] = repo.repo_name if repo else ''
    data['path'] = path
    data['obj_name'] = obj_name
    data['view_cnt'] = uls.view_cnt
    data['ctime'] = ctime
    data['link'] = gen_shared_upload_link(token)
    data['token'] = token
    data['username'] = uls.username

    return data
def _get_upload_link_info(self, uls):
    data = {}
    token = uls.token

    repo_id = uls.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = uls.path
    if path:
        obj_name = "/" if path == "/" else os.path.basename(path.rstrip("/"))
    else:
        obj_name = ""

    if uls.ctime:
        ctime = datetime_to_isoformat_timestr(uls.ctime)
    else:
        ctime = ""

    data["repo_id"] = repo_id
    data["repo_name"] = repo.repo_name if repo else ""
    data["path"] = path
    data["obj_name"] = obj_name
    data["view_cnt"] = uls.view_cnt
    data["ctime"] = ctime
    data["link"] = gen_shared_upload_link(token)
    data["token"] = token
    data["username"] = uls.username

    return data
def get(self, request):
    if not is_pro_version():
        error_msg = 'Feature disabled.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        error_msg = 'start or end date invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    result = []
    events = get_log_events_by_type_and_time('file_update', start, end)
    if events:
        for ev in events:
            tmp_repo = seafile_api.get_repo(ev.repo_id)
            tmp_repo_name = tmp_repo.name if tmp_repo else ''
            result.append({
                'commit_id': ev.commit_id,
                'repo_id': ev.repo_id,
                'repo_name': tmp_repo_name,
                'time': datetime_to_isoformat_timestr(ev.timestamp),
                'file_operation': ev.file_oper,
                'user_name': email2nickname(ev.user),
                'user_email': ev.user
            })

    return Response(result)
def get(self, request, format=None):
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '50'))
    except ValueError:
        current_page = 1
        per_page = 50

    platform = request.GET.get('platform', None)
    start = (current_page - 1) * per_page
    end = current_page * per_page + 1

    devices = TokenV2.objects.get_devices(platform, start, end)
    if len(devices) == end - start:
        devices = devices[:per_page]
        has_next_page = True
    else:
        has_next_page = False

    return_results = []
    for device in devices:
        result = {}
        result['client_version'] = device.client_version
        result['device_id'] = device.device_id
        result['device_name'] = device.device_name
        result['last_accessed'] = datetime_to_isoformat_timestr(
            device.last_accessed)
        result['last_login_ip'] = device.last_login_ip
        result['user'] = device.user
        result['platform'] = device.platform
        return_results.append(result)

    return Response(({'has_next_page': has_next_page}, return_results))
def get_institution_user_info(user_obj, institution, is_check_admin=True):
    """
    If this function is called in a for loop, do not check admin inside it;
    add the is_institution_admin attribute outside the loop instead, because
    is_institution_admin() invokes a database query.
    """
    info = {}
    info['email'] = user_obj.email
    info['name'] = email2nickname(user_obj.email)
    info['contact_email'] = email2contact_email(user_obj.email)

    info['quota_usage'], info['quota_total'] = get_user_quota_usage_and_total(
        user_obj.email)

    info['create_time'] = timestamp_to_isoformat_timestr(user_obj.ctime)
    info['is_active'] = user_obj.is_active

    if is_check_admin:
        info['is_institution_admin'] = is_institution_admin(
            user_obj.email, institution)

    last_login_obj = UserLastLogin.objects.get_by_username(user_obj.email)
    info['last_login'] = datetime_to_isoformat_timestr(
        last_login_obj.last_login) if last_login_obj else ''

    return info
def post(self, request):
    """Create an Institution
    """
    if not request.user.admin_permissions.other_permission():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    name = request.data.get('name', '').strip()
    if not name:
        error_msg = 'name invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    try:
        institution = Institution.objects.add_institution(name=name)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    info = {}
    info['id'] = institution.id
    info['name'] = institution.name
    info['ctime'] = datetime_to_isoformat_timestr(institution.create_time)
    return Response(info)
def get(self, request):
    """ Used to get notifications.

    Permission checking:
    1. login user.
    """
    result = {}

    username = request.user.username
    try:
        per_page = int(request.GET.get('per_page', ''))
        page = int(request.GET.get('page', ''))
    except ValueError:
        per_page = 25
        page = 1

    start = (page - 1) * per_page
    end = page * per_page

    notice_list = UserNotification.objects.get_user_notifications(
        username)[start:end]

    result_notices = update_notice_detail(request, notice_list)
    notification_list = []
    unseen_count = 0
    for i in result_notices:
        if i.detail is not None:
            notice = {}
            notice['id'] = i.id
            notice['type'] = i.msg_type
            notice['detail'] = i.detail
            notice['time'] = datetime_to_isoformat_timestr(i.timestamp)
            notice['seen'] = i.seen

            if not i.seen:
                unseen_count += 1

            notification_list.append(notice)

    cache_key = get_cache_key_of_unseen_notifications(username)
    count_from_cache = cache.get(cache_key, None)

    # the `None` check is needed: a cached count of `0` is still valid
    if count_from_cache is not None:
        unseen_num = count_from_cache
    else:
        unseen_num = unseen_count
        # set cache
        cache.set(cache_key, unseen_count)

    total_count = UserNotification.objects.filter(to_user=username).count()

    result['notification_list'] = notification_list
    result['count'] = total_count
    result['unseen_count'] = unseen_num

    return Response(result)
def get(self, request, start_time, end_time):
    """ Get records of the specified time range.

    param:
        start: the start time of the query.
        end: the end time of the query.
    return:
        the list of file operation records.
    """
    offset = get_time_offset()
    data = get_file_ops_stats_by_day(start_time, end_time, offset)
    ops_added_dict = get_init_data(start_time, end_time)
    ops_visited_dict = get_init_data(start_time, end_time)
    ops_deleted_dict = get_init_data(start_time, end_time)
    ops_modified_dict = get_init_data(start_time, end_time)

    for e in data:
        if e[1] == 'Added':
            ops_added_dict[e[0]] = e[2]
        elif e[1] == 'Visited':
            ops_visited_dict[e[0]] = e[2]
        elif e[1] == 'Deleted':
            ops_deleted_dict[e[0]] = e[2]
        elif e[1] == 'Modified':
            ops_modified_dict[e[0]] = e[2]

    res_data = []
    for k, v in ops_added_dict.items():
        res_data.append({
            'datetime': datetime_to_isoformat_timestr(k),
            'added': v,
            'visited': ops_visited_dict[k],
            'deleted': ops_deleted_dict[k],
            'modified': ops_modified_dict[k]
        })
    return Response(sorted(res_data, key=lambda x: x['datetime']))
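# The statistics views in this section rely on get_init_data to pre-fill one
# bucket per day of the queried range, so days with no recorded events still
# appear in the response. The helper is not shown here; the following is a
# hypothetical sketch of its behavior, inferred from the call sites.

import datetime


def get_init_data_sketch(start_time, end_time, default=0):
    data = {}
    day = start_time.replace(hour=0, minute=0, second=0, microsecond=0)
    while day <= end_time:
        # copy mutable defaults so per-day buckets do not share state
        data[day] = default.copy() if isinstance(default, dict) else default
        day += datetime.timedelta(days=1)
    return data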
def get(self, request):
    if not is_pro_version():
        error_msg = 'Feature disabled.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        error_msg = 'start or end date invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # Filtering a DateTimeField with dates won't include items on the last day,
    # because the bounds are interpreted as '0am on the given date'.
    end = end + ' 23:59:59'

    result = []
    from seahub_extra.sysadmin_extra.models import UserLoginLog
    logs = UserLoginLog.objects.filter(login_date__range=(start, end))
    for log in logs:
        result.append({
            'login_time': datetime_to_isoformat_timestr(log.login_date),
            'login_ip': log.login_ip,
            'name': email2nickname(log.username),
            'email': log.username
        })

    return Response(result)
def test_can_get_file_audit_stats(self, mock_get_file_audit_stats_by_day,
                                  mock_is_pro, mock_events_enabled):
    mock_get_file_audit_stats_by_day.return_value = [
        (datetime.datetime(2017, 6, 2, 4, 2), 'Added', 2),
        (datetime.datetime(2017, 6, 2, 4, 2), 'Deleted', 2),
        (datetime.datetime(2017, 6, 2, 4, 2), 'Visited', 2),
        (datetime.datetime(2017, 6, 2, 4, 2), 'Modified', 2),
    ]
    mock_is_pro.return_value = True
    # note: this rebinds the local name only; the mock object is unchanged
    mock_events_enabled = True

    url = reverse('api-v2.1-admin-statistics-file-operations')
    url += "?start=2017-06-01 07:00:00&end=2017-06-03 07:00:00"
    resp = self.client.get(url)
    json_resp = json.loads(resp.content)
    self.assertEqual(200, resp.status_code)

    data = {
        'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 4, 2)),
        'added': 2,
        'deleted': 2,
        'visited': 2,
        'modified': 2
    }
    assert data in json_resp
def get_org_user_info(org_id, user_obj):
    email = user_obj.email
    user_info = {}

    user_info['org_id'] = org_id
    user_info['email'] = email
    user_info['name'] = email2nickname(email)
    user_info['contact_email'] = email2contact_email(email)

    org_user_quota = seafile_api.get_org_user_quota(org_id, email)
    user_info['quota_total'] = org_user_quota

    org_user_quota_usage = seafile_api.get_org_user_quota_usage(org_id, email)
    user_info['quota_usage'] = org_user_quota_usage

    user_info['create_time'] = timestamp_to_isoformat_timestr(user_obj.ctime)

    user_info['last_login'] = ''
    # fetch once instead of querying UserLastLogin twice
    last_login_obj = UserLastLogin.objects.get_by_username(email)
    if last_login_obj:
        user_info['last_login'] = datetime_to_isoformat_timestr(
            last_login_obj.last_login)

    user_info['is_org_staff'] = ccnet_api.is_org_staff(org_id, email) == 1

    return user_info
def get(self, request):
    if not request.user.admin_permissions.can_view_admin_log():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        error_msg = 'start or end date invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # Filtering a DateTimeField with dates won't include items on the last day,
    # because the bounds are interpreted as '0am on the given date'.
    end = end + ' 23:59:59'

    result = []
    from seahub_extra.sysadmin_extra.models import UserLoginLog
    logs = UserLoginLog.objects.filter(login_date__range=(start, end))
    for log in logs:
        result.append({
            'login_time': datetime_to_isoformat_timestr(log.login_date),
            'login_ip': log.login_ip,
            'name': email2nickname(log.username),
            'email': log.username
        })

    return Response(result)
def _get_response_data(self, logs):
    user_list = []
    for log in logs:
        user_list.append(log.username)

    name_dict = get_user_name_dict(user_list)
    contact_email_dict = get_user_contact_email_dict(user_list)

    data = []
    for log in logs:
        email = log.username
        data.append({
            'login_time': datetime_to_isoformat_timestr(log.login_date),
            'login_ip': log.login_ip,
            'login_success': log.login_success,
            'email': email,
            'name': name_dict[email],
            'contact_email': contact_email_dict[email],
        })

    return data
def get(self, request):
    """List all Institutions
    """
    if not request.user.admin_permissions.other_permission():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    start = per_page * (current_page - 1)
    institutions = Institution.objects.all()[start:start + per_page]
    count = Institution.objects.count()

    institutions_info = []
    for institution in institutions:
        data = {}
        data['id'] = institution.id
        data['name'] = institution.name
        data['ctime'] = datetime_to_isoformat_timestr(institution.create_time)
        institutions_info.append(data)

    resp = {
        'institution_list': institutions_info,
        'total_count': count,
    }
    return Response(resp)
def get(self, request, start_time, end_time):
    """ Get records of the specified time range.

    param:
        start: the start time of the query.
        end: the end time of the query.
    return:
        the list of file operation records.
    """
    offset = get_time_offset()
    data = get_file_ops_stats_by_day(start_time, end_time, offset)
    ops_added_dict = get_init_data(start_time, end_time)
    ops_visited_dict = get_init_data(start_time, end_time)
    ops_deleted_dict = get_init_data(start_time, end_time)

    for e in data:
        if e[1] == 'Added':
            ops_added_dict[e[0]] = e[2]
        elif e[1] == 'Visited':
            ops_visited_dict[e[0]] = e[2]
        elif e[1] == 'Deleted':
            ops_deleted_dict[e[0]] = e[2]

    res_data = []
    for k, v in ops_added_dict.items():
        res_data.append({
            'datetime': datetime_to_isoformat_timestr(k),
            'added': v,
            'visited': ops_visited_dict[k],
            'deleted': ops_deleted_dict[k]
        })
    return Response(sorted(res_data, key=lambda x: x['datetime']))
def to_dict(self):
    uuid = self.origin_file_uuid  # TODO: refactor uuid
    file_path = posixpath.join(uuid.parent_path, uuid.filename)

    return {
        'id': self.pk,
        'owner': self.username,
        'owner_nickname': email2nickname(self.username),
        'origin_repo_id': self.origin_repo_id,
        'origin_file_path': file_path,
        'origin_file_version': self.origin_file_version,
        'draft_repo_id': self.draft_repo_id,
        'draft_file_path': self.draft_file_path,
        'created_at': datetime_to_isoformat_timestr(self.created_at),
        'updated_at': datetime_to_isoformat_timestr(self.updated_at),
    }
def get(self, request, start_time, end_time, *args, **kwargs):
    if not request.user.admin_permissions.other_permission():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    org_id = kwargs['org_id']
    op_type_list = ['web-file-upload', 'web-file-download',
                    'sync-file-download', 'sync-file-upload',
                    'link-file-upload', 'link-file-download']
    init_count = [0] * 6
    init_data = get_init_data(start_time, end_time,
                              dict(list(zip(op_type_list, init_count))))

    for e in get_org_traffic_by_day(org_id, start_time, end_time,
                                    get_time_offset()):
        dt, op_type, count = e
        init_data[dt].update({op_type: count})

    res_data = []
    for k, v in list(init_data.items()):
        res = {'datetime': datetime_to_isoformat_timestr(k)}
        res.update(v)
        res_data.append(res)

    return Response(sorted(res_data, key=lambda x: x['datetime']))
def get(self, request):
    if not is_pro_version():
        error_msg = 'Feature disabled.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        error_msg = 'start or end date invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    result = []
    events = get_log_events_by_type_and_time('file_audit', start, end)
    if events:
        for ev in events:
            tmp_repo = seafile_api.get_repo(ev.repo_id)
            tmp_repo_name = tmp_repo.name if tmp_repo else ''
            # note: the original dict listed 'etype' twice; it is kept once here
            result.append({
                'repo_id': ev.repo_id,
                'repo_name': tmp_repo_name,
                'time': datetime_to_isoformat_timestr(ev.timestamp),
                'etype': ev.etype,
                'ip': ev.ip,
                'file_path': ev.file_path,
                'user_name': email2nickname(ev.user),
                'user_email': ev.user
            })

    return Response(result)
def get(self, request):
    """ Used to get notifications.

    Permission checking:
    1. login user.
    """
    result = {}

    username = request.user.username
    try:
        per_page = int(request.GET.get('per_page', ''))
        page = int(request.GET.get('page', ''))
    except ValueError:
        per_page = 25
        page = 1

    start = (page - 1) * per_page
    end = page * per_page

    notice_list = UserNotification.objects.get_user_notifications(username)[start:end]

    result_notices = update_notice_detail(request, notice_list)
    notification_list = []
    unseen_count = 0
    for i in result_notices:
        if i.detail is not None:
            notice = {}
            notice['id'] = i.id
            notice['type'] = i.msg_type
            notice['detail'] = i.detail
            notice['time'] = datetime_to_isoformat_timestr(i.timestamp)
            notice['seen'] = i.seen

            if not i.seen:
                unseen_count += 1

            notification_list.append(notice)

    cache_key = get_cache_key_of_unseen_notifications(username)
    count_from_cache = cache.get(cache_key, None)

    # the `None` check is needed: a cached count of `0` is still valid
    if count_from_cache is not None:
        unseen_num = count_from_cache
    else:
        unseen_num = unseen_count
        # set cache
        cache.set(cache_key, unseen_count)

    total_count = UserNotification.objects.filter(to_user=username).count()

    result['notification_list'] = notification_list
    result['count'] = total_count
    result['unseen_count'] = unseen_num

    return Response(result)
def list_draft_by_username(self, username, status='open'):
    """List all drafts owned by a user, optionally filtered by status.
    """
    repo_cache = {}

    def get_repo_with_cache(repo_id, repo_cache):
        """Return a repo object, avoiding loading the same repo multiple times.
        """
        if repo_id in repo_cache:
            return repo_cache[repo_id]
        repo = seafile_api.get_repo(repo_id)
        repo_cache[repo_id] = repo
        return repo

    data = []
    qs = self.filter(username=username, status=status)
    for d in qs:
        # If the repo does not exist, skip its drafts.
        repo = get_repo_with_cache(d.origin_repo_id, repo_cache)
        if not repo:
            continue

        uuid = FileUUIDMap.objects.get_fileuuidmap_by_uuid(
            d.origin_file_uuid)
        file_path = posixpath.join(uuid.parent_path, uuid.filename)

        draft = {}
        draft['id'] = d.id
        draft['owner'] = d.username
        draft['repo_name'] = repo.name
        draft['owner_nickname'] = email2nickname(d.username)
        draft['origin_repo_id'] = d.origin_repo_id
        draft['origin_file_path'] = file_path
        draft['origin_file_version'] = d.origin_file_version
        draft['draft_file_path'] = d.draft_file_path
        draft['created_at'] = datetime_to_isoformat_timestr(d.created_at)
        draft['updated_at'] = datetime_to_isoformat_timestr(d.updated_at)
        draft['status'] = d.status
        data.append(draft)

    return data
def to_dict(self):
    return {
        'id': self.pk,
        'plugin_name': self.name,
        'info': json.loads(self.info),
        'added_by': email2nickname(self.added_by),
        'added_time': datetime_to_isoformat_timestr(self.added_time),
    }
def list_draft_by_username(self, username, status='open'):
    """List all drafts owned by a user, optionally filtered by status.
    """
    repo_cache = {}

    def get_repo_with_cache(repo_id, repo_cache):
        """Return a repo object, avoiding loading the same repo multiple times.
        """
        if repo_id in repo_cache:
            return repo_cache[repo_id]
        repo = seafile_api.get_repo(repo_id)
        repo_cache[repo_id] = repo
        return repo

    data = []
    # use the status argument instead of the hard-coded 'open'
    qs = self.filter(username=username, status=status)
    for d in qs:
        # If the repo does not exist, skip its drafts.
        repo = get_repo_with_cache(d.origin_repo_id, repo_cache)
        if not repo:
            continue

        uuid = FileUUIDMap.objects.get_fileuuidmap_by_uuid(d.origin_file_uuid)
        file_path = posixpath.join(uuid.parent_path, uuid.filename)

        draft = {}
        draft['id'] = d.id
        draft['owner'] = d.username
        draft['repo_name'] = repo.name
        draft['owner_nickname'] = email2nickname(d.username)
        draft['origin_repo_id'] = d.origin_repo_id
        draft['origin_file_path'] = file_path
        draft['origin_file_version'] = d.origin_file_version
        draft['draft_file_path'] = d.draft_file_path
        draft['created_at'] = datetime_to_isoformat_timestr(d.created_at)
        draft['updated_at'] = datetime_to_isoformat_timestr(d.updated_at)
        draft['status'] = d.status
        data.append(draft)

    return data
def get(self, request):
    """List all admins, both from the database and LDAP-imported.
    """
    if not request.user.admin_permissions.can_manage_user():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    try:
        admin_users = ccnet_api.get_superusers()
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    admin_users_info = []
    for user in admin_users:
        user_info = {}
        profile = Profile.objects.get_profile_by_user(user.email)

        user_info['email'] = user.email
        user_info['name'] = email2nickname(user.email)
        user_info['contact_email'] = email2contact_email(user.email)
        user_info['login_id'] = profile.login_id if profile and profile.login_id else ''

        user_info['is_staff'] = user.is_staff
        user_info['is_active'] = user.is_active

        orgs = ccnet_api.get_orgs_by_user(user.email)
        try:
            if orgs:
                org_id = orgs[0].org_id
                user_info['org_id'] = org_id
                user_info['org_name'] = orgs[0].org_name
                user_info['quota_usage'] = seafile_api.get_org_user_quota_usage(org_id, user.email)
                user_info['quota_total'] = seafile_api.get_org_user_quota(org_id, user.email)
            else:
                user_info['quota_usage'] = seafile_api.get_user_self_usage(user.email)
                user_info['quota_total'] = seafile_api.get_user_quota(user.email)
        except Exception as e:
            logger.error(e)
            user_info['quota_usage'] = -1
            user_info['quota_total'] = -1

        user_info['create_time'] = timestamp_to_isoformat_timestr(user.ctime)

        last_login_obj = UserLastLogin.objects.get_by_username(user.email)
        user_info['last_login'] = datetime_to_isoformat_timestr(
            last_login_obj.last_login) if last_login_obj else ''

        try:
            admin_role = AdminRole.objects.get_admin_role(user.email)
            user_info['admin_role'] = admin_role.role
        except AdminRole.DoesNotExist:
            user_info['admin_role'] = DEFAULT_ADMIN

        admin_users_info.append(user_info)

    result = {
        'admin_user_list': admin_users_info,
    }
    return Response(result)
def get_info_of_users_order_by_quota_usage(self, source, direction,
                                           page, per_page):
    # get users' quota usage info
    user_usage_dict = {}
    users_with_usage = seafile_api.list_user_quota_usage()
    for user in users_with_usage:
        email = user.user
        if email not in user_usage_dict:
            user_usage_dict[email] = user.usage

    # get all users and map quota usage to each user
    if source == 'db':
        users = ccnet_api.get_emailusers('DB', -1, -1)
    else:
        users = ccnet_api.get_emailusers('LDAPImport', -1, -1)

    for user in users:
        email = user.email
        user.quota_usage = user_usage_dict.get(email, -1)

    # sort
    users.sort(key=lambda item: item.quota_usage,
               reverse=direction == 'desc')

    data = []
    MULTI_INSTITUTION = getattr(settings, 'MULTI_INSTITUTION', False)
    for user in users[(page - 1) * per_page:page * per_page]:
        info = {}
        info['email'] = user.email
        info['name'] = email2nickname(user.email)
        info['contact_email'] = email2contact_email(user.email)

        profile = Profile.objects.get_profile_by_user(user.email)
        info['login_id'] = profile.login_id if profile and profile.login_id else ''

        info['is_staff'] = user.is_staff
        info['is_active'] = user.is_active
        info['create_time'] = timestamp_to_isoformat_timestr(user.ctime)

        info['quota_usage'] = user.quota_usage
        info['quota_total'] = seafile_api.get_user_quota(user.email)

        last_login_obj = UserLastLogin.objects.get_by_username(user.email)
        info['last_login'] = datetime_to_isoformat_timestr(
            last_login_obj.last_login) if last_login_obj else ''

        info['role'] = get_user_role(user)

        if MULTI_INSTITUTION:
            info['institution'] = profile.institution if profile else ''

        data.append(info)

    return data
def to_dict(self):
    r_repo = seafile_api.get_repo(self.origin_repo_id)
    if not r_repo:
        raise DraftFileConflict

    return {
        'id': self.pk,
        'creator': self.creator,
        'status': self.status,
        'creator_name': email2nickname(self.creator),
        'draft_origin_repo_id': self.origin_repo_id,
        'draft_origin_repo_name': r_repo.name,
        'draft_origin_file_version': self.origin_file_version,
        'draft_publish_file_version': self.publish_file_version,
        'draft_file_path': self.draft_file_path,
        'created_at': datetime_to_isoformat_timestr(self.created_at),
        'updated_at': datetime_to_isoformat_timestr(self.updated_at),
    }
def get_dtable_by_workspace(self, workspace):
    try:
        dtables = super(DTablesManager, self).filter(workspace=workspace)
        dtable_list = list()
        for dtable in dtables:
            dtable_dict = dict()
            dtable_dict['id'] = dtable.pk
            dtable_dict['workspace_id'] = dtable.workspace_id
            dtable_dict['uuid'] = dtable.uuid
            dtable_dict['name'] = dtable.name
            dtable_dict['creator'] = email2nickname(dtable.creator)
            dtable_dict['modifier'] = email2nickname(dtable.modifier)
            dtable_dict['created_at'] = datetime_to_isoformat_timestr(dtable.created_at)
            dtable_dict['updated_at'] = datetime_to_isoformat_timestr(dtable.updated_at)
            dtable_list.append(dtable_dict)
        return dtable_list
    except self.model.DoesNotExist:
        return None
def get(self, request, group_id, format=None):
    """List all group discussions. Only group members can perform this op.
    """
    # 1 <= page, defaults to 1
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    if page < 0:
        page = 1

    # 1 <= per_page <= 100, defaults to 20
    try:
        per_page = int(request.GET.get('per_page', '20'))
    except ValueError:
        per_page = 20
    if per_page < 1 or per_page > 100:
        per_page = 20

    paginator = Paginator(GroupMessage.objects.filter(
        group_id=group_id).order_by('-timestamp'), per_page)

    try:
        group_msgs = paginator.page(page)
    except (EmptyPage, InvalidPage):
        group_msgs = paginator.page(paginator.num_pages)

    try:
        avatar_size = int(request.GET.get('avatar_size', AVATAR_DEFAULT_SIZE))
    except ValueError:
        avatar_size = AVATAR_DEFAULT_SIZE

    msgs = []
    for msg in group_msgs:
        info = get_user_common_info(msg.from_email, avatar_size)
        isoformat_timestr = datetime_to_isoformat_timestr(msg.timestamp)
        msgs.append({
            "id": msg.pk,
            "group_id": group_id,
            "user_name": info["name"],
            "user_email": info["email"],
            "user_contact_email": info["contact_email"],
            "avatar_url": info["avatar_url"],
            "content": msg.message,
            "created_at": isoformat_timestr
        })

    return HttpResponse(json.dumps({
        "msgs": msgs,
        "current_page": page,
        "page_num": paginator.num_pages,
    }), status=200, content_type=json_content_type)
def to_dict(self):
    o = self
    return {
        'id': o.pk,
        'repo_id': o.repo_id,
        'parent_path': o.parent_path,
        'item_name': o.item_name,
        'comment': o.comment,
        'created_at': datetime_to_isoformat_timestr(o.created_at),
    }
def get_share_link_info(fileshare):
    data = {}
    token = fileshare.token

    repo_id = fileshare.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = fileshare.path
    if path:
        obj_name = '/' if path == '/' else os.path.basename(path.rstrip('/'))
    else:
        obj_name = ''

    if fileshare.expire_date:
        expire_date = datetime_to_isoformat_timestr(fileshare.expire_date)
    else:
        expire_date = ''

    if fileshare.ctime:
        ctime = datetime_to_isoformat_timestr(fileshare.ctime)
    else:
        ctime = ''

    data['username'] = fileshare.username
    data['repo_id'] = repo_id
    data['repo_name'] = repo.repo_name if repo else ''
    data['path'] = path
    data['obj_name'] = obj_name
    data['is_dir'] = fileshare.s_type == 'd'
    data['token'] = token
    data['link'] = gen_shared_link(token, fileshare.s_type)
    data['view_cnt'] = fileshare.view_cnt
    data['ctime'] = ctime
    data['expire_date'] = expire_date
    data['is_expired'] = fileshare.is_expired()

    return data
def get(self, request, start_time, end_time):
    data = get_user_activity_stats_by_day(start_time, end_time,
                                          get_time_offset())

    res_data = []
    init_data = get_init_data(start_time, end_time)
    for e in data:
        init_data[e[0]] = e[1]
    for k, v in init_data.items():
        res_data.append({'datetime': datetime_to_isoformat_timestr(k),
                         'count': v})

    return Response(sorted(res_data, key=lambda x: x['datetime']))
def get_log_info(log_obj):
    isoformat_timestr = datetime_to_isoformat_timestr(log_obj.datetime)
    log_info = {
        "email": log_obj.email,
        "name": email2nickname(log_obj.email),
        "operation": log_obj.operation,
        "detail": json.loads(log_obj.detail),
        "datetime": isoformat_timestr,
    }
    return log_info
def get(self, request, group_id, format=None):
    """List all group discussions. Only group members can perform this op.
    """
    # 1 <= page, defaults to 1
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    if page < 0:
        page = 1

    # 1 <= per_page <= 100, defaults to 20
    try:
        per_page = int(request.GET.get('per_page', '20'))
    except ValueError:
        per_page = 20
    if per_page < 1 or per_page > 100:
        per_page = 20

    paginator = Paginator(GroupMessage.objects.filter(
        group_id=group_id).order_by('-timestamp'), per_page)

    try:
        group_msgs = paginator.page(page)
    except (EmptyPage, InvalidPage):
        group_msgs = paginator.page(paginator.num_pages)

    try:
        avatar_size = int(request.GET.get('avatar_size', AVATAR_DEFAULT_SIZE))
    except ValueError:
        avatar_size = AVATAR_DEFAULT_SIZE

    msgs = []
    for msg in group_msgs:
        info = get_user_common_info(msg.from_email, avatar_size)
        isoformat_timestr = datetime_to_isoformat_timestr(msg.timestamp)
        msgs.append({
            "id": msg.pk,
            "group_id": group_id,
            "user_name": info["name"],
            "user_email": info["email"],
            "user_contact_email": info["contact_email"],
            "avatar_url": request.build_absolute_uri(info["avatar_url"]),
            "content": msg.message,
            "created_at": isoformat_timestr
        })

    return HttpResponse(json.dumps({
        "msgs": msgs,
        "current_page": page,
        "page_num": paginator.num_pages,
    }), status=200, content_type=json_content_type)
def to_dict(self):
    return {
        'id': self.pk,
        'owner': self.username,
        'owner_nickname': email2nickname(self.username),
        'name': self.name,
        'slug': self.slug,
        'link': self.link,
        'permission': self.permission,
        'created_at': datetime_to_isoformat_timestr(self.created_at),
        'updated_at': timestamp_to_isoformat_timestr(self.updated_at),
    }
def get_user_devices(username):
    devices = TokenV2.objects.get_user_devices(username)
    peer_repos_map = get_user_synced_repo_infos(username)

    for device in devices:
        if device['platform'] in DESKTOP_PLATFORMS:
            peer_id = device['device_id']
            repos = peer_repos_map.get(peer_id, [])
            device['synced_repos'] = repos
            if repos:
                device['last_accessed'] = max(device['last_accessed'],
                                              _last_sync_time(repos))

        device['last_accessed'] = datetime_to_isoformat_timestr(device['last_accessed'])

    return devices
def list_draft_by_repo_id(self, repo_id, status='open'):
    """List drafts by repo id.
    """
    drafts = []
    qs = self.filter(origin_repo_id=repo_id, status=status)
    for d in qs:
        draft = {}
        draft['id'] = d.id
        draft['owner_nickname'] = email2nickname(d.username)
        draft['origin_repo_id'] = d.origin_repo_id
        draft['draft_file_path'] = d.draft_file_path
        draft['created_at'] = datetime_to_isoformat_timestr(d.created_at)
        drafts.append(draft)

    return drafts
def get(self, request, format=None):
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '50'))
    except ValueError:
        current_page = 1
        per_page = 50

    platform = request.GET.get('platform', None)
    start = (current_page - 1) * per_page
    end = current_page * per_page + 1

    devices = TokenV2.objects.get_devices(platform, start, end)
    if len(devices) == end - start:
        devices = devices[:per_page]
        has_next_page = True
    else:
        has_next_page = False

    return_results = []
    for device in devices:
        result = {}
        result['client_version'] = device.client_version
        result['device_id'] = device.device_id
        result['device_name'] = device.device_name
        result['last_accessed'] = datetime_to_isoformat_timestr(device.last_accessed)
        result['last_login_ip'] = device.last_login_ip
        result['user'] = device.user
        result['user_name'] = email2nickname(device.user)
        result['platform'] = device.platform
        result['is_desktop_client'] = result['platform'] in DESKTOP_PLATFORMS
        return_results.append(result)

    page_info = {
        'has_next_page': has_next_page,
        'current_page': current_page
    }

    return Response({"page_info": page_info, "devices": return_results})
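# The two device-listing views above use a fetch-one-extra pagination idiom:
# request per_page + 1 rows, and if the backend returns all of them, at least
# one more page exists. A standalone illustration of the idiom (a hypothetical
# helper, not part of seahub):

def paginate_sketch(items, current_page, per_page):
    start = (current_page - 1) * per_page
    window = items[start:start + per_page + 1]
    has_next_page = len(window) > per_page
    return window[:per_page], has_next_page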
def get(self, request, start_time, end_time):
    op_type_list = ['web-file-upload', 'web-file-download',
                    'sync-file-download', 'sync-file-upload',
                    'link-file-upload', 'link-file-download']
    init_count = [0] * 6
    init_data = get_init_data(start_time, end_time,
                              dict(zip(op_type_list, init_count)))

    for e in get_system_traffic_by_day(start_time, end_time,
                                       get_time_offset()):
        dt, op_type, count = e
        init_data[dt].update({op_type: count})

    res_data = []
    for k, v in init_data.items():
        res = {'datetime': datetime_to_isoformat_timestr(k)}
        res.update(v)
        res_data.append(res)

    return Response(sorted(res_data, key=lambda x: x['datetime']))
def get(self, request):
    """List all notifications, or filter them by user name.

    Permission checking:
    1. login and is admin user.
    """
    user_name = request.GET.get('username', '')

    # argument check

    # permission check
    # permission_classes will return 403, if not admin user

    # prepare default values
    result = {}
    try:
        per_page = int(request.GET.get('per_page', ''))
        page = int(request.GET.get('page', ''))
    except ValueError:
        per_page = 100
        page = 1

    start = (page - 1) * per_page
    end = page * per_page
    total_count = 0

    # resource check
    if user_name != '':
        # return all notifications of a user given by name
        total_count = UserNotification.objects.get_user_notifications(user_name).count()
        notification_list = UserNotification.objects.get_user_notifications(user_name)[start:end]
    else:
        # return all notifications of all users
        total_count = UserNotification.objects.get_all_notifications().count()
        notification_list = UserNotification.objects.get_all_notifications()[start:end]

    # if no notifications exist, return an empty list
    if not notification_list:
        result['count'] = 0
        result['notification_list'] = []
        return Response(result)

    result_notification = update_notice_detail(request, notification_list)
    notification_list = []
    for i in result_notification:
        notification_info = {}
        notification_info['id'] = i.id
        notification_info['type'] = i.msg_type
        notification_info['time'] = datetime_to_isoformat_timestr(i.timestamp)

        if i.detail is not None:
            notification_info['detail'] = i.detail
        else:
            notification_info['detail'] = {}

        notification_list.append(notification_info)

    result['count'] = total_count
    result['notification_list'] = notification_list

    return Response(result)
def get(self, request):
    # check the date format, should be like '2015-10-10'
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if not check_time_period_valid(start, end):
        error_msg = 'start or end date invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    try:
        events = get_log_events_by_type_and_time('file_audit', start, end)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    result = []
    if events:
        # get name/contact_email dict for events' user/repo_owner
        ev_user_list = []
        ev_repo_owner_list = []
        for ev in events:
            repo_id = ev.repo_id
            repo = seafile_api.get_repo(repo_id)
            if repo:
                ev.repo_name = repo.name
                ev.repo_owner = seafile_api.get_repo_owner(repo_id) or \
                    seafile_api.get_org_repo_owner(repo_id)
            else:
                ev.repo_name = ''
                ev.repo_owner = ''

            ev_user_list.append(ev.user)
            ev_repo_owner_list.append(ev.repo_owner)

        ev_user_name_dict = get_user_name_dict(ev_user_list)
        ev_user_contact_email_dict = get_user_contact_email_dict(ev_user_list)
        ev_repo_owner_name_dict = get_user_name_dict(ev_repo_owner_list)
        ev_repo_owner_contact_email_dict = get_user_contact_email_dict(ev_repo_owner_list)

        for ev in events:
            result.append({
                'repo_id': ev.repo_id,
                'repo_name': ev.repo_name,
                'repo_owner_email': ev.repo_owner,
                'repo_owner_name': ev_repo_owner_name_dict[ev.repo_owner],
                'repo_owner_contact_email': ev_repo_owner_contact_email_dict[ev.repo_owner],
                'time': datetime_to_isoformat_timestr(ev.timestamp),
                'ip': ev.ip,
                'file_path': ev.file_path,
                'etype': ev.etype,
                'user_email': ev.user,
                'user_name': ev_user_name_dict[ev.user],
                'user_contact_email': ev_user_contact_email_dict[ev.user],
            })

    return Response(result)
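# get_user_name_dict and get_user_contact_email_dict are not shown in this
# section. Presumably they batch the per-user lookups once, so the loops above
# become plain dict access instead of one profile query per event. A
# hypothetical sketch of the name variant, built on the email2nickname helper
# already used throughout this section:

def get_user_name_dict_sketch(email_list):
    return {email: email2nickname(email) for email in set(email_list)}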