def ips_to_sql(req):
    """Drain the Redis "all_ips_list" queue into MySQL ``Access_ip`` rows.

    Each popped IP is geolocated via ``sina_ip`` (the ``[-3:-1]`` slice keeps
    the attribution fragment the model expects) and saved as one row.
    Returns an ``HttpResponse`` reporting how many IPs were queued at start.
    """
    redis_13 = RedisDriver().master_13
    queued_total = redis_13.llen("all_ips_list")
    while redis_13.llen("all_ips_list") > 0:
        popped_ip = redis_13.rpop("all_ips_list")
        record = Access_ip()
        record.ip = popped_ip
        record.ip_attribution = sina_ip(popped_ip)[-3:-1]
        record.save()
    return HttpResponse("{0}-IP-TO_MYSQL".format(queued_total))
def access_count_day(req):
    """Persist yesterday's visit counters (IP / PV) from Redis into MySQL.

    Reads the ``Day:<yesterday>`` hash in Redis db 13 and stores its ``IP``
    and ``PV`` fields as a single ``Access_amount_day`` row, logging the
    same summary it returns.

    Fix: removed the unused local ``day`` (today's date was computed but
    never referenced).
    """
    master_13 = RedisDriver().master_13
    # Sync yesterday's counters (the redis hash keyed 'Day:YYYY-MM-DD').
    yesterday = str(datetime.date.today() - datetime.timedelta(days=1))
    yesd = 'Day:{0}'.format(yesterday)
    access_ip = master_13.hget(yesd, 'IP')
    access_pv = master_13.hget(yesd, 'PV')
    item = Access_amount_day()
    item.access_time = yesterday
    item.access_ip = access_ip
    item.access_pv = access_pv
    item.save()
    access_log.info("{0}访问量-IP:{1}-PV:{2}".format(yesterday, access_ip, access_pv))
    return HttpResponse("{0}访问量-IP:{1}-PV:{2}".format(yesterday, access_ip, access_pv))
def access_count_mon(req):
    """Persist last month's visit counters (IP / PV) from Redis into MySQL.

    Derives the month from yesterday's date (``YYYY-MM`` prefix), reads the
    ``Month:<month>`` hash in Redis db 13, and stores its ``IP`` and ``PV``
    fields as one ``Access_amount_mon`` row.

    Fix: removed the unused local ``mon`` (current month was computed but
    never referenced; the function uses yesterday's month instead).
    """
    master_13 = RedisDriver().master_13
    yesterday = str(datetime.date.today() - datetime.timedelta(days=1))
    yestermon = yesterday[:7]
    # Sync last month's counters.
    yesm = 'Month:{0}'.format(yestermon)
    access_ip = master_13.hget(yesm, 'IP')
    access_pv = master_13.hget(yesm, 'PV')
    item = Access_amount_mon()
    item.access_time = yestermon
    item.access_ip = access_ip
    item.access_pv = access_pv
    item.save()
    access_log.info("上月访问量-IP:{0}-PV:{1}".format(access_ip, access_pv))
    return HttpResponse("上月访问量-IP:{0}-PV:{1}".format(access_ip, access_pv))
def about(req):
    """About page: accepts a visitor message via POST and renders recent
    visitors plus the stored message board.

    POST with non-empty 'content' stores one message per IP-attribution,
    rate-limited by a 60-second Redis key; responds with JSON status.
    GET renders 'about_new.html' with recent-visitor info and all messages.
    """
    master_14 = RedisDriver().master_14
    master_13 = RedisDriver().master_13
    ip = ''
    # Prefer the proxy-forwarded address when present.
    if 'HTTP_X_FORWARDED_FOR' in req.META:
        ip = req.META['HTTP_X_FORWARDED_FOR']
    else:
        ip = req.META['REMOTE_ADDR']
    ip_attribution = sina_ip(ip)
    if req.method == "POST":
        content = req.POST.get('content', '')
        if content != '':
            key = ip_attribution + ':' + content
            value = time.strftime("%Y-%m-%d %H:%M", time.localtime())
            # Rate limit: a key per attribution; exists => reject as 'fail'.
            t = master_13.get(ip_attribution)
            if t is None:
                master_13.hset('message', key, value)
                # 60-second cooldown before this visitor may post again.
                master_13.setex(ip_attribution, 60, value)
                return HttpResponse(json.dumps({'status': 'ok', 'key': key, 'value': value}))
            else:
                return HttpResponse(json.dumps({'status': 'fail'}))
    messages = master_13.hgetall('message')
    ips_info = {}
    # Recently seen IPs maintained by the middleware (max 5 entries).
    ips_access = cache.get("ips_access", [])
    if ips_access:
        for ip in ips_access:
            ip_attribution = sina_ip(ip)
            ip_access_time = master_14.get(ip)
            ips_info[ip_attribution] = ip_access_time
    try:
        # Newest first; values are timestamp strings so string sort works.
        ips_info = sorted(ips_info.items(), key=lambda d: d[1], reverse=True)
        messages = sorted(messages.items(), key=lambda d: d[1], reverse=True)
    except Exception as e:
        # Best-effort sort: render unsorted if a value is None/uncomparable.
        pass
    return render_to_response('about_new.html', {"ips_info": ips_info, "messages": messages}, RequestContext(req))
def index(req): blogs = [] # 登录 if req.is_ajax(): username = req.POST.get("username", None) password = req.POST.get("password", None) if login_validate(req, username, password): response = render_to_response('index_new.html', RequestContext(req)) response.set_cookie('username', username, 3600) return response elif req.method == 'POST': content = req.POST.get("content", None) if content: _blogs = Blog.objects.all().order_by("-post_time") for blog in _blogs: if content.lower() in blog.title.lower(): blogs.append(blog) request_log.info('SEARCH - BLOG - {0}'.format(blogs)) master_13 = RedisDriver().master_13 master_15 = RedisDriver().master_15 online_ips = master_15.dbsize() day = time.strftime('%Y-%m-%d', time.localtime(time.time())) yesterday = str(datetime.date.today() - datetime.timedelta(days=1)) d = 'Day:{0}'.format(day) yesd = 'Day:{0}'.format(yesterday) access_day_ip = master_13.hget(d, 'IP') access_day_pv = master_13.hget(d, 'PV') access_yesterday_ip = master_13.hget(yesd, 'IP') access_yesterday_pv = master_13.hget(yesd, 'PV') if not blogs: blogs = Blog.objects.all().order_by("-post_time") num = None page_num_list = [] try: num = len(blogs) paginator = Paginator(blogs, 10) try: page = int(req.GET.get('page', 1)) blogs = paginator.page(page) for i in range(blogs.paginator.num_pages): page_num_list.append(i + 1) except (EmptyPage, InvalidPage, PageNotAnInteger): blogs = paginator.page(1) except Exception as e: error_log.error(e) return render_to_response('index_new.html', { 'blogs': blogs, 'page_num_list': page_num_list, 'online_ips': online_ips, 'access_day_ip': access_day_ip, 'access_day_pv': access_day_pv, 'access_yesterday_ip': access_yesterday_ip, 'access_yesterday_pv': access_yesterday_pv, }, RequestContext(req))
def __init__(self):
    """Cache handles to the three Redis databases this object uses."""
    for handle_name in ("master_13", "master_14", "master_15"):
        setattr(self, handle_name, getattr(RedisDriver(), handle_name))
class IPMiddleware(object):
    """Per-request visitor tracking middleware.

    On every request it records the user agent, tracks the visitor's last
    access time, maintains the online-visitor set, the all-IPs set/queue,
    the 5-most-recent-visitors cache entry, and increments PV/IP counters.
    Spiders (by user-agent substring) and overly long IPs are skipped.
    """

    def __init__(self):
        # Handles to the three Redis databases used below.
        self.master_13 = RedisDriver().master_13
        self.master_14 = RedisDriver().master_14
        self.master_15 = RedisDriver().master_15

    def process_request(self, request):
        # ip = request.META.get('REMOTE_ADDR', '0.0.0.0')
        # Prefer the proxy-forwarded address when present.
        if 'HTTP_X_FORWARDED_FOR' in request.META:
            ip = request.META['HTTP_X_FORWARDED_FOR']
        else:
            ip = request.META['REMOTE_ADDR']
        user_agent = request.META.get('HTTP_USER_AGENT', "")
        day = time.strftime('%Y-%m-%d', time.localtime(time.time()))
        # Collect every user agent seen today.
        self.master_13.sadd("user_agent_set:{0}".format(day), user_agent)
        spider = False
        spider_list = ["Spider", "spider", "Googlebot", "bingbot"]
        for i in spider_list:
            if i in user_agent:
                spider = True
                break
        # len(ip) >= 20 likely means a forwarded-for chain / IPv6 — skip it.
        if len(ip) < 20:
            if not spider:
                # ip = "183.206.160.95"
                # Redis layout:
                #   db 14, key <ip>          -> last access time for that IP
                #   db 15, "online_ips_str:" -> one short-TTL key per online visitor
                #   db 13, "all_ips_set"     -> all distinct IPs ever seen
                #   db 13, "all_ips_list"    -> queue consumed by ips_to_sql
                # Update this IP's last access time.
                self.master_14.set(ip, time.strftime("%Y-%m-%d %H:%M", time.localtime()))
                # Online count: 60-second TTL key per visitor; dbsize() = count.
                self.master_15.setex("online_ips_str:{0}".format(ip), 60 * 1, time.strftime("%Y-%m-%d %H:%M", time.localtime()))
                online_ips = self.master_15.dbsize()
                # Record every distinct IP (set + queue for the MySQL sync job).
                if not self.master_13.sismember("all_ips_set", ip):
                    self.master_13.sadd("all_ips_set", ip)
                    self.master_13.lpush("all_ips_list", ip)
                # Recent visitors: "ips_access" cache entry holds at most the
                # 5 most recent distinct IPs, e.g. ["1.2.3.4", "5.6.7.8"].
                ips_access = cache.get("ips_access", [])
                if ip not in ips_access:
                    ips_access.append(ip)
                    if len(ips_access) > 5:
                        ips_access.pop(0)
                    # NOTE(review): placement reconstructed — cache refresh
                    # happens only when a new IP is appended.
                    cache.set("ips_access", ips_access, 60 * 60 * 24 * 30)
                self.clicks(ip)

    # Accumulate PV and unique-IP counters per day and per month.
    def clicks(self, ip):
        mon = time.strftime('%Y-%m', time.localtime(time.time()))
        m = 'Month:{0}'.format(mon)
        day = time.strftime('%Y-%m-%d', time.localtime(time.time()))
        d = 'Day:{0}'.format(day)
        # Every request counts as one page view, daily and monthly.
        self.master_13.hincrby(d, 'PV')
        self.master_13.hincrby(m, 'PV')
        self.master_13.sadd('all_mon_set:month', mon)  # all months ever counted
        self.master_13.sadd('all_day_set:day', day)  # all days ever counted
        # Unique IPs this month: bump the IP counter only on first sighting.
        if not self.master_13.sismember("mon_ip_set:{0}".format(mon), ip):
            self.master_13.sadd("mon_ip_set:{0}".format(mon), ip)
            self.master_13.hincrby(m, 'IP')
            # self.master_13.hget(m, 'IP')
        # Unique IPs today: same first-sighting rule.
        if not self.master_13.sismember("day_ip_set:{0}".format(day), ip):
            self.master_13.sadd("day_ip_set:{0}".format(day), ip)
            self.master_13.hincrby(d, 'IP')
def get(self):
    """BT search handler (Tornado): query MongoDB for torrents matching
    'bt_keywords' and render a paginated result list.

    Also best-effort logs the search term per client IP into Redis.
    Without a keyword, renders an empty list page.
    """
    page_index = 1
    bt_keywords = None
    try:
        bt_keywords = self.get_argument('bt_keywords')  # search keyword
        page_index = self.get_argument('page_index', 1)  # page number
    except Exception as e:
        error_log.error(e)
    if bt_keywords:
        try:
            # Best-effort search logging: one Redis list per client IP.
            from pywormsite import RedisDriver
            ip = self.request.remote_ip
            if not ip:
                ip = '111.111.111.111'
            key = 'bt_search:{0}'.format(ip)
            master_13 = RedisDriver().master_13
            master_13.lpush(key, bt_keywords)
        except:
            # Logging must never break the search itself.
            pass
        mongo_url = 'mongodb://localhost:27017/'
        db = pymongo.MongoClient(mongo_url).bt
        # Match the keyword in the torrent name (case-insensitive) or in any
        # contained file name.
        mongo_find = db.bt_info.find({
            '$or': [{
                'name': {
                    '$regex': bt_keywords,
                    '$options': 'i'
                }
            }, {
                "files": {
                    "$elemMatch": {
                        "file_name": {
                            '$regex': bt_keywords
                        }
                    }
                }
            }]
        })
        # A cursor cannot be skip()ped/limit()ed again after iteration;
        # clone() gives a fresh cursor over the same query.
        mongo_find_clone = mongo_find.clone()
        # Cap the reported result count: probe whether results exist past
        # 10 pages ahead; if so, pretend the total is exactly that cap.
        max_data = mongo_find.skip(10 * (int(page_index) + 9)).limit(1)
        bt_data = False
        for m in max_data:
            if m:
                bt_data = True
        # try:
        #     bt_data = max_data.next()
        # except StopIteration:
        #     bt_data = None
        if bt_data:
            bt_count = 10 * (int(page_index) + 9) - 1
        else:
            bt_count = int(mongo_find.count())
        # Current page: 10 results, oldest first by create_at.
        links = mongo_find_clone.sort('create_at', pymongo.ASCENDING).skip(
            10 * (int(page_index) - 1)).limit(10)
        page_num = int(bt_count / 10) + 1  # total number of pages
        new_links = []
        for link in links:
            # Show at most the first 15 files per torrent.
            link_files = link.get('files')
            _files = link_files[:15]
            link['files'] = _files
            new_links.append(link)
        self.render('bt_list.html', links=new_links, page_index=int(page_index), page_num=int(page_num), bt_keywords=bt_keywords, time=time)
    else:
        self.render('bt_list.html', links=None, page_index=int(page_index), page_num=0, bt_keywords=bt_keywords, time=time)
def online_ips(req):
    """Return the current online-visitor count (number of keys in Redis db 15)."""
    return HttpResponse(RedisDriver().master_15.dbsize())
def index(req):
    """Home page: AJAX login, title search via POST, paginated blog list.

    Also pulls today's / yesterday's IP+PV counters and the current online
    count from Redis for display in 'index_new.html'.

    Fixes: removed a leftover debug ``print('in index')`` and the unused
    local ``num``.
    """
    blogs = []
    # Login (AJAX): set a username cookie on success.
    if req.is_ajax():
        username = req.POST.get("username", None)
        password = req.POST.get("password", None)
        if login_validate(req, username, password):
            response = render_to_response('index_new.html', RequestContext(req))
            response.set_cookie('username', username, 3600)
            return response
    elif req.method == 'POST':
        # Search: case-insensitive substring match against blog titles.
        content = req.POST.get("content", None)
        if content:
            _blogs = Blog.objects.all().order_by("-post_time")
            for blog in _blogs:
                if content.lower() in blog.title.lower():
                    blogs.append(blog)
            request_log.info('SEARCH - BLOG - {0}'.format(blogs))
    master_13 = RedisDriver().master_13
    master_15 = RedisDriver().master_15
    # Each key in db 15 is one online visitor (set with a short TTL).
    online_ips = master_15.dbsize()
    day = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    yesterday = str(datetime.date.today() - datetime.timedelta(days=1))
    d = 'Day:{0}'.format(day)
    yesd = 'Day:{0}'.format(yesterday)
    access_day_ip = master_13.hget(d, 'IP')
    access_day_pv = master_13.hget(d, 'PV')
    access_yesterday_ip = master_13.hget(yesd, 'IP')
    access_yesterday_pv = master_13.hget(yesd, 'PV')
    # No search hits (or plain GET): show everything, newest first.
    if not blogs:
        blogs = Blog.objects.all().order_by("-post_time")
    page_num_list = []
    try:
        paginator = Paginator(blogs, 10)
        try:
            page = int(req.GET.get('page', 1))
            blogs = paginator.page(page)
            for i in range(blogs.paginator.num_pages):
                page_num_list.append(i + 1)
        except (EmptyPage, InvalidPage, PageNotAnInteger):
            # Bad/out-of-range page number: fall back to page 1.
            blogs = paginator.page(1)
    except Exception as e:
        error_log.error(e)
    return render_to_response(
        'index_new.html', {
            'blogs': blogs,
            'page_num_list': page_num_list,
            'online_ips': online_ips,
            'access_day_ip': access_day_ip,
            'access_day_pv': access_day_pv,
            'access_yesterday_ip': access_yesterday_ip,
            'access_yesterday_pv': access_yesterday_pv,
        }, RequestContext(req))
def about(req):
    """About page: accepts a visitor message via POST and renders recent
    visitors plus the stored message board.

    POST with non-empty 'content' stores one message per IP-attribution,
    rate-limited by a 60-second Redis key; responds with JSON status.
    GET renders 'about_new.html' with recent-visitor info and all messages.
    """
    master_14 = RedisDriver().master_14
    master_13 = RedisDriver().master_13
    ip = ''
    # Prefer the proxy-forwarded address when present.
    if 'HTTP_X_FORWARDED_FOR' in req.META:
        ip = req.META['HTTP_X_FORWARDED_FOR']
    else:
        ip = req.META['REMOTE_ADDR']
    ip_attribution = sina_ip(ip)
    if req.method == "POST":
        content = req.POST.get('content', '')
        if content != '':
            key = ip_attribution + ':' + content
            value = time.strftime("%Y-%m-%d %H:%M", time.localtime())
            # Rate limit: a key per attribution; exists => reject as 'fail'.
            t = master_13.get(ip_attribution)
            if t is None:
                master_13.hset('message', key, value)
                # 60-second cooldown before this visitor may post again.
                master_13.setex(ip_attribution, 60, value)
                return HttpResponse(
                    json.dumps({
                        'status': 'ok',
                        'key': key,
                        'value': value
                    }))
            else:
                return HttpResponse(json.dumps({'status': 'fail'}))
    messages = master_13.hgetall('message')
    ips_info = {}
    # Recently seen IPs maintained by the middleware (max 5 entries).
    ips_access = cache.get("ips_access", [])
    if ips_access:
        for ip in ips_access:
            ip_attribution = sina_ip(ip)
            ip_access_time = master_14.get(ip)
            ips_info[ip_attribution] = ip_access_time
    try:
        # Newest first; values are timestamp strings so string sort works.
        ips_info = sorted(ips_info.items(), key=lambda d: d[1], reverse=True)
        messages = sorted(messages.items(), key=lambda d: d[1], reverse=True)
    except Exception as e:
        # Best-effort sort: render unsorted if a value is None/uncomparable.
        pass
    return render_to_response('about_new.html', {
        "ips_info": ips_info,
        "messages": messages
    }, RequestContext(req))
def blog(req, blog_id):
    """Blog detail page: tracks visitors, bumps PV/IP counters, handles
    comment posting (top-level and replies), and renders '<blog_id>.html'.
    """
    master_14 = RedisDriver().master_14
    ip = ''
    # Prefer the proxy-forwarded address when present.
    if 'HTTP_X_FORWARDED_FOR' in req.META:
        ip = req.META['HTTP_X_FORWARDED_FOR']
    else:
        ip = req.META['REMOTE_ADDR']
    user_agent = req.META.get('HTTP_USER_AGENT', "")
    spider = False
    spider_list = ["Spider", "spider", "Googlebot", "bingbot"]
    for i in spider_list:
        if i in user_agent:
            spider = True
            break
    # Per-blog recent-visitors list (max 5 distinct IPs), kept in cache.
    ips_access = cache.get("ips_access_{0}".format(blog_id), [])
    ip_attribution = "匿名"
    # Skip spiders and overlong (forwarded-chain/IPv6) addresses.
    if len(ip) < 20:
        if not spider:
            ip_attribution = sina_ip(ip)
            if ip not in ips_access:
                ips_access.append(ip)
                # print(len(ips_access))
                if len(ips_access) > 5:
                    ips_access.pop(0)
                # NOTE(review): placement reconstructed — refreshed only when
                # a new IP is appended.
                cache.set("ips_access_{0}".format(blog_id), ips_access, 60 * 60 * 24 * 30)
    ips_info = {}
    if ips_access:
        # Map attribution -> last access time for each recent visitor.
        for ip in ips_access:
            ip_attribution = sina_ip(ip)
            ip_access_time = master_14.get(ip)
            ips_info[ip_attribution] = ip_access_time
        ips_info = sorted(ips_info.items(), key=lambda d: d[1], reverse=True)
    ip_obj = IP_access.objects.filter(ip=ip)
    blog_obj = Blog.objects.filter(blog_id=blog_id)
    ip_num = 0
    if blog_obj:
        blog = Blog.objects.get(blog_id=blog_id)
        if ip_obj:
            # This IP already exists: link it to the blog and bump counters.
            ip = IP_access.objects.get(ip=ip)
            ip.blogs.add(blog)
            ip_num = blog.ip_access_set.all()
            blog.IP_num = len(ip_num)
            blog.PV_num += 1
            blog.save()
        else:
            # First visit from this IP: create it, then link and bump.
            ip = IP_access(ip=ip)
            ip.save()
            ip.blogs.add(blog)
            ip_num = blog.ip_access_set.all()
            blog.IP_num = len(ip_num)
            blog.PV_num += 1
            blog.save()
    # print(len(ip_num))
    if req.method == "POST":
        content = req.POST.get('content', None)
        p_id = req.POST.get('p_id', None)
        if p_id and content:
            # Reply to an existing comment (parent_id = p_id).
            blog = Blog.objects.get(blog_id=blog_id)
            Replay.objects.create(content=content, blog=blog, replay_time=datetime.datetime.today(), replay_user=ip_attribution, replay_id=0, parent_id=p_id)
            return HttpResponse(json.dumps({"content": content}))
        elif content:
            # New top-level comment; replay_id is derived from the count of
            # non-top-level replies (reconstructed numbering scheme).
            item = Blog.objects.get(blog_id=blog_id)
            num = 0
            try:
                blog = Blog.objects.get(blog_id=blog_id)
                replays_re = Replay.objects.filter(blog=blog, replay_id=0)
                replays_er = Replay.objects.filter(blog=blog)
                replays = int(len(replays_er)) - (len(replays_re))
                num = replays
            except Exception as e:
                error_log.error(e)
            Replay.objects.create(content=content, blog=item, replay_time=datetime.datetime.today(), replay_user=ip_attribution, replay_id=num + 1, parent_id=0)
            return HttpResponse(json.dumps({"content": content}))
    blog_id = blog_id
    replays = None
    replay_num = None
    PV_num = None
    to_replays_dict = {}
    blog_list = []
    try:
        # replays = Blog.objects.get(blog_id=blog_id).replay_set.all()
        blog = Blog.objects.get(blog_id=blog_id)
        categorie = blog.categorie
        PV_num = blog.PV_num
        replays = Replay.objects.filter(blog=blog, parent_id=0)
        replay_num = int(len(replays))  # number of top-level comments
        # Group each top-level comment with its replies.
        for replay in replays:
            to_replays = Replay.objects.filter(blog=blog, parent_id=replay.id)
            to_replays_dict[replay] = to_replays
    except Exception as e:
        error_log.error(e)
    return render_to_response(blog_id + '.html', {
        'blog_id': blog_id,
        'replay_num': replay_num,
        'PV_num': PV_num,
        'ip_num': ip_num,
        'replays': replays,
        'to_replays_dict': to_replays_dict,
        'ips_info': ips_info
    }, context_instance=RequestContext(req))
def blog(req, blog_id):
    """Blog detail page: tracks visitors, bumps PV/IP counters, handles
    comment posting (top-level and replies), and renders '<blog_id>.html'.

    Fixes: removed a leftover debug ``print('11111111111111111111')``, the
    no-op ``blog_id = blog_id`` self-assignment, and the unused local
    ``blog_list``.
    """
    master_14 = RedisDriver().master_14
    ip = ''
    # Prefer the proxy-forwarded address when present.
    if 'HTTP_X_FORWARDED_FOR' in req.META:
        ip = req.META['HTTP_X_FORWARDED_FOR']
    else:
        ip = req.META['REMOTE_ADDR']
    user_agent = req.META.get('HTTP_USER_AGENT', "")
    spider = False
    spider_list = ["Spider", "spider", "Googlebot", "bingbot"]
    for i in spider_list:
        if i in user_agent:
            spider = True
            break
    # Per-blog recent-visitors list (max 5 distinct IPs), kept in cache.
    ips_access = cache.get("ips_access_{0}".format(blog_id), [])
    ip_attribution = "匿名"
    # Skip spiders and overlong (forwarded-chain/IPv6) addresses.
    if len(ip) < 20:
        if not spider:
            ip_attribution = sina_ip(ip)
            if ip not in ips_access:
                ips_access.append(ip)
                if len(ips_access) > 5:
                    ips_access.pop(0)
                cache.set("ips_access_{0}".format(blog_id), ips_access, 60 * 60 * 24 * 30)
    ips_info = {}
    if ips_access:
        # Map attribution -> last access time for each recent visitor;
        # skip IPs whose access time has expired from Redis.
        for ip in ips_access:
            ip_attribution = sina_ip(ip)
            ip_access_time = master_14.get(ip)
            if ip_access_time:
                ips_info[ip_attribution] = ip_access_time
        ips_info = sorted(ips_info.items(), key=lambda d: d[1], reverse=True)
    ip_obj = IP_access.objects.filter(ip=ip)
    blog_obj = Blog.objects.filter(blog_id=blog_id)
    ip_num = 0
    if blog_obj:
        blog = Blog.objects.get(blog_id=blog_id)
        if ip_obj:
            # This IP already exists: link it to the blog and bump counters.
            ip = IP_access.objects.get(ip=ip)
            ip.blogs.add(blog)
            ip_num = blog.ip_access_set.all()
            blog.IP_num = len(ip_num)
            blog.PV_num += 1
            blog.save()
        else:
            # First visit from this IP: create it, then link and bump.
            ip = IP_access(ip=ip)
            ip.save()
            ip.blogs.add(blog)
            ip_num = blog.ip_access_set.all()
            blog.IP_num = len(ip_num)
            blog.PV_num += 1
            blog.save()
    if req.method == "POST":
        content = req.POST.get('content', None)
        p_id = req.POST.get('p_id', None)
        if p_id and content:
            # Reply to an existing comment (parent_id = p_id).
            blog = Blog.objects.get(blog_id=blog_id)
            Replay.objects.create(content=content, blog=blog, replay_time=datetime.datetime.today(), replay_user=ip_attribution, replay_id=0, parent_id=p_id)
            return HttpResponse(json.dumps({"content": content}))
        elif content:
            # New top-level comment; replay_id is derived from the count of
            # non-top-level replies (existing numbering scheme, preserved).
            item = Blog.objects.get(blog_id=blog_id)
            num = 0
            try:
                blog = Blog.objects.get(blog_id=blog_id)
                replays_re = Replay.objects.filter(blog=blog, replay_id=0)
                replays_er = Replay.objects.filter(blog=blog)
                replays = int(len(replays_er)) - (len(replays_re))
                num = replays
            except Exception as e:
                error_log.error(e)
            Replay.objects.create(content=content, blog=item, replay_time=datetime.datetime.today(), replay_user=ip_attribution, replay_id=num + 1, parent_id=0)
            return HttpResponse(json.dumps({"content": content}))
    replays = None
    replay_num = None
    PV_num = None
    to_replays_dict = {}
    try:
        blog = Blog.objects.get(blog_id=blog_id)
        categorie = blog.categorie  # NOTE(review): read but unused; kept in case the field access matters
        PV_num = blog.PV_num
        replays = Replay.objects.filter(blog=blog, parent_id=0)
        replay_num = int(len(replays))  # number of top-level comments
        # Group each top-level comment with its replies.
        for replay in replays:
            to_replays = Replay.objects.filter(blog=blog, parent_id=replay.id)
            to_replays_dict[replay] = to_replays
    except Exception as e:
        error_log.error(e)
    return render_to_response(blog_id + '.html', {
        'blog_id': blog_id,
        'replay_num': replay_num,
        'PV_num': PV_num,
        'ip_num': ip_num,
        'replays': replays,
        'to_replays_dict': to_replays_dict,
        'ips_info': ips_info
    }, context_instance=RequestContext(req))