Пример #1
0
def get_suspensionqq():
    """Return a JSON page of suspended QQ accounts.

    Query-string parameters (all optional):
        sort_col  -- column to sort by (default 'CreatTime')
        sort_type -- 'ASC' or 'DESC' (default 'DESC')
        max       -- page size (default 5)
        start     -- page number, converted to a row offset

    Returns a JSON string: {"total": <row count>, "data": <page rows>}.
    """
    suspension_qq = SuspensionQQModel()
    # request.args.get() already returns None for absent keys, so the
    # default argument replaces the '== None' checks.
    sort_col = request.args.get('sort_col', 'CreatTime')
    sort_type = request.args.get('sort_type', 'DESC')
    limit = request.args.get('max', 5)
    # BUG FIX: the original called int() on the raw 'start' value BEFORE
    # checking for None, so a missing 'start' raised TypeError and the
    # None-check was unreachable.
    start = request.args.get('start')
    offset = 0 if start is None else int(start)
    offset = offset * int(limit)
    data = suspension_qq.get_suspensionqq(fileds="QQ")  # 'fileds' is the model's (misspelled) kwarg
    wirte_log_to_file("find suspension qq")
    total = len(data)
    data = suspension_qq.get_suspensionqq(limit=int(limit), offset=int(offset),
                                          sort_col=sort_col, order_type=sort_type)
    return json.dumps({"total": total, "data": data})
Пример #2
0
def get_all_website():
    """Return the list of website names seen in Elasticsearch as JSON.

    Query-string parameters (both required):
        type      -- 'all' (prepend an "all" entry) or 'website'
        rangetime -- look-back window in minutes

    Returns {"message": <name list>} on success, {"message": "empty value"}
    when a parameter is missing and {"message": "not found"} on failure.
    """
    url = app.config.get('es_host')
    timeout = app.config.get('es_timeout')
    es_lib = elasticsearchLib(url, timeout)
    type = request.args.get('type')
    rangetime = request.args.get('rangetime')
    if rangetime is None or type is None:
        return json.dumps({"message": "empty value"})
    wirte_log_to_file("get all website")
    if type == 'all':
        data = es_lib.get_all_website_name(index="*", is_all=True, rangeminute=int(rangetime))
    elif type == 'website':
        data = es_lib.get_all_website_name(index="*", is_all=False, rangeminute=int(rangetime))
    else:
        # BUG FIX: any other 'type' value left `data` unbound and raised
        # NameError; report it the same way as a failed lookup.
        data = 'not found'
    if data == 'not found':
        return json.dumps({"message": "not found"})
    return json.dumps({"message": data})
Пример #3
0
def get_suspensionIp():
    """Return a JSON page of suspended IP addresses (status == 0).

    Query-string parameters (all optional):
        sort_col  -- column to sort by (default 'SuspensionTime')
        sort_type -- 'ASC' or 'DESC' (default 'DESC')
        max       -- page size (default 5)
        start     -- page number, converted to a row offset

    Returns a JSON string: {"total": <row count>, "data": <page rows>}.
    """
    suspension_ip = SuspensionIpModel()
    sort_col = request.args.get('sort_col', 'SuspensionTime')
    sort_type = request.args.get('sort_type', 'DESC')
    limit = request.args.get('max', 5)
    # BUG FIX: the original ran int() and the offset multiplication before
    # the None check, so a missing 'start' raised TypeError and the check
    # was unreachable.
    start = request.args.get('start')
    offset = 0 if start is None else int(start)
    offset = offset * int(limit)
    wirte_log_to_file("find ip :get ip status =0")
    total_data = suspension_ip.get_suspensionIp(0, fileds="IpAddress")
    total = len(total_data)
    data = suspension_ip.get_suspensionIp(0, limit=int(limit), offset=int(offset),
                                          sort_col=sort_col, order_type=sort_type)
    return json.dumps({"total": total, "data": data})
Пример #4
0
def get_api_visit():
    """Return realtime visit counts for one API of one website as JSON.

    Query-string parameters (all required):
        website -- site name
        apiname -- API path
        type    -- time-range key (e.g. '15m', '1h', '1d')

    Returns {"message": <data>} on success, {"message": "empty value"}
    when a parameter is missing and {"message": "not found"} on failure.
    """
    url = app.config.get('es_host')
    timeout = app.config.get('es_timeout')
    es_lib = elasticsearchLib(url, timeout)
    website = request.args.get('website')
    apiname = request.args.get('apiname')
    type = request.args.get('type')
    info = "find website: %s +++++++ apiname:%s  rangetime:%s"%(website,apiname,type)
    wirte_log_to_file(info)
    # Idiom fix: compare against None with 'is', not '=='; guard clause
    # instead of nesting the whole success path in an else-branch.
    if type is None or apiname is None or website is None:
        return json.dumps({"message": "empty value"})
    data = es_lib.search_api_relatime(index="ns-*", website_name=website,
                                      api_name=apiname, type=type)
    if data == 'not found':
        return json.dumps({"message": "not found"})
    return json.dumps({"message": data})
Пример #5
0
 def get_suspension_qq(self, index, path, re_pattern):
     """Search the last 5 minutes of documents matching `path` and extract
     QQ ids from the 'cookies' field.

     Parameters:
         index      -- Elasticsearch index (or pattern) to query
         path       -- query_string expression selecting the documents
         re_pattern -- regex whose whole match looks like '<id>----<value>'

     Returns a dict {id: value}, or the string "notfound" on any error.
     """
     # Current time as epoch milliseconds; _range() derives the lower bound
     # of the 5-minute window in the same unit.
     lte = int(str(time.time()).split(".")[0]) * 1000
     gte = self._range(lte, 5)
     body = {
         "query": {
             "filtered": {  # legacy ES 1.x/2.x query DSL
                 "query": {
                     "query_string": {
                         "analyze_wildcard": True,
                         "query": path
                     }
                 },
                 "filter": {
                     "bool": {
                         "must": [
                             {
                                 "range": {
                                     "@timestamp": {
                                         "gte": gte,
                                         "lte": lte,
                                         "format": "epoch_millis"
                                     }
                                 }
                             }
                         ],
                         "must_not": []
                     }
                 }
             }
         },
         "aggs": {}
     }
     print(body)
     print(index)
     info = "get suspension_qq: index->%s,body->%s"%(index,str(body))
     wirte_log_to_file(info)
     try:
         hits = self.es.search(index=index, body=body)['hits']['hits']
         qq_number = {}
         match_num = re.compile(re_pattern)
         for hit in hits:
             # BUG FIX: dict.has_key() was removed in Python 3; use 'in'.
             if 'cookies' not in hit['_source']:
                 continue
             qq_n = match_num.search(str(hit['_source']['cookies']))
             if qq_n:
                 suspension_user = qq_n.group()
                 print(suspension_user)
                 suspension_user = suspension_user.split("----", 2)
                 qq_number[suspension_user[0]] = suspension_user[1]
         info = "get qq : %s"%(qq_number)
         wirte_log_to_file(info)
         return qq_number
     except Exception as e:
         return "notfound"
Пример #6
0
def search_api_visit():
    """Return a visit-count histogram for an API between two timestamps.

    Query-string parameters (all required):
        apiname -- site name, or 'all' for every site (mapped to '*')
        from/to -- window bounds in '%Y-%m-%d %H:%M:%S'
        type    -- histogram interval key

    Returns the histogram as JSON, or {"message": "empty value"} when a
    parameter is missing.
    """
    url = app.config.get('es_host')
    timeout = app.config.get('es_timeout')
    es_lib = elasticsearchLib(url, timeout)
    apiname = request.args.get('apiname')
    start = request.args.get('from')
    end = request.args.get('to')
    type = request.args.get('type')
    info = "find website: %s ----- rangetime:%s"%(apiname,type)
    wirte_log_to_file(info)
    # Idiom fix: compare against None with 'is', not '=='.
    if start is None or apiname is None or end is None or type is None:
        return json.dumps({"message": "empty value"})
    if apiname == 'all':
        apiname = "*"
    data = es_lib.get_realtime_api(index="ns-*", api_name=apiname,
                                   start=start, end=end, type=type)
    return json.dumps(data)
Пример #7
0
def search_suspension_ip():
    """List client IPs with more than 5000 hits in a window ending at `time`.

    Query-string parameters (all required):
        time     -- window end in '%Y-%m-%d %H:%M' (seconds are appended)
        index    -- Elasticsearch index prefix
        path     -- query expression passed to getDataByIndex
        rangtime -- window length in minutes

    Returns {"message": {ip: count}} for IPs over the threshold,
    {"message": "empty value"} when a parameter is missing and
    {"message": "not found"} on lookup failure.
    """
    url = app.config.get('es_host')
    timeout = app.config.get('es_timeout')
    es_lib = elasticsearchLib(url, timeout)
    search_time = request.args.get('time')
    search_index = request.args.get('index')
    search_path = request.args.get('path')
    search_rangtime = request.args.get('rangtime')
    # Idiom fix: compare against None with 'is', not '=='.
    if (search_rangtime is None or search_path is None
            or search_time is None or search_index is None):
        return json.dumps({"message": "empty value"})
    # Normalise 'YYYY-mm-dd HH:MM' to the 'YYYY.mm.dd HH:MM:SS' form the
    # ES helper expects (round-trip through epoch seconds).
    search_time = str(search_time) + ":00"
    search_time = time.mktime(time.strptime(search_time, '%Y-%m-%d %H:%M:%S'))
    search_time = time.strftime('%Y.%m.%d %H:%M:%S', time.localtime(int(search_time)))
    info = "serach ip : index->%s ,path->%s"%(search_index,search_path)
    wirte_log_to_file(info)
    data = es_lib.getDataByIndex(index=str(search_index), path=str(search_path),
                                 rangeMinute=int(search_rangtime),
                                 sort=True, lte=search_time)
    print(data)
    if data == "notfound":
        return json.dumps({"message": "not found"})
    # Keep only IPs above the hard-coded 5000-hit threshold; `data` is a
    # list of (ip, count) pairs.
    suspension_ips = {ip: count for ip, count in data if count > 5000}
    message = {"message": suspension_ips}
    print(message)
    return json.dumps(message)
Пример #8
0
    def get_all_china_visit(self, index, rangeminute=1, total=1000):
        """Aggregate visits from China by city and split the cities into
        low/middle/high traffic tiers (for map rendering).

        Parameters:
            index       -- Elasticsearch index (pattern) to query
            rangeminute -- look-back window in minutes (default 1)
            total       -- unused; kept for backward compatibility

        Returns a dict:
            {"is_success": True, "msg": "suceess", "total": <sum of counts>,
             "data": {"low": [...], "middle": [...], "high": [...]}}
        where each item is {"name": city, "value": count,
        "geoCoord": [longitude, latitude]}.  On any error returns only
        {"is_success": False}.
        """
        # Current time as epoch milliseconds; _range() gives the lower bound.
        lte = int(str(time.time()).split(".")[0]) * 1000
        gte = self._range(lte, rangeminute)
        body = {
            "size": 0,
            "query": {
                "filtered": {  # legacy ES 1.x/2.x query DSL
                    "query": {
                        "query_string": {
                            "query": "geoip.country_name:China",
                            "analyze_wildcard": True
                        }
                    },
                    "filter": {
                        "bool": {
                            "must": [
                                {
                                    "range": {
                                        "@timestamp": {
                                            "gte": gte,
                                            "lte": lte,
                                            "format": "epoch_millis"
                                        }
                                    }
                                }
                            ],
                            "must_not": []
                        }
                    }
                }
            },
            "aggs": {
                "2": {
                    "terms": {
                        "field": "geoip.city_name",
                        "size": 0,
                        "order": {"_count": "desc"}
                    },
                    "aggs": {
                        "3": {
                            "terms": {
                                "field": "geoip.longitude",
                                "size": 0,
                                "order": {"_count": "desc"}
                            },
                            "aggs": {
                                "4": {
                                    "terms": {
                                        "field": "geoip.latitude",
                                        "size": 0,
                                        "order": {"_count": "desc"}
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        print(body)
        info = "get_all_china_visit : index->%s body->%s"%(index,str(body))
        wirte_log_to_file(info)
        high_item = []
        middle_item = []
        min_item = []
        all_count = {}
        first_item_count = []
        final_data = {}
        try:
            all_data = self.es.search(index=index, body=body)
            all_data = all_data[u'aggregations'][u'2'][u'buckets']
            for i in range(0, len(all_data)):
                first_item_count.append(all_data[i]['doc_count'])
                all_count[i] = all_data[i]['doc_count']
            # Tier thresholds: the distinct per-city counts at roughly the
            # 33rd and 67th percentile.
            item_count = sorted(set(first_item_count))
            item_len = len(item_count)
            middle_count = item_count[int(item_len * 0.33)]
            max_count = item_count[int(item_len * 0.67)]
            for i in range(0, len(all_data)):
                value = {}
                value['name'] = all_data[i]['key']
                value['value'] = all_data[i]['doc_count']
                # Most frequent longitude, then its most frequent latitude.
                geoCoord = []
                geoCoord.append(all_data[i]['3']['buckets'][0]['key'])
                geoCoord.append(all_data[i]['3']['buckets'][0]['4']['buckets'][0]['key'])
                value['geoCoord'] = geoCoord
                # BUG FIX: counts exactly equal to a threshold previously
                # matched no branch and were silently dropped; the middle
                # and low tiers now include their upper bound.
                if value['value'] > max_count:
                    high_item.append(value)
                elif value['value'] > middle_count:
                    middle_item.append(value)
                elif value['value'] > 0:
                    min_item.append(value)
            total_count = sum(e for k, e in all_count.items())
            final_data['is_success'] = True
            sum_data = {}
            sum_data['low'] = min_item
            sum_data['middle'] = middle_item
            sum_data['high'] = high_item
            final_data['data'] = sum_data
            final_data['msg'] = 'suceess'  # (sic) kept byte-identical for callers
            final_data['total'] = total_count
            return final_data
        except Exception as e:
            # Any failure (ES error, empty bucket list indexing) yields a
            # generic failure result, as before.
            final_data['is_success'] = False
            return final_data
Пример #9
0
 def getDataByIndex(self, index, path, rangeMinute=5, sort=True, lte=None):
     """Aggregate hit counts per client IP for one day's index.

     Parameters:
         index       -- index prefix; the day suffix '-%Y.%m.%d' is appended
         path        -- accepted for compatibility; not used in the query body
         rangeMinute -- look-back window in minutes (default 5)
         sort        -- True sorts descending by count, False ascending
         lte         -- window end as '%Y.%m.%d %H:%M:%S'; defaults to now

     Returns a list of (ip, count) tuples sorted by count, or the string
     "notfound" on any error.
     """
     # BUG FIX: the original default argument was
     # datetime.datetime.now().strftime(...), which Python evaluates ONCE at
     # function-definition time, so every defaulted call reused the process
     # start time.  Compute the default per call instead.
     if lte is None:
         lte = datetime.datetime.now().strftime('%Y.%m.%d %H:%M:%S')
     lte = self._formatTime(lte)
     lte = int(str(lte).split(".")[0]) * 1000  # epoch milliseconds
     index_time = time.strftime('%Y.%m.%d', time.localtime(lte / 1000))
     body = {
         "query": {
             "filtered": {  # legacy ES 1.x/2.x query DSL
                 "query": {
                     "query_string": {
                         "query": "*",
                         "analyze_wildcard": True
                     }
                 },
                 "filter": {
                     "bool": {
                         "must": [
                             {
                                 "query": {
                                     "query_string": {
                                         "query": "*",
                                         "analyze_wildcard": True
                                     }
                                 }
                             },
                             {
                                 "range": {
                                     "@timestamp": {
                                         "gte": self._range(lte, rangeMinute),
                                         "lte": lte,
                                         "format": "epoch_millis"
                                     }
                                 }
                             }
                         ],
                         "must_not": []
                     }
                 }
             }
         },
         "size": 0,
         "aggs": {
             "2": {
                 "terms": {
                     "field": "geoip.ip",
                     "size": 10,
                     "order": {"_count": "desc"}
                 }
             }
         }
     }
     print(body)
     index = index + "-%s"%index_time
     print(index)
     info = "get ip_data: index->%s,body->%s"%(index,str(body))
     wirte_log_to_file(info)
     try:
         all_data = self.es.search(index=index, body=body)
         ip_sum = all_data['aggregations']['2']['buckets']
         ip_data = {}
         for bucket in ip_sum:
             ip_data[bucket['key']] = bucket['doc_count']
         # BUG FIX: dict.iteritems() does not exist in Python 3; items()
         # behaves identically here.
         ip_data = sorted(ip_data.items(), key=lambda x: x[1], reverse=sort)
         info = "get all_ip: %s"%(str(ip_data))
         wirte_log_to_file(info)
         return ip_data
     except Exception as e:
         return "notfound"
Пример #10
0
 def get_all_website_name(self, index, is_all=True, rangeminute=20):
     """Collect the distinct 'site' values seen during the last
     `rangeminute` minutes.

     Parameters:
         index       -- Elasticsearch index (pattern) to query
         is_all      -- when True, prepend a literal "all" entry
         rangeminute -- look-back window in minutes (default 20)

     Returns a list of site names, or the string 'not found' on error.
     """
     # Window bounds in epoch milliseconds.
     now_millis = int(str(time.time()).split(".")[0]) * 1000
     window_start = self._range(now_millis, rangeminute)
     query_body = {
         "size": 0,
         "query": {
             "filtered": {
                 "query": {
                     "query_string": {
                         "query": "*",
                         "analyze_wildcard": True
                     }
                 },
                 "filter": {
                     "bool": {
                         "must": [
                             {
                                 "range": {
                                     "@timestamp": {
                                         "gte": window_start,
                                         "lte": now_millis,
                                         "format": "epoch_millis"
                                     }
                                 }
                             }
                         ],
                         "must_not": []
                     }
                 }
             }
         },
         "aggs": {
             "2": {
                 "terms": {
                     "field": "site",
                     "size": 200,
                     "order": {"_count": "desc"}
                 }
             }
         }
     }
     print(query_body)
     wirte_log_to_file("all_website_name : index->%s body->%s" % (index, str(query_body)))
     try:
         response = self.es.search(index=index, body=query_body)
         buckets = response['aggregations']['2']['buckets']
         site_names = ["all"] if is_all else []
         site_names.extend(bucket['key'] for bucket in buckets)
         wirte_log_to_file("all_website_name : %s" % (str(site_names)))
         return site_names
     except Exception as e:
         return 'not found'
Пример #11
0
    def search_api_relatime(self, index, website_name, api_name, type):
        """Build a date-histogram of request counts for one API of one site
        over the window implied by `type`.

        Parameters:
            index        -- Elasticsearch index pattern
            website_name -- value matched against the 'site' field
            api_name     -- value matched against the 'path' field
            type         -- key into self.rangeSpan / self.timsSpan
                            (e.g. '15m', '1h', '1d')

        Returns a list of "timestamp?count" strings, or the string
        'not found' on any error.
        """
        # Window bounds in epoch milliseconds.
        lte = int(str(time.time()).split(".")[0]) * 1000
        gte = self._range(lte, self.rangeSpan[type])
        body = {
            "size": 0,
            "query": {
                "filtered": {  # legacy ES 1.x/2.x query DSL
                    "query": {
                        "query_string": {
                            "analyze_wildcard": True,
                            "query": 'site:%s AND path:%s'%(website_name,api_name)
                        }
                    },
                    "filter": {
                        "bool": {
                            "must": [
                                {
                                    "range": {
                                        "@timestamp": {
                                            "gte": gte,
                                            "lte": lte,
                                            "format": "epoch_millis"
                                        }
                                    }
                                }
                            ],
                            "must_not": []
                        }
                    }
                }
            },
            "aggs": {
                "2": {
                    "date_histogram": {
                        "field": "@timestamp",
                        "interval": self.timsSpan[type],
                        "time_zone": "Asia/Shanghai",
                        "min_doc_count": 1,
                        "extended_bounds": {
                            "min": gte,
                            "max": lte
                        }
                    }
                }
            }
        }
        print(body)
        info = "api_relatime : index->%s body->%s"%(index,str(body))
        wirte_log_to_file(info)
        try:
            all_data = self.es.search(index=index, body=body)
            all_data = all_data['aggregations']['2']['buckets']
            api_count = []
            for bucket in all_data:
                # Idiom fix: membership tests instead of chains of '=='.
                # Short windows show only the time of day, long windows the
                # full date.
                if type in ('15m', '30m', '1h', '4h', '5m', '1m'):
                    key = time.strftime("%H:%M:%S", time.localtime(bucket['key'] / 1000))
                elif type in ('1d', '4d', '7d'):
                    key = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(bucket['key'] / 1000))
                api_count.append(key + '?' + str(bucket['doc_count']))
            info = "api_relatime : %s"%(str(api_count))
            wirte_log_to_file(info)
            return api_count
        except Exception as e:
            return 'not found'
Пример #12
0
 def get_realtime_api(self, index, api_name, start, end, type):
     """Build a visit-count histogram for `api_name` between two timestamps.

     Parameters:
         index    -- Elasticsearch index pattern
         api_name -- value matched against the 'site' field ('*' for all)
         start    -- window start in '%Y-%m-%d %H:%M:%S'
         end      -- window end in '%Y-%m-%d %H:%M:%S' (last second shaved)
         type     -- interval key into self.timsSpan

     Returns a list of "timestamp?count" strings.  Unlike the sibling
     search methods, ES errors are NOT caught here and propagate.
     """
     # Window bounds in epoch milliseconds.
     lte = self._formatTime(end, "%Y-%m-%d %H:%M:%S")
     lte = int(str(lte).split(".")[0]) * 1000
     lte = lte - 1000  # make the upper bound exclude the final second
     print(lte)
     gte = self._formatTime(start, "%Y-%m-%d %H:%M:%S")
     gte = int(str(gte).split(".")[0]) * 1000
     print(gte)
     body = {
         "size": 0,
         "sort": [
             {
                 "@timestamp": {
                     "order": "desc",
                     "unmapped_type": "boolean"
                 }
             }
         ],
         "query": {
             "filtered": {  # legacy ES 1.x/2.x query DSL
                 "query": {
                     "query_string": {
                         "analyze_wildcard": True,
                         "query": "site:%s"%api_name
                     }
                 },
                 "filter": {
                     "bool": {
                         "must": [
                             {
                                 "range": {
                                     "@timestamp": {
                                         "gte": gte,
                                         "lte": lte,
                                         "format": "epoch_millis"
                                     }
                                 }
                             }
                         ],
                         "must_not": []
                     }
                 }
             }
         },
         "highlight": {
             "pre_tags": ["@kibana-highlighted-field@"],
             "post_tags": ["@/kibana-highlighted-field@"],
             "fields": {"*": {}},
             "require_field_match": False,
             "fragment_size": 2147483647
         },
         "aggs": {
             "2": {
                 "date_histogram": {
                     "field": "@timestamp",
                     "interval": self.timsSpan[type],
                     "time_zone": "Asia/Shanghai",
                     "min_doc_count": 0,
                     "extended_bounds": {
                         "min": gte,
                         "max": lte
                     }
                 }
             }
         },
         "fields": ["*", "_source"],
         "script_fields": {},
         "fielddata_fields": ["@timestamp"]
     }
     print(body)
     info = "get realtime_api : index->%s,body->%s"%(index,str(body))
     wirte_log_to_file(info)
     all_data = self.es.search(index=index, body=body)
     all_data = all_data['aggregations']['2']['buckets']
     # Idiom fix: membership tests instead of '==' chains, hoisted out of
     # the loop since `type` is constant across buckets.  Short windows
     # show only the time of day, long windows the full date.
     if type in ('15m', '30m', '1h', '4h', '5m', '1m'):
         fmt = "%H:%M:%S"
     elif type in ('1d', '4d', '7d'):
         fmt = "%Y-%m-%d %H:%M:%S"
     api_count = []
     print(all_data)
     for bucket in all_data:
         key = time.strftime(fmt, time.localtime(bucket['key'] / 1000))
         api_count.append(key + '?' + str(bucket['doc_count']))
     info = "realtime_api : %s"%(str(api_count))
     wirte_log_to_file(info)
     return api_count