Example #1
import json
import time

# ts2datetime, datetime2ts, es_cluster, redis_activity, RUN_TYPE, WORK_TYPE and
# the index prefixes act_index_pre / sen_act_index_pre are assumed to come from
# the project's configuration and utility modules.
def get_user_trend(uid):
    if RUN_TYPE:
        now_ts = time.time()
        now_date = ts2datetime(now_ts)  # 2015-09-22
    else:
        now_date = "2013-09-08"
    ts = datetime2ts(now_date)

    #test
    #ts = datetime2ts('2013-09-08')
    timestamp = ts
    return_results = dict()
    return_sensitive_results = {}
    for i in range(1, 8):
        ts = timestamp - 24 * 3600 * i
        date = ts2datetime(ts)
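        # WORK_TYPE == 0: read the per-day activity indices from Elasticsearch,
        # otherwise fall back to the per-day Redis hashes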
        if WORK_TYPE == 0:
            index_name = act_index_pre + date
            sensitive_index_name = sen_act_index_pre + date
            exist_bool = es_cluster.indices.exists(index=index_name)
            sensitive_exist_bool = es_cluster.indices.exists(
                index=sensitive_index_name)
            if exist_bool:
                try:
                    tmp_act_result = es_cluster.get(index=index_name,
                                                    doc_type="activity",
                                                    id=uid)['_source']
                    results = tmp_act_result['activity_dict']
                except:
                    results = dict()
            else:
                results = dict()
            if sensitive_exist_bool:
                try:
                    tmp_sensitive_act_result = es_cluster.get(
                        index=sensitive_index_name,
                        doc_type="sensitive_activity",
                        id=uid)['_source']
                    sensitive_results = tmp_sensitive_act_result[
                        'sensitive_activity_dict']
                except:
                    sensitive_results = dict()
            else:
                sensitive_results = dict()
        else:
            results = redis_activity.hget('activity_' + str(ts), uid)
            sensitive_results = redis_activity.hget(
                'sensitive_activity_' + str(ts), uid)
        if results:
            result_dict = json.loads(results)
            # keys are 15-minute (900 s) segment indices within the day
            for key in result_dict:
                return_results[int(key) * 900 + ts] = result_dict[key]
        if sensitive_results:
            sensitive_result_dict = json.loads(sensitive_results)
            for key in sensitive_result_dict:
                return_sensitive_results[int(key) * 900 +
                                         ts] = sensitive_result_dict[key]

    trend_dict = {}
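    # sum the 900 s segments into six 4-hour windows (16 segments each) per day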
    for i in range(1, 8):
        ts = timestamp - i * 24 * 3600
        for j in range(0, 6):
            base_time = ts + j * 900 * 16
            num = 0
            for k in range(16):
                seg_time = base_time + k * 900
                if seg_time in return_results:
                    num += return_results[seg_time]
            trend_dict[base_time] = num

    sensitive_trend_dict = {}
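    # the same windowing, restricted to sensitive activity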
    for i in range(1, 8):
        ts = timestamp - i * 24 * 3600
        for j in range(0, 6):
            base_time = ts + j * 900 * 16
            num = 0
            for k in range(16):
                seg_time = base_time + k * 900
                if seg_time in return_sensitive_results:
                    num += return_sensitive_results[seg_time]
            sensitive_trend_dict[base_time] = num

    ordinary_key_set = set(trend_dict.keys())
    sensitive_key_set = set(sensitive_trend_dict.keys())
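    # fold the sensitive counts into the total trend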
    for key in sensitive_key_set:
        if key in ordinary_key_set:
            trend_dict[key] += sensitive_trend_dict[key]
        else:
            trend_dict[key] = sensitive_trend_dict[key]

    sorted_dict = sorted(trend_dict.items(), key=lambda x: x[0], reverse=False)
    sorted_sensitive_dict = sorted(sensitive_trend_dict.items(),
                                   key=lambda x: x[0],
                                   reverse=False)
    return [sorted_dict, sorted_sensitive_dict]  # total and sensitive
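
The function above keys a user's activity counts by 15-minute (900 s) segment index within each day and then sums those segments into six 4-hour windows (16 segments each) per day over the previous seven days. The sketch below reproduces just that bucketing arithmetic on made-up data; day_start and the segment counts are hypothetical values chosen for illustration, not project data.

# Minimal, self-contained sketch of the segment-to-window aggregation above.
# day_start and the segment counts are hypothetical example values.
day_start = 1378569600                   # 00:00 UTC of some day (unix ts)
segments = {"0": 3, "17": 5, "95": 1}    # segment index -> activity count

# expand segment indices to absolute timestamps, as get_user_trend does
per_segment = {int(k) * 900 + day_start: v for k, v in segments.items()}

# collapse the 96 daily segments into 6 four-hour windows of 16 segments each
trend = {}
for j in range(6):
    base_time = day_start + j * 900 * 16
    trend[base_time] = sum(per_segment.get(base_time + k * 900, 0)
                           for k in range(16))

print(sorted(trend.items()))  # six (window_start, summed_count) pairs
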
Example #3
import json
import time

# ts2datetime, datetime2ts, es_cluster, redis_ip, ip2geo, RUN_TYPE, WORK_TYPE
# and the index prefixes ip_index_pre / sen_ip_index_pre are assumed to come
# from the project's configuration and utility modules.
def get_user_geo(uid):
    results = []
    user_ip_result = {}  # ordinary ip counts
    user_sensitive_ip_result = {}  # sensitive ip counts
    if RUN_TYPE:
        now_ts = time.time()
        now_date = ts2datetime(now_ts)  # 2015-09-22
    else:
        now_date = "2013-09-08"
    ts = datetime2ts(now_date)

    for i in range(1, 8):
        ts = ts - 3600 * 24
        date = ts2datetime(ts)
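        # WORK_TYPE == 0: read the per-day IP indices from Elasticsearch,
        # otherwise fall back to the per-day Redis hashes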
        if WORK_TYPE == 0:
            index_name = ip_index_pre + str(date)
            sensitive_index_name = sen_ip_index_pre + str(date)
            exist_bool = es_cluster.indices.exists(index=index_name)
            sensitive_exist_bool = es_cluster.indices.exists(
                index=sensitive_index_name)
            if exist_bool:
                try:
                    tmp_ip_result = es_cluster.get(index=index_name,
                                                   doc_type="ip",
                                                   id=uid)['_source']
                    results = tmp_ip_result['ip_dict']
                except:
                    results = dict()
            else:
                results = dict()
            if sensitive_exist_bool:
                try:
                    tmp_sensitive_ip_result = es_cluster.get(
                        index=sensitive_index_name,
                        doc_type="sensitive_ip",
                        id=uid)['_source']
                    sensitive_results = tmp_sensitive_ip_result[
                        'sensitive_ip_dict']
                except:
                    sensitive_results = dict()
            else:
                sensitive_results = dict()
        else:
            results = redis_ip.hget('ip_' + str(ts), uid)
            sensitive_results = redis_ip.hget('sensitive_ip_' + str(ts), uid)
        if results:
            ip_results = json.loads(results)
            for ip in ip_results:
                if ip in user_ip_result:
                    user_ip_result[ip] += ip_results[ip]
                else:
                    user_ip_result[ip] = ip_results[ip]

        if sensitive_results:
            sensitive_ip_results = json.loads(sensitive_results)
            for ip in sensitive_ip_results:
                if ip in user_sensitive_ip_result:
                    user_sensitive_ip_result[ip] += sensitive_ip_results[ip]
                else:
                    user_sensitive_ip_result[ip] = sensitive_ip_results[ip]

    ordinary_key_set = set(user_ip_result.keys())
    sensitive_key_set = set(user_sensitive_ip_result.keys())
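    # fold the sensitive IP counts into the combined per-IP totals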
    for key in sensitive_key_set:
        if key in ordinary_key_set:
            user_ip_result[key] += user_sensitive_ip_result[key]
        else:
            user_ip_result[key] = user_sensitive_ip_result[key]

    user_geo_dict = ip2geo(user_ip_result)
    sorted_user_geo_dict = sorted(user_geo_dict.items(),
                                  key=lambda x: x[1],
                                  reverse=True)
    sensitive_user_geo_dict = ip2geo(user_sensitive_ip_result)
    sorted_sensitive_user_geo_dict = sorted(sensitive_user_geo_dict.items(),
                                            key=lambda x: x[1],
                                            reverse=True)

    # total and sensitive geo distributions, each sorted by count (descending)
    return [sorted_user_geo_dict, sorted_sensitive_user_geo_dict]
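
get_user_geo accumulates seven days of per-IP counts, adds the sensitive counts into the ordinary ones, and then maps the merged counts to regions with the project's ip2geo helper before sorting. The merge loop is equivalent to adding two counters; the short sketch below shows that equivalence on made-up IP counts (the addresses and numbers are hypothetical, and ip2geo itself is a project helper that is not reproduced here).

from collections import Counter

# hypothetical per-IP counts, standing in for the decoded Redis/ES values
ordinary_ips = {"1.2.3.4": 10, "5.6.7.8": 2}
sensitive_ips = {"5.6.7.8": 1, "9.9.9.9": 4}

# same result as the explicit key-by-key merge loop in get_user_geo
# (Counter addition keeps positive counts, which all of these are)
total_ips = dict(Counter(ordinary_ips) + Counter(sensitive_ips))
print(total_ips)  # counts: 1.2.3.4 -> 10, 5.6.7.8 -> 3, 9.9.9.9 -> 4

# get_user_geo then calls ip2geo() on the merged and on the sensitive-only
# dicts and sorts each result by count in descending order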