import json

import numpy as np

# Assumed module-level dependencies from the surrounding project:
# es_user_portrait (Elasticsearch client), index_sensing_task,
# mappings_sensing_task, forward_n, time_interval, initial_count.


def get_forward_numerical_info(task_name, ts, create_by):
    """Collect baseline statistics over the forward_n windows preceding ts."""
    results = []
    # Timestamps of the forward_n time windows immediately before ts.
    ts_series = [ts - i * time_interval for i in range(1, forward_n + 1)]

    # Check whether the per-task detail doc type already exists in ES;
    # if not, create its mapping before querying.
    doctype = create_by + "-" + task_name
    index_exist = es_user_portrait.indices.exists_type(index_sensing_task, doctype)
    if not index_exist:
        print("creating detail index mapping for new task")
        mappings_sensing_task(doctype)

    if ts_series:
        # Batch-fetch the historical window documents by their timestamp ids.
        search_results = es_user_portrait.mget(
            index=index_sensing_task, doc_type=doctype, body={"ids": ts_series}
        )["docs"]
        found_count = 0
        average_total = []
        average_negative = []
        for item in search_results:
            if item["found"]:
                temp = item["_source"]
                sentiment_dict = json.loads(temp["sentiment_distribution"])
                average_total.append(int(temp["weibo_total_number"]))
                # Sentiment categories "2"-"6" are summed as the negative count.
                average_negative.append(
                    int(sentiment_dict["2"])
                    + int(sentiment_dict["3"])
                    + int(sentiment_dict["4"])
                    + int(sentiment_dict["5"])
                    + int(sentiment_dict["6"])
                )
                found_count += 1

        if found_count > initial_count:
            # Enough history: report mean/std of total volume and of the
            # negative sentiment counts.
            number_mean = np.mean(average_total)
            number_std = np.std(average_total)
            sentiment_mean = np.mean(average_negative)
            sentiment_std = np.std(average_negative)
            results = [1, number_mean, number_std, sentiment_mean, sentiment_std]
        else:
            # Too few historical windows to build a baseline.
            results = [0]

    return results
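

# Usage sketch (illustrative only): the task name "example_task", the creator
# "admin", and the timestamp below are hypothetical, and forward_n,
# time_interval, and initial_count must come from the module's configuration.
if __name__ == "__main__":
    now_ts = 1461340800  # an interval-aligned timestamp, in seconds
    stats = get_forward_numerical_info("example_task", now_ts, "admin")
    if stats and stats[0] == 1:
        _, number_mean, number_std, sentiment_mean, sentiment_std = stats
        print("baseline weibo volume: mean=%s std=%s" % (number_mean, number_std))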