# Reduce step: merge the per-cell partial tensors produced by the map step.
# NOTE(review): this function shadows the builtin `reduce`; kept as-is because
# callers elsewhere in the project may use this name.
def reduce(user_num, time_num, poi_num):
    """Accumulate all pickled partial tensors from data/step_two/ into one.

    For every (user i, time j, poi k) cell, loads the pickled tensor stored
    as "data/step_two/<i>_<j>_<k>.db" and folds it element-wise into the
    running result via three_order_tensor_add.

    :param user_num: size of the first (user) axis
    :param time_num: size of the second (time-slice) axis
    :param poi_num:  size of the third (POI) axis
    :return: a user_num x time_num x poi_num nested-list tensor holding the
             element-wise sum of all partial tensors
    """
    res = [[[0 for i in range(poi_num)]
            for j in range(time_num)]
           for k in range(user_num)]
    for i in range(user_num):
        for j in range(time_num):
            for k in range(poi_num):
                file_name = str(i) + "_" + str(j) + "_" + str(k) + ".db"
                file_path = "data/step_two/" + file_name
                print(file_path)
                # 'with' guarantees the handle is closed (the original leaked
                # one open file per tensor); 'rb' because pickle data is
                # binary and text mode corrupts it on Windows.
                with open(file_path, 'rb') as tensor_tensor_db:
                    tensor_tensor = pickle.load(tensor_tensor_db)
                res = three_order_tensor_add(res, tensor_tensor)
    return res


if __name__ == '__main__':
    # beijing = (39.433333, 41.05, 115.416667, 117.5)
    # haidian = (39.883333, 40.15, 116.05, 116.383333)
    region = (39.883333, 40.05, 116.05, 116.383333)
    filter_count = 600
    zero_adjustment = True
    time_num = settings.TIME_SLICE
    data, axis_users, axis_pois, check_data = init_data(region, filter_count)
    user_num = len(axis_users)
    poi_num = len(axis_pois)
    # presumably a project-local `map` step defined elsewhere in this file,
    # not the builtin — TODO confirm against the full module.
    map(user_num, time_num, poi_num)
for k in range(poi_num): res_tensor[i][j][k] /= sum elif strategy == "all": res_tensor = three_tensor_hadarmard(1/three_order_tensor_first_norm(res_tensor), res_tensor) else: raise return res_tensor if __name__ == '__main__': # beijing = (39.433333, 41.05, 115.416667, 117.5) # haidian = (39.883333, 40.15, 116.05, 116.383333) region = (39.883333, 40.05, 116.05, 116.383333) filter_count = 600 data, axis_users, axis_pois = init_data(region, filter_count) for key in data.keys(): print "用户" + str(key) + "序列为" + str(data[key]) tensor_list = {} poi_num = len(axis_pois) for index in range(len(data.keys())): temp = data[index] tensor_list[index] = build_fouth_order_transition_tensor(temp, poi_num) for user_index in range(len(axis_users)): print sparsity(tensor_list[user_index]) print check_fourth_order_transition_tensor(tensor_list[user_index]) # 等价关系 print "transition: ", sparsity(build_fouth_order_transition_tensor(data[1], poi_num))
for j in range(time_num): for k in range(poi_num): tensor[i][j][k] /= sum return tensor if __name__ == '__main__': # beijing = (39.433333, 41.05, 115.416667, 117.5) # haidian = (39.883333, 40.15, 116.05, 116.383333) region = (39.883333, 40.05, 116.05, 116.383333) filter_count = 600 alpha = 0.8 alpha_shift = 0.1 data, axis_users, axis_pois, check_data = init_data(region, filter_count) user_num = len(axis_users) time_num = settings.TIME_SLICE poi_num = len(axis_pois) transition_tensor = mtt(data, user_num, poi_num) transition_tensor2 = inreducible_tensor(transition_tensor, user_num, time_num, poi_num, alpha) transition_tensor3 = mtt(data, user_num, poi_num, zero_adjustment=False) # equal_all_sum_one: equal # init_tensor1 = [[[1/(poi_num * time_num * user_num) for i in range(poi_num)] for j in range(time_num)] for k in range(user_num)] # # random_all_sum_one: no zero element # temp_tensor = [[[random.choice([1, 2, 3, 100]) for i in range(poi_num)] for j in range(time_num)] for k in range(user_num)] # init_tensor2 = three_tensor_hadarmard(1/three_order_tensor_first_norm(temp_tensor), temp_tensor) # # init_tensor3 = [[[0 for i in range(poi_num)] for j in range(time_num)] for k in range(user_num)]
res_tensor[i][j][k] /= sum elif strategy == "all": res_tensor = three_tensor_hadarmard( 1 / three_order_tensor_first_norm(res_tensor), res_tensor) else: raise return res_tensor if __name__ == '__main__': # beijing = (39.433333, 41.05, 115.416667, 117.5) # haidian = (39.883333, 40.15, 116.05, 116.383333) region = (39.883333, 40.05, 116.05, 116.383333) filter_count = 600 data, axis_users, axis_pois = init_data(region, filter_count) for key in data.keys(): print "用户" + str(key) + "序列为" + str(data[key]) tensor_list = {} poi_num = len(axis_pois) for index in range(len(data.keys())): temp = data[index] tensor_list[index] = build_fouth_order_transition_tensor(temp, poi_num) for user_index in range(len(axis_users)): print sparsity(tensor_list[user_index]) print check_fourth_order_transition_tensor(tensor_list[user_index]) # 等价关系 print "transition: ", sparsity(