Example #1
0
def analysis():
	print(">>> Analyzing")
	# Date tag (DD_MM) used to name the input and output directories
	DATE = datetime.datetime.now().strftime('%d_%m')
	make_sure_path_exists(PATH+"ground_truth_realistic/"+DATE)
	make_sure_path_exists(PATH+"ground_truth_realistic/"+DATE+"_analyzed")
	files = glob.glob(PATH+"ground_truth_realistic/"+DATE+"/*.json")
	if len(files) == 0:
		print("No files in the directory. Program ended.")
		return 1
	elif len(files) == 1:
		#print("Analyzed file: ", files)
		with open(files[0]) as side_a:
			a = json.load(side_a)
		# Hour/minute extracted from the file path (hard-coded offsets for Dani's machine)
		h = files[0][80:82]
		m = files[0][83:85]
		TIME = files[0][80:85]
		# Andre's offsets
		#h = files[0][84:86]
		#m = files[0][87:89]
		#TIME = files[0][84:89]
		print("Execution @"+h+":"+m)
		try:
			en, ex, real_en, real_ex = jp.just_processing(a, a, 0, 0, use, TIME)
			# Publish the entrance/exit counts over MQTT
			data = {
					"in" : str(ex),
					"out": str(en)
			}
			data_j = json.dumps(data)
			client.publish(TOPIC, data_j, qos=1)
		except TypeError:
			print("--------------------------------------------")
			print("----------------- ERROR --------------------")
			print("------- Pass to the next 5 minutes ---------")
		# Move the processed file into the "_analyzed" directory (hard-coded offset strips the directory prefix)
		os.rename(PATH+"ground_truth_realistic/"+DATE+"/"+files[0][67:], PATH+"ground_truth_realistic/"+DATE+"_analyzed/"+files[0][67:])
		return 1
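The helper make_sure_path_exists and the MQTT client used above are defined outside this snippet. A minimal sketch of what they might look like, assuming an os.makedirs-based helper and a standard paho-mqtt connection; the broker address, port, and TOPIC value are assumptions, not taken from the project:

import os
import paho.mqtt.client as mqtt

def make_sure_path_exists(path):
    # Create the directory (and any parents) if it does not exist yet.
    os.makedirs(path, exist_ok=True)

# Hypothetical broker settings; the real values are not shown in the snippets.
TOPIC = "smartgate/flux"
try:
    client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION1)  # paho-mqtt >= 2.0
except AttributeError:
    client = mqtt.Client()  # paho-mqtt 1.x
client.connect("localhost", 1883)
client.loop_start()  # background network loop so qos=1 publishes get acknowledged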
Example #2
0
    with open(f) as side_a:
        a = json.load(side_a)
    # TOF ANALYSIS
    print("--------------- TOF EXECUTION ---------------\n")
    # Dani's path offsets: hour/minute extracted from the file name
    h = f[80:82]
    m = f[83:85]
    TIME = f[80:85]
    # cluster path offsets
    #h = f[66:68]
    #m = f[69:71]
    #TIME = f[66:71]
    # Andre's path offsets (with sampling)
    #h = f[104:106]
    #m = f[107:109]
    #TIME = f[104:109]
    print("Execution @" + h + ":" + m)
    try:
        en, ex, real_en, real_ex = jp.just_processing(a, a, 0, 0, use, TIME)
        print("---------------------------------------------\n")

    except TypeError:
        print("--------------------------------------------")
        print("----------------- ERROR --------------------")
        print("------- Pass to the next 5 minutes ---------")
    # Append the counts for this time slot as one row of the daily CSV
    row = [h + ":" + m, real_en, en, real_ex, ex]
    with open(OUTPUT_PATH + DATE + "_all_flux.csv", 'a') as partial:
        writer = csv.writer(partial, delimiter=';')
        writer.writerow(row)
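The hour/minute offsets above are hard-coded per machine, which is why the same block carries three commented-out index sets. As an illustrative alternative (not part of the project code), the HH_MM tag could be pulled from the file name itself, making the slice positions irrelevant; file names such as side_a_10_19.json follow the pattern shown elsewhere in these snippets:

import os
import re

def time_from_filename(path):
    # Pull the HH_MM tag from the basename instead of slicing the
    # absolute path at fixed offsets.
    match = re.search(r'(\d{2})_(\d{2})\.json$', os.path.basename(path))
    if match is None:
        raise ValueError("no HH_MM tag in " + path)
    h, m = match.group(1), match.group(2)
    return h, m, h + "_" + m

# h, m, TIME = time_from_filename(f)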
Example #3
0
results = []

with open(DATA_INPUT_0) as side_a:
    a = json.load(side_a)
with open(DATA_INPUT_1) as side_b:
    b = json.load(side_b)

en = 0
ex = 0
d = 0
v = 0

use = parse_args()

en, ex, real_en, real_ex = jp.just_processing(a, b, d, v, use,
                                              ground_truth_time)

# Ground-truth vs. predicted counts for this time slot
actual_values = [real_en, real_ex]
pred = [en, ex]
temp = []
temp.append(ground_truth_time)
temp.append(real_en)
temp.append(en)
temp.append(real_ex)
temp.append(ex)
# RMSE and MAE over the two-element (entrances, exits) vectors
# (sqrt from math, mean_squared_error/mean_absolute_error from sklearn.metrics)
temp.append("%.2f" % sqrt(mean_squared_error(actual_values, pred)))
temp.append("%.2f" % mean_absolute_error(actual_values, pred))
if actual_values[0] != 0 and actual_values[1] != 0:
    # Percentage accuracy: 100 minus the relative error of each count
    acc_in = 100 - (abs(en - actual_values[0]) / actual_values[0] * 100)
    acc_out = 100 - (abs(ex - actual_values[1]) / actual_values[1] * 100)
    temp.append(acc_in)
    temp.append(acc_out)
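For intuition, a small worked example of how the two error metrics and the accuracy figures above behave; the counts are made up for illustration:

from math import sqrt
from sklearn.metrics import mean_absolute_error, mean_squared_error

# Hypothetical counts: 5 real entrances / 3 real exits vs. 4 / 3 predicted.
actual_values = [5, 3]
pred = [4, 3]

rmse = sqrt(mean_squared_error(actual_values, pred))  # sqrt(((5-4)^2 + (3-3)^2)/2) ~= 0.71
mae = mean_absolute_error(actual_values, pred)        # (|5-4| + |3-3|)/2 = 0.5
acc_in = 100 - (abs(pred[0] - actual_values[0]) / actual_values[0] * 100)   # 80.0
acc_out = 100 - (abs(pred[1] - actual_values[1]) / actual_values[1] * 100)  # 100.0
print(rmse, mae, acc_in, acc_out)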
Example #4
0
PATH = "/home/daniubo/Scrivania/Git/smartGate/"
#PATH = "/Users/wunagana/Documents/GitHub/smartGate/"
TIME = "10_19"
DATA_INPUT_A = PATH + "ground_truth_realistic/"+ DATE + "/side_a_" + TIME + ".json"
DATA_INPUT_B = PATH + "ground_truth_realistic/"+ DATE + "/side_b_" + TIME + ".json"

with open(DATA_INPUT_A) as side_a:
	a = json.load(side_a)
with open(DATA_INPUT_B) as side_b:
	b = json.load(side_b)

if PIR:
	# PIR sensor analysis with its tuned span/delta parameters
	print("--------------- PIR EXECUTION ---------------\n")
	opt_span = 900
	opt_delta = 1000
	jp.just_processing(a, b, opt_delta, opt_span, use, TIME)
	print("---------------------------------------------\n")

if TOF and INFRA:
	# MATCHING ANALYSIS: ToF and infrared streams processed together
	opt_enough_zero = 4
	opt_delta = 850
	print("--------------- MATCH EXECUTION ---------------\n")
	en, ex, real_en, real_ex = jp.just_processing(a, b, opt_delta, opt_enough_zero, use, TIME)
	print("---------------------------------------------\n")

elif INFRA:
	# Infrared-only analysis
	print("--------------- INF EXECUTION ---------------\n")
	opt_enough_zero = 3
	opt_delta = 850
	jp.just_processing(a, b, opt_delta, opt_enough_zero, use, TIME)
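PIR, TOF, INFRA, DATE, and use all come from code outside this excerpt; parse_args appears in another snippet but its body is not shown. A minimal argparse sketch of how such flags could be wired up, purely as an assumption about the missing setup (flag names and attributes are hypothetical):

import argparse

def parse_args():
    # Hypothetical CLI: which sensor streams to analyze and for which day.
    parser = argparse.ArgumentParser(description="smartGate flux analysis")
    parser.add_argument("--pir", action="store_true", help="run the PIR analysis")
    parser.add_argument("--tof", action="store_true", help="run the ToF analysis")
    parser.add_argument("--infra", action="store_true", help="run the infrared analysis")
    parser.add_argument("--date", help="DD_MM tag of the recording day")
    return parser.parse_args()

# use = parse_args()
# PIR, TOF, INFRA, DATE = use.pir, use.tof, use.infra, use.date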
Example #5
0
	delta_jump = 50
	OUTPUT_PATH = PATH+"output/"+ground_truth_date+"/inf/"+ground_truth_time+"_inf_results.csv"
	OUTPUT_PATH_PARTIAL = PATH+"output/"+ground_truth_date+"/inf/"+ground_truth_time+"_inf_partials.csv"


with open(DATA_INPUT_A) as side_a:
	a = json.load(side_a)
with open(DATA_INPUT_B) as side_b:
	b = json.load(side_b)

en = 0
ex = 0

# Grid search over the delta and var parameters, stepping by delta_jump and
# var_jump, scoring each pair against the ground truth for the same window.
for d in range(delta[0], delta[1]+delta_jump, delta_jump):
	for v in range(var[0], var[1]+var_jump, var_jump):
		en, ex, min_ts, max_ts = jp.just_processing(a, b, d, v, use, ground_truth_time)
		temp = []
		# Ground-truth entrances/exits inside the processed time window
		REAL_IN, REAL_OUT = f.get_ground_truth(PATH_GT, DATE, DATA, min_ts, max_ts)
		actual_values = [len(REAL_IN), len(REAL_OUT)]
		temp.append(len(REAL_IN))
		temp.append(en)
		temp.append(len(REAL_OUT))
		temp.append(ex)
		pred = [en, ex]
		# RMSE and MAE over the (entrances, exits) pair
		temp.append("%.2f" % sqrt(mean_squared_error(actual_values, pred)))
		temp.append("%.2f" % mean_absolute_error(actual_values, pred))
		if actual_values[0] != 0 and actual_values[1] != 0:
			# Percentage accuracy: 100 minus the relative error of each count
			acc_in = 100-(abs(en-actual_values[0])/actual_values[0] * 100)
			acc_out = 100-(abs(ex-actual_values[1])/actual_values[1] * 100)
			temp.append(acc_in)
			temp.append(acc_out)
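The sweep above scores every (delta, var) pair, but the excerpt ends before the rows are written out or compared. As an illustrative follow-up (not taken from the project), the scored pairs could be collected and the best one picked by minimum RMSE; the values below are hypothetical:

# Hypothetical scores collected during the sweep: (delta, var, rmse) per pair.
scores = [
    (800, 3, 1.58),
    (850, 3, 0.71),
    (850, 4, 1.00),
]

# Pick the parameter pair with the lowest RMSE against the ground truth.
best_delta, best_var, best_rmse = min(scores, key=lambda row: row[2])
print("best pair:", best_delta, best_var, "rmse:", best_rmse)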