def estimate(self, do_se=False, do_print=False):
    """Estimate the model.

    Runs the point estimation routine and then either computes standard
    errors or stores NaN placeholders for them.

    Args:
        do_se: compute standard errors when True.
        do_print: forward verbose output to the estimator.
    """
    self.est = {}

    # a. point estimates
    estimate.estimate(self, do_print=do_print)

    # b. standard errors (NaN placeholders when skipped)
    if do_se:
        estimate.std_error(self)
    else:
        for name in self.theta:
            self.est[(name, 'se')] = np.nan
def on_message(mqttc, obj, msg):
    """MQTT callback: localize from an observation payload and publish the result.

    Parses the JSON payload, runs the estimator, then publishes the
    coordinate (with a server-side timestamp appended) back on the
    per-channel result topic.
    """
    payload = json.loads(msg.payload)
    channel = payload['channel']
    obs = payload['observation']
    alg = payload['ALG']
    Tx = payload['Tx']
    Ty = payload['Ty']

    print("estimate start")  # debug info
    stime = time.time()
    result = estimate.estimate(obs, alg, Tx, Ty)
    print(list(result))  # debug info
    print("time elapsed: " + str(time.time() - stime))

    # assemble the outgoing result message
    message = {
        'real_pos': payload['real_pos'],
        'msg_id': payload['msg_id'],
        'channel': channel,
        'coordinate': result,
    }

    # add timestamp when server is ready to publish
    timestamp = payload['timestamp']
    mid = time.mktime(datetime.datetime.now().timetuple())
    timestamp.append(str(mid))
    message['timestamp'] = timestamp

    mqttc.publish("localization/result/" + channel, json.dumps(message), qos=0)
def estimate_function():
    """Smoke-test the estimator on a hard-coded MESE payload and print timing."""
    payload = {
        'observation': [-120.40999999999991, -126.28899999999972, -125.26200000000017],
        'Tx': [1, 2, 3],
        'Ty': [5, 5, 5],
        'timestamp': [1563815078.0],
        'msg_id': 1,
        'real_pos': (1, 5),
        'ALG': 'MESE',
        'channel': 'one'
    }

    # unpack the fields the estimator needs
    channel = payload['channel']
    obs = payload['observation']
    alg = payload['ALG']
    Tx = payload['Tx']
    Ty = payload['Ty']

    stime = time.time()
    result = estimate.estimate(obs, alg, Tx, Ty)
    print(list(result))  # debug info
    print(str(time.time() - stime))
def app_estimate():
    """Flask endpoint: run the estimator on an uploaded CSV and stream back the result.

    Notifies Slack with the caller's IP, validates the upload's content
    type, archives both the input and the prediction CSVs with a
    timestamped name, and returns the predictions as a CSV attachment.
    """
    # resolve the client IP, preferring the proxy-forwarded address
    if request.headers.getlist("X-Forwarded-For"):
        ip = request.headers.getlist("X-Forwarded-For")[0]
    else:
        ip = request.remote_addr
    slack.notify(text=f"[ESTIMATE] : \n from {ip}")

    test_csv = request.files.get('test_csv')
    if test_csv is None:
        return abort(404)
    if test_csv.content_type not in ('text/csv', 'application/vnd.ms-excel'):
        return abort(
            404, {
                'code': 'Wrong file',
                'message': 'the file is not csv please send me csv'
            })
    test_df = pd.read_csv(test_csv)

    # preprocess -> predict -> postprocess pipeline
    test_encoded = preprocess(test_df)
    test_preds_df = estimate(test_encoded)
    submit_df = postprocess(test_preds_df, test_df)

    # archive the input and the output under timestamped names
    temp_time = time.time()
    test_df.to_csv(osp.join(DATA_DIR, f'dip_input_{temp_time}.csv'),
                   index=False,
                   header=True)
    submit_df.to_csv(osp.join(OUTPUT_DIR, f'dip_estimete_{temp_time}.csv'),
                     index=False,
                     header=True)

    # derive the attachment name from the uploaded filename
    output_name = f"estimate_{test_csv.filename}"
    if not output_name.endswith(".csv"):
        output_name += ".csv"

    textStream = StringIO()
    submit_df.to_csv(textStream, index=False, header=True)
    return Response(
        textStream.getvalue(),
        mimetype="text/csv",
        headers={"Content-disposition": f"attachment; filename={output_name}"})
T = (24-8)*20 #monthly waking hours Lc = 8*20 #monthly cc hours w_matrix = np.identity(10) times = 50 times_boot = 1000 #------------ CALL CLASSES, ESTIMATION SIM & BOOTSTRAP ------------# param0 = parameters.Parameters(betasw, betastd, betasn, sigma2n, sigma2w_reg, meanshocks, covshocks, T, Lc, alpha, gamma, times) model = util.Utility(param0, N, data) model_sim = simdata.SimData(N, model) model_boot= bstr.bootstrap(N, data) moments_boot = model_boot.boostr(times_boot) model_est = est.estimate(N, data, param0, moments_boot, w_matrix) results_estimate = model_est.simulation(model_sim) #------------ EXCEL TABLE ------------# workbook = xlsxwriter.Workbook('data/labor_choice.xlsx') worksheet = workbook.add_worksheet() worksheet.write('B2', 'parameter') worksheet.write('B3', 'labor choice') worksheet.write('B4', 'cc choice') worksheet.write('B5', 'test score') worksheet.write('B6', 'wage ec: beta_0')
def simulation(j):
    """Obtains one set of estimates.

    One bootstrap replication: resamples the data with replacement
    (seeded by j for reproducibility), rebuilds the model inputs,
    re-runs the structural estimation and returns the optimizer's
    parameter vector.

    Relies on module-level globals: df, betas_nelder, w_matrix,
    moments_vector (defined elsewhere in the file).
    """
    n = df.shape[0]
    # seed offset keeps replication j deterministic
    np.random.seed(j + 100)
    rev = df.sample(n, replace=True)

    # TREATMENT #
    treatment = np.array(rev['d_trat'])
    # EXPERIENCE #
    years = np.array(rev['experience'])
    # SCORE PORTFOLIO # (p1_0 = initial, p1 = current; both start equal)
    p1_0 = np.array(rev['score_port'])
    p1 = np.array(rev['score_port'])
    # SCORE TEST #
    p2_0 = np.array(rev['score_test'])
    p2 = np.array(rev['score_test'])
    # CATEGORY PORTFOLIO #
    catPort = np.array(rev['cat_port'])
    # CATEGORY TEST #
    catPrueba = np.array(rev['cat_test'])
    # TRAME #
    TrameI = np.array(rev['trame'])
    # TYPE SCHOOL #
    typeSchool = np.array(rev['typeschool'])
    # Priority # (note: 'priotity' spelling matches the rest of the file)
    priotity = np.array(rev['por_priority'])
    rural_rbd = np.array(rev['rural_rbd'])
    locality = np.array(rev['AsignacionZona'])

    #### PARAMETERS MODEL ####
    N = np.size(p1_0)
    # fixed weekly contract hours per teacher
    HOURS = np.array([44] * N)
    # unpack the Nelder-Mead parameter vector into the structural blocks;
    # the hard-coded zeros pin two alpha entries at 0
    alphas = [[
        betas_nelder[0], betas_nelder[1], 0, betas_nelder[2],
        betas_nelder[3], betas_nelder[4]
    ],
              [
                  betas_nelder[5], 0, betas_nelder[6], betas_nelder[7],
                  betas_nelder[8], betas_nelder[9]
              ]]
    betas = [
        betas_nelder[10], betas_nelder[11], betas_nelder[12],
        betas_nelder[13], betas_nelder[14]
    ]
    gammas = [betas_nelder[15], betas_nelder[16], betas_nelder[17]]

    # peso amounts converted to USD at a fixed 600 CLP/USD rate
    dolar = 600
    value = [14403, 15155]
    hw = [value[0] / dolar, value[1] / dolar]
    porc = [0.0338, 0.0333]
    #inflation adjustment: 2012Jan-2019Dec: 1.266
    qualiPesos = [72100 * 1.266, 24034 * 1.266, 253076, 84360]
    pro = [
        qualiPesos[0] / dolar, qualiPesos[1] / dolar, qualiPesos[2] / dolar,
        qualiPesos[3] / dolar
    ]
    progress = [14515, 47831, 96266, 99914, 360892, 138769, 776654, 210929]
    pol = [
        progress[0] / dolar, progress[1] / dolar, progress[2] / dolar,
        progress[3] / dolar, progress[4] / dolar, progress[5] / dolar,
        progress[6] / dolar, progress[7] / dolar
    ]
    pri = [47872, 113561]
    priori = [pri[0] / dolar, pri[1] / dolar]
    # AEP assignment amounts with a 1.111 adjustment factor
    Asig = [150000 * 1.111, 100000 * 1.111, 50000 * 1.111]
    AEP = [Asig[0] / dolar, Asig[1] / dolar, Asig[2] / dolar]

    param0 = parameters.Parameters(alphas, betas, gammas, hw, porc, pro, pol,
                                   AEP, priori)

    # build the estimator on the resampled data and optimize
    output_ins = est.estimate(N, years,param0, p1_0,p2_0,treatment, \
        typeSchool,HOURS,p1,p2,catPort,catPrueba,TrameI, priotity,rural_rbd,locality, \
        w_matrix,moments_vector)

    start_time = time.time()
    output = output_ins.optimizer()
    time_opt = time.time() - start_time
    print('Done in')
    print("--- %s seconds ---" % (time_opt))

    # return only the estimated parameter vector
    return output.x
def seif():
    """Run a SEIF (Sparse Extended Information Filter) SLAM pass over a
    logged dataset: dead-reckoning motion updates plus laser measurements,
    with live matplotlib plotting and periodic checkpointing to disk.

    Relies on module-level helpers defined elsewhere in the file:
    motion, estimate, correspondence, measurement, sparsification.
    """
    # --- load sensor logs --------------------------------------------
    filename1 = 'z.mat'        # laser observations
    filename2 = 'aa3_dr.mat'   # dead reckoning: speed / steering / time
    filename3 = 'aa3_lsr2.mat' # laser timestamps
    filename4 = 'c_5000.mat'   # precomputed correspondences
    include_dir = './data/'
    z_contents = sio.loadmat(include_dir + filename1)
    z = z_contents['z']
    z_contents = sio.loadmat(include_dir + filename2)
    speed = z_contents['speed'].ravel()
    steering = z_contents['steering'].ravel()
    # NOTE: local `time` array shadows any imported time module here;
    # wall-clock timing below uses the `timechr` alias instead.
    time = z_contents['time'].ravel()
    z_contents = sio.loadmat(include_dir + filename3)
    timeLsr = z_contents['TLsr'].ravel()
    L = size(timeLsr, 0)
    z_contents = sio.loadmat(include_dir + filename4)
    corresp = z_contents['corresp']
    Lc = size(corresp, 0)
    # free the large .mat payloads
    del z_contents

    # --- filter state ------------------------------------------------
    dt = 25e-3                          # control period (25 ms)
    G = csr_matrix((1, 1), dtype=bool)  # sparse link structure of the information matrix
    Q = diag([5.0, 0.02])               # NOTE(review): Q is never used in this function
    m = zeros(3)                        # state mean: robot pose, then landmark coords
    xi = zeros(3)                       # information vector
    omega = 10e4 * eye(3)               # information matrix (strong prior on initial pose)
    m0 = zeros(0)                       # indices of currently active landmarks
    #max active landmarks
    N = 20

    # --- live plot setup ---------------------------------------------
    plt.ion()
    fig, ax = plt.subplots(1, 1)
    ax.set_aspect('equal')
    ax.set_xlim(-100, 300)
    ax.set_ylim(-50, 350)
    # NOTE(review): Axes.hold was removed in matplotlib 2.0 -- this call
    # fails on modern matplotlib; confirm the pinned version.
    ax.hold(True)
    plt.show(False)
    plt.draw()
    #background = fig.canvas.copy_from_bbox(ax.bbox)
    line1 = ax.plot(0, 0, 'b-')[0]                         # trajectory
    line2 = ax.plot(-1000, 1000, 'ro', markersize=2)[0]    # landmarks
    poses = zeros([3, 5000])                               # rolling pose buffer
    stindex = 0
    # first laser index at/after the starting control timestamp
    j = searchsorted(timeLsr, time[stindex])
    timechr.sleep(3)
    t1 = timechr.time()

    for i in range(stindex, time.shape[0]):
        #for i in range(stindex, 10000):
        t3 = timechr.time()

        # periodic checkpoint every 5000 control steps
        if i > stindex and i % 5000 == 0:
            save(include_dir + 'poses_' + str(i), poses)
            save(include_dir + 'landmarks_' + str(i), m)
            save(include_dir + 'xi_' + str(i), xi)
            save(include_dir + 'omega_' + str(i), omega)
            save(include_dir + 'm0_' + str(i), m0)
            save_npz(include_dir + 'G_', G)

        # motion update, then recover the state mean estimate
        xi, omega, m, G = motion(speed[i], steering[i], dt, m0, xi, omega, m, G)
        m = estimate(m0, xi, omega, m)

        # consume all laser scans that fall inside this control interval
        # (timeLsr appears to be in ms, hence dt * 1000 -- TODO confirm)
        while j < L and timeLsr[j] < time[i] + dt * 1000:
            z1 = z[0:2, nonzero(z[0, :, j]), j].transpose((0, 2, 1))[:, :, 0]
            if z1.size > 0:
                co = correspondence(z1, m0, omega, m, G)
                xi, omega, m, G = measurement(z1, co, xi, omega, m, G)
            j += 1

        # NOTE(review): `co` is only assigned inside the while loop above;
        # if no scan has been processed yet this raises NameError on the
        # first iteration -- verify intended initialization.
        if co.size > 0:
            n = (xi.size - 3) // 2
            m0a = setdiff1d(m0, co)
            #returns float when m0 empty
            # keep at most N active landmarks (most recently seen)
            tq = hstack((m0a, unique(co))).astype(int)
            tq = tq[max(0, tq.size - N):]
            # landmarks dropped from the active set get sparsified away
            m1 = setdiff1d(m0, tq)
            m1 = union1d(m1, setdiff1d(co, tq))
            m0 = tq
            if m1.size > 0:
                xi, omega, G = sparsification(m0, m1, xi, omega, m, G)

        # clear terminal and print per-iteration timing
        print("\x1b[2J\x1b[H")
        t2 = timechr.time()
        print('iter: ' + str(i))
        print('iter time: {0:.5f}'.format(t2 - t3))
        print('avg: {0:.5f}'.format((t2 - t1) / (i + 1)))

        # record the pose and refresh the plot once per 5000 steps
        poses[:, i % 5000] = m[0:3]
        if i % 5000 == 4999:
            line2.set_data(m[3::2], m[4::2])
            line1.set_xdata(hstack((line1.get_xdata(), poses[0, :])))
            line1.set_ydata(hstack((line1.get_ydata(), poses[1, :])))
            #fig.canvas.restore_region(background)
            #ax.draw_artist(line1)
            #ax.draw_artist(line2)
            fig.canvas.blit(ax.bbox)
            fig.canvas.draw()
#d = 1 #r = 0 #t = 1 #v = 0 #final_t = 1 #indicating that Compress has not been able to remove the T gate (the final_t count = 1) this is expected as the circuit is not Clifford so must contain at least one T gate #now lets see what the Compute algorithm does with this circuit print("Analytic probability = ", (1 + numpy.cos(numpy.pi / 4)) / 2) gates, controls, targets = util.convert_circuit_to_numpy_arrays( circ ) # note the Compress algorithm code changes the gate arrays in place to do gagetization prob_compute = clifford_t_estim.compute_algorithm(qubits, measured_qubits, gates, controls, targets, measurement_outcome) print("Compute probability =", prob_compute) m = numpy.sqrt(4. - 2 * numpy.sqrt(2)) #sqrt(stabilizer extent) prob_estimate, eps = estimate.estimate(epsTot=0.01, deltaTot=1e-5, t=t, measured_qubits=measured_qubits, r=r, v=v, m=m, CH=pyChState, AG=pyAGState) print("Estimate probability =", prob_estimate, "+/-", eps)
modelSD = sd.SimData(N, model) #ses_opt = np.load("D:\Git\ExpSIMCE/ses_model.npy") ses_opt = np.load( "/Users/jorge-home/Dropbox/Research/teachers-reform/codes/teachers/ses_model.npy" ) w_matrix = np.zeros((ses_opt.shape[0], ses_opt.shape[0])) #var_cov = np.load("/Users/jorge-home/Dropbox/Research/teachers-reform/codes/teachers/var_cov.npy") #w_matrix = np.linalg.inv(var_cov) for j in range(ses_opt.shape[0]): w_matrix[j, j] = ses_opt[j]**(-2) output_ins = est.estimate(N, years,param0, p1_0,p2_0,treatment, \ typeSchool,HOURS,p1,p2,catPort,catPrueba,TrameI,priotity,rural_rbd,locality, w_matrix,moments_vector) corr_data = output_ins.simulation(50, modelSD) print(corr_data) beta0 = np.array([ param0.alphas[0][0], param0.alphas[0][1], param0.alphas[0][3], param0.alphas[0][4], param0.alphas[0][5], param0.alphas[1][0], param0.alphas[1][2], param0.alphas[1][3], param0.alphas[1][4], param0.alphas[1][5], param0.betas[0], param0.betas[1], param0.betas[2], param0.betas[3], param0.betas[4], param0.gammas[0], param0.gammas[1], param0.gammas[2] ]) print(beta0)