def plot_area_vol(self):
    """ Plot area vs hours, curves of SLR, subplots of rates """
    bins = np.linspace(1, 100, 11)
    fig, axes = plt.subplots(2, 2, True, True)
    title = fig.suptitle('percent area experiencing given runoff depth')
    flat_axes = axes.ravel()
    for idx, depth in enumerate(self.vols):
        # columns whose trailing '_<vol>' tag matches this runoff depth
        matching = [name for name in self.df_area.columns
                    if name.split('_')[-1] == str(depth)]
        pct_area = self.df_area[matching]
        pct_area.columns = ['SLR: {} m'.format(tag.split('_')[0])
                            for tag in pct_area.columns]
        # hours during which at least `threshold` percent of area qualifies
        hrs = pd.DataFrame(index=bins, columns=pct_area.columns)
        for threshold in bins:
            hrs.loc[threshold, :] = (pct_area >= threshold).sum()
        BB.fill_plot(hrs, flat_axes[idx],
                     title='Depth >= {} (mm)'.format(depth))
        flat_axes[idx].set_xlabel('% Area')
        flat_axes[idx].set_ylabel('Hours')
    fig.set_label(title.get_text())
    return fig
def plot_area_hours(self, compare=False):
    """ Plot area vs hours, curves of SLR, subplots of DTW """
    bins = np.linspace(0, 100, 40)
    fig, axes = plt.subplots(2, 2, True, True)
    flat = axes.ravel()
    for idx, dtw in enumerate(self.dtws):
        # columns for this DTW threshold, clipped to the model year
        window = self.df_area.filter(like=str(dtw)).loc[self.st:self.end, :]
        window.columns = ['SLR: {} m'.format(tag.split('_')[0])
                          for tag in window.columns]
        # hours during which at least `pct` percent of area qualifies
        hrs = pd.DataFrame(index=bins, columns=window.columns)
        for pct in bins:
            hrs.loc[pct, :] = (window >= pct).sum()
        BB.fill_plot(hrs, flat[idx], title='DTW <= {} m'.format(dtw))
        flat[idx].set_xlabel('% Area')
        flat[idx].set_ylabel('Hours')
        flat[idx].legend(loc='lower left', frameon=True, shadow=True,
                         facecolor='white', prop={'size': 16})
        flat[idx].set_ylim((0, len(window) * 1.10))
        flat[idx].yaxis.grid(True)
    fig.subplots_adjust(left=0.125, right=0.92, wspace=0.175, hspace=0.35)
    fig.set_label('dtw_area')
    if compare:
        # sensitivity runs: tag the axis labels and figure name
        for ax in flat:
            ax.set_xlabel('% Area ({})'.format(compare))
        fig.set_label('dtw_{}'.format(compare))
    # NOTE(review): returns the table for the LAST subplot only (original
    # behavior, kept as-is)
    return hrs
def _WriteSharedVar(sharedVarType, name, data):
    # Serialize `data` according to `sharedVarType` and send a write_var
    # command to the blackboard. Returns True on an acknowledged write,
    # False for an unhandled type or a failed/timed-out command.
    # (Python 2 module: the print statement below is intentional.)
    w = str(data)
    if sharedVarType == SharedVarTypes.BYTE_ARRAY:
        # hex dump, e.g. [1, 255] -> '0x01FF'
        w = '0x' + ''.join(["%02X" % x for x in data])
    elif sharedVarType in [SharedVarTypes.INT, SharedVarTypes.LONG]:
        w = str(int(data))
    elif sharedVarType == SharedVarTypes.DOUBLE:
        w = str(float(data))
    elif sharedVarType in [
            SharedVarTypes.INT_ARRAY, SharedVarTypes.LONG_ARRAY
    ]:
        # space-separated integer list
        w = ' '.join([str(int(x)) for x in data])
    elif sharedVarType == SharedVarTypes.DOUBLE_ARRAY:
        w = ' '.join([str(float(x)) for x in data])
    elif sharedVarType == SharedVarTypes.STRING:
        w = Message._SerializeString(data)
    elif sharedVarType == SharedVarTypes.MATRIX:
        w = SharedVar._SerializeMatrix(data)
    elif sharedVarType == SharedVarTypes.RECOGNIZED_SPEECH:
        w = SharedVar._SerializeRecognizedSpeech(data)
    else:
        print 'pyRobotics - ERROR: Unhandled shared var type'
        return False
    # 2000 ms timeout, 2 attempts
    r = BB.SendAndWait(
        Command('write_var', sharedVarType + ' ' + name + ' ' + w), 2000, 2)
    return (r and r.successful)
def __init__(self, path_result):
    # Resolve the result tree and shared data/figure/GIS paths.
    self.path = path_result
    self.path_picks = op.join(path_result, 'Pickles')
    self.path_data = op.join(op.expanduser('~'), 'Google_Drive', 'WNC',
                             'Coupled', 'Data')
    self.path_fig = op.join(self.path, 'Figures')
    self.path_gis = op.join(op.expanduser('~'), 'Dropbox', 'win_gis')
    # system-wide SWMM results; columns are '<variable>_<slr>'
    self.df_sys = pd.read_pickle(op.join(
        self.path_picks, 'swmm_sys.df'))  #.loc['2012-01-01-00':, :]
    self.df_xy = pd.read_csv(op.join(self.path_data, 'Grid_XY.csv'),
                             index_col='Zone')
    self.slr_names = self._get_slr()
    self.slr = sorted(self.slr_names)
    # unique variable names: strip the trailing '_<slr>' suffix
    self.sys_vars = BB.uniq(
        [slr.rsplit('_', 1)[0] for slr in self.df_sys.columns])
    self.ts_day = self.df_sys.resample('D').first().index
    self.ts_hr = self.df_sys.index
    self.df_swmm = pd.read_csv(op.join(self.path_data, 'SWMM_subs.csv'),
                               index_col='Zone')
    self.subs = self.df_swmm.index.values
    self.slr_sh = ['0.0', '1.0', '2.0']
    self.seasons = ['Winter', 'Spring', 'Summer', 'Fall']
    # to truncate time series to start at Dec 1, 2012; implement in pickling
    self.st = '2011-12-01-00'
    self.end = '2012-11-30-00'
    # hard-coded offsets that trim the series to one model year; assumes the
    # pickled data always covers the same span — TODO confirm
    self.ts_yr_hr = self.ts_hr[3696:-698]
    self.ts_yr_day = self.ts_day[154:-30]
    # model grid dimensions
    self.nrows = 74
    self.ncols = 51
def main():
    """Run weekly/daily MACD checks and Bollinger bands for one ticker."""
    ticker = 'AMD'
    history = Stock_history.sum()
    indicators = Technical_index.sum()
    bands = BB.sum()
    # G=IV_HV.sum()
    prices = history.Stock_price(ticker)
    # weekly data over 570 weeks
    weekly_dict = indicators.MACD_weekly_check(prices, ticker, 26, 570 * 1,
                                               period=5, back_ornot=0,
                                               weekly_BT=0)
    # daily data over 570 days, seeded with the weekly backtest result
    daily_dict = indicators.MACD_weekly_check(
        prices, ticker, 26, 570 * 1, period=1, back_ornot=0,
        weekly_BT=weekly_dict['weekly_BT'])
    # input DTE, output: std-dev bands enclosing all High/Low prices (DTE);
    # lookback fixes the center moving average, numsd the inner band width
    BB_dict = bands.bollinger_bands(prices, DTE=60, lookback=20, numsd=2)
    print(daily_dict)
    print('\n\n\n')
    print(weekly_dict)
    print('\n\n\n')
    condition_result = Strategy_trigger(daily_dict)
    print(condition_result)
def _SubscribeToSharedVar(name, subscriptionType, reportType):
    # Ask the blackboard to notify this client of changes to shared
    # variable `name`. Returns True on an acknowledged subscription.
    # NOTE(review): 'suscribe' (sic) is presumably the spelling the
    # blackboard wire protocol expects — verify before "fixing" it.
    # 2000 ms timeout, 2 attempts.
    r = BB.SendAndWait(
        Command(
            'suscribe_var', name + ' suscribe=' + subscriptionType +
            ' report=' + reportType), 2000, 2)
    return (r and r.successful)
def _ReadSharedVar(name):
    """Return the raw value of shared variable `name`, or None on failure."""
    response = BB.SendAndWait(Command('read_var', name), 2000)
    if response and response.successful:
        return response.data
    return None
def __init__(self, path_result):
    """Load depth-to-water pickles on top of the base result set."""
    res_base.__init__(self, path_result)
    self.df = pd.read_pickle(op.join(self.path_picks, 'dtw_seasons.df'))
    surface_pickle = op.join(self.path_picks, 'percent_at_surface.df')
    self.df_area = pd.read_pickle(surface_pickle)  #.loc['2011-12-01-00':, :]
    # unique DTW thresholds parsed from '<slr>_<dtw>' column names
    self.dtws = BB.uniq([float(col.split('_')[1])
                         for col in self.df_area.columns])
def __init__(self, path_result):
    """Load runoff pickles and SWMM runoff data on top of the base set."""
    res_base.__init__(self, path_result)
    self.df = pd.read_pickle(op.join(self.path_picks, 'run_seasons.df'))
    vols_pickle = op.join(self.path_picks, 'percent_vols.df')
    self.df_area = pd.read_pickle(vols_pickle)
    # unique runoff-depth thresholds from '<slr>_<vol>' column names
    self.vols = BB.uniq([float(col.split('_')[1])
                         for col in self.df_area.columns])
    self.seasons = ['Winter', 'Spring', 'Summer', 'Fall']
    self.dict = self._load_swmm('run')
def __init__(self, path_result):
    """Load runoff pickles plus monthly-mean runoff from the system frame."""
    super(runoff, self).__init__(path_result)
    self.df_area = pd.read_pickle(op.join(self.path_picks,
                                          'percent_vols.df'))
    # unique runoff-depth thresholds from '<slr>_<vol>' column names
    self.vols = BB.uniq([float(col.split('_')[1])
                         for col in self.df_area.columns])
    self.dict = self._load_swmm('run')
    # monthly-start means of every 'Runoff*' column, clipped to model year
    monthly = self.df_sys.filter(like='Runoff').resample('MS').mean()
    self.df_run = monthly.loc[self.st:self.end, :]
def solve(z0, f, nabla_f, stopping, log, proj, options):
    """Precondition with BB, then polish with LBFGS, restarting on failure."""
    precondition_opts = {
        'max_iter': 1000,
        'verbose': 1,
        'suff_dec': 0.003,  # FIXME unused
        'corrections': 500
    }  # FIXME unused
    z0 = BB.solve(z0, f, nabla_f, solvers.stopping,
                  log=log, proj=proj, options=precondition_opts)
    # up to 10 LBFGS attempts, stopping early once f(z0) is small enough
    for _ in range(10):
        if f(z0) <= 1:
            break
        try:
            z0 = LBFGS.solve(z0, f, nabla_f, stopping,
                             log=log, proj=proj, options=options)
            break
        except ArithmeticError:
            # LBFGS diverged; re-precondition with BB and try again
            print('restarting')
            z0 = BB.solve(z0, f, nabla_f, solvers.stopping,
                          log=log, proj=proj, options=precondition_opts)
    return z0
def __init__(self, path_result):
    """Load annual-average DTW and percent-at-surface pickles."""
    res_base.__init__(self, path_result)
    # super(dtw, self).__init__(path_result)
    # annual average, all cells; pickle converted this to a year
    self.df_year = pd.read_pickle(op.join(self.path_picks, 'dtw_yr.df'))
    surface_pickle = op.join(self.path_picks, 'percent_at_surface.df')
    self.df_area = pd.read_pickle(surface_pickle)  #.loc['2011-12-01-00':, :]
    # unique DTW thresholds from '<slr>_<dtw>' column names
    self.dtws = BB.uniq([float(col.split('_')[1])
                         for col in self.df_area.columns])
def __init__(self, path_result):
    # Resolve result-tree and data paths; load the subcatchment table.
    self.path = path_result
    self.path_picks = op.join(path_result, 'Pickles')
    self.path_data = op.join(op.expanduser('~'), 'Google_Drive', 'WNC',
                             'Coupled', 'Data')
    self.path_fig = op.join(self.path, 'Figures')
    self.df_swmm = pd.read_csv(op.join(self.path_data, 'SWMM_subs.csv'),
                               index_col='Zone')
    self.subs = self.df_swmm.index.values
    # system-wide results exist only after pickling has been run
    if op.isdir(self.path_picks):
        self.df_sys = pd.read_pickle(
            op.join(self.path_picks,
                    'swmm_sys.df'))  #.loc['2012-01-01-00':, :]
        # columns are '<var>_<slr>': split into SLR values and variables
        self.slr = BB.uniq(
            [float(slr.rsplit('_', 1)[1]) for slr in self.df_sys.columns])
        self.sys_vars = BB.uniq(
            [slr.rsplit('_', 1)[0] for slr in self.df_sys.columns])
        self.ts_day = self.df_sys.resample('D').first().index
        self.ts_hr = self.df_sys.index
        # NOTE(review): st/end may belong outside this guard (cf. the
        # sibling __init__ that sets them unconditionally) — original
        # indentation was lost; confirm against the source file
        self.st = '2011-12-01-00'
        self.end = '2012-11-30-00'
def run(self):
    # Dispatch to the configured solver and return the recorded histories
    # (self.iters/self.times/self.states are filled via self.log).
    logging.debug('Starting %s solver...' % self.method)
    if self.method == 'LBFGS':
        # z0 + 1 shifts the start point; NOTE(review): presumably to avoid
        # a degenerate start — confirm against the solver's expectations
        LBFGS.solve(self.z0 + 1,
                    self.f,
                    self.nabla_f,
                    solvers.stopping,
                    log=self.log,
                    proj=self.proj,
                    options=self.options)
        logging.debug("Took %s time" % str(np.sum(self.times)))
    elif self.method == 'BB':
        BB.solve(self.z0,
                 self.f,
                 self.nabla_f,
                 solvers.stopping,
                 log=self.log,
                 proj=self.proj,
                 options=self.options)
    elif self.method == 'DORE':
        # setup for DORE: rescale by the largest singular value so the
        # forward operator is a contraction
        alpha = 0.99
        lsv = lsv_operator(self.A, self.N)
        logging.info("Largest singular value: %s" % lsv)
        A_dore = self.A * alpha / lsv
        target_dore = self.target * alpha / lsv
        DORE.solve(self.z0,
                   lambda z: A_dore.dot(self.N.dot(z)),
                   lambda b: self.N.T.dot(A_dore.T.dot(b)),
                   target_dore,
                   proj=self.proj,
                   log=self.log,
                   options=self.options,
                   record_every=100)
        A_dore = None  # release the scaled matrix
    logging.debug('Stopping %s solver...' % self.method)
    return self.iters, self.times, self.states
def plot_area_days(self):
    """ Plot area vs days, curves of SLR, subplots of DTW """
    bins = np.linspace(0, 100, 11)
    fig, axes = plt.subplots(2, 2, True)
    title = fig.suptitle('percent area within given depth to water')
    flat = axes.ravel()
    for idx, dtw in enumerate(self.dtws):
        subset = self.df_area.filter(like=str(dtw))
        subset.columns = ['SLR: {} m'.format(tag.split('_')[0])
                          for tag in subset.columns]
        # hours during which at least `pct` percent of area qualifies
        counts = pd.DataFrame(index=bins, columns=subset.columns)
        for pct in bins:
            counts.loc[pct, :] = (subset >= pct).sum()
        BB.fill_plot(counts, flat[idx], title='DTW <= {}'.format(dtw))
        flat[idx].set_xlabel('% Area')
        flat[idx].set_ylabel('Hours')
    fig.set_label(title.get_text())
    return fig
def solve(z0, f, nabla_f, stopping, log, proj, options):
    """BB preconditioning followed by LBFGS, with up to 10 restarts."""
    warmup_options = {
        'max_iter': 1000,
        'verbose': 1,
        'suff_dec': 0.003,  # FIXME unused
        'corrections': 500
    }  # FIXME unused
    z0 = BB.solve(z0, f, nabla_f, solvers.stopping, log=log, proj=proj,
                  options=warmup_options)
    attempts = 0
    while attempts < 10 and f(z0) > 1:
        attempts += 1
        try:
            z0 = LBFGS.solve(z0, f, nabla_f, stopping, log=log, proj=proj,
                             options=options)
            break
        except ArithmeticError:
            # LBFGS blew up; fall back to BB preconditioning and retry
            print('restarting')
            z0 = BB.solve(z0, f, nabla_f, solvers.stopping, log=log,
                          proj=proj, options=warmup_options)
    return z0
def run(self):
    # Run the solver selected by self.method; histories accumulate through
    # self.log into self.iters/self.times/self.states.
    logging.debug('Starting %s solver...' % self.method)
    if self.method == 'LBFGS':
        # z0+1: shifted start point (NOTE(review): reason not evident from
        # this code — confirm with the solver implementation)
        LBFGS.solve(self.z0+1, self.f, self.nabla_f, solvers.stopping,
                    log=self.log, proj=self.proj, options=self.options)
        logging.debug("Took %s time" % str(np.sum(self.times)))
    elif self.method == 'BB':
        BB.solve(self.z0, self.f, self.nabla_f, solvers.stopping,
                 log=self.log, proj=self.proj, options=self.options)
    elif self.method == 'DORE':
        # setup for DORE: scale operator and target by alpha / (largest
        # singular value) before solving
        alpha = 0.99
        lsv = lsv_operator(self.A, self.N)
        logging.info("Largest singular value: %s" % lsv)
        A_dore = self.A*alpha/lsv
        target_dore = self.target*alpha/lsv
        DORE.solve(self.z0, lambda z: A_dore.dot(self.N.dot(z)),
                   lambda b: self.N.T.dot(A_dore.T.dot(b)), target_dore,
                   proj=self.proj, log=self.log, options=self.options,
                   record_every=100)
        A_dore = None  # drop the scaled matrix reference
    logging.debug('Stopping %s solver...' % self.method)
    return self.iters, self.times, self.states
def __init__(self, path_coupled):
    """Resolve data/result paths and load the system-wide SWMM pickle."""
    self.path = path_coupled
    self.path_data = op.join(self.path, 'Data')
    # default results live on the external 'BB_4TB' volume
    storage = op.join('/', 'Volumes', 'BB_4TB', 'Thesis')
    self.path_res = op.join(storage, 'Results_Default')
    self.path_picks = op.join(self.path_res, 'Pickles')
    self.df_xy = pd.read_csv(op.join(self.path_data, 'Grid_XY.csv'),
                             index_col='Zone')
    self.df_sys = pd.read_pickle(op.join(self.path_picks, 'swmm_sys.df'))
    # unique SLR scenarios parsed from '<var>_<slr>' column names
    self.slr = BB.uniq([float(col.rsplit('_', 1)[1])
                        for col in self.df_sys.columns])
    self.ts_day = self.df_sys.resample('D').first().index
    self.st = '2011-12-01-00'
    self.end = '2012-11-30-00'
def _Execute(self):
    """Send self._command until a response arrives or attempts run out.

    self._attempts == 0 means retry indefinitely. The attempt limit is
    re-read under the lock on every iteration so another thread may change
    it while we are retrying. Stores the response (possibly None) and
    clears the sending flag when done.
    """
    response = None
    currentAttempt = 0
    # `with` guarantees release even if reading the attribute raises;
    # the original bare acquire()/release() pair could leak the lock.
    with self._attemptsLock:
        att = self._attempts
    while not response and (att == 0 or currentAttempt < att):
        currentAttempt += 1
        response = BB.SendAndWait(self._command, self._timeout)
        with self._attemptsLock:
            att = self._attempts
    self._setResponse(response)
    self._setSending(False)
def main():
    # Pipeline: read raw player stats, compute each batting metric into
    # Results, then emit the per-metric reports and the final printout.
    # All module functions mutate the shared Stats/Results lists in place.
    HeaderRow = []
    Stats = []
    Results = []
    # call functions from imported modules
    ReadData.readData(Stats, Results, HeaderRow)
    BB.PlayerInfo(Stats, Results)
    BB.BattingAverage(Stats, Results)
    BB.SluggingPercentage(Stats, Results)
    BB.OnBasePercentage(Stats, Results)
    BB.OPS(Stats, Results)
    BB.RunsProduced(Stats, Results)
    BB.RunsProducedPerAtBat(Stats, Results)
    Reports.BattingAverage(Results)
    Reports.SluggingPercentage(Results)
    Reports.OnBasePercentage(Results)
    Reports.OPS(Results)
    Reports.RunsProduced(Results)
    Reports.RunsProducedPerAtBat(Results)
    PrintReports.printReports()
def main(filepath):
    # Load an experiment matrix, run the selected solver on the reduced
    # problem, and report allocation error. Python 2 module (print
    # statements). Returns (z_sol, f(z_sol)).
    p = parser()
    args = p.parse_args()
    if args.log in c.ACCEPTED_LOG_LEVELS:
        # eval is whitelist-guarded by the membership test above
        logging.basicConfig(level=eval('logging.'+args.log))
    # load data
    filepath = '%s/%s' % (c.EXPERIMENT_MATRICES_DIR, filepath)
    A, b, N, block_sizes, x_true, nz, flow, _ = util.load_data(filepath,
                                                               CP=True)
    sio.savemat('fullData.mat', {'A': A, 'b': b, 'N': block_sizes,
                                 'N2': N, 'x_true': x_true})
    print A.shape
    if args.noise:
        # perturb observations with Gaussian noise scaled by b
        b_true = b
        delta = np.random.normal(scale=b*1)
        b = b + delta
    logging.debug("Blocks: %s" % block_sizes.shape)
    # start from the block-wise unit vector; fold x0 into the target
    x0 = np.array(util.block_e(block_sizes - 1, block_sizes))
    target = A.dot(x0)-b
    options = {
        'max_iter': 10000,
        'verbose': 1,
        'suff_dec': 0.003,  # FIXME unused
        'corrections': 500
    }  # FIXME unused
    AT = A.T.tocsr()
    NT = N.T.tocsr()
    # plain least-squares objective (overridden just below)
    f = lambda z: 0.5 * la.norm(A.dot(N.dot(z)) + target)**2
    nabla_f = lambda z: NT.dot(AT.dot(A.dot(N.dot(z)) + target))
    # regularization included: these redefinitions are the ones used
    lamb = 1
    f = lambda z: 0.5 * la.norm(A.dot(N.dot(z)) + target)**2 + \
        0.5 * lamb * la.norm(N.dot(z) + x0)**2
    nabla_f = lambda z: NT.dot(AT.dot(A.dot(N.dot(z)) + target)) + \
        lamb * NT.dot(N.dot(z) + x0)

    def proj(x):
        # project onto the block-wise probability simplex
        projected_value = simplex_projection(block_sizes - 1, x)
        return projected_value

    z0 = np.random.random(N.shape[1])
    import time
    iters, times, states = [], [], []

    def log(iter_, state, duration):
        # record solver progress; returns current time for bookkeeping
        iters.append(iter_)
        times.append(duration)
        states.append(state)
        start = time.time()
        return start

    logging.debug('Starting %s solver...' % args.solver)
    if args.solver == 'LBFGS':
        z_sol = LBFGS.solve(z0+1, f, nabla_f, solvers.stopping,
                            log=log, proj=proj, options=options)
        logging.debug("Took %s time" % str(np.sum(times)))
    elif args.solver == 'BB':
        z_sol = BB.solve(z0, f, nabla_f, solvers.stopping, log=log,
                         proj=proj, options=options)
    elif args.solver == 'DORE':
        # setup for DORE: rescale so the forward operator is a contraction
        alpha = 0.99
        lsv = util.lsv_operator(A, N)
        logging.info("Largest singular value: %s" % lsv)
        A_dore = A*alpha/lsv
        target_dore = target*alpha/lsv
        DORE.solve(z0, lambda z: A_dore.dot(N.dot(z)),
                   lambda b: N.T.dot(A_dore.T.dot(b)), target_dore,
                   proj=proj, log=log, options=options)
        A_dore = None
    elif args.solver == 'COMBINED':
        z_sol = solve(z0, f, nabla_f, solvers.stopping, log=log, proj=proj,
                      options=options)
    elif args.solver == 'CONT':
        # continuation: gradually weight the data/regularization via lamb
        f_l = lambda z, lamb: 0.5 * la.norm(A.dot(N.dot(z)) +
            lamb*target)**2 + 0.5 * lamb * la.norm(N.dot(z) + x0)**2
        nabla_f_l = lambda z, lamb: NT.dot(AT.dot(A.dot(N.dot(z)) +
            lamb*target)) + lamb*NT.dot(N.dot(z) + x0)
        z_sol = continuation_solver.solve(z0, f_l, nabla_f_l,
                                          solvers.stopping, log=log,
                                          proj=proj, options=options,
                                          solve=BB.solve)
    logging.debug('Stopping %s solver...' % args.solver)
    # Summary statistics over the iterate history
    d = len(states)
    x_hat = N.dot(np.array(states).T) + np.tile(x0, (d, 1)).T
    x_last = x_hat[:, -1]
    logging.debug("Shape of x0: %s" % repr(x0.shape))
    logging.debug("Shape of x_hat: %s" % repr(x_hat.shape))
    starting_error = 0.5 * la.norm(A.dot(x0)-b)**2
    opt_error = 0.5 * la.norm(A.dot(x_true)-b)**2
    diff = A.dot(x_hat) - np.tile(b, (d, 1)).T
    error = 0.5 * np.diag(diff.T.dot(diff))
    dist_from_true = np.max(np.abs(x_last-x_true))
    start_dist_from_true = np.max(np.abs(x_last-x0))
    x_diff = x_true - x_last
    # fraction of total flow assigned to the wrong routes
    per_flow = np.sum(np.abs(flow * (x_last-x_true))) / np.sum(flow * x_true)
    print 'percent flow allocated incorrectly: %f' % per_flow
    return z_sol, f(z_sol)
def main(filepath):
    # Variant of the solver driver: different data dir/loader signature,
    # noise scaled by args.noise, max_iter 5000. Python 2 module.
    # Returns (z_sol, f(z_sol)).
    p = parser()
    args = p.parse_args()
    if args.log in c.ACCEPTED_LOG_LEVELS:
        # eval is whitelist-guarded by the membership test above
        logging.basicConfig(level=eval('logging.' + args.log))
    # load data
    filepath = '%s/%s/%s' % (c.DATA_DIR, c.EXPERIMENT_MATRICES_DIR, filepath)
    A, b, N, block_sizes, x_true, nz, flow = util.load_data(filepath)
    sio.savemat('fullData.mat', {
        'A': A,
        'b': b,
        'N': block_sizes,
        'N2': N,
        'x_true': x_true
    })
    if args.noise:
        # perturb observations with Gaussian noise scaled by b * noise level
        b_true = b
        delta = np.random.normal(scale=b * args.noise)
        b = b + delta
    logging.debug("Blocks: %s" % block_sizes.shape)
    # start from the block-wise unit vector; fold x0 into the target
    x0 = np.array(util.block_e(block_sizes - 1, block_sizes))
    target = A.dot(x0) - b
    options = {
        'max_iter': 5000,
        'verbose': 1,
        'suff_dec': 0.003,  # FIXME unused
        'corrections': 500
    }  # FIXME unused
    AT = A.T.tocsr()
    NT = N.T.tocsr()
    # regularization included
    lamb = 1
    f = lambda z: 0.5 * la.norm(A.dot(N.dot(z)) + target
                                )**2 + 0.5 * lamb * la.norm(N.dot(z) + x0)**2
    nabla_f = lambda z: NT.dot(AT.dot(A.dot(N.dot(z)) + target)
                               ) + lamb * NT.dot(N.dot(z) + x0)

    def proj(x):
        # project onto the block-wise probability simplex
        projected_value = simplex_projection(block_sizes - 1, x)
        return projected_value

    z0 = np.random.random(N.shape[1])
    import time
    iters, times, states = [], [], []

    def log(iter_, state, duration):
        # record solver progress; returns current time for bookkeeping
        iters.append(iter_)
        times.append(duration)
        states.append(state)
        start = time.time()
        return start

    logging.debug('Starting %s solver...' % args.solver)
    if args.solver == 'LBFGS':
        z_sol = LBFGS.solve(z0 + 1, f, nabla_f, solvers.stopping,
                            log=log, proj=proj, options=options)
        logging.debug("Took %s time" % str(np.sum(times)))
    elif args.solver == 'BB':
        z_sol = BB.solve(z0, f, nabla_f, solvers.stopping, log=log,
                         proj=proj, options=options)
    elif args.solver == 'DORE':
        # setup for DORE: rescale so the forward operator is a contraction
        alpha = 0.99
        lsv = util.lsv_operator(A, N)
        logging.info("Largest singular value: %s" % lsv)
        A_dore = A * alpha / lsv
        target_dore = target * alpha / lsv
        DORE.solve(z0, lambda z: A_dore.dot(N.dot(z)),
                   lambda b: N.T.dot(A_dore.T.dot(b)), target_dore,
                   proj=proj, log=log, options=options)
        A_dore = None
    elif args.solver == 'COMBINED':
        z_sol = solve(z0, f, nabla_f, solvers.stopping, log=log, proj=proj,
                      options=options)
    elif args.solver == 'CONT':
        # continuation: gradually weight the data/regularization via lamb
        f_l = lambda z, lamb: 0.5 * la.norm(A.dot(N.dot(
            z)) + lamb * target)**2 + 0.5 * lamb * la.norm(N.dot(z) + x0)**2
        nabla_f_l = lambda z, lamb: NT.dot(
            AT.dot(A.dot(N.dot(z)) + lamb * target)) + lamb * NT.dot(
                N.dot(z) + x0)
        z_sol = continuation_solver.solve(z0, f_l, nabla_f_l,
                                          solvers.stopping, log=log,
                                          proj=proj, options=options,
                                          solve=BB.solve)
    logging.debug('Stopping %s solver...' % args.solver)
    # Summary statistics over the iterate history
    d = len(states)
    x_hat = N.dot(np.array(states).T) + np.tile(x0, (d, 1)).T
    x_last = x_hat[:, -1]
    logging.debug("Shape of x0: %s" % repr(x0.shape))
    logging.debug("Shape of x_hat: %s" % repr(x_hat.shape))
    starting_error = 0.5 * la.norm(A.dot(x0) - b)**2
    opt_error = 0.5 * la.norm(A.dot(x_true) - b)**2
    diff = A.dot(x_hat) - np.tile(b, (d, 1)).T
    error = 0.5 * np.diag(diff.T.dot(diff))
    dist_from_true = np.max(np.abs(x_last - x_true))
    start_dist_from_true = np.max(np.abs(x_last - x0))
    x_diff = x_true - x_last
    # fraction of total flow assigned to the wrong routes
    per_flow = np.sum(np.abs(flow * (x_last - x_true))) / np.sum(flow * x_true)
    print 'percent flow allocated incorrectly: %f' % per_flow
    return z_sol, f(z_sol)
# Exercises the various Python import forms (plain, aliased, submodule
# alias) and calls through each binding so every resolution path is used.
import BB as c
import BB.test as d
import BB

c.test.bb(1)
d.bb(1)
BB.bb(2)

import bbb as ww

ww.ccc()

import bbb

bbb.ccc()

import gazebo2rviz
from pyibex import *
from BB import *
import numpy as np

# Define a function of two variables
f = Function("x", "y", "x+y")
# Define the input domain of the function
input_box = IntervalVector(2, [0.5, 1])
print(len(input_box))
# Define the output range (i.e. desired value of the function)
output_range = Interval(1, 1)
# Build the BB search over the function/domain/range and print the root
# expansion to depth 10
test = BB(f, input_box, output_range)
print(test.getRoot(10))
from MatrixSimplex import MatrixSimplex
import BB
import numpy as np
import scipy.optimize

# Reference problem (see www.reshmat.ru simplex calculator):
#   maximize x0 - x1 + x2 subject to
#     -x0 + 2*x1 - x2      == 4
#      3*x0 + x1      + x3 == 14,  all x >= 0
if __name__ == "__main__":
    A = [[-1, 2, -1, 0], [3, 1, 0, 1]]
    c = [1, -1, 1, 0]
    b = [4, 14]
    # simplex on the reference problem (basis columns 1 and 2)
    simplex = MatrixSimplex(A, b, c, [1, 2])
    simplex.do_simplex()
    # branch-and-bound on the same constraints with a different objective
    c = [-1, 11, -1, 5]
    A_eq = [[-1, 2, -1, 0], [3, 1, 0, 1]]
    b_eq = [4, 14]
    BB.calculate(A_eq, b_eq, c)
def _CreateSharedVar(svType, name):
    """Create shared variable `name` of type `svType` on the blackboard.

    Returns True when the blackboard acknowledges the creation.
    """
    cmd = Command('create_var', svType + ' ' + name)
    response = BB.SendAndWait(cmd, 2000, 2)
    return (response and response.successful)
def run(self):
    # K-fold cross-validation: build the (optionally L2-regularized)
    # objective for each train split, run the configured solver, and store
    # per-fold iteration/timing/state histories.
    for i, (train, test) in enumerate(self.kf):
        # Setup: slice observations/rows for this fold
        b_train, A_train = self.b[train], self.A[train, :]
        b_test, A_test = self.b[test], self.A[test, :]
        AT = A_train.T.tocsr()
        target = A_train.dot(self.x0) - b_train
        if self.reg == None:
            # plain least squares
            f = lambda z: 0.5 * la.norm(
                A_train.dot(self.N.dot(z)) + target)**2
            nabla_f = lambda z: self.NT.dot(AT.dot(A_train.dot(self.N.dot(z)) \
                    + target))
        elif self.reg == 'L2' and self.weights:
            # weighted Tikhonov term via diagonal D (self.D2 = D squared)
            f = lambda z: 0.5 * la.norm(
                A_train.dot(self.N.dot(z)) + target)**2 + 0.5 * la.norm(
                    self.D * (self.N.dot(z) + self.x0))**2
            nabla_f = lambda z: self.NT.dot(AT.dot(A_train.dot(self.N.dot(z)) \
                    + target)) + self.NT.dot(self.D2 * (self.N.dot(z) + \
                    self.x0))
        elif self.reg == 'L2':
            # unweighted Tikhonov term
            f = lambda z: 0.5 * la.norm(
                A_train.dot(self.N.dot(z)) + target)**2 + 0.5 * la.norm(
                    self.N.dot(z) + self.x0)**2
            nabla_f = lambda z: self.NT.dot(AT.dot(A_train.dot(self.N.dot(z)) \
                    + target)) + self.NT.dot(self.N.dot(z) + self.x0)
        iters, times, states = [], [], []

        def log(iter_, state, duration):
            # accumulate solver progress for this fold
            iters.append(iter_)
            times.append(duration)
            states.append(state)
            start = time.time()
            return start

        # Solve
        logging.debug('[%d] Starting %s solver...' % (i, self.solver))
        if self.solver == 'LBFGS':
            LBFGS.solve(self.z0 + 1, f, nabla_f, solvers.stopping, log=log,
                        proj=self.proj, options=self.options)
        elif self.solver == 'BB':
            BB.solve(self.z0, f, nabla_f, solvers.stopping, log=log,
                     proj=self.proj, options=self.options)
        elif self.solver == 'DORE':
            # setup for DORE: rescale by the largest singular value
            alpha = 0.99
            lsv = util.lsv_operator(A_train, self.N)
            logging.info("Largest singular value: %s" % lsv)
            A_dore = A_train * alpha / lsv
            target_dore = target * alpha / lsv
            DORE.solve(self.z0, lambda z: A_dore.dot(self.N.dot(z)),
                       lambda b: self.N.T.dot(A_dore.T.dot(b)),
                       target_dore, proj=self.proj, log=log,
                       options=self.options)
            A_dore = None
        # NOTE(review): message reconstructed as a single literal — the
        # chunked source split it mid-string; confirm against the original
        logging.debug('[%d] Stopping %s solver... %s' % \
                (i, self.solver, str(np.sum(times))))
        self.iters[i] = iters
        self.times[i] = times
        self.states[i] = states
        # release fold-local matrices before the next iteration
        AT, A_train, A_test = None, None, None
plt.ylim(0, 300) plt.xlabel("time") plt.ylabel("cell intensity") plt.plot([0, 10], [0, 50], color="k", lw=6) plt.title("Estimated Intensity for each cell") for i in range(len(t2)): plt.plot([t2[i], t2[i]], [-1, 1], lw=2, color="k") #plt.scatter(t2[1:],np.repeat(0,len(t2)-1),marker=2,linewidths=2) plt.xlabel("time") plt.ylim(-0.1, 0.1) plt.xlim((0, 10)) #run the bayesian blocks algorithm using linear and constant blocks(takes a couple of minutes to run) q = BB.BayesianBlocks(t, c=4, type="linear", verbose=True, force_intercept=False) start = time.time() r = BB.BayesianBlocks(t, c=1, type="constant", verbose=True, PELT=True) print(str(time.time() - start) + " seconds") ####### PLOTTING ######### #plot points plt.hist(t, color="gray", bins=40) #plot the linear blocks for i in range(len(q.blocks)): linear, = plt.plot([t[q.left[i]], t[q.right[i]]], [q.leftintensities[i], q.rightintensities[i]],
from pyibex import *
from BB import *
import numpy as np

# Define a function of two variables
f = Function("x", "y", "x^2+y^2")
# Define the input domain of the function
input_box = IntervalVector(2, [0.5, 5])
print(len(input_box))
# Define the output range (i.e. desired value of the function)
output_range = Interval(1, 1)
# Build the BB search and query terminal status for a degenerate box
test = BB(f, input_box, output_range)
print(test.getGameEnded(IntervalVector(2, [5, 5]), 1))
'''
assert((test.getRoot() == np.array([[0.5, 2.75, 5.  ],
 [0.5, 2.75, 5.  ]])).all())

assert((test.getNextState(0,3)[1] == np.array([2.75, 3.875, 5.   ])).all())
'''