def run_net_2(training_data, test_data,
              monitor_evaluation_accuracy=True,
              monitor_evaluation_cost=True):
    """Runs net 2."""
    n_input, n_hidden, n_out = 784, 100, 10
    epochs, mini_batch_size, eta, lmbda = 30, 10, 0.5, 5
    net = network_test_2.NetworkTest2([n_input, n_hidden, n_out])
    net.stochastic_gradient_descent(
        training_data, epochs, mini_batch_size, eta, lmbda,
        evaluation_data=test_data,
        monitor_evaluation_accuracy=monitor_evaluation_accuracy,
        monitor_evaluation_cost=monitor_evaluation_cost,
        monitor_training_accuracy=True,
        monitor_training_cost=True)
    if monitor_evaluation_accuracy:
        x, y = net.get_accuracy_per_epoch()
        plt = plot.Plot(x, 'Epoch', y, 'Accuracy (%)',
                        'Net 2 Accuracy', 'net_2_accuracy')
        plt.plot()
    if monitor_evaluation_cost:
        x, y = net.get_cost_per_epoch()
        plt = plot.Plot(x, 'Epoch', y, 'Total Cost',
                        'Net 2 Cost', 'net_2_cost')
        plt.plot()
def main_pc():
    # This code uses pictures taken manually.
    # This is not the intended way to use the code, but because of COVID-19
    # the real setup was not available.
    folder = r'D:\_Udes\S4\Projet\ScanUS\Photos_boite/'
    files = os.listdir(folder)
    laser_angles = [-5, -3, -2, -1, -0.5, 0, 0.5, 1.5, 2, 3, 4]
    my_plot = plot.Plot(name='main plot', range=[0, 10])
    position_laser_ref_plaque = Matrix(pos=[259, 512, 150])
    angle_laser_cam = 11.1 * 2 * math.pi / 360 + math.atan(259 / 512)
    trans_plaque_to_cam_ref = Matrix(angles=[0, 0, angle_laser_cam])
    position_laser = mult([trans_plaque_to_cam_ref, position_laser_ref_plaque])
    for i in range(len(files)):
        filename = folder + files[i]
        img = cv2.imread(filename)
        x, y, fail = camera.find_red_dot(frame=img)
        if not fail:
            angle_table = ((i * 11.25) % 360) * 2 * math.pi / 360
            angle_laser = laser_angles[i // 32] * 2 * math.pi / 360
            p1, v1 = camera.get_red_dot_point_vector_in_world(angle_table, x, y)
            p2, v2 = laser.get_laser_point_vector_in_world(
                angle_table=angle_table, angle_wrist=angle_laser)
            p, error = intersect(p1, v1, p2, v2)
            my_plot.add_point(p + [error])
        if i > 11 * 32:
            break
    input()
    my_plot.close()
def test_velocity_dist_default_key():
    """Test velocity distribution."""
    tracking = load.Load("tests/tracking.txt")
    plotObj = plot.Plot(tracking)

    velocityTest = plotObj.velocityDistribution(ids=[0, 1])
    refData = tracking.getObjects(0)
    a = (np.sqrt(np.diff(refData.xBody.values)**2 +
                 np.diff(refData.yBody.values)**2) /
         np.diff(refData.imageNumber.values))
    refData = tracking.getObjects(1)
    b = (np.sqrt(np.diff(refData.xBody.values)**2 +
                 np.diff(refData.yBody.values)**2) /
         np.diff(refData.imageNumber.values))
    pooled = np.concatenate((a, b))
    np.testing.assert_array_equal(pooled, velocityTest[1][0])

    velocityTest = plotObj.velocityDistribution(ids=[0, 1], pooled=False)
    np.testing.assert_array_equal(a, velocityTest[1][0])
    np.testing.assert_array_equal(b, velocityTest[1][1])

    refData = tracking.getObjectsInFrames(0, indexes=list(range(0, 100)))
    a = (np.sqrt(np.diff(refData.xBody.values)**2 +
                 np.diff(refData.yBody.values)**2) /
         np.diff(refData.imageNumber.values))
    velocityTest = plotObj.velocityDistribution(ids=[0], pooled=True,
                                                indexes=(0, 100))
    np.testing.assert_array_equal(a, velocityTest[1][0])
def plot_pdf(self):
    """Plot self.pdf."""
    if self.pdf is None:
        print('PDF not yet computed')
        return
    # mind the $ and r's for latex formatted printing
    labels = {}
    labels['ylabel'] = (r'PDF(\langle\bar{' + r'{}'.format(self.symbol) +
                        r'_{\tau}}\rangle)')
    # lacks universality, but based on use is ok
    labels['xlabel'] = r'{} / {}'.format(self.symbol, self.unit)
    labels['title'] = (r'probability distribution of values of ' +
                       r'${}$'.format(self.symbol))
    labels['opath'] = 'output/'
    labels['oname'] = self.name + '_pdf'
    kwargs = {
        'linestyle': 'none',
        'label': '',
        'marker': 'o',
        'ms': 1.5,
        'lw': '1.0',
        'color': 'blue'
    }
    self.pdf_fig = pc.Plot(self.pdf, labels, **kwargs)
    self.pdf_fig.save()
    self.pdf_fig.cla()
def make_plotlayout(self):
    self.plot_layout = QtGui.QGridLayout()
    self.plot_layout.setGeometry(QtCore.QRect(200, 200, 200, 200))
    self.plots.append(plot.Plot())
    self.root_layout.addLayout(self.plot_layout)
    self.plot_layout.addWidget(self.plots[0])
def plotThread(self):
    plot1 = plot.Plot("x v y", "x", "y", np.empty(0), np.empty(0))
    while True:
        # Append the robot's current (x, y) position and redraw.
        plot1.addData(self.robots[0].getPos()[0],
                      self.robots[0].getPos()[1])
        plot1.plot()
        time.sleep(0.01)
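# A minimal sketch (assumption: plotThread lives on a controller object with
# a `robots` list, as above) showing how the endless plotting loop could be
# started on a daemon thread so it never blocks the main control loop.
import threading

def start_plotting(controller):
    # daemon=True lets the process exit even though plotThread loops forever
    t = threading.Thread(target=controller.plotThread, daemon=True)
    t.start()
    return t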
def do_GET(s):
    """Respond to a GET request."""
    plot.Plot(LOG_FILENAME, PLOT_FILENAME)
    s.send_response(200)
    s.send_header("Content-type", "image/png")
    s.end_headers()
    with open(PLOT_FILENAME, 'rb') as f:
        s.wfile.write(f.read())
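# A minimal serving sketch. Assumption: do_GET above is meant to be the GET
# method of a BaseHTTPRequestHandler subclass; the class name PlotHandler and
# port 8080 are placeholders, only LOG_FILENAME / PLOT_FILENAME come from the
# handler above.
from http.server import BaseHTTPRequestHandler, HTTPServer

class PlotHandler(BaseHTTPRequestHandler):
    pass

# Reuse the function above as the GET method; its first argument plays the
# role of `self`.
PlotHandler.do_GET = do_GET

if __name__ == '__main__':
    # Regenerates the plot from the log and serves the PNG on every request.
    HTTPServer(('0.0.0.0', 8080), PlotHandler).serve_forever()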
def do_plot_indexes(self, arg):
    indexes = arg.split(',', 1)
    a_plot = plot.Plot(plot.PlotCellIndex(indexes[0]))
    try:
        for index in indexes[1:]:
            p = plot.PlotCellIndex(index)
            a_plot.addSimple(plot.PlotCell((p.data, p.dates)))
    finally:
        a_plot.plot()
def main():
    num = 15
    st = stats.Loadstats()
    st.loadCpu(True)
    st.loadMem(True)
    st.loadTemp(True)

    # Read the last `num` entries of each log, newest first.
    time_lbl = []
    with open('time.log', 'r') as f:
        s = f.readlines()
        for x in range(len(s) - 1, len(s) - num - 1, -1):
            time_lbl.append(s[x])

    cpu_plt = []
    with open('cpu_use.log', 'r') as f:
        s = f.readlines()
        for x in range(len(s) - 1, len(s) - num - 1, -1):
            cpu_plt.append(s[x].split(',')[:-1])
    cpu = plot.Plot()
    cpu.plot(cpu_plt, 'm', time_lbl, num, 'cpu_usage.png',
             figname='CPU utilization', labelname='Processor:')

    mem_plt = []
    with open('mem.log', 'r') as f:
        s = f.readlines()
        for x in range(len(s) - 1, len(s) - num - 1, -1):
            mem_plt.append(s[x])
    mem = plot.Plot()
    mem.plot(mem_plt, 's', time_lbl, num, 'mem_usage.png',
             figname='MEM utilization', labelname='Utilization:')

    temp_plt = []
    with open('cpu_temp.log', 'r') as f:
        s = f.readlines()
        for x in range(len(s) - 1, len(s) - num - 1, -1):
            temp_plt.append(s[x].split(',')[:-1])
    temp = plot.Plot()
    temp.plot(temp_plt, 'm', time_lbl, num, 'temp.png',
             figname='CPU Temperature', labelname='Core:')
def main():
    # ------ CONFIGURE PARAMETERS ------
    params = set_params.Params()

    # ------ EXECUTE ------
    results = adagrad(params)

    # ------ PLOT ------
    algorithm = "adagrad"
    plt = plot.Plot(params)
    plt.update_algorithm(algorithm, results, thresholding=True)
    plt.plot_all()
def draw_empty(self):
    self.canvas.Destroy()
    self.toolbar.Destroy()
    plt = plot.Plot()
    self.canvas = FigureCanvas(self, -1, plt.fig)
    self.toolbar = NavigationToolbar(self.canvas)
    self.sizer.Add(self.toolbar, 0, wx.EXPAND)
    self.sizer.Add(self.canvas, 1, wx.GROW)
    self.Layout()
def position_selected(self, index):
    item = index.internalPointer()
    it = item.data(index.column(), QtCore.Qt.UserRole)
    if isinstance(it, position.position):
        # Remove existing layout items one at a time; iterating over a fixed
        # range while calling takeAt() would skip every other item.
        while self.plot_layout.count():
            self.plot_layout.takeAt(0)
        self.plot_layout.addWidget(plot.Plot(position=it))
    elif isinstance(it, position._security):
        pass
def run_with_plot(self):
    """Open plot window and run menu in thread."""
    thread = MenuThread(self)
    self.plot = plot.Plot()
    try:
        thread.start()
        self.plot.run()
    finally:
        thread.join()
        self.plot = None
    if thread.exception is not None:
        raise thread.exception
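# A minimal sketch of the kind of thread class run_with_plot expects: it runs
# the menu loop and stores any exception so the caller can re-raise it after
# join(). The real MenuThread is not shown above, so the run() target
# (owner.run_menu) is an assumption.
import threading

class MenuThread(threading.Thread):
    def __init__(self, owner):
        super().__init__()
        self.owner = owner
        self.exception = None

    def run(self):
        try:
            self.owner.run_menu()  # hypothetical menu entry point
        except Exception as exc:   # captured so the main thread can re-raise
            self.exception = exc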
def __init__(self, rateTrain=0.0, lr=1e-3, nCell=5, trialID=0):
    # path ----
    self.modelPath = 'model'
    self.figurePath = 'figure'
    # ----

    # parameter ----
    dInput = 6
    self.dOutput = 3
    self.trialID = trialID
    # ----

    # data ----
    self.myData = data.NankaiData()
    # Test data: interval / seq / one-hot year / b
    self.xTest, self.seqTest, self.yYearTest, self.yTest = self.myData.TrainTest()
    # Eval data: Nankai rireki
    self.xEval, self.seqEval, self.yYearEval, self.yEval = self.myData.Eval()
    # plot
    self.myPlot = plot.Plot(figurepath=self.figurePath, trialID=trialID)
    # ----

    # placeholders ----
    # interval: predicted param b + v_{t-1}
    self.odex = tf.compat.v1.placeholder(tf.float32, shape=[None, None, dInput])
    # v_t
    self.odey = tf.compat.v1.placeholder(tf.float32, shape=[None, self.dOutput])
    # ----

    # neural network ----
    self.Vt = self.odeNN(self.odex)
    self.Vt_test = self.odeNN(self.odex, reuse=True)
    # ----

    # loss ----
    self.odeloss = tf.square(self.odey - self.Vt)
    self.odeloss_test = tf.square(self.odey - self.Vt_test)
    # ----

    # optimizer ----
    odeVars = tf.compat.v1.get_collection(
        tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES, scope='odeNN')
    self.optODE = tf.compat.v1.train.AdamOptimizer(lr).minimize(
        self.odeloss, var_list=odeVars)
    # ----

    # session ----
    config = tf.compat.v1.ConfigProto(gpu_options=tf.compat.v1.GPUOptions(
        per_process_gpu_memory_fraction=0.1, allow_growth=True))
    self.sess = tf.compat.v1.Session(config=config)
    self.sess.run(tf.compat.v1.global_variables_initializer())
    self.saver = tf.compat.v1.train.Saver()
def _build_gui(self):
    """Builds the interface of this window, the entire tree of widgets."""
    _vbox = gtk.VBox()
    self.add(_vbox)
    _toolbar = self._build_toolbar()
    _vbox.pack_start(_toolbar, False, False, 5)
    self._build_drawing_area(_vbox)
    self._plot_window = plot.Plot()
    self._plot_window.set_title(TITLE)
    self._plot_window.set_icon_from_file(ROBOT_FILE)
def main():
    # ------ CONFIGURE PARAMETERS ------
    params = set_params.Params()

    # ------ EXECUTE ------
    results = adam_lb_modified(params)

    # ------ PLOT ------
    if params.flipping:
        algorithm = "adam-lb-modified-w-flipping"
    else:
        algorithm = "adam-lb-modified"
    plt = plot.Plot(params)
    plt.update_algorithm(algorithm, results, thresholding=True)
    plt.plot_all()
def plot_data(self):
    """Plot self.data."""
    # mind the $ and r's for latex formatted printing
    labels = {}
    labels['ylabel'] = r'${} / {}$'.format(self.symbol, self.unit)
    # lacks universality, but based on use is ok
    labels['xlabel'] = 'timesteps in s'
    labels['title'] = ''
    kwargs = {
        'linestyle': 'solid',
        'label': '',
        'marker': 'o',
        'lw': '1.0',
        'color': 'blue'
    }
    self.fig = pc.Plot(self.data, labels, **kwargs)
def draw_map(out_path, dataset, metric, ts, format, dots, file_formats):
    tm = datetime.fromtimestamp(ts, timezone.utc)
    decorations = format not in ['bare', 'overlay']
    basemaps = format not in ['overlay']
    plt = plot.Plot(metric, tm, decorations=decorations, basemaps=basemaps)

    if metric == 'mufd':
        plt.scale_mufd()
    elif metric == 'fof2':
        plt.scale_fof2()
    else:
        plt.scale_generic()

    zi = dataset['/maps/' + metric][:]
    plt.draw_contour(zi)

    dotjson, dot_df = None, None
    if dots == 'curr':
        dotjson = str(dataset['/stationdata/curr'][...])
    elif dots == 'pred':
        dotjson = str(dataset['/stationdata/pred'][...])

    if dotjson is not None:
        dot_df = pd.read_json(dotjson)
        dot_df = filter_data(dot_df, metric, ts)
        plt.draw_dots(dot_df, metric)

    if decorations:
        plt.draw_title(
            metric,
            'eSFI: %.1f, eSSN: %.1f' % (dataset['/essn/sfi'][...],
                                        dataset['/essn/ssn'][...]))

    if 'svg' in file_formats:
        plt.write(out_path + '.svg')
        subprocess.run(
            ['/usr/local/bin/svgo', '--multipass', out_path + '.svg'],
            check=True)
    if 'png' in file_formats:
        plt.write(out_path + '.png')
    if 'jpg' in file_formats:
        plt.write(out_path + '.jpg')
    if 'station_json' in file_formats and dotjson is not None:
        with open(out_path + '_station.json', 'w') as f:
            dot_df.to_json(f, orient='records')
def __init__(self):
    # Config parameters
    self.EDGE_SIZE = 500
    self.NUM_POINTS = 200
    self.LOOKAHEAD = 1
    self.STOPPING_LOOKBACK = 10
    self.LINE_OPACITY = .03
    self.PADDING = 10
    self.RANDOM_INTERVAL = None
    self.CACHE_REFRESH = 10
    self.PLOT_INTERVAL = 50
    self.LAMBDA = 0

    self.original_img = get_image('/Users/delbalso/Downloads/dave.jpg')
    self.original_weights = get_image('/Users/delbalso/Downloads/dave-mask.jpg')
    self.points = self.config_circle()

    # Precompute the pixels along the line between every pair of points.
    self.ref_lines = {}
    for a, b in list(itertools.combinations(range(self.NUM_POINTS), 2)):
        if a > b:
            a, b = b, a
        self.ref_lines[a, b] = list(
            bresenham(self.points[a][0], self.points[a][1],
                      self.points[b][0], self.points[b][1]))

    # Set up main images
    self.img = normalize_image(
        (self.original_img - self.original_img.min()) /
        self.original_img.max() * 255, self.EDGE_SIZE)
    self.weights = normalize_image(self.original_weights, self.EDGE_SIZE)
    self.p = plot.Plot(self)

    # Set up derivative images
    # Raster is the image we're drawing to simulate thread
    self.raster = np.zeros((self.EDGE_SIZE, self.EDGE_SIZE)) + 255
    assert self.raster.shape == self.img.shape
    self.l1_errors = []
    self.l2_errors = []
    self.loss_delta = []
    self.update_diff()

    # start with a random point
    self.points_log = [randint(0, self.NUM_POINTS - 1)]
    self.process_start_time = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
    self.wheelOptimizer = WheelOptimizer(self)
def run_net_1(training_data, test_data):
    """Runs net 1."""
    n_input, n_hidden, n_out = 784, 30, 10
    epochs, mini_batch_size, eta = 30, 10, 3.0
    net = network_test_1.NetworkTest1([n_input, n_hidden, n_out])
    net.stochastic_gradient_descent(training_data, epochs, mini_batch_size,
                                    eta, test_data=test_data, stdout=True)
    x, y = net.get_accuracy_per_epoch()
    plt = plot.Plot(x, 'Epoch', y, 'Accuracy (%)',
                    'Net 1 Accuracy', 'net_1_accuracy')
    plt.plot()
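# A possible driver for the two experiments above. The data loader is a
# placeholder: `load_data` is a hypothetical callable returning the
# MNIST-style (training_data, test_data) pair that the real project supplies.
def run_all(load_data):
    training_data, test_data = load_data()
    run_net_1(training_data, test_data)
    run_net_2(training_data, test_data,
              monitor_evaluation_accuracy=True,
              monitor_evaluation_cost=True)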
def redraw(self, agg):
    self.canvas.Destroy()
    self.toolbar.Destroy()
    plt = plot.Plot()
    kind = self.frame.plot_type.GetItemLabel(
        self.frame.plot_type.GetSelection())
    errkind = self.frame.err_type.GetItemLabel(
        self.frame.err_type.GetSelection())
    plt.plot(agg, kind=kind, errkind=errkind.lower())
    self.canvas = FigureCanvas(self, -1, plt.fig)
    self.toolbar = NavigationToolbar(self.canvas)
    self.sizer.Add(self.toolbar, 0, wx.EXPAND)
    self.sizer.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)
    self.Layout()
def __init__(self, *args, **kwargs):
    wx.Panel.__init__(self, *args, **kwargs)
    self.subplots = None
    self.rows = None
    self.cols = None
    self.yerr = None
    self.values = None
    self.sizer = wx.BoxSizer(wx.VERTICAL)
    self.SetSizer(self.sizer)
    plt = plot.Plot()
    self.canvas = FigureCanvas(self, -1, plt.fig)
    self.toolbar = NavigationToolbar(self.canvas)
    self.sizer.Add(self.toolbar, 0, wx.EXPAND)
    self.sizer.Add(self.canvas, 1, wx.GROW)
    self.Fit()
def do_simulation(self, arg):
    ticker, startdate = arg.split()
    calc = stk.StockCalcIndex(self.stk_data_coll)
    sd = stk.StockData()
    sd.load(ticker, startdate)

    port = des.DecisionCollection(ticker, 50000)
    decision = des.DecisionSimpleSMA(ticker, (sd.Cs, sd.dates), port)
    decision.looper()
    print(ticker, ":", str(port))

    port2 = des.DecisionCollection(ticker, 50000)
    decision2 = des.DecisionSimpleStopSMA(
        ticker, (sd.Cs, sd.dates), port2, risk_factor=0.01)
    decision2.looper()
    print(ticker, ":", str(port2))
    port2.print_all()

    a_plot = plot.Plot(plot.PlotCell((sd.Cs, sd.dates)))
    a_plot.addSimple(
        plot.PlotCell(calc.sma((sd.Cs, sd.dates), 200), overlay=True))
    a_plot.addSimple(
        plot.PlotCell(calc.sma((sd.Cs, sd.dates), 50), overlay=True))
    a_plot.addSimple(
        plot.PlotCell(calc.llv((sd.Cs, sd.dates), 100), overlay=True))
    a_plot.addSimple(
        plot.PlotCell(port2.get_enter_plot_cell(), overlay=True, color='go'))
    a_plot.addSimple(
        plot.PlotCell(port2.get_leave_plot_cell(), overlay=True, color='ro'))
    a_plot.addSimple(plot.PlotCell(port2.get_value_plot_cell()))
    a_plot.plot()
def plot(self, refFlag=True, title='', xyPath=[]):
    """
    Plot solution
    @param refFlag set to True if reference solution, False for specimen
    @param title add title to plot
    @param xyPath show Newton convergence
    """
    import plot
    data = self.refDic
    if not refFlag:
        data = self.spcDic
    root = Tk()
    bxsize = self.boxsize()
    plot.Plot(root, data['grid'], width=800, height=800,
              title=title, boxsize=bxsize).draw(
                  data['rho'], data['the'], xyPath=xyPath)
    root.mainloop()
def make_plot(settings, config):
    basename = 'plot{}'.format(hashargs(settings))
    name = os.path.join(config['plotdir'], basename).replace('\\', '/')

    # Try to get the plot from the cache.
    if (not config['debug'] and config['cachedir']
            and os.path.isfile(name + '.png')):
        return [dict([(e, name + '.' + e) for e in ['png', 'svg', 'pdf']]),
                None]

    # Lock long-running plot creation.
    with plot_lock:
        valid, errors = validate_settings(settings)
        if not valid:
            return [None, errors]
        p = plot.Plot(config, **settings)
        return [p.save(name), None]
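# make_plot relies on two module-level helpers that are not shown above; a
# minimal sketch of what they might look like (both are assumptions): a
# process-wide lock so only one plot is rendered at a time, and a stable hash
# of the settings dict used as the cache key.
import hashlib
import json
import threading

plot_lock = threading.Lock()

def hashargs(settings):
    # JSON with sorted keys gives a stable string for equal settings dicts.
    payload = json.dumps(settings, sort_keys=True, default=str)
    return hashlib.sha1(payload.encode('utf-8')).hexdigest()[:12]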
def __init__(self, max_count, *layers):
    self.count = 0  # count to change input
    self.max_count = max_count
    self.image_idx = 0
    self.training_data = None
    self.test_data = None
    self.currentNumber = None
    self.dictOfAttributeAdjustments = {}
    self.count_to_five = 0
    self.middle_layers_idx = []
    self.input = []
    self.neurons = [[]]
    self.layers = layers
    self.image_dir = None
    self.label_dir = None
    self.plot_reference = plot.Plot()
def do_plot_collection(self, arg):
    calc = stk.StockCalcIndex(self.stk_data_coll)
    sd = stk.StockData()
    ticker, startdate = arg.split()
    sd.load(ticker, startdate)
    a_plot = plot.Plot(plot.PlotCell((sd.Cs, sd.dates)))
    a_plot.addSimple(
        plot.PlotCell(calc.sma((sd.Cs, sd.dates), 200), overlay=True))
    a_plot.addSimple(
        plot.PlotCell(calc.sma((sd.Cs, sd.dates), 50), overlay=True))
    a_plot.addSimple(
        plot.PlotCell(calc.llv((sd.Cs, sd.dates), 100), overlay=True))
    a_plot.addSimple(plot.PlotCell(calc.sma((sd.Vs, sd.dates), 20)))
    a_plot.addSimple(plot.PlotCell(calc.obv((sd.Cs, sd.Vs, sd.dates))))
    a_plot.addSimple(plot.PlotCell(calc.correlation_adj((sd.Cs, sd.dates))))
    a_plot.plot()
def do_plot_ticker_indexes(self, arg):
    calc = stk.StockCalcIndex(self.stk_data_coll)
    sd = stk.StockData()
    ticker, indexes, startdate = arg.split()
    indexes = indexes.split(',', 1)
    sd.load(ticker, startdate)
    a_plot = plot.Plot(plot.PlotCell((sd.Cs, sd.dates)))
    a_plot.addSimple(
        plot.PlotCell(calc.sma((sd.Cs, sd.dates), 200), overlay=True))
    a_plot.addSimple(
        plot.PlotCell(calc.sma((sd.Cs, sd.dates), 50), overlay=True))
    for index in indexes:
        p = plot.PlotCellIndex(index)
        p.truncate(startdate)
        a_plot.addSimple(plot.PlotCell((p.data, p.dates)))
        a_plot.addSimple(plot.PlotCell(calc.sma((p.data, p.dates), 20)))
        a_plot.addSimple(
            plot.PlotCell(calc.sma((p.data, p.dates), 50), overlay=True))
    a_plot.plot()
def main_pi():
    # This code is obsolete.
    # It needs to be changed to use functions from laser.py if the actual
    # laser tower is used.
    my_plot = plot.Plot(name='main plot', range=[0, 10])
    laser_p = [0, 0, 180]
    with camera.init_picamera() as cam:
        input()
        motor.start_motor()
        temp = 0
        while True:
            pic = camera.take_one_picture_pi(cam)
            ang = -motor.get_angle_motor()
            x, y = camera.find_red_dot(pic)
            if ang > temp:
                break
            floor_matrix = Matrix(angles=[0, 0, ang])
            cam_matrix = Matrix(pos=laser_p, angles=[0, 0, 0.26])
            result_matrix = mult([floor_matrix, cam_matrix])
            p2 = result_matrix.get_pos()
            v2 = result_matrix.get_vector_in_referential([0, 1, 0])
            p1, v1 = camera.get_red_dot_point_vector_in_world(ang, x, y)
            m, l = intersect(p1, v1, p2, v2)
            point = m + [l]
            my_plot.add_point(point)
            temp = ang
        motor.restart_motor()
    input()
    my_plot.close()
y_train = np_utils.to_categorical(y_train, num_classes)
y_test = np_utils.to_categorical(y_test, num_classes)

model = Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', input_shape=input_shape))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                    epochs=EPOCHS, batch_size=BATCH_SIZE)
print(history.history.keys())

# Use a distinct name so the `plot` module is not shadowed by the instance.
history_plot = plot.Plot(history)
history_plot.accuracy()
history_plot.loss()