def __init__(self, D):
    """Initialise pruning/training hyper-parameters on top of the base class."""
    super().__init__(D)
    # Only fully-connected layers participate in the structural updates.
    self.target_modules = [torch.nn.Linear]
    self.gain_rate = 0.01
    self.loss_rate = 0.05
    self.epochs = 100
    self.batch = 8
    # Exponentially smoothed metrics for evaluation vs. training progress.
    self.score = MovingAverage(momentum=0.90)
    self.train_score = MovingAverage(momentum=0.9)
def __init__(self, args):
    """Set up video capture, output layout, and processing helpers from CLI args.

    Raises:
        FileNotFoundError: if args['input'] does not point to an existing file.
    """
    input_path = args['input']
    if not os.path.exists(input_path):
        raise FileNotFoundError(f'{input_path} does not exist')

    # Open the capture and record how many frames it contains.
    self.cap = self.get_video(input_path)
    self.total_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))

    self.buffer_size = args['buffer']
    self.handle_size(args['size'])

    # Per-input output directory is derived from the input file name.
    filename = self.get_filename(input_path)
    output = args['output']
    self.output_dir = output + filename + '/'

    # Video readers/writers are registered lazily, keyed by stream name.
    self.input_videos = {}
    self.video_writers = {}

    # Decide which processing options the run will use.
    self.handle_option(args['option'], args['read'])

    # Helper objects: stabilisation, temporal averaging, frame editing, plotting.
    self.stabiliser = Stabiliser(self.total_frames, self.size)
    self.ma = MovingAverage(self.size, False, False, args['buffer'])
    self.fe = FrameEditor()
    self.plt = Plotter(self.output_dir)

    # MOG2 background subtractor (Gaussian mixture background model).
    self.backSub = cv2.createBackgroundSubtractorMOG2()
def train_evalnet(self, evalnet, X, Y):
    """Fit the evaluation network on (X, Y) using SGD, MSE, and a gradient penalty."""
    print("Evaluation network data size: %d" % X.size(0))
    dataset = torch.utils.data.TensorDataset(X, Y)
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=8, shuffle=True)

    epochs = 200
    lossf = torch.nn.MSELoss()
    optim = torch.optim.SGD(evalnet.parameters(), lr=0.01, momentum=0.9)
    # LR drops at epochs 80 and 160 (stepped once per epoch below).
    sched = torch.optim.lr_scheduler.MultiStepLR(optim, milestones=[80, 160])

    bar = tqdm.tqdm(range(epochs), ncols=80)
    avg = MovingAverage(momentum=0.95)
    for epoch in bar:
        for Xb, Yb in dataloader:
            Xb = self.create_normal(Xb)
            Yb = self.create_normal(Yb)
            Yh = evalnet(Xb).squeeze()
            loss = lossf(Yh, Yb)
            # Penalty is computed against the full X set, not only the batch.
            full_loss = loss + self.grad_penalty(evalnet, X, Xb)
            optim.zero_grad()
            full_loss.backward()
            optim.step()
        # NOTE(review): per-epoch placement inferred from statement order in the
        # mangled source (milestones [80, 160] only make sense per epoch).
        sched.step()
        avg.update(loss.item())
        bar.set_description("Fitting evalnet: %.3f" % avg.peek())
def __init__(self, data, fastType, fastWindow, fastGradWindow, slowType,
             slowWindow, slowGradWindow, spreadGradWindow):
    """Build fast/slow moving-average crossover state and today's statistics.

    Raises:
        Exception: if fastWindow > slowWindow — the fast MA must use the
            shorter window.
    """
    # Fail fast on inconsistent windows; the original checked this only AFTER
    # computing both moving averages and all derived series.
    if fastWindow > slowWindow:
        raise Exception('Parameter Error : fast MA is larger than slow MA')

    self.data = data.fillna(method='bfill', axis=1)
    self.x = self.data[self.data.columns[0]].tolist()
    self.dates = self.data.index.tolist()
    self.height = len(self.data)
    self.spreadGradWindow = spreadGradWindow
    self.TodayStatics = {}

    # Slow moving average.
    self.slowType = slowType
    self.slowWindow = slowWindow
    self.slowGradWindow = slowGradWindow
    self.slowMAObj = MovingAverage(self.data, self.slowType, slowWindow,
                                   gradWindow=slowGradWindow)
    self.slowMA = self.slowMAObj.data['MA'].tolist()

    # Fast moving average.
    self.fastType = fastType
    self.fastWindow = fastWindow
    self.fastGradWindow = fastGradWindow
    self.fastMAObj = MovingAverage(self.data, self.fastType, fastWindow,
                                   gradWindow=fastGradWindow)
    self.fastMA = self.fastMAObj.data['MA'].tolist()

    # Derived series.
    self.tradeSignal = self.CalcCrossOverScore()
    self.spread = self.CalcSpread()
    self.CalcSpreadGradient()  # sets all gradient data
    self.tradeDaySeries = self.CalcTradeDaySeries()

    # Snapshot of today's values.
    self.TodayStatics['todayMAslow'] = self.slowMA[-1]    # today's slow MA level
    self.TodayStatics['todayMAfast'] = self.fastMA[-1]    # today's fast MA level
    self.TodayStatics['todayCrossScore'] = self.tradeSignal[-1]
    self.TodayStatics['todaySpread'] = self.spread[-1]
    self.TodayStatics['todayTradeDays'] = self.tradeDaySeries[-1]

    self.preCrossOver = self.CalcLastCrossOver()
    self.CalcTradeDayZscores()      # Z-score of trading days
    self.CalcHiLoInCurrentTrade()   # price hi/lo during the current trade
def main():
    """Plot temperatures against their moving average and mark intersections."""
    dates = []
    temps = []
    with open(CSV_FILENAME, 'r') as csv_file:
        reader = csv.reader(csv_file, delimiter=',', quotechar='"')
        first_row = True
        for row in reader:
            # The first row is the csv table header, so discard it.
            if first_row:
                first_row = False
                continue
            dates.append(datetime.strptime(row[1], DATETIME_FORMAT))
            temps.append(int(row[5]))

    averages = list(MovingAverage(temps, MOVING_AVERAGE_WINDOW_SIZE))

    # The two graphs intersect wherever the sign of their difference changes.
    diffs = [temps[i] - averages[i] for i in range(len(temps))]
    intersections = np.argwhere(np.diff(np.sign(diffs))).flatten()

    i_dates = []
    i_temps = []
    for inter in intersections:
        # No bounds check needed: an intersection cannot be reported unless a
        # following point exists that causes it.
        vt0, vt1 = temps[inter], temps[inter + 1]
        slope_temp = vt1 - vt0
        va0, va1 = averages[inter], averages[inter + 1]
        slope_avg = va1 - va0

        # Fraction of the segment from point 0 to the intersection point.
        dist = abs(vt0 - va0) / abs(slope_temp - slope_avg)

        # Actual time and temperature at the intersection.
        time_delta = dates[inter + 1] - dates[inter]
        i_date = dates[inter] + time_delta * dist
        i_temp = vt0 + slope_temp * dist

        print("Průnik nalezen {0:%d. %m. %Y v %H:%M} o teplotě {1}°C".format(
            i_date, round(i_temp, 1)))
        i_dates.append(i_date)
        i_temps.append(i_temp)

    plt.plot(dates, temps, 'b-', dates, averages, 'g-', linewidth=1)
    plt.plot(i_dates, i_temps, 'r+', mew=2, ms=8)
    plt.ylabel('Teplota (°C)')
    plt.xlabel('Čas')
    plt.title('Průměrná templota v Chodově, Praha')
    plt.grid()
    plt.show()
df = df[start_time:end_time]
st.dataframe(df)

st.subheader('3.训练集划分')
number = st.number_input("请输入训练集所占比例:", min_value=0.5, max_value=0.9, value=0.8, step=0.1)
split = int(number * len(df))
st.write("选择的数据集大小:", len(df))
st.write("训练集大小:", split)
st.write("预测集大小:", len(df) - split)

st.subheader('4.选择预测目标')
# NOTE: `type` shadows the builtin; kept because later file content may use it.
type = st.selectbox('请选择预测目标:', ('Close', 'Turnover'))
st.line_chart(df[type])

st.subheader('5.选择机器学习算法')
genre = st.selectbox("请选择时间序列预测算法",
                     ('移动平均算法', '线性回归算法', '最近邻算法', 'AutoARIMA算法', 'LSTM算法'))

# Dispatch table replaces the if/elif chain; the selectbox guarantees that
# `genre` is always one of these five keys.
_FORECASTERS = {
    '移动平均算法': MovingAverage,
    '线性回归算法': LinearRegression,
    '最近邻算法': KNearestNeighbours,
    'AutoARIMA算法': AutoARIMA,
    'LSTM算法': LongShortTM,
}
_FORECASTERS[genre](df, type, split)
TakeProfit.__init__(self, data, tradeSignal) self.takeProfitRule = takeProfitRule self.CalcTakeProfitSeries() def CalcTakeProfitLevel(self, highWaterMark, tradeSignal): if tradeSignal == 1: level = highWaterMark * ( 1 - (self.takeProfitRule + 0.0) / 100 ) # Set the NEW take profit BELOW the market tpLevel = [level, np.nan] else: level = highWaterMark * ( 1 + (self.takeProfitRule + 0.0) / 100 ) # Set the OLD take profit ABOVE the market tpLevel = [np.nan, level] return tpLevel if __name__ == "__main__": data = pd.DataFrame([ 1.0455, 1.0405, 1.0489, 1.0607, 1.0532, 1.0574, 1.0554, 1.0582, 1.0613, 1.0643, 1.0601, 1.0713, 1.063, 1.0664, 1.0703, 1.0765, 1.0731, 1.0748, 1.0682, 1.0699, 1.0695, 1.0798, 1.0769, 1.0759, 1.0783, 1.075, 1.0683, 1.0698, 1.0655, 1.0643 ]) MAWindow = 5 ma = MovingAverage(data, 'EMA', 5) tradeSignal = ma.TradeSignal t = TakeProfitFixed(data, tradeSignal, 5) print t.takeProfitContainer
# Calibration/warp visualisation kept commented out for reference:
#fname = '.\camera_cal\calibration3.jpg'
#img = cv2.imread(fname)
#
#top_down, perspective_M = cc.corners_unwarp(img, cc.nx, cc.ny, mtx, dist)
#
#f, (ax1, ax2) = plt.subplots(1,2, figsize=(12, 7))
#f.tight_layout()
#ax1.imshow(img)
#ax1.set_title('Original Image')
#ax2.imshow(top_down)
#ax2.set_title('Undistorted and Warped Image')
##plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
#%%
global counter, left_rad_filt, right_rad_filt

# Cross-frame smoothing: curvature radii (window 8) and polynomial fits (window 6).
left_rad_filt = MovingAverage(8)
right_rad_filt = MovingAverage(8)
left_fit_filt = MovingAverage(6)
right_fit_filt = MovingAverage(6)
counter = 0


def pipeline_image(image, force_first_frame=False, plot_figure=False):
    """Process one frame; `counter` drives first-frame initialisation."""
    global counter
    if force_first_frame:
        counter = 1
    else:
        counter += 1
def test_MovingAverageHasLessElements(self):
    """A window of 2 over n values yields n - 1 averaged elements."""
    values = [1, 2, 3]
    averaged = MovingAverage().EvenlySpaced(values, 2)
    self.assertEqual(len(values), len(averaged) + 1)
def test_MovingDateAverageWith2DayEvenSpacingLength4(self):
    """A spacing of 3 over the sparse series yields a single averaged entry."""
    series = {1: 1, 3: 4, 5: 5}
    expected = {5: (1 + 4 + 4 + 5) / 4}
    self.assertEqual(expected, MovingAverage().ArbitrarySpacing(series, 3))
def test_MovingDateAverageWith2DayEvenSpacing(self):
    """A spacing of 2 over the sparse series yields two averaged entries."""
    series = {1: 1, 3: 4, 5: 5}
    expected = {3: 5 / 2, 5: 9 / 2}
    self.assertEqual(expected, MovingAverage().ArbitrarySpacing(series, 2))
def test_MovingAverageComputesCorrectlyForLength3(self):
    """A window of 3 over three values yields the single mean 2/3."""
    samples = [0, 2, 0]
    expected = [2 / 3]
    self.assertListEqual(expected, MovingAverage().EvenlySpaced(samples, 3))