def setUp(self):
    ddiirr = os.path.dirname(__file__)
    os.chdir(os.path.realpath(ddiirr))
    os.chdir(os.path.realpath('../'))

    self.inputtype = 'IB_CSV'
    indir = 'data/'
    # f2 = 'ActivityStatement.20190313_PL.html'
    jf = JournalFiles(indir=indir, infile=self.f1, theDate=self.theDate,
                      inputType='IB_CSV', mydevel=False)
    jf.inputType = 'IB_CSV'
    # statement = Statement_IBActivity(jf)
    # df = statement.getTrades_IBActivity(jf.inpathfile)
    ibs = IbStatement(db=self.testdb)
    ibdb = StatementDB(self.testdb)
    ibdb.reinitializeTradeTables()
    ibs.openIBStatementCSV(jf.inpathfile)
    df2 = ibdb.getStatement(self.theDate)
    if df2.empty:
        sdate = self.theDate.strftime('%Y-%m-%d')
        msg = f'In test_dailycontrol.setup: Error: found no trades in db for {sdate}'
        self.assertTrue(not df2.empty, msg)

    tu = DefineTrades(jf.inputType)
    self.df, ldf = tu.processDBTrades(df2)
    sc = SumControl()
    lf = LayoutForms(sc, jf, self.df)
    lf.runTtoSummaries(ldf)
    self.ts = lf.ts
    ibdb.addTradeSummariesSA(self.ts, ldf)
@classmethod
def setUpClass(cls):
    '''
    Open up a bunch of statements and add them to a test database for testing stuff.
    TODO: Develop the randomtradegenerator to write trades to the db for more generic testing
    '''
    settings = QSettings('zero_substance', 'structjour')
    for i, name in enumerate(cls.infiles):
        name = os.path.join(cls.datadir, name)
        x, cls.inputType = getStatementType(name)
        print(cls.inputType)
        if cls.inputType == 'DAS':
            ds = DasStatement(name, settings, cls.thedates[i])
            ds.getTrades(testFileLoc=cls.datadir, testdb=cls.testdb)
        elif cls.inputType == "IB_CSV":
            ibs = IbStatement(db=cls.testdb)
            ibs.openIBStatement(name)
        else:
            continue
            # self.assertTrue(4 == 5, "Unsupported file type in test_TheTradeObject")

        statement = StatementDB(db=cls.testdb)
        df = statement.getStatement(cls.thedates[i])
        # self.assertFalse(df.empty, f"Found no trades in db on {daDate}")
        dtrade = DefineTrades(cls.inputType)
        dframe, ldf = dtrade.processDBTrades(df)
        tto = TheTradeObject(ldf[0], False, SumReqFields())
        cls.ttos.append(tto)
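# The class setup above exercises structjour's statement-ingestion pipeline end to end:
# getStatementType sniffs the file format, DasStatement or IbStatement writes its
# transactions to the test db, and StatementDB plus DefineTrades turn those transactions
# back into trades. Below is a minimal standalone sketch of that same pipeline. The import
# paths are assumptions based on the structjour package layout, and 'data/testdb.sqlite'
# is a placeholder database path -- adjust both to the local tree.
import os

from PyQt5.QtCore import QSettings

from structjour.definetrades import DefineTrades
from structjour.statements.dasstatement import DasStatement       # assumed module path
from structjour.statements.findfiles import getStatementType      # assumed module path
from structjour.statements.ibstatement import IbStatement         # assumed module path
from structjour.statements.ibstatementdb import StatementDB       # assumed module path


def loadOneStatement(fname, daDate, testdb='data/testdb.sqlite'):
    '''Sketch: detect the statement type, load it into testdb, return the day's trades.'''
    settings = QSettings('zero_substance', 'structjour')
    x, inputType = getStatementType(fname)
    if inputType == 'DAS':
        ds = DasStatement(fname, settings, daDate)
        ds.getTrades(testFileLoc=os.path.dirname(fname), testdb=testdb)
    elif inputType == 'IB_CSV':
        ibs = IbStatement(db=testdb)
        ibs.openIBStatement(fname)
    else:
        raise ValueError(f'Unsupported file type: {inputType}')

    # Read the stored transactions back and condense them into one row per trade
    df = StatementDB(db=testdb).getStatement(daDate)
    dframe, ldf = DefineTrades(inputType).processDBTrades(df)
    return dframe, ldf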
def test_addTradeSummariesSA(self):
    '''
    Tests addTradeSummaries. The method requires trades are already in the database. We
    achieve that with openStuff. For this test, I load everything (openStuff) and run
    addTradeSummaries on all covered days. It's slow. Could be partially sqlite but all
    the APL BAPL stuff is probably the main crawler. In practice, this will add daily or
    monthly statements, and in running the program there is no way to run the trade
    summaries in mass. It's designed to load up a single day. It's day-trader centric.
    Let it stay slow for now.
    '''
    ibdb = StatementDB()
    self.clearTables()
    ibs, x = self.openStuff()
    # ibdb.getUncoveredDays
    tcrud = TradeCrud()
    covered = tcrud.getCoveredDays()

    for count, day in enumerate(covered):
        df = ibdb.getStatement(day)
        if not df.empty:
            tu = DefineTrades("DB")
            dframe, ldf = tu.processDBTrades(df)
            tradeSummaries, ts, entries, initialImageNames = runSummaries(ldf)
            ibdb.addTradeSummariesSA(ts, ldf)
            summaries = ibdb.getTradeSumByDateSA(day)
            for summary in summaries:
                entryTrades = ibdb.getEntryTradesSA(summary.id)
                self.assertGreater(len(entryTrades), 0)
            break   # Use this to just test addTradeSummaries once
    bu = Backup()
    bu.restore()
def test_findTradeSummary(self):
    '''
    Test findTradeSummary, a helper method for addTradeSummaries and updateTradeSummaries.
    Note that one of those needs to have run and succeeded in order to test this method.
    '''
    infile = "data/flex.369463.ActivityFlexMonth.20191008.20191106.csv"
    theDate = pd.Timestamp('2019-10-16')

    # Create these three objects
    ibs = IbStatement(db=self.db)
    ibdb = StatementDB(self.db)
    ibdb.reinitializeTradeTables()
    trades = DefineTrades("DB")

    # This call loads the statement into the db
    ibs.openIBStatementCSV(infile)

    # Here is an example of processing a single day of trades (3 calls)
    # This gets a collection of trades from a single day that can become a trade_sum entry
    df = ibdb.getStatement(theDate)

    # The following method and function process the statement transactions into a collection
    # of trades where each trade is a single row representing multiple transactions
    dframe, ldf = trades.processDBTrades(df)
    tradeSummaries, ts, entries, initialImageNames = runSummaries(ldf)

    ibdb.addTradeSummaries(ts, ldf)

    # The test database trades_sum should now contain only the trades from theDate,
    # one entry per trade
    for i, trade in enumerate(tradeSummaries):
        x = ibdb.findTradeSummary(theDate, trade['Start'].unique()[0])
        self.assertEqual(trade['Name'].unique()[0], x[1])
@classmethod
def setUpClass(cls):
    bu = Backup()
    bu.backup()
    if ModelBase.session:
        ModelBase.session.rollback()

    ddiirr = os.path.dirname(__file__)
    os.chdir(os.path.realpath(ddiirr + '/../'))
    outdir = 'test/out'
    cls.outdir = os.path.realpath(outdir)
    cls.db = 'data/testdb.sqlite'
    cls.db = os.path.realpath(cls.db)
    if os.path.exists(cls.db):
        clearTables(cls.db)

    cls.rtg = RTG(db=cls.db)
    # cls.dates = ['20200203 09:30', '20200204 07:30', '20200205 09:35', '20200206 11:40', '20200207 10:39']
    cls.dates = ['20200207 10:39']
    cls.infiles = cls.rtg.saveSomeTestFiles(cls.dates, cls.outdir, strict=True, overwrite=True)

    settings = QSettings('zero_substance', 'structjour')
    for i, name in enumerate(cls.infiles):
        name = os.path.join(cls.outdir, name)
        x, cls.inputType = getStatementType(name)
        # print(cls.inputType)
        if cls.inputType == 'DAS':
            ds = DasStatement(name, settings, cls.dates[i])
            ds.getTrades(testFileLoc=cls.outdir, testdb=cls.db)
        elif cls.inputType == "IB_CSV":
            ibs = IbStatement(db=cls.db)
            ibs.openIBStatement(name)
        else:
            continue
            # self.assertTrue(4 == 5, "Unsupported file type in test_TheTradeObject")

        statement = StatementDB(db=cls.db)
        df = statement.getStatement(cls.dates[i])
        # self.assertFalse(df.empty, f"Found no trades in db on {daDate}")
        dtrade = DefineTrades(cls.inputType)
        dframe, ldf = dtrade.processDBTrades(df)
        # tto = TheTradeObject(ldf[0], False, SumReqFields())
        jf = JournalFiles(indir=cls.outdir, outdir=outdir, theDate=cls.dates[i], infile=name)
        cls.sc = SumControl()
        lf = LayoutForms(cls.sc, jf, dframe)
        lf.runTtoSummaries(ldf)

        cls.jfs.append(jf)
        cls.dframes.append(dframe)
        # cls.ttos.append(tto)
        cls.ldfs.append(ldf)
        cls.lfs.append(lf)
def test_addFinReqCol(self):
    '''
    Test the method journal.definetrades.DefineTrades.addFinReqCol
    '''
    rc = self.rc
    frc = FinReqCol()

    df = pd.DataFrame(np.random.randint(0, 1000, size=(10, len(rc.columns))),
                      columns=rc.columns)
    dtrades = DefineTrades()
    df = dtrades.addFinReqCol(df)
    for x in frc.columns:
        self.assertIn(x, df.columns)
    self.assertGreaterEqual(len(df.columns), len(frc.columns))
def loadTradesFromDB(self, theDate=None):
    '''
    User tweak: if the DAS or IB import radio button is checked when the load button is
    clicked, change the selected radio to useDatabase and return. Update will show whether
    there are any DB trades to load.
    '''
    if self.sc.ui.dasImport.isChecked() or self.sc.ui.ibImport.isChecked():
        self.sc.ui.useDatabase.setChecked(True)
        self.sc.dbDefault(True)
        return 1

    ibdb = StatementDB()
    df = ibdb.getStatement(theDate)
    tu = DefineTrades('DB')
    self.df, ldf = tu.processDBTrades(df)
    ts, self.entries = ibdb.getTradeSummariesSA(theDate)
    if not ts:
        logging.info(f"No user data has been saved for {theDate.strftime('%A %B %d')}.")
        return None
    self.ts = setTradeSummaryHeaders(ts)
    logging.info('load up the trade names now')
    tradeSummaries = []
    self.sc.ui.tradeList.clear()
    for key in self.ts:
        self.sc.ui.tradeList.addItem(key)
        tradeSummaries.append(self.ts[key])

    # Load the daily note
    dailyNoteModel = DailyNotesCrud(theDate)
    note = dailyNoteModel.getNote()
    self.dailyNoteModel = dailyNoteModel
    if not note:
        note = ""
    self.sc.ui.dailyNote.setText(note)

    inf = self.sc.ui.infileEdit.text()
    windowTitle = f"{self.sc.baseWindowTitle}: {inf}: User Data Loaded"
    self.sc.setWindowTitle(windowTitle)

    # In prep to do the mistake summary and excel export, return the list it uses now.
    # It might be good to use the dict self.ts instead.
    return tradeSummaries
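# loadTradesFromDB has three distinct returns: 1 after flipping the radio button to
# useDatabase (nothing is loaded yet), None when the db has no saved user data for theDate,
# and the list of trade summaries on success. A caller has to branch on all three. A
# minimal sketch, where lf is any object exposing loadTradesFromDB and reloadDay is a
# hypothetical wrapper name for illustration:
import logging

import pandas as pd


def reloadDay(lf, theDate: pd.Timestamp):
    '''Sketch: call loadTradesFromDB and branch on its three possible returns.'''
    result = lf.loadTradesFromDB(theDate)
    if result == 1:
        logging.info('Input switched to useDatabase; trigger the load again.')
    elif result is None:
        logging.info('No saved user data for %s.', theDate.strftime('%A %B %d'))
    else:
        # result is the list of trade summaries, one entry per trade
        logging.info('Loaded %d trade summaries.', len(result))
    return result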
def runDBInput(self, daDate, jf):
    '''
    Get the trades from daDate in the DB and process the trades
    '''
    self.statement = StatementDB()
    daDate = qtime2pd(daDate)

    df = self.statement.getStatement(daDate)
    if df.empty:
        return False

    tu = DefineTrades(self.inputtype)
    dframe, ldf = tu.processDBTrades(df)

    lf = LayoutForms(self.sc, jf, dframe)
    lf.pickleitnow()
    lf.runTtoSummaries(ldf)
    self.statement.addTradeSummariesSA(lf.ts, ldf)
    return True
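# runDBInput returns False when the DB holds no statement for daDate, which lets a caller
# fall back to file-based input. A minimal caller sketch; loadDay and runInput are
# hypothetical names for illustration, not methods documented here:
import logging


def loadDay(self, daDate, jf):
    '''Sketch: prefer the DB, fall back to file input when the DB has nothing.'''
    if self.runDBInput(daDate, jf):
        return True
    logging.info('No trades in the DB for the requested date; trying file input instead.')
    return self.runInput(jf)   # hypothetical file-based fallback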
def exportExcel(self):
    '''
    Export to excel the trade tables, trade summaries, and daily forms
    '''
    # Create the space in dframe to add the summary information for each trade.
    # Then create the Workbook.
    settings = QSettings('zero_substance', 'structjour')
    val = settings.value('inputType')

    # Get a list of Trades from self.df
    tu = DefineTrades(val)
    ldf = tu.getTradeList(self.df)

    # Lay out a dataframe with space for charts
    imageNames = self.getImageNamesFromTS()
    imageLocation, dframe = self.layoutExcelData(self.df, ldf, imageNames)
    assert len(ldf) == len(imageLocation)

    # Create an openpyxl wb from the dataframe
    ls = LayoutSheet(self.topMargin, len(self.df))
    wb, ws, nt = ls.createWorkbook(dframe)

    tf = TradeFormat(wb)
    ls.styleTop(ws, len(nt.columns), tf)

    mstkAnchor = (len(dframe.columns) + 2, 1)
    mistake = MistakeSummary(numTrades=len(ldf), anchor=mstkAnchor)
    mistake.mstkSumStyle(ws, tf, mstkAnchor)
    mistake.dailySumStyle(ws, tf, mstkAnchor)

    # tradeSummaries = ls.runSummaries(imageLocation, ldf, self.jf, ws, tf)
    self.placeImagesAndSumStyles(imageLocation, ws, tf)
    self.populateXLDailyFormulas(imageLocation, ws)

    self.populateXLMistakeForm(mistake, ws, imageLocation)
    self.populateXLDailySummaryForm(mistake, ws, mstkAnchor)
    self.populateXLDailyNote(ws)

    self.saveXL(wb, self.jf)
    logging.info("Processing complete. Saved {}".format(self.jf.outpathfile))
@classmethod
def setUpClass(cls):
    bu = Backup()
    bu.backup()

    ddiirr = os.path.dirname(__file__)
    os.chdir(os.path.realpath(ddiirr))
    os.chdir(os.path.realpath('../'))
    cls.outdir = os.path.realpath(cls.outdir)
    cls.db = os.path.realpath(cls.db)
    if os.path.exists(cls.db):
        clearTables(cls.db)

    cls.rtg = RTG(db=cls.db, overnight=100)
    cls.theDate = '20200207 10:39'
    cls.infile = cls.rtg.saveSomeTestFiles([cls.theDate], cls.outdir, strict=True,
                                           overwrite=False)[0]

    settings = QSettings('zero_substance', 'structjour')
    # for i, name in enumerate(cls.infiles):
    name = os.path.join(cls.outdir, cls.infile)
    x, cls.inputType = getStatementType(name)
    if cls.inputType == 'DAS':
        ds = DasStatement(name, settings, cls.theDate)
        ds.getTrades(testFileLoc=cls.outdir, testdb=cls.db)
    elif cls.inputType == "IB_CSV":
        ibs = IbStatement(db=cls.db)
        ibs.openIBStatement(name)
    else:
        raise ValueError(f'Unsupported File type: {cls.inputType}')

    statement = StatementDB(db=cls.db)
    cls.df = statement.getStatement(cls.theDate)

    cls.dtrades = DefineTrades(cls.inputType)
    cls.rc = FinReqCol(cls.inputType)
    cls.trades = cls.dtrades.addFinReqCol(cls.df)
    rccolumns = cls.rc.columns.copy()
    rccolumns = cls.dtrades.appendCols(rccolumns)

    cls.trades = cls.trades[rccolumns]
    # Take a real copy so the sort below works on an independent frame
    cls.trades = cls.trades.copy()
    cls.trades = cls.trades.sort_values([cls.rc.ticker, cls.rc.acct, cls.rc.date])
@classmethod
def setUpClass(cls):
    ddiirr = os.path.dirname(__file__)
    os.chdir(os.path.realpath(ddiirr + '/../'))
    outdir = 'test/out'
    cls.outdir = os.path.realpath(outdir)
    cls.db = 'data/testdb.sqlite'
    cls.db = os.path.realpath(cls.db)
    if os.path.exists(cls.db):
        clearTables(cls.db)

    cls.rtg = RTG(db=cls.db)
    # cls.dates = ['20200203 09:30', '20200204 07:30', '20200205 09:35', '20200206 11:40', '20200207 10:39']
    cls.theDate = '20200207 10:39'
    cls.infile = cls.rtg.saveSomeTestFiles([cls.theDate], cls.outdir)[0]

    settings = QSettings('zero_substance', 'structjour')
    # for i, name in enumerate(cls.infiles):
    name = os.path.join(cls.outdir, cls.infile)
    x, inputType = getStatementType(name)
    if inputType == 'DAS':
        ds = DasStatement(name, settings, cls.theDate)
        ds.getTrades(testFileLoc=cls.outdir, testdb=cls.db)
    elif inputType == "IB_CSV":
        ibs = IbStatement(db=cls.db)
        ibs.openIBStatement(name)
    else:
        raise ValueError(f'Unsupported File type: {inputType}')

    statement = StatementDB(db=cls.db)
    df = statement.getStatement(cls.theDate)

    dtrade = DefineTrades(inputType)
    dframe, cls.ldf = dtrade.processDBTrades(df)

    cls.jf = JournalFiles(indir=cls.outdir, outdir=outdir, theDate=cls.theDate, infile=name)
    cls.sc = SumControl()
    lf = LayoutForms(cls.sc, cls.jf, dframe)
    lf.runTtoSummaries(cls.ldf)

    # Set up mistake note fields to test
    cls.note = 'Ground control to Major Tom'
    for i, key in enumerate(lf.ts):
        tto = lf.ts[key]
        notex = cls.note + str(i + 1)
        tto['MstkNote'] = notex

    # Set up a couple of images to add
    imdir = 'images/'
    img1 = os.path.join(imdir, 'fractal-art-fractals.jpg')
    img2 = os.path.join(imdir, 'psych.jpg')
    assert os.path.exists(img1)
    assert os.path.exists(img2)
    for key in lf.ts:
        lf.ts[key]['chart1'] = img1
        lf.ts[key]['chart2'] = img2

    t = ExportToExcel(lf.ts, cls.jf, df)
    imageNames = t.getImageNamesFromTS()
    cls.imageLocation, dframe = t.layoutExcelData(t.df, cls.ldf, imageNames)
    assert len(cls.ldf) == len(cls.imageLocation)

    # Create an openpyxl wb from the dataframe
    ls = LayoutSheet(t.topMargin, len(t.df))
    cls.wb, ws, nt = ls.createWorkbook(dframe)

    # Place both forms 2 cells to the right of the main table
    mstkAnchor = (len(dframe.columns) + 2, 1)
    cls.mistake = MistakeSummary(numTrades=len(cls.ldf), anchor=mstkAnchor)
    cls.t = t
    cls.a = mstkAnchor
def registerTrades(wb, theDate):
    ibdb = StatementDB()
    df = ibdb.getStatement(theDate)
    x = ibdb.getNumTicketsForDay(theDate)
    if not x[0] or not x[1]:
        msg = (f"Found {x[0]} tickets and {x[1]} trades. Nothing to process for the "
               f"date {theDate.strftime('%A %d, %Y')} in the DB")
        return msg
    tu = DefineTrades('DB')
    df, ldf = tu.processDBTrades(df)
    ts, entries = ibdb.getTradeSummaries(theDate)
    # self.ts = setTradeSummaryHeaders(ts)
    drc = DisReqCol(theDate)

    tlog = wb["Trade Log"]
    account = tlog['I1'].value

    startSearch = False
    ix = -2
    cols = drc.tfcolumns
    # Here is a list of the keys (cols.keys()) of the trade log DataFrame:
    # ['date', 'time', 'side', 'symb', 'entry1', 'acctbal', 'shares',
    #  'stoploss', 'targ', 'avgexit', 'pl', 'notes'], plus 'strat' and 'id', used below.
    # Not bothering with the abstraction (drc.name) because this is entirely ours.
    srf = SumReqFields()

    ids = list()
    for row in tlog.iter_rows():
        # Unconventional loop: ix indexes the trade summaries while row walks the sheet
        anchor = (1, row[0].row)
        keys = list(ts.keys())
        if startSearch is True:
            idcol = cols['id'][0][0] - 1
            if row[idcol].value:
                ids.append(row[idcol].value)
            if not row[0].value:
                startSearch = False
                ix = 0
        if row[0].value == 'Date':
            startSearch = True

        if ix >= 0 and ix < len(keys):
            while ix < len(keys):
                key = keys[ix]
                tdf = ts[key]
                if not gotAnyExits(tdf) or tdf['id'].unique()[0] in ids or (
                        tdf['Account'].unique()[0] != account):
                    if tdf['Account'].unique()[0] not in ['SIM', 'Live']:
                        raise ValueError('Programmer exception in search of weird data')
                    # Continue the inner loop -- keep row from the outer loop
                    ix += 1
                    continue

                # date
                cell = tcell(cols['date'][0], anchor=anchor)
                tlog[cell] = theDate

                # time
                cell = tcell(cols['time'][0], anchor=anchor)
                tim = tdf[srf.start].unique()[0]
                tlog[cell] = tim

                # side
                name = tdf[srf.name].unique()[0]
                if name:
                    cell = tcell(cols['side'][0], anchor=anchor)
                    tlog[cell] = name.split()[1]

                # symb
                cell = tcell(cols['symb'][0], anchor=anchor)
                tlog[cell] = name.split()[0]

                # entry1
                cell = tcell(cols['entry1'][0], anchor=anchor)
                tlog[cell] = tdf[srf.entry1].unique()[0]

                # Account Balance (setting an excel formula)
                cell = tcell(cols['acctbal'][0], anchor=anchor)
                formula = "=$M$3+SUM($N$7:$N{})".format(row[0].row - 1)
                tlog[cell] = formula

                # shares
                cell = tcell(cols['shares'][0], anchor=anchor)
                shares = tdf[srf.shares].unique()[0].split()[0]
                if len(shares) > 0:
                    try:
                        ishares = int(shares)
                    except ValueError:
                        ishares = 0
                    tlog[cell] = ishares

                # stoploss
                cell = tcell(cols['stoploss'][0], anchor=anchor)
                sl = tdf[srf.stoploss].unique()[0]
                if isinstance(sl, bytes):
                    sl = None
                tlog[cell] = sl

                # target
                cell = tcell(cols['targ'][0], anchor=anchor)
                target = tdf[srf.targ].unique()[0]
                if isinstance(target, bytes):
                    target = None
                tlog[cell] = target

                # avgExit
                cell = tcell(cols['avgexit'][0], anchor=anchor)
                tlog[cell] = getAvgExit(tdf)

                # P/L
                cell = tcell(cols['pl'][0], anchor=anchor)
                pl = tdf[srf.pl].unique()[0]
                # Don't know how these are sneaking through -- is it only on legacy files?
                if isinstance(pl, bytes):
                    pl = 0.0
                tlog[cell] = pl

                # Strategy
                cell = tcell(cols['strat'][0], anchor=anchor)
                strat = tdf[srf.strat].unique()[0]
                tlog[cell] = strat

                # notes (from the mistake note field)
                cell = tcell(cols['notes'][0], anchor=anchor)
                note = tdf[srf.mstknote].unique()[0]
                tlog[cell] = note

                # id (from the database)
                cell = tcell(cols['id'][0], anchor=anchor)
                id = tdf['id'].unique()[0]
                tlog[cell] = id

                # break the inner loop
                break

            ix += 1
            if ix >= len(keys):
                # done with the outer loop
                break
    return None
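# registerTrades expects an open workbook that already contains a "Trade Log" sheet, with a
# header row whose first cell reads 'Date' and the account name in cell I1. A minimal driver
# sketch; 'tradelog.xlsx' is a placeholder path, not a file shipped with this code:
import pandas as pd
from openpyxl import load_workbook

wb = load_workbook('tradelog.xlsx')
msg = registerTrades(wb, pd.Timestamp('2019-10-16'))
if msg:
    print(msg)                  # nothing to process for that date
else:
    wb.save('tradelog.xlsx')    # persist the newly registered rows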