def create_sort_data(self):
    """Build a deliberately out-of-order list of directives for sort tests.

    Returns:
      A list of directives spread over three dates and non-monotonic line
      numbers, so that sorting must reorder them by (date, lineno).
    """
    account = 'Assets:Bank:Checking'
    date1 = date(2014, 1, 15)
    date2 = date(2014, 1, 18)
    date3 = date(2014, 1, 20)
    entries = [
        data.Transaction(data.new_metadata(".", 1100), date3, FLAG,
                         None, "Next day", None, None, []),
        data.Close(data.new_metadata(".", 1000), date2, account),
        # Fixed: the amount literal contained a stray double-quote
        # ('200.00 USD"'), which is not a parsable amount string.
        data.Balance(data.new_metadata(".", 1001), date2, account,
                     A('200.00 USD'), None, None),
        data.Open(data.new_metadata(".", 1002), date2, account, 'USD', None),
        data.Transaction(data.new_metadata(".", 1009), date2, FLAG,
                         None, "Transaction 2", None, None, []),
        data.Transaction(data.new_metadata(".", 1008), date2, FLAG,
                         None, "Transaction 1", None, None, []),
        data.Transaction(data.new_metadata(".", 900), date1, FLAG,
                         None, "Previous day", None, None, []),
    ]
    # Give every transaction a single posting so they are valid enough
    # for the code under test.
    for entry in entries:
        if isinstance(entry, data.Transaction):
            data.create_simple_posting(entry, 'Assets:Bank:Checking',
                                       '123.45', 'USD')
    return entries
def match_postings(left_postings, rght_postings, keyfun):
    """Pair up postings from two sides by a key function.

    Only unambiguous 1:1 matches (exactly one posting per key on each side)
    are paired; matched postings are removed from the remainder maps.

    Args:
      left_postings: An iterable of (txn, posting) wrappers for the left side.
      rght_postings: Same, for the right side.
      keyfun: A function computing the matching key for a posting.
    Returns:
      A pair (matched, (left_remain, rght_remain)) where 'matched' is a list
      of synthetic transactions joining the paired postings, and the remains
      are the unmatched postings of each side.
    """
    left_map = index_postings(left_postings, keyfun)
    rght_map = index_postings(rght_postings, keyfun)
    common_keys = set(left_map) & set(rght_map)
    matched = []
    for units in common_keys:
        # Renamed locals: the originals shadowed the function parameters.
        left_group = left_map[units]
        rght_group = rght_map[units]
        # Skip ambiguous keys; only 1:1 matches are accepted.
        if len(left_group) != 1 or len(rght_group) != 1:
            continue
        del left_map[units]
        del rght_map[units]
        meta = {
            "left": left_group[0].txn.narration,
            "rght": rght_group[0].txn.narration,
        }
        txn = data.Transaction(meta, left_group[0].txn.date, '*', None, "",
                               None, None, [
                                   left_group[0].posting,
                                   rght_group[0].posting,
                               ])
        # Fixed: the synthetic transaction was built but never collected,
        # so 'matched' was always returned empty.
        matched.append(txn)
        # printer.print_entry(txn)
    left_remain = list(itertools.chain.from_iterable(left_map.values()))
    rght_remain = list(itertools.chain.from_iterable(rght_map.values()))
    return matched, (left_remain, rght_remain)
def addPaymentFor(self, txn, trans_date, trans_amt):
    """Accumulate a card payment leg onto the payment transaction scheduled
    for the statement period covering trans_date, creating it if needed."""
    # Transactions after the statement close roll into the next month.
    offset = self.paymentMonthOffset
    if trans_date.day > self.statementCloseDay:
        offset += 1
    payment_date = set_days(add_months(trans_date, offset), self.paymentDay)
    key = payment_date.isoformat()
    if key in self.payments:
        paymentTxn = self.payments[key]
    else:
        # Seed a new payment transaction with an amountless leg on the
        # card account; Beancount will balance it.
        paymentTxn = data.Transaction(
            meta=txn.meta,
            date=payment_date,
            flag=flags.FLAG_OKAY,
            payee=self.account + " Payment",
            narration="",
            tags=set(),
            links=set(),
            postings=[
                data.Posting(self.account, None, None, None, None, None)
            ])
    paymentTxn.postings.append(
        data.Posting(self.autoPayAccount,
                     amount.Amount(-1 * D(trans_amt), 'USD'),
                     None, None, None, None))
    self.payments[key] = paymentTxn
def extract(self, file, existing_entries=None):
    """Extract transactions (and optionally balances) from an OFX file."""
    self.initialize(file)
    counter = itertools.count()
    new_entries = []
    config = self.config
    self.read_file(file)
    for ot in self.get_transactions():
        metadata = data.new_metadata(file.name, next(counter))
        # metadata['file_account'] = self.file_account(None)
        # metadata['type'] = ot.type  # Optional metadata, useful for debugging
        # TODO
        # description fields:
        # - beancount: (payee, narration):
        #   payee is optional, narration is mandatory
        # - OFX: ot.payee tends to be the "main" description field, while
        #   ot.memo is optional
        # Build the transaction entry with a single main-account posting.
        entry = data.Transaction(metadata, ot.date.date(), self.FLAG,
                                 self.get_transaction_type_desc(ot),
                                 ot.payee, data.EMPTY_SET, data.EMPTY_SET, [])
        data.create_simple_posting(entry, config['main_account'], ot.amount,
                                   self.currency)
        # TODO: Commented out so smart_importer can fill this in
        # target_acct = self.get_target_acct(ot)
        # data.create_simple_posting(entry, target_acct, None, None)
        new_entries.append(entry)
    if self.includes_balances:
        new_entries += self.extract_balance(file, counter)
    return new_entries
def Deposits(self, dep):
    """Create deposit/withdrawal transactions from IBKR data."""
    # Without a configured target account there is nothing to book against;
    # control this from the config file.
    if len(self.depositAccount) == 0:
        return []
    depTransactions = []
    # Assumes deposits/withdrawals carry no fees: exactly two postings.
    for idx, row in dep.iterrows():
        currency = row['currency']
        units = amount.Amount(row['amount'], currency)
        postings = [
            data.Posting(self.depositAccount, -units, None, None, None, None),
            data.Posting(self.getLiquidityAccount(currency), units,
                         None, None, None, None),
        ]
        # NOTE: metadata filename string kept as-is (including spelling).
        meta = data.new_metadata('deposit/withdrawel', 0)
        depTransactions.append(
            data.Transaction(meta,  # could add div per share, ISIN, ...
                             row['reportDate'],
                             self.flag,
                             'self',  # payee
                             "deposit / withdrawal",
                             data.EMPTY_SET,
                             data.EMPTY_SET,
                             postings))
    return depTransactions
def deserialise(json_entry):
    """Parse JSON to a Beancount entry.

    Args:
        json_entry: The entry.

    Raises:
        KeyError: if one of the required entry fields is missing.
        FavaAPIException: if the type of the given entry is not supported.
    """
    entry_type = json_entry['type']
    if entry_type == 'Transaction':
        date = util.date.parse_date(json_entry['date'])[0]
        narration, tags, links = extract_tags_links(json_entry['narration'])
        postings = [deserialise_posting(pos)
                    for pos in json_entry['postings']]
        return data.Transaction(json_entry['meta'], date, json_entry['flag'],
                                json_entry['payee'], narration, tags, links,
                                postings)
    if entry_type == 'Balance':
        date = util.date.parse_date(json_entry['date'])[0]
        amount = Amount(parse_number(json_entry['number']),
                        json_entry['currency'])
        return data.Balance(json_entry['meta'], date, json_entry['account'],
                            amount, None, None)
    if entry_type == 'Note':
        date = util.date.parse_date(json_entry['date'])[0]
        # Double quotes would break the generated note syntax; strip them.
        comment = json_entry['comment'].replace('"', '')
        return data.Note(json_entry['meta'], date, json_entry['account'],
                         comment)
    raise FavaAPIException('Unsupported entry type.')
def entry_from_gl(self, entries: typing.List[Entry]) -> typing.Iterable:
    """Yield Beancount directives for a group of GL rows sharing one GL_ID.

    Balance-type groups yield (optionally) a Pad followed by a Balance per
    row; any other group accumulates postings and yields one Transaction.
    """
    first = entries[0]
    postings = []
    all_tags = set()
    all_meta = {'lineno': 0, 'filename': "", 'gl-id': first.gl_id}
    all_links = set()
    for entry in entries:
        self.new_accounts.add(entry.account)
        # The group's type is taken from its first row.
        if first.entry_type == "Balance":
            # NOTE(review): the year-2016 guard looks like a one-off rule
            # for opening balances of that migration year — confirm before
            # reusing for other years.
            if entry.amount and entry.date.year == 2016:
                # Pad the day before so the following Balance can assert.
                yield data.Pad(date=entry.date - datetime.timedelta(days=1),
                               account=entry.account,
                               source_account="Equity:OpeningBalances",
                               meta={
                                   'lineno': 0,
                                   'filename': '',
                                   'note': entry.narration,
                                   'gl-id': entry.gl_id
                               })
            yield data.Balance(date=entry.date,
                               amount=data.Amount(entry.amount,
                                                  entry.currency),
                               account=entry.account,
                               tolerance=None,
                               diff_amount=None,
                               meta={
                                   'lineno': 0,
                                   'filename': '',
                                   'note': entry.narration,
                                   'gl-id': entry.gl_id
                               })
        else:
            posting = data.Posting(entry.account,
                                   data.Amount(entry.amount, entry.currency),
                                   None,
                                   None,
                                   flag='*',
                                   meta=entry.meta)
            all_tags.update(entry.tags)
            all_links.update(entry.links)
            postings.append(posting)
            # Merge per-posting metadata into the transaction metadata.
            all_meta.update(posting.meta or {})
    if postings:
        # 'entry' here is the LAST row from the loop above: the group's
        # date/payee/narration are taken from it, not from 'first'.
        yield data.Transaction(meta=all_meta,
                               date=entry.date,
                               flag='*',
                               payee=entry.payee,
                               narration=entry.narration,
                               tags=all_tags,
                               links=all_links,
                               postings=postings)
def generate_transaction(
        meta,
        trans_date,
        trans_payee,
        trans_description,
        trans_account,
        trans_amount,
        trans_second_posting_account,
):
    """Build a two-posting EUR transaction.

    The second posting is left amountless so Beancount infers it from the
    first leg.
    """
    first_leg = data.Posting(trans_account,
                             amount.Amount(round(D(trans_amount), 2), 'EUR'),
                             None, None, None, None)
    second_leg = data.Posting(trans_second_posting_account,
                              None, None, None, None, None)
    return data.Transaction(
        meta=meta,
        date=trans_date,
        flag=flags.FLAG_OKAY,
        payee=trans_payee,
        narration=trans_description,
        tags=set(),
        links=set(),
        postings=[first_leg, second_leg],
    )
def extractRow(self, row, meta):
    """Turn one CSV row into a transaction against self.account."""
    trans_date = self.getDate(row)
    trans_desc = self.getDesc(row)
    trans_amt = self.getAmt(row)
    account = self.getAccount(row)
    # The card account leg carries the negated amount.
    card_leg = data.Posting(self.account,
                            amount.Amount(-1 * D(trans_amt), 'USD'),
                            None, None, None, None)
    txn = data.Transaction(
        meta=meta,
        date=trans_date,
        flag=flags.FLAG_OKAY,
        payee=trans_desc,
        narration=self.getMemo(row),
        tags=set(),
        links=set(),
        postings=[card_leg],
    )
    # Add the balancing leg only when a counter-account was matched.
    if account is not None:
        txn.postings.append(
            data.Posting(account,
                         amount.Amount(D(trans_amt), 'USD'),
                         None, None, None, None))
    return txn
def test_data_tuples_support_pickle(self):
    """A Transaction must survive a pickle round-trip unchanged."""
    original = data.Transaction(META, date(2014, 1, 15), FLAG, None,
                                "Some example narration",
                                data.EMPTY_SET, data.EMPTY_SET, [])
    restored = pickle.loads(pickle.dumps(original))
    self.assertEqual(original, restored)
def extract(self, file):
    """Extract VSCU savings transactions from a CSV file.

    Positive amounts are debited to the savings account with an unknown
    (FIXME) source; negative amounts are booked the other way around.
    """
    entries = []
    # Fixed: the open file handle no longer shadows the 'file' parameter
    # (the importer's cache object), which made 'file.name' ambiguous.
    with open(file.name) as csvfile:
        for index, row in enumerate(csv.DictReader(csvfile)):
            trans_date = parse(row['date']).date()
            trans_desc = titlecase(row['name'].rstrip())
            trans_amt = row['amount']
            meta = data.new_metadata(file.name, index)
            txn = data.Transaction(meta=meta,
                                   date=trans_date,
                                   flag=flags.FLAG_OKAY,
                                   payee=trans_desc,
                                   narration="",
                                   tags=set(),
                                   links=set(),
                                   postings=[])
            if D(trans_amt) > 0:
                txn.postings.append(
                    data.Posting('Assets:VSCU:Savings',
                                 amount.Amount(D(trans_amt), 'USD'),
                                 None, None, None, None))
                txn.postings.append(
                    data.Posting('FIXME', None, None, None, None, None))
            else:
                txn.postings.append(
                    data.Posting('FIXME',
                                 amount.Amount(D(trans_amt), 'USD'),
                                 None, None, None, None))
                txn.postings.append(
                    data.Posting('Assets:VSCU:Savings',
                                 None, None, None, None, None))
            entries.append(txn)
    return entries
def test_insert_entry_align(tmpdir):
    """insert_entry must append a new transaction with amounts aligned to
    the configured currency column."""
    # NOTE(review): the triple-quoted literals in this test appear flattened
    # onto a single line (original newlines lost in extraction); restore
    # them from the upstream source before relying on the comparison below.
    file_content = dedent(""" 2016-02-26 * "Uncle Boons" "Eating out alone" Liabilities:US:Chase:Slate -24.84 USD Expenses:Food:Restaurant 24.84 USD """)
    samplefile = tmpdir.mkdir('fava_util_file3').join('example.beancount')
    samplefile.write(file_content)
    postings = [
        data.Posting('Liabilities:US:Chase:Slate',
                     amount.Amount(D('-10.00'), 'USD'),
                     None, None, None, None),
        data.Posting('Expenses:Food',
                     amount.Amount(D('10.00'), 'USD'),
                     None, None, None, None),
    ]
    transaction = data.Transaction(None, datetime.date(2016, 1, 1), '*',
                                   'new payee', 'narr', None, None, postings)
    # Align currencies at column 50 when rendering the inserted entry.
    fava_options = {
        'currency-column': 50,
    }
    insert_entry(transaction, [str(samplefile)], fava_options)
    assert samplefile.read() == dedent(""" 2016-02-26 * "Uncle Boons" "Eating out alone" Liabilities:US:Chase:Slate -24.84 USD Expenses:Food:Restaurant 24.84 USD 2016-01-01 * "new payee" "narr" Liabilities:US:Chase:Slate -10.00 USD Expenses:Food 10.00 USD """)
def extract(self, file, existing_entries):
    """Extract transactions from a semicolon-delimited statement export."""
    entries = []
    fieldnames = ['Date', 'Reference', 'PaidOut', 'PaidIn', 'ExchangeOut',
                  'ExchangeIn', 'Balance', 'Category', 'Notes']
    with StringIO(file.contents()) as csvfile:
        reader = csv.DictReader(csvfile, fieldnames, delimiter=';',
                                skipinitialspace=True)
        # The first row repeats the column names; skip it.
        next(reader)
        for row in reader:
            metakv = {
                'category': row['Category'].strip(),
            }
            exchangeIn = row['ExchangeIn'].strip()
            exchangeOut = row['ExchangeOut'].strip()
            # Record the pre-conversion amount(s) when present.
            if exchangeIn and exchangeOut:
                metakv['originalIn'] = exchangeIn
                metakv['originalOut'] = exchangeOut
            elif exchangeIn:
                metakv['original'] = exchangeIn
            elif exchangeOut:
                metakv['original'] = exchangeOut
            meta = data.new_metadata(file.name, 0, metakv)
            narration = (row['Reference'].strip() + ' '
                         + row['Notes'].strip()).strip()
            # Single posting with the net movement for this row.
            net = D(row['PaidIn'].strip()) - D(row['PaidOut'].strip())
            posting = data.Posting(self.account,
                                   amount.Amount(net, self.currency),
                                   None, None, None, None)
            entries.append(
                data.Transaction(meta,
                                 parse(row['Date'].strip()).date(),
                                 '*', '', narration,
                                 data.EMPTY_SET, data.EMPTY_SET,
                                 [posting]))
    return entries
def createSingle(self, payout, withholding, quantity, assetAccount, asset,
                 currency, date, priceLookup, description):
    """Build a single dividend transaction for one asset position."""
    narration = "Dividend for " + str(quantity) + " : " + description
    liquidityAccount = self.getLiquidityAccount(assetAccount, asset, currency)
    incomeAccount = self.getIncomeAccount(assetAccount, asset)
    price = priceLookup.fetchPrice(currency, date)
    # A zero-amount leg on the asset account ties the dividend to the
    # position; the liquidity leg carries the payout at the fetched price.
    postings = [
        data.Posting(assetAccount, amount.Amount(D(0), asset),
                     None, None, None, None),
        data.Posting(liquidityAccount, amount.Amount(payout, currency),
                     None, price, None, None),
    ]
    if withholding > 0:
        receivableAccount = self.getReceivableAccount(assetAccount, asset)
        postings.append(
            data.Posting(receivableAccount,
                         amount.Amount(withholding, currency),
                         None, None, None, None))
    # Income leg is left amountless; Beancount balances it.
    postings.append(data.Posting(incomeAccount, None, None, None, None, None))
    meta = data.new_metadata('dividend', 0)
    return data.Transaction(meta, date, '*', '', narration,
                            data.EMPTY_SET, data.EMPTY_SET, postings)
def extract(self, file):
    """Parse the CSV export and emit transactions plus a closing balance."""
    # Parse the csv file according to the configured format version.
    if self.file_format_version == 1:
        parsed = parse_csv_file_v1(file.name)
    elif self.file_format_version == 2:
        parsed = parse_csv_file_v2(file.name)
    else:
        raise IOError("Unknown file format.")
    (buchungstag, auftraggeber_empfaenger, buchungstext, verwendungszweck,
     betrag, kontostand, indices, endsaldo) = parsed
    # Create one transaction per booking row.
    entries = []
    for i, tag in enumerate(buchungstag):
        postings = self.guess_postings(auftraggeber_empfaenger[i],
                                       float(betrag[i]))
        meta = data.new_metadata(file.name, indices[i])
        entries.append(
            data.Transaction(meta, tag, self.flag,
                             auftraggeber_empfaenger[i],
                             verwendungszweck[i],
                             data.EMPTY_SET, data.EMPTY_SET, postings))
    # Closing balance, asserted on the day after the statement end.
    meta = data.new_metadata(file.name, endsaldo[2])
    entries.append(
        data.Balance(meta, endsaldo[0] + datetime.timedelta(days=1),
                     self.account,
                     amount.Amount(D(endsaldo[1]), self.currency),
                     None, None))
    return entries
def __txn_common(self, meta, date, acc_in, acc_out, units_common,
                 payee="", desc=""):
    """Return a transaction object for simple transactions."""
    self.logger.debug("Entering Function")
    self.logger.debug("Receiving account: %s", acc_in)
    self.logger.debug("Sending account: %s", acc_out)
    # Mirror-image legs: what one account receives, the other sends.
    postings = [
        data.Posting(acc_in, units_common, None, None, None, None),
        data.Posting(acc_out, -units_common, None, None, None, None),
    ]
    txn = data.Transaction(meta, date, self.FLAG, payee, desc,
                           data.EMPTY_SET, data.EMPTY_SET, postings)
    self.logger.debug('Transaction to be recorded: %s', str(txn))
    self.logger.debug("Leaving Function")
    return txn
def extract(file, account_name, flag, currency):
    """Extract a ledger (transactions plus optional balance) from OFX data.

    Args:
      file: The importer file wrapper whose contents hold the OFX document.
      account_name: The Beancount account to post against.
      flag: The transaction flag to use.
      currency: The expected statement currency (checked via assert).
    Returns:
      A date-sorted list of directives.
    """
    ofx = OfxParser.parse(strio(file.contents()))
    account = ofx.account
    statement = account.statement
    assert statement.currency.lower() == currency.lower(), (
        statement.currency + " != " + currency
    )
    ledger = []
    # Create one transaction per statement line.
    for transaction in statement.transactions:
        units = Amount(transaction.amount, currency)
        posting = data.Posting(account_name, units, None, None, None, None)
        ref = data.new_metadata(file.name, 0)
        ledger.append(
            data.Transaction(
                ref,
                transaction.date.date(),
                flag,
                titlecase(transaction.payee),
                transaction.memo,
                data.EMPTY_SET,
                data.EMPTY_SET,
                [posting],
            ))
    ledger = data.sorted(ledger)
    # Append the balance assertion when one can be derived.
    # Fixed: compare to None with 'is not' instead of '!=' (PEP 8).
    b = balance(file, account_name, currency, statement, ledger)
    if b is not None:
        ledger.append(b)
    return ledger
def extract(self, file, existing_entries=None):
    """Extract a CMB credit-card e-mail statement into directives.

    Parses the base64-encoded HTML payload of an .eml file and emits one
    Balance assertion (the statement's amount due, negated) followed by one
    transaction per billed line item.
    """
    # Open the CSV file and create directives.
    entries = []
    index = 0
    with open(file.name, 'rb') as f:
        eml = parser.BytesParser().parse(fp=f)
    # The first MIME part holds the base64-encoded HTML statement.
    b = base64.b64decode(eml.get_payload()[0].get_payload())
    d = BeautifulSoup(b, "lxml")
    # Statement period, e.g. "2020/01/01-2020/01/31(...)"; the end date is
    # used as the statement date.
    date_range = d.findAll(text=re.compile(
        '\d{4}\/\d{1,2}\/\d{1,2}-\d{4}\/\d{1,2}\/\d{1,2}'))[0]
    transaction_date = dateparse(
        date_range.split('-')[1].split('(')[0]).date()
    # The amount due is located relative to a fixed layout image; it is
    # negated because the card account carries a liability balance.
    balance = '-' + d.find(src="https://pbdw.ebank.cmbchina.com/"
                           "cbmresource/22/dyzd/jpkdyzd/xbgbdt/bqhkzz.jpg")\
        .parent.parent.find_next_sibling(
            'td').select('font')[0].text.replace('¥', '').replace(
                ',', '').strip()
    txn_balance = data.Balance(account=self.account_name,
                               amount=Amount(D(balance), 'CNY'),
                               meta=data.new_metadata(".", 1000),
                               tolerance=None,
                               diff_amount=None,
                               date=transaction_date)
    entries.append(txn_balance)
    # One table band per billed transaction.
    bands = d.select('#fixBand29 #loopBand2>table>tr')
    for band in bands:
        tds = band.select('td #fixBand15 table table td')
        if len(tds) == 0:
            continue
        # Trade date may be in either of two cells.
        trade_date = tds[1].text.strip()
        if trade_date == '':
            trade_date = tds[2].text.strip()
        # date = datetime.strptime(trade_date,'%m%d').replace(year=transaction_date.year).date()
        # The statement only gives month/day; infer the year, handling the
        # December-billed-in-January wraparound.
        date = datetime.strptime(trade_date, '%m%d')
        if date.month == 12 and transaction_date.month == 1:
            date = date.replace(year=transaction_date.year - 1).date()
        else:
            date = date.replace(year=transaction_date.year).date()
        # Description format: "payee-narration-parts...".
        full_descriptions = tds[3].text.strip().split('-')
        payee = full_descriptions[0]
        narration = '-'.join(full_descriptions[1:])
        real_currency = 'CNY'
        real_price = tds[4].text.replace('¥', '').replace('\xa0', '').strip()
        # print("Importing {} at {}".format(narration, date))
        flag = "*"
        # Charges are negative on the card account.
        # NOTE(review): this local 'amount' shadows the amount module for
        # the remainder of the loop body.
        amount = -Amount(D(real_price), real_currency)
        meta = data.new_metadata(file.name, index)
        txn = data.Transaction(
            meta, date, self.FLAG, payee, narration, data.EMPTY_SET,
            data.EMPTY_SET, [
                data.Posting(self.account_name, amount, None, None, None,
                             None),
            ])
        entries.append(txn)
    # Insert a final balance check.
    return entries
def Fee(self, fee):
    """Create fee transactions from IBKR data.

    Args:
      fee: A DataFrame of fee rows with 'currency', 'amount', 'description'
        and 'reportDate' columns.
    Returns:
      A list of fee transactions, one per row.
    """
    feeTransactions = []
    for idx, row in fee.iterrows():
        currency = row['currency']
        amount_ = amount.Amount(row['amount'], currency)
        text = row['description']
        # Fixed: use a raw string for the regex ('\w'/'\d' are invalid
        # escape sequences in a normal string literal).
        month = re.findall(r'\w{3} \d{4}', text)[0]
        # Make the postings, two for fees.
        postings = [
            data.Posting(self.getFeesAccount(currency), -amount_,
                         None, None, None, None),
            data.Posting(self.getLiquidityAccount(currency), amount_,
                         None, None, None, None),
        ]
        meta = data.new_metadata(__file__, 0, {})  # actually no metadata
        feeTransactions.append(
            data.Transaction(meta,
                             row['reportDate'],
                             self.flag,
                             'IB',  # payee
                             ' '.join(['Fee', currency, month]),
                             data.EMPTY_SET,
                             data.EMPTY_SET,
                             postings))
    return feeTransactions
def Interest(self, int_):
    """Create interest-payment transactions from IBKR data.

    Args:
      int_: A DataFrame of interest rows with 'currency', 'amount',
        'description' and 'reportDate' columns.
    Returns:
      A list of interest transactions, one per row.
    """
    intTransactions = []
    for idx, row in int_.iterrows():
        currency = row['currency']
        amount_ = amount.Amount(row['amount'], currency)
        text = row['description']
        # Fixed: use a raw string for the regex ('\w'/'\d' are invalid
        # escape sequences in a normal string literal).
        month = re.findall(r'\w{3}-\d{4}', text)[0]
        # Make the postings, two for interest payments.
        # Received and paid interests are booked on the same account.
        # NOTE(review): 'getInterestIncomeAcconut' is the method's actual
        # (misspelled) name elsewhere in this class; renaming it here would
        # break the call.
        postings = [
            data.Posting(self.getInterestIncomeAcconut(currency), -amount_,
                         None, None, None, None),
            data.Posting(self.getLiquidityAccount(currency), amount_,
                         None, None, None, None),
        ]
        meta = data.new_metadata('Interest', 0)
        intTransactions.append(
            data.Transaction(meta,  # could add div per share, ISIN, ...
                             row['reportDate'],
                             self.flag,
                             'IB',  # payee
                             ' '.join(['Interest ', currency, month]),
                             data.EMPTY_SET,
                             data.EMPTY_SET,
                             postings))
    return intTransactions
def extract(self, file):
    """Extract transactions from the CSV statement.

    The first six lines are preamble and skipped; the first data row is
    expected to be the 'Beginning balance' marker.
    """
    entries = []
    with open(file.name) as f:
        for index, row in enumerate(csv.DictReader(islice(f, 6, None))):
            if index == 0:
                if 'Beginning balance' in row['Description']:
                    continue
                else:
                    # Unexpected first row: log it but keep importing.
                    logging.error(
                        "Missing 'Beginning balance' in '{}'".format(
                            row['Description']))
            meta = data.new_metadata(file.name, index)
            trans_date = parse(row['Date']).date()
            trans_desc = titlecase(row['Description'])
            # Fixed: reuse the amount read from the row instead of keeping
            # an unused 'trans_amt' local next to a second row lookup.
            trans_amt = row['Amount']
            units = amount.Amount(D(trans_amt), self.currency)
            txn = data.Transaction(
                meta, trans_date, self.FLAG, trans_desc, None,
                data.EMPTY_SET, data.EMPTY_SET, [
                    data.Posting(self.account_root, units,
                                 None, None, None, None),
                    data.Posting(self.account2, -units,
                                 None, None, None, None),
                ])
            entries.append(txn)
    return entries
def test_insert_transaction(tmpdir):
    """insert_transaction must insert entries before the FAVA-INSERT-MARKER,
    rendering postings when present."""
    # NOTE(review): the triple-quoted literals in this test appear flattened
    # onto a single line (original newlines lost in extraction); restore
    # them from the upstream source before relying on the comparisons below.
    file_content = dedent(""" 2016-02-26 * "Uncle Boons" "Eating out alone" Liabilities:US:Chase:Slate -24.84 USD Expenses:Food:Restaurant 24.84 USD ; FAVA-INSERT-MARKER """)
    samplefile = tmpdir.mkdir('fava_util_file3').join('example.beancount')
    samplefile.write(file_content)
    # A transaction with no postings renders as a single header line.
    transaction = data.Transaction(None, datetime.date(2016, 1, 1), '*',
                                   'payee', 'narr', None, None, [])
    insert_transaction(transaction, [str(samplefile)])
    assert samplefile.read() == dedent(""" 2016-02-26 * "Uncle Boons" "Eating out alone" Liabilities:US:Chase:Slate -24.84 USD Expenses:Food:Restaurant 24.84 USD 2016-01-01 * "payee" "narr" ; FAVA-INSERT-MARKER """)
    # A second insertion with postings renders the full entry.
    postings = [
        data.Posting('Liabilities:US:Chase:Slate',
                     amount.Amount(D('-10.00'), 'USD'),
                     None, None, None, None),
        data.Posting('Expenses:Food',
                     amount.Amount(D('10.00'), 'USD'),
                     None, None, None, None),
    ]
    transaction = data.Transaction(None, datetime.date(2016, 1, 1), '*',
                                   'new payee', 'narr', None, None, postings)
    insert_transaction(transaction, [str(samplefile)])
    assert samplefile.read() == dedent(""" 2016-02-26 * "Uncle Boons" "Eating out alone" Liabilities:US:Chase:Slate -24.84 USD Expenses:Food:Restaurant 24.84 USD 2016-01-01 * "payee" "narr" 2016-01-01 * "new payee" "narr" Liabilities:US:Chase:Slate -10.00 USD Expenses:Food 10.00 USD ; FAVA-INSERT-MARKER """)
def test_group_entries_by_link(self):
    """Transactions sharing a link must be grouped under each link key."""
    entries, _, __ = loader.load_string(self.test_doc)
    # Strip meta/postings so equality only compares the grouped fields.
    entries = [entry._replace(meta=None, postings=None)
               for entry in entries
               if isinstance(entry, data.Transaction)]
    link_groups = basicops.group_entries_by_link(entries)
    date = datetime.date(2014, 5, 10)
    txn_b = data.Transaction(None, date, '*', None, 'B', None,
                             {'apple'}, None)
    txn_c = data.Transaction(None, date, '*', None, 'C', None,
                             {'banana'}, None)
    txn_d = data.Transaction(None, date, '*', None, 'D', None,
                             {'apple', 'banana'}, None)
    self.assertEqual({'apple': [txn_b, txn_d],
                      'banana': [txn_c, txn_d]},
                     link_groups)
def extract(self, f):
    """Extract transactions from a YNAB register CSV export."""
    entries = []
    # Fixed: the open handle no longer shadows the 'f' parameter.
    with open(f.name) as csvfile:
        for index, row in enumerate(csv.DictReader(csvfile)):
            account = removeWhitespace(row['\ufeffAccount'])
            date = parse(row['Date']).date()
            desc = titlecase(row['Payee'])
            # Fixed: the locals were swapped relative to the CSV columns
            # they read ('inflow' held the Outflow value and vice versa).
            # The computed amount is unchanged; only the names are honest.
            outflow = row['Outflow'][1:]
            inflow = row['Inflow'][1:]
            amt = amount.Amount(
                D('-' + outflow if outflow != '0.00' else inflow), 'USD')
            group = removeWhitespace(row['Category Group'])
            category = removeWhitespace(row['Category'])
            if desc.find("Transfer : ") != -1 and account != self.main_account:
                # Let other accounts handle transfers.
                to_account = removeWhitespace(
                    desc[11:].replace("Transfer : ", ""))
            elif group == "Inflow":
                to_account = "Equity:Opening-Balances"
            else:
                to_account = "Expenses:" + group + ":" + category
            meta = data.new_metadata(f.name, index)
            # Fixed: removed a dead Transaction construction that was
            # immediately overwritten by the one below.
            txn = data.Transaction(
                meta, date, flags.FLAG_OKAY, None, desc,
                data.EMPTY_SET, data.EMPTY_SET, [
                    data.Posting(account, amt, None, None, None, None),
                    data.Posting(to_account, -amt, None, None, None, None),
                ])
            entries.append(txn)
    return entries
def _extract_normal_operations(self, filename, rd):
    """Parse the normal-operations CSV section into transactions."""
    reader = csv.reader(rd, dialect="fortuneo")
    entries = []
    header = True
    line_index = 0
    for row in reader:
        # Validate the header row before reading any data.
        if header:
            if set(row) != set(FIELDS):
                raise InvalidFormatError()
            header = False
            line_index += 1
            continue
        # Skip rows that do not have the expected number of columns.
        if len(row) != 5 and len(row) != 6:
            continue
        # Extract data.
        row_date = datetime.strptime(row[0], "%d/%m/%Y")
        label = row[2]
        # The amount lives in column 3 (debit) or column 4 (credit).
        txn_amount = row[3]
        if txn_amount == '':
            txn_amount = row[4]
        txn_amount = parse_amount(txn_amount)
        # Prepare the transaction.
        meta = data.new_metadata(filename, line_index)
        txn = data.Transaction(
            meta=meta,
            date=row_date.date(),
            flag=flags.FLAG_OKAY,
            payee="",
            narration=label,
            tags=set(),
            links=set(),
            postings=[make_posting(self.checking_account, txn_amount)],
        )
        entries.append(txn)
        line_index += 1
    return entries
def get_holdings_entries(entries, options_map):
    """Summarizes the entries to a list of entries representing the final
    holdings.

    This list includes the latest prices entries as well. This can be used
    to load a full snapshot of holdings without including the entire
    history. This is a way of summarizing a balance sheet in a way that
    filters away history.

    Args:
      entries: A list of directives.
      options_map: A dict of parsed options.
    Returns:
      A string, the entries to print out.
    """
    # All synthetic entries are dated at the latest date seen and booked
    # against the equity account taken from the options.
    latest_date = entries[-1].date
    _, equity_account, _ = options.get_previous_accounts(options_map)
    # Get all the assets and build one synthetic entry per holding.
    holdings_list, _ = holdings.get_assets_holdings(entries, options_map)
    holdings_entries = []
    for index, holding in enumerate(holdings_list):
        meta = data.new_metadata('report_holdings_print', index)
        pos = holdings.holding_to_position(holding)
        cost = -convert.get_cost(pos)
        postings = [
            data.Posting(holding.account, pos.units, pos.cost,
                         None, None, None),
            data.Posting(equity_account, cost, None, None, None, None),
        ]
        holdings_entries.append(
            data.Transaction(meta, latest_date, flags.FLAG_SUMMARIZE,
                             None, "", None, None, postings))
    # Keep only the Open directives for accounts actually held.
    used_accounts = {holding.account for holding in holdings_list}
    used_open_entries = [
        open_entry
        for open_entry in summarize.get_open_entries(entries, latest_date)
        if open_entry.account in used_accounts
    ]
    # Add an entry for the equity account we're using.
    meta = data.new_metadata('report_holdings_print', -1)
    used_open_entries.insert(
        0, data.Open(meta, latest_date, equity_account, None, None))
    # Append the latest price entries.
    price_entries = prices.get_last_price_entries(entries, None)
    return used_open_entries + holdings_entries + price_entries
def createEntry(self, file, date, amt, text):
    """Create a single-posting transaction from one statement line."""
    meta = data.new_metadata(file.name, 0)
    units = amount.Amount(D(amt), self.currency)
    posting = data.Posting(self.account, units, None, None, None, None)
    # Dates are day-first in this statement format.
    return data.Transaction(meta,
                            parse(date.strip(), dayfirst=True).date(),
                            '*', '', text.strip(),
                            data.EMPTY_SET, data.EMPTY_SET,
                            [posting])
def settlement_date(entries, options_map, config):
    """Plugin: split postings carrying a 'settle' meta date.

    The original posting is rerouted through a transit account (the
    configured one, or Liabilities:AccountsPayable for negative amounts),
    and a linked settling transaction is appended on the settle date unless
    that date is in the future.

    Args:
      entries: The full list of directives (mutated and extended in place).
      options_map: Parsed options (unused).
      config: The account name used for the transit leg.
    Returns:
      An (entries, errors) tuple as required by the plugin protocol.
    """
    errors = []
    for index, entry in enumerate(entries):
        if isinstance(entry, data.Transaction):
            for p_index, posting in enumerate(entry.postings):
                if posting.meta and "settle" in posting.meta:
                    save_config = None
                    postings = entry.postings
                    s_date = posting.meta["settle"]
                    # Link both halves so they can be traced to each other.
                    link = "settle-{}".format(compare.hash_entry(entry))
                    original_account = posting.account
                    # Negative amounts settle through AccountsPayable
                    # instead of the configured transit account.
                    if postings[p_index].units.number < 0:
                        save_config = config
                        config = "Liabilities:AccountsPayable"
                    # Reroute the posting to the transit account in place.
                    entry.postings[p_index] = entry.postings[p_index]._replace(
                        account=config)
                    links = (set(entries[index].links).union([link])
                             if entries[index].links else set([link]))
                    # NOTE(review): the postings replacement below is
                    # immediately superseded by the links replacement; this
                    # still works because entry.postings was mutated in
                    # place above.
                    entries[index] = entry._replace(postings=postings)
                    entries[index] = entry._replace(links=links)
                    # do not settle future dates yet
                    if s_date >= date.today():
                        config = save_config if save_config else config
                        continue
                    # Build the settling transaction: a reversed transit
                    # leg plus the original account.
                    new_posting = postings[p_index]
                    new_posting = new_posting._replace(meta=dict())
                    postings = [new_posting, new_posting]
                    postings[0] = postings[0]._replace(account=config)
                    postings[0] = postings[0]._replace(
                        units=postings[1].units._replace(
                            number=postings[1].units.number * -1))
                    postings[1] = postings[1]._replace(
                        account=original_account)
                    if save_config:
                        postings.reverse()
                    entries.append(
                        data.Transaction(
                            entry.meta,
                            s_date,
                            entry.flag,
                            "",
                            "Settle: {}".format(entry.narration),
                            entry.tags,
                            set([link]),
                            postings,
                        ))
                    # Restore the configured account if it was swapped.
                    config = save_config if save_config else config
                    # break  # allow use of multiple 'settle'
    return entries, errors
def extract(self, file, existing_entries):
    """Fetch the last three months of Transferwise borderless-account
    statements via the API and convert them to transactions."""
    # The "file" is a YAML config holding the API token and base account.
    with open(file.name, 'r') as f:
        config = yaml.safe_load(f)
    token = config['token']
    baseAccount = config['baseAccount']
    # Query window: the last three months, in UTC.
    startDate = datetime.combine(date.today() + relativedelta(months=-3),
                                 datetime.min.time(),
                                 timezone.utc).isoformat()
    endDate = datetime.combine(date.today(), datetime.max.time(),
                               timezone.utc).isoformat()
    headers = {'Authorization': 'Bearer ' + token}
    r = requests.get('https://api.transferwise.com/v1/profiles',
                     headers=headers)
    profiles = r.json()
    profileId = profiles[0]['id']
    r = requests.get('https://api.transferwise.com/v1/borderless-accounts',
                     params={'profileId': profileId}, headers=headers)
    accounts = r.json()
    accountId = accounts[0]['id']
    entries = []
    # One statement request per currency balance.
    for account in accounts[0]['balances']:
        accountCcy = account['currency']
        r = requests.get(
            f"https://api.transferwise.com/v3/profiles/{profileId}/borderless-accounts/{accountId}/statement.json",
            params={
                'currency': accountCcy,
                'intervalStart': startDate,
                'intervalEnd': endDate
            },
            headers=headers)
        transactions = r.json()
        for transaction in transactions['transactions']:
            meta = data.new_metadata(
                '', 0, {'ref': transaction['referenceNumber']})
            units = amount.Amount(
                D(str(transaction['amount']['value'])),
                transaction['amount']['currency'])
            posting = data.Posting(baseAccount + accountCcy, units,
                                   None, None, None, None)
            entries.append(
                data.Transaction(
                    meta,
                    dateutil.parser.parse(transaction['date']).date(),
                    '*', '',
                    transaction['details']['description'],
                    data.EMPTY_SET, data.EMPTY_SET, [posting]))
    return entries
def test_insert_entry_align(tmpdir):
    """insert_entry must append a new transaction with amounts aligned to
    the configured currency column (double-quoted-style variant)."""
    # NOTE(review): the triple-quoted literals in this test appear flattened
    # onto a single line (original newlines lost in extraction); restore
    # them from the upstream source before relying on the comparison below.
    file_content = dedent(
        """ 2016-02-26 * "Uncle Boons" "Eating out alone" Liabilities:US:Chase:Slate -24.84 USD Expenses:Food:Restaurant 24.84 USD """
    )
    samplefile = tmpdir.mkdir("fava_util_file3").join("example.beancount")
    samplefile.write(file_content)
    postings = [
        data.Posting(
            "Liabilities:US:Chase:Slate",
            amount.Amount(D("-10.00"), "USD"),
            None,
            None,
            None,
            None,
        ),
        data.Posting(
            "Expenses:Food",
            amount.Amount(D("10.00"), "USD"),
            None,
            None,
            None,
            None,
        ),
    ]
    transaction = data.Transaction(
        {},
        datetime.date(2016, 1, 1),
        "*",
        "new payee",
        "narr",
        None,
        None,
        postings,
    )
    # Align currencies at column 50 when rendering the inserted entry.
    fava_options = {"currency-column": 50}
    insert_entry(transaction, [str(samplefile)], fava_options)
    assert samplefile.read() == dedent(
        """ 2016-02-26 * "Uncle Boons" "Eating out alone" Liabilities:US:Chase:Slate -24.84 USD Expenses:Food:Restaurant 24.84 USD 2016-01-01 * "new payee" "narr" Liabilities:US:Chase:Slate -10.00 USD Expenses:Food 10.00 USD """
    )