def test_non_ascii_parse(filename, encoding):
    # Read as binary
    with open(filename, 'rb') as fh:
        data = fh.read()

    data = data.decode(encoding)
    mt940.parse(data)

    # Read as text
    with open(filename, 'r') as fh:
        data = fh.read()

    mt940.parse(data)
def test_transaction_details_post_processor_with_space():
    filename = 'mt940_tests/betterplace/sepa_mt9401.sta'
    transactions = mt940.parse(filename)
    transaction2 = transactions[0].data

    transactions = mt940.parse(filename, processors=dict(
        post_transaction_details=[
            mt940.processors.transaction_details_post_processor_with_space,
        ],
    ))
    transaction = transactions[0].data

    assert transaction2['end_to_end_reference'] != \
        transaction['end_to_end_reference']
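# A minimal sketch of the same `processors` hook used above, based on the
# mt940 README as I recall it; the 'EUR' currency and the sample file path
# are assumptions for illustration, not taken from the test above.
import mt940

transactions = mt940.models.Transactions(processors=dict(
    pre_statement=[
        mt940.processors.add_currency_pre_processor('EUR'),
    ],
))
with open('tests/jejik/abnamro.sta') as fh:
    transactions.parse(fh.read())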
def test_parse(sta_file):
    transactions = mt940.parse(sta_file)
    write_yaml_data(sta_file, transactions)
    expected = get_yaml_data(sta_file)

    assert len(transactions) >= 0
    repr(transactions)
    str(transactions)

    # Test string and representation methods
    for k, v in transactions.data.items():
        str(v)
        repr(v)

    # Test string and representation methods
    for transaction in transactions:
        repr(transaction)
        str(transaction)
        for k, v in transaction.data.items():
            str(v)
            repr(v)

    # Compare transaction data
    compare(transactions, expected)
    # Compare actual transactions
    compare(transactions[:], expected[:])
def test_parse(sta_file):
    transactions = mt940.parse(sta_file)
    # write_yaml_data(sta_file, transactions)
    expected = get_yaml_data(sta_file)

    assert len(transactions) >= 0
    repr(transactions)
    str(transactions)

    # Test string and representation methods
    for k, v in transactions.data.items():
        str(v)
        repr(v)

    # Test string and representation methods
    for transaction in transactions:
        repr(transaction)
        str(transaction)
        for k, v in transaction.data.items():
            str(v)
            repr(v)

    # Compare transaction data
    compare(transactions, expected)
    # Compare actual transactions
    compare(transactions[:], expected[:])
def parse(file):
    data = file.read()
    try:
        import chardet
        charset = chardet.detect(data)['encoding']
    except ImportError:
        charset = file.charset
    data = data.decode(charset or 'utf-8')
    mt = mt940.parse(io.StringIO(data.strip()))
    result = []
    for t in mt:
        result.append({
            'reference': "\n".join([
                t.data.get(f) for f in (
                    'transaction_details', 'customer_reference',
                    'bank_reference', 'extra_details', 'non_swift_text'
                ) if t.data.get(f, '')
            ]),
            'amount': str(round_decimal(t.data['amount'].amount)),
            'date': t.data['date'].isoformat()
        })
    return result
def upload(context):
    import mt940

    if flask.request.method == 'POST':
        transactions = mt940.parse(flask.request.files['file'])
        print('transactions', transactions)
        for transaction in transactions:
            print('trans', transaction)
def parse_mt940(files: Iterable[str]) -> Iterable:
    handlers = []
    for file in files:
        handlers.append(sys.stdin if file == '-' else open(file, 'r'))
    for handler in handlers:
        yield from mt940.parse(handler)
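# A possible way to drive parse_mt940 above from the command line; the
# argument handling and printed fields are assumptions, not part of the
# original function.
import sys

if __name__ == '__main__':
    for transaction in parse_mt940(sys.argv[1:] or ['-']):
        print(transaction.data['date'], transaction.data['amount'])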
def test_parse(sta_file):
    transactions = mt940.parse(sta_file)

    # To update the yaml files after changing the code use the following
    # environment variable.
    # NOTE: Only for development purposes
    if os.environ.get('WRITE_YAML_FILES'):
        assert not os.environ.get('TRAVIS')
        write_yaml_data(sta_file, transactions)

    expected = get_yaml_data(sta_file)

    assert len(transactions) >= 0
    repr(transactions)
    str(transactions)

    # Test string and representation methods
    for k, v in transactions.data.items():
        string_type(v)
        repr(v)

    # Test string and representation methods
    for transaction in transactions:
        repr(transaction)
        string_type(transaction)
        for k, v in transaction.data.items():
            string_type(v)
            repr(v)

    # Compare transaction data
    compare(expected, transactions)
    # Compare actual transactions
    compare(expected[:], transactions[:])
def test_parse(sta_file):
    transactions = mt940.parse(sta_file)
    # To update the yaml files after changing the code, uncomment the following
    # Only for development purposes
    # write_yaml_data(sta_file, transactions)
    expected = get_yaml_data(sta_file)

    assert len(transactions) >= 0
    repr(transactions)
    str(transactions)

    # Test string and representation methods
    for k, v in transactions.data.items():
        string_type(v)
        repr(v)

    # Test string and representation methods
    for transaction in transactions:
        repr(transaction)
        string_type(transaction)
        for k, v in transaction.data.items():
            string_type(v)
            repr(v)

    # Compare transaction data
    compare(transactions, expected)
    # Compare actual transactions
    compare(transactions[:], expected[:])
def split_check(self):
    with open(self.file_dir) as f:
        data = f.read()
    data_split_list = data.split('-}{5')
    data_parse_list = [mt940.parse(i).data for i in data_split_list]
    for a, b in zip(data_split_list, data_parse_list):
        print(a)
        print(b)
def p940(self):
    import mt940
    import pprint

    transactions = mt940.parse(self.file_name)
    for t in transactions:
        if 1:  # 'energie' in t.data['transaction_details'].lower():
            print(t.data['date'])
            print(t.data['transaction_details'])
def _parse_file(self, data_file):
    currency = None
    account = None
    statements = []
    try:
        transactions = mt940.parse(StringIO.StringIO(data_file))
        # if no statements found
        if not transactions:
            _logger.debug(
                "Statement file was not recognized as an MT940 file, "
                "trying next parser",
                exc_info=True)
            return super(AccountBankStatementImport, self)._parse_file(
                data_file)
        statement = {
            'name': transactions.data['account_identification'] + "-" +
                transactions.data['statement_number'] + "-" +
                transactions.data['sequence_number'],
            'balance_start': transactions.data[
                'final_opening_balance'].amount.amount,
            'balance_end_real': transactions.data[
                'final_closing_balance'].amount.amount,
            'date': transactions.data['final_opening_balance'].date,
            'transactions': [],
        }
        currency = transactions.data[
            'final_opening_balance'].amount.currency
        account = transactions.data['account_identification'].split('/')[1]
        # we iterate through each transaction
        for t in transactions:
            st_line = {
                'date': t.data.get('entry_date') or t.data.get('date'),
                'amount': t.data['amount'].amount,
                'ref': t.data.get('bank_reference') or
                    t.data.get('extra_details'),
                'name': t.data['transaction_details'],
            }
            statement['transactions'].append(st_line)
        return currency, account, [statement]
    except Exception, e:
        _logger.info(e)
        raise UserError(
            _("The following problem occurred during import. "
              "The file might not be valid.\n\n %s" % e.message))
def parse(file):
    data = file.read()
    try:
        import chardet
        charset = chardet.detect(data)['encoding']
    except ImportError:
        charset = file.charset
    data = data.decode(charset or 'utf-8')
    mt = mt940.parse(io.StringIO(data.strip()))
    result = []
    for t in mt:
        td = t.data.get('transaction_details', '')
        if len(td) >= 4 and td[3] == '?':
            # SEPA content
            transaction_details = parse_transaction_details(
                td.replace("\n", ""))
            payer = {
                'name': transaction_details.get('accountholder', ''),
                'iban': transaction_details.get('accountnumber', ''),
            }
            reference, eref = join_reference(
                transaction_details.get('reference', '').split('\n'), payer)
            if not eref:
                eref = transaction_details.get('eref', '')
            result.append({
                'amount': str(round_decimal(t.data['amount'].amount)),
                'reference': reference + (
                    ' EREF: {}'.format(eref) if eref else ''),
                'payer': payer['name'].strip(),
                'date': t.data['date'].isoformat(),
                **{
                    k: payer[k].strip()
                    for k in ("iban", "bic")
                    if payer.get(k)
                }
            })
        else:
            result.append({
                'reference': "\n".join([
                    t.data.get(f) for f in (
                        'transaction_details', 'customer_reference',
                        'bank_reference', 'extra_details', 'non_swift_text'
                    ) if t.data.get(f, '')
                ]),
                'amount': str(round_decimal(t.data['amount'].amount)),
                'date': t.data['date'].isoformat()
            })
    return result
def generuj():
    # print('jpk_wb_support.generuj')
    # path to the mt940 file
    try:
        sys.argv[1]
    except IndexError:
        infile = ''
    else:
        infile = sys.argv[1]
    mtf = mtfile(infile)
    if PT.isfile(mtf):
        alltran, xmlf = mt940.parse(mtf), xmlfile()
        msg.set('NumerRachunku:' +
                repr(alltran.data['account_identification']))
        opBal = alltran.data['final_opening_balance']
        clBal = alltran.data['final_closing_balance']
        Ok = True
        while Ok:
            dataOd = opBal.date  # UserData('Od')
            dataDo = clBal.date  # UserData('Do')
            if dataOd <= dataDo:
                Ok = False
            else:
                msg.set('Błędne daty')
        s_pocz = opBal.amount.amount
        s_kon = clBal.amount.amount
        # start write to xml file
        root = root_jpk()
        Naglowek(root[0], dataOd, dataDo)
        Podmiot1(root[0])
        NumerRachunku(root[0], alltran)
        Salda(root[0], alltran)
        nrwiersza = 0
        debet, credit = 0, 0
        for tr in alltran:
            nrwiersza += 1
            dwiersz = WyciagWiersz(root[0], tr, nrwiersza, s_pocz)
            s_pocz = dwiersz['saldo']
            if dwiersz['status'] == 'D':
                debet += dwiersz['kw']
            else:
                credit += dwiersz['kw']
        WyciagCtrl(root[0], nrwiersza, debet, credit)
        msg.set('Ilość wierszy: ' + str(nrwiersza) + '\n' +
                'Plik xml: ' + xmlf)
        root[1].write(xmlf, pretty_print=True,
                      doctype='<?xml version="1.0" encoding="UTF-8"?>')
    else:
        msg.set('Nie ma pliku o nazwie: ' + mtf)
    sys.stdout.flush()
def _generate_and_parse_bank_statement(self, receipt_date=None):
    responses.add(responses.POST, api_url('/transactions/reconcile/'),
                  status=200)
    test_data = mock_test_transactions()
    mock_balance()
    mock_bank_holidays()
    if receipt_date is None:
        receipt_date = date(2016, 9, 13)
    mt940_file = generate_bank_statement(self.get_api_session(), receipt_date)
    return mt940.parse(mt940_file), test_data
def split_account(self, display=False):
    '''Produce account in dictionary'''
    with open(self.file_dir) as f:
        data = f.read()
    data_split_list = data.split('-}{5')
    data_parse_list = [mt940.parse(i).data for i in data_split_list]
    if display:
        print(f'No. of items in split list: {len(data_split_list)}')
        print(f'No. of items in the parsed list: {len(data_parse_list)}')
    return data_parse_list
def test_empty_statement_generated(self):
    responses.add(responses.POST, api_url('/transactions/reconcile/'),
                  status=200)
    responses.add(responses.GET, api_url('/transactions/'),
                  json=NO_TRANSACTIONS)
    mock_balance()
    mock_bank_holidays()
    today = date(2016, 9, 13)
    mt940_file = generate_bank_statement(self.get_api_session(), today)
    parsed_file = mt940.parse(mt940_file)
    self.assertEqual(len(parsed_file.transactions), 1)
    self.assertEqual(parsed_file.transactions[0].data, {})
def test_parse(input):
    transactions = mt940.parse(input)

    assert len(transactions) >= 0
    repr(transactions)
    str(transactions)

    for k, v in transactions.data.iteritems():
        str(v)
        repr(v)

    for transaction in transactions:
        repr(transaction)
        str(transaction)
        for k, v in transaction.data.iteritems():
            str(v)
            repr(v)
def handle_mt940(self):
    try:
        error = 0
        collection_name = extractor.get_valid_string(
            Path(self.file_path).stem)
        transactions = mt940.parse(self.file_path)
        if transactions.data is not None and len(transactions.data) != 0:
            # LOG.info('-> transactions => {0}'.format(
            #     json.dumps(transactions.data, indent=4, sort_keys=True,
            #                cls=mt940.JSONEncoder)))
            records = json.loads(
                json.dumps(transactions.data, cls=mt940.JSONEncoder))
            mongo = db_manager.get_client_instance_with_db_name(self.db_name)
            LOG.info('attempting to create collection -> {0}'.format(
                collection_name))
            LOG.info('saving mt940 format -> {0}'.format(records))
            mongo[collection_name].insert_one(records)
        else:
            error = 1
            LOG.critical(
                'mt-940 library failed to parse file need custom parsing '
                'RTFM => file not saved !!!!! :=> {0}'.format(self.file_path))
    except Exception as e:
        error = e
        LOG.critical(
            'failed to save to collection: {0}'.format(collection_name))
        if extractor.is_valid_json(transactions.data):
            LOG.critical('-> data not saved !!!! => {0}'.format(
                json.dumps(transactions.data, indent=4, sort_keys=True,
                           cls=mt940.JSONEncoder)))
        LOG.critical('ETM_MT940_ERROR -> {0}'.format(error))
        raise SwiftSaverFileError(e)
    finally:
        if error == 0:
            LOG.info(
                colored(
                    '--------------- mt940 successfully saved in ' +
                    self.db_name + ' database to collection: ' +
                    collection_name + ' --- \\o/ supa dupa dope !!! :) ',
                    'green'))
def main(argv):
    import mt940
    import os

    if len(argv) < 3:
        sys.stderr.write("Usage: %s <mt940 file> <json file>" % (argv[0],))
        return 1

    transactions_file = argv[1]
    if not os.path.exists(transactions_file):
        sys.stderr.write("ERROR: File %r was not found!" %
                         (transactions_file,))
        return 1

    transactions = mt940.parse(transactions_file)
    sys.stderr.write("Found %d transactions, exporting to JSON\n" %
                     (len(transactions),))

    result = []
    for trans in transactions:
        record = {}
        record['id'] = trans.data['id']
        record['amount'] = {}
        record['amount']['amount'] = trans.data['amount'].amount
        record['amount']['currency'] = trans.data['amount'].currency
        record['currency'] = trans.data['currency']
        record['date'] = trans.data['date']
        record['entryDate'] = trans.data['entry_date']
        record['fundsCode'] = trans.data['funds_code']
        record['status'] = trans.data['status']
        if 'transaction_reference' in trans.data:
            record['reference'] = trans.data['transaction_reference']
        record['details'] = trans.data['transaction_details']
        record['extraDetails'] = trans.data['extra_details']
        result.append(record)

    json_file = argv[2]
    resultWrap = {}
    resultWrap['accountIdentification'] = \
        transactions.data['account_identification']
    resultWrap['finalOpeningBalance'] = convertBalance(
        transactions.data['final_opening_balance'])
    resultWrap['finalClosingBalance'] = convertBalance(
        transactions.data['final_closing_balance'])
    resultWrap['transactions'] = result

    with open(json_file, 'w') as output:
        for chunk in SpecialEncoder().iterencode(resultWrap):
            output.write(chunk)
def parse(file):
    data = file.read()
    try:
        import chardet
        charset = chardet.detect(data)['encoding']
    except ImportError:
        charset = file.charset
    data = data.decode(charset or 'utf-8')
    mt = mt940.parse(io.StringIO(data.strip()))
    result = []
    for t in mt:
        result.append({
            'reference': "\n".join([
                t.data.get(f) for f in (
                    'transaction_details', 'customer_reference',
                    'bank_reference', 'extra_details'
                ) if t.data.get(f, '')
            ]),
            'amount': str(round_decimal(t.data['amount'].amount)),
            'date': t.data['date'].isoformat()
        })
    return result
def parse(file):
    data = file.read()
    try:
        import chardet
        charset = chardet.detect(data)['encoding']
    except ImportError:
        charset = file.charset
    data = data.decode(charset or 'utf-8')
    mt = mt940.parse(io.StringIO(data.strip()))
    result = []
    for t in mt:
        td = t.data.get('transaction_details', '')
        if len(td) >= 4 and td[3] == '?':
            # SEPA content
            transaction_details = parse_transaction_details(
                td.replace("\n", ""))
            payer = {
                'name': transaction_details.get('accountholder', ''),
                'iban': transaction_details.get('accountnumber', ''),
            }
            reference, eref = join_reference(
                transaction_details.get('reference', '').split('\n'), payer)
            if not eref:
                eref = transaction_details.get('eref', '')
            result.append({
                'amount': str(round_decimal(t.data['amount'].amount)),
                'reference': reference + (
                    ' EREF: {}'.format(eref) if eref else ''),
                'payer': (payer.get('name', '') + ' - ' +
                          payer.get('iban', '')).strip(),
                'date': t.data['date'].isoformat()
            })
        else:
            result.append({
                'reference': "\n".join([
                    t.data.get(f) for f in (
                        'transaction_details', 'customer_reference',
                        'bank_reference', 'extra_details', 'non_swift_text'
                    ) if t.data.get(f, '')
                ]),
                'amount': str(round_decimal(t.data['amount'].amount)),
                'date': t.data['date'].isoformat()
            })
    return result
def do_POST(self):
    content_length = int(self.headers['Content-Length'])
    if content_length <= 1:
        self._response_400('BODY is empty.')
        return

    post_data = self.rfile.read(content_length)
    logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s",
                 str(self.path), str(self.headers),
                 post_data.decode('utf-8'))

    transactions = mt940.parse(post_data)
    message = json.dumps(transactions, cls=mt940.JSONEncoder)

    self.send_response(200)
    self.send_header('Content-type', 'text/json')
    self.send_header("Content-length", len(message))
    self.end_headers()
    self.wfile.write(message.encode(encoding='utf_8'))
    return
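# A self-contained sketch of the same idea as do_POST above: accept an MT940
# statement over HTTP and return it as JSON. The Mt940Handler class name, the
# port, and the use of send_error instead of the missing _response_400 helper
# are assumptions, not taken from the snippet above.
import json

import mt940
from http.server import BaseHTTPRequestHandler, HTTPServer


class Mt940Handler(BaseHTTPRequestHandler):
    def do_POST(self):
        length = int(self.headers.get('Content-Length', 0))
        if length <= 1:
            self.send_error(400, 'BODY is empty.')
            return
        body = self.rfile.read(length)
        transactions = mt940.parse(body)
        message = json.dumps(transactions, cls=mt940.JSONEncoder)
        self.send_response(200)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Content-Length', str(len(message)))
        self.end_headers()
        self.wfile.write(message.encode('utf-8'))


if __name__ == '__main__':
    # Try it with: curl --data-binary @statement.sta http://localhost:8000/
    HTTPServer(('', 8000), Mt940Handler).serve_forever()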
def _parse_file(self, data_file):
    currency = None
    account = None
    statements = []
    try:
        transactions = mt940.parse(StringIO.StringIO(data_file))
        # if no statements found
        if not transactions:
            _logger.debug(
                "Statement file was not recognized as an MT940 file, "
                "trying next parser",
                exc_info=True)
            return super(AccountBankStatementImport, self)._parse_file(
                data_file)
        statement = {
            'name': transactions.data['account_identification'] + "-" +
                transactions.data['statement_number'] + "-" +
                transactions.data['sequence_number'],
            'balance_start': transactions.data[
                'final_opening_balance'].amount.amount,
            'balance_end_real': transactions.data[
                'final_closing_balance'].amount.amount,
            'date': transactions.data['final_opening_balance'].date,
            'transactions': [],
        }
        currency = transactions.data['final_opening_balance'].amount.currency
        account = transactions.data['account_identification'].split('/')[1]
        # we iterate through each transaction
        for t in transactions:
            st_line = {
                'date': t.data.get('entry_date') or t.data.get('date'),
                'amount': t.data['amount'].amount,
                'ref': t.data.get('bank_reference') or
                    t.data.get('extra_details'),
                'name': t.data['transaction_details'],
            }
            statement['transactions'].append(st_line)
        return currency, account, [statement]
    except Exception, e:
        _logger.info(e)
        raise UserError(
            _("The following problem occurred during import. "
              "The file might not be valid.\n\n %s" % e.message))
def test_parse(sta_file):
    transactions = mt940.parse(sta_file)
    expected = get_yaml_data(sta_file)

    assert len(transactions) >= 0
    repr(transactions)
    str(transactions)

    for k, v in transactions.data.items():
        str(v)
        repr(v)

    for transaction in transactions:
        repr(transaction)
        str(transaction)
        for k, v in transaction.data.items():
            str(v)
            repr(v)

    # Compare transaction data
    compare(transactions, expected)
    # Compare actual transactions
    compare(transactions[:], expected[:])
def test_parse_filename():
    mt940.parse('tests/jejik/abnamro.sta')
def parse(file):
    data = file.read()
    try:
        import chardet
        charset = chardet.detect(data)['encoding']
    except ImportError:
        charset = file.charset
    data = data.decode(charset or 'utf-8')
    mt = mt940.parse(io.StringIO(data.strip()))
    result = []
    for t in mt:
        td = t.data.get('transaction_details', '')
        if len(td) >= 4 and td[3] == '?':
            # SEPA content
            transaction_details = parse_transaction_details(
                td.replace("\n", ""))
            payer = {
                'name': transaction_details.get('accountholder', '') or
                    t.data.get('applicant_name', ''),
                # In reality, these fields are sometimes IBANs and BICs, and
                # sometimes legacy numbers. We don't really know (except for
                # a syntax check) which will be performed anyways much later
                # in the stack.
                'iban': transaction_details.get('accountnumber', '') or
                    t.data.get('applicant_iban', ''),
                'bic': transaction_details.get('blz', '') or
                    t.data.get('applicant_bin', ''),
            }
            reference, eref = join_reference(
                transaction_details.get('reference', '').split('\n'), payer)
            if not eref:
                eref = transaction_details.get('eref', '')
            result.append({
                'amount': str(round_decimal(t.data['amount'].amount)),
                'reference': reference + (
                    ' EREF: {}'.format(eref) if eref else ''),
                'payer': payer['name'].strip(),
                'date': t.data['date'].isoformat(),
                **{
                    k: payer[k].strip()
                    for k in ("iban", "bic")
                    if payer.get(k)
                }
            })
        else:
            payer = {
                'payer': t.data.get('applicant_name', ''),
                # In reality, these fields are sometimes IBANs and BICs, and
                # sometimes legacy numbers. We don't really know (except for
                # a syntax check) which will be performed anyways much later
                # in the stack.
                'iban': t.data.get('applicant_iban', ''),
                'bic': t.data.get('applicant_bin', ''),
            }
            result.append({
                'reference': "\n".join([
                    t.data.get(f) for f in (
                        'transaction_details', 'customer_reference',
                        'bank_reference', 'purpose', 'extra_details',
                        'non_swift_text'
                    ) if t.data.get(f, '')
                ]),
                'amount': str(round_decimal(t.data['amount'].amount)),
                'date': t.data['date'].isoformat(),
                **{
                    k: payer[k].strip()
                    for k in ("iban", "bic", "payer")
                    if payer.get(k)
                }
            })
    return result
def test_parse_fh():
    with open('tests/jejik/abnamro.sta') as fh:
        mt940.parse(fh)
def test_parse_data():
    with open('tests/jejik/abnamro.sta') as fh:
        mt940.parse(fh.read())
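# As the three tests above show, mt940.parse() accepts a filename, an open
# file handle, or the statement text itself. A minimal sketch combining the
# three forms, using the same sample file as the tests above.
import mt940

path = 'tests/jejik/abnamro.sta'

transactions = mt940.parse(path)           # filename
with open(path) as fh:
    transactions = mt940.parse(fh)         # file handle
with open(path) as fh:
    transactions = mt940.parse(fh.read())  # raw statement data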
def parse_func(path, filename):
    transactions = mt940.parse(path)
    # with open('combined.json', 'w') as json_file:
    #     json.dump(transactions, json_file)
    # with open('combined.json') as f:
    #     d = json.load(f)
    d = transactions
    df = json_normalize(transactions)
    df2 = (pd.concat(
        {i: json_normalize(x) for i, x in df.pop('transactions').items()},
        sort=False).reset_index(level=1, drop=True).join(
            df, lsuffix='_in_transactions',
            rsuffix='_if_opening_NA').reset_index(drop=True))

    try:
        df2 = df2[[
            'account_identification', 'date', 'amount.currency',
            'amount.amount', 'status', 'customer_reference',
            'transaction_reference', 'extra_details', 'transaction_details',
            'final_opening_balance.date', 'final_opening_balance.status',
            'final_opening_balance.amount.amount',
            'final_opening_balance.amount.currency', 'entry_date',
            'funds_code', 'guessed_entry_date', 'id',
            'available_balance.date', 'available_balance.status',
            'available_balance.amount.amount',
            'available_balance.amount.currency', 'final_closing_balance.date',
            'final_closing_balance.status',
            'final_closing_balance.amount.amount',
            'final_closing_balance.amount.currency', 'sequence_number',
            'statement_number'
        ]]
        df2 = df2.rename(columns={
            'account_identification': 'Bank account no.',
            'date': 'Transaction date',
            'amount.currency': 'Amount currency',
            'amount.amount': 'Amount',
            'status': 'Transaction type',
            'customer_reference': 'Transaction reference',
            'transaction_reference': 'Reference no.',
            'extra_details': 'Additional reference',
            'transaction_details': 'Remarks',
            'final_opening_balance.date': 'Opening balance date',
            'final_opening_balance.status': 'Opening balance status',
            'final_opening_balance.amount.amount': 'Opening balance amount',
            'final_opening_balance.amount.currency': 'Opening balance currency',
            'entry_date': 'Entry date',
            'funds_code': 'Fund code',
            'guessed_entry_date': 'Addl. Entry date',
            'id': 'ID',
            'available_balance.date': 'Available balance date',
            'available_balance.status': 'Available balance type',
            'available_balance.amount.amount': 'Available balance',
            'available_balance.amount.currency': 'Available balance currency',
            'final_closing_balance.date': 'Ledger balance date',
            'final_closing_balance.status': 'Ledger balance type',
            'final_closing_balance.amount.amount': 'Ledger balance amount',
            'final_closing_balance.amount.currency': 'Ledger balance currency',
            'sequence_number': 'Sequence no.',
            'statement_number': 'Statement no.'
        })
    except:
        pass

    # print(df2.head(3))
    # print(df2.shape)
    writer = ExcelWriter(filename)
    df2.to_excel(writer, sheet_name='Transactional_Data', index=False)
    writer.save()

    try:
        d[:] = [item for item in d if not item['transactions']]
        df = json_normalize(d)
        df = df[[
            'account_identification', 'final_opening_balance.date',
            'final_opening_balance.status',
            'final_opening_balance.amount.amount',
            'final_opening_balance.amount.currency',
            'final_closing_balance.date', 'final_closing_balance.status',
            'final_closing_balance.amount.amount',
            'final_closing_balance.amount.currency', 'available_balance.date',
            'available_balance.status', 'available_balance.amount.amount',
            'available_balance.amount.currency', 'sequence_number',
            'statement_number', 'transaction_reference'
        ]]
        df = df.rename(columns={
            'account_identification': 'Bank account no.',
            'final_opening_balance.date': 'Opening balance date',
            'final_opening_balance.status': 'Opening balance status',
            'final_opening_balance.amount.amount': 'Opening balance amount',
            'final_opening_balance.amount.currency': 'Opening balance currency',
            'final_closing_balance.date': 'Ledger balance date',
            'final_closing_balance.status': 'Ledger balance type',
            'final_closing_balance.amount.amount': 'Ledger balance amount',
            'final_closing_balance.amount.currency': 'Ledger balance currency',
            'available_balance.date': 'Available balance date',
            'available_balance.status': 'Available balance type',
            'available_balance.amount.amount': 'Available balance',
            'available_balance.amount.currency': 'Available balance currency',
            'sequence_number': 'Sequence no.',
            'statement_number': 'Statement no.',
            'transaction_reference': 'Transaction reference'
        })
        # del df['transactions']
        # print(df.head(3))
        # print(df.shape)
        df.to_excel(writer, sheet_name='Non_Transactional_Data', index=False)
        writer.save()
    except:
        pass
def test_json_dump(sta_file):
    transactions = mt940.parse(sta_file)
    json.dumps(transactions, cls=mt940.JSONEncoder)
if __name__ == '__main__':
    infile = ''
    # path to the mt940 file
    path = './work'
    try:
        sys.argv[1]
    except IndexError:
        infile = ''
    else:
        infile = sys.argv[1]
    mtf = mtfile(infile)
    if PT.isfile(path + '/' + mtf):
        alltran, xmlf = mt940.parse(path + '/' + mtf), xmlfile()
        print('NumerRachunku:' + repr(alltran.data['account_identification']))
        # print('SaldoPoczątkowe:' + repr(alltran.data['final_opening_balance'].amount.currency))
        # print('SaldoKoncowe:' + repr(alltran.data['final_closing_balance']))
        Ok = True
        while Ok:
            dataOd = UserData('Od')
            dataDo = UserData('Do')
            if dataOd <= dataDo:
                Ok = False
            else:
                print('Błędne daty')
        print(xmlf)
        root = root_jpk()
        Naglowek(root[0], dataOd, dataDo)
        Podmiot1(root[0])
#!/bin/python
import mt940
import json
from pprint import pprint as pretty_print


def default(value):
    if isinstance(value, mt940.models.Transactions):
        data = value.data.copy()
        data['transactions'] = value.transactions
        return data
    elif hasattr(value, 'data'):
        return value.data


# load the transactions
rabo_transactions = mt940.parse('rabobank.txt')

print("Final Opening Balance: ",
      rabo_transactions.data.get('final_opening_balance'))
print("Final Closing Balance: ",
      rabo_transactions.data.get('final_closing_balance'))

print("Transactions:")
for transaction in rabo_transactions:
    pretty_print(transaction.data)

# write to a json file
with open('rabobank.json', 'w') as w:
    w.write(json.dumps(rabo_transactions, default=default, indent=4))
        'STALPLEIN BV HAARZUI', 'VOLENDAM', 'RONDVAART DELFT'),
    'TRAVEL': ('CHIPKAART', 'NS-', 'NS '),
}

not_found_count = 0
not_found_amount = Decimal(0.0)
found_count = 0
found_amount = Decimal(0.0)
month_data = {}
for purchase_type in shop_list:
    month_data[purchase_type] = {}

target_path = "/home/rick/Desktop/SpendingTracker/MT940.STA"
transactions = mt940.parse(target_path)

for transaction in transactions:
    found = False
    for purchase_type in shop_list:
        if any(s in transaction.data['transaction_details']
               for s in shop_list[purchase_type]):
            found = True
            found_count += 1
            found_amount += transaction.data['amount'].amount
            if transaction.data['date'].month in month_data[purchase_type]:
                month_data[purchase_type][
                    transaction.data['date'].month
                ] += transaction.data['amount'].amount
            else:
                month_data[purchase_type][
                    transaction.data['date'].month
                ] = transaction.data['amount'].amount
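# A possible continuation (not part of the original script): print a
# per-category monthly summary from the month_data accumulated above.
for purchase_type, months in month_data.items():
    for month, amount in sorted(months.items()):
        print(f'{purchase_type} {month:02d}: {amount}')
print(f'matched {found_count} transactions totalling {found_amount}')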