Example #1
def extract_signoninfos(markup: BytesIO) -> Iterator[models.SIGNONINFO]:
    """
    Input serialized OFX containing PROFRS
    Output iterator of ofxtools.models.SIGNONINFO instances
    """
    parser = OFXTree()
    parser.parse(markup)
    ofx = parser.convert()

    sonrs: Union[models.SONRS, None] = ofx.signonmsgsrsv1.sonrs
    assert isinstance(sonrs, models.SONRS)
    verify_status(sonrs)

    msgs: Union[models.PROFMSGSRSV1, None] = ofx.profmsgsrsv1
    assert msgs is not None

    def extract_signoninfo(trnrs: models.PROFTRNRS) -> List[models.SIGNONINFO]:
        verify_status(trnrs)
        rs: Union[models.PROFRS, None] = trnrs.profrs
        assert rs is not None

        list_: Union[models.SIGNONINFOLIST, None] = rs.signoninfolist
        assert list_ is not None
        return list_

    return itertools.chain.from_iterable(
        extract_signoninfo(trnrs) for trnrs in msgs)
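
A minimal usage sketch for the helper above (the file path is hypothetical; the printed field names follow the OFX SIGNONINFO aggregate):

from io import BytesIO

with open("profrs.ofx", "rb") as f:          # hypothetical saved PROFRS response
    payload = BytesIO(f.read())

for info in extract_signoninfos(payload):
    print(info.signonrealm, info.min, info.max)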
Example #2
def get_ofx(filename, occ="", charges=(), mois=3):
    data = {"date": [], "amount": [], "name": []}
    parser = OFXTree()
    max_date = datetime.datetime.today() - datetime.timedelta(days=mois * 30)
    with open(filename, 'rb') as f:
        parser.parse(f)
        ofx = parser.convert()
        stmts = ofx.statements
        c_amount = float(stmts[0].ledgerbal.balamt)
        c_amount_init = float(stmts[0].ledgerbal.balamt)
        txs = stmts[0].transactions
        data["date"].append(datetime.datetime.today())
        data["amount"].append(c_amount_init)
        data["name"].append("")
        # print(datetime.datetime.today(), c_amount_init, "")
        for i in txs:
            #c_amount -= float(i.trnamt)
            if i.dtposted.replace(tzinfo=pytz.UTC) < max_date.replace(
                    tzinfo=pytz.UTC):
                continue
            data["date"].append(i.dtposted)
            data["amount"].append(float(i.trnamt))
            data["name"].append(i.name)
    aggregate(data, 2 * c_amount)
    update_with_occ(data, occ, c_amount_init, charges)
    return px.line(data, x="date", y="amount", color="type")
Example #3
def extract_acctinfos(markup: BytesIO) -> Iterator[AcctInfo]:
    """
    Input serialized OFX containing ACCTINFORS
    Output flat iterator of parsed *ACCTINFO instances
    """
    parser = OFXTree()
    parser.parse(markup)
    ofx = parser.convert()

    sonrs = ofx.signonmsgsrsv1.sonrs
    assert isinstance(sonrs, models.SONRS)
    verify_status(sonrs)

    msgs = ofx.signupmsgsrsv1
    assert msgs is not None and len(msgs) == 1
    trnrs = msgs[0]
    assert isinstance(trnrs, models.ACCTINFOTRNRS)
    verify_status(trnrs)

    acctinfors = trnrs.acctinfors
    assert isinstance(acctinfors, models.ACCTINFORS)

    # ACCTINFOs are ListItems of ACCTINFORS
    # *ACCTINFOs are ListItems of ACCTINFO
    # The data we want is in a nested list
    return itertools.chain.from_iterable(acctinfors)
Example #4
def agg(db_dir: str = cfg.DB_DIR) -> None:
    """Aggregate current ofx files to the database.

    Args:
        db_dir:  Database base directory path.

    Returns:
        None
    """
    parser = OFXTree()
    user_cfg = accounts.get_user_cfg()
    for server, server_config in user_cfg.items():
        if server != cfg.OFXGET_DEFAULT_SERVER:
            user = server_config[cfg.OFXGET_CFG_USER_LABEL]
            file_name = f'{db_dir}/{_STMT_FOLDER}/' \
                        f'{cfg.CURRENT_PREFIX}_{server}_{user}.{cfg.OFX_EXTENSION}'
            with open(file_name, 'rb') as ofx_file:
                parser.parse(ofx_file)
            ofx = parser.convert()
            agg_datetime = datetime.datetime.today().replace(tzinfo=cfg.OFX_TIMEZONE)
            agg_date = agg_datetime.date()
            acct_info = {'datetime': agg_datetime, 'date': agg_date, 'server': server, 'user': user}
            for stmt in ofx.statements:
                process_statement_model(stmt=stmt, acct_info=acct_info, db_dir=db_dir)
            process_ofx_model(
                ofx_model=ofx.securities, acct_info=acct_info, table='securities', db_dir=db_dir
            )
Example #5
def extract_signoninfos(markup: BytesIO) -> List[models.SIGNONINFO]:
    """
    Input serialized OFX containing PROFRS
    Output list of ofxtools.models.SIGNONINFO instances
    """
    parser = OFXTree()
    parser.parse(markup)
    ofx = parser.convert()

    sonrs = ofx.signonmsgsrsv1.sonrs
    assert isinstance(sonrs, models.SONRS)
    verify_status(sonrs)

    msgs = ofx.profmsgsrsv1
    assert msgs is not None

    def extract_signoninfo(trnrs):
        assert isinstance(trnrs, models.PROFTRNRS)
        verify_status(trnrs)
        rs = trnrs.profrs
        assert rs is not None

        list_ = rs.signoninfolist
        assert list_ is not None
        return list_[:]

    return list(itertools.chain.from_iterable(
        [extract_signoninfo(trnrs) for trnrs in msgs]))
Example #6
    def _parse_statement(self, file: FileMemo) -> CCSTMTRS:
        """
        for tx in doc.statements[0].transactions:
            print(f"{tx.dtposted}: {tx.trnamt} {tx.fitid} {tx.name}")

        We return a "CCSTMTRS" instance.  This has a few interesting properties:

        * curdef - the Currency of the statement
        * transactions - the List of Entries for each tx it has:
                tx.dtposted - datetime
                tx.trnamt - Decimal
                tx.fitid - String/unique
                tx.name - Name/Description

        * balance - balance.balamt -- balance
                    balance.dtasof -- datetime as of

        * account.acctid -- the Full account number (might be a CC number!)
        """
        try:
            parser = OFXTree()
            parser.parse(file.name)

            # Use the Convert to make this thing readable:
            ofx_doc = parser.convert()

            return ofx_doc.statements[0]
        except Exception:
            if DEBUG:
                logging.exception(f"While Parsing {file}")
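
A rough usage sketch that reads the fields listed in the docstring from a parsed statement (the OFX file path is hypothetical; this bypasses the importer class and uses OFXTree directly):

from ofxtools.Parser import OFXTree

parser = OFXTree()
parser.parse("card_statement.ofx")            # hypothetical credit card OFX download
stmt = parser.convert().statements[0]         # the CCSTMTRS instance

print(stmt.curdef)                            # statement currency
print(stmt.account.acctid)                    # full account number (might be a CC number)
print(stmt.balance.balamt, stmt.balance.dtasof)
for tx in stmt.transactions:
    print(f"{tx.dtposted}: {tx.trnamt} {tx.fitid} {tx.name}")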
Example #7
def transactions_by_account(fileobj):
    parser = OFXTree()
    parser.parse(fileobj.buffer)
    ofx = parser.convert()
    result = {}
    for st in ofx.statements:
        result[str(st.invacctfrom.acctid)] = _statement_transactions(st)
    return result
Example #8
def upload_ofx_bank_statement():
    ofx_parser = OFXTree()
    columns = [{
        "field": "id",
        "label": "ID",
    }, {
        "field": "type",
        "label": _("Type")
    }, {
        "field":
        "date",
        "label":
        _("Date"),
        "type":
        "date",
        "width":
        "100%",
        "dateInputFormat":
        "yyyy-MM-dd",
        "dateOutputFormat":
        frappe.db.get_default("date_format").replace("Y", "y").replace(
            "m", "M").replace("D", "d") or "yyyy-MM-dd"
    }, {
        "field": "description",
        "label": _("Description")
    }, {
        "field": "debit",
        "label": _("Debit"),
        "type": "decimal"
    }, {
        "field": "credit",
        "label": _("Credit"),
        "type": "decimal"
    }, {
        "field": "currency",
        "label": _("Currency")
    }]
    data = []
    try:
        from io import BytesIO
        with BytesIO(frappe.local.uploaded_file) as file:
            ofx_parser.parse(file)
            ofx = ofx_parser.convert()
            stmts = ofx.statements

            for stmt in stmts:
                txs = stmt.transactions or []
                for transaction in txs:
                    data.append(make_transaction_row(transaction, stmt.curdef))

        return {"columns": columns, "data": data}
    except Exception as e:
        frappe.log_error(frappe.get_traceback(), _("OFX Parser Error"))
        frappe.throw(_("OFX Parser Error. Please contact the support."))
Example #9
    def _get_service_urls(
        self,
        timeout: Optional[float] = None,
        gen_newfileuid: bool = True,
    ) -> dict:
        """Query OFX profile endpoint to construct mapping of statement request
        data container to URL providing that service.
        """
        profile = self.request_profile(
            gen_newfileuid=gen_newfileuid,
            timeout=timeout,
        )
        parser = OFXTree()
        parser.parse(profile)
        ofx = parser.convert()
        proftrnrs = ofx.profmsgsrsv1[0]
        msgsetlist = proftrnrs.msgsetlist  # proxy access to SubAggregate attributes
        classmap = {
            BANKMSGSET: StmtRq,
            CREDITCARDMSGSET: CcStmtRq,
            INVSTMTMSGSET: InvStmtRq,
        }
        urls = {
            RqCls: msgset.url  # proxy access to SubAggregate attributes
            for msgset in msgsetlist
            if (RqCls := classmap.get(type(msgset), None)) is not None
        }

        # Also map *STMTENDRQ
        def map_stmtendrq_urls(
            msgsetCls: MsgsetClass,
            stmtendrqCls: Union[Type[StmtEndRq], Type[CcStmtEndRq]],
        ):
            try:
                index = [type(msgset)
                         for msgset in msgsetlist].index(msgsetCls)
            except ValueError:
                pass
            else:
                msgset = msgsetlist[index]
                if msgset.closingavail:  # proxy access to SubAggregate attributes
                    urls[
                        stmtendrqCls] = msgset.url  # proxy access to SubAgg attributes

        map_stmtendrq_urls(BANKMSGSET, StmtEndRq)
        map_stmtendrq_urls(CREDITCARDMSGSET, CcStmtEndRq)

        return urls
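
A rough illustration of how the URL mapping built above might be consumed (the `client` instance is hypothetical; the request classes are the same ones used in the dict comprehension):

urls = client._get_service_urls()
stmt_url = urls.get(StmtRq)        # None if the FI profile does not offer BANKMSGSET
ccstmt_url = urls.get(CcStmtRq)
invstmt_url = urls.get(InvStmtRq)
if stmt_url is None:
    print("Profile does not advertise a bank statement service")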
Example #10
def ofxParse(file, tranloader):
    parser = OFXTree()
    with codecs.open(file, 'br') as fileobj:
        parser.parse(fileobj)
    ofx = parser.convert()
    sonr = ofx.sonrs
    statement = ofx.statements[0]
    acc = ofx.statements[0].account

    orga = getOrga(sonr, acc)
    impid = round(datetime.datetime.now().timestamp())
    for statement in ofx.statements:
        acc = getAcc(orga, statement)
        for tran in statement.transactions:
            tranloader.add(tran, orga, acc)
        tranloader.post(statement, impid)
    return impid
Example #11
def cli(input, patterns=True, keywords=True, silent=False):
    if silent:
        while log.hasHandlers():
            log.removeHandler(log.handlers[0])
        log.addHandler(logging.NullHandler())

    filename = click.format_filename(input)
    log.info("Preparing to parse and fix {}".format(filename))

    parser = OFXTree()
    parser.parse(filename)

    for e in parser.getroot().findall(".//STMTTRN"):
        node_name = e.find('./NAME')
        node_memo = e.find('./MEMO')

        node_dtposted = e.find('./DTPOSTED')
        node_dtposted.text = fix_date(node_dtposted.text, node_memo.text)

        filter_functions = []
        filter_functions.append(filter_minlength)
        if keywords: filter_functions.append(filter_keywords)
        if patterns: filter_functions.append(filter_patterns)

        node_name.text = fix_text(node_memo.text, filter_functions)

    parser.write(sys.stdout, encoding="unicode")
Example #12
    def save(self, **kwargs):
        saved = super().save(**kwargs)

        if self.ofx_endpoint:

            yesterday = (datetime.now() - timedelta(days=1)).replace(tzinfo=OFX_UTC)
            response = self.ofx_client.request_accounts(self.password, yesterday)
            parser = OFXTree()
            parser.parse(response)
            parsed_response = parser.getroot()

            for account in parsed_response.findall('.//ACCTINFO'):
                account_type = account.find('.//ACCTTYPE').text
                account_number = account.find('.//ACCTID').text
                account_obj, created = Account.objects.get_or_create(account_type=account_type,
                                                                     account_number=account_number,
                                                                     bank=self)

        return saved
Example #13
class TransactionMachine(object):
    def __init__(self, bank):
        self.bank = bank
        self.parser = OFXTree()

    def fetch_new_transactions(self):
        results = []

        for account in self.bank.account_set.all():
            transactions = [self.make_transaction_object(t, account) for t in self.fetch_transactions_for_account(account)]
            results.extend(Transaction.objects.bulk_create(transactions, ignore_conflicts=True))

        return results

    def fetch_transactions_for_account(self, account):
        try:
            latest_transaction = Transaction.objects.filter(account=account).latest('date_posted')
            from_date = latest_transaction.date_posted.replace(tzinfo=OFX_UTC)
        except Transaction.DoesNotExist:
            from_date = datetime(2016, 1, 1, tzinfo=OFX_UTC)

        to_date = datetime.now().replace(tzinfo=OFX_UTC)

        statement_request = StmtRq(acctid=account.account_number,
                                   accttype=account.account_type,
                                   dtstart=from_date,
                                   dtend=to_date)

        response = self.bank.ofx_client.request_statements(self.bank.password,
                                                           statement_request)
        self.parser.parse(response)
        parsed_response = self.parser.getroot()

        yield from parsed_response.findall('.//STMTTRN')

    def make_transaction_object(self, transaction_xml, account):
        transaction = Transaction()
        transaction.transaction_type = transaction_xml.find('TRNTYPE').text
        transaction.transaction_id = transaction_xml.find('FITID').text
        transaction.amount = transaction_xml.find('TRNAMT').text
        transaction.name = transaction_xml.find('NAME').text
        transaction.memo = transaction_xml.find('MEMO').text
        transaction.date_posted = datetime.strptime(transaction_xml.find('DTPOSTED').text[:8], '%Y%m%d').replace(tzinfo=OFX_UTC)
        transaction.account = account

        return transaction
Example #14
 def __init__(self, bank):
     self.bank = bank
     self.parser = OFXTree()
Example #15
 def setUp(self):
     self.tree = OFXTree()
Example #16
#!/usr/bin/env python
# vim: set fileencoding=utf-8

import argparse

from ofxtools.Parser import OFXTree

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('files', nargs='+')

    args = parser.parse_args()

    parser = OFXTree()
    for f in args.files:
        print("Parsing %s" % f)
        parser.parse(f) 
        parser.convert(strict=False)
Example #17
def extract_acctinfos(markup: BytesIO) -> ChainMap:
    """
    Input serialized OFX containing ACCTINFORS
    Output dict-like object containing parsed *ACCTINFOs
    """
    parser = OFXTree()
    parser.parse(markup)
    ofx = parser.convert()

    sonrs = ofx.signonmsgsrsv1.sonrs
    assert isinstance(sonrs, models.SONRS)
    verify_status(sonrs)

    msgs = ofx.signupmsgsrsv1
    assert msgs is not None and len(msgs) == 1
    trnrs = msgs[0]
    assert isinstance(trnrs, models.ACCTINFOTRNRS)
    verify_status(trnrs)

    acctinfors = trnrs.acctinfors
    assert isinstance(acctinfors, models.ACCTINFORS)

    # *ACCTINFO classes don't have rich comparison methods;
    # can't sort by class
    sortKey = attrgetter("__class__.__name__")

    # ACCTINFOs are ListItems of ACCTINFORS
    # *ACCTINFOs are ListItems of ACCTINFO
    # The data we want is in a nested list
    acctinfos = sorted(itertools.chain.from_iterable(acctinfors), key=sortKey)

    def _unique(ids, label):
        ids = set(ids)
        if len(ids) > 1:
            msg = "Multiple {} {}; can't configure automatically"
            raise ValueError(msg.format(label, list(ids)))
        try:
            id = ids.pop()
        except KeyError:
            msg = "{} is empty"
            raise ValueError(msg.format(label))
        return id

    def _ready(acctinfo):
        return acctinfo.svcstatus == "ACTIVE"

    def parse_bank(acctinfos):
        bankids = []
        args_ = defaultdict(list)
        for inf in acctinfos:
            if _ready(inf):
                bankids.append(inf.bankid)
                args_[inf.accttype.lower()].append(inf.acctid)

        args_["bankid"] = _unique(bankids, "BANKIDs")
        return dict(args_)

    def parse_inv(acctinfos):
        brokerids = []
        args_ = defaultdict(list)
        for inf in acctinfos:
            if _ready(inf):
                acctfrom = inf.invacctfrom
                brokerids.append(acctfrom.brokerid)
                args_["investment"].append(acctfrom.acctid)

        args_["brokerid"] = _unique(brokerids, "BROKERIDs")
        return dict(args_)

    def parse_cc(acctinfos):
        return {"creditcard": [inf.acctid for inf in acctinfos if _ready(inf)]}

    dispatcher = {"BANKACCTINFO": parse_bank,
                  "CCACCTINFO": parse_cc,
                  "INVACCTINFO": parse_inv}

    return ChainMap(*[dispatcher.get(clsName, lambda x: {})(_acctinfos)
                      for clsName, _acctinfos in itertools.groupby(
                          acctinfos, key=sortKey)])
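
A minimal sketch of consuming the ChainMap returned above, assuming `markup` holds a BytesIO of an ACCTINFORS response (which keys appear depends on the *ACCTINFO types present):

args = extract_acctinfos(markup)
print(args.get("bankid"))                                   # routing number shared by the bank accounts
print(args.get("checking", []), args.get("savings", []))    # ACCTIDs keyed by lower-cased ACCTTYPE
print(args.get("creditcard", []))                           # credit card ACCTIDs
print(args.get("brokerid"), args.get("investment", []))     # investment accounts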
Example #18
def ofx_parse(filename):
    tree = OFXTree()
    tree.parse(filename)
    return tree.convert()
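
The OFX object returned above can then be walked as in the other examples; a minimal sketch with a hypothetical filename:

ofx = ofx_parse("statement.ofx")
for stmt in ofx.statements:
    for tx in stmt.transactions:
        print(tx.dtposted, tx.trnamt, tx.name)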
Example #19
    def request_profile(
        self,
        version: Optional[int] = None,
        gen_newfileuid: bool = True,
        prettyprint: Optional[bool] = None,
        close_elements: Optional[bool] = None,
        dryrun: bool = False,
        timeout: Optional[float] = None,
        url: Optional[str] = None,
        persist: bool = True,
    ) -> BinaryIO:
        """Request/cache OFX profiles (PROFRS).

        ofxget.scan_profile() overrides version/prettyprint/close_elements.
        """
        filename = f"{self.org}-{self.fid}.profrs"
        persistdir = config.DATADIR / "fiprofiles"
        persistpath = persistdir / filename

        if persistpath.exists():
            with open(persistpath, "rb") as f:
                profrs: Optional[BytesIO] = BytesIO(f.read())

            parser = OFXTree()
            parser.parse(profrs)
            ofx = parser.convert()
            proftrnrs = ofx.profmsgsrsv1[0]
            dtprofup = proftrnrs.profrs.dtprofup
        else:
            persistdir.mkdir(parents=True, exist_ok=True)
            profrs = None
            dtprofup = None

        response = self._request_profile(
            dtprofup=dtprofup,
            version=version,
            gen_newfileuid=gen_newfileuid,
            prettyprint=prettyprint,
            close_elements=close_elements,
            dryrun=dryrun,
            timeout=timeout,
            url=url,
        )

        if dryrun:
            return response

        parser = OFXTree()
        parser.parse(response)
        ofx = parser.convert()

        #  If the client has the latest version of the FI's profile, the server returns
        #  status code 1 in the <STATUS> aggregate of the profile-transaction aggregate
        #  <PROFTRNRS>. The server does not return a profile-response aggregate <PROFRS>.

        #  If the client does not have the latest version of the FI profile, the server
        #  responds with the profile-response aggregate <PROFRS> in the profile-transaction
        #  aggregate <PROFTRNRS>.
        proftrnrs = ofx.profmsgsrsv1[0]
        if proftrnrs.status.code == 1:
            assert profrs is not None
            response = profrs
        else:
            assert proftrnrs.status.code == 0
            dtprofup_server = proftrnrs.profrs.dtprofup
            assert dtprofup is None or dtprofup <= dtprofup_server

            # Cache the updated PROFRS sent by the server
            response.seek(0)
            with open(persistpath, "wb") as f:
                f.write(response.read())

        # Rewind PROFRS so it can be returned cleanly after having been parsed.
        response.seek(0)

        return response
Example #20
from ofxtools.Parser import OFXTree

parser = OFXTree()

# Open in binary mode and parse the file object (a bare path also works).
with open('/home/fabio/Downloads/Extrato_20180520.ofx', 'rb') as f:
    parser.parse(f)

ofx = parser.convert()

for transaction in ofx.statements[0].transactions:
    print(transaction)
Example #21
import os
import glob
from lxml import html
import itertools as it
import pandas as pd  # needed for pd.isnull() further down

import numpy as np
import matplotlib.pyplot as plt

import gsheet_functions as gs
# Determine TSP shares & price on purchase day
# need BalanceByFund for all dates incl previous

#%%  Attempts to open/convert old Quicken file
from ofxtools.Parser import OFXTree
parser = OFXTree()
with open('Croat.qxf', 'rb') as f:
    parser.parse(f)


#%%
def matchDuplTrans(allCC, Aptexp, colset):
    '''Match duplicate transactions (rows duplicated on colset) between allCC and Aptexp.'''
    # remove drop transactions (unrelated categories)
    ccsub=allCC[ (allCC['Matched']=='') | (pd.isnull(allCC['Matched']))]
    ccdups=ccsub[ccsub.duplicated(colset, keep=False)]
    aptdups=Aptexp[Aptexp.duplicated(colset, keep=False)]
    indsCC=[]
    indsExp=[]
    # Process each duplicated subgroup 
Example #22
#!/usr/bin/env python
# coding: utf-8

from __future__ import print_function

import argparse
import sys

from ofxtools.Parser import OFXTree


def log(message, end='\n'):
    print(message, end=end)
    sys.stdout.flush()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('files', nargs='+')
    args = parser.parse_args()

    parser = OFXTree()
    for filename in args.files:
        log('Parsing "{}"...'.format(filename), end='')
        parser.parse(filename)
        parser.convert()
        log('done!')
Example #23
class OFXTreeTestCase(TestCase):
    """ """
    def setUp(self):
        self.tree = OFXTree()

    def tearDown(self):
        del self.tree

    def test_parse(self):
        # OFXTree.parse() reads the source, strips the OFX header, feed()s
        # the OFX data to TreeBuilder, and stores the return value from
        # TreeBuilder.close() as its _root
        self.tree._read = MagicMock()
        self.tree._read.return_value = ('header', 'OFX payload')

        mockTreeBuilderClass = MagicMock()
        mockTreeBuilderInstance = mockTreeBuilderClass.return_value
        mockTreeBuilderInstance.close.return_value = 'ElementTree.Element'

        source = '/path/to/file.ofx'
        self.tree.parse(source, parser=mockTreeBuilderClass)
        self.tree._read.assert_called_once_with(source)
        mockTreeBuilderInstance.feed.assert_called_once_with('OFX payload')
        mockTreeBuilderInstance.close.assert_called_once()
        self.assertEqual(self.tree._root, 'ElementTree.Element')

    def test_read_filename(self):
        OFXHeader.parse = MagicMock()
        fake_header = MagicMock()
        fake_header.codec = 'utf8'
        OFXHeader.parse.return_value = fake_header

        source = NamedTemporaryFile()
        source.write(b'a bunch of text')
        source.seek(0)

        output = self.tree._read(source.name)
        source.close()
        self.assertEqual(output, (fake_header, 'a bunch of text'))

    def test_read_file(self):
        OFXHeader.parse = MagicMock()
        fake_header = MagicMock()
        fake_header.codec = 'utf8'
        OFXHeader.parse.return_value = fake_header

        source = NamedTemporaryFile()
        source.write(b'a bunch of text')
        source.seek(0)

        output = self.tree._read(source)
        source.close()
        self.assertEqual(output, (fake_header, 'a bunch of text'))

    def test_read_file_binary(self):
        OFXHeader.parse = MagicMock()
        fake_header = MagicMock()
        fake_header.codec = 'utf8'
        OFXHeader.parse.return_value = fake_header

        source = BytesIO('a bunch of text'.encode())

        output = self.tree._read(source)
        source.close()
        self.assertEqual(output, (fake_header, 'a bunch of text'))

    def test_read_illegal(self):
        source = 'a bunch of text'
        with self.assertRaises(ParseError):
            self.tree._read(source)

    def test_convert(self):
        # Fake the result of OFXTree.parse()
        self.tree._root = Element('FAKE')

        # OFXTree.convert() returns an OFX instance constructed from its root
        with patch('ofxtools.Parser.Aggregate') as MockAggregate:
            ofx = self.tree.convert()
            MockAggregate.from_etree.assert_called_once_with(self.tree._root)
            self.assertEqual(ofx, MockAggregate.from_etree())

    def test_convert_unparsed(self):
        # Calling OFXTree.convert() without first calling OFXTree.parse()
        # raises ValueError
        with self.assertRaises(ValueError):
            self.tree.convert()
Example #24
 def setUp(self):
     parser = OFXTree()
     parser.parse(OFX_FILE_PATH)
     self.ofx = parser.convert()
     self.auth_headers = {"Authorization": "Bearer {}".format(ACCESS_TOKEN)}
     self.accounts_map = {}
Example #25
print("Connecting to DB for Prepare")
conn = psycopg2.connect(database=s_databasename,
                        user=s_username,
                        password=s_password,
                        host=s_host,
                        port=n_port)
cur = conn.cursor()
cur.execute("select * from books.get_bank_account_id (%s, %s)",
            (s_bank_account_number, s_bank_account_friendly_name))
row = cur.fetchone()
n_bank_account_id = row[0]
conn.commit()
cur.execute("select * from load.prepare_ofx (%s)", [n_bank_account_id])
conn.commit()

parser = OFXTree()

with open(s_ofxfile, 'rb') as f:  # N.B. need to open file in binary mode
    parser.parse(f)

ofx = parser.convert()

stmts = ofx.statements
txs = stmts[0].transactions

acct = stmts[0].account

s_bankid = acct.bankid
s_acctid = acct.acctid
s_accttype = acct.accttype
s_branchid = acct.branchid
Example #26
def read_ofx_from_file(file_path):
    # ING thinks it's a good idea to embed HTML inside XML with no escaping
    # This strips that out.
    ofx_file = open(file_path, 'r')
    ofx_file_content = ofx_file.read().replace("<BR/>", " ")
    ofx_file.close()

    with tempfile.NamedTemporaryFile() as temp:
        temp.write(ofx_file_content.encode())
        temp.seek(0)

        parser = OFXTree()
        parser.parse(temp)

        # need this for a few fields later
        now = datetime.now().date()
        ofx_now_string = now.strftime("%Y%m%d")
        epoch_string = "19700101"

        #check acctfrom exists, if not make it
        #we can use a dummy value since don't need this field in our import
        stmtr = parser.find(".//STMTRS")
        if stmtr is None:
            print "Cannot find STMTR"
            exit(1)

        acctfrom = parser.find(".//STMTRS/BANKACCTFROM")
        if acctfrom is None:
            acctfrom_tree = ET.fromstring(
                "<BANKACCTFROM><BANKID>0</BANKID><ACCTID>0</ACCTID><ACCTTYPE>SAVINGS</ACCTTYPE></BANKACCTFROM>"
            )

            stmtr.insert(0, acctfrom_tree)

        # We also ignore this, so just put some dummy values
        ledger = parser.find(".//STMTRS/LEDGERBAL")
        if ledger is not None:
            stmtr.remove(ledger)
        ledger_tree = ET.fromstring("<LEDGERBAL><BALAMT>0</BALAMT><DTASOF>" +
                                    ofx_now_string + "</DTASOF></LEDGERBAL>")
        stmtr.insert(0, ledger_tree)

        avail = parser.find(".//STMTRS/AVAILBAL")
        if avail is not None:
            stmtr.remove(avail)
        avail_tree = ET.fromstring("<AVAILBAL><BALAMT>0</BALAMT><DTASOF>" +
                                   ofx_now_string + "</DTASOF></AVAILBAL>")
        stmtr.insert(0, avail_tree)

        # Again not really used with our import, so just set the maximum time frame
        banktranlist = parser.find(".//BANKTRANLIST")
        if banktranlist is None:
            print "Cannot find BANKTRANLIST"
            exit(1)

        dtstart = parser.find(".//BANKTRANLIST/DTSTART")
        if dtstart is None:

            dtstart_tree = ET.fromstring("<DTSTART>" + epoch_string +
                                         "</DTSTART>")
            dtend_tree = ET.fromstring("<DTEND>" + ofx_now_string + "</DTEND>")

            banktranlist.insert(0, dtstart_tree)
            banktranlist.insert(1, dtend_tree)

        # Commbanks fault this time. They have some non-standard date here, at least for this library
        # This standardises them to be the same
        sors = parser.find(".//SONRS")
        if sors is None:
            print "Cannot find SONRS"
            exit(1)

        dtserver = parser.find(".//SONRS/DTSERVER")
        if dtserver is not None:
            sors.remove(dtserver)

        dtserver_val = ET.fromstring("<DTSERVER>" + ofx_now_string +
                                     "</DTSERVER>")
        sors.insert(0, dtserver_val)

        return parser.convert()
Example #27
class OFXTreeTestCase(TestCase):
    def setUp(self):
        self.tree = OFXTree()

    def tearDown(self):
        del self.tree

    def test_parse(self):
        # OFXTree.parse() reads the source, strips the OFX header, feed()s
        # the OFX data to TreeBuilder, and stores the return value from
        # TreeBuilder.close() as its _root
        self.tree._read = MagicMock()
        self.tree._read.return_value = (sentinel.header, sentinel.ofx)

        mockTreeBuilderClass = MagicMock()
        mockTreeBuilderInstance = mockTreeBuilderClass.return_value
        mockTreeBuilderInstance.close.return_value = sentinel.root

        source = "/path/to/file.ofx"
        self.tree.parse(source, parser=mockTreeBuilderInstance)
        self.tree._read.assert_called_once_with(source)
        mockTreeBuilderInstance.feed.assert_called_once_with(sentinel.ofx)
        # FIXME - Fails on Python 3.5 ???
        #  mockTreeBuilderInstance.close.assert_called_once()
        self.assertEqual(self.tree._root, sentinel.root)

    def test_read_filename(self):
        with patch("builtins.open") as fake_open:
            with patch("ofxtools.Parser.parse_header") as fake_parse_header:
                fake_open.return_value = sentinel.file

                fake_header = sentinel.header
                fake_body = sentinel.ofx
                fake_parse_header.return_value = (fake_header, fake_body)

                source = NamedTemporaryFile()
                source.write(b"a bunch of text")
                source.seek(0)

                output = self.tree._read(source.name)
                source.close()
                fake_open.assert_called_once_with(source.name, "rb")
                fake_parse_header.assert_called_once_with(sentinel.file)
                self.assertEqual(output, (fake_header, fake_body))

    def test_read_file(self):
        with patch("ofxtools.Parser.parse_header") as fake_parse_header:
            fake_header = sentinel.header
            fake_body = sentinel.ofx
            fake_parse_header.return_value = (fake_header, fake_body)

            source = NamedTemporaryFile()
            source.write(b"a bunch of text")
            source.seek(0)

            output = self.tree._read(source)
            source.close()
            fake_parse_header.assert_called_once_with(source)
            self.assertEqual(output, (fake_header, fake_body))

    def test_read_not_bytes(self):
        source = NamedTemporaryFile(mode="w+")
        source.write("a bunch of text")
        source.seek(0)

        with self.assertRaises(ValueError):
            self.tree._read(source)

    def test_read_byteslike(self):
        # PR #15
        with patch("ofxtools.Parser.parse_header") as fake_parse_header:
            fake_header = sentinel.header
            fake_body = sentinel.ofx
            fake_parse_header.return_value = (fake_header, fake_body)

            source = BytesIO(b"a bunch of text")
            source.seek(0)

            output = self.tree._read(source)
            source.close()
            fake_parse_header.assert_called_once_with(source)
            self.assertEqual(output, (fake_header, fake_body))

    def test_read_illegal(self):
        source = "a bunch of text"
        with self.assertRaises(FileNotFoundError):
            self.tree._read(source)

    def test_convert(self):
        # Fake the result of OFXTree.parse()
        self.tree._root = Element("FAKE")

        # OFXTree.convert() returns an OFX instance constructed from its root
        with patch("ofxtools.Parser.Aggregate") as MockAggregate:
            ofx = self.tree.convert()
            MockAggregate.from_etree.assert_called_once_with(self.tree._root)
            self.assertEqual(ofx, MockAggregate.from_etree())

    def test_convert_unparsed(self):
        # Calling OFXTree.convert() without first calling OFXTree.parse()
        # raises ValueError
        with self.assertRaises(ValueError):
            self.tree.convert()
Example #28
class OFXTreeTestCase(TestCase):
    """ """
    def setUp(self):
        self.tree = OFXTree()

    def tearDown(self):
        del self.tree

    def test_parse(self):
        # OFXTree.parse() reads the source, strips the OFX header, feed()s
        # the OFX data to TreeBuilder, and stores the return value from
        # TreeBuilder.close() as its _root
        self.tree._read = MagicMock()
        self.tree._read.return_value = 'source contents'

        self.tree._stripHeader = MagicMock()
        self.tree._stripHeader.return_value = 'OFX data'

        mockTreeBuilderClass = MagicMock()
        mockTreeBuilderInstance = mockTreeBuilderClass.return_value
        mockTreeBuilderInstance.close.return_value = 'ElementTree.Element'

        source = '/path/to/file.ofx'
        self.tree.parse(source, parser=mockTreeBuilderClass)
        self.tree._read.assert_called_once_with(source)
        self.tree._stripHeader.assert_called_once_with('source contents')
        mockTreeBuilderInstance.feed.assert_called_once_with('OFX data')
        self.assertEqual(self.tree._root, 'ElementTree.Element')

    def test_stripHeader(self):
        # FIXME - can't make unittest.mock.patch work for OFXHeader
        strip = OFXHeader.strip
        OFXHeader.strip = MagicMock()
        OFXHeader.strip.return_value = 'OFX data'
        stripped = self.tree._stripHeader('source contents')
        OFXHeader.strip.assert_called_once_with('source contents')
        self.assertEqual(stripped, 'OFX data')
        OFXHeader.strip = strip
        with self.assertRaises(AttributeError):
            OFXHeader.strip.return_value

    def test_read_filename(self):
        source = NamedTemporaryFile()
        source.write(b'a bunch of text')
        source.seek(0)
        output = self.tree._read(source.name)
        source.close()
        self.assertEqual(output, 'a bunch of text')

    def test_read_file(self):
        source = TemporaryFile()
        source.write(b'a bunch of text')
        source.seek(0)
        output = self.tree._read(source)
        source.close()
        self.assertEqual(output, 'a bunch of text')

    def test_read_file_binary(self):
        source = BytesIO('a bunch of text'.encode())
        output = self.tree._read(source)
        source.close()
        self.assertEqual(output, 'a bunch of text')

    def test_read_string(self):
        source = 'a bunch of text'
        output = self.tree._read(source)
        self.assertEqual(output, 'a bunch of text')

    def test_read_illegal(self):
        source = 23
        with self.assertRaises(ParseError):
            self.tree._read(source)

    def test_convert(self):
        # Fake the result of OFXTree.parse()
        self.tree._root = Element('FAKE')

        # OFXTree.convert() returns an OFX instance constructed from its root
        with patch('ofxtools.Parser.Aggregate') as MockAggregate:
            ofx = self.tree.convert()
            MockAggregate.from_etree.assert_called_once_with(self.tree._root)
            self.assertEqual(ofx, MockAggregate.from_etree())

    def test_convert_unparsed(self):
        # Calling OFXTree.convert() without first calling OFXTree.parse()
        # raises ValueError
        with self.assertRaises(ValueError):
            self.tree.convert()
Example #29
 def setUp(self):
     self.tree = OFXTree()
Example #30
def ofx_parse(filename):
    tree = OFXTree()
    tree.parse(filename)
    return tree.convert()
Example #31
class OFXTreeTestCase(TestCase):
    def setUp(self):
        self.tree = OFXTree()

    def tearDown(self):
        del self.tree

    def test_parse(self):
        # OFXTree.parse() reads the source, strips the OFX header, feed()s
        # the OFX data to TreeBuilder, and stores the return value from
        # TreeBuilder.close() as its _root
        self.tree._read = MagicMock()
        self.tree._read.return_value = (sentinel.header, sentinel.ofx)

        mockTreeBuilderClass = MagicMock()
        mockTreeBuilderInstance = mockTreeBuilderClass.return_value
        mockTreeBuilderInstance.close.return_value = sentinel.root

        source = "/path/to/file.ofx"
        self.tree.parse(source, parser=mockTreeBuilderInstance)
        self.tree._read.assert_called_once_with(source)
        mockTreeBuilderInstance.feed.assert_called_once_with(sentinel.ofx)
        # FIXME - Fails on Python 3.5 ???
        #  mockTreeBuilderInstance.close.assert_called_once()
        self.assertEqual(self.tree._root, sentinel.root)

    def test_read_filename(self):
        with patch("builtins.open") as fake_open:
            with patch("ofxtools.Parser.parse_header") as fake_parse_header:
                fake_open.return_value = sentinel.file

                fake_header = sentinel.header
                fake_body = sentinel.ofx
                fake_parse_header.return_value = (fake_header, fake_body)

                source = NamedTemporaryFile()
                source.write(b"a bunch of text")
                source.seek(0)

                output = self.tree._read(source.name)
                source.close()
                fake_open.assert_called_once_with(source.name, "rb")
                fake_parse_header.assert_called_once_with(sentinel.file)
                self.assertEqual(output, (fake_header, fake_body))

    def test_read_file(self):
        with patch("ofxtools.Parser.parse_header") as fake_parse_header:
            fake_header = sentinel.header
            fake_body = sentinel.ofx
            fake_parse_header.return_value = (fake_header, fake_body)

            source = NamedTemporaryFile()
            source.write(b"a bunch of text")
            source.seek(0)

            output = self.tree._read(source)
            source.close()
            fake_parse_header.assert_called_once_with(source)
            self.assertEqual(output, (fake_header, fake_body))

    def test_read_not_bytes(self):
        source = NamedTemporaryFile(mode="w+")
        source.write("a bunch of text")
        source.seek(0)

        with self.assertRaises(ValueError):
            self.tree._read(source)

    def test_read_byteslike(self):
        # PR #15
        with patch("ofxtools.Parser.parse_header") as fake_parse_header:
            fake_header = sentinel.header
            fake_body = sentinel.ofx
            fake_parse_header.return_value = (fake_header, fake_body)

            source = BytesIO(b"a bunch of text")
            source.seek(0)

            output = self.tree._read(source)
            source.close()
            fake_parse_header.assert_called_once_with(source)
            self.assertEqual(output, (fake_header, fake_body))

    def test_read_illegal(self):
        source = "a bunch of text"
        with self.assertRaises(FileNotFoundError):
            self.tree._read(source)

    def test_convert(self):
        # Fake the result of OFXTree.parse()
        self.tree._root = Element("FAKE")

        # OFXTree.convert() returns an OFX instance constructed from its root
        with patch("ofxtools.Parser.Aggregate") as MockAggregate:
            ofx = self.tree.convert()
            MockAggregate.from_etree.assert_called_once_with(self.tree._root)
            self.assertEqual(ofx, MockAggregate.from_etree())

    def test_convert_unparsed(self):
        # Calling OFXTree.convert() without first calling OFXTree.parse()
        # raises ValueError
        with self.assertRaises(ValueError):
            self.tree.convert()
Example #32
 def setUpClass(cls):
     cls.tree = OFXTree()
     parser = TreeBuilder()
     parser.feed(cls.ofx)
     cls.tree._root = parser.close()
Example #33
def main(qfx_file_in, qfx_file_out):
    """Main."""

    # parse xml
    doc, ns = getXmlEtree(qfx_file_in)
    logger.debug("doc: {}".format(pprintXml(doc).decode()))
    logger.debug("ns: {}".format(json.dumps(ns, indent=2)))

    # fix transactions
    for trn in doc.xpath('.//STMTTRN', namespaces=ns):
        logger.info("#" * 80)
        logger.info("trn: {}".format(pprintXml(trn).decode()))
        memo_elt = xpath(trn, 'MEMO', ns)
        memo = memo_elt.text[:32]
        logger.info("memo: {}".format(memo))
        logger.info("type memo: {}".format(type(memo)))

        # extract name
        match = TRN_RE.search(memo)
        if match:
            name = match.group(1)
            logger.info("name: {}".format(name))
            name_elt = SubElement(trn, "NAME")
            name_elt.text = name
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # monthly interest paid?
        match = INT_RE.search(memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = "Capital One"
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # check
        match = CHK_RE.search(memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = match.group(0)
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # prenote
        match = PRENOTE_RE.search(memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = match.group(0)
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # refund
        match = re.search(r'LMU', memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = match.group(0)
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # refund
        match = re.search(r'360 Checking', memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = match.group(0)
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # zelle
        match = re.search(r'Zelle money (received from|sent to|returned).*',
                          memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = match.group(0)
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # transfer to savings
        match = re.search(r'Withdrawal to', memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = match.group(0)
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # checkbook order
        match = re.search(r'Checkbook Order', memo)
        if match:
            name_elt = SubElement(trn, "NAME")
            name_elt.text = match.group(0)
            trn.remove(memo_elt)
            logger.info("trn: {}".format(pprintXml(trn).decode()))
            continue

        # uncaught case
        logger.info("trn: {}".format(pprintXml(trn).decode()))
        raise RuntimeError("Unhandled transaction.")

    # write output file
    v2_message = '<?xml version="1.0" encoding="utf-8"?>\n'
    v2_message += '<?OFX OFXHEADER="200" VERSION="202" SECURITY="NONE" OLDFILEUID="NONE" NEWFILEUID="NONE"?>\n'
    v2_message += pprintXml(doc).decode()
    parser = OFXTree()
    parser.parse(BytesIO(v2_message.encode()))
    ofx = parser.convert()
    client = OFXClient(None)
    v1_message = client.serialize(ofx,
                                  version=102,
                                  prettyprint=True,
                                  close_elements=False).decode()
    with open(qfx_file_out, 'w') as f:
        f.write(v1_message)