    def doAccountList(self, opts):

        creds = {}  # no live login needed; this method parses a dumped page offline

        scraper = TestBankScraper(creds)

        # Load a previously dumped raw page from disk.

        with open('./pagedump/' + opts[0], 'r') as f:
            raw = f.read()

        soup = UglySoup(raw)

        print soup.ppp("test file loaded")

        # Reset the scraper's collected state so results come only from this page.
        scraper.myAccounts = []
        scraper.accountLinks = []

        # (5) Call the parsing function under test.

        print scraper.doStep4({}, raw)
        # Alternative parsers that can be exercised instead:
        #scraper._parseLinks(raw)
        #scraper._parseNatWestLinks(raw)

        print str(scraper.myAccounts)

        return 'good'
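
These harness methods lean on an UglySoup helper whose ppp() returns a
labelled pretty-print of the parsed page. A minimal sketch of that wrapper,
assuming it subclasses the Python 2 era BeautifulSoup (BS3); the real class
lives elsewhere in this project, so treat it as illustrative only:

    from BeautifulSoup import BeautifulSoup  # BS3, Python 2

    class UglySoup(BeautifulSoup):
        def ppp(self, label):
            # Assumption: prepend a label to the prettified page dump.
            return label + '\n' + self.prettify()
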
    def display(self, opts):
        with open(opts[0], 'r') as f:
            raw = f.read()

        soup = UglySoup(raw)

        print soup.ppp("test file loaded")
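
Each harness method takes its arguments as a positional opts list, with
opts[0] used as the filename throughout. A hypothetical driver (the harness
class name and the dump filename here are assumptions, not from the source)
might dispatch them like this:

    # Hypothetical names: ScraperTestHarness and accountlist.html are assumed.
    harness = ScraperTestHarness()
    harness.doAccountList(['accountlist.html'])       # reads ./pagedump/accountlist.html
    harness.display(['./pagedump/accountlist.html'])  # pretty-prints any dumped page
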
    def doSynchro(self, opts):

        with open('./tests/data/natwest-creds.json', 'r') as f:
            raw = f.read()

        creds = simplejson.loads(raw)
        cb = creds[0]['credentials']
        print cb

        scraper = TestBankScraper(cb)
        scraper.facade = Facade('danm')
        scraper.token = 'splat'

        scraper._setupBrowser()

        # Swap in account1.html below to exercise the other fixture.
        #with open('./tests/data/account1.html', 'r') as f:
        with open('./tests/data/account2.html', 'r') as f:
            raw = f.read()

        soup = UglySoup(raw)

        print soup.ppp("test file loaded")

        scraper._processNormAccount(raw, ['Person', 'DanM', 'Account', 'a1'],
                                    2304)

        # Credit-card variant of the same step:
        #scraper._processCCAccount(raw, ['Person', 'DanM', 'Account', 'a2'], -456.09)

        # Summarise the first parsed statement as a plain dict.
        s = scraper.statementlist[0]

        sm = {}
        sm['balance'] = s.getSynchBalance()
        sm['xacts'] = s.getxactlist()
        sm['path'] = s.get_s_path()

        print simplejson.dumps(sm, indent=4)
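
doSynchro indexes the credentials file as creds[0]['credentials'], so
./tests/data/natwest-creds.json must be a JSON list whose first entry holds a
'credentials' object. A sketch of a fixture generator under that assumption;
the field names inside 'credentials' are invented placeholders:

    import simplejson

    # Only the outer list/dict shape is taken from doSynchro's indexing;
    # customer_number and pin are hypothetical field names.
    fixture = [
        {'credentials': {'customer_number': '0000000000', 'pin': '0000'}}
    ]
    with open('./tests/data/natwest-creds.json', 'w') as f:
        f.write(simplejson.dumps(fixture, indent=4))
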
    def _pp(self, name, page):

        soup = UglySoup(page)

        print "\n>>>>>>>>>>>>>>>>>" + name