Example #1
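This test command loads a saved page dump from ./pagedump/, resets the scraper's account state, and runs the step-4 account-list parser over it, printing whatever accounts it finds.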
    def doAccountList(self, opts):
        creds = {}
        scraper = TestBankScraper(creds)

        # Parse a previously saved raw page dump rather than a live page.
        with open('./pagedump/' + opts[0], 'r') as f:
            raw = f.read()

        soup = UglySoup(raw)
        print soup.ppp("test file loaded")

        scraper.myAccounts = []
        scraper.accountLinks = []

        # Step 5: run the parser that matches this page type.  The
        # commented-out calls are alternative parsers for other layouts.
        print scraper.doStep4({}, raw)
        #scraper._parseLinks(raw)
        #scraper._parseNatWestLinks(raw)

        print str(scraper.myAccounts)

        return 'good'
Example #2
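A simpler helper: it loads whatever saved page file is named in opts and pretty-prints it via UglySoup, which is handy for inspecting dumped HTML by hand.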
    def display(self, opts):
        # Pretty-print a saved page file so its structure can be inspected.
        with open(opts[0], 'r') as f:
            raw = f.read()

        soup = UglySoup(raw)
        print soup.ppp("test file loaded")
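UglySoup, TestBankScraper, and Facade are project-local classes that none of these snippets define. Purely as an illustration, a stub along these lines (every name and the sample path here are assumptions, not the project's real API) is enough to run the display command in isolation:

    # Hypothetical stand-ins -- UglySoup.ppp() and the harness wiring are
    # guessed from the snippets on this page, not taken from the project.
    class UglySoup(object):
        def __init__(self, raw):
            self.raw = raw

        def ppp(self, msg):
            # Tag the message with the size of the loaded page.
            return '%s (%d bytes)' % (msg, len(self.raw))

    class Harness(object):
        def display(self, opts):
            with open(opts[0], 'r') as f:
                raw = f.read()
            soup = UglySoup(raw)
            print soup.ppp("test file loaded")

    Harness().display(['./pagedump/sample.html'])  # sample path is made up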
Example #3
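This command drives a fuller synchronisation pass against canned data: it reads stored credentials, wires up a scraper with a browser, feeds a saved account page through the normal-account parser, and dumps the resulting statement summary as JSON.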
    def doSynchro(self, opts):
        # Load the stored test credentials (a list of records, each with
        # a 'credentials' object).
        with open('./tests/data/natwest-creds.json', 'r') as f:
            creds = simplejson.loads(f.read())
        cb = creds[0]['credentials']
        print cb

        scraper = TestBankScraper(cb)
        scraper.facade = Facade('danm')
        scraper.token = 'splat'

        scraper._setupBrowser()

        # Feed a saved account page through the parser; swap in
        # account1.html to test the other captured page.
        with open('./tests/data/account2.html', 'r') as f:
            raw = f.read()

        soup = UglySoup(raw)
        print soup.ppp("test file loaded")

        scraper._processNormAccount(raw, ['Person', 'DanM', 'Account', 'a1'],
                                    2304)
        # Credit-card pages go through a separate parser:
        #scraper._processCCAccount(raw, ['Person', 'DanM', 'Account', 'a2'], -456.09)

        # Summarise the first parsed statement as JSON.
        s = scraper.statementlist[0]
        sm = {}
        sm['balance'] = s.getSynchBalance()
        sm['xacts'] = s.getxactlist()
        sm['path'] = s.get_s_path()

        print simplejson.dumps(sm, indent=4)
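For reference, creds[0]['credentials'] implies that natwest-creds.json holds a list of records, each carrying a credentials object. Only that outer shape is implied by the code; the field names below are invented placeholders:

    [
        {
            "credentials": {
                "customer_number": "........",
                "pin": "....",
                "password": "........"
            }
        }
    ]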