# Connect to the database (local socket, default credentials).
dburl = 'postgresql+psycopg2:///' + db_name
engine = create_engine(dburl)

# Create schema and tables. CreateSchema raises ProgrammingError when the
# schema already exists; that is the expected steady state, so ignore it.
try:
    engine.execute(CreateSchema(schema))
except ProgrammingError:
    pass
Account.__table__.schema = schema
Position.__table__.schema = schema
Account.__table__.create(engine, checkfirst=True)
Position.__table__.create(engine, checkfirst=True)

Session = sessionmaker(bind=engine)
session = Session()

# Create the account entry
account_info = {'institution': 'Test', 'account': 'Test'}
pos_data = {}
pos_data['id'] = get_id(account_info, session)

# First line of the file carries the timestamp; the rest is CSV position
# data. `with` guarantees the file is closed even on error.
with open(pos_fname, 'r') as f:
    pos_data['timestamp'] = f.readline().strip().split()[-1]
    reader = csv.DictReader(f)
    for row in reader:
        pos_data.update(gen_position_data(row, fieldmap))
        session.add(Position(**pos_data))
        try:
            session.commit()
        except IntegrityError as err:
            if 'duplicate key' in str(err):
                # Row was loaded on a previous run; skip it.
                session.rollback()
            else:
                # Bare raise preserves the original traceback.
                raise
# Parse the "as of" timestamp; two layouts appear in Schwab exports, so
# fall back to the comma-separated form when the first parse fails.
try:
    dateinfo = pos_line.split('as of ')[-1].split()[0:2]
    date, time = add_timezone(*dateinfo)
except ValueError:
    # NOTE(review): the trailing double quote in the format string appears
    # deliberate (the raw line seems to end with a quote character --
    # account names below are also stripped of '"') -- confirm against a
    # real export before changing it.
    dateinfo = pos_line.split('as of ')[-1].split(',')[0:2]
    date, time = add_timezone(*dateinfo, fmt='%I:%M %p ET %m/%d/%Y"')
pos_data['timestamp'] = '%s %s' % (date, time)

# Split the file into per-account sections: each section starts at a line
# containing the masked account number ("XXXX-...") and runs up to its
# "Account Total" line.
accounts = [x for x in lines if 'xxxx-' in x.lower()]
totals = [x for x in lines if 'account total' in x.lower()]
starts = [lines.index(x) for x in accounts]
ends = [lines[x:].index(y) + x for x, y in zip(starts, totals)]
sections = ['\n'.join(lines[start + 1:end])
            for start, end in zip(starts, ends)]
data = [csv.DictReader(StringIO(x)) for x in sections]

# Load each account's rows, keyed by the account id looked up (or created)
# for that account.
for account, reader in zip(accounts, data):
    account_info.update({'account': account.replace('"', '')})
    pos_data['id'] = get_id(account_info, session)
    for row in reader:
        pos_data.update(gen_position_data(row, schwab_map,
                                          cashdesc='Brokerage'))
        session.add(Position(**pos_data))
        try:
            session.commit()
        except IntegrityError as err:
            if 'duplicate key' in str(err):
                # Row was loaded on a previous run; skip it.
                session.rollback()
            else:
                # Bare raise preserves the original traceback.
                raise
db_name = sys.argv[2]

# Connect to db (local socket, default credentials).
dburl = 'postgresql+psycopg2:///' + db_name
engine = create_engine(dburl)

# Create schema and tables. CreateSchema raises ProgrammingError when the
# schema already exists; that is the expected steady state, so ignore it.
try:
    engine.execute(CreateSchema(SCHEMA))
except ProgrammingError:
    pass
Base.metadata.create_all(engine)

Session = sessionmaker(bind=engine)
session = Session()

account_info = {'institution': 'Fidelity'}
pos_data = {}

# First line of the file carries the timestamp; the rest is CSV position
# data. `with` guarantees the file is closed even on error.
with open(pos_fname, 'r') as f:
    pos_data['timestamp'] = f.readline().strip().split()[-1]
    reader = csv.DictReader(f)
    for row in reader:
        # DictReader collects extra, unheadered trailing fields under the
        # None key; the Fidelity export produces them, so drop them.
        del row[None]
        account_info['account'] = row['Account Name/Number'].strip()
        pos_data['id'] = get_id(account_info, session)
        pos_data.update(gen_position_data(row, fidelity_map))
        session.add(Position(**pos_data))
        try:
            session.commit()
        except IntegrityError as err:
            if 'duplicate key' in str(err):
                # Row was loaded on a previous run; skip it.
                session.rollback()
            else:
                # Bare raise preserves the original traceback.
                raise
db_name = sys.argv[2]
acct_num = sys.argv[3]

# Connect to db (local socket, default credentials).
dburl = 'postgresql+psycopg2:///' + db_name
engine = create_engine(dburl)

# Create schema and tables. CreateSchema raises ProgrammingError when the
# schema already exists; that is the expected steady state, so ignore it.
try:
    engine.execute(CreateSchema(SCHEMA))
except ProgrammingError:
    pass
Base.metadata.create_all(engine)

Session = sessionmaker(bind=engine)
session = Session()

account_info = {'institution': 'Scottrade', 'account': acct_num}
pos_data = {}
pos_data['id'] = get_id(account_info, session)

# First line of the file carries the timestamp; the rest is CSV position
# data. `with` guarantees the file is closed even on error.
with open(pos_fname, 'r') as f:
    pos_data['timestamp'] = f.readline().strip().split()[-1]
    reader = csv.DictReader(f)
    for row in reader:
        pos_data.update(gen_position_data(row, scottrade_map))
        session.add(Position(**pos_data))
        try:
            session.commit()
        except IntegrityError as err:
            if 'duplicate key' in str(err):
                # Row was loaded on a previous run; skip it.
                session.rollback()
            else:
                # Bare raise preserves the original traceback.
                raise