Exemplo n.º 1
0
def position_calculated(position, session, checkpoint=None, start=None, end=None):
    """Recompute a position's quantity from postings made after its checkpoint.

    Sums posting quantities journalled strictly after ``start`` (and no later
    than ``end``, when given) and adds them to the checkpointed quantity.

    Args:
        position: position record providing ``username``, ``contract_id``,
            ``position_checkpoint`` and ``position_cp_timestamp``.
        session: SQLAlchemy session used to run the query.
        checkpoint: base quantity; defaults to the position's stored
            checkpoint (or 0 when unset).
        start: exclusive lower timestamp bound; defaults to the position's
            checkpoint timestamp (or the epoch when unset).
        end: optional inclusive upper timestamp bound.

    Returns:
        Tuple ``(quantity, last_posting_timestamp)``: ``quantity`` is
        ``int(checkpoint + sum_of_matching_postings)`` and
        ``last_posting_timestamp`` is the newest matching journal timestamp,
        falling back to ``start`` when no postings match.
    """
    if start is None:
        start = position.position_cp_timestamp or timestamp_to_dt(0)
    if checkpoint is None:
        checkpoint = position.position_checkpoint or 0

    rows = session.query(func.sum(models.Posting.quantity).label('quantity_sum'),
                         func.max(models.Journal.timestamp).label('last_timestamp')).filter_by(
        username=position.username).filter_by(
        contract_id=position.contract_id).filter(
        models.Journal.id==models.Posting.journal_id).filter(
        models.Journal.timestamp > start)
    if end is not None:
        rows = rows.filter(models.Journal.timestamp <= end)

    try:
        grouped = rows.group_by(models.Posting.username).one()
        # BUG FIX: SUM() yields SQL NULL (None) when every matched quantity
        # is NULL, which would make the int() below raise; treat it as zero.
        calculated = grouped.quantity_sum or 0
        last_posting_timestamp = grouped.last_timestamp
    except NoResultFound:
        # No postings after the checkpoint: the position is exactly the checkpoint.
        calculated = 0
        last_posting_timestamp = start

    return int(checkpoint + calculated), last_posting_timestamp
Exemplo n.º 2
0
def position_calculated(position,
                        session,
                        checkpoint=None,
                        start=None,
                        end=None):
    """Return ``(calculated_quantity, last_posting_timestamp)`` for a position.

    The calculated quantity is the stored checkpoint plus the sum of every
    posting quantity journalled strictly after ``start`` (and no later than
    ``end``, when an upper bound is supplied).
    """
    if checkpoint is None:
        checkpoint = position.position_checkpoint or 0
    if start is None:
        start = position.position_cp_timestamp or timestamp_to_dt(0)

    query = session.query(
        func.sum(models.Posting.quantity).label('quantity_sum'),
        func.max(models.Journal.timestamp).label('last_timestamp'))
    query = query.filter_by(username=position.username)
    query = query.filter_by(contract_id=position.contract_id)
    query = query.filter(models.Journal.id == models.Posting.journal_id)
    query = query.filter(models.Journal.timestamp > start)
    if end is not None:
        query = query.filter(models.Journal.timestamp <= end)

    try:
        row = query.group_by(models.Posting.username).one()
        calculated = row.quantity_sum
        last_posting_timestamp = row.last_timestamp
    except NoResultFound:
        # Nothing journalled in the window: fall back to the checkpoint alone.
        calculated = 0
        last_posting_timestamp = start

    return int(checkpoint + calculated), last_posting_timestamp
Exemplo n.º 3
0
    def get_transaction_history(self, from_timestamp, to_timestamp, username):
        """Build the user's ledger history between two timestamps.

        First seeds per-ticker balances with everything journalled before
        ``from_timestamp``, then replays postings inside the window and emits
        one transaction dict per posting carrying its running balance.
        """
        # Opening balances: net quantity per ticker before the window starts.
        rows = yield self.dbpool.runQuery(
            "SELECT contracts.ticker, SUM(posting.quantity) FROM posting, journal, contracts "
            "WHERE posting.journal_id=journal.id AND posting.username=%s AND journal.timestamp<%s "
            "AND posting.contract_id=contracts.id GROUP BY contracts.ticker",
            (username, util.timestamp_to_dt(from_timestamp)))

        balances = collections.defaultdict(int)
        for ticker, quantity_sum in rows:
            balances[ticker] = int(quantity_sum)

        rows = yield self.dbpool.runQuery(
            "SELECT contracts.ticker, journal.timestamp, posting.quantity, journal.type, posting.note "
            "FROM posting, journal, contracts WHERE posting.journal_id=journal.id AND "
            "posting.username=%s AND journal.timestamp>=%s AND journal.timestamp<=%s "
            "AND posting.contract_id=contracts.id ORDER BY journal.timestamp",
            (username, util.timestamp_to_dt(from_timestamp),
             util.timestamp_to_dt(to_timestamp)))

        transactions = []
        for ticker, journal_ts, quantity, journal_type, note in rows:
            balances[ticker] += quantity

            # Here we assume that the user is a Liability user, so a
            # negative quantity is a debit and a positive one a credit.
            direction = 'debit' if quantity < 0 else 'credit'

            transactions.append({
                'contract': ticker,
                'timestamp': util.dt_to_timestamp(journal_ts),
                'quantity': abs(quantity),
                'type': journal_type,
                'direction': direction,
                'balance': balances[ticker],
                'note': note
            })

        returnValue(transactions)
Exemplo n.º 4
0
    def get_transaction_history(self, from_timestamp, to_timestamp, username):
        """Return the user's postings between the two timestamps.

        Issues two ledger queries: one establishing per-ticker opening
        balances as of ``from_timestamp``, one fetching the postings inside
        the requested window. Each posting becomes a dict that includes the
        running balance for its ticker.
        """
        window_start = util.timestamp_to_dt(from_timestamp)
        window_end = util.timestamp_to_dt(to_timestamp)

        result = yield self.dbpool.runQuery(
            "SELECT contracts.ticker, SUM(posting.quantity) FROM posting, journal, contracts "
            "WHERE posting.journal_id=journal.id AND posting.username=%s AND journal.timestamp<%s "
            "AND posting.contract_id=contracts.id GROUP BY contracts.ticker",
            (username, window_start))

        balances = collections.defaultdict(int)
        for row in result:
            balances[row[0]] = int(row[1])

        result = yield self.dbpool.runQuery(
            "SELECT contracts.ticker, journal.timestamp, posting.quantity, journal.type, posting.note "
            "FROM posting, journal, contracts WHERE posting.journal_id=journal.id AND "
            "posting.username=%s AND journal.timestamp>=%s AND journal.timestamp<=%s "
            "AND posting.contract_id=contracts.id ORDER BY journal.timestamp",
            (username, window_start, window_end))

        transactions = []
        for row in result:
            ticker, journal_ts, quantity, journal_type, note = row
            balances[ticker] += quantity

            # The user is assumed to be a Liability user: negative postings
            # are debits and positive postings are credits.
            if quantity < 0:
                direction = 'debit'
            else:
                direction = 'credit'

            transactions.append({'contract': ticker,
                                 'timestamp': util.dt_to_timestamp(journal_ts),
                                 'quantity': abs(quantity),
                                 'type': journal_type,
                                 'direction': direction,
                                 'balance': balances[ticker],
                                 'note': note})

        returnValue(transactions)
Exemplo n.º 5
0
    def atomic_commit(self, postings):
        """Commit a balanced group of postings as a single journal entry.

        Validates that all postings share one type and count and that debits
        equal credits per contract, then inserts one Journal row followed by
        one Posting row per entry, signing each quantity according to the
        posting direction and the account's Asset/Liability type.

        Args:
            postings: list of dicts with keys "type", "count", "contract",
                "username", "quantity", "direction", "note" and "timestamp".

        Returns:
            True on success.

        Raises:
            INTERNAL_ERROR: empty posting list.
            TYPE_MISMATCH / COUNT_MISMATCH: inconsistent type/count fields.
            QUANTITY_MISMATCH: debits and credits do not balance.
            DATABASE_ERROR: any SQLAlchemy failure (original is logged first).
        """

        start = time.time()
        log.msg("atomic commit called for %s at %f" % (postings, start))
        try:
            # sanity check
            if len(postings) == 0:
                raise INTERNAL_ERROR

            types = [posting["type"] for posting in postings]
            counts = [posting["count"] for posting in postings]

            # Renamed loop variables so the builtins type/count aren't shadowed.
            if not all(t == types[0] for t in types):
                raise TYPE_MISMATCH
            if not all(c == counts[0] for c in counts):
                raise COUNT_MISMATCH

            # balance check
            debitsum = defaultdict(int)
            creditsum = defaultdict(int)

            log.msg("auditing postings at %f" % (time.time() - start))
            for posting in postings:
                if posting["direction"] == "debit":
                    debitsum[posting["contract"]] += posting["quantity"]
                if posting["direction"] == "credit":
                    creditsum[posting["contract"]] += posting["quantity"]

            # BUG FIX: the original used `is not 0` (an identity test that only
            # works via CPython's small-int cache) and only iterated debit
            # tickers, so a credit-only contract escaped the balance check.
            for ticker in set(debitsum) | set(creditsum):
                if debitsum[ticker] - creditsum[ticker] != 0:
                    raise QUANTITY_MISMATCH

            # create the journal and postings
            # The journal is created separately from the postings but this is ok because
            # all the postings are created at once. If the posting commit fails then we'll
            # just end up with an empty journal which won't break anything
            # TODO: Create the journal and postings together
            log.msg("creating the journal at %f" % (time.time() - start))
            ins = Journal.__table__.insert()

            result = self.execute(ins, type=types[0], timestamp=datetime.datetime.utcnow())

            journal_id = result.inserted_primary_key[0]

            log.msg("creating the db postings at %f" % (time.time() - start))
            db_postings = []
            for posting in postings:
                contract_table = Contract.__table__
                s = select([contract_table.c.id], contract_table.c.ticker == posting["contract"])
                contract_id = self.execute(s).first()[0]

                user_table = User.__table__
                s = select([user_table.c.type], user_table.c.username == posting["username"])
                user_type = self.execute(s).first()[0]

                if posting["timestamp"] is not None:
                    timestamp = timestamp_to_dt(posting["timestamp"])
                else:
                    timestamp = None

                # Sign convention: debits increase Asset accounts and decrease
                # all others; credits are the reverse.
                if posting["direction"] == 'debit':
                    sign = 1 if user_type == 'Asset' else -1
                else:
                    sign = -1 if user_type == 'Asset' else 1

                db_posting = {'username': posting["username"],
                              'contract_id': contract_id,
                              'quantity': sign * posting["quantity"],
                              'note': posting["note"],
                              'timestamp': timestamp,
                              'journal_id': journal_id}
                db_postings.append(db_posting)
                log.msg("done making posting at %f: %s" % (time.time() - start, db_posting))

            ins = Posting.__table__.insert()
            result = self.execute(ins, db_postings)
            log.msg("Inserted %d rows of %d postings" % (result.rowcount, len(db_postings)))
            log.msg("Done committing postings at %f" % (time.time() - start))

            return True

        except Exception as e:
            log.err("Caught exception trying to commit. Postings were:")
            for posting in postings:
                log.err(str(posting))
            log.err("Stack trace follows:")
            log.err()
            if isinstance(e, SQLAlchemyError):
                raise DATABASE_ERROR
            # Bare raise preserves the original traceback (raise e discards it).
            raise
Exemplo n.º 6
0
    def atomic_commit(self, postings):
        """Atomically record a balanced set of postings under one journal.

        After verifying the postings agree on type and count and that each
        contract's debits balance its credits, a Journal row is inserted and
        then every posting is inserted referencing it, with quantities signed
        by direction and the Asset/Liability type of the account.

        Args:
            postings: list of dicts with keys "type", "count", "contract",
                "username", "quantity", "direction", "note" and "timestamp".

        Returns:
            True on success.

        Raises:
            INTERNAL_ERROR: empty posting list.
            TYPE_MISMATCH / COUNT_MISMATCH: inconsistent type/count fields.
            QUANTITY_MISMATCH: debits and credits do not balance.
            DATABASE_ERROR: any SQLAlchemy failure (original is logged first).
        """

        start = time.time()
        log.msg("atomic commit called for %s at %f" % (postings, start))
        try:
            # sanity check
            if len(postings) == 0:
                raise INTERNAL_ERROR

            types = [posting["type"] for posting in postings]
            counts = [posting["count"] for posting in postings]

            # Loop variables renamed so the builtins type/count aren't shadowed.
            if not all(t == types[0] for t in types):
                raise TYPE_MISMATCH
            if not all(c == counts[0] for c in counts):
                raise COUNT_MISMATCH

            # balance check
            debitsum = defaultdict(int)
            creditsum = defaultdict(int)

            log.msg("auditing postings at %f" % (time.time() - start))
            for posting in postings:
                if posting["direction"] == "debit":
                    debitsum[posting["contract"]] += posting["quantity"]
                if posting["direction"] == "credit":
                    creditsum[posting["contract"]] += posting["quantity"]

            # BUG FIX: `is not 0` compared object identity (reliable only for
            # CPython's cached small ints) and only debit tickers were
            # checked, letting a credit-only contract slip through.
            for ticker in set(debitsum) | set(creditsum):
                if debitsum[ticker] - creditsum[ticker] != 0:
                    raise QUANTITY_MISMATCH

            # create the journal and postings
            # The journal is created separately from the postings but this is ok because
            # all the postings are created at once. If the posting commit fails then we'll
            # just end up with an empty journal which won't break anything
            # TODO: Create the journal and postings together
            log.msg("creating the journal at %f" % (time.time() - start))
            ins = Journal.__table__.insert()

            result = self.execute(ins,
                                  type=types[0],
                                  timestamp=datetime.datetime.utcnow())

            journal_id = result.inserted_primary_key[0]

            log.msg("creating the db postings at %f" % (time.time() - start))
            db_postings = []
            for posting in postings:
                contract_table = Contract.__table__
                s = select([contract_table.c.id],
                           contract_table.c.ticker == posting["contract"])
                contract_id = self.execute(s).first()[0]

                user_table = User.__table__
                s = select([user_table.c.type],
                           user_table.c.username == posting["username"])
                user_type = self.execute(s).first()[0]

                if posting["timestamp"] is not None:
                    timestamp = timestamp_to_dt(posting["timestamp"])
                else:
                    timestamp = None

                # Sign convention: debits increase Asset accounts and
                # decrease all others; credits are the reverse.
                if posting["direction"] == 'debit':
                    sign = 1 if user_type == 'Asset' else -1
                else:
                    sign = -1 if user_type == 'Asset' else 1

                db_posting = {
                    'username': posting["username"],
                    'contract_id': contract_id,
                    'quantity': sign * posting["quantity"],
                    'note': posting["note"],
                    'timestamp': timestamp,
                    'journal_id': journal_id
                }
                db_postings.append(db_posting)
                log.msg("done making posting at %f: %s" %
                        (time.time() - start, db_posting))

            ins = Posting.__table__.insert()
            result = self.execute(ins, db_postings)
            log.msg("Inserted %d rows of %d postings" %
                    (result.rowcount, len(db_postings)))
            log.msg("Done committing postings at %f" % (time.time() - start))

            return True

        except Exception as e:
            log.err("Caught exception trying to commit. Postings were:")
            for posting in postings:
                log.err(str(posting))
            log.err("Stack trace follows:")
            log.err()
            if isinstance(e, SQLAlchemyError):
                raise DATABASE_ERROR
            # Bare raise keeps the original traceback (raise e would drop it).
            raise