Example #1
def generate_credits(dont_simulate=True):
    """ Loops through all the blocks that haven't been credited out and
    attempts to process them """
    # Note the double negative: dont_simulate=True (the default) performs a
    # real run; pass dont_simulate=False to run credit_block in simulate mode
    simulate = not dont_simulate
    unproc_blocks = redis_conn.keys("unproc_block*")
    for key in unproc_blocks:
        # Strip the 13-character "unproc_block_" prefix to get the block hash
        block_hash = key[13:]
        current_app.logger.info("==== Attempting to process block hash {}".format(block_hash))
        try:
            credit_block(key, simulate=simulate)
        except Exception:
            db.session.rollback()
            current_app.logger.error("Unable to payout block {}".format(block_hash), exc_info=True)
        current_app.logger.info("==== Done processing block hash {}".format(block_hash))
Example #2
def collect_minutes():
    """ Grabs all the pending minute shares out of redis and puts them in the
    database """
    unproc_mins = redis_conn.keys("min_*")
    for key in unproc_mins:
        current_app.logger.info("Processing key {}".format(key))
        share_type, algo, stamp = key.split("_")[1:]
        minute = datetime.datetime.utcfromtimestamp(float(stamp))
        # To ensure invalid stamps don't get committed
        minute = ShareSlice.floor_time(minute, 0)
        # Keys younger than 30 seconds may still be accumulating shares;
        # skip them and let the next pass pick them up
        if float(stamp) > (time.time() - 30):
            current_app.logger.info("Skipping timestamp {}, too young"
                                    .format(minute))
            continue

        # Move the key aside under a well-known name so new shares keep
        # landing in a fresh key while this batch is drained
        redis_conn.rename(key, "processing_shares")
        for user, shares in redis_conn.hgetall("processing_shares").iteritems():

            shares = float(shares)
            # messily parse out the address.worker combo...
            parts = user.split(".")
            if len(parts) > 1:
                worker = parts[1]
            else:
                worker = ''
            address = parts[0]

            if not address.startswith("pool"):
                try:
                    curr = currencies.lookup_payable_addr(address)
                except InvalidAddressException:
                    curr = None

                if curr is None:
                    address = global_config.pool_payout_currency.pool_payout_addr

            try:
                slc = ShareSlice(user=address, time=minute, worker=worker, algo=algo,
                                 share_type=share_type, value=shares, span=0)
                db.session.add(slc)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                # A slice for this (user, time, worker, algo, share_type)
                # already exists; lock the row and accumulate into it instead
                db.session.rollback()
                slc = ShareSlice.query.with_lockmode('update').filter_by(
                    user=address, time=minute, worker=worker, algo=algo,
                    share_type=share_type).one()
                slc.value += shares
                db.session.commit()
        redis_conn.delete("processing_shares")
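For context, the min_* keys drained above are presumably written one Redis hash per minute by the stratum side; below is a minimal producer sketch, assuming the min_{share_type}_{algo}_{stamp} key naming and "{address}.{worker}" hash fields that collect_minutes parses. The share_type value "acc" and record_share itself are illustrative, not part of this section.

import time
import redis

redis_conn = redis.StrictRedis(decode_responses=True)

def record_share(address, worker, algo, shares, share_type="acc"):
    # Floor to the start of the current minute so every share from the same
    # minute accumulates in one key; collect_minutes() drains these keys
    # once they are more than 30 seconds old
    stamp = int(time.time() // 60 * 60)
    key = "min_{}_{}_{}".format(share_type, algo, stamp)
    redis_conn.hincrbyfloat(key, "{}.{}".format(address, worker), shares)

record_share("DExampleMinerAddr", "rig1", "scrypt", 1.5)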
Example #3
def collect_minutes():
    """ Grabs all the pending minute shares out of redis and puts them in the
    database """
    unproc_mins = redis_conn.keys("min_*")
    for key in unproc_mins:
        current_app.logger.info("Processing key {}".format(key))
        share_type, algo, stamp = key.split("_")[1:]
        minute = datetime.datetime.utcfromtimestamp(float(stamp))
        # To ensure invalid stamps don't get committed
        minute = ShareSlice.floor_time(minute, 0)
        # Skip keys younger than 30 seconds; they may still be accumulating
        if float(stamp) > (time.time() - 30):
            current_app.logger.info("Skipping timestamp {}, too young"
                                    .format(minute))
            continue

        redis_conn.rename(key, "processing_shares")
        for user, shares in redis_conn.hgetall("processing_shares").iteritems():

            shares = float(shares)
            # messily parse out the address.worker combo...
            parts = user.split(".")
            if len(parts) > 1:
                worker = parts[1]
            else:
                worker = ''
            address = parts[0]

            if address != "pool":
                try:
                    curr = currencies.lookup_payable_addr(address)
                except InvalidAddressException:
                    curr = None

                if not curr:
                    address = global_config.pool_payout_currency.pool_payout_addr

            try:
                slc = ShareSlice(user=address, time=minute, worker=worker, algo=algo,
                                 share_type=share_type, value=shares, span=0)
                db.session.add(slc)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                db.session.rollback()
                slc = ShareSlice.query.with_lockmode('update').filter_by(
                    user=address, time=minute, worker=worker, algo=algo,
                    share_type=share_type).one()
                slc.value += shares
                db.session.commit()
        redis_conn.delete("processing_shares")
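This variant differs from Example #2 only in the pool-account guard. A quick illustration, with hypothetical addresses, of where the two checks diverge:

# Example #2's startswith() guard skips validation for anything beginning
# with "pool" (e.g. a hypothetical "pool_fees" account), while the equality
# guard above skips it only for the literal "pool" account
for address in ("pool", "pool_fees", "DExampleMinerAddr"):
    print("{}: validated by #2 guard: {}, by #3 guard: {}"
          .format(address, not address.startswith("pool"), address != "pool"))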
Example #4
def _grab_data(prefix, stat):
    proc_name = "processing_{}".format(stat)
    unproc_mins = redis_conn.keys(prefix)
    for key in unproc_mins:
        current_app.logger.info("Processing key {}".format(key))
        try:
            (stamp, ) = key.split("_")[1:]
        except Exception:
            current_app.logger.error("Error processing key {}".format(key),
                                     exc_info=True)
            continue
        minute = datetime.datetime.utcfromtimestamp(float(stamp))
        # To ensure invalid stamps don't get committed
        minute = ShareSlice.floor_time(minute, 0)
        # Skip keys younger than 30 seconds; they may still be accumulating
        if float(stamp) > (time.time() - 30):
            current_app.logger.info("Skipping timestamp {}, too young".format(minute))
            continue

        redis_conn.rename(key, proc_name)
        for user, value in redis_conn.hgetall(proc_name).iteritems():
            try:
                address, worker, did = user.split("_")
                try:
                    value = float(value)
                except ValueError:
                    if value != "None":
                        current_app.logger.warn(
                            "Got bogus value {} from ppagent for stat {}"
                            .format(value, stat), exc_info=True)
                    continue

                # Megahashes are what cgminer reports; convert to hashes/sec
                if stat == "hashrate":
                    value *= 1000000
            except Exception:
                current_app.logger.error("Error processing key {} on hash {}"
                                         .format(user, key), exc_info=True)
                continue

            try:
                slc = DeviceSlice(user=address, time=minute, worker=worker,
                                  device=did, stat=stat, value=value, span=0)
                db.session.add(slc)
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                current_app.logger.warn("SQLAlchemy collision", exc_info=True)
                db.session.rollback()
        redis_conn.delete(proc_name)
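Again for context, a sketch of the ppagent-style data _grab_data expects, assuming keys named "{stat}_{stamp}" (matching the single-element unpacking of key.split("_")[1:]) whose hash fields are "{address}_{worker}_{device_id}"; the stat name and value are illustrative.

import time
import redis

redis_conn = redis.StrictRedis(decode_responses=True)

# One hash per stat per minute; each field identifies a reporting device
stamp = int(time.time() // 60 * 60)
redis_conn.hset("temperature_{}".format(stamp),
                "DExampleMinerAddr_rig1_0", "61.5")

# Once the minute is more than 30 seconds old, a scheduler pass such as
# _grab_data("temperature_*", "temperature") would fold the sample into
# DeviceSlice rows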