def historical_update(blockheight):
    """ Very long running task. Fills out the network difficulty values
    for all blocks before the site was running.

    Walks the chain backwards from ``blockheight`` to block 1, recording
    one network-difficulty slice per block, then compresses the one and
    five minute tables.

    :param blockheight: height of the newest block to start backfilling from.
    """
    # Reuse the module-level add_one_minute_diff helper rather than
    # duplicating an identical nested copy of it here.
    for ht in xrange(blockheight, 0, -1):
        hsh = coinserv.getblockhash(ht)
        info = coinserv.getblock(hsh)
        # difficulty is stored scaled by 1000, matching how live net
        # difficulty slices are recorded
        add_one_minute_diff(info['difficulty'] * 1000,
                            datetime.datetime.utcfromtimestamp(info['time']))
        current_app.logger.info("Processed block height {}".format(ht))

    db.session.commit()
    OneMinuteType.compress()
    db.session.commit()
    FiveMinuteType.compress()
    db.session.commit()
def compress_minute():
    """ Compresses OneMinute records (for temp, hashrate, shares, rejects)
    to FiveMinute """
    # Run compression on each one-minute model in turn, then persist.
    minute_models = (OneMinuteShare, OneMinuteReject, OneMinuteTemperature,
                     OneMinuteHashrate, OneMinuteType)
    for model in minute_models:
        model.compress()
    db.session.commit()
def compress_minute(self):
    """ Compresses OneMinute records (for temp, hashrate, shares, rejects)
    to FiveMinute """
    minute_models = (OneMinuteShare, OneMinuteReject, OneMinuteTemperature,
                     OneMinuteHashrate, OneMinuteType)
    try:
        for model in minute_models:
            model.compress()
        db.session.commit()
    except Exception:
        # Top-level task boundary: log with traceback and undo any
        # partially-flushed work so the session stays usable.
        logger.error("Unhandled exception in compress_minute", exc_info=True)
        db.session.rollback()
def add_one_minute_diff(diff, time):
    """ Record a one-minute network difficulty slice.

    Tries a plain insert first (EAFP); when a slice for the same
    (time, typ) already exists the unique constraint fires, and we
    instead lock the existing row and average the two values.
    """
    new_slice = OneMinuteType(typ='netdiff', value=diff, time=time)
    db.session.add(new_slice)
    try:
        db.session.commit()
    except sqlalchemy.exc.IntegrityError:
        db.session.rollback()
        existing = OneMinuteType.query.with_lockmode('update').filter_by(
            time=time, typ='netdiff').one()
        # just average the diff of two blocks that occured in the same second..
        existing.value = (diff + existing.value) / 2
        db.session.commit()
def set_data(gbt, curr=None):
    """ Cache network stats derived from a getblocktemplate result.

    Records blockheight, difficulty, reward and a rolling average
    difficulty for the network, and (outside staging) stores the
    difficulty as a one-minute slice.

    :param gbt: getblocktemplate-style dict with at least 'height',
                'bits', 'coinbasevalue' and 'time' keys.
    :param curr: optional currency code; cache keys are prefixed with
                 '<curr>_' so multiple networks can coexist.
    """
    prefix = curr + "_" if curr else ""
    prev_height = cache.get(prefix + 'blockheight') or 0
    # Skip the work entirely if we've already recorded this height
    if gbt['height'] == prev_height:
        logger.debug(
            "Not updating {} net info, height {} already recorded.".format(
                curr or 'main', prev_height))
        return
    logger.info("Updating {} net info for height {}.".format(
        curr or 'main', gbt['height']))

    # set general information for this network
    difficulty = bits_to_difficulty(gbt['bits'])
    cache.set(prefix + 'blockheight', gbt['height'], timeout=1200)
    cache.set(prefix + 'difficulty', difficulty, timeout=1200)
    cache.set(prefix + 'reward', gbt['coinbasevalue'], timeout=1200)

    # keep a configured number of blocks in the cache for getting average difficulty
    # (config lookup hoisted; it was read three times below)
    avg_period = current_app.config['difficulty_avg_period']
    cache.cache._client.lpush(prefix + 'block_cache', gbt['bits'])
    # NOTE(review): redis LTRIM/LRANGE end indexes are inclusive, so this
    # keeps avg_period + 1 entries — confirm that is intended
    cache.cache._client.ltrim(prefix + 'block_cache', 0, avg_period)
    diff_list = cache.cache._client.lrange(
        prefix + 'block_cache', 0, avg_period)
    # generator avoids materializing an intermediate list just to sum it;
    # diff_list is never empty here because we lpush'd above
    total_diffs = sum(bits_to_difficulty(diff) for diff in diff_list)
    cache.set(prefix + 'difficulty_avg', total_diffs / len(diff_list),
              timeout=120 * 60)

    # add the difficulty as a one minute share, unless we're staging
    if not current_app.config.get('stage', False):
        now = datetime.datetime.utcnow()
        try:
            m = OneMinuteType(typ=prefix + 'netdiff',
                              value=difficulty * 1000, time=now)
            db.session.add(m)
            db.session.commit()
        except sqlalchemy.exc.IntegrityError:
            db.session.rollback()
            slc = OneMinuteType.query.with_lockmode('update').filter_by(
                time=now, typ=prefix + 'netdiff').one()
            # just average the diff of two blocks that occured in the same second..
            slc.value = ((difficulty * 1000) + slc.value) / 2
            db.session.commit()