def syncShardDetails(conn, app, data, startTime):
    """Sync Harmony shard block-rate details.

    Splits the incoming shard records into inserts (no ``blockRateId`` yet)
    and updates (existing row keyed by ``blockRateId``), applies them in
    batches, refreshes the coin-stat block rate, and records a sync event
    for the current epoch.

    Args:
        conn: open DB connection; committed at the end.
        app: application identifier passed through to the audit event.
        data: payload with "shardDetails" (list of shard dicts) and
            "currentEpoch".
        startTime: accepted for signature consistency with the other sync
            handlers; not used here.
    """
    logger.info("syncing harmony shard data")
    shardDetails = data["shardDetails"]
    currentEpoch = data["currentEpoch"]
    inserts, updates = [], []
    for shard in shardDetails:
        if "blockRateId" in shard:
            # Existing row: last tuple elements are the updated-at timestamp
            # and the primary key for the WHERE clause.
            # Fix: was datetime.now(), which fails under module-style
            # `import datetime` used everywhere else in this file.
            updates.append((shard["epochStartBlock"], shard["latestBlock"],
                            shard["epochLastBlock"], shard["epochStartTime"],
                            shard["latestBlockTime"], shard["epochLastBlockTime"],
                            shard["epochEnded"], shard["blockRate"],
                            datetime.datetime.now(), shard["blockRateId"]))
        else:
            inserts.append((shard["epochNumber"], shard["shardId"],
                            shard["epochStartBlock"], shard["latestBlock"],
                            shard["epochLastBlock"], shard["epochStartTime"],
                            shard["latestBlockTime"], shard["epochLastBlockTime"],
                            shard["epochEnded"], shard["blockRate"]))
    if inserts:
        batchCreateBlockRate(conn, inserts)
    if updates:
        batchUpdateBlockRate(conn, updates)
    updateCoinStatBlockRate(conn)
    auditUtils.createEvent(conn, app, eventName.lastHarmonyShardSyncDetails,
                           currentEpoch)
    conn.commit()
def syncTransactions(conn, app, data, startTime):
    """Sync Harmony transactions for a single shard.

    Validates the starting block against the per-shard sync event, processes
    the transaction summary and individual transactions, creates any newly
    seen addresses, records the per-shard sync height, and commits.
    """
    logger.info("syncing harmony transactions")
    shardId = data["shardId"]
    # Per-shard sync marker: the shard id is appended to the base event name.
    eventKey = eventName.lastSyncedHarmonyTxBlockHeight + str(shardId)
    validateStartBlock(conn, data["startBlockHeight"], eventKey,
                       data["normalMode"])
    txData = data["txData"]
    processSummary(conn, txData["summaryMap"])
    txs = txData["txs"]
    logger.info("txs: {}".format(len(txs)))
    processTransactions(conn, app, txs, harmonyData.listHPoolsAsMap(conn))
    createAddressIfAny(conn)
    auditUtils.createEvent(conn, app, eventKey, data["blockHeight"])
    conn.commit()
def syncDelegates(conn, app, data, event):
    """Apply delegation deletes, inserts, and updates, then refresh
    balances and ranks, and record the sync event + audit entry."""
    startTime = datetime.datetime.now()
    # Each phase logs unconditionally, then runs only when its payload is
    # present in the request body.
    phases = (
        ("processing delegation deletes", data["deletes"], processDelegationDeletes),
        ("processing delegation inserts", data["inserts"], processDelegationInserts),
        ("processing delegation updates", data["updates"], processDelegationUpdates),
    )
    for message, payload, handler in phases:
        logger.info(message)
        if payload is not None:
            handler(conn, payload)
    conn.commit()
    logger.info("updating balances")
    updateBalances(conn, True)
    conn.commit()
    updateRank(conn, 500)
    conn.commit()
    auditUtils.createEvent(conn, app, eventName.syncHarmonyDelegations)
    auditUtils.audit(conn, app, event, eventName.syncHarmonyDelegations,
                     "service", startTime)
    logger.info("processing finished")
def electionSync(conn, app, data, event):
    """Run the Harmony election-processing pipeline and record the
    corresponding sync event and audit entry.

    The ``data`` payload is accepted for handler-signature consistency but
    is not consumed here; elections are fetched inside processElections.
    """
    logger.info("in electionSync for harmony")
    auditStart = datetime.datetime.now()
    processElections(conn, app)
    auditUtils.createEvent(conn, app, eventName.syncHarmonyElections)
    auditUtils.audit(conn, app, event, eventName.syncHarmonyElections,
                     "service", auditStart)
def syncAddresses(conn, app, data, event):
    """Sync Harmony address rows: batch insert new addresses, batch update
    existing ones, then recompute balances and ranks.

    Skips entirely when updateAddressHistory reports that the epoch address
    history was just captured.
    """
    startTime = datetime.datetime.now()
    historyProcessed = updateAddressHistory(conn)
    conn.commit()
    if historyProcessed:
        logger.info("skipping address sync as address history was processed")
        return
    newAddresses = data["inserts"]
    changedAddresses = data["updates"]
    if newAddresses is not None:
        insertRows = [(entry["address"], entry["balance"])
                      for entry in newAddresses]
        batchCreateAddress(conn, insertRows)
        conn.commit()
    if changedAddresses is not None:
        updateRows = [(entry["balance"], entry["addressId"])
                      for entry in changedAddresses]
        batchUpdateAddress(conn, updateRows)
        conn.commit()
    updateBalances(conn, data["balanceIncludesStake"])
    conn.commit()
    updateRank(conn, 50000)
    conn.commit()
    auditUtils.createEvent(conn, app, eventName.syncHarmonyAddresses)
    auditUtils.audit(conn, app, event, eventName.syncHarmonyAddresses,
                     "service", startTime)
def syncEvents(conn, app, data, startTime):
    """Sync Harmony on-chain events (delegate/undelegate/collect-rewards/
    edit-validator/create-validator) into the events table.

    Builds one insert tuple per recognized event, skipping events whose
    validator address is not a known pool, then batch-inserts and records
    the last-synced event block height.

    Args:
        conn: open DB connection; committed at the end.
        app: application identifier for the audit events.
        data: payload with "blockHeight" and "events" (list of event dicts).
        startTime: unused here; kept for handler-signature consistency.
    """
    poolMap = harmonyData.listHPoolsAsMap(conn)
    endBlock = data["blockHeight"]
    events = data["events"]
    inserts = []
    coinStat = commonUtils.getCoinStat(conn, app)
    for e in events:
        # Skip events referencing validators we do not track. Events without
        # a validatorAddress key (e.g. collect-rewards) pass through.
        if "validatorAddress" in e and e["validatorAddress"] not in poolMap:
            continue
        eventType = e["type"]
        # Record shape (shared by all branches):
        # (type, blockNumber, txHash, address, hPoolId-or-None,
        #  amount-or-None, epochTimestamp, None).
        if eventType == hConstants.H_EVENT_DELEGATE or eventType == hConstants.H_EVENT_UNDELEGATE:
            record = (eventType, e["blockNumber"], e["txHash"], e["address"],
                      poolMap[e["validatorAddress"]]["hPoolId"], e["amount"],
                      e["epochTimestamp"], None)
            inserts.append(record)
        elif eventType == hConstants.H_EVENT_COLLECT_REWARDS:
            # Reward collection is per-delegator; no pool id is recorded.
            record = (eventType, e["blockNumber"], e["txHash"], e["address"],
                      None, e["amount"], e["epochTimestamp"], None)
            inserts.append(record)
        elif eventType == hConstants.H_EVENT_EDIT_VALIDATOR:
            record = (eventType, e["blockNumber"], e["txHash"], e["address"],
                      poolMap[e["validatorAddress"]]["hPoolId"], None,
                      e["epochTimestamp"], None)
            inserts.append(record)
            # Edit-validator may fan out into extra derived records
            # (e.g. per-field change events) from processEditValDetails.
            specialRecords = processEditValDetails(e, poolMap, coinStat)
            if specialRecords:
                inserts.extend(specialRecords)
        elif eventType == hConstants.H_CREATE_VALIDATOR:
            record = (eventType, e["blockNumber"], e["txHash"], e["address"],
                      poolMap[e["validatorAddress"]]["hPoolId"], None,
                      e["epochTimestamp"], None)
            inserts.append(record)
    if len(inserts) > 0:
        batchCreateEvents(conn, inserts)
    auditUtils.createEvent(conn, app,
                           hConstants.EVENT_LAST_SYNCED_EVENT_BLOCK_HEIGHT,
                           endBlock)
    auditUtils.createEvent(conn, app, eventName.syncHarmonyEvents)
    conn.commit()
def blsKeySync(conn, app, data, event):
    """Sync Harmony BLS key data: shard details first (yielding the block
    number), then key inserts, then per-key performance for the current
    epoch; finally record the sync event and audit entry."""
    logger.info("in blsKeySync")
    auditStart = datetime.datetime.now()
    blockNumber = processShards(conn, data["shardDetails"])
    processKeyInserts(conn, data["inserts"])
    processKeyPerf(conn, data["blsPerfData"], data["currentEpoch"],
                   blockNumber)
    auditUtils.createEvent(conn, app, eventName.syncHarmonyBlsKeys)
    auditUtils.audit(conn, app, event, eventName.syncHarmonyBlsKeys,
                     "service", auditStart)
    logger.info("processing finished")
def lambda_handler(event, context):
    """AWS Lambda entry point: after the security check, dispatch all
    pending Harmony notifications and record the sync event + audit.

    Returns the security error response on failed auth, otherwise the
    standard success response.
    """
    logger.info("in updateData for harmony")
    invokedAt = datetime.datetime.now()
    conn, app, error = securityUtils.performSecurityCheckAndGetDetails(event)
    if error:
        return error
    sendAllNotifications(conn)
    auditUtils.createEvent(conn, app, eventName.syncHarmonyNotifications)
    auditUtils.audit(conn, app, event, eventName.syncHarmonyNotifications,
                     "service", invokedAt)
    conn.close()
    return utilities.getSuccessResponse()
def syncVersion(conn, app, data, startTime):
    """Sync node software versions per BLS key and record the sync event.

    Builds (version, updated-at, blsKey) tuples and batch-updates them.
    The timestamp is taken per key, inside the loop, preserving distinct
    update times across rows.
    """
    logger.info("syncing harmony version data")
    logger.info("processing data: {}".format(data))
    keyVersions = data["keyVersions"]
    logger.info("processing valVersions: {}".format(keyVersions))
    versionRows = []
    for keyInfo in keyVersions:
        logger.info("processing val: {}".format(keyInfo))
        row = (keyInfo["version"], datetime.datetime.now(), keyInfo["blsKey"])
        versionRows.append(row)
    logger.info("processing updates: {}".format(versionRows))
    batchUpdateVersions(conn, versionRows)
    auditUtils.createEvent(conn, app, eventName.lastHarmonyVersionSync)
    logger.info("finished syncing versions")
    conn.commit()
def updateAddressHistory(conn):
    """Capture a per-epoch snapshot of the address rich list into history.

    Returns True when a snapshot was taken for the current epoch (callers
    then skip the regular address sync), False when the history is already
    up to date for this epoch.

    NOTE(review): `app` is not a parameter here — it must resolve as a
    module-level global at runtime; confirm one exists, otherwise this
    raises NameError on first call.
    """
    coinStat = commonUtils.getCoinStat(conn, app)
    syncStatus = commonUtils.getSyncStatus(conn, app,
                                           constants.H_ADDRESS_HISTORY_SYNC)
    epoch = coinStat["currentEpoch"]
    # Already snapshotted this epoch -> nothing to do.
    if syncStatus["syncedTillEpoch"] == epoch:
        logger.info(
            "skipping. addresses are already synced till current epoch: {}".
            format(syncStatus["syncedTillEpoch"]))
        return False
    logger.info("syncing address history")
    sql = harmonyData.getRichListSql()
    # Very large fetch cap (10M rows) — effectively "all addresses".
    currentAddresses = dbUtil.listResultsWithConn(sql, conn, 10000000)
    inserts = []
    for address in currentAddresses:
        record = harmonyHistory.getCreateHistoryData(
            epoch, constants.H_HISTORY_ADDRESS, address["totalStake"], None,
            None, address["address"], address["totalBalance"],
            address["totalRewards"])
        inserts.append(record)
    harmonyHistory.batchCreateHistory(conn, inserts)
    # Mark this epoch as snapshotted so the next call short-circuits.
    commonUtils.updateSyncStatus(conn, app, constants.H_ADDRESS_HISTORY_SYNC,
                                 None, epoch)
    auditUtils.createEvent(conn, app, eventName.syncHarmonyAddressHistory)
    logger.info("after syncing address history")
    conn.commit()
    return True
def epochSignSync(conn, app, data, event):
    """Sync per-epoch signing statistics.

    For each epoch in the payload, records the epoch-level sign rate into
    history and per-validator signing stats into the epoch-sign table; the
    sign rate of the highest epoch seen is pushed into the coin stats.

    Args:
        conn: open DB connection (callee functions manage commits).
        app: application identifier for sync status / audit records.
        data: payload with "allData" (list of per-epoch dicts) and "epoch"
            (the epoch the sync-status marker is advanced to).
        event: originating event, passed to the audit record.
    """
    # Fix: message previously said "in blsKeySync" — copy-paste from
    # blsKeySync; corrected to identify this handler.
    logger.info("in epochSignSync")
    startTime = datetime.datetime.now()
    allData = data["allData"]
    currentTime = datetime.datetime.now()
    epochSignUpdates = []
    epochSignSummaryUpdates = []
    maxEpoch, signRate = 0, 0
    for epochData in allData:
        logger.info("processing epoch: {}".format(epochData["epoch"]))
        summary = (epochData["signRate"], epochData["epoch"])
        epochSignSummaryUpdates.append(summary)
        # Track the sign rate of the newest epoch for the coin-stat update.
        if epochData["epoch"] > maxEpoch:
            signRate = epochData["signRate"]
            maxEpoch = epochData["epoch"]
        for validator in epochData["validators"]:
            # NOTE(review): strftime('%s') (epoch seconds) is a platform
            # extension, not portable — confirm target runtime supports it.
            valSign = (validator["signed"], validator["askedToSign"],
                       validator["signPer"], currentTime,
                       currentTime.strftime('%s'), epochData["epoch"],
                       validator["hPoolId"])
            epochSignUpdates.append(valSign)
    batchUpdateEpochSigns(conn, epochSignUpdates)
    batchUpdateHistory(conn, epochSignSummaryUpdates)
    updateSignRate(conn, signRate, app)
    epoch = data["epoch"]
    commonUtils.updateSyncStatus(conn, app,
                                 constants.SYNC_STATUS_EPOCH_FOR_SIGN, None,
                                 epoch)
    auditUtils.createEvent(conn, app, eventName.syncEpochSign)
    auditUtils.audit(conn, app, event, eventName.syncEpochSign, "service",
                     startTime)
    logger.info("processing finished")
def processElections(conn, app): validators = harmonyData.listAllValidators(conn) # logger.info(validators) valByAddMap = commonUtils.getMapFromList(validators, "address") data = commonUtils.getDataByUrl(hConstants.ELECTIONS_URL) currentEpoch = data["current_epoch"] currentBlock = data["current_block_number"] coinStat = commonUtils.getCoinStat(conn, constants.APP) timeLeft, eventToUpdate = getEpochEndTimeLeftForEpochAlert( conn, coinStat, currentEpoch, currentBlock) nextSlots = getSlots(data["live_table"], valByAddMap) processForWhatIf(nextSlots) nextEpochNotMap = notificationUtils.getNotMapForElectionCheck( conn, currentEpoch, True) notificationsNext, watchEndListNext = processNextEpochNotifications( nextSlots, nextEpochNotMap, currentEpoch, timeLeft) slots = getSlots(data["table"], valByAddMap) epochNotMap = notificationUtils.getNotMapForElectionCheck( conn, currentEpoch, False) notifications = processEpochNotifications(slots, epochNotMap, currentEpoch) notifications.extend(notificationsNext) notificationsUnelectedVal = processUnelectedValNotifications( conn, epochNotMap, currentEpoch) notifications.extend(notificationsUnelectedVal) # logger.info("notifications: {}, watchEndListNext: {}".format(notifications, watchEndListNext)) processForUnderbid(conn, currentEpoch, nextSlots, notifications, watchEndListNext, currentBlock, coinStat, timeLeft) if eventToUpdate: # logger.info("creating event: {}".format(eventToUpdate)) auditUtils.createEvent(conn, app, eventToUpdate, currentEpoch) notificationUtils.batchCreateNotifications(conn, notifications) notificationUtils.updateEndWatchList(conn, watchEndListNext)
def syncValidators(conn, app, data, event):
    """Main Harmony validator sync: per-validator DB sync, epoch summaries,
    hourly delta performance, perf indexes, and coin-stat updates.

    Bails out early (still returning a success response) when the upstream
    validator list looks truncated (<= 100 entries).

    Args:
        conn: open DB connection; committed at several checkpoints.
        app: application identifier for coin stats / audit.
        data: payload with "allValidators", "stakingInfo", "epochInfo".
        event: originating event for the audit record.

    Returns:
        An HTTP-style success response.
        NOTE(review): `jsondumps` is assumed to be an imported alias for a
        JSON-dumps function — confirm against the file's import block.
    """
    startTime = commonUtils.getCurrentTime()
    startTimeAudit = datetime.datetime.now()
    # Truncate to the top of the current hour; hourly delta perf buckets
    # are keyed on these boundaries.
    currentHour = datetime.datetime.now().replace(microsecond=0,
                                                  second=0,
                                                  minute=0)
    previousHour = currentHour - datetime.timedelta(hours=1)
    validators = data["allValidators"]
    stakingInfo = data["stakingInfo"]
    epochInfo = data["epochInfo"]
    currentEpoch = epochInfo["currentEpoch"]
    logger.info("total number of validators returned is: " +
                str(len(validators)))
    # Guard against a truncated/partial upstream response.
    if len(validators) <= 100:
        logger.info(
            "number of validators returned is much less than what was expected. skipping the processing for now."
        )
        return getResponse(jsondumps({"result": "successful"}))
    # Snapshot maps: per-epoch (hour=None) and per-hour validator details.
    currentEpochValMap = getCurrentValMap(conn, currentEpoch, None)
    previousEpochValMap = getCurrentValMap(conn, currentEpoch - 1, None)
    currentHourValMap = getCurrentValMap(conn, currentEpoch, currentHour)
    previousHourValMap = getCurrentValMap(conn, None, previousHour)
    commonUtils.logTimeDiff(startTime, "before coinstats")
    currentCoinStat = commonUtils.getCoinStat(conn, app)
    commonUtils.logTimeDiff(startTime, "before getBlockCompletionFactor")
    # completionFactor scales partial-epoch figures; enoughDataForCurrentApr
    # gates APR-dependent processing below.
    completionFactor, enoughDataForCurrentApr = getBlockCompletionFactor(
        epochInfo, stakingInfo, currentCoinStat, currentEpoch)
    valMap = harmonyData.listHPoolsAsMap(conn)
    networkStake = getNetworkStake(validators)
    commonUtils.logTimeDiff(
        startTime, "time spent before starting validators processing")
    i = 0
    deltaInserts, deltaUpdates = [], []
    stakeWeights = []
    for validator in validators:
        address = validator["address"]
        i += 1
        commonUtils.logTimeDiff(startTime,
                                "processing validator #: {}".format(i))
        blsKeyCount = len(validator["blsPublicKeys"])
        optimalBlsKeyCount, bidPerSeat = getOptimalBlsKeyCount(
            stakingInfo, blsKeyCount, validator)
        commonUtils.logTimeDiff(
            startTime, "before processing syncValidator #: {}".format(i))
        stakeWeight = getStakeWeight(validator, networkStake)
        stakeWeights.append(stakeWeight)
        # Known pools get their existing DB row passed for update.
        dbValidator = None
        if address in valMap:
            dbValidator = valMap[address]
        hPoolId = syncValidator(conn, validator, blsKeyCount,
                                optimalBlsKeyCount, bidPerSeat, stakeWeight,
                                currentEpoch, dbValidator)
        commonUtils.logTimeDiff(
            startTime, "after processing syncValidator #: {}".format(i))
        currEpochSummary = getValidatorDetails(currentEpochValMap, hPoolId)
        commonUtils.logTimeDiff(
            startTime, "after getting currEpochSummary #: {}".format(i))
        prevEpochSummary = getValidatorDetails(previousEpochValMap, hPoolId)
        commonUtils.logTimeDiff(
            startTime, "after getting prevEpochSummary #: {}".format(i))
        syncValidatorEpochSummary(conn, validator, blsKeyCount, bidPerSeat,
                                  hPoolId, currEpochSummary, prevEpochSummary,
                                  currentEpoch, completionFactor,
                                  enoughDataForCurrentApr)
        commonUtils.logTimeDiff(
            startTime,
            "after processing syncValidatorEpochSummary #: {}".format(i))
        currHourValDetails = getValidatorDetails(currentHourValMap, hPoolId)
        commonUtils.logTimeDiff(
            startTime, "after obtaining currHourValDetails #: {}".format(i))
        prevHourValDetails = getValidatorDetails(previousHourValMap, hPoolId)
        commonUtils.logTimeDiff(
            startTime,
            "before delta perf - processing validator #: {}".format(i))
        # Accumulates into deltaInserts/deltaUpdates; batched after the loop.
        processDeltaPerf(conn, validator, hPoolId, currHourValDetails,
                         prevHourValDetails, currentEpoch, currentHour,
                         deltaInserts, deltaUpdates)
        commonUtils.logTimeDiff(startTime,
                                "after processDeltaPerf #: {}".format(i))
    commonUtils.logTimeDiff(startTime,
                            "before processing hpoolperf inserts and updates")
    batchCreateHPoolPerf(conn, deltaInserts)
    batchUpdateHPoolPerf(conn, deltaUpdates)
    conn.commit()
    commonUtils.logTimeDiff(startTime, "before processing perf index")
    if enoughDataForCurrentApr:
        processPerfIndex(conn, app, epochInfo)
        # NOTE(review): commit placement inside this branch reconstructed
        # from collapsed source — harmless either way if the branch wrote
        # nothing, but confirm against the original indentation.
        conn.commit()
    commonUtils.logTimeDiff(startTime, "before processing perf index")
    processEpochPerf(conn, app, epochInfo, currentCoinStat,
                     enoughDataForCurrentApr)
    commonUtils.logTimeDiff(startTime, "before update coinstat")
    valForNetworkHalt = getValCountForNetworkHalt(stakeWeights)
    updateCoinStats(conn, app, stakingInfo, epochInfo, len(validators),
                    valForNetworkHalt)
    conn.commit()
    harmonyValEvents.generateValSyncEvents(conn, currentEpoch,
                                           enoughDataForCurrentApr)
    commonUtils.logTimeDiff(startTime, "before creating event and audit")
    auditUtils.createEvent(conn, app, eventName.syncHarmonyValidators)
    auditUtils.audit(conn, app, event, eventName.syncHarmonyValidators,
                     "service", startTimeAudit)
    commonUtils.logTimeDiff(startTime, "total time spent")
    return getResponse(jsondumps({"result": "successful"}))