def setup():
    """
    Connect to the badger system and, via the dev multisig (test mode, no
    real signers needed), hand keeper duties on every strategy over to the
    BadgerRewardsManager so transactions can run on a local fork.
    """
    # Connect badger system from file
    badger = connect_badger("deploy-final.json")
    digg = connect_digg("deploy-final.json")

    # Guard against operating on the wrong deploy file
    expected_multisig = "0xB65cef03b9B89f99517643226d76e286ee999e77"
    assert badger.devMultisig == expected_multisig

    # Multisig wrapper; testMode executes without collecting signatures
    safe = GnosisSafe(badger.devMultisig, testMode=True)

    manager = BadgerRewardsManager.at("0x5B60952481Eb42B66bdfFC3E049025AC5b91c127")

    for strat_key in strat_keys:
        print(strat_key)
        strat = badger.getStrategy(strat_key)
        tx_params = {
            "to": strat.address,
            "data": strat.setKeeper.encode_input(manager),
        }
        safe.execute(
            MultisigTxMetadata(description="Transfer Keeper for {}".format(strat_key)),
            tx_params,
        )

    return manager
def test_deploy(test=False, deploy=True):
    """
    Deploy a minimal DIGG system (plus DAO timelock / team vesting and the
    trading pairs), or connect to the production deploy when deploy=False.
    test=True additionally seeds wBTC from a whale for liquidity.
    """
    # These should already be deployed
    deployer = accounts.at(decouple.config("TEST_ACCOUNT"), force=True)
    # deployer = accounts.at(dao_config.initialOwner, force=True)
    devProxyAdmin = "0x20dce41acca85e8222d6861aa6d23b6c941777bf"
    daoProxyAdmin = "0x11a9d034b1bbfbbdcac9cb3b86ca7d5df05140f2"
    console.log(
        "Initialize Digg System",
        {
            "deployer": deployer,
            "devProxyAdmin": devProxyAdmin,
            "daoProxyAdmin": daoProxyAdmin,
        },
    )

    # Guard clause: just connect to prod when not deploying
    if not deploy:
        return connect_digg(digg_config.prod_json)

    digg = deploy_digg_minimal(deployer, devProxyAdmin, daoProxyAdmin)
    digg.deploy_dao_digg_timelock()
    digg.deploy_digg_team_vesting()
    if test:
        # need some sweet liquidity for testing
        distribute_from_whale(whale_registry.wbtc, digg.owner)
    # deploy trading pairs (these deploys are always idempotent)
    digg.deploy_uniswap_pairs(test=test)  # adds liquidity in test mode
    return digg
def pre_deploy_setup(self, deploy=True):
    """
    Deploy the DIGG system and a dynamic oracle for testing.

    With deploy=False, connect to the production DIGG deploy instead and
    fast-forward the chain so the CPI oracle report window is valid.
    """
    if not deploy:
        self.digg = connect_digg(digg_config.prod_json)
        self._deploy_dynamic_oracle(self.digg.devMultisig)
        # digg.constantOracle.updateAndPush({"from": digg.devMultisig})
        # Sleep long enough that the report is valid.
        chain.sleep(digg_config.cpiOracleParams.reportDelaySec)
        return

    dev_admin = web3.toChecksumAddress("0x20dce41acca85e8222d6861aa6d23b6c941777bf")
    dao_admin = web3.toChecksumAddress("0x11a9d034b1bbfbbdcac9cb3b86ca7d5df05140f2")
    self.digg = deploy_digg_minimal(
        self.deployer, dev_admin, dao_admin, owner=self.deployer
    )
    self.badger.deploy_logic("DiggRewardsFaucet", DiggRewardsFaucet)
    self._deploy_dynamic_oracle(self.deployer)
def connect_badger(
    badger_deploy_file, load_deployer=True, load_keeper=False, load_guardian=False
):
    """
    Connect to an existing badger deployment described by a JSON deploy file
    and return a fully wired BadgerSystem (including the DIGG sub-system,
    which lives in the same file).
    """
    console.print(
        "[grey]Connecting to Existing Badger 🦡 System at {}...[/grey]".format(
            badger_deploy_file
        )
    )
    with open(badger_deploy_file) as f:
        deploy_data = json.load(f)

    # Connect to existing badger deployment
    badger = BadgerSystem(
        badger_config,
        deploy_data["deployer"],
        deploy_data["keeper"],
        deploy_data["guardian"],
        deploy=False,
        load_deployer=load_deployer,
        load_keeper=load_keeper,
        load_guardian=load_guardian,
    )
    badger.globalStartBlock = deploy_data["globalStartBlock"]

    badger.connect_proxy_admins(
        deploy_data["devProxyAdmin"], deploy_data["daoProxyAdmin"]
    )
    badger.connect_logic(deploy_data["logic"])
    # badger.connect_dev_multisig(deploy_data["devMultisig"])
    badger.connect_uni_badger_wbtc_lp(deploy_data["uniBadgerWbtcLp"])

    # Connect Vesting / Rewards Infrastructure
    badger.connect_team_vesting(deploy_data["teamVesting"])
    badger.connect_badger_hunt(deploy_data["badgerHunt"])
    badger.connect_badger_tree(deploy_data["badgerTree"])
    badger.connect_rewards_escrow(deploy_data["rewardsEscrow"])
    badger.connect_honeypot_meme(deploy_data["honeypotMeme"])
    badger.connect_community_pool(deploy_data["communityPool"])
    badger.connect_dao_badger_timelock(deploy_data["daoBadgerTimelock"])

    # Connect Sett
    badger.connect_sett_system(deploy_data["sett_system"], deploy_data["geysers"])

    badger.add_existing_digg(connect_digg(badger_deploy_file))

    return badger
def test_deploy(test=False, deploy=True):
    """
    Deploy a minimal DIGG system plus DAO timelock / team vesting, or
    connect to the production deploy when deploy=False.

    NOTE(review): the `test` flag is accepted but unused in this variant.
    """
    # These should already be deployed
    deployer = accounts.at(dao_config.initialOwner, force=True)
    devProxyAdmin = "0x20dce41acca85e8222d6861aa6d23b6c941777bf"
    daoProxyAdmin = "0x11a9d034b1bbfbbdcac9cb3b86ca7d5df05140f2"
    console.log(
        "Initialize Digg System",
        {
            "deployer": deployer,
            "devProxyAdmin": devProxyAdmin,
            "daoProxyAdmin": daoProxyAdmin,
        },
    )

    # Guard clause: just connect to prod when not deploying
    if not deploy:
        return connect_digg(digg_config.prod_json)

    digg = deploy_digg_minimal(deployer, devProxyAdmin, daoProxyAdmin)
    digg.deploy_dao_digg_timelock()
    digg.deploy_digg_team_vesting()
    return digg
def _add_digg_wbtc_liquidity(swap_system, user, digg_token, wbtc, digg_amount, wbtc_amount):
    """Approve the AMM router and add a DIGG/wBTC liquidity position for `user`.

    Min amounts equal the desired amounts, so the add reverts on any slippage.
    Previously this logic was duplicated inline for Uniswap and Sushiswap.
    """
    wbtc.approve(swap_system.router, wbtc_amount, {"from": user})
    digg_token.approve(swap_system.router, digg_amount, {"from": user})
    swap_system.router.addLiquidity(
        digg_token,
        wbtc,
        digg_amount,
        wbtc_amount,
        digg_amount,
        wbtc_amount,
        user,
        chain.time() + 1000,
        {"from": user},
    )


def main():
    """
    Connect to badger, distribute assets to specified test user, and keep ganache open.
    Ganache will run with your default brownie settings for mainnet-fork
    """
    # The address to test with
    user = accounts.at(decouple.config("TEST_ACCOUNT"), force=True)

    badger = connect_badger(
        "deploy-final.json", load_deployer=True, load_keeper=True, load_guardian=True
    )
    digg = connect_digg("deploy-final.json")
    digg.token = digg.uFragments
    badger.add_existing_digg(digg)

    console.print("[blue]=== 🦡 Test ENV for account {} 🦡 ===[/blue]".format(user))

    # Fund the user and deployer with ETH, then whale tokens
    distribute_test_ether(user, Wei("10 ether"))
    distribute_test_ether(badger.deployer, Wei("20 ether"))
    distribute_from_whales(user)

    wbtc = interface.IERC20(token_registry.wbtc)
    assert wbtc.balanceOf(user) >= 200000000

    # Give the user DIGG out of the DAO timelock (impersonated on the fork)
    init_prod_digg(badger, user)
    accounts.at(digg.daoDiggTimelock, force=True)
    digg.token.transfer(user, 20000000000, {"from": digg.daoDiggTimelock})

    digg_liquidity_amount = 1000000000
    wbtc_liquidity_amount = 100000000

    # Need enough for both the Uniswap and the Sushiswap position
    assert digg.token.balanceOf(user) >= digg_liquidity_amount * 2
    assert wbtc.balanceOf(user) >= wbtc_liquidity_amount * 2

    _add_digg_wbtc_liquidity(
        UniswapSystem(), user, digg.token, wbtc, digg_liquidity_amount, wbtc_liquidity_amount
    )
    _add_digg_wbtc_liquidity(
        SushiswapSystem(), user, digg.token, wbtc, digg_liquidity_amount, wbtc_liquidity_amount
    )

    console.print("[green]=== ✅ Test ENV Setup Complete ✅ ===[/green]")

    # Keep ganache open until closed
    time.sleep(days(365))
def test_main():
    """
    End-to-end exercise of the BadgerRewardsManager on a local fork:
    fund the manager with BADGER/DIGG, swap into wBTC on both AMMs, then for
    every strategy build want (swap + add liquidity for LP setts), transfer
    it in, and run deposit / tend / harvest through the manager.
    """
    badger = connect_badger("deploy-final.json")
    digg = connect_digg("deploy-final.json")
    distribute_from_whales(badger.keeper)
    manager = setup()

    # deployer is only referenced by the commented-out approve/revoke checks
    deployer = badger.deployer
    keeper = badger.keeper

    # Seed the manager with BADGER and DIGG to swap from
    badger.token.transfer(manager, Wei("1000 ether"), {"from": badger.keeper})
    digg.token.transfer(manager, Wei("100 gwei"), {"from": badger.keeper})

    # NOTE: a dead `before = badger.token.balanceOf(badger.devMultisig)` read
    # was removed here — it was overwritten below before ever being used.

    wbtc = interface.IERC20(registry.tokens.wbtc)

    badger_swap_amount = Wei("100 ether")
    digg_swap_amount = Wei("10 gwei")
    badger_transfer_amount = Wei("10 ether")
    digg_transfer_amount = Wei("1 gwei")

    # with brownie.reverts("Initializable: contract is already initialized"):
    #     manager.initialize(
    #         badger.deployer,
    #         badger.keeper,
    #         badger.keeper,
    #         badger.guardian,
    #         badger.devMultisig,
    #         {"from": badger.keeper},
    #     ),

    testStrat = badger.getStrategy("native.badger")

    # # Can add strategy
    # manager.approveStrategy(testStrat, {"from": deployer})
    # assert manager.isApprovedStrategy(testStrat) == True

    # # Can revoke strategy
    # manager.revokeStrategy(testStrat, {"from": deployer})
    # assert manager.isApprovedStrategy(testStrat) == False

    # Get tokens: swap BADGER -> wBTC on Uniswap, then on Sushiswap
    before = wbtc.balanceOf(manager)
    manager.swapExactTokensForTokensUniswap(
        badger.token,
        badger_swap_amount,
        [badger.token, registry.tokens.wbtc],
        {"from": keeper},
    )
    after = wbtc.balanceOf(manager)
    console.print("token swap uni", {"before": before, "after": after})
    assert after > before

    manager.swapExactTokensForTokensSushiswap(
        badger.token,
        badger_swap_amount,
        [badger.token, registry.tokens.wbtc],
        {"from": keeper},
    )
    after2 = wbtc.balanceOf(manager)
    console.print(
        "token swap sushi", {"before": before, "after": after, "after2": after2}
    )
    assert after2 > after

    for key in strat_keys:
        console.print("[blue]=== Running for {} ===[/blue]".format(key))
        strat = badger.getStrategy(key)
        # manager.approveStrategy(strat, {"from": deployer})

        # ===== Convert And Transfer Assets
        want = interface.IERC20(strat.want())

        # Native Staking: transfer a fixed amount of want directly
        if key == "native.badger":
            before = snap_strategy_balance(strat, manager)
            manager.transferWant(
                strat.want(), strat, badger_transfer_amount, {"from": keeper}
            )
            after = snap_strategy_balance(strat, manager)
            diff = diff_numbers_by_key(before, after)
            console.log("transfer only", key, before, after, diff)

        if key == "native.digg":
            before = snap_strategy_balance(strat, manager)
            manager.transferWant(
                strat.want(), strat, digg_transfer_amount, {"from": keeper}
            )
            after = snap_strategy_balance(strat, manager)
            diff = diff_numbers_by_key(before, after)
            console.log("transfer only", key, before, after, diff)

        # Pick the token/amount to swap from based on the sett's asset
        startToken = ""
        amount = 0
        if "Badger" in key:
            startToken = badger.token
            amount = badger_swap_amount
        elif "Digg" in key:
            startToken = digg.token
            amount = digg_swap_amount

        # LP Setts: swap into wBTC, add liquidity, then transfer the LP want
        if "uni" in key:
            before = snap_strategy_balance(strat, manager)
            console.print(
                "PreSwap", {"key": key, "startToken": startToken, "amount": amount}
            )
            manager.swapExactTokensForTokensUniswap(
                startToken, amount, [startToken, wbtc], {"from": keeper}
            )
            manager.addLiquidityUniswap(startToken, wbtc, {"from": keeper})
            after_swap = snap_strategy_balance(strat, manager)
            diff_swap = diff_numbers_by_key(before, after_swap)
            console.log("post swap", key, before, after_swap, diff_swap)

            manager.transferWant(
                strat.want(), strat, want.balanceOf(manager), {"from": keeper}
            )
            after_transfer = snap_strategy_balance(strat, manager)
            diff_transfer = diff_numbers_by_key(after_swap, after_transfer)
            console.log(
                "post transfer", key, after_swap, after_transfer, diff_transfer
            )

        if "sushi" in key:
            before = snap_strategy_balance(strat, manager)
            manager.swapExactTokensForTokensSushiswap(
                startToken, amount, [startToken, wbtc], {"from": keeper}
            )
            manager.addLiquiditySushiswap(startToken, wbtc, {"from": keeper})
            after_swap = snap_strategy_balance(strat, manager)
            diff_swap = diff_numbers_by_key(before, after_swap)
            console.log("post swap", key, before, after_swap, diff_swap)

            manager.transferWant(
                strat.want(), strat, want.balanceOf(manager), {"from": keeper}
            )
            after_transfer = snap_strategy_balance(strat, manager)
            diff_transfer = diff_numbers_by_key(after_swap, after_transfer)
            console.log(
                "post transfer", key, after_swap, after_transfer, diff_transfer
            )

        # ===== Deposit / Tend / Harvest via the manager
        tx = manager.deposit(strat, {"from": keeper})
        print("deposit events", tx.events)

        if strat.isTendable():
            tx = manager.tend(strat, {"from": keeper})
            print("tend events", tx.events)

        if key != "native.uniBadgerWbtc":
            tx = manager.harvest(strat, {"from": keeper})
            print("harvest events", tx.events)
def test_claims(badger: BadgerSystem, startBlock, endBlock, before_file, after_file):
    """
    Exercise badgerTree claims for a hand-picked set of users and check that
    each user's BADGER / DIGG balances move by the expected claim deltas.

    NOTE(review): startBlock, endBlock, and before_file are accepted but not
    used by the current implementation (a dead `before = before_file["claims"]`
    local was removed).
    """
    claims = after_file["claims"]
    digg = connect_digg("deploy-final.json")

    # Total claims must only increase
    total_claimable = sum_claims(claims)

    table = []
    # Each users' cumulative claims must only increase
    total_claimed = 0
    users = [
        web3.toChecksumAddress("0x302218182415dc9800179f50a8b16ff98b8d04c3"),
        web3.toChecksumAddress("0xbc159b71c296c21a1895a8ddf0aa45969c5f17c2"),
        web3.toChecksumAddress("0x264571c538137922c6e8aF4927C3D3F681399E50"),
        web3.toChecksumAddress("0x57ef012861c4937a76b5d6061be800199a2b9100"),
    ]
    for user, claim in claims.items():
        if user not in users:
            continue

        # Amounts already claimed on-chain for each token
        claimed = badger.badgerTree.getClaimedFor(user, [badger.token.address])[1][0]
        claimed_digg = badger.badgerTree.getClaimedFor(user, [digg.token.address])[1][0]

        badger_claimable = int(claim["cumulativeAmounts"][0])
        digg_claimable = int(claim["cumulativeAmounts"][1])

        badger_diff = badger_claimable - claimed
        digg_diff = digg_claimable - claimed_digg

        print("=== Claim: " + user + " ===")
        console.print(
            {
                "user": user,
                "badger_claimed": val(claimed),
                "badger_claimable": val(badger_claimable),
                "badger_diff": val(badger_diff),
                "digg_claimed": claimed_digg,
                "digg_claimable": digg_claimable,
                "digg_diff": digg_diff,
                # BUGFIX: `digg_contract` was undefined in this scope; use the
                # connected DIGG token for shares->fragments scaling.
                "digg_claimed_scaled": val(
                    digg.token.sharesToFragments(claimed_digg), decimals=9
                ),
                "digg_claimable_scaled": val(
                    digg.token.sharesToFragments(digg_claimable), decimals=9
                ),
                "digg_diff_scaled": val(
                    digg.token.sharesToFragments(digg_diff), decimals=9
                ),
            }
        )

        # Give the user gas money and impersonate them for the claim
        accounts[0].transfer(user, Wei("0.5 ether"))
        accounts.at(user, force=True)

        pre = badger.token.balanceOf(user)
        pre_digg = digg.token.balanceOf(user)
        pre_digg_shares = digg.token.sharesOf(user)

        tx = badger.badgerTree.claim(
            claim["tokens"],
            claim["cumulativeAmounts"],
            claim["index"],
            claim["cycle"],
            claim["proof"],
            {"from": user, "allow_revert": True},
        )
        print(tx.events)

        post = badger.token.balanceOf(user)
        post_digg = digg.token.balanceOf(user)
        post_digg_shares = digg.token.sharesOf(user)

        diff = post - pre
        table.append([user, "badger", pre, post, diff, claim["cumulativeAmounts"][0]])
        table.append(
            [
                user,
                "digg shares",
                pre_digg_shares,
                post_digg_shares,
                post_digg_shares - pre_digg_shares,
                claim["cumulativeAmounts"][1],
            ]
        )
        table.append([user, "digg tokens", pre_digg, post_digg, "", ""])
        print(
            tabulate(
                table, headers=["user", "token", "before", "after", "diff", "claim"]
            )
        )

        total_claimed += int(claim["cumulativeAmounts"][0])

        # BADGER balance must move by exactly the newly-claimable amount
        assert post == pre + (int(claim["cumulativeAmounts"][0]) - claimed)
        # DIGG share accounting allows a small rounding tolerance
        assert (
            post_digg_shares
            - (pre_digg_shares + (int(claim["cumulativeAmounts"][1]) - claimed_digg))
            < 10 ** 18
        )
    print(total_claimable, total_claimed, total_claimable - total_claimed)
def main():
    """
    Connect to the badger system on a local fork, log TWAP / oracle / AMM
    state, then simulate a DIGG rebase and report the supply and reserve
    changes.
    """
    # Connect badger system from file
    badger = connect_badger("deploy-final.json")
    digg = connect_digg("deploy-final.json")

    # Sanity check file addresses
    expected_multisig = "0xB65cef03b9B89f99517643226d76e286ee999e77"
    assert badger.devMultisig == expected_multisig

    # Multisig wrapper
    multi = GnosisSafe(badger.devMultisig, testMode=True)

    # Get price data from sushiswap, uniswap, and coingecko
    digg_usd_coingecko = 41531.72
    btc_usd_coingecko = 32601.13
    digg_per_btc = digg_usd_coingecko / btc_usd_coingecko

    uni_twap = get_average_daily_price("scripts/oracle/data/uni_digg_hour")
    sushi_twap = get_average_daily_price("scripts/oracle/data/sushi_digg_hour")
    avg_twap = Average([uni_twap, sushi_twap])

    console.print(
        {
            "uniTWAP": uni_twap,
            "sushiTWAP": sushi_twap,
            "averageTWAP": avg_twap,
        }
    )

    supply_before = digg.token.totalSupply()
    print("spfBefore", digg.token._sharesPerFragment())
    print("supplyBefore", digg.token.totalSupply())

    market_value = Wei(str(avg_twap) + " ether")
    print(market_value)
    print(int(market_value * 10 ** 18))
    print("digg_per_btc", digg_per_btc, avg_twap, market_value)

    centralized_safe = GnosisSafe(digg.centralizedOracle)

    print(digg.marketMedianOracle.providerReports(digg.centralizedOracle, 0))
    print(digg.marketMedianOracle.providerReports(digg.centralizedOracle, 1))
    print(digg.cpiMedianOracle.providerReports(digg.constantOracle, 0))
    print(digg.cpiMedianOracle.providerReports(digg.constantOracle, 1))
    print(digg.cpiMedianOracle.getData.call())

    sushi = SushiswapSystem()
    pair = sushi.getPair(digg.token, registry.tokens.wbtc)
    uni = UniswapSystem()
    uni_pair = uni.getPair(digg.token, registry.tokens.wbtc)

    print("pair before", pair.getReserves())
    print("uniPair before", uni_pair.getReserves())

    # Advance the chain until the rebase window opens
    while not digg.uFragmentsPolicy.inRebaseWindow():
        print("Not in rebase window...")
        chain.sleep(hours(0.1))
        chain.mine()

    digg.orchestrator.rebase({"from": badger.deployer})
    chain.mine()

    supply_after = digg.token.totalSupply()
    print("spfAfter", digg.token._sharesPerFragment())
    print("supplyAfter", supply_after)
    print("supplyChange", supply_after / supply_before)
    print("supplyChangeOtherWay", supply_before / supply_after)
    print("pair after", pair.getReserves())
    print("uniPair after", uni_pair.getReserves())
def connect_badger(
    badger_deploy_file=False,
    load_deployer=False,
    load_keeper=False,
    load_guardian=False,
    load_method=LoadMethod.KEYSTORE,
):
    """
    Connect to an existing badger deploy from file

    Required Fields:
        devMultisig
        opsMultisig
        deployer
        keeper
        guardian
    """
    # TODO: fix this for networks — the passed-in path is currently always
    # overridden by the active network's deploy file.
    # if not badger_deploy_file:
    badger_deploy_file = network_manager.get_active_network_badger_deploy()

    console.print(
        "[grey]Connecting to Existing Badger 🦡 System at {}...[/grey]".format(
            badger_deploy_file
        )
    )
    with open(badger_deploy_file) as f:
        badger_deploy = json.load(f)

    # Connect to existing badger deployment
    badger = BadgerSystem(
        badger_config,
        badger_deploy["deployer"],
        badger_deploy["keeper"],
        badger_deploy["guardian"],
        deploy=False,
        load_deployer=load_deployer,
        load_keeper=load_keeper,
        load_guardian=load_guardian,
        load_method=load_method,
    )
    # badger.globalStartBlock = badger_deploy["globalStartBlock"]

    # Proxy admins are optional; missing entries connect as None
    badger.connect_proxy_admins(
        badger_deploy.get("devProxyAdmin"),
        badger_deploy.get("daoProxyAdmin"),
        badger_deploy.get("opsProxyAdmin"),
    )
    if "testProxyAdmin" in badger_deploy:
        badger.connect_test_proxy_admin(
            "testProxyAdmin", badger_deploy["testProxyAdmin"]
        )

    badger.connect_multisig(badger_deploy["devMultisig"])
    badger.connect_ops_multisig(badger_deploy["opsMultisig"])
    if "dao" in badger_deploy:
        badger.connect_dao()
    if "treasuryMultisig" in badger_deploy:
        badger.connect_treasury_multisig(badger_deploy["treasuryMultisig"])

    badger.connect_logic(badger_deploy["logic"])
    # badger.connect_dev_multisig(badger_deploy["devMultisig"])

    # Optional vesting / rewards infrastructure: connect each entry present
    # in the deploy file (order matches the original call sequence).
    optional_connectors = (
        ("teamVesting", badger.connect_team_vesting),
        ("badgerHunt", badger.connect_badger_hunt),
        ("badgerTree", badger.connect_badger_tree),
        ("rewardsEscrow", badger.connect_rewards_escrow),
        ("honeypotMeme", badger.connect_honeypot_meme),
        ("communityPool", badger.connect_community_pool),
        ("daoBadgerTimelock", badger.connect_dao_badger_timelock),
        ("timelock", badger.connect_governance_timelock),
        ("badgerRewardsManager", badger.connect_rewards_manager),
        ("unlockScheduler", badger.connect_unlock_scheduler),
        ("rewardsLogger", badger.connect_rewards_logger),
        ("unifiedLogger", badger.connect_unified_logger),
        ("testGatedProxy", badger.connect_test_gated_proxy),
        ("opsGatedProxy", badger.connect_ops_gated_proxy),
    )
    for field, connect in optional_connectors:
        if field in badger_deploy:
            connect(badger_deploy[field])

    # Connect Sett
    badger.connect_sett_system(
        badger_deploy["sett_system"], geysers=badger_deploy.get("geysers")
    )

    # Connect DIGG
    if "digg_system" in badger_deploy:
        badger.add_existing_digg(connect_digg(badger_deploy_file))
    else:
        console.print("[yellow]No Digg found[/yellow]")

    return badger