Example #1
 def status(self):
     string = ""
     for s in self.contracts:
         string += s.__repr__() + "\n"
     gl("Farm instance initiated with the following contracts\n\n{}".format(
         string))
     time.sleep(1)
     return self
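gl is the project's logging helper, used in every snippet below but not included in this listing. A minimal stand-in, purely an assumption about its interface (it accepts extra positional flags, as in gl(queryString, False, False) in Example #12, plus an animated keyword), so the snippets can be run in isolation:

import sys
import time

def gl(message, *args, animated=False, **kwargs):
    # Minimal stand-in for the project's `gl` logging helper (assumed interface).
    # Extra positional flags and keywords are accepted and ignored.
    if animated:
        # Print character by character to mimic an "animated" log line
        for ch in str(message):
            sys.stdout.write(ch)
            sys.stdout.flush()
            time.sleep(0.01)
        sys.stdout.write("\n")
    else:
        print(message)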
Example #2
 def safe_end(self):
     # Keep running unless `config/end.txt` contains the literal string "True"
     try:
         with open("config/end.txt") as endfile:
             run = endfile.read().strip() != "True"
             if not run:
                 gl("Termination initiated through `config/end.txt` file")
             return run
     except OSError:
         # A missing or unreadable file means: keep running
         return True
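safe_end() re-reads config/end.txt on every iteration of the main loop, so the scraper can be stopped without killing the process: writing the literal string True to that file ends the loop at the next safe point. A tiny sketch of requesting such a shutdown:

import os

# Ask the running farm to stop at the next loop boundary;
# safe_end() checks this file on every iteration.
os.makedirs("config", exist_ok=True)
with open("config/end.txt", "w") as endfile:
    endfile.write("True")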
Example #3
 def save_results(self, chunk, contract, aws_bucket, useBigQuery, sync=False):
     del chunk['day']
     filename = datetime.utcfromtimestamp(chunk.iloc[0][0]).strftime("%Y_%m_%d")
     
     chunk.columns = contract.headerColumn
     
     csv_buf = io.StringIO()
     chunk.to_csv(csv_buf, index = False)
     
     # BigQuery Upload
     if useBigQuery:
         try:
             client  # Reuse the BigQuery client if it already exists
         except NameError:
             # Set Google credentials and create the client on first use
             os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = 'tokendata.json'
             client = bigquery.Client()
         table_id = '{}.{}'.format(contract.name, contract.method.simpleExp)
         ts = self.get_table_schema(contract)
         chunk['txhash'] = chunk['txhash'].astype(str)
         for column in chunk:
             if chunk[column].dtype == "object":
                 chunk[column] = chunk[column].astype(str)
         if ts:
             chunk.to_gbq(table_id, if_exists="append", chunksize=10000000, table_schema=ts)
             sync=True
         else:
             try:
                 chunk.to_gbq(table_id, if_exists="append", chunksize=10000000)
                 sync=True
             except InvalidSchema:
                 ts = self.get_table_schema(contract, None, True)
                 chunk.to_gbq(table_id, if_exists="append", chunksize=10000000, table_schema=ts)
                 sync=True
         assert sync
         gl(" -- BigQuery Sync successful --")
     
     # Upload the CSV chunk to S3
     res = s3.put_object(Body=csv_buf.getvalue(),
                         Bucket=aws_bucket,
                         Key='contracts/{}_{}/csv/{}.csv'.format(
                             contract.name,
                             contract.method.canonicalExpression.split("(")[0].lower(),
                             filename))
     assert res["ResponseMetadata"]["HTTPStatusCode"] == 200
             
     fK = 'config/{}/lastSafedBlock/{}_{}.txt'.format("contracts",
                                                      contract.name,
                                                      contract.method.canonicalExpression.split("(")[0].lower())
     res = s3.put_object(Body=str(chunk.iloc[-1]['blocknumber']), Bucket=aws_bucket, Key=fK)
     assert res["ResponseMetadata"]["HTTPStatusCode"] == 200
     gl(" -- AWS Sync successful --")
     return True
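The upload in save_results() never touches the local disk: the DataFrame chunk is serialized into an io.StringIO buffer and handed to s3.put_object as a plain string. A minimal, self-contained sketch of that pattern (bucket and key names are placeholders):

import io

import boto3
import pandas as pd

# Serialize a DataFrame to CSV in memory and upload it straight to S3
df = pd.DataFrame({"blocknumber": [12000001], "value": [42]})

csv_buf = io.StringIO()
df.to_csv(csv_buf, index=False)

s3 = boto3.client("s3")
res = s3.put_object(Body=csv_buf.getvalue(),
                    Bucket="my-example-bucket",                  # placeholder
                    Key="contracts/example/csv/2021_01_01.csv")  # placeholder
assert res["ResponseMetadata"]["HTTPStatusCode"] == 200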
Example #4
def restore_fromBlock_from_AWS(contract, aws_bucket=None, secureStart=True):
    gl("Restoring last safed Block from AWS", animated=True)

    # Loop over dates backwards, starting from today
    for date in (datetime.datetime.now() - datetime.timedelta(days=i)
                 for i in range(1000)):
        fileKey = "contracts/{}_{}/csv/{}.csv".format(
            contract.name, contract.method.simpleExp,
            date.strftime("%d_%m_%Y"))

        # get last row of file and set last mined block to the contract's `fromBlock`
        try:
            df = pd.read_csv(
                s3.get_object(Bucket=aws_bucket, Key=fileKey)['Body'])
            contract.fromBlock = df.iloc[-1]['blocknumber'] + 1
            gl("'FromBlock' successfully loaded from AWS")
            if secureStart:
                ip = input(
                    "Overwriting `startBlock` for {} to {} - please verify (y/n)"
                    .format(contract.name, contract.fromBlock))
                assert (ip != "n")

            # Create config file on AWS
            fK = 'config/{}/lastSafedBlock/{}_{}.txt'.format(
                "contracts", contract.name, contract.method.simpleExp)
            s3.put_object(Body=str(contract.fromBlock - 1),
                          Bucket=aws_bucket,
                          Key=fK)
            gl("FromBlock' stored on AWS\n")
            time.sleep(2)
            return True

        except ClientError as ex:
            if ex.response['Error']['Code'] == 'NoSuchKey':
                continue
            else:
                gl(ex)
                break
        except Exception:
            continue
    gl("--- Nothing loaded from AWS ---\n")
    return False
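restore_fromBlock_from_AWS() probes up to 1,000 dated keys one by one and relies on the NoSuchKey error code to skip missing days. An alternative sketch, assuming the %Y_%m_%d filename pattern used by save_results (which makes lexicographic order equal chronological order), is to list the prefix once and take the newest key:

def latest_csv_key(s3, aws_bucket, contract):
    # Sketch only: list all daily CSVs for this contract/method and return the
    # newest key instead of probing dated keys until one exists
    # (pagination beyond 1,000 keys is ignored for brevity).
    prefix = "contracts/{}_{}/csv/".format(contract.name, contract.method.simpleExp)
    resp = s3.list_objects_v2(Bucket=aws_bucket, Prefix=prefix)
    keys = [obj["Key"] for obj in resp.get("Contents", [])]
    return max(keys) if keys else None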
Example #5
   def try_to_save_day(self, results, contract, aws_bucket, useBigQuery):
       # This guards the recursive case: after the dataFrame is split,
       # it can happen that nothing remains but an empty dataFrame
       if results.empty and contract.endAtBlock is None:
           gl("Empty Dataframe...")
           return False
       
       # Get day of month (ex. 04) for the first entry in the results
       if self.results.empty:
           firstEntry = results.iloc[0]["day"]
       else:
           firstEntry = self.results.iloc[0]["day"]
 
       # Get day of month (ex. 04) for the last entry in the results
       lastEntry = results.iloc[-1]["day"]
       
       # No save because the day isn't over yet - unless endAtBlock is set
       if firstEntry == lastEntry:
           if contract.endAtBlock is not None:
               if contract.fromBlock >= contract.endAtBlock:
                   self.results = self.results.append(results)
                   self.save_results(self.results, contract, aws_bucket, useBigQuery)
                   contract.fileCounter += 1
                   self.results = pd.DataFrame()
                   return True
           self.results = self.results.append(results)
           return True
       
       # Different days in the results
       else:
           # First day in the dataFrame will be saved
           res = results[results['day'] == firstEntry]
           self.results = self.results.append(res)
           self.save_results(self.results, contract, aws_bucket, useBigQuery)
           contract.fileCounter += 1
           self.results = pd.DataFrame()
           # The remaining rows, which belong to other days than the one just saved,
           # are split off and fed back into this function recursively
           rest = results[results['day'] != firstEntry]
           return self.try_to_save_day(rest, contract, aws_bucket, useBigQuery)
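The accumulation above relies on DataFrame.append, which was removed in pandas 2.0. On newer pandas the same buffering can be written with pd.concat; a minimal equivalent:

import pandas as pd

def append_results(accumulated, new_rows):
    # pd.concat([...]) is the pandas >= 2.0 replacement for accumulated.append(new_rows)
    return pd.concat([accumulated, new_rows])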
Example #6
    def __init__(self,
                 contracts,
                 keyPath=".apikey/key.txt",
                 logging=True,
                 aws_bucket=None,
                 useBigQuery=False,
                 secureSwitch=True):
        self.contracts = contracts  # Contracts objs.
        self.contract_length = len(contracts)  # Number of contracts
        self.waitingMonitor = 0  # Helper to slow down scraping
        with open(keyPath) as k:  # Load API KEY
            self.KEY = str(k.read().strip())
        self.latestBlock = self.get_latest_block()  # Set latest block
        self.aws_bucket = aws_bucket  # AWS Bucket name

        self.lag = 4000  # Block delay to not risk invalid blocks
        self.useBigQuery = useBigQuery  # BigQuery Upload
        self.canSwitch = False  # Specify if the contracts.csv file can be switched
        self.secureSwitch = secureSwitch  # Whether a confirmation prompt is required after a config-file switch
        self.currentContractPath = self.contracts[0].path
        self.justSwitched = False  # Binary variable to alter contracts' chunksize after config switch
        gl("\nInitiating Farm Instance with {} Contracts/Methods".format(
            len(contracts)),
           animated=True)
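For orientation, a hypothetical way this constructor could be wired together with load_contracts() and start_farming(). The class name Farm is inferred from the log messages and is not confirmed by this listing; the bucket name is a placeholder:

# Hypothetical usage sketch - `Farm` as a class name and the bucket are assumptions
contracts = load_contracts([],
                           start=True,
                           config_location="contracts",
                           aws_bucket="my-example-bucket",
                           secureStart=False)
farm = Farm(contracts,
            keyPath=".apikey/key.txt",
            aws_bucket="my-example-bucket",
            useBigQuery=False)
farm.start_farming()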
Example #7
 def get_latest_block(self):
     query = 'https://api.etherscan.io/api?module=proxy&action=eth_blockNumber&apikey={}'.format(
         self.KEY)
     try:
         return from_hex(
             json.loads(requests.get(query).content)['result'])
     except Exception:
         gl("Something failed while getting the latest block:")
         # Retry once and inspect the response body (not the URL) for a gateway error
         try:
             response = requests.get(query)
             if "Bad Gateway" in response.text:
                 gl("Bad Gateway - latest Block")
                 time.sleep(10)
                 return self.latestBlock
             gl(response.text)
             lB = from_hex(json.loads(response.content)['result'])
         except Exception:
             lB = self.latestBlock
         time.sleep(10)
         return lB
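from_hex is used throughout but never defined in this listing. Etherscan's proxy endpoints return quantities as 0x-prefixed hex strings, so a minimal stand-in (an assumption, not necessarily the project's own helper) is simply:

def from_hex(value):
    # Convert an Etherscan hex string such as "0x10d4f" into an int (assumed helper)
    return int(value, 16)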
Example #8
def prepare_event(e, methodId, KEY):
    if methodId in [
            "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",  # Transfer
            "0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925",  # Approval
            "0xab8530f87dc9b59234c4623bf917212bb2536d647574c8e7e5da92c2ede0c9f8"
    ]:
        va = from_hex(e['data'])
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        tf = '0x' + e['topics'][1][-40:]
        tt = '0x' + e['topics'][2][-40:]
        return [ts, bn, th, ti, li, tf, tt, va, gp, gu]

    elif methodId in [
            "0xcb8241adb0c3fdb35b70c24ce35c5eb0c17af7431c99f827d44a445ca624176a",  # Issue
            "0x702d5967f45f6513a38ffc42d6ba9bf230bd40e8f53b16363c7eb4fd2deb9a44",  # Redeem
    ]:
        va = from_hex(e['data'])
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        return [ts, bn, th, ti, li, va, gp, gu]

    elif methodId in [
            "0xf5c174d57843e57fea3c649fdde37f015ef08750759cbee88060390566a98797",  # SupplyIncreased
            "0x1b7e18241beced0d7f41fbab1ea8ed468732edbcb74ec4420151654ca71c8a63",  # SupplyDecreased
            "0x0f6798a560793a54c3bcfe86a93cde1e73087d944c0ea20544137d4121396885",  # Mint
            "0xcc16f5dbb4873280815c1ee09dbd06736cffcc184412cf7a71a0fdb75d397ca5",  # Burn
            "0xc65a3f767206d2fdcede0b094a4840e01c0dd0be1888b5ba800346eaa0123c16",  # Issue
            "0x222838db2794d11532d940e8dec38ae307ed0b63cd97c233322e221f998767a6"  # Redeem
    ]:

        va = from_hex(e['data'])
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        tf = '0x' + e['topics'][1][-40:]
        return [ts, bn, th, ti, li, tf, va, gp, gu]

    elif methodId in [
            "0x61e6e66b0d6339b2980aecc6ccc0039736791f0ccde9ed512e789a7fbdd698c6"
    ]:  # DestroyedBlackFunds
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        da1 = '0x' + e['data'][26:66]
        da2 = from_hex('0x' + e['data'][-20:])
        return [ts, bn, th, ti, li, da1, da2, gp, gu]

    elif methodId in [
            "0x649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"
    ]:  # DepositEvent
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        pk = e['data'][386:482]
        si = e['data'][834:1026]
        cr = e['data'][578:642]
        am = e['data'][706:722]
        ix = e['data'][1090:1106]
        return [ts, bn, th, ti, li, pk, cr, am, si, ix, gp, gu]

    elif methodId in [
            "0x77f92a1b6a1a11de8ca49515ad4c1fad45632dd3442167d74b90b304a3c7a758"
    ]:  # Swap TORN
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        re = '0x' + e['topics'][1][-40:]
        pt = from_hex(e['data'][:66])
        to = from_hex("0x" + e['data'][-60:])
        return [ts, bn, th, ti, li, re, pt, to, gp, gu]

    elif methodId in [
            "0xa945e51eec50ab98c161376f0db4cf2aeba3ec92755fe2fcd388bdbbb80ff196"
    ]:  # DepositEvent TORN ETH
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])

        # Extra Query to get the initiator of the tx
        _API = "https://api.etherscan.io/api?{}"
        _QUERY = "module=proxy&action=eth_getTransactionByHash&txhash={}&apikey={}"
        _queryString = _API.format(_QUERY.format(th, KEY))
        _res = None
        while _res is None:
            try:
                _res = json.loads(requests.get(_queryString).content)
            except JSONDecodeError:
                _res = None
                gl(requests.get(_queryString).content)
                gl("Some strange JSONDecodeError")
                time.sleep(1)

        _res = _res['result']
        time.sleep(0.1)
        tf = _res["from"]
        no = from_hex(_res["nonce"])
        va = from_hex(_res['value'])

        return [ts, bn, th, ti, li, tf, va, no, gp, gu]

    elif methodId in [
            "0xe9e508bad6d4c3227e881ca19068f099da81b5164dd6d62b2eaf1e8bc6c34931"
    ]:  # WithdrawEvent TORN ETH
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])

        tt = "0x" + e['data'][26:66]

        # Extra Query to get the initiator of the tx
        _API = "https://api.etherscan.io/api?{}"
        _QUERY = "module=proxy&action=eth_getTransactionByHash&txhash={}&apikey={}"
        _queryString = _API.format(_QUERY.format(th, KEY))
        _res = None
        while _res is None:
            try:
                _res = json.loads(requests.get(_queryString).content)
            except JSONDecodeError:
                _res = None
                gl(requests.get(_queryString).content)
                gl("Some strange JSONDecodeError")
                time.sleep(1)

        _res = _res['result']
        time.sleep(0.1)
        tf = _res["from"]
        no = from_hex(_res["nonce"])
        va = from_hex(_res['value'])

        return [ts, bn, th, ti, li, tf, tt, va, no, gp, gu]

    elif methodId in [
            '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67'
    ]:  # Uniswap V3 Pool Swap
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        se = '0x' + e['topics'][1][-40:]
        re = '0x' + e['topics'][2][-40:]
        a0 = e['data'][2:66]
        a1 = e['data'][66:130]
        sP = from_hex(e['data'][130:194])
        lq = from_hex(e['data'][194:258])
        tk = from_hex(e['data'][258:322])
        return [ts, bn, th, ti, li, se, re, a0, a1, sP, lq, tk, gp, gu]

    elif methodId in [
            "0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822"
    ]:  # Uniswap V2 Pool Swap
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        se = '0x' + e['topics'][1][-40:]
        re = '0x' + e['topics'][2][-40:]
        a0I = from_hex(e['data'][2:66])
        a1I = from_hex(e['data'][66:130])
        a0O = from_hex(e['data'][130:194])
        a1O = from_hex(e['data'][194:258])
        return [ts, bn, th, ti, li, se, re, a0I, a1I, a0O, a1O, gp, gu]

    else:
        bn = from_hex(e['blockNumber'])
        ts = from_hex(e['timeStamp'])
        th = e['transactionHash']
        ti = from_hex(e['transactionIndex'])
        gp = from_hex(e['gasPrice'])
        gu = from_hex(e['gasUsed'])
        li = from_hex(e['logIndex'])
        da = e['data']
        chunk = [ts, bn, th, ti, li, gp, gu]
        if len(e['topics']) > 1:
            to1 = e['topics'][1]
            chunk.append(to1)
        if len(e['topics']) > 2:
            to2 = e['topics'][2]
            chunk.append(to2)
        if len(e['topics']) > 3:
            to3 = e['topics'][3]
            chunk.append(to3)
        return chunk
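To make the topic decoding concrete: for indexed address parameters, Etherscan returns 32-byte topics in which the address occupies the last 40 hex characters, which is exactly what the '0x' + e['topics'][n][-40:] slices recover. A hypothetical Transfer log entry (all values invented) and the row prepare_event would build from it:

# Hypothetical Etherscan `getLogs` entry for an ERC-20 Transfer (values are made up)
transfer_event = {
    'data': '0x' + '0' * 59 + 'f4240',   # uint256 amount = 1,000,000
    'blockNumber': '0xc5043f',
    'timeStamp': '0x60f5a162',
    'transactionHash': '0x' + 'ab' * 32,
    'transactionIndex': '0x4a',
    'gasPrice': '0x4a817c800',
    'gasUsed': '0xd6d8',
    'logIndex': '0x7',
    'topics': [
        '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',  # Transfer signature
        '0x' + '0' * 24 + '11' * 20,      # `from`, left-padded to 32 bytes
        '0x' + '0' * 24 + '22' * 20,      # `to`, left-padded to 32 bytes
    ],
}

# Yields [timeStamp, blockNumber, txHash, txIndex, logIndex, from, to, value, gasPrice, gasUsed]
row = prepare_event(transfer_event, transfer_event['topics'][0], KEY="<api key>")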
Example #9
    def start_farming(self):
        try:
            # endless stays True while config/end.txt is not set to "True",
            # so the program can be stopped safely at the start of an iteration
            endless = True
            self.log_header()
            while endless:
                endless = self.safe_end()
                # Slow down program if the latest block is reached for every token
                self.adjust_speed()
                # Update latestBlock
                self.latestBlock = self.get_latest_block()

                if self.canSwitch:
                    print("START SWITCH")
                    gl("Monitor Count: {}\nContracts: {}".format(
                        self.waitingMonitor, self.contract_length))
                    gl("Switch contract.csv config file", animated=True)
                    self.currentContractPath = self.get_next_file()
                    self.contracts = []
                    gl("File switched", animated=True)
                    gl("Waiting until {} - Crtl C to proceed".format(
                        self.get_future_startTime()),
                       animated=True)
                    try:
                        time.sleep(86400 / 2)
                    except KeyboardInterrupt:
                        gl("Continuing", animated=True)

                    start = True
                    self.justSwitched = True
                    self.secureSwitch = False
                    self.waitingMonitor = 0  # Reset
                    self.canSwitch = False  # Reset
                else:
                    self.currentContractPath = self.contracts[0].path
                    start = False
                # Load or remove new contracts
                self.contracts = load_contracts(
                    self.contracts,
                    start,
                    config_location=self.currentContractPath,
                    aws_bucket=self.aws_bucket,
                    secureStart=self.secureSwitch)
                if start and self.justSwitched:
                    self.justSwitched = False
                    gl("Contracts' chunksize set to 100")
                    for contract in self.contracts:
                        contract.chunksize = 100

                self.contract_length = len(self.contracts)

                # Loop over the list of contracts
                for i in self.contracts:
                    # If latestBlock is reached => wait
                    if self.not_wait(i):
                        # API request
                        query = i.query_API(self.KEY)
                        # Try to increase the chunksize
                        if i.chunksize_could_be_larger() and not i.chunksizeLock:
                            i.increase_chunksize()
                        if query:
                            # Prepare raw request for further processing
                            chunk = i.mine(query, i.method.id, self.KEY)
                            gl(i.log_to_console(chunk))
                            result = i.DailyResults.enrich_daily_results_with_day_of_month(
                                chunk)

                            # Try to save the results
                            i.DailyResults.try_to_save_day(
                                result, i, self.aws_bucket, self.useBigQuery)
                        else:
                            gl(" {} - No records for {} with method {} and chunk {:,}-{:,}"
                               .format(from_unix(datetime.now()), i.name,
                                       i.method.simpleExp,
                                       i.fromBlock - i.chunksize, i.fromBlock))

                    else:
                        gl(" {} - Waiting for {} with method {}".format(
                            from_unix(datetime.now()), i.name,
                            i.method.simpleExp))
                        if not i.shouldWait:
                            i.shouldWait = True
                            self.waitingMonitor += 1
                            gl("Switch activated: {}".format(
                                str(self.canSwitch)))
                            gl("Monitor Count: {}\nContracts:{}".format(
                                self.waitingMonitor, self.contract_length))
                        self.wait(i)
        except KeyboardInterrupt:
            gl("Application stops", animated=True)
            endless = False
Example #10
 def log_header(self):
     header = ("Timestamp", "Contract", "Current Chunk", "Chunk Timestamp",
               "Events", "Chsz", "Fc")
     log = "\033[4m{:^23}-{:^18}|{:^21}| {:^20}|{:^6}|{:^6}|{:^6}\033[0m".format(
         *header)
     gl(log)
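The header line uses raw ANSI escape sequences: \033[4m switches the terminal to underlined text and \033[0m resets all formatting. A standalone one-liner showing the same effect:

# \033[4m = underline on, \033[0m = reset formatting
print("\033[4m{:^23}-{:^18}\033[0m".format("Timestamp", "Contract"))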
Example #11
 def try_activate_contract_change(self):
     contractPaths = self.get_config_files()
     if len(contractPaths) > 1:
         gl("Activating config file switch...")
         self.canSwitch = True
Example #12
    def query_API(self, KEY):
        res = None
        # Create the actual API Request
        queryString = APIQuery.format(
            API, self.fromBlock,
            int(self.fromBlock) + int(self.chunksize), self.method.id,
            self.addr, KEY)

        gl(queryString, False, False)
        # Submit Request
        headers = {
            'User-Agent':
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36'
        }
        try:
            res = " "
            while res == " ":
                try:
                    res = json.loads(
                        requests.get(queryString, headers=headers).content)
                except KeyboardInterrupt:
                    gl("Application stops", animated=True)
                    exit(1)
                except:
                    time.sleep(60)
                    continue
        except JSONDecodeError:
            gl(requests.get(queryString).content)
            gl("Some strange JSONDecodeError")
            return None
        except KeyboardInterrupt:
            gl("Application stops", animated=True)
            exit(1)
        except Exception as e:
            gl("Some other strange error")
            gl(str(e))
            time.sleep(60)
            return None

        # Catch fails
        # If nothing is found, the requested block range didn't contain any relevant events
        if res['message'] == 'No records found':
            self.fromBlock += self.chunksize + 1
            self.chunksizeAdjuster = np.append(self.chunksizeAdjuster,
                                               [0])[-10:]
            return None

        # If API endpoint blocks request, then wait and try again next iteration (within contract array in farm)
        if (res['status'] == '0' or not res):
            gl('... request failed for {}'.format(self.addr))
            time.sleep(5)
            return

        # Check if len of returned results is the maximum of 1000
        # If so, enter recursive mode with a smaller chunksize - try again
        if len(res['result']) >= 1000:  # Request too large
            if self.chunksize != 1:
                self.chunksize -= round(self.chunksize / 3)
                gl('... decreasing chunksize for {} to {:,.0f}'.format(
                    self.name, self.chunksize))
                return self.query_API(KEY)  # Recurse with the smaller chunksize
            gl("Chunksize was already at 1. Wrong block mined")

        # Add len of result to chunksizeAdjuster list and remove first element
        self.chunksizeAdjuster = np.append(self.chunksizeAdjuster,
                                           [len(res['result'])])[-10:]

        #Set new fromBlock for the next round
        self.fromBlock += self.chunksize + 1

        return res['result']
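query_API formats two module-level templates, API and APIQuery, that are not part of this listing. Judging by the arguments passed in (fromBlock, toBlock, topic0, address, API key), they presumably wrap Etherscan's getLogs action; a sketch of what such constants could look like, offered as an assumption rather than the project's actual definitions:

API = "https://api.etherscan.io/api"
APIQuery = ("{}?module=logs&action=getLogs"
            "&fromBlock={}&toBlock={}&topic0={}&address={}&apikey={}")

# Hypothetical expansion (USDT Transfer events, placeholder key):
# APIQuery.format(API, 12000000, 12002000,
#                 "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
#                 "0xdac17f958d2ee523a2206206994597c13d831ec7", "<api key>")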
def load_contracts(contracts=[],
                   start=True,
                   config_location="contracts",
                   aws_bucket=None,
                   secureStart=True):
    # If first call of function => gl header
    if start:
        gl("Loading new contracts", animated=True)
    # Create dict of contracts to check for newly appended ones
    cont = {}
    for contract in contracts:
        if contract.addr in cont.keys():
            cont[contract.addr].append(contract.method.simpleExp)
        else:
            cont[contract.addr] = [contract.method.simpleExp]

    #
    # Load Contracts
    #
    # AWS storage location
    fileKey = "config/{}/contracts.csv".format(config_location)
    # Load file
    objB = s3.get_object(Bucket=aws_bucket, Key=fileKey)['Body']
    contractArray = objB.read().decode("utf-8").strip().split("\n")
    #Loop over list of contract entries
    for contract_string in contractArray:
        contAddr = contract_string.split(",")[0]
        # Remove contracts whose address field is set to `remove`
        if contAddr == "remove":
            for i in contracts:
                if i.name == contract_string.split(
                        ",")[1] and contract_string.split(",")[2].split(
                            "(")[0].lower() == i.method.simpleExp:
                    gl("\n ---Contract of `{}` with method {} removed---\n".
                       format(i.name, i.method.simpleExp))
                    del contracts[contracts.index(i)]

        # If nothing changed, the contract remains in the farm
        elif contAddr in cont.keys() and contract_string.split(",")[2].split(
                "(")[0].lower() in cont[contAddr]:
            continue
        # new contract => Contract object initiated and provided to the farm
        else:
            contracts.append(
                Contract(*tuple(
                    re.split(r",(?=.*\()|,(?!.*\))", contract_string))))
            contracts[-1].path = config_location
            if start:
                gl("Contract loaded @  {}".format(contracts[-1].addr))
                gl("  |--- Name        {}".format(contracts[-1].name))
                gl("  |--- Method      {}".format(
                    contracts[-1].method.canonicalExpression))
                gl("  |--- Method ID   {}".format(contracts[-1].method.id))
                gl("  |--- StartBlock  {:,}".format(
                    int(contracts[-1].startBlock)))
                gl("  |--- Chunksize   {:,}\n".format(
                    int(contracts[-1].chunksize)))

            # Manage `startBlock`

            filename = "config/contracts/lastSafedBlock/" + contracts[
                -1].name + "_" + contracts[-1].method.simpleExp
            # try to load config file with startBlock from AWS
            try:
                awsfile = s3.get_object(Bucket=aws_bucket,
                                        Key=filename + ".txt")
            except:
                awsfile = False
            # if config file => set `startBlock`
            if awsfile:
                gl("Loading `startBlock` for {} from AWS config file".format(
                    contracts[-1].name))
                # Reuse the object fetched above instead of downloading it a second time
                newStartBlock = int(awsfile['Body'].read().decode("utf-8"))
                contracts[-1].fromBlock = newStartBlock + 1
                if secureStart:
                    ip = input(
                        "Overwriting `startBlock` for {} to {} - please verify (y/n)"
                        .format(contracts[-1].name, contracts[-1].fromBlock))
                    assert (ip != "n")
                gl("`Startblock` overwritten for {} to Block {:,}\n".format(
                    contracts[-1].name, contracts[-1].fromBlock))

            # Else, if results were already saved to AWS => set `startBlock` to the last saved block
            elif existing_aws_results(contracts[-1], aws_bucket=aws_bucket):
                restore_fromBlock_from_AWS(contracts[-1],
                                           aws_bucket=aws_bucket,
                                           secureStart=secureStart)

            # Else, take `startBlock` from the contracts file
            else:
                if secureStart:
                    ip = input(
                        "FromBlock not overwritten for {}. Please verify (y/n)"
                        .format(contracts[-1].name))
                    assert (ip != "n")
                gl("FromBlock for {} taken from config file".format(
                    contracts[-1].name))
                time.sleep(2)
    return contracts
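The comma-splitting regex in load_contracts() keeps commas inside the method signature's parentheses intact, so a row can carry a full canonical expression such as Transfer(address,address,uint256). A small illustration with a hypothetical contracts.csv row (the field layout is inferred from the attributes used above):

import re

# Hypothetical row: address, name, canonical method expression, startBlock, chunksize
row = "0xdac17f958d2ee523a2206206994597c13d831ec7,Tether,Transfer(address,address,uint256),4634748,2000"

fields = re.split(r",(?=.*\()|,(?!.*\))", row)
# -> ['0xdac17f958d2ee523a2206206994597c13d831ec7', 'Tether',
#     'Transfer(address,address,uint256)', '4634748', '2000']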