class ClickhouseTokenHolders:
    def __init__(self, indices=INDICES):
        self.indices = indices
        self.client = CustomClickhouse()

    def extract_token_transactions(self):
        """
        Creates materialized view with token transactions extracted from Transfer events

        This function is an entry point for prepare-erc-transactions-view operation
        """
        value_sql = utils.generate_sql_for_value("data")
        sql = """
            CREATE MATERIALIZED VIEW IF NOT EXISTS {index}
            ENGINE = ReplacingMergeTree() ORDER BY id
            POPULATE
            AS (
                SELECT
                    concat('0x', substring(topics[2], 27, 40)) AS from,
                    concat('0x', substring(topics[3], 27, 40)) AS to,
                    {value_sql},
                    data_value AS value,
                    id,
                    address AS token,
                    transactionHash,
                    blockNumber
                FROM {event}
                ANY INNER JOIN (
                    SELECT id AS address, decimals
                    FROM {contract}
                ) USING address
                WHERE topics[1] = '{transfer_topic}'
            )
        """.format(
            index=self.indices["token_transaction"],
            value_sql=value_sql,
            transfer_topic=TRANSFER_EVENT,
            event=self.indices["event"],
            contract=self.indices["contract_description"],
        )
        self.client.send_sql_request(sql)
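
# A minimal usage sketch, assuming INDICES maps "token_transaction", "event" and
# "contract_description" to existing tables, and that utils.generate_sql_for_value("data")
# expands to SQL producing a `data_value` column (inferred from the `data_value AS value`
# alias above, but an assumption nonetheless):
#
#   holders = ClickhouseTokenHolders()
#   holders.extract_token_transactions()  # safe to re-run: CREATE ... IF NOT EXISTS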
class ClickhouseIndices:
    def __init__(self, indices=INDICES):
        self.client = CustomClickhouse()
        self.indices = indices

    def _create_index(self, index, fields=None, primary_key=["id"]):
        """
        Create specified index in database with specified field types and primary key

        Parameters
        ----------
        index : str
            Name of index
        fields : dict
            Fields and their types
        primary_key : list
            Columns that form the primary key of the index
        """
        # Copy to avoid mutating the caller's dict (or a shared default argument)
        fields = dict(fields or {})
        fields["id"] = "String"
        fields_string = ", ".join(
            ["{} {}".format(name, field_type) for name, field_type in fields.items()])
        primary_key_string = ",".join(primary_key)
        create_sql = """
            CREATE TABLE IF NOT EXISTS {} ({})
            ENGINE = ReplacingMergeTree()
            ORDER BY ({})
        """.format(index, fields_string, primary_key_string)
        self.client.send_sql_request(create_sql)

    def prepare_indices(self):
        """
        Create all indices specified in schema/schema.py

        This function is an entry point for prepare-indices operation
        """
        for key, index in self.indices.items():
            if key in INDEX_FIELDS:
                self._create_index(index, INDEX_FIELDS[key],
                                   PRIMARY_KEYS.get(key, ["id"]))
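
# A hypothetical illustration of the inputs prepare_indices consumes (the real values
# live in schema/schema.py). With entries shaped like:
#
#   INDEX_FIELDS = {"block": {"number": "Int64", "timestamp": "DateTime"}}
#   PRIMARY_KEYS = {"block": ["id", "number"]}
#
# _create_index would issue roughly:
#
#   CREATE TABLE IF NOT EXISTS block (number Int64, timestamp DateTime, id String)
#   ENGINE = ReplacingMergeTree() ORDER BY (id,number)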
class ClickhouseTestCase(unittest.TestCase):
    def setUp(self):
        self.client = Client('localhost')
        self.client.execute('DROP TABLE IF EXISTS test')
        self.client.execute(
            'CREATE TABLE test (id String, x Int32, dict String) ENGINE = ReplacingMergeTree() ORDER BY id'
        )
        self.new_client = CustomClickhouse()

    def _add_records(self):
        documents = [{'x': 1, "id": "1"}, {'x': 2, "id": "2"},
                     {'x': 3, "id": "3"}, {'x': 100, "id": "100"}]
        formatted_documents = [{"_id": doc["id"], "_source": {'x': doc["x"]}}
                               for doc in documents]
        self.client.execute('INSERT INTO test (id, x) VALUES', documents)
        return formatted_documents

    def test_search(self):
        formatted_documents = self._add_records()
        result = self.new_client.search(index="test", fields=["x"])
        self.assertCountEqual(formatted_documents, result)

    def test_search_with_query(self):
        formatted_documents = self._add_records()
        formatted_documents = [doc for doc in formatted_documents if doc["_source"]['x'] < 3]
        result = self.new_client.search(index="test", query="WHERE x < 3", fields=["x"])
        self.assertSequenceEqual(formatted_documents, result)

    def test_count(self):
        formatted_documents = self._add_records()
        formatted_documents = [doc for doc in formatted_documents if doc["_source"]['x'] < 3]
        result = self.new_client.count(index="test", query="WHERE x < 3")
        assert result == len(formatted_documents)

    def test_iterate(self):
        test_per = 2
        formatted_documents = self._add_records()
        formatted_documents = [doc for doc in formatted_documents if doc["_source"]['x'] < 4]
        result = self.new_client.iterate(index="test", fields=["x"],
                                         query="WHERE x < 4", per=test_per)
        self.assertSequenceEqual(formatted_documents[0:test_per], next(result))
        self.assertSequenceEqual(formatted_documents[test_per:2 * test_per], next(result))

    def test_multiple_iterate(self):
        test_per = 2
        self._add_records()
        first_result = self.new_client.iterate(index="test", fields=["x"], per=test_per)
        next(first_result)
        second_result = self.new_client.iterate(index="test", fields=["x"], per=test_per)
        next(second_result)

    def test_iterate_without_id(self):
        self._add_records()
        result = self.new_client.iterate(index="test",
                                         fields=["distinct(ceiling(x / 10))"],
                                         return_id=False)
        result = next(result)
        assert len(result) == 2

    def test_iterate_with_and_without_final(self):
        self._add_records()
        self._add_records()
        result_with_final = self.new_client.iterate(index="test", fields=[])
        result_without_final = self.new_client.iterate(index="test", fields=[], final=False)
        assert len(next(result_without_final)) > len(next(result_with_final))

    def test_iterate_with_derived_fields(self):
        self._add_records()
        result = self.new_client.iterate(index="test", fields=["x - 1 AS y"])
        result_record = next(result)[0]
        assert "y" in result_record["_source"]

    def test_bulk_index(self):
        documents = [{"x": i} for i in range(10)]
        self.new_client.bulk_index(index="test",
                                   docs=[d.copy() for d in documents],
                                   id_field="x")
        result = self.client.execute('SELECT id FROM test')
        self.assertCountEqual(result, [(str(doc["x"]),) for doc in documents])

    def test_bulk_index_check_schema(self):
        self.new_client.bulk_index(index="test", docs=[{"y": 1, "id": 1}])
        result = self.client.execute('SELECT id FROM test')
        self.assertCountEqual(result, [('1',)])

    def test_bulk_index_empty_fields(self):
        documents = [{"id": 1, "x": 1}]
        self.new_client.bulk_index(index="test", docs=[d for d in documents])

    def test_bulk_index_dict_values(self):
        documents = [{"x": i, "dict": {"test": i}} for i in range(10)]
        self.new_client.bulk_index(index="test",
                                   docs=[d.copy() for d in documents],
                                   id_field="x")
        result = self.client.execute('SELECT dict FROM test')
        self.assertCountEqual(result, [(json.dumps(doc["dict"]),) for doc in documents])

    def test_bulk_index_split_records(self):
        test_docs = [{"docs": True}]
        test_chunks = ["records1", "records2"]
        self.new_client._split_records = MagicMock(return_value=test_chunks)
        self.new_client.client.execute = MagicMock()
        self.new_client._set_id = MagicMock()
        self.new_client._filter_schema = MagicMock()
        self.new_client.bulk_index(index="test_index", docs=test_docs)
        self.new_client._split_records.assert_called_with(test_docs)
        calls = [call(ANY, records) for records in test_chunks]
        self.new_client.client.execute.assert_has_calls(calls)

    def test_send_sql_request(self):
        formatted_documents = self._add_records()
        result = self.new_client.send_sql_request("SELECT max(x) FROM test")
        assert result == max(doc["_source"]["x"] for doc in formatted_documents)

    def test_split_records(self):
        test_record = {"test": "123"}
        test_record_size = sys.getsizeof(test_record)
        test_records = [test_record] * 5
        chunks = list(
            self.new_client._split_records(test_records,
                                           max_bytes=test_record_size * 2 + 1))
        self.assertSequenceEqual(
            chunks, [[test_record] * 2, [test_record] * 2, [test_record]])

    def test_split_records_one_record(self):
        test_record = {"test": "123"}
        test_record_size = sys.getsizeof(test_record)
        test_records = [test_record]
        chunks = list(
            self.new_client._split_records(test_records, max_bytes=test_record_size))
        self.assertSequenceEqual(chunks, [[test_record]])

    def test_split_records_same_chunk(self):
        test_record = {"test": "123"}
        test_record_size = sys.getsizeof(test_record)
        test_records = [test_record] * 6
        chunks = list(
            self.new_client._split_records(test_records,
                                           max_bytes=test_record_size * 2))
        self.assertSequenceEqual(
            chunks, [[test_record] * 2, [test_record] * 2, [test_record] * 2])
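
# These tests require a disposable ClickHouse server on localhost (setUp drops and
# recreates a table named `test`). A typical invocation, assuming the file is named
# test_clickhouse.py (hypothetical name):
#
#   python -m unittest test_clickhouse.ClickhouseTestCase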
class ClickhouseTokenPrices(ClickhouseContractTransactionsIterator):
    doc_type = 'token'
    block_prefix = 'prices_extracted'

    def __init__(self, indices=INDICES, parity_host=PARITY_HOSTS[0][-1]):
        self.indices = indices
        self.client = CustomClickhouse()
        self.web3 = Web3(HTTPProvider(parity_host))

    def _iterate_cc_tokens(self):
        """
        Iterate over ERC20 tokens

        Returns
        -------
        generator
            Generator that iterates over ERC20 tokens
        """
        return self._iterate_contracts(partial_query='WHERE standard_erc20 = 1',
                                       fields=["address"])

    def _get_cc_tokens(self):
        """
        Extract list of tokens

        Returns
        -------
        list
            List of ERC20 contracts
        """
        tokens = [token_chunk for token_chunk in self._iterate_cc_tokens()]
        token_list = [t['_source'] for token_chunk in tokens for t in token_chunk]
        return token_list

    def _construct_bulk_insert_ops(self, docs):
        """
        Assign id to each document

        Parameters
        ----------
        docs: list
            List of price records
        """
        for doc in docs:
            doc["id"] = doc['address'] + '_' + doc['timestamp'].strftime("%Y-%m-%d")

    def _insert_multiple_docs(self, docs, index_name):
        """
        Index multiple documents simultaneously

        Parameters
        ----------
        docs: list
            List of dictionaries with new data
        index_name: str
            Name of the index that contains inserted documents
        """
        for chunk in bulk_chunks(docs, docs_per_chunk=1000):
            self._construct_bulk_insert_ops(chunk)
            self.client.bulk_index(index=index_name, docs=chunk)

    def _set_moving_average(self, prices, window_size=MOVING_AVERAGE_WINDOW):
        """
        Perform moving average procedure over daily close prices

        Sets the 'average' field on each price record in place.

        Parameters
        ----------
        prices: list
            List of prices
        window_size: int
            Size of window
        """
        prices_stack = []
        for price in prices:
            prices_stack.append(price["close"])
            if len(prices_stack) == window_size:
                price["average"] = np.mean(prices_stack)
                prices_stack.pop(0)
            else:
                price["average"] = price["close"]

    def _process_hist_prices(self, prices):
        """
        Prepare extracted prices for the database

        Performs moving average procedure over prices, sets address and timestamp fields

        Parameters
        ----------
        prices: list
            List of token prices

        Returns
        -------
        list
            List of prepared prices
        """
        points = []
        self._set_moving_average(prices)
        for price in prices:
            point = {}
            point['BTC'] = price["average"]
            point['BTC'] = float('{:0.10f}'.format(point['BTC']))
            point['timestamp'] = datetime.datetime.fromtimestamp(price['time'])
            point['address'] = price['address']
            points.append(point)
        return points

    def _make_historical_prices_req(self, address, days_count):
        """
        Make call to CryptoCompare API to extract token historical data

        Parameters
        ----------
        address: str
            Token address
        days_count: int
            Days limit

        Returns
        -------
        list
            List of prices for specified symbol
        """
        symbol = self._get_symbol_by_address(address)
        url = 'https://min-api.cryptocompare.com/data/histoday?fsym={}&tsym=BTC&limit={}'.format(
            symbol, days_count)
        try:
            res = requests.get(url).json()
            for point in res['Data']:
                point['address'] = address
            return res['Data']
        except Exception:
            print("No exchange rate for {}".format(symbol))
            return None

    def _get_last_avail_price_date(self):
        """
        Get last price available in token_price index

        Returns
        -------
        datetime
            Timestamp of last available date
        """
        return self.client.send_sql_request(
            'SELECT MAX(timestamp) FROM {}'.format(self.indices['price']))

    def _get_days_count(self, now, last_price_date, limit=DAYS_LIMIT):
        """
        Count number of days for which prices are unavailable

        Parameters
        ----------
        now: date
            Current date
        last_price_date: date
            Timestamp of last available price
        limit: int
            Upper bound on the number of days

        Returns
        -------
        int
            Number of days between current date and last price in database, capped at limit
        """
        days_count = (now - last_price_date).days + 1
        return min(days_count, limit)

    def _get_symbol_abi(self, output_type):
        """Return mock ABI to get token symbol"""
        return [{
            "constant": True,
            "inputs": [],
            "name": "symbol",
            "outputs": [{"name": "", "type": output_type}],
            "payable": False,
            "stateMutability": "view",
            "type": "function"
        }]

    # TODO replace with contract_methods.py call
    def _get_symbol_by_address(self, address):
        """
        Get symbol of specified token

        Parameters
        ----------
        address: str
            Address of token

        Returns
        -------
        str
            Symbol of specified token
        """
        address = self.web3.toChecksumAddress(address)
        symbols = {}
        # Try both return types, since older tokens declare symbol() as bytes32
        for output_type in ['string', 'bytes32']:
            contract = self.web3.eth.contract(abi=self._get_symbol_abi(output_type),
                                              address=address)
            try:
                symbols[output_type] = contract.functions.symbol().call()
            except Exception as e:
                print(e)
        if 'string' in symbols:
            return symbols['string']
        else:
            return symbols.get('bytes32', "".encode('utf-8')).decode('utf-8').rstrip('\0')

    def _get_historical_multi_prices(self):
        """
        Extract historical token prices from CryptoCompare

        Returns
        -------
        list
            List of token historical prices
        """
        token_addresses = [token['address'] for token in self._get_cc_tokens()]
        now = datetime.datetime.now()
        last_price_date = self._get_last_avail_price_date()
        days_count = self._get_days_count(now, last_price_date)
        prices = []
        for token in tqdm(token_addresses):
            price = self._make_historical_prices_req(token, days_count)
            if price is not None:
                price = self._process_hist_prices(price)
                prices.append(price)
        prices = [p for price in prices for p in price]
        return prices

    def get_prices_within_interval(self):
        """
        Extract historical token prices and save them to the database

        This function is an entry point for download-prices operation
        """
        prices = self._get_historical_multi_prices()
        if prices is not None:
            self._insert_multiple_docs(prices, self.indices['price'])
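
# A minimal usage sketch, assuming PARITY_HOSTS[0][-1] points at a reachable Ethereum
# node RPC endpoint and the `price` index already exists; symbol lookup goes through
# web3, price history through CryptoCompare's public histoday endpoint:
#
#   prices = ClickhouseTokenPrices()
#   prices.get_prices_within_interval()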
class ClickhouseContractTransactions:
    def __init__(self, indices=INDICES):
        self.indices = indices
        self.client = CustomClickhouse()

    def _extract_first_bytes(self, func):
        """
        Create contract method signature and return first 4 bytes of this signature

        Parameters
        ----------
        func: str
            String that contains function name and argument types

        Returns
        -------
        str
            String with first 4 bytes of method signature in hex format
        """
        return str(Web3.toHex(Web3.sha3(text=func)[0:4]))[2:]

    def _extract_methods_signatures(self):
        """
        Return dictionary with first bytes of standard method signatures

        Returns
        -------
        dict
            Dictionary with first 4 bytes of method signatures in hex format
        """
        return {
            'erc20': {
                'totalSupply': self._extract_first_bytes('totalSupply()'),
                'balanceOf': self._extract_first_bytes('balanceOf(address)'),
                'allowance': self._extract_first_bytes('allowance(address,address)'),
                'transfer': self._extract_first_bytes('transfer(address,uint256)'),
                'transferFrom': self._extract_first_bytes('transferFrom(address,address,uint256)'),
                'approve': self._extract_first_bytes('approve(address,uint256)'),
            },
            'erc223': {
                'tokenFallback': self._extract_first_bytes('tokenFallback(address,uint256,bytes)')
            },
            'bancor_converter': {
                'convert': self._extract_first_bytes('convert(address,address,uint256,uint256)')
            }
        }

    def _get_standards(self):
        """
        Create dict with SQL defining the "standard_*" flag fields

        Returns
        -------
        dict
            Dictionary with keys "standard_*", where * is a standard name like erc20
            or erc223, and values that are queries checking whether the contract
            bytecode matches that standard
        """
        standards = self._extract_methods_signatures()
        return {
            "standard_" + standard: " AND ".join([
                "(bytecode LIKE '%{}%')".format(signature)
                for signature in signatures.values()
            ])
            for standard, signatures in standards.items()
        }

    def _get_fields(self):
        """
        Get string with materialized view field names and related queries

        Returns
        -------
        str
            Part of SQL request to create materialized view.
            Contains field names and definitions
        """
        standard_fields = self._get_standards()
        fields = {
            "id": "coalesce(address, id)",
            "blockNumber": "blockNumber",
            "address": "address",
            "owner": "from",
            "bytecode": "code"
        }
        fields.update(standard_fields)
        fields_string = ", ".join([
            "{} AS {}".format(field, alias)
            for alias, field in fields.items()
        ])
        return fields_string

    def extract_contract_addresses(self):
        """
        Create materialized view for contracts extracted from internal transactions table

        This function is an entry point for prepare-erc-transactions-view operation
        """
        fields_string = self._get_fields()
        engine_string = 'ENGINE = ReplacingMergeTree() ORDER BY id'
        condition = "type = 'create' AND error IS NULL AND parent_error IS NULL"
        sql = "CREATE MATERIALIZED VIEW IF NOT EXISTS {} {} POPULATE AS (SELECT {} FROM {} WHERE {})".format(
            self.indices["contract"],
            engine_string,
            fields_string,
            self.indices["internal_transaction"],
            condition
        )
        self.client.send_sql_request(sql)
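
# Worked example: Web3.sha3(text=...) is keccak-256, so _extract_first_bytes produces
# the standard 4-byte function selectors that appear in contract bytecode, e.g.:
#
#   _extract_first_bytes('transfer(address,uint256)')  -> 'a9059cbb'
#   _extract_first_bytes('balanceOf(address)')         -> '70a08231'
#   _extract_first_bytes('totalSupply()')              -> '18160ddd'
#
# which is why `bytecode LIKE '%a9059cbb%'` and friends act as an ERC20 heuristic.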
class ClickhouseBancorTrades:
    def __init__(self, indices=INDICES):
        self.indices = indices
        self.client = CustomClickhouse()

    def extract_trades(self):
        """
        Create view with Bancor trades extracted from Conversion events

        Decodes the from_token, to_token and trader addresses from event topics,
        and the raw amount and return values from the event data field
        """
        return_raw_sql = utils.generate_sql_for_value("return_raw")
        amount_raw_sql = utils.generate_sql_for_value("amount_raw")
        self.client.send_sql_request("""
            CREATE VIEW {trades_index}
            AS (
                SELECT id, from_token, to_token, trader, amount, return, transactionHash
                FROM (
                    SELECT id, from_token, to_token, trader, amount,
                        substring(data, 65, 66) AS return_raw,
                        {return_raw_sql},
                        return_raw_value AS return,
                        transactionHash
                    FROM (
                        SELECT id,
                            concat('0x', substring(topics[2], 27)) AS from_token,
                            concat('0x', substring(topics[3], 27)) AS to_token,
                            concat('0x', substring(topics[4], 27)) AS trader,
                            data,
                            substring(data, 3, 64) AS amount_raw,
                            {amount_raw_sql},
                            amount_raw_value AS amount,
                            transactionHash
                        FROM (
                            SELECT *
                            FROM {events_index}
                            WHERE topics[1] = '{conversion_event}'
                            AND address IN (
                                SELECT address
                                FROM {contracts_index}
                                WHERE standard_bancor_converter = 1
                            )
                        )
                        ANY LEFT JOIN (
                            SELECT id AS from_token, decimals
                            FROM {tokens_index}
                        ) USING from_token
                    )
                    ANY LEFT JOIN (
                        SELECT id AS to_token, decimals
                        FROM {tokens_index}
                    ) USING to_token
                )
            )
        """.format(
            trades_index=self.indices["bancor_trade"],
            events_index=self.indices["event"],
            tokens_index=self.indices["contract_description"],
            contracts_index=self.indices["contract"],
            conversion_event=CONVERSION_EVENT,
            amount_raw_sql=amount_raw_sql,
            return_raw_sql=return_raw_sql
        ))
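
# A minimal usage sketch; note that extract_trades issues a plain CREATE VIEW (no
# IF NOT EXISTS), so re-running it while the view exists raises an error. It assumes
# the contract view with the standard_bancor_converter flag has been built first (see
# ClickhouseContractTransactions) and that CONVERSION_EVENT holds the topic hash of
# Bancor's Conversion event:
#
#   ClickhouseContractTransactions().extract_contract_addresses()
#   ClickhouseBancorTrades().extract_trades()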