async def process_tokens_from_old_transfers(startBlockNumber: int, endBlockNumber: int, batchSize: int) -> None:
    """Re-process tokens and collections referenced by historic transfers.

    Walks blocks in [startBlockNumber, endBlockNumber) in batches of batchSize,
    collects the distinct (registryAddress, tokenId) pairs and registry
    addresses seen in that range, and hands them to the module-level
    _update_token_metadatas / _update_collections helpers (defined elsewhere
    in this file).

    Args:
        startBlockNumber: first block to scan (inclusive).
        endBlockNumber: last block to scan (exclusive).
        batchSize: number of blocks per query batch.
    """
    databaseConnectionString = Database.create_psql_connection_string(username=os.environ["DB_USERNAME"], password=os.environ["DB_PASSWORD"], host=os.environ["DB_HOST"], port=os.environ["DB_PORT"], name=os.environ["DB_NAME"])
    database = Database(connectionString=databaseConnectionString)
    saver = Saver(database=database)
    retriever = Retriever(database=database)
    s3manager = S3Manager(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    workQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-work-queue')
    tokenQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-token-queue')
    awsRequester = AwsRequester(accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    ethClient = RestEthClient(url='https://nd-foldvvlb25awde7kbqfvpgvrrm.ethereum.managedblockchain.eu-west-1.amazonaws.com', requester=awsRequester)
    requester = Requester()
    tokenMetadataProcessor = TokenMetadataProcessor(requester=requester, ethClient=ethClient, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    openseaApiKey = os.environ['OPENSEA_API_KEY']
    tokenOwnershipProcessor = TokenOwnershipProcessor(retriever=retriever)
    collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, openseaApiKey=openseaApiKey, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    tokenManager = TokenManager(saver=saver, retriever=retriever, tokenQueue=tokenQueue, collectionProcessor=collectionProcessor, tokenMetadataProcessor=tokenMetadataProcessor, tokenOwnershipProcessor=tokenOwnershipProcessor)
    await database.connect()
    await workQueue.connect()
    await s3manager.connect()
    await tokenQueue.connect()
    try:
        # De-duplication caches persist across batches so each token/collection
        # is processed at most once over the whole run.
        cache = set()
        registryCache = set()
        currentBlockNumber = startBlockNumber
        while currentBlockNumber < endBlockNumber:
            start = currentBlockNumber
            end = min(currentBlockNumber + batchSize, endBlockNumber)
            currentBlockNumber = end
            logging.info(f'Working on {start}-{end}...')
            query = (
                sqlalchemy.select(TokenTransfersTable.c.registryAddress, TokenTransfersTable.c.tokenId)
                .where(TokenTransfersTable.c.blockNumber >= start)
                .where(TokenTransfersTable.c.blockNumber < end)
            )
            result = await database.execute(query=query)
            tokensToProcess = set()
            collectionsToProcess = set()
            for (registryAddress, tokenId) in result:
                if (registryAddress, tokenId) in cache:
                    continue
                cache.add((registryAddress, tokenId))
                tokensToProcess.add((registryAddress, tokenId))
                if registryAddress in registryCache:
                    continue
                registryCache.add(registryAddress)
                collectionsToProcess.add(registryAddress)
            # Use logging (not print) so counts appear in structured logs.
            logging.info(f'len(tokensToProcess): {len(tokensToProcess)}')
            logging.info(f'len(collectionsToProcess): {len(collectionsToProcess)}')
            try:
                await _update_token_metadatas(tokensToProcess=tokensToProcess, tokenManager=tokenManager, retriever=retriever)
                await _update_collections(collectionsToProcess=collectionsToProcess, tokenManager=tokenManager, retriever=retriever)
            except Exception:
                # Narrowed from a bare except; record the failing range, then
                # propagate so the run stops with the original error.
                logging.error(f'Failed during: {start}-{end}')
                raise
    finally:
        # Always release connections, even when a batch fails.
        await database.disconnect()
        await workQueue.disconnect()
        await tokenQueue.disconnect()
        await s3manager.disconnect()
async def main() -> None:
    """Worker entry point: drain the work and token SQS queues forever.

    Work-queue messages are preferred; token-queue messages are processed only
    when the work queue is empty; when both are empty the loop idles for a
    minute. All connections are released on exit via ``finally``.
    """
    requestIdHolder = RequestIdHolder()
    name = os.environ.get('NAME', 'notd-api')
    version = os.environ.get('VERSION', 'local')
    environment = os.environ.get('ENV', 'dev')
    isRunningDebugMode = environment == 'dev'
    if isRunningDebugMode:
        logging.init_basic_logging()
    else:
        logging.init_json_logging(name=name, version=version, environment=environment, requestIdHolder=requestIdHolder)
    databaseConnectionString = Database.create_psql_connection_string(username=os.environ["DB_USERNAME"], password=os.environ["DB_PASSWORD"], host=os.environ["DB_HOST"], port=os.environ["DB_PORT"], name=os.environ["DB_NAME"])
    database = Database(connectionString=databaseConnectionString)
    saver = Saver(database=database)
    retriever = Retriever(database=database)
    s3manager = S3Manager(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    workQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-work-queue')
    tokenQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-token-queue')
    awsRequester = AwsRequester(accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    ethClient = RestEthClient(url='https://nd-foldvvlb25awde7kbqfvpgvrrm.ethereum.managedblockchain.eu-west-1.amazonaws.com', requester=awsRequester)
    blockProcessor = BlockProcessor(ethClient=ethClient)
    requester = Requester()
    tokenMetadataProcessor = TokenMetadataProcessor(requester=requester, ethClient=ethClient, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    openseaApiKey = os.environ['OPENSEA_API_KEY']
    collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, openseaApiKey=openseaApiKey, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    tokenOwnershipProcessor = TokenOwnershipProcessor(retriever=retriever)
    collectionActivityProcessor = CollectionActivityProcessor(retriever=retriever)
    revueApiKey = os.environ['REVUE_API_KEY']
    tokenManager = TokenManager(saver=saver, retriever=retriever, tokenQueue=tokenQueue, collectionProcessor=collectionProcessor, tokenMetadataProcessor=tokenMetadataProcessor, tokenOwnershipProcessor=tokenOwnershipProcessor, collectionActivityProcessor=collectionActivityProcessor)
    notdManager = NotdManager(blockProcessor=blockProcessor, saver=saver, retriever=retriever, workQueue=workQueue, tokenManager=tokenManager, requester=requester, revueApiKey=revueApiKey)
    processor = NotdMessageProcessor(notdManager=notdManager)
    slackClient = SlackClient(webhookUrl=os.environ['SLACK_WEBHOOK_URL'], requester=requester, defaultSender='worker', defaultChannel='notd-notifications')
    workQueueProcessor = MessageQueueProcessor(queue=workQueue, messageProcessor=processor, slackClient=slackClient, requestIdHolder=requestIdHolder)
    tokenQueueProcessor = MessageQueueProcessor(queue=tokenQueue, messageProcessor=processor, slackClient=slackClient, requestIdHolder=requestIdHolder)
    await database.connect()
    await s3manager.connect()
    await workQueue.connect()
    await tokenQueue.connect()
    try:
        while True:
            hasProcessedWork = await workQueueProcessor.execute_batch(batchSize=3, longPollSeconds=1, shouldProcessInParallel=True)
            if hasProcessedWork:
                continue
            hasProcessedToken = await tokenQueueProcessor.execute_batch(batchSize=10, longPollSeconds=1, shouldProcessInParallel=True)
            if hasProcessedToken:
                continue
            logging.info('No message received.. sleeping')
            # BUG FIX: time.sleep() would block the whole event loop for a
            # minute; asyncio.sleep yields so other tasks can keep running.
            await asyncio.sleep(60)
    finally:
        await database.disconnect()
        await s3manager.disconnect()
        await workQueue.disconnect()
        await tokenQueue.disconnect()
        await requester.close_connections()
async def process_token_ownerships(startTokenId: int, endTokenId: int, batchSize: int) -> None:
    """Recompute ownerships for token metadata ids in [startTokenId, endTokenId).

    Works through the id range in batches of batchSize, fanning each batch out
    with asyncio.gather, and posts start/success/failure notifications to
    Slack. Connections are always released via ``finally``.

    Args:
        startTokenId: first tokenMetadataId to process (inclusive).
        endTokenId: last tokenMetadataId to process (exclusive).
        batchSize: number of ids per batch.
    """
    databaseConnectionString = Database.create_psql_connection_string(username=os.environ["DB_USERNAME"], password=os.environ["DB_PASSWORD"], host=os.environ["DB_HOST"], port=os.environ["DB_PORT"], name=os.environ["DB_NAME"])
    database = Database(connectionString=databaseConnectionString)
    saver = Saver(database=database)
    retriever = Retriever(database=database)
    s3manager = S3Manager(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    workQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-work-queue')
    tokenQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-token-queue')
    awsRequester = AwsRequester(accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    ethClient = RestEthClient(url='https://nd-foldvvlb25awde7kbqfvpgvrrm.ethereum.managedblockchain.eu-west-1.amazonaws.com', requester=awsRequester)
    requester = Requester()
    tokenMetadataProcessor = TokenMetadataProcessor(requester=requester, ethClient=ethClient, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    openseaApiKey = os.environ['OPENSEA_API_KEY']
    tokenOwnershipProcessor = TokenOwnershipProcessor(retriever=retriever)
    collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, openseaApiKey=openseaApiKey, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    tokenManager = TokenManager(saver=saver, retriever=retriever, tokenQueue=tokenQueue, collectionProcessor=collectionProcessor, tokenMetadataProcessor=tokenMetadataProcessor, tokenOwnershipProcessor=tokenOwnershipProcessor)
    slackClient = SlackClient(webhookUrl=os.environ['SLACK_WEBHOOK_URL'], requester=requester, defaultSender='worker', defaultChannel='notd-notifications')
    # BUG FIX: the original called database.connect() twice; once is enough.
    await database.connect()
    await workQueue.connect()
    await s3manager.connect()
    await tokenQueue.connect()
    await slackClient.post(text=f'process_token_ownerships → 🚧 started: {startTokenId}-{endTokenId}')
    try:
        currentTokenId = startTokenId
        while currentTokenId < endTokenId:
            start = currentTokenId
            end = min(currentTokenId + batchSize, endTokenId)
            currentTokenId = end
            logging.info(f'Working on {start}-{end}')
            query = TokenMetadatasTable.select() \
                .where(TokenMetadatasTable.c.tokenMetadataId >= start) \
                .where(TokenMetadatasTable.c.tokenMetadataId < end)
            tokenMetadatas = await retriever.query_token_metadatas(query=query)
            await asyncio.gather(*[process_token_ownership(tokenManager=tokenManager, registryAddress=tokenMetadata.registryAddress, tokenId=tokenMetadata.tokenId) for tokenMetadata in tokenMetadatas])
        await slackClient.post(text=f'process_token_ownerships → ✅ completed : {startTokenId}-{endTokenId}')
    except Exception as exception:
        # '❌' restores the emoji that had become mojibake (U+FFFD) in the
        # original message, matching the 🚧/✅ notifications above.
        await slackClient.post(text=f'process_token_ownerships → ❌ error: {startTokenId}-{endTokenId}\n```{str(exception)}```')
        raise  # bare raise preserves the original traceback
    finally:
        await database.disconnect()
        await workQueue.disconnect()
        await tokenQueue.disconnect()
        await s3manager.disconnect()
async def process_collection(address: str, shouldDefer: bool) -> None:
    """Re-process token metadata for every known token of one collection.

    Args:
        address: registry address of the collection to re-process.
        shouldDefer: when True, queue each update for the worker via the
            deferred path instead of running it inline.
    """
    databaseConnectionString = Database.create_psql_connection_string(username=os.environ["DB_USERNAME"], password=os.environ["DB_PASSWORD"], host=os.environ["DB_HOST"], port=os.environ["DB_PORT"], name=os.environ["DB_NAME"])
    database = Database(connectionString=databaseConnectionString)
    saver = Saver(database=database)
    retriever = Retriever(database=database)
    s3manager = S3Manager(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    workQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-work-queue')
    tokenQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-token-queue')
    requester = Requester()
    awsRequester = AwsRequester(accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    ethClient = RestEthClient(url='https://nd-foldvvlb25awde7kbqfvpgvrrm.ethereum.managedblockchain.eu-west-1.amazonaws.com', requester=awsRequester)
    blockProcessor = BlockProcessor(ethClient=ethClient)
    tokenMetadataProcessor = TokenMetadataProcessor(requester=requester, ethClient=ethClient, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    openseaApiKey = os.environ['OPENSEA_API_KEY']
    collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, openseaApiKey=openseaApiKey, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    revueApiKey = os.environ['REVUE_API_KEY']
    tokenManager = TokenManager(saver=saver, retriever=retriever, tokenQueue=tokenQueue, collectionProcessor=collectionProcessor, tokenMetadataProcessor=tokenMetadataProcessor)
    notdManager = NotdManager(blockProcessor=blockProcessor, saver=saver, retriever=retriever, workQueue=workQueue, tokenManager=tokenManager, requester=requester, revueApiKey=revueApiKey)
    await database.connect()
    await s3manager.connect()
    await workQueue.connect()
    await tokenQueue.connect()
    try:
        retrievedCollectionTokenMetadatas = await retriever.list_token_metadatas(
            fieldFilters=[
                # CONSISTENCY FIX: this lists token metadatas, so filter on the
                # token-metadatas table's column. The column key is the same
                # string ('registryAddress'), so the generated filter is
                # unchanged — the original referenced TokenTransfersTable,
                # which was misleading.
                StringFieldFilter(fieldName=TokenMetadatasTable.c.registryAddress.key, eq=address),
            ],
        )
        for tokenMetadata in retrievedCollectionTokenMetadatas:
            if shouldDefer:
                await notdManager.update_token_metadata_deferred(registryAddress=address, tokenId=tokenMetadata.tokenId, shouldForce=True)
            else:
                await notdManager.update_token_metadata(registryAddress=address, tokenId=tokenMetadata.tokenId, shouldForce=True)
    finally:
        # Always release connections, even when an update fails part-way.
        await database.disconnect()
        await s3manager.disconnect()
        await workQueue.disconnect()
        await tokenQueue.disconnect()
        await requester.close_connections()
async def reprocess_metadata(startId: Optional[int], endId: Optional[int], batchSize: Optional[int]) -> None:
    """Re-process stored token metadata rows with ids in [startId, endId).

    startId defaults to 0 and endId to one past the highest tokenMetadataId in
    the database. Only rows last updated before 2022-02-13 are re-processed
    (rows touched since then are assumed already current — see the cutoff
    filter below). Batches are fanned out with asyncio.gather.

    Raises:
        ValueError: if batchSize is None.
    """
    if batchSize is None:
        # BUG FIX: batchSize was annotated Optional but used unguarded below,
        # crashing with a TypeError mid-run; fail fast with a clear error.
        raise ValueError('batchSize must be provided')
    databaseConnectionString = Database.create_psql_connection_string(username=os.environ["DB_USERNAME"], password=os.environ["DB_PASSWORD"], host=os.environ["DB_HOST"], port=os.environ["DB_PORT"], name=os.environ["DB_NAME"])
    database = Database(connectionString=databaseConnectionString)
    saver = Saver(database=database)
    retriever = Retriever(database=database)
    s3manager = S3Manager(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    tokenQueue = SqsMessageQueue(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'], queueUrl='https://sqs.eu-west-1.amazonaws.com/097520841056/notd-token-queue')
    awsRequester = AwsRequester(accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    requester = Requester()
    ethClient = RestEthClient(url='https://nd-foldvvlb25awde7kbqfvpgvrrm.ethereum.managedblockchain.eu-west-1.amazonaws.com', requester=awsRequester)
    tokenMetadataProcessor = TokenMetadataProcessor(requester=requester, ethClient=ethClient, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    openseaApiKey = os.environ['OPENSEA_API_KEY']
    collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, openseaApiKey=openseaApiKey, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    tokenManager = TokenManager(saver=saver, retriever=retriever, tokenQueue=tokenQueue, collectionProcessor=collectionProcessor, tokenMetadataProcessor=tokenMetadataProcessor)
    await s3manager.connect()
    await tokenQueue.connect()
    await database.connect()
    try:
        if not startId:
            startId = 0
        if not endId:
            # Resolve the open end of the range from the highest stored id.
            maxTokenMetadata = await retriever.list_token_metadatas(limit=1, orders=[Order(fieldName=TokenMetadatasTable.c.tokenMetadataId.key, direction=Direction.DESCENDING)])
            logging.info(f'Resolved endId from max token metadata: {maxTokenMetadata}')
            endId = maxTokenMetadata[0].tokenMetadataId + 1
        currentId = startId
        while currentId < endId:
            start = currentId
            end = min(currentId + batchSize, endId)
            query = TokenMetadatasTable.select()
            query = query.where(TokenMetadatasTable.c.tokenMetadataId >= start)
            query = query.where(TokenMetadatasTable.c.tokenMetadataId < end)
            # Cutoff filter: skip rows already refreshed on/after 2022-02-13.
            query = query.where(TokenMetadatasTable.c.updatedDate < datetime.datetime(2022, 2, 13))
            query = query.order_by(TokenMetadatasTable.c.tokenMetadataId.asc())
            tokenMetadatasToChange = [token_metadata_from_row(row) for row in await database.execute(query=query)]
            logging.info(f'Working on {start} - {end}')
            logging.info(f'Updating {len(tokenMetadatasToChange)} transfers...')
            # NOTE: the callee's keyword really is spelled 'tokenManger'; the
            # local variable is renamed but the keyword must keep that spelling.
            await asyncio.gather(*[_reprocess_metadata_from_s3(tokenMetadataProcessor=tokenMetadataProcessor, s3manager=s3manager, tokenManger=tokenManager, tokenMetadata=tokenMetadata) for tokenMetadata in tokenMetadatasToChange])
            currentId = currentId + batchSize
    finally:
        # Always release connections, even when a batch fails.
        await s3manager.disconnect()
        await tokenQueue.disconnect()
        await awsRequester.close_connections()
        await requester.close_connections()
        await database.disconnect()
async def reprocess_collections(startId: int, endId: int, batchSize: int) -> None:
    """Re-process collections with collectionId in [startId, endId), in batches.

    Failures on individual collections are logged and skipped so one bad
    collection does not abort the whole run.

    Args:
        startId: first collectionId to process (inclusive).
        endId: last collectionId to process (exclusive).
        batchSize: number of ids per batch.
    """
    databaseConnectionString = Database.create_psql_connection_string(username=os.environ["DB_USERNAME"], password=os.environ["DB_PASSWORD"], host=os.environ["DB_HOST"], port=os.environ["DB_PORT"], name=os.environ["DB_NAME"])
    database = Database(connectionString=databaseConnectionString)
    saver = Saver(database=database)
    retriever = Retriever(database=database)
    openseaApiKey = os.environ['OPENSEA_API_KEY']
    awsRequester = AwsRequester(accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    ethClient = RestEthClient(url='https://nd-foldvvlb25awde7kbqfvpgvrrm.ethereum.managedblockchain.eu-west-1.amazonaws.com', requester=awsRequester)
    s3manager = S3Manager(region='eu-west-1', accessKeyId=os.environ['AWS_KEY'], accessKeySecret=os.environ['AWS_SECRET'])
    # BUG FIX: the original created two Requester instances and rebound the
    # name, leaking the first (it was never closed). One instance suffices.
    requester = Requester()
    collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, openseaApiKey=openseaApiKey, s3manager=s3manager, bucketName=os.environ['S3_BUCKET'])
    tokenManager = TokenManager(saver=saver, retriever=retriever, tokenQueue=None, collectionProcessor=collectionProcessor, tokenMetadataProcessor=None)
    await database.connect()
    await s3manager.connect()
    try:
        currentId = startId
        while currentId < endId:
            start = currentId
            end = min(currentId + batchSize, endId)
            logging.info(f'Working on {start} to {end}...')
            # NOTE(review): the whole batch (read + updates) runs inside one
            # transaction, matching the original's apparent scope — confirm.
            async with database.transaction():
                query = TokenCollectionsTable.select()
                query = query.where(TokenCollectionsTable.c.collectionId >= start)
                query = query.where(TokenCollectionsTable.c.collectionId < end)
                collectionsToChange = [collection_from_row(row) async for row in database.iterate(query=query)]
                logging.info(f'Updating {len(collectionsToChange)} collections...')
                for collection in collectionsToChange:
                    logging.info(f'Updating collection: {collection.address}')
                    try:
                        await tokenManager.update_collection(address=collection.address, shouldForce=True)
                    except Exception as e:
                        # Log with traceback and keep going with the next one.
                        logging.exception(f'Error processing {collection.collectionId}: {e}')
            currentId = currentId + batchSize
    finally:
        # Always release connections, even when a batch fails.
        await database.disconnect()
        await s3manager.disconnect()
        await requester.close_connections()
        await awsRequester.close_connections()
accessKeySecret=os.environ['AWS_SECRET']) ethClient = RestEthClient( url= 'https://nd-foldvvlb25awde7kbqfvpgvrrm.ethereum.managedblockchain.eu-west-1.amazonaws.com', requester=awsRequester) blockProcessor = BlockProcessor(ethClient=ethClient) requester = Requester() tokenMetadataProcessor = TokenMetadataProcessor( requester=requester, ethClient=ethClient, s3manager=s3manager, bucketName=os.environ['S3_BUCKET']) openseaApiKey = os.environ['OPENSEA_API_KEY'] collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, openseaApiKey=openseaApiKey, s3manager=s3manager, bucketName=os.environ['S3_BUCKET']) tokenOwnershipProcessor = TokenOwnershipProcessor(retriever=retriever) collectionActivityProcessor = CollectionActivityProcessor(retriever=retriever) revueApiKey = os.environ['REVUE_API_KEY'] tokenManager = TokenManager( saver=saver, retriever=retriever, tokenQueue=tokenQueue, collectionProcessor=collectionProcessor, tokenMetadataProcessor=tokenMetadataProcessor, tokenOwnershipProcessor=tokenOwnershipProcessor, collectionActivityProcessor=collectionActivityProcessor) notdManager = NotdManager(blockProcessor=blockProcessor, saver=saver,
async def main() -> None:
    """Integration checks for CollectionProcessor.retrieve_collection.

    Exercises retrieve_collection against live contracts covering the known
    edge cases (no contractURI, contractURI present, missing name/symbol,
    dynamic contractURI) and asserts the exact expected RetrievedCollection.
    """
    s3Client = boto3.client(service_name='s3', region_name='eu-west-1', aws_access_key_id=os.environ['AWS_KEY'], aws_secret_access_key=os.environ['AWS_SECRET'])
    s3manager = S3Manager(s3Client=s3Client)
    # Separate requesters: one backs the infura eth client, one the processor.
    # (The original rebound a single name, which obscured that both are used.)
    ethRequester = Requester()
    ethClient = RestEthClient(url=f'https://mainnet.infura.io/v3/{os.environ["INFURA_PROJECT_ID"]}', requester=ethRequester)
    requester = Requester()
    openseaApiKey = os.environ['OPENSEA_API_KEY']
    collectionProcessor = CollectionProcessor(requester=requester, ethClient=ethClient, s3manager=s3manager, openseaApiKey=openseaApiKey, bucketName=os.environ['S3_BUCKET'])
    # Has no contractURI
    result = await collectionProcessor.retrieve_collection('0xE3f92992BB4F0f0D173623A52b2922d65172601d')
    expected = RetrievedCollection(
        address='0xE3f92992BB4F0f0D173623A52b2922d65172601d',
        name='Knights of Degen',
        symbol='KNIGHTS',
        description='8,888 NFT Degens who love sports, betting, alpha and nfts ⚔️',
        imageUrl='https://lh3.googleusercontent.com/yxdb_995UrIS6W9YIHMfMCRsdYRqcITlBvO5w7OoUx35rkClUeq9rPCvTMXdtw_zION07O_qRZSuNfZu6R6o8bI_KmbrfThhFtA4SBc=s120',
        twitterUsername='******',
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='knights-of-degen-official',
        url='https://www.knightsofdegen.io/',
        discordUrl='https://discord.gg/knightsofdegen',
        bannerImageUrl='https://lh3.googleusercontent.com/328JHSQ2nPpzfTxpZjV5xiiBp6R17GgUdjHJ7BF0mU-SH0Ou1LiiYmcET0WeHt26LD4tVXenIvYZD7VC8jV__bqRCiA_CzmWfwg7=s2500',
        doesSupportErc721=True,
        doesSupportErc1155=False)
    assert (result == expected)
    result = await collectionProcessor.retrieve_collection('0xd153f0014db6d1F339c6340d2C9F59214355D9d7')
    expected = RetrievedCollection(
        address='0xd153f0014db6d1F339c6340d2C9F59214355D9d7',
        name='Crypto Hobos',
        symbol='CryptoHobos',
        description='Crypto Hobos Pet Partners: https://opensea.io/collection/crypto-hobos-pet-partners\n\nThe first generative NFT project to be painted by hand, Crypto Hobos fuses the fine art and profile picture genres of the NFT space.\nArtist Valiahmed Popov has destroyed the source paintings of the over 200 traits that comprise the project, and so the 8000 original Crypto Hobos now exist solely on the Ethereum blockchain in the form of ERC-721 tokens.',
        imageUrl='https://lh3.googleusercontent.com/VLkOkaJ9QuVwHahQqHUrI5ZDVEqNBLCK_xEnMv4rBZ7kciwtlj_klhcwmvi5mM77hn5uSS10uVZH_9uNjkwpshppFhmNFc3a-r3YMuI=s120',
        twitterUsername='******',
        instagramUsername='******',
        wikiUrl=None,
        openseaSlug='crypto-hobos',
        url='https://cryptohobos.io/',
        discordUrl='https://discord.gg/uFpbRYxMeA',
        bannerImageUrl='https://lh3.googleusercontent.com/kiqCwFkf5RWIl_jiJJ5hiElKTV4UYXlcATldxYZ4zr0E2XHiaDOwixPGLs-Led2DFBgQx26dd3AmhON914jllitPE0zZu2dYWF4w=s2500',
        doesSupportErc721=True,
        doesSupportErc1155=False)
    assert (result == expected)
    # Has contractURI
    result = await collectionProcessor.retrieve_collection('0x3E3bF91740a8363D9433c8d3535B9b3C9E55f669')
    expected = RetrievedCollection(
        address='0x3E3bF91740a8363D9433c8d3535B9b3C9E55f669',
        name='Civit Illustrations',
        symbol='CIV20',
        description='Series of digital illustrations and animations',
        imageUrl='QmXKBRxpTFdRsqLDRUxXNoTJRChwcEQAqQVF8LXKaXmMTf',
        twitterUsername='******',
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='civit-illustrations',
        url='https://app.rarible.com/collection/0x3e3bf91740a8363d9433c8d3535b9b3c9e55f669',
        discordUrl='https://discord.gg/CIVIT',
        bannerImageUrl='https://lh3.googleusercontent.com/zJOeUEKzl2Rs2JiGOxlZQR5WUru-6I-a8n_sp22USaBVxt0LxeYTe-xSfQhjIK8N6u1SyRaEfRWnyp9j-nDpPfeJWxNgSeDwziAI6Q=s2500',
        doesSupportErc721=True,
        doesSupportErc1155=False)
    assert (result == expected)
    result = await collectionProcessor.retrieve_collection('0xDb68Df0e86Bc7C6176E6a2255a5365f51113BCe8')
    expected = RetrievedCollection(
        address='0xDb68Df0e86Bc7C6176E6a2255a5365f51113BCe8',
        name='Rope Makers United',
        symbol='RMU',
        description='Rope Makers United Storefront',
        imageUrl='https://rope.lol/images/RopeLogo3D.gif',
        twitterUsername='******',
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='rope-makers-united',
        url='https://rope.lol',
        discordUrl='https://discord.gg/kWE5G2',
        bannerImageUrl=None,
        doesSupportErc721=False,
        doesSupportErc1155=True)
    assert (result == expected)
    # Has no Name or Symbol
    result = await collectionProcessor.retrieve_collection('0x12F01AF7FBEAFB088E1d3384BFf67390f41E8404')
    expected = RetrievedCollection(
        address='0x12F01AF7FBEAFB088E1d3384BFf67390f41E8404',
        name='FVCK_BAEIGE//',
        symbol=None,
        description='Collaborative contract between Baeige and Fvckrender',
        imageUrl='https://lh3.googleusercontent.com/BJECOBeDJqpaVLWgxza8DYaP9SQGq6h7kLFsOUAlTk3G7naycl4GsjjALsnCPayhHTlctEkvChvpxhGWfDh0hiH2-xd9eUU_yBqmYQ=s120',
        twitterUsername=None,
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='unidentified-contract-b46angemew',
        url=None,
        discordUrl=None,
        bannerImageUrl='https://lh3.googleusercontent.com/BJECOBeDJqpaVLWgxza8DYaP9SQGq6h7kLFsOUAlTk3G7naycl4GsjjALsnCPayhHTlctEkvChvpxhGWfDh0hiH2-xd9eUU_yBqmYQ=s2500',
        doesSupportErc721=False,
        doesSupportErc1155=True)
    assert (result == expected)
    result = await collectionProcessor.retrieve_collection('0x236E7Af5FcAb94770E621c97a1E58b4d0143E95B')
    expected = RetrievedCollection(
        address='0x236E7Af5FcAb94770E621c97a1E58b4d0143E95B',
        name="Ethernity's Master Collection",
        symbol=None,
        description="Ethernity's Master Collection of Exclusive NFTs on Opensea.",
        imageUrl='https://lh3.googleusercontent.com/GId53RudYB4l7e6Irj3_5JGyC3bwybdotwXAEgGaq4KCqjtfQChbolTQSJRYzb1bYrHM_G9xH4Il9vh_CM3ZPtMTwSgtkU5Wu7RVVQ=s120',
        twitterUsername='******',
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='ethernity-master',
        url='http://ethernity.io',
        discordUrl='https://discord.gg/EthernityChain',
        bannerImageUrl='https://lh3.googleusercontent.com/xWwplVCKh2mIkbsHObxlVWPkW-kT0vlptGihpjaqyU4zTvAD90BwkcnTe25sPQuNlreb3cfT_LgRlYiuqLVJD0YYYWipq1s42A0T=s2500',
        doesSupportErc721=False,
        doesSupportErc1155=True)
    assert (result == expected)
    # dynamic contractURI
    result = await collectionProcessor.retrieve_collection('0x700CE4AB68aD109224Be3aC85f5A99213bf04f67')
    expected = RetrievedCollection(
        address='0x700CE4AB68aD109224Be3aC85f5A99213bf04f67',
        name='Microdoses',
        symbol='DOSES',
        description='Smaller graphics and topical comics from Killer Acid, in small editions. ',
        imageUrl='ipfs://ipfs/QmQ52XALRGEfRKCjDryeqXkq5nnSnDuRaiGaLwhs3R77Dz',
        twitterUsername=None,
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='microdoses',
        url='https://app.rarible.com/collection/0x700ce4ab68ad109224be3ac85f5a99213bf04f67',
        discordUrl=None,
        bannerImageUrl=None,
        doesSupportErc721=False,
        doesSupportErc1155=True)
    # BUG FIX: this assert (and the two below) was missing, so these three
    # expected values were built but never checked.
    assert (result == expected)
    result = await collectionProcessor.retrieve_collection('0x48531836e57bc28d6fee33840f43826b889aa2fc')
    expected = RetrievedCollection(
        address='0x48531836e57bc28d6fee33840f43826b889aa2fc',
        name='Super Crypto Man',
        symbol='PIPPI',
        description='New generation of Japanese sticker culture × NFT.Making 3D Collective NFT with Cryptoworld as a theme. ',
        imageUrl='ipfs://ipfs/QmUvzee6ZvY41S5W842fm8wiFr8ueqQSDimfLQhHyp76Tk',
        twitterUsername='******',
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='super-crypto-man',
        url='https://app.rarible.com/collection/0x48531836e57bc28d6fee33840f43826b889aa2fc',
        discordUrl=None,
        bannerImageUrl='https://lh3.googleusercontent.com/TvePJaCzrlt4m-kNSiD0HUkryq5wTYlU0vkugYC9F0CBhThEO2PGCN0m6BRawoNDF2RWNn3E0AixFsSefikyr-5tmWIZkLHoPmxQ-w=s2500',
        doesSupportErc721=False,
        doesSupportErc1155=True)
    assert (result == expected)
    result = await collectionProcessor.retrieve_collection('0xDD97c0b7ED3DC93d09A681dE0E7228b5dfEAE463')
    expected = RetrievedCollection(
        address='0xDD97c0b7ED3DC93d09A681dE0E7228b5dfEAE463',
        name='Beanterra',
        symbol='BEANEL',
        description='Beanels of Beanterra - Diverse and lovable creatures that roam the realm filled with lush landscapes, snow-capped mountains, and deep blue oceans.',
        imageUrl='https://lh3.googleusercontent.com/ZT-Zq2CWSG0mBzzeeTCsAdMFVyu3au6sPyN_v3CVYXguuI19EL0JrB9QVgg-3tFMJxvg2FJbtpjM8iz5n77tC_gg4t37y-9UP7dQmmA=s120',
        twitterUsername=None,
        instagramUsername=None,
        wikiUrl=None,
        openseaSlug='beanterra',
        url='https://beanterra.io/',
        discordUrl='https://discord.gg/beanterra',
        bannerImageUrl='https://lh3.googleusercontent.com/tBVqF5do5ZpKVqqM6qgcyU44u_Utin0JAH_ygwaqVXMDq3KtsfxG10xXtYliGz6rhjEfngx4eb8LA8IcHn7U2oH_5WlRt9JIUZea7Oc=s2500',
        doesSupportErc721=True,
        doesSupportErc1155=False)
    assert (result == expected)