async def forget(
    account: Account,
    hashes: List[str],
    reason: Optional[str],
    storage_engine: StorageEnum = StorageEnum.storage,
    channel: str = settings.DEFAULT_CHANNEL,
    address: Optional[str] = settings.ADDRESS_TO_USE,
    session: Optional[ClientSession] = None,
    api_server: str = settings.API_HOST,
):
    """Publish a FORGET message asking the network to drop the given message hashes.

    :param account: Account used to sign the message.
    :param hashes: Item hashes of the messages to forget.
    :param reason: Optional human-readable justification for the forget.
    :param storage_engine: Storage backend passed through to ``submit``.
    :param channel: Channel the FORGET message is published on.
    :param address: Sender address; defaults to the account's own address.
    :param session: Optional HTTP session to reuse.
    :param api_server: URL of the Aleph API node.
    :return: The signed, broadcast message dict returned by ``submit``.
    """
    sender = address if address else account.get_address()

    forget_content = ForgetContent(
        hashes=hashes,
        reason=reason,
        address=sender,
        time=time.time(),
    )

    # FORGET payloads are tiny, so they are always submitted inline.
    return await submit(
        account,
        content=forget_content.dict(exclude_none=True),
        message_type=MessageType.forget,
        channel=channel,
        api_server=api_server,
        storage_engine=storage_engine,
        session=session,
        inline=True,
    )
async def create_aggregate(
    account: Account,
    key,
    content,
    address: Optional[str] = settings.ADDRESS_TO_USE,
    channel: str = settings.DEFAULT_CHANNEL,
    session: Optional[ClientSession] = None,
    api_server: str = settings.API_HOST,
    inline: bool = True,
):
    """Publish an AGGREGATE message updating the aggregate stored under ``key``.

    :param account: Account used to sign the message.
    :param key: Aggregate key to update.
    :param content: New content to merge into the aggregate.
    :param address: Sender address; defaults to the account's own address.
    :param channel: Channel the message is published on.
    :param session: Optional HTTP session to reuse.
    :param api_server: URL of the Aleph API node.
    :param inline: Whether to attempt inlining the content in the message.
    :return: The signed, broadcast message dict returned by ``submit``.
    """
    sender = address if address else account.get_address()

    aggregate_content = AggregateContent(
        key=key,
        address=sender,
        content=content,
        time=time.time(),
    )

    return await submit(
        account=account,
        content=aggregate_content.dict(exclude_none=True),
        message_type=MessageType.aggregate,
        channel=channel,
        api_server=api_server,
        session=session,
        inline=inline,
    )
async def create_post(
    account: Account,
    post_content,
    post_type: str,
    ref: Optional[str] = None,
    address: Optional[str] = settings.ADDRESS_TO_USE,
    channel: str = settings.DEFAULT_CHANNEL,
    session: Optional[ClientSession] = None,
    api_server: str = settings.API_HOST,
    inline: bool = True,
    storage_engine: StorageEnum = StorageEnum.storage,
):
    """Publish a POST message carrying arbitrary user content.

    :param account: Account used to sign the message.
    :param post_content: Payload of the post.
    :param post_type: User-defined type tag of the post.
    :param ref: Optional reference to another item (e.g. to amend a post).
    :param address: Sender address; defaults to the account's own address.
    :param channel: Channel the message is published on.
    :param session: Optional HTTP session to reuse.
    :param api_server: URL of the Aleph API node.
    :param inline: Whether to attempt inlining the content in the message.
    :param storage_engine: Storage backend used when the content is not inlined.
    :return: The signed, broadcast message dict returned by ``submit``.
    """
    sender = address if address else account.get_address()

    post = PostContent(
        type=post_type,
        address=sender,
        content=post_content,
        time=time.time(),
        ref=ref,
    )

    return await submit(
        account=account,
        content=post.dict(exclude_none=True),
        message_type=MessageType.post,
        channel=channel,
        api_server=api_server,
        session=session,
        inline=inline,
        storage_engine=storage_engine,
    )
async def create_aggregate_on_target(
    account: Account,
    key: str,
    content: Dict,
    emitter_node: str,
    receiver_node: str,
    channel="INTEGRATION_TESTS",
):
    """Create an aggregate on the emitter node and verify it propagates to the receiver.

    :param account: Account used to sign and send the aggregate.
    :param key: Aggregate key to create/update.
    :param content: Aggregate content; every key/value pair is checked on the receiver.
    :param emitter_node: API server the aggregate is published through.
    :param receiver_node: API server polled until the aggregate appears.
    :param channel: Channel the aggregate is published on.
    :raises AssertionError: If the emitted message or the propagated aggregate
        does not match the expected values.
    """
    aggregate_dict = await create_aggregate(
        account=account,
        key=key,
        content=content,
        # Fix: forward the channel parameter instead of hard-coding
        # "INTEGRATION_TESTS" — the assertion below compares against `channel`.
        channel=channel,
        api_server=emitter_node,
    )

    assert aggregate_dict["sender"] == account.get_address()
    assert aggregate_dict["channel"] == channel

    # Note: lots of duplicates in the response
    item_content = json.loads(aggregate_dict["item_content"])
    assert item_content["key"] == key
    assert item_content["content"] == content
    assert item_content["address"] == account.get_address()

    assert aggregate_dict["content"]["key"] == key
    assert aggregate_dict["content"]["address"] == account.get_address()
    assert aggregate_dict["content"]["content"] == content

    aggregate_from_receiver = await try_until(
        fetch_aggregate,
        lambda aggregate: aggregate is not None,
        timeout=5,
        address=account.get_address(),
        key=key,
        api_server=receiver_node,
    )

    # Fix: use a distinct loop variable — the original shadowed the `key` parameter.
    for field, value in content.items():
        assert field in aggregate_from_receiver
        assert aggregate_from_receiver[field] == value
async def submit(
    account: Account,
    content: Dict,
    message_type: str,
    channel: str = "IOT_TEST",
    api_server: str = settings.API_HOST,
    storage_engine: StorageEnum = StorageEnum.storage,
    session: Optional[ClientSession] = None,
    inline: bool = True,
):
    """Sign and broadcast an Aleph message carrying the given content.

    The serialized content is embedded inline in the message when ``inline``
    is requested and it fits under the size limit; otherwise it is pushed to
    the chosen storage engine and referenced by hash only.

    :param account: Account used to sign the message.
    :param content: JSON-serializable message payload.
    :param message_type: Aleph message type (POST, AGGREGATE, FORGET, ...).
    :param channel: Channel the message is published on.
    :param api_server: URL of the Aleph API node to broadcast through.
    :param storage_engine: Storage backend used when the content is not inlined.
    :param session: Optional HTTP session to reuse.
    :param inline: Whether to attempt inlining the content in the message.
    :return: The signed message dict, with the original content attached
        under the "content" key for caller convenience.
    """
    message: Dict[str, Any] = {
        "chain": account.CHAIN,
        "channel": channel,
        "sender": account.get_address(),
        "type": message_type,
        "time": time.time(),
    }

    # Compact separators keep the serialized payload as small as possible.
    item_content: str = json.dumps(content, separators=(",", ":"))

    # 50000 bytes is the maximum size for inline content.
    if inline and (len(item_content) < 50000):
        message["item_content"] = item_content
        # Hash the local serialization directly rather than re-reading the dict.
        message["item_hash"] = hashlib.sha256(item_content.encode("utf-8")).hexdigest()
    elif storage_engine == StorageEnum.ipfs:
        message["item_hash"] = await ipfs_push(
            content, session=session, api_server=api_server
        )
    else:
        assert storage_engine == StorageEnum.storage
        message["item_hash"] = await storage_push(
            content, session=session, api_server=api_server
        )

    message = await account.sign_message(message)
    await broadcast(message, session=session, api_server=api_server)

    # Attach the content to the returned object so users can access it
    # without re-fetching it from the network.
    message["content"] = content
    return message
async def create_and_forget_post(
    account: Account, emitter_node: str, receiver_node: str, channel=TEST_CHANNEL
) -> str:
    """Create a post on the emitter node, forget it, and verify the forget propagates.

    :param account: Account used to sign the post and the forget message.
    :param emitter_node: API server the post/forget are published through.
    :param receiver_node: API server polled for propagation.
    :param channel: Channel both the post and the forget are published on.
    :return: The item hash of the forgotten post.
    :raises AssertionError: If the forget message or the forgotten post does
        not match the expected values.
    """

    async def wait_matching_posts(
        item_hash: str, condition: Callable[[Dict], bool], timeout: int = 5
    ):
        # Poll the receiver until `condition` holds for the fetched posts.
        return await try_until(
            get_posts,
            condition,
            timeout=timeout,
            hashes=[item_hash],
            api_server=receiver_node,
        )

    create_post_response = await create_post(
        account=account,
        post_content="A considerate and politically correct post.",
        post_type="POST",
        # Fix: forward the channel parameter instead of hard-coding
        # "INTEGRATION_TESTS" — the forget below uses `channel`, and the post
        # and its forget must be published on the same channel.
        channel=channel,
        session=None,
        api_server=emitter_node,
    )

    # Wait for the message to appear on the receiver. We don't check the values,
    # they're checked in other integration tests.
    get_post_response = await wait_matching_posts(
        create_post_response["item_hash"],
        lambda response: len(response["posts"]) > 0,
    )
    print(get_post_response)

    post_hash = create_post_response["item_hash"]
    reason = "This well thought-out content offends me!"
    forget_response = await forget(
        account,
        hashes=[post_hash],
        reason=reason,
        channel=channel,
        api_server=emitter_node,
    )
    assert forget_response["sender"] == account.get_address()
    assert forget_response["content"]["reason"] == reason
    assert forget_response["content"]["hashes"] == [post_hash]
    print(forget_response)

    # Wait until the message is forgotten
    forgotten_posts = await wait_matching_posts(
        create_post_response["item_hash"],
        lambda response: "forgotten_by" in response["posts"][0],
        timeout=15,
    )

    assert len(forgotten_posts["posts"]) == 1
    forgotten_post = forgotten_posts["posts"][0]
    assert forgotten_post["forgotten_by"] == [forget_response["item_hash"]]
    assert forgotten_post["item_content"] is None
    print(forgotten_post)

    return post_hash
async def create_program(
    account: Account,
    program_ref: str,
    entrypoint: str,
    runtime: str,
    storage_engine: StorageEnum = StorageEnum.storage,
    channel: str = settings.DEFAULT_CHANNEL,
    address: Optional[str] = settings.ADDRESS_TO_USE,
    session: Optional[ClientSession] = None,
    api_server: str = settings.API_HOST,
    memory: int = settings.DEFAULT_VM_MEMORY,
    vcpus: int = settings.DEFAULT_VM_VCPUS,
    timeout_seconds: float = settings.DEFAULT_VM_TIMEOUT,
    encoding: Encoding = Encoding.zip,
    # Fix: the default is None, so the annotation must be Optional.
    volumes: Optional[List[Dict]] = None,
    subscriptions: Optional[List[Dict]] = None,
):
    """Publish a PROGRAM message registering a VM function on the network.

    :param account: Account used to sign the message.
    :param program_ref: Item hash of the stored program code.
    :param entrypoint: Entrypoint of the program (e.g. "main:app").
    :param runtime: Item hash of the runtime to execute the program with.
    :param storage_engine: Storage backend passed through to ``submit``.
    :param channel: Channel the message is published on.
    :param address: Sender address; defaults to the account's own address.
    :param session: Optional HTTP session to reuse.
    :param api_server: URL of the Aleph API node.
    :param memory: Memory allocated to the VM.
    :param vcpus: Number of virtual CPUs allocated to the VM.
    :param timeout_seconds: Maximum execution time of the VM.
    :param encoding: Encoding of the program code (zip, ...).
    :param volumes: Optional volume specs mounted into the VM, e.g.
        ``{"mount": "/opt/venv", "ref": "<item hash>", "use_latest": False}`` or
        ``{"mount": "/var/lib/sqlite", "name": "database", "persistence": "host",
        "size_mib": 5}``.
    :param subscriptions: Optional Aleph message subscriptions that trigger the VM.
    :return: The signed, broadcast message dict returned by ``submit``.
    """
    volumes = volumes if volumes is not None else []
    address = address or account.get_address()

    # TODO: Check that program_ref, runtime and data_ref exist

    ## Register the different ways to trigger a VM
    if subscriptions:
        # Trigger on HTTP calls and on Aleph message subscriptions.
        triggers = {"http": True, "message": subscriptions}
    else:
        # Trigger on HTTP calls.
        triggers = {"http": True}

    content = ProgramContent(
        **{
            "type": "vm-function",
            "address": address,
            "allow_amend": False,
            "code": {
                "encoding": encoding,
                "entrypoint": entrypoint,
                "ref": program_ref,
                "use_latest": True,
            },
            "on": triggers,
            "environment": {
                "reproducible": False,
                "internet": True,
                "aleph_api": True,
            },
            "resources": {
                "vcpus": vcpus,
                "memory": memory,
                "seconds": timeout_seconds,
            },
            "runtime": {
                "ref": runtime,
                "use_latest": True,
                "comment": "Aleph Alpine Linux with Python 3.8",
            },
            "volumes": volumes,
            "time": time.time(),
        }
    )

    return await submit(
        account=account,
        content=content.dict(exclude_none=True),
        message_type=MessageType.program,
        channel=channel,
        api_server=api_server,
        storage_engine=storage_engine,
        session=session,
    )
async def create_store(
    account: Account,
    address=settings.ADDRESS_TO_USE,
    file_content: Optional[bytes] = None,
    file_hash: Optional[str] = None,
    guess_mime_type: bool = False,
    ref: Optional[str] = None,
    storage_engine=StorageEnum.storage,
    extra_fields: Optional[dict] = None,
    channel: str = settings.DEFAULT_CHANNEL,
    session: Optional[ClientSession] = None,
    api_server: str = settings.API_HOST,
):
    """Publish a STORE message referencing a file pushed to storage or IPFS.

    Exactly one of ``file_content`` or ``file_hash`` must be provided: the raw
    content is uploaded first when no hash is given.

    :param account: Account used to sign the message.
    :param address: Sender address; defaults to the account's own address.
    :param file_content: Raw bytes to upload when no hash is provided.
    :param file_hash: Hash of an already-stored file.
    :param guess_mime_type: Whether to detect the MIME type from the content.
    :param ref: Optional reference to another item.
    :param storage_engine: Storage backend ("storage" or "ipfs").
    :param extra_fields: Additional fields merged into the STORE content.
    :param channel: Channel the message is published on.
    :param session: Optional HTTP session to reuse.
    :param api_server: URL of the Aleph API node.
    :return: The signed, broadcast message dict returned by ``submit``.
    :raises ValueError: If neither file_content nor file_hash is given, or the
        storage engine is unknown.
    """
    address = address or account.get_address()
    extra_fields = extra_fields or {}

    if file_hash is None:
        if file_content is None:
            raise ValueError("Please specify at least a file_content or a file_hash")

        if storage_engine == StorageEnum.storage:
            file_hash = await storage_push_file(
                file_content, session=session, api_server=api_server
            )
        elif storage_engine == StorageEnum.ipfs:
            file_hash = await ipfs_push_file(
                file_content, session=session, api_server=api_server
            )
        else:
            raise ValueError(f"Unknown storage engine: '{storage_engine}'")

    # Fix: the original assertion message was inverted ("should be empty").
    assert file_hash, "File hash should not be empty"

    # Guess the MIME type only when the optional `magic` dependency is
    # available and the caller did not already provide one.
    if (
        magic is not None
        and file_content
        and guess_mime_type
        and ("mime_type" not in extra_fields)
    ):
        extra_fields["mime_type"] = magic.from_buffer(file_content, mime=True)

    if ref:
        extra_fields["ref"] = ref

    # `extra_fields` is always a dict at this point, so merge unconditionally
    # (the original had a dead `if extra_fields is not None` check).
    values = {
        "address": address,
        "item_type": storage_engine,
        "item_hash": file_hash,
        "time": time.time(),
        **extra_fields,
    }

    content = StoreContent(**values)

    return await submit(
        account=account,
        content=content.dict(exclude_none=True),
        message_type=MessageType.store,
        channel=channel,
        api_server=api_server,
        session=session,
        inline=True,
    )