def file_upload_start_v1(
    customer: str,
    remote_path: str,
    local_path: str,
    open_share=True,
    wait_result=True,
    attempts=30,
    delay=3,
    wait_job_finish=True,
    wait_packets_finish=True,
    wait_transfers_finish=True,
):
    """Kick off an upload of `local_path` to `remote_path` on node `customer`.

    When the corresponding flags are set, blocks until the upload job queue
    is drained and all outgoing packets/transfers are finished.
    Returns the JSON payload of the last API response that was made.
    """
    resp = request_post(
        customer,
        'file/upload/start/v1',
        json={
            'remote_path': remote_path,
            'local_path': local_path,
            'wait_result': '1' if wait_result else '0',
            'open_share': '1' if open_share else '0',
        },
        timeout=20,
    )
    assert resp.status_code == 200
    payload = resp.json()
    print('\nfile/upload/start/v1 [%r] remote_path=%s local_path=%s : %s\n' % (
        customer, remote_path, local_path, pprint.pformat(payload), ))
    assert payload['status'] == 'OK', payload
    if wait_job_finish:
        # poll file/upload/v1 until no pending or running upload tasks remain
        for _ in range(attempts):
            resp = request_get(customer, 'file/upload/v1', timeout=20)
            assert resp.status_code == 200
            payload = resp.json()
            print('\nfile/upload/v1 [%s] : %s\n' % (customer, pprint.pformat(payload), ))
            assert payload['status'] == 'OK', payload
            if not payload['result']['pending'] and not payload['result']['running']:
                break
            time.sleep(delay)
        else:
            assert False, 'some uploading tasks are still running on [%s]' % customer
    if wait_packets_finish:
        packet_list_v1(customer, wait_all_finish=True, attempts=attempts, delay=delay)
    if wait_transfers_finish:
        transfer_list_v1(customer, wait_all_finish=True, attempts=attempts, delay=delay)
    return resp.json()
def group_join_v1(customer: str, group_key_id):
    """Join node `customer` into the group identified by `group_key_id`."""
    reply = request_post(customer, 'group/join/v1', json={
        'group_key_id': group_key_id,
    }, timeout=60)
    assert reply.status_code == 200
    result = reply.json()
    print('\ngroup/join/v1 [%s] group_key_id=%r : %s\n' % (customer, group_key_id, pprint.pformat(result)))
    assert result['status'] == 'OK', result
    return result
def share_create_v1(customer: str, key_size=1024):
    """Create a new share key on `customer` and return the generated key id."""
    reply = request_post(customer, 'share/create/v1', json={
        'key_size': key_size,
    }, timeout=20)
    assert reply.status_code == 200
    result = reply.json()
    print('share/create/v1 [%s] : %s\n' % (customer, pprint.pformat(result)))
    assert result['status'] == 'OK', result
    return result['result']['key_id']
def share_open_v1(customer: str, key_id):
    """Open the share identified by `key_id` on node `customer`."""
    reply = request_post(customer, 'share/open/v1', json={
        'key_id': key_id,
    }, timeout=60)
    assert reply.status_code == 200
    result = reply.json()
    print('share/open/v1 [%s] key_id=%r : %s\n' % (customer, key_id, pprint.pformat(result)))
    assert result['status'] == 'OK', result
    return result
def config_set_v1(node, key, value):
    """Set configuration option `key` to `value` on `node`."""
    reply = request_post(node, 'config/set/v1', json={
        'key': key,
        'value': value,
    }, timeout=20, )
    assert reply.status_code == 200
    result = reply.json()
    dbg('config/set/v1 [%s] key=%r value=%r : %s\n' % (
        node, key, value, pprint.pformat(result)))
    assert result['status'] == 'OK', result
    return result
def file_create_v1(node, remote_path):
    """Register a new (still empty) catalog item at `remote_path` on `node`."""
    reply = request_post(node, 'file/create/v1', json={'remote_path': remote_path}, timeout=20)
    assert reply.status_code == 200
    result = reply.json()
    print('file/create/v1 [%s] remote_path=%s : %s\n' % (node, remote_path, pprint.pformat(result), ))
    assert result['status'] == 'OK', result
    return result
def service_stop_v1(node, service_name, timeout=10):
    """Stop network service `service_name` on `node`."""
    reply = request_post(node, 'service/stop/%s/v1' % service_name, json={}, timeout=timeout)
    assert reply.status_code == 200
    result = reply.json()
    print('service/stop/%s/v1 [%s]: %s\n' % (service_name, node, pprint.pformat(result), ))
    assert result['status'] == 'OK', result
    return result
def friend_add_v1(node, friend_idurl, friend_alias=''):
    """Add `friend_idurl` (optionally with alias) to the friend list of `node`."""
    reply = request_post(node, 'friend/add/v1', json={
        'idurl': friend_idurl,
        'alias': friend_alias,
    }, timeout=20, )
    assert reply.status_code == 200
    result = reply.json()
    dbg('friend/add/v1 [%s] idurl=%r alias=%r : %s\n' % (
        node, friend_idurl, friend_alias, pprint.pformat(result), ))
    assert result['status'] == 'OK', result
    return result
def group_create_v1(customer: str, key_size=1024, label=''):
    """Create a new message group on `customer` and return its group key id."""
    reply = request_post(customer, 'group/create/v1', json={
        'key_size': key_size,
        'label': label,
    }, timeout=20)
    assert reply.status_code == 200
    result = reply.json()
    print('\ngroup/create/v1 [%s] : %s\n' % (customer, pprint.pformat(result)))
    assert result['status'] == 'OK', result
    return result['result']['group_key_id']
def message_send_group_v1(node, group_key_id, data, timeout=120):
    """Send `data` into the group identified by `group_key_id` from `node`.

    Logs the request before and the response after the call; asserts both
    HTTP 200 and API-level 'OK' status. Returns the parsed JSON reply.
    """
    dbg('message/send/group/v1 [%s] group_key_id=%r data=%r' % (node, group_key_id, data, ))
    response = request_post(node, 'message/send/group/v1', json={
        'group_key_id': group_key_id,
        'data': data,
    }, timeout=timeout, )
    assert response.status_code == 200
    # fix: dropped a spurious f-string prefix on a %-formatted literal
    dbg('message/send/group/v1 [%s] : %s\n' % (
        node, pprint.pformat(response.json())))
    assert response.json()['status'] == 'OK', response.json()
    return response.json()
def message_send_v1(node, recipient, data, timeout=30):
    """Send a private message `data` from `node` to `recipient`.

    NOTE(review): this definition is shadowed by a later re-definition of
    `message_send_v1` in this file (the variant with `expect_consumed`), so
    it is effectively dead code — confirm and remove one of the two.
    """
    response = request_post(
        node,
        'message/send/v1',
        json={
            'id': recipient,
            'data': data,
            'timeout': timeout,
        },
        timeout=20,
    )
    assert response.status_code == 200
    # fix: dropped a spurious f-string prefix on a %-formatted literal
    print('\nmessage/send/v1 [%s] : %s\n' % (node, pprint.pformat(response.json())))
    assert response.json()['status'] == 'OK', response.json()
    return response.json()
def message_send_v1(node, recipient, data, timeout=30, expect_consumed=True):
    """Send a private message `data` from `node` to `recipient`.

    :param timeout: API-level delivery timeout; HTTP timeout is one second longer.
    :param expect_consumed: when not None, assert that the reply's
        'consumed' flag is exactly this boolean (identity check is safe here
        because JSON booleans decode to the True/False singletons).
    """
    response = request_post(node, 'message/send/v1', json={
        'id': recipient,
        'data': data,
        'timeout': timeout,
    }, timeout=timeout+1, )
    assert response.status_code == 200
    # fix: dropped a spurious f-string prefix on a %-formatted literal
    dbg('message/send/v1 [%s] : %s\n' % (
        node, pprint.pformat(response.json())))
    assert response.json()['status'] == 'OK', response.json()
    if expect_consumed is not None:
        assert response.json()['result']['consumed'] is expect_consumed
    return response.json()
def file_download_start_v1(customer: str, remote_path: str, destination: str, open_share=True, wait_result=True,
                           download_attempts=1, wait_finish_attempts=20, delay=5, wait_tasks_finish=True):
    """Start downloading `remote_path` into folder `destination` on `customer`.

    Retries the start call up to `download_attempts` times on transient
    "failed ... downloading" errors; an "already scheduled" error counts as
    success. When `wait_tasks_finish` is set, polls until the download task
    list is empty. Returns the JSON payload of the last API response.
    """
    for _ in range(download_attempts):
        response = request_post(customer, 'file/download/start/v1', json={
            'remote_path': remote_path,
            'destination_folder': destination,
            'wait_result': '1' if wait_result else '0',
            'open_share': '1' if open_share else '0',
        }, timeout=30, )
        assert response.status_code == 200
        dbg('file/download/start/v1 [%s] remote_path=%s destination_folder=%s : %s\n' % (
            customer, remote_path, destination, pprint.pformat(response.json()), ))
        if response.json()['status'] == 'OK':
            break
        # cleanup: removed commented-out duplicate print statements;
        # replaced str.count() truthiness with idiomatic membership tests
        first_error = response.json()['errors'][0]
        if 'downloading' in first_error and 'already scheduled' in first_error:
            # a download of this path is already in progress — treat as started
            break
        if 'failed' in first_error and 'downloading' in first_error:
            # transient failure — retry after a short pause
            time.sleep(delay)
        else:
            assert False, response.json()
    else:
        assert False, 'failed to start downloading uploaded file on [%r]: %r' % (customer, response.json(), )
    if wait_tasks_finish:
        for _ in range(wait_finish_attempts):
            response = request_get(customer, 'file/download/v1', timeout=20)
            assert response.status_code == 200
            dbg('file/download/v1 [%s] : %s\n' % (customer, pprint.pformat(response.json()), ))
            assert response.json()['status'] == 'OK', response.json()
            if len(response.json()['result']) == 0:
                break
            time.sleep(delay)
        else:
            assert False, 'some downloading tasks are still running on [%s]' % customer
    return response.json()
def dht_value_set_v1(
    node,
    key,
    new_data,
    record_type='skip_validation',
):
    """Write `new_data` into the DHT under `key` via `node` and verify the echo.

    Asserts that the write succeeded and that the value echoed back by the
    API matches exactly what was submitted.
    """
    reply = request_post(
        node,
        'dht/value/set/v1',
        json={
            'key': key,
            'record_type': record_type,
            'value': {
                'data': new_data,
                'type': record_type,
                'key': key,
            },
        },
        timeout=20,
    )
    assert reply.status_code == 200
    result = reply.json()
    print('dht/value/set/v1 [%s] key=%s : %s\n' % (node, key, pprint.pformat(result), ))
    assert result['status'] == 'OK', result
    assert len(result['result']) > 0, result
    assert result['result']['write'] == 'success', result
    assert result['result']['key'] == key, result
    stored = result['result']['value']
    assert stored['data'] == new_data, result
    assert stored['key'] == key, result
    assert stored['type'] == record_type, result
    assert len(result['result']['closest_nodes']) > 0, result
    return result
def test_customer_1_replace_supplier_at_position_0():
    """Replace customer-1's supplier at position 0 and verify the rotation.

    Uploads/downloads a file first so customer-1 has distributed data, then
    asks the API to replace supplier 0 and polls until the supplier at
    position 0 changes while position 1 stays intact.
    """
    if os.environ.get('RUN_TESTS', '1') == '0':
        return pytest.skip()  # @UndefinedVariable
    # let all nodes drain their packet queues before touching the supplier set
    for supplier_node in ('supplier-%d' % i for i in range(1, 9)):
        packet_list_v1(supplier_node, wait_all_finish=True)
    packet_list_v1('customer-1', wait_all_finish=True)
    transfer_list_v1('customer-1', wait_all_finish=True)
    supplier_list_v1('customer-1', expected_min_suppliers=2, expected_max_suppliers=2)
    # verify DHT supplier records are consistent from several observers
    for observers in (
        ['customer-1@id-a_8084', 'customer-3@id-a_8084', ],
        ['customer-3@id-a_8084', 'customer-1@id-a_8084', ],
        ['supplier-2@id-a_8084', 'customer-3@id-a_8084', 'customer-1@id-a_8084', ],
    ):
        supplier_list_dht_v1(
            customer_id='customer-1@id-a_8084',
            observers_ids=observers,
            expected_ecc_map='ecc/2x2',
            expected_suppliers_number=2,
        )
    share_id_customer_1 = share_create_v1('customer-1')
    filename = 'file_to_be_distributed.txt'
    virtual_filename = filename
    volume_customer_1 = '/customer_1'
    # BUG FIX: the local path previously hard-coded "(unknown)" instead of
    # interpolating `filename`, so the file was written to the wrong path
    filepath_customer_1 = f'{volume_customer_1}/{filename}'
    remote_path_customer_1 = f'{share_id_customer_1}:{virtual_filename}'
    run_ssh_command_and_wait('customer-1', f'echo customer_1 > {filepath_customer_1}')
    file_create_v1('customer-1', remote_path_customer_1)
    file_upload_start_v1('customer-1', remote_path_customer_1, filepath_customer_1)
    service_info_v1('customer-1', 'service_shared_data', 'ON')
    packet_list_v1('customer-1', wait_all_finish=True)
    transfer_list_v1('customer-1', wait_all_finish=True)
    file_download_start_v1('customer-1', remote_path=remote_path_customer_1, destination=volume_customer_1)
    response_before = request_get('customer-1', 'supplier/list/v1')
    assert response_before.status_code == 200
    supplier_list_before = response_before.json()['result']
    suppliers_before = [x['global_id'] for x in supplier_list_before]  # dropped redundant list() wrapper
    assert len(suppliers_before) == 2
    response = request_post('customer-1', 'supplier/replace/v1', json={'position': '0'})
    assert response.status_code == 200
    for supplier_node in ('supplier-%d' % i for i in range(1, 9)):
        packet_list_v1(supplier_node, wait_all_finish=True)
    supplier_list_v1('customer-1', expected_min_suppliers=2, expected_max_suppliers=2)
    service_info_v1('customer-1', 'service_shared_data', 'ON')
    for observers in (
        ['customer-1@id-a_8084', 'customer-3@id-a_8084', ],
        ['customer-3@id-a_8084', 'customer-1@id-a_8084', ],
        ['supplier-2@id-a_8084', 'customer-3@id-a_8084', 'customer-1@id-a_8084', ],
    ):
        supplier_list_dht_v1(
            customer_id='customer-1@id-a_8084',
            observers_ids=observers,
            expected_ecc_map='ecc/2x2',
            expected_suppliers_number=2,
        )
    count = 0
    while True:
        if count > 20:
            # fix: removed unreachable `break` that followed this assert
            assert False, 'supplier was not replaced after many attempts'
        response_after = request_get('customer-1', 'supplier/list/v1')
        assert response_after.status_code == 200
        supplier_list_after = response_after.json()['result']
        suppliers_after = [x['global_id'] for x in supplier_list_after]
        assert len(suppliers_after) == 2
        # supplier at position 1 must stay in place while position 0 rotates
        assert suppliers_after[1] == suppliers_before[1]
        if suppliers_after[0] != suppliers_before[0]:
            break
        count += 1
        time.sleep(1)
def test_customer_1_upload_download_file_with_master_key():
    """Upload a file under customer-1's master key, download it back, and
    verify the content round-trips unchanged."""
    if os.environ.get('RUN_TESTS', '1') == '0':
        return pytest.skip()  # @UndefinedVariable
    packet_list_v1('customer-1', wait_all_finish=True)
    transfer_list_v1('customer-1', wait_all_finish=True)
    key_id = 'master$customer-1@id-a_8084'
    shared_volume = '/customer_1'
    origin_filename = 'file_customer_1.txt'
    # generate ~40KB of random base64 text on the node as the payload
    run_ssh_command_and_wait(
        'customer-1',
        f'python -c "import os, base64; print(base64.b64encode(os.urandom(30000)).decode())" > {shared_volume}/{origin_filename}'
    )
    local_path = '%s/%s' % (shared_volume, origin_filename)
    virtual_file = 'virtual_file.txt'
    remote_path = '%s:%s' % (key_id, virtual_file)
    download_volume = '/customer_1'
    downloaded_file = '%s/%s' % (download_volume, virtual_file)
    # wait until exactly 2 suppliers are hired and fully connected
    count = 0
    while True:
        if count > 10:
            # fix: removed unreachable `return` that followed this assert
            assert False, 'customer-1 failed to hire enough suppliers after many attempts'
        response = request_get('customer-1', 'supplier/list/v1')
        assert response.status_code == 200
        assert response.json()['status'] == 'OK', response.json()
        print('\n\nsupplier/list/v1 : %s\n' % response.json())
        if len(response.json()['result']) == 2:
            for s in response.json()['result']:
                assert s['supplier_state'] == 'CONNECTED'
                assert s['contact_state'] == 'CONNECTED'
            # fix: dropped no-op `assert True`
            break
        else:
            print('\nstill see %d suppliers, expect 2 suppliers\n' % len(response.json()['result']))
            count += 1
            time.sleep(5)
    service_info_v1('customer-1', 'service_shared_data', 'ON')
    file_create_v1('customer-1', remote_path)
    file_upload_start_v1(
        'customer-1',
        remote_path,
        local_path,
        wait_result=True,
    )
    packet_list_v1('customer-1', wait_all_finish=True)
    transfer_list_v1('customer-1', wait_all_finish=True)
    # retry download start while the upload is still being distributed
    for _ in range(20):
        response = request_post(
            'customer-1',
            'file/download/start/v1',
            json={
                'remote_path': remote_path,
                'destination_folder': download_volume,
                'wait_result': '1',
            },
        )
        assert response.status_code == 200
        if response.json()['status'] == 'OK':
            break
        if response.json()['errors'][0].startswith('download not possible, uploading'):
            time.sleep(1)
        else:
            assert False, response.json()
    else:
        assert False, 'download was not successful: %r' % response.json()
    local_file_src = run_ssh_command_and_wait('customer-1', 'cat %s' % local_path)[0].strip()
    print('customer-1: file %s is %d bytes long' % (local_path, len(local_file_src)))
    downloaded_file_src = run_ssh_command_and_wait('customer-1', 'cat %s' % downloaded_file)[0].strip()
    print('customer-1: file %s is %d bytes long' % (downloaded_file, len(downloaded_file_src)))
    assert local_file_src == downloaded_file_src, (local_file_src, downloaded_file_src, )
def test_identity_recover_from_customer_backup_to_customer_restore():
    """End-to-end identity recovery scenario.

    step1: upload + download one file on customer-backup to seed data;
    step2: back up customer-backup's private key and stop that node;
    step3: recover the identity on customer-restore and rejoin the network;
    step4: re-download the previously stored file on customer-restore and
    verify its content matches the original.
    """
    if os.environ.get('RUN_TESTS', '1') == '0':
        return pytest.skip()  # @UndefinedVariable
    # step1: first upload/download one file on customer_backup
    key_id = 'master$customer-backup@id-a_8084'
    source_volume = '/customer_backup'
    origin_filename = 'file_customer_backup.txt'
    source_local_path = '%s/%s' % (source_volume, origin_filename)
    virtual_file = 'virtual_file.txt'
    remote_path = '%s:%s' % (key_id, virtual_file)
    download_volume = '/customer_backup'
    downloaded_file = '%s/%s' % (download_volume, virtual_file)
    supplier_list_v1('customer-backup', expected_min_suppliers=2, expected_max_suppliers=2)
    service_info_v1('customer-backup', 'service_shared_data', 'ON')
    # DHT supplier records must look identical from several observer orderings
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'customer-backup@id-a_8084', 'supplier-1@id-a_8084', 'supplier-2@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'supplier-1@id-a_8084', 'supplier-2@id-a_8084', 'customer-backup@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'supplier-2@id-a_8084', 'customer-backup@id-a_8084', 'supplier-1@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    file_create_v1('customer-backup', remote_path)
    # generate ~40KB of random base64 text on the node as the payload
    run_ssh_command_and_wait(
        'customer-backup',
        f'python -c "import os, base64; print(base64.b64encode(os.urandom(30000)).decode())" > /customer_backup/{origin_filename}'
    )
    file_upload_start_v1('customer-backup', remote_path, source_local_path)
    service_info_v1('customer-backup', 'service_shared_data', 'ON')
    file_download_start_v1('customer-backup', remote_path, download_volume, open_share=False)
    source_local_file_src = run_ssh_command_and_wait(
        'customer-backup', 'cat %s' % source_local_path)[0].strip()
    print('customer-backup: file %s is %d bytes long' %
          (source_local_path, len(source_local_file_src)))
    downloaded_file_src = run_ssh_command_and_wait(
        'customer-backup', 'cat %s' % downloaded_file)[0].strip()
    print('customer-backup: file %s is %d bytes long' % (downloaded_file, len(downloaded_file_src)))
    assert source_local_file_src == downloaded_file_src, (
        source_local_file_src, downloaded_file_src, )
    # step2: backup customer-backup private key and stop that container
    # NOTE(review): these paths are checked/moved with the host's os/shutil —
    # assumes the containers' volumes are mounted on the test host; confirm
    backup_file_directory_c2 = '/customer_backup/identity.backup'
    backup_file_directory_c3 = '/customer_restore/identity.backup'
    assert not os.path.exists(backup_file_directory_c2)
    response = request_post(
        'customer-backup',
        'identity/backup/v1',
        json={
            'destination_path': backup_file_directory_c2,
        },
    )
    print('\n\nidentity/backup/v1 : %s\n' % response.json())
    assert response.json()['status'] == 'OK', response.json()
    # copy private key from one container to another
    # just like when you backup your private key and restore it from USB stick on another device
    shutil.move(backup_file_directory_c2, backup_file_directory_c3)
    # to make sure all uploads to finish
    transfer_list_v1('customer-backup', wait_all_finish=True)
    packet_list_v1('customer-backup', wait_all_finish=True)
    file_list_all_v1('customer-backup')
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'customer-backup@id-a_8084', 'supplier-1@id-a_8084', 'supplier-2@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'supplier-1@id-a_8084', 'supplier-2@id-a_8084', 'customer-backup@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'supplier-2@id-a_8084', 'customer-backup@id-a_8084', 'supplier-1@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    # stopping the node may drop the connection mid-request — best-effort
    try:
        response = request_get('customer-backup', 'process/stop/v1')
        assert response.json()['status'] == 'OK', response.json()
    except Exception as exc:
        print(f'\n\nprocess/stop/v1 failed with {exc}')
    # step3: recover key on customer-restore container and join network
    for _ in range(5):
        response = request_post(
            'customer-restore',
            'identity/recover/v1',
            json={
                'private_key_local_file': backup_file_directory_c3,
            },
        )
        print('\n\nidentity/recover/v1 : %s\n' % response.json())
        if response.json()['status'] == 'OK':
            break
        time.sleep(1)
    else:
        assert False, 'customer-restore was not able to recover identity after few seconds'
    # first quick probe is expected to fail while the node reconnects
    response = request_get('customer-restore', 'network/connected/v1?wait_timeout=1')
    assert response.json()['status'] == 'ERROR'
    for _ in range(5):
        response = request_get('customer-restore', 'network/connected/v1?wait_timeout=5')
        if response.json()['status'] == 'OK':
            break
        time.sleep(5)
    else:
        assert False, 'customer-restore was not able to join the network after identity recover'
    supplier_list_v1('customer-restore', expected_min_suppliers=2, expected_max_suppliers=2)
    service_info_v1('customer-restore', 'service_shared_data', 'ON')
    # the recovered identity keeps the customer-backup id but is now observed
    # from customer-restore with a (partly) new supplier set
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'customer-restore@id-a_8084', 'supplier-3@id-a_8084', 'supplier-1@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'supplier-3@id-a_8084', 'supplier-1@id-a_8084', 'customer-restore@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    supplier_list_dht_v1(
        customer_id='customer-backup@id-a_8084',
        observers_ids=[
            'supplier-1@id-a_8084', 'customer-restore@id-a_8084', 'supplier-3@id-a_8084', ],
        expected_ecc_map='ecc/2x2',
        expected_suppliers_number=2,
    )
    file_list_all_v1('customer-restore')
    # step4: try to recover stored file again
    key_id = 'master$customer-backup@id-a_8084'
    recover_volume = '/customer_restore'
    virtual_file = 'virtual_file.txt'
    remote_path = '%s:%s' % (key_id, virtual_file)
    recovered_file = '%s/%s' % (recover_volume, virtual_file)
    for _ in range(20):
        response = request_post(
            'customer-restore',
            'file/download/start/v1',
            json={
                'remote_path': remote_path,
                'destination_folder': recover_volume,
                'wait_result': '1',
            },
        )
        assert response.status_code == 200
        if response.json()['status'] == 'OK':
            break
        if response.json()['errors'][0].startswith('download not possible, uploading'):
            time.sleep(1)
        else:
            assert False, response.json()
    else:
        assert False, 'download was not successful: %r' % response.json()
    recovered_file_src = run_ssh_command_and_wait(
        'customer-restore', 'cat %s' % recovered_file)[0].strip()
    print('customer-restore:%s' % recovered_file, recovered_file_src)
    assert source_local_file_src == recovered_file_src, (
        source_local_file_src, recovered_file_src, )