def generate_record(self):
    """Build one randomized cat record.

    Roughly 70% of records carry a nested `adoption` object (always, when
    `self.nested_count` is set); the rest have `adoption` set to None.
    """
    adoption = None
    if self.nested_count or chance.boolean(likelihood=70):
        shot_count = self.nested_count or random.randint(0, 4)
        immunizations = [
            {
                'type': chance.pickone(
                    ['FIV', 'Panleukopenia', 'Rabies', 'Feline Leukemia']),
                'date_administered': chance.date(minyear=2012).isoformat(),
            }
            for _ in range(shot_count)
        ]
        adoption = {
            'adopted_on': chance.date(minyear=2012).isoformat(),
            'was_foster': chance.boolean(),
            'immunizations': immunizations,
        }
    record = {
        'id': self.id,
        'name': fake.first_name(),
        'pattern': chance.pickone(['Tabby', 'Tuxedo', 'Calico', 'Tortoiseshell']),
        'age': random.randint(1, 15),
        'adoption': adoption,
    }
    return record
def generate_record(self):
    """Emit a record whose `every_type` field randomly takes one of eight
    JSON value shapes (null, int, int-as-float, float, bool, date string,
    array, object), alongside a field that is always an integer."""
    bound = 314159265359
    value_null = None
    value_integer = random.randint(-bound, bound)
    value_integer_as_number = float(random.randint(-bound, bound))
    value_number = random.uniform(-bound, bound)
    value_boolean = chance.boolean()
    value_date_time_string = chance.date(minyear=2012).isoformat()
    value_array = [
        random.randint(-314, 314) for _ in range(random.randint(0, 1000))
    ]
    value_object = {
        'i': random.randint(-bound, bound),
        'n': random.uniform(-bound, bound),
        'b': chance.boolean(),
    }
    candidates = [
        value_null,
        value_integer,
        value_integer_as_number,
        value_number,
        value_boolean,
        value_date_time_string,
        value_array,
        value_object,
    ]
    return {
        'every_type': chance.pickone(candidates),
        'number_which_only_comes_as_integer': value_integer,
    }
def document():
    """Assemble one fake movie-style document: random CouchDB-like ids,
    reviewer identity fields, five site ratings and ten data points."""
    doc = {}
    doc['_id'] = uuid.uuid4().hex
    doc['_rev'] = uuid.uuid4().hex
    doc['description'] = chance.paragraph()
    doc['date'] = chance.date(
        year=0, month=0, day=0, hour=0, minutes=0,
        minyear=1985).strftime("%Y-%m-%d %H:%M:%S")
    doc['firstname'] = chance.first()
    doc['lastname'] = chance.last()
    doc['email'] = chance.email()
    doc['ip'] = chance.ip()
    doc['rating'] = {
        site: randint(0, 9)
        for site in ('imdb', 'rottentomatoes', 'empire', 'totalfilm', 'guardian')
    }
    doc['data'] = [randint(0, 9) for _ in range(10)]
    return doc
def test_ipfs(app):
    """Round-trip a metadata JSON object through a live IPFS node.

    Writes a randomized track-metadata file, adds it to the configured IPFS
    peer to obtain its multihash, retrieves it back through the project's
    IPFSClient, and asserts every key/value survived the round trip.
    """
    json_file = "tests/res/test_ipfs.json"
    ipfs_peer_host = app.config["ipfs"]["host"]
    ipfs_peer_port = app.config["ipfs"]["port"]

    # Instantiate IPFS client from src lib
    ipfsclient = IPFSClient(ipfs_peer_host, ipfs_peer_port, [])

    # Remove any stale fixture file left over from a previous run.
    remove_test_file(json_file)

    # Raw ipfshttpclient connection to the same peer, addressed via multiaddr.
    api = ipfshttpclient.connect(
        f"/dns/{ipfs_peer_host}/tcp/{ipfs_peer_port}/http")

    # Create generic metadata object w/above IPFS multihash
    test_metadata_object = dict(track_metadata_format)
    test_metadata_object["title"] = chance.name()
    test_metadata_object["release_date"] = str(chance.date())
    test_metadata_object["file_type"] = "mp3"
    test_metadata_object["license"] = "HN"
    with open(json_file, "w") as f:
        json.dump(test_metadata_object, f)

    # Add metadata object to ipfs node
    json_res = api.add(json_file)
    metadata_hash = json_res["Hash"]

    # Invoke audius-ipfs: fetch the object back by its multihash.
    ipfs_metadata = ipfsclient.get_metadata(metadata_hash,
                                            track_metadata_format)

    # Clean up before asserting so the file is removed even if a check fails.
    remove_test_file(json_file)

    # Confirm retrieved metadata object state matches what was uploaded.
    for key in test_metadata_object:
        assert key in ipfs_metadata
        assert test_metadata_object[key] == ipfs_metadata[key]
def generate_record(self):
    """Generate one record whose value matches `self.changing_literal_type`.

    Supported type names: 'integer', 'number', 'boolean', 'string',
    'date-time' (the last two both yield an ISO-8601 date string).

    Returns:
        dict with an offset `id` (self.id + self.starting_id) and the
        generated value under 'changing_literal_type'.

    Raises:
        ValueError: if `self.changing_literal_type` is not a supported name.
            (ValueError subclasses Exception, so callers that previously
            caught the generic Exception still work.)
    """
    literal_type = self.changing_literal_type
    if literal_type == 'integer':
        value = random.randint(-314159265359, 314159265359)
    elif literal_type == 'number':
        # Mix floats and ints so downstream schema detection sees a
        # general 'number' rather than a single concrete representation.
        value = chance.pickone([
            random.uniform(-314159265359, 314159265359),
            float(random.randint(-314159265359, 314159265359)),
            random.randint(-314159265359, 314159265359)
        ])
    elif literal_type == 'boolean':
        value = chance.boolean()
    elif literal_type in ('string', 'date-time'):
        # A date-time string doubles as a plain string value, so the two
        # branches share one implementation.
        value = chance.date(minyear=2012).isoformat()
    else:
        raise ValueError(
            'Unknown changing_literal_type: `{}`'.format(literal_type))
    return {
        'id': self.id + self.starting_id,
        'changing_literal_type': value,
    }
def seed_contract_data(task, contracts, web3):
    """Seed the test chain with one user and one track.

    Registers a randomly-handled user via the user factory contract, uploads
    an audio file plus its track metadata to IPFS, then registers the track
    against the new user via the track factory contract. Both contract calls
    are authorized with EIP-712 typed-data signatures from
    `web3.eth.defaultAccount`.

    Args:
        task: object exposing `shared_config` with ipfs host/port settings.
        contracts: dict holding "user_factory_contract" and
            "track_factory_contract" web3 contract instances.
        web3: connected web3 instance used for signing and transactions.

    Returns:
        dict with the generated user handle, the chain-assigned user id, and
        the track metadata dict that was uploaded.
    """
    user_factory_contract = contracts["user_factory_contract"]
    track_factory_contract = contracts["track_factory_contract"]

    # Connect to the IPFS peer configured for this task (multiaddr form).
    ipfs_peer_host = task.shared_config["ipfs"]["host"]
    ipfs_peer_port = task.shared_config["ipfs"]["port"]
    ipfs = ipfshttpclient.connect(f"/dns/{ipfs_peer_host}/tcp/{ipfs_peer_port}/http")

    # Retrieve web3 instance from fixture; chain id feeds the EIP-712
    # signing domains below.
    chain_id = web3.net.version

    # Give the user some randomness so repeat tests can succeed
    new_user_handle = "troybarnes" + secrets.token_hex(2)
    new_user_nonce = "0x" + secrets.token_hex(32)

    # EIP-712 typed payload authorizing the AddUser call.
    new_user_signature_data = {
        "types": {
            "EIP712Domain": [
                {"name": "name", "type": "string"},
                {"name": "version", "type": "string"},
                {"name": "chainId", "type": "uint256"},
                {"name": "verifyingContract", "type": "address"},
            ],
            "AddUserRequest": [
                {"name": "handle", "type": "bytes16"},
                {"name": "nonce", "type": "bytes32"},
            ],
        },
        "domain": {
            "name": "User Factory",
            "version": "1",
            "chainId": chain_id,
            "verifyingContract": user_factory_contract.address,
        },
        "primaryType": "AddUserRequest",
        "message": {"handle": new_user_handle, "nonce": new_user_nonce},
    }
    new_user_signature = web3.eth.signTypedData(
        web3.eth.defaultAccount, new_user_signature_data
    )

    # Add creator to blockchain
    new_user_tx_hash = user_factory_contract.functions.addUser(
        web3.eth.defaultAccount,
        toBytes(new_user_handle, 16),
        new_user_nonce,
        new_user_signature,
    ).transact()

    # parse chain transaction results to recover the assigned user id
    tx_receipt = web3.eth.waitForTransactionReceipt(new_user_tx_hash)
    tx_new_user_info = user_factory_contract.events.AddUser().processReceipt(tx_receipt)
    new_user_args = tx_new_user_info[0].args
    user_id_from_event = int(new_user_args._userId)

    # Add audio file to ipfs node.
    # NOTE(review): `test_file` is not defined in this function — presumably
    # a module-level fixture path; confirm against the surrounding file.
    res = ipfs.add(test_file)
    test_audio_file_hash = res["Hash"]
    test_track_segments = [{"multihash": test_audio_file_hash, "duration": 28060}]

    # Create track metadata object
    track_metadata = {
        "owner_id": user_id_from_event,
        "title": chance.name(),
        "length": 0.4,
        "cover_art": test_audio_file_hash,
        "description": "putin sucks",
        "is_unlisted": False,
        "field_visibility": "",
        "license": "",
        "isrc": "",
        "iswc": "",
        "cover_art_sizes": [],
        "tags": "unit test, tags",
        "genre": "treality",
        "mood": "wavy",
        "credits_splits": "random_string?",
        "create_date": str(chance.date()),
        "release_date": str(chance.date()),
        "file_type": "mp3",
        "track_segments": test_track_segments,
    }

    # dump metadata to file so it can be added to IPFS by path
    # (NOTE(review): `track_metadata_json_file` also comes from module scope)
    with open(track_metadata_json_file, "w") as f:
        json.dump(track_metadata, f)

    # add track metadata to ipfs
    metadata_res = ipfs.add(track_metadata_json_file)
    metadata_hash = metadata_res["Hash"]

    # get track metadata multihash, split into digest / hash-fn / size parts
    metadata_decoded = src.utils.multihash.from_b58_string(metadata_hash)
    metadata_decoded_multihash = src.utils.multihash.decode(metadata_decoded)

    new_track_nonce = "0x" + secrets.token_hex(32)
    new_track_multihash_digest = "0x" + metadata_decoded_multihash["digest"].hex()
    new_track_multihash_hash_fn = int(metadata_decoded_multihash["code"])
    new_track_multihash_size = int(metadata_decoded_multihash["length"])

    # EIP-712 typed payload authorizing the AddTrack call.
    new_track_signature_data = {
        "types": {
            "EIP712Domain": [
                {"name": "name", "type": "string"},
                {"name": "version", "type": "string"},
                {"name": "chainId", "type": "uint256"},
                {"name": "verifyingContract", "type": "address"},
            ],
            "AddTrackRequest": [
                {"name": "trackOwnerId", "type": "uint"},
                {"name": "multihashDigest", "type": "bytes32"},
                {"name": "multihashHashFn", "type": "uint8"},
                {"name": "multihashSize", "type": "uint8"},
                {"name": "nonce", "type": "bytes32"},
            ],
        },
        "domain": {
            "name": "Track Factory",
            "version": "1",
            "chainId": chain_id,
            "verifyingContract": track_factory_contract.address,
        },
        "primaryType": "AddTrackRequest",
        "message": {
            "trackOwnerId": user_id_from_event,
            "multihashDigest": new_track_multihash_digest,
            "multihashHashFn": new_track_multihash_hash_fn,
            "multihashSize": new_track_multihash_size,
            "nonce": new_track_nonce,
        },
    }
    new_track_signature = web3.eth.signTypedData(
        web3.eth.defaultAccount,
        new_track_signature_data
    )

    # add track to blockchain
    track_factory_contract.functions.addTrack(
        user_id_from_event,
        new_track_multihash_digest,
        new_track_multihash_hash_fn,
        new_track_multihash_size,
        new_track_nonce,
        new_track_signature,
    ).transact()

    return {
        "new_user_handle": new_user_handle,
        "new_user_id": user_id_from_event,
        "track_metadata": track_metadata,
    }
def test_index_operations(app, client, celery_app, contracts):
    """Confirm indexing of creator operations results in expected state change.

    Uploads creator metadata and a track to IPFS, registers both on the test
    chain, runs the discovery-provider indexing task, then asserts the new
    creator and track are queryable through the app/API.
    """
    test_file = "tests/res/test_audio_file.mp3"
    creator_metadata_json_file = "tests/res/test_creator_metadata.json"
    track_metadata_json_file = "tests/res/test_track_metadata.json"
    user_factory_contract = contracts["user_factory_contract"]
    track_factory_contract = contracts["track_factory_contract"]
    ipfs_peer_host = app.config["ipfs"]["host"]
    ipfs_peer_port = app.config["ipfs"]["port"]
    # FIX: ipfshttpclient.connect() takes a single multiaddr string, not
    # separate host/port positional arguments (that was the old ipfsapi
    # convention). This also matches how the other helpers here connect.
    api = ipfshttpclient.connect(
        f"/dns/{ipfs_peer_host}/tcp/{ipfs_peer_port}/http")

    # Retrieve web3 instance from fixture
    web3 = contracts["web3"]

    # Generate new random creator name so repeat runs don't collide.
    random_creator_name = chance.name()

    # create creator metadata object
    creator_metadata = {
        "handle": "test" + str(random.randint(1, 100000)),
        "wallet": web3.eth.defaultAccount,
        "is_creator": 1,
        "name": random_creator_name,
        "profile_picture": '0x1a5a5d47bfca6be2872d8076920683a3ae112b455a7a444be5ebb84471b16c4e',
        "cover_photo": '0x1a5a5d47bfca6be2872d8076920683a3ae112b455a7a444be5ebb84471b16c4e',
        "bio": "Leslie David Baker",
        "location": "San Francisco",
    }

    # dump metadata to file so it can be added to IPFS by path
    with open(creator_metadata_json_file, "w") as f:
        json.dump(creator_metadata, f)

    # add creator metadata to ipfs
    metadata_res = api.add(creator_metadata_json_file)
    metadata_hash = metadata_res["Hash"]

    # get creator metadata multihash
    metadata_decoded = src.utils.multihash.from_b58_string(metadata_hash)
    metadata_decoded_multihash = src.utils.multihash.decode(metadata_decoded)

    # Add creator to blockchain
    newcreator_tx_hash = user_factory_contract.functions.addUser(
        metadata_decoded_multihash["digest"],
        to_bytes(creator_metadata['handle'], 16),
        to_bytes(creator_metadata['name']),
        to_bytes(creator_metadata['location']),
        to_bytes(creator_metadata['bio']),
        to_bytes(creator_metadata['profile_picture']),
        to_bytes(creator_metadata['cover_photo']),
        True).transact()

    # parse chain transaction results to recover the assigned creator id
    txReceipt = web3.eth.waitForTransactionReceipt(newcreator_tx_hash)
    txNewUserInfo = user_factory_contract.events.NewUser().processReceipt(
        txReceipt)
    newCreatorArgs = txNewUserInfo[0].args

    # get creator id
    user_id_from_event = int(newCreatorArgs._id)

    # Add audio file to ipfs node
    res = api.add(test_file)
    test_audio_file_hash = res["Hash"]
    test_track_segments = [{
        "multihash": test_audio_file_hash,
        "duration": 28060
    }]

    # Create track metadata object
    track_metadata = {
        "owner_id": user_id_from_event,
        "title": chance.name(),
        "length": 0.4,
        "cover_art": test_audio_file_hash,
        "tags": "unit test, tags",
        "genre": "treality",
        "mood": "wavy",
        "credits_splits": "random_string?",
        "create_date": str(chance.date()),
        "release_date": str(chance.date()),
        "file_type": "mp3",
        "track_segments": test_track_segments,
    }

    # dump metadata to file
    with open(track_metadata_json_file, "w") as f:
        json.dump(track_metadata, f)

    # add track metadata to ipfs
    metadata_res = api.add(track_metadata_json_file)
    metadata_hash = metadata_res["Hash"]

    # get track metadata multihash
    metadata_decoded = src.utils.multihash.from_b58_string(metadata_hash)
    metadata_decoded_multihash = src.utils.multihash.decode(metadata_decoded)

    # add track to blockchain
    track_factory_contract.functions.addTrack(
        user_id_from_event,
        metadata_decoded_multihash["digest"],
        int(metadata_decoded_multihash["code"]),
        int(metadata_decoded_multihash["length"]),
    ).transact()

    # Run update discovery provider task
    celery_app.celery.autodiscover_tasks(["src.tasks"], "index", True)
    celery_app.celery.finalize()
    celery_app.celery.tasks["update_discovery_provider"].run()

    # Confirm the update task ran and inserted new creator
    current_creators = query_creator_by_name(app, random_creator_name)
    num_query_results = len(current_creators)
    assert num_query_results == 1

    # Confirm new track has been indexed
    indexed_value = client.get("/tracks").get_json()
    added_creator = any(
        track["title"] == track_metadata["title"] for track in indexed_value
    )
    assert added_creator is True

    # clean up state
    remove_test_file(creator_metadata_json_file)
    remove_test_file(track_metadata_json_file)