async def dump(self) -> None:
    """Pickle the index state and store it in MongoDB GridFS under "index".

    Serializes (index, dictionary, corpus, model, documents) with pickle,
    removes any previously stored "index" file, uploads the new payload,
    and records the resulting GridFS md5 and _id on the instance.
    """
    payload = pickle.dumps(
        (self.index, self.dictionary, self.corpus, self.model, self.documents)
    )
    bucket = AsyncIOMotorGridFSBucket(db())

    # Drop the previous "index" file, if one exists, so the name stays unique.
    try:
        existing = await bucket.open_download_stream_by_name("index")
    except NoFile:
        pass
    else:
        await bucket.delete(existing._id)

    upload = bucket.open_upload_stream("index")
    await upload.write(payload)
    await upload.close()

    # Keep the stored file's checksum and id for later verification/lookup.
    self.md5 = upload.md5
    self._id = str(upload._id)
async def test_iter_gridfs(self):
    """Async iteration over GridFS file cursors and over a file's chunks."""
    gfs = AsyncIOMotorGridFSBucket(self.db)

    async def cleanup():
        # Remove all GridFS documents between phases of the test.
        await self.db.fs.files.delete_many({})
        await self.db.fs.chunks.delete_many({})

    await cleanup()

    # An empty result set must not yield anything.
    async for _ in gfs.find({'_id': 1}):
        self.fail()

    data = b'data'
    for n_files in (1, 2, 10):
        for _ in range(n_files):
            async with gfs.open_upload_stream(filename='filename') as stream:
                await stream.write(data)

        # batch_size(3) forces extra server round trips during iteration.
        count = 0
        async for _ in gfs.find({'filename': 'filename'}).batch_size(3):
            count += 1
        self.assertEqual(count, n_files)
        await cleanup()

    # One byte per chunk, so iterating the file yields len(data) chunks.
    await gfs.upload_from_stream_with_id(
        1, 'filename', source=data, chunk_size_bytes=1)
    cursor = gfs.find({'_id': 1})
    await cursor.fetch_next
    gout = cursor.next_object()
    chunks = [chunk async for chunk in gout]
    self.assertEqual(len(chunks), len(data))
    self.assertEqual(b''.join(chunks), data)
async def test_iter_gridfs(self):
    """Async iteration over GridFS cursors and chunk-by-chunk file reads.

    NOTE(review): this is a duplicate of the preceding ``test_iter_gridfs``
    definition — if both live in one class the later def shadows the earlier.
    """
    bucket = AsyncIOMotorGridFSBucket(self.db)

    async def purge():
        # Reset GridFS collections so each phase starts clean.
        await self.db.fs.files.delete_many({})
        await self.db.fs.chunks.delete_many({})

    await purge()

    # Iterating a query with no matches must produce nothing.
    async for _ in bucket.find({'_id': 1}):
        self.fail()

    data = b'data'
    for expected in (1, 2, 10):
        for _ in range(expected):
            async with bucket.open_upload_stream(filename='filename') as f:
                await f.write(data)

        # A small batch size forces iteration across multiple batches.
        seen = 0
        async for _ in bucket.find({'filename': 'filename'}).batch_size(3):
            seen += 1
        self.assertEqual(seen, expected)
        await purge()

    # chunk_size_bytes=1 means the download yields one chunk per byte.
    await bucket.upload_from_stream_with_id(
        1, 'filename', source=data, chunk_size_bytes=1)
    cursor = bucket.find({'_id': 1})
    await cursor.fetch_next
    grid_out = cursor.next_object()
    pieces = []
    async for piece in grid_out:
        pieces.append(piece)
    self.assertEqual(len(pieces), len(data))
    self.assertEqual(b''.join(pieces), data)
async def _download_song(fs: AsyncIOMotorGridFSBucket, filename, local_path):
    """Read the GridFS file named *filename*, mirror its bytes to *local_path*, return the bytes."""
    data = b""
    async for grid_out in fs.find({"filename": filename}):
        # BUG FIX: AsyncIOMotorGridOut.read() is a coroutine; without await the
        # variable held a coroutine object instead of the file's bytes.
        data = await grid_out.read()
    with open(local_path, 'wb') as f:
        f.write(data)
    return data


async def _upload_file(fs: AsyncIOMotorGridFSBucket, local_path, gridfs_name):
    """Upload the local file at *local_path* into GridFS under *gridfs_name*."""
    with open(local_path, 'rb') as f:
        grid_in = fs.open_upload_stream(gridfs_name)
        await grid_in.write(f.read())
        await grid_in.close()


async def _set_mix_fields(mix_db: Collection, mix_id, update_data, step):
    """$set *update_data* on the mix document; log when no document matched.

    BUG FIX: the original tested the truthiness of the UpdateResult object,
    which is always truthy, so the failure branch could never run. Checking
    matched_count actually detects a missing document.
    """
    result = await mix_db.update_one({"_id": ObjectId(mix_id)},
                                     {"$set": update_data})
    if result.matched_count == 0:
        print(f"mix update #{step} failed")


async def mix_two_files(config, song_a_name, song_b_name, bpm_a, bpm_b,
                        desired_bpm, mix_name, scenario_name,
                        transition_points, entry_point, exit_point,
                        num_songs_a, mix_id, mix_db: Collection,
                        fs: AsyncIOMotorGridFSBucket):
    """Mix two songs into one track, storing artifacts and progress in MongoDB.

    Pipeline: download both songs from GridFS, tempo-match them, compute or
    derive transition points, render the mixed wav, convert it to mp3, upload
    both results to GridFS, and export the transition parameters as JSON.
    Progress (20/40/60/80/100) is written to the mix document as each stage
    completes.

    Returns the exported JSON data on success, or an error response model
    when no mix document exists for *mix_id*.
    """
    # Guard clause: nothing to do without a mix document.
    mix_mongo = await mix_db.find_one({"_id": ObjectId(mix_id)})
    if not mix_mongo:
        return error_response_model("Not Found", 404,
                                    f"Mix with id {mix_id} does not exist")

    # 0. fetch both source songs from GridFS and decode them.
    song_a_path = f"{config['song_analysis_path']}/{song_a_name}"
    song_b_path = f"{config['song_analysis_path']}/{song_b_name}"
    song_a_data = await _download_song(fs, song_a_name, song_a_path)
    song_b_data = await _download_song(fs, song_b_name, song_b_path)
    song_a = util.read_wav_file(config, io.BytesIO(song_a_data),
                                identifier='songA')
    # BUG FIX: the original decoded song_a_data again here, so song B was a
    # copy of song A; it must be built from song_b_data.
    song_b = util.read_wav_file(config, io.BytesIO(song_b_data),
                                identifier='songB')

    await _set_mix_fields(mix_db, mix_id, {"progress": 20}, 0)

    # TSL = Transition Segment Length
    tsl_list = [
        config['transition_midpoint'],
        config['transition_length'] - config['transition_midpoint']
    ]

    # 1. match tempo of both songs before analysis
    # TODO write adjusted songs to db
    if desired_bpm != bpm_a:
        song_a_adjusted, song_b_adjusted = bpm_match.match_bpm_desired(
            config, song_a, song_b, desired_bpm, bpm_a, bpm_b)
    else:
        song_a_adjusted, song_b_adjusted = bpm_match.match_bpm_first(
            config, song_a, bpm_a, song_b, bpm_b)

    await _set_mix_fields(mix_db, mix_id, {"progress": 40}, 1)

    # 2. analyse songs: derive B/X from caller-supplied points, or compute
    # the full set from the adjusted audio.
    if transition_points:
        transition_points['b'] = round(
            transition_points['a'] +
            (transition_points['d'] - transition_points['c']), 3)
        transition_points['x'] = round(
            transition_points['a'] +
            (transition_points['e'] - transition_points['c']), 3)
    else:
        then = time.time()
        transition_points = analysis.get_transition_points(
            config, song_a_adjusted, song_b_adjusted, exit_point,
            entry_point, tsl_list)
        now = time.time()
        print("INFO - Analysing file took: %0.1f seconds. \n" % (now - then))

    await _set_mix_fields(
        mix_db, mix_id,
        {"transition_points": transition_points, "progress": 60}, 2)

    print(f"Transition points (seconds): {transition_points}")
    print(
        f"Transition points (minutes): {util.get_length_for_transition_points(config, transition_points)}"
    )
    print(
        f"Transition interval lengths (C-D-E): {round(transition_points['d']-transition_points['c'], 3)}s, {round(transition_points['e']-transition_points['d'], 3)}s"
    )
    print(
        f"Transition interval lengths (A-B-X): {round(transition_points['b']-transition_points['a'], 3)}s, {round(transition_points['x']-transition_points['b'], 3)}s"
    )
    print()

    # 3. mix both songs
    then = time.time()
    frames = util.calculate_frames(config, song_a_adjusted, song_b_adjusted,
                                   transition_points)
    mixed_song = mixer.create_mixed_wav_file(config, song_a_adjusted,
                                             song_b_adjusted,
                                             transition_points, frames,
                                             tsl_list, mix_name,
                                             scenario_name)
    now = time.time()
    print("INFO - Mixing file took: %0.1f seconds" % (now - then))

    mix_name_wav = mixed_song['name']
    await _upload_file(fs, mixed_song['path'], mix_name_wav)
    await _set_mix_fields(mix_db, mix_id,
                          {"title": mix_name_wav, "progress": 80}, 3)

    # 4. convert to mp3
    if mixed_song:
        mix_name_mp3 = converter.convert_result_to_mp3(
            config, mixed_song['name'])
        if mix_name_mp3:
            mixed_song['name_mp3'] = mix_name_mp3
            mixed_song['path_mp3'] = f"{config['mix_path']}/{mix_name_mp3}"
    # NOTE(review): if conversion fails the 'name_mp3' key is absent and this
    # raises KeyError, matching the original behavior — confirm intent.
    mix_name_mp3 = mixed_song['name_mp3']
    await _upload_file(fs, mixed_song['path_mp3'], mix_name_mp3)
    await _set_mix_fields(mix_db, mix_id,
                          {"progress": 100, "title_mp3": mix_name_mp3}, 4)

    # 5. export json data
    scenario_data = util.get_scenario(config, scenario_name)
    scenario_data['short_name'] = scenario_name
    new_num_songs = num_songs_a + 1
    json_data = util.export_transition_parameters_to_json(
        config, [song_a, song_b, mixed_song], transition_points,
        scenario_data, tsl_list, new_num_songs, desired_bpm)

    # Remove the local working copies; everything durable lives in GridFS.
    os.remove(song_a_path)
    os.remove(song_b_path)
    return json_data