print('detection %f' % (time.time()-start_time)) #detectionResults = detectMusic.yoloDetect(img_two) print("detectionResults", detectionResults) # 認識結果を楽譜形式に変換 currentNote = imageYOLO.makeSound(detectionResults) print('currentNote %f' % (time.time()-start_time)) print("currentNote", currentNote) # 認識結果を表示 detectMusic.writeBoundingBox(detectionResults, img_two, int(imageNum)) #print('bounding box %f' % (time.time() - start_time)) # if not detectionResults: # # 認識結果が空なら終了 # move.stop() # break # 新規楽譜の抽出 newNote = imageYOLO.findNewNotes(currentNote, musicList) print('newNote %f' % (time.time()-start_time)) # 新規楽譜を結合 musicList.extend(newNote) #楽譜をev3に送信する if newNote != []: #postData.postMusic(newNote) pass postData.postMusic(currentNote) print('music list extend %f' % (time.time()-start_time)) print("musicList", musicList) time.sleep(1.1) imageNum += 18 #print('music list send %f' % (time.time()-start_time))
detectMusic.writeBoundingBox(detectionResults, img_two, int(imageNum)) #print('TIME bounding box %f' % (time.time() - start_time)) #if not detectionResults: # # 認識結果が空なら終了 # move.stop() # break # 新規楽譜の抽出 newNote = imageYOLO.findNewNotes(currentNote, pastNote) pastNote = currentNote print('newNote %f' % (time.time() - start_time)) print('offline cur', currentNote) print('offline new', newNote) # 新規楽譜を結合 musicList.extend(newNote) print('TIME music list extend %f' % (time.time() - start_time)) #モーターの左右値を取得 r, l = pid.post(detectionResults) #楽譜をev3に送信する postData.postMusic(newNote, r, l) print('TIME postMoveData %f' % (time.time() - start_time)) print('TIME postMusic%f' % (time.time() - start_time)) print("musicList", musicList) # time.sleep(1.1) imageNum += 4 ###imageNum += 3 print('offline music', musicList) #print('TIME music list send %f' % (time.time()-start_time))
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""Offline smoke test: post a fixed ten-note detection set to the EV3.

Builds a hard-coded YOLO-style detection list — each entry is
(label_bytes, confidence, (cx, cy, w, h)) — converts it to a note list
with the 10-note jingle dictionary, and sends the result via postData.
"""
import postData
import imageYOLO

# Nine of the ten entries share one placeholder confidence and box;
# only the label differs, so build them from shared constants.
_CONF = 0.991621732711792
_BOX = (344.5289306640625, 286.759765625,
        486.18890380859375, 321.3658447265625)

detectionResults = [
    (b'u_do', 0.9993329048156738,
     (224.17959594726562, 378.47900390625,
      178.75448608398438, 328.29620361328125)),
]
for _label in (b'u_re', b'mi', b'fa', b'so', b'la', b'si_F'):
    detectionResults.append((_label, _CONF, _BOX))
detectionResults.append(
    (b'stop', 0.9165929555892944,
     (580.9117431640625, 125.05439758300781,
      208.13427734375, 87.27819061279297)))
for _label in (b'h_do', b'h_re'):
    detectionResults.append((_label, _CONF, _BOX))

# Convert the detections to a note list and send it to the EV3.
musicList = imageYOLO.convertNotes(detectionResults, imageYOLO.dic_10jingle)
postData.postMusic(musicList)
print("currentNote", currentNote) # 認識結果を表示 detectMusic.writeBoundingBox(detectionResults, img_two, int(imageNum)) #print('TIME bounding box %f' % (time.time() - start_time)) #if not detectionResults: # # 認識結果が空なら終了 # move.stop() # break # 新規楽譜の抽出 newNote = imageYOLO.findNewNotes(currentNote, pastNote) pastNote = currentNote print('newNote %f' % (time.time() - start_time)) print('offline cur', currentNote) print('offline new', newNote) # 新規楽譜を結合 musicList.extend(newNote) #楽譜をev3に送信する #if newNote != []: # #postData.postMusic(newNote) # pass postData.postMusic(newNote) print('TIME music list extend %f' % (time.time() - start_time)) print("musicList", musicList) # time.sleep(1.1) # imageNum += 18 imageNum += 8 print('offline music', musicList) #print('TIME music list send %f' % (time.time()-start_time))