def standard_b(parent, img):
    brightness = watchman.get_brightness(img.histogram(250), 20)
    red_brightness, green_brightness, blue_brightness = watchman.count_colours(img)
    motion = watchman.get_motion()
    # if performer.bar % 4 == 0:
    #     facecount = watchman.get_facecount(img)
    motion_threshold = 10
    if motion > motion_threshold:
        watchman.activity_boost = 1
        stabs.multifire(motion)
    else:
        watchman.activity_boost = 0
    # Map motion from (motion_threshold, 200) onto MIDI volume 0-127 for
    # every part.
    volume = 127 * tools.invlerp(motion_threshold, 200, motion)
    for part in ("bass", "drums", "chords", "melody", "stabs"):
        mixer.set_volume(parent, part, volume)
    # Brighter frames play faster.
    parent.set_user_tempo_modifier(0.5 + brightness)
    watchman.change_activity("bass", red_brightness, 16)
    watchman.change_activity("drums", green_brightness, 16)
    watchman.change_activity("melody", red_brightness, 8)
    watchman.change_activity("chords", blue_brightness, 16)
    # A blue-dominated frame flips the key towards minor; otherwise major.
    if blue_brightness > 0.5:
        if parent.user_mode == "major":
            parent.set_user_mode("Minor")
    else:
        if parent.user_mode == "minor":
            parent.set_user_mode("Major")
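# The profiles here lean heavily on tools.invlerp, which isn't defined in
# this file. A minimal sketch of its assumed behaviour (clamped inverse
# linear interpolation onto 0.0-1.0, so 127 * invlerp(10, 200, motion)
# maps motion onto the full MIDI volume range); the real tools module may
# differ:
def _invlerp_sketch(low, high, value):
    """Hypothetical stand-in for tools.invlerp."""
    if high == low:
        return 0.0
    # Position of value within (low, high), clamped to the unit interval.
    return max(0.0, min(1.0, float(value - low) / (high - low)))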
# (Elsewhere in this module: import threading, time; the project's tools and
# profiles modules; and the module-level globals tsig and timing.)
def play_chord(chan, speed, pattern):
    beat = 0
    while beat < (tsig * timing):
        noteinfo = pattern[beat]
        # "." is an empty step; tokens starting with "r" are rests.
        if noteinfo != "." and not noteinfo.startswith("r"):
            chord = tools.get_chord(noteinfo)
            # The last two characters of the token encode the note length.
            length = tools.lengths[str(noteinfo[-2:])]
            # Scale the chord size (0-6 voices) with the current motion level.
            chordsize = int(6 * tools.invlerp(0, 100, profiles.motion))
            threading.Thread(target=play_notes,
                             args=[chan, chord, 0, length, chordsize]).start()
        nextbeat = float(speed) / timing
        time.sleep(nextbeat)
        beat += 1
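# play_chord blocks for a full bar (tsig * timing steps, each lasting
# speed / timing seconds), so a caller would typically launch it on its own
# thread. A minimal, hypothetical launch helper (not part of the original
# module), assuming one pattern token per step:
def _start_chord_bar(chan, speed, pattern):
    """Illustrative only: play one bar of chords without blocking the caller."""
    t = threading.Thread(target=play_chord, args=[chan, speed, pattern])
    t.daemon = True  # let the process exit even if a bar is mid-flight
    t.start()
    return t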
def sparse(parent, img):
    global motion
    brightness = watchman.get_brightness(img.histogram(250), 20)
    red_brightness, green_brightness, blue_brightness = watchman.count_colours(img)
    motion = watchman.get_motion()
    # if performer.bar % 4 == 0:
    #     facecount = watchman.get_facecount(img)
    if motion > 5:
        watchman.activity_boost = 1
        stabs.multifire(motion)
        # tempomod = tools.clamp(1, 1.5, (motion / 100))
        # Scale tempo up with motion; with a clamped invlerp this tops out
        # at double speed around motion = 100.
        tempomod = 1 + (tools.invlerp(0, 0.5, (motion / 100) / 2))
        parent.set_user_tempo_modifier(tempomod)
    else:
        watchman.activity_boost = 0
        parent.set_user_tempo_modifier(1)
    watchman.change_activity("bass", red_brightness, 16)
    watchman.change_activity("drums", green_brightness, 16)
    watchman.change_activity("melody", red_brightness, 8)
    watchman.change_activity("chords", max(green_brightness, blue_brightness), 16)
    # A blue-dominated frame flips the key towards minor; otherwise major.
    if blue_brightness > 0.5:
        if parent.user_mode == "major":
            parent.set_user_mode("Minor")
    else:
        if parent.user_mode == "minor":
            parent.set_user_mode("Major")
    # Dark frames favour bass and drums; bright frames favour the melodic parts.
    mixer.set_volume(parent, "bass", 100 * (1 - brightness))
    mixer.set_volume(parent, "drums", 100 * (1 - brightness))
    mixer.set_volume(parent, "chords", 100 * brightness)
    mixer.set_volume(parent, "melody", 127 * brightness)
    mixer.set_volume(parent, "stabs", 127 * brightness)
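# Both profiles treat watchman.get_motion() as a magnitude of inter-frame
# change, thresholded around 5-10 and lerped up towards 100-200. One
# plausible implementation is PIL frame differencing; this sketch is an
# assumption about watchman's approach, not its actual code:
from PIL import ImageChops

_last_frame = None

def _motion_sketch(frame):
    """Illustrative motion estimate: mean absolute pixel difference between frames."""
    global _last_frame
    if _last_frame is None:
        _last_frame = frame
        return 0.0
    diff = ImageChops.difference(frame.convert("L"), _last_frame.convert("L"))
    _last_frame = frame
    hist = diff.histogram()  # 256 bins of absolute grey-level differences
    total = sum(hist)
    # Weighted mean of the difference histogram: 0.0 (static) to 255.0.
    return sum(i * n for i, n in enumerate(hist)) / float(total)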