Example #1
def make_beat_events(end_beat: song.BeatsTime,
                     time_map: TimeMap) -> List[Event]:
    start = song.BeatsTime(0)
    stop = end_beat + song.BeatsTime(1, 2)
    step = song.BeatsTime(1)
    beats = numeric_range(start, stop, step)
    return [make_beat_event(beat, time_map) for beat in beats]
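For intuition, here is a minimal self-contained sketch of the beat enumeration above, using plain Fraction values in place of song.BeatsTime and a hand-rolled range in place of numeric_range (assumed to behave like more_itertools.numeric_range, with stop excluded):

from fractions import Fraction
from typing import Iterator

def fraction_range(start: Fraction, stop: Fraction, step: Fraction) -> Iterator[Fraction]:
    # yields start, start + step, ... while strictly below stop
    current = start
    while current < stop:
        yield current
        current += step

end_beat = Fraction(4)
beats = list(fraction_range(Fraction(0), end_beat + Fraction(1, 2), Fraction(1)))
# the half-beat padding on stop keeps the final whole beat included:
# beats == [Fraction(0), Fraction(1), Fraction(2), Fraction(3), Fraction(4)]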
Example #2
def naive_approach(beats: song.Timing, beat: song.BeatsTime) -> Fraction:
    if beat < 0:
        raise ValueError("Can't compute seconds at negative beat")

    if not beats.events:
        raise ValueError("No BPM defined")

    grouped_by_time = group_by(beats.events, key=lambda e: e.time)
    for time, events in grouped_by_time.items():
        if len(events) > 1:
            raise ValueError(
                f"Multiple BPMs defined on beat {time} : {events}")

    sorted_events = sorted(beats.events, key=lambda e: e.time)
    first_event = sorted_events[0]
    if first_event.time != song.BeatsTime(0):
        raise ValueError("First BPM event is not on beat zero")

    if beat > sorted_events[-1].time:
        events_before = sorted_events
    else:
        last_index = next(i for i, e in enumerate(sorted_events)
                          if e.time >= beat)
        events_before = sorted_events[:last_index]
    total_seconds = Fraction(0)
    current_beat = beat
    for event in reversed(events_before):
        beats_since_previous = current_beat - event.time
        seconds_since_previous = (60 * beats_since_previous) / Fraction(
            event.BPM)
        total_seconds += seconds_since_previous
        current_beat = event.time

    total_seconds = total_seconds + Fraction(beats.beat_zero_offset)
    return total_seconds
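A standalone illustration of the backwards walk in the loop above, with hypothetical BPM events (120 BPM from beat 0, 240 BPM from beat 4) and plain Fraction values; the real function then adds beats.beat_zero_offset on top:

from fractions import Fraction

events = [(Fraction(0), Fraction(120)), (Fraction(4), Fraction(240))]  # (beat, BPM)
beat = Fraction(6)

total_seconds = Fraction(0)
current_beat = beat
for event_beat, bpm in reversed(events):
    if event_beat > current_beat:
        continue  # this BPM section starts after the beat we are converting
    # time spent in this BPM section, in seconds
    total_seconds += 60 * (current_beat - event_beat) / bpm
    current_beat = event_beat

# 4 beats at 120 BPM (2 s) + 2 beats at 240 BPM (1/2 s):
assert total_seconds == Fraction(5, 2)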
Example #3
def load_memon_0_1_0(path: Path, **kwargs: Any) -> jbt.Song:
    raw_memon = _load_raw_memon(path)
    schema = Memon_0_1_0()
    memon = schema.load(raw_memon)
    metadata = jbt.Metadata(
        title=memon["metadata"]["title"],
        artist=memon["metadata"]["artist"],
        audio=Path(memon["metadata"]["audio"]),
        cover=Path(memon["metadata"]["cover"]),
    )
    common_timing = jbt.Timing(
        events=[
            jbt.BPMEvent(time=jbt.BeatsTime(0), BPM=memon["metadata"]["BPM"])
        ],
        beat_zero_offset=jbt.SecondsTime(-memon["metadata"]["offset"]),
    )
    charts: MultiDict[jbt.Chart] = MultiDict()
    for difficulty, memon_chart in memon["data"].items():
        charts.add(
            difficulty,
            jbt.Chart(
                level=memon_chart["level"],
                notes=[
                    _load_memon_note_v0(note, memon_chart["resolution"])
                    for note in memon_chart["notes"]
                ],
            ),
        )

    return jbt.Song(metadata=metadata,
                    charts=charts,
                    common_timing=common_timing)
Example #4
def dump_bgm(audio: Path, timing: song.Timing) -> malody.Sound:
    return malody.Sound(
        beat=beats_to_tuple(song.BeatsTime(0)),
        sound=str(audio),
        vol=100,
        offset=-int(timing.beat_zero_offset * 1000),
        type=malody.SoundType.BACKGROUND_MUSIC,
        isBgm=None,
        x=None,
    )
Example #5
def convert_to_timing_info(self, beat_snap: int = 240) -> song.Timing:
    return song.Timing(
        events=[
            song.BPMEvent(
                time=round_beats(e.beats, beat_snap),
                BPM=fraction_to_decimal(e.BPM),
            )
            for e in self.events_by_beats
        ],
        beat_zero_offset=self.seconds_at(song.BeatsTime(0)),
    )
Example #6
def compute_last_note_beat(notes: List[AnyNote]) -> song.BeatsTime:
    """Returns the last beat at which a note event happens, either a tap note,
    the start of a long note or the end of a long note.

    If we don't take long notes ends into account we might end up with a long
    note end happening after the END tag which will cause jubeat to freeze when
    trying to render the note density graph"""
    note_times = set(n.time for n in notes)
    long_note_ends = set(n.time + n.duration for n in notes
                         if isinstance(n, song.LongNote))
    all_note_times = note_times | long_note_ends
    return max(all_note_times, default=song.BeatsTime(0))
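A small hypothetical case showing why the union matters: a long note can end after the last tap note, and that end beat is the one that must stay before the END tag:

from fractions import Fraction

tap_times = {Fraction(2), Fraction(8)}
long_notes = [(Fraction(6), Fraction(4))]  # (start beat, duration)
long_note_ends = {start + duration for start, duration in long_notes}
# the long note ends at beat 10, later than the last tap note at beat 8
assert max(tap_times | long_note_ends) == Fraction(10)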
Example #7
def test_that_notes_roundtrip(
        notes: List[Union[song.TapNote, song.LongNote]]) -> None:
    timing = song.Timing(
        events=[song.BPMEvent(song.BeatsTime(0), Decimal(120))],
        beat_zero_offset=song.SecondsTime(0),
    )
    chart = song.Chart(
        level=Decimal(0),
        timing=timing,
        notes=sorted(notes, key=lambda n: (n.time, n.position)),
    )
    metadata = song.Metadata("", "", Path(""), Path(""))
    string_io = _dump_memo1_chart("", chart, metadata, timing)
    chart_text = string_io.getvalue()
    parser = Memo1Parser()
    for line in chart_text.split("\n"):
        parser.load_line(line)
    parser.finish_last_few_notes()
    actual = set(parser.notes())
    assert set(notes) == actual
Example #8
def choose_end_beat(notes: List[AnyNote]) -> song.BeatsTime:
    """Leave 2 empty measures (4 beats) after the last event"""
    last_note_beat = compute_last_note_beat(notes)
    measure = last_note_beat - (last_note_beat % 4)
    return measure + song.BeatsTime(2 * 4)
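Worked through with a concrete value, assuming 4-beat measures as in the code: a last note event at beat 9.5 rounds down to measure start 8, so the end beat becomes 16:

from fractions import Fraction

last_note_beat = Fraction(19, 2)                  # beat 9.5
measure = last_note_beat - (last_note_beat % 4)   # beat 8, start of its measure
end_beat = measure + 2 * 4                        # beat 16
assert (measure, end_beat) == (Fraction(8), Fraction(16))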
Example #9
def round_beats(beats: Number, denominator: int = 240) -> song.BeatsTime:
    """Rounds a given beat value to the nearest given division (default to
    nearest 1/240th"""
    nearest = round(beats * denominator)
    return song.BeatsTime(nearest, denominator)
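For example, assuming song.BeatsTime is Fraction-like (its two-argument constructor suggests a numerator/denominator pair), a float close to 4/3 beats snaps to exactly Fraction(4, 3):

from fractions import Fraction

beats = 1.3333                      # imprecise input value
nearest = round(beats * 240)        # 320
snapped = Fraction(nearest, 240)    # reduces to 4/3
assert snapped == Fraction(4, 3)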
Example #10
def tuple_to_beats(b: Tuple[int, int, int]) -> song.BeatsTime:
    return b[0] + song.BeatsTime(b[1], b[2])
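The triple presumably encodes a whole beat plus a fractional part (as in Malody's beat tuples); assuming song.BeatsTime behaves like Fraction, (1, 1, 2) maps to one and a half beats:

from fractions import Fraction

b = (1, 1, 2)                        # whole beats, numerator, denominator
assert b[0] + Fraction(b[1], b[2]) == Fraction(3, 2)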