Example 1
    def rate_partial(self,
                     partial: sndtrck.Partial,
                     maxrange: int,
                     minmargin: float = None) -> float:
        """
        Rates how good this partial fits in this track. Returns > 0 if partial fits,
        the value returned indicates how good it fits
        """
        if minmargin is None:
            minmargin = self.mingap
        assert minmargin is not None

        isempty = self.isempty()
        if isempty:
            margin = partial.t0
        elif not self.partial_fits(partial.t0, partial.t1):
            return -1
        else:
            partial_left_idx = self.partial_left_to(partial.t0)
            if partial_left_idx is None:
                assert all(p.t0 > partial.t1 for p in self.partials)
                margin = partial.t0

            else:
                # The found partial is either the last partial in this track, or
                # the partial following it should start AFTER the partial being rated
                assert (partial_left_idx == len(self.partials) - 1
                        or self.partials[partial_left_idx + 1].t0 > partial.t1)
                p0 = self.partials[partial_left_idx]
                # Partials in a track are packed towards the left, so the relevant
                # margin is the gap between the previous partial's end and this one's start
                margin = partial.t0 - p0.t1
                assert margin >= minmargin

        # Try to pack as tight as possible
        margin_rating = bpf.halfcos(minmargin, 1, 1, 0.01, exp=0.6)(margin)

        margin_weight, range_weight, wrange_weight = 3, 1, 1

        if isempty:
            return euclidian_distance(
                [margin_rating, 1, 1],
                [margin_weight, range_weight, wrange_weight])

        trackminnote, trackmaxnote = self.track_range()
        minnote = f2m(partial.minfreq)
        maxnote = f2m(partial.maxfreq)
        range_with_note = max(trackmaxnote, maxnote) - min(
            trackminnote, minnote)
        if range_with_note > maxrange:
            return -1
        range_rating = bpf.expon(0, 1, maxrange, 0.0001,
                                 exp=1)(range_with_note)
        avgpitch = self.avgpitch()
        avgdiff = abs(avgpitch - f2m(partial.meanfreq_weighted))
        wrange_rating = bpf.halfcos(0, 1, maxrange, 0.0001, exp=0.5)(avgdiff)
        total = euclidian_distance(
            [margin_rating, range_rating, wrange_rating],
            [margin_weight, range_weight, wrange_weight])
        return total
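The total rating combines the margin rating with two range ratings through a weighted distance. As a minimal illustration (hypothetical values for minmargin and maxrange, assuming the bpf4 package used throughout these examples, which returns a curve's edge value outside its bounds), the sketch below shows how the two breakpoint curves map a time gap and a pitch span onto scores between roughly 0.01 and 1: gaps close to minmargin and partials that barely widen the track's range score highest.

import bpf4 as bpf

minmargin, maxrange = 0.1, 36   # hypothetical values

# Margin rating: 1.0 right at the minimum allowed gap, decaying towards 0.01
# for gaps of one second or more
margin_curve = bpf.halfcos(minmargin, 1, 1, 0.01, exp=0.6)
print(margin_curve(0.1), margin_curve(0.5), margin_curve(2.0))

# Range rating: 1.0 if adding the partial keeps the range at 0 semitones,
# approaching 0 as the combined range approaches maxrange
range_curve = bpf.expon(0, 1, maxrange, 0.0001, exp=1)
print(range_curve(0), range_curve(12), range_curve(36))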
Example 2
def _ascurve(curve) -> Opt[bpf.BpfInterface]:
    if isinstance(curve, bpf.BpfInterface):
        return curve
    elif isinstance(curve, (int, float)):
        return bpf.expon(0, 0, 1, 1, exp=curve)
    elif curve is None:
        return None
    else:
        raise TypeError(
            f"curve should be a bpf, the exponent of a bpf, or None, got {curve}"
        )
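For reference, a brief usage sketch of _ascurve (values chosen for illustration): a number is interpreted as the exponent of an exponential curve over the unit interval, a bpf is returned unchanged, and None passes through.

curve = _ascurve(2.0)             # equivalent to bpf.expon(0, 0, 1, 1, exp=2.0)
print(curve(0.25), curve(0.5))    # roughly 0.0625 and 0.25 for this exponent
assert _ascurve(curve) is curve
assert _ascurve(None) is None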
Example 3
def _test_distribute_weighted_streams():
    A = "AAAAAAAAAAAAAAAAAAAAAAAA"
    C = "CCCCC"
    D = "DDD"

    streams = (A, C, D)
    stream_quantities = (len(A), len(C), len(D))
    weight_bpfs = (bpf.linear(0, 1, 1, 1),     # weight bpfs must be defined over the unit interval [0, 1]
                   bpf.halfcos(0, 0, 0.5, 1, 1, 0),
                   bpf.expon(0, 0, 1, 1, exp=3)
                   )
    distributed_frames = distribute_weighted_streams(stream_quantities, weight_bpfs)
    for frame in distributed_frames:
        print(streams[frame.stream][frame.index_in_stream])
Example 4
def fib_curve_between(n, y0, y1, N=5):
    h = bpf.fib(0, y0, 1, y1)

    def func(r: float) -> float:
        # Warp the unit ramp with exponent r, compose it with the fib curve,
        # sample it, and measure how far the ratios between consecutive samples
        # deviate from the golden ratio
        b = bpf.expon(0, 0, 1, 1, exp=r) | h
        samples = b.map(N)
        ratios = samples[1:] / samples[:-1]
        dif = abs(ratios - PHI).sum()
        return dif

    try:
        r = _brentq(func, 0, 20)
        out = bpf.expon(0, 0, 1, 1, exp=r)|h
    except ArithmeticError:
        out = None
    return out
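A possible usage sketch (arguments chosen arbitrarily): the search looks for an exponent r such that consecutive samples of the warped curve grow roughly by the golden ratio, returning the composed curve, or None if the root finding fails.

curve = fib_curve_between(5, 1, 13)    # hypothetical arguments
if curve is not None:
    print(curve.map(5))                # samples whose successive ratios approach PHI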
Example 5
    lines = f.readlines()[1:]
    out = []
    for line in lines:
        freq, level = list(map(float, line.split()))
        out.append(Bin(freq, level))
    return out


_dbToStepCurve = bpf.expon(
    -120, 0,
    -60, 0.0,
    -40, 0.1,
    -30, 0.4,
    -18, 0.9,
    -6, 1,
    0, 1,
    exp=0.3333333)


def dbToStep(db: float, numsteps: int) -> int:
    """
    Used by readSpectrumAsChords to convert the db value of each bin to
    a histogram step
    """
    return int(_dbToStepCurve(db) * numsteps)
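A worked example against the breakpoints above: -30 dB lies exactly on a breakpoint with value 0.4, so with 10 steps it should map to step 4, while anything at or below -60 dB collapses to step 0.

print(dbToStep(-30, 10))   # expected: 4  (0.4 * 10)
print(dbToStep(-18, 10))   # expected: 9  (0.9 * 10)
print(dbToStep(-60, 10))   # expected: 0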
Example 6
def _pack(spectrum: sndtrck.Spectrum,
          numtracks: int,
          weighter: PartialWeighter,
          maxrange: int,
          minmargin: float,
          chanexp: float,
          method: str,
          numchannels=-1,
          minfreq=120.0,
          maxfreq=4500.0) -> Tup[List[Track], List[sndtrck.Partial]]:
    """
    Pack the partials in spectrum into `numtracks` Tracks.
    
    numchannels: if negative, a sensible default will be chosen
    minfreq, maxfreq: these are used to calculate the channelisation of the spectrum
                      NB: Partials lower than minfreq will still be included in the first channel,
                          Partials higher than maxfreq will still be included in the last channel
    minmargin: time-gap between Partials (should be bigger than 0)
    chanexp: an exponential deterining the distribution of channels across minfreq-maxfreq
    maxrange: the maximum range (in midi notes) a voice can hold
    
    Returns (tracks, rejectedpartials)
    """
    if numchannels < 0:
        numchannels = int(numtracks / 2 + 0.5)
    numchannels = min(numtracks, numchannels)
    weighter = weighter or _PARTIALWEIGHTER
    chanFreqCurve = bpf.expon(0,
                              f2m(minfreq * 0.9),
                              1,
                              f2m(maxfreq),
                              exp=chanexp).m2f()
    # splitpoints = [10] + list(chanFreqCurve.map(numchannels))
    splitpoints = list(chanFreqCurve.map(numchannels + 1))
    channels = [
        Channel(f0, f1, weighter=weighter) for f0, f1 in pairwise(splitpoints)
    ]

    logger.debug(
        f"_pack: Enumerating Channels (numtracks: {numtracks}, numchannels: {numchannels}, chanexp: {chanexp})"
    )

    for partial in spectrum:
        for ch in channels:
            if ch.freq0 <= partial.meanfreq_weighted < ch.freq1:
                ch.append(partial)
                break

    chanWeights = [ch.weight() for ch in channels]
    # Each channel should have at least 1 track
    numtracksPerChan = [
        numtracks + 1
        for numtracks in dohndt(numtracks - numchannels, chanWeights)
    ]
    tracks = []  # type: List[Track]
    rejected0 = []  # type: List[sndtrck.Partial]
    for ch, tracksPerChan in zip(channels, numtracksPerChan):
        ch.pack(tracksPerChan,
                maxrange=maxrange,
                minmargin=minmargin,
                method=method)
        tracks.extend(ch.tracks)
        rejected0.extend(ch.rejected)
    for ch in channels:
        packedPartials = sum(len(track) for track in ch.tracks)
        logger.debug(
            f"    Channel: {ch.freq0:.0f}-{ch.freq1:.0f}Hz  # tracks: {len(ch.tracks)}, # partials: {len(ch.partials)}, packed: {packedPartials}"
        )

    # Try to fit rejected partials
    # rejected0.sort(key=lambda par:weighter.partialweight(par), reverse=True)
    # rejected = []  # type: List[sndtrck.Partial]
    rejected = rejected0
    rejected1 = []
    for partial in rejected:
        track = get_best_track(tracks,
                               partial,
                               maxrange=maxrange * 0.7,
                               minmargin=minmargin)
        if track is not None:
            track.add_partial(partial)
        else:
            rejected1.append(partial)
    # Only the partials which could not be placed in any track remain rejected
    rejected = rejected1
    tracks = [track for track in tracks if len(track) > 0]
    logger.debug(
        f"$$$$$ num. tracks: {len(tracks)}, num partials: {sum(len(track) for track in tracks)}, rejected: {len(rejected)}"
    )

    def trackweight(track):
        return (sum(p.meanfreq_weighted * p.duration
                    for p in track) / sum(p.duration for p in track))

    tracks.sort(key=trackweight)
    assert all(isinstance(track, Track) for track in tracks)
    return tracks, rejected
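To make the channelisation step above easier to follow, here is a hedged, self-contained sketch with hypothetical parameters. f2m and pairwise are local stand-ins for the helpers the module imports (standard frequency-to-midi conversion and consecutive pairing). The split points are sampled from an exponential curve in midi space and converted back to Hz with .m2f(); with chanexp > 1 the lower channels span fewer midi notes than the upper ones.

import math
import bpf4 as bpf

def f2m(freq: float) -> float:
    # standard frequency -> midi conversion (A4 = 440 Hz = midi 69)
    return 69 + 12 * math.log2(freq / 440)

def pairwise(seq):
    # consecutive pairs: (s0, s1), (s1, s2), ...
    return zip(seq, seq[1:])

# minfreq/maxfreq mirror the defaults of _pack; chanexp and numchannels are hypothetical
minfreq, maxfreq, chanexp, numchannels = 120.0, 4500.0, 1.5, 4

chanFreqCurve = bpf.expon(0, f2m(minfreq * 0.9), 1, f2m(maxfreq), exp=chanexp).m2f()
splitpoints = list(chanFreqCurve.map(numchannels + 1))
print([f"{f0:.0f}-{f1:.0f} Hz" for f0, f1 in pairwise(splitpoints)])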
Example 7
def partition_with_curve(x, numpart, curve, method='brentq', return_exp=False, excluded=()):
    """
    Partition `x` into `numpart` parts following the given bpf

    x : float     --> the value to partition
    numpart : int --> the number of partitions
    curve : bpf   --> the curve to follow.
                      It does not matter over which x interval it is defined;
                      the y coord defines the width of each partition (see example)
    return_exp : bool --> | False -> the return value is the list of the partitions
                          | True  -> the return value is a tuple containing the list
                                     of the partitions and the exponent of the
                                     weighting function

    Returns: the list of the partitions

    Example
    =======

    # Partition the value 45 into 7 partitions following the given curve
    >>> import bpf4 as bpf
    >>> curve = bpf.halfcos2(0, 11, 1, 0.5, exp=0.5)
    >>> distr = partition_with_curve(45, 7, curve)
    >>> distr
    array([ 11.        ,  10.98316635,  10.4796218 ,   7.89530421,
             3.37336152,   0.76854613,   0.5       ])
    >>> abs(sum(distr) - 45) < 0.001
    True
    """
    x0, x1 = curve.bounds()
    n = x

    def func(r):
        return sum((bpf.expon(x0, x0, x1, x1, exp=r)|curve).map(numpart)) - n
    try:
        if method == 'brentq':
            r = _brentq(func, x0, x1)
            curve = bpf.expon(x0, x0, x1, x1, exp=r)|curve
            parts = curve.map(numpart)
        elif method == 'fsolve':
            xs = np.linspace(x0, x1, 100)
            rs = [round(float(_fsolve(func, x)), 10) for x in xs]
            rs = set(r for r in rs if x0 <= r <= x1 and r not in excluded)
            parts = []
            for r in rs:
                curve = bpf.expon(x0, x0, x1, x1, exp=r)|curve
                parts0 = curve.map(numpart)
                parts.extend(parts0)
    except ValueError:
        minvalue = curve(bpf.minimum(curve))
        maxvalue = curve(bpf.maximum(curve))
        if n/numpart < minvalue:
            s = """
        no solution can be found for the given parameters. x is too small
        for the possible values given in the bpf, for this amount of partitions
        try either giving a bigger x, lowering the number of partitions or
        allowing smaller possible values in the bpf
                """
        elif n/numpart > maxvalue:
            s = """
            no solution can be found for the given parameters. x is too big
        for the possible values given in the bpf. try either giving a
        smaller x, increasing the number of partitions or allowing bigger
        possible values in the bpf
                """
        else:
            s = """???"""
        ERROR['partition_with_curve.func'] = func
        raise ValueError(s)
    if abs(sum(parts) - n)/n > 0.001:
        print("Error exceeds threshold: ", parts, sum(parts))
    if return_exp:
        return parts, r
    return parts
Example 8
def asCurve(curve) -> bpf.BpfInterface:
    if isinstance(curve, (int, float)):
        return bpf.expon(0, 0, 1, 1, exp=curve)
    return bpf.asbpf(curve)