def test_backward_forward_cone_relations(r, i1, i2):
    lo, hi = relative_lo_hi(r, i1, i2)
    b, f = r.backward_cone(hi), r.forward_cone(lo)
    # TODO
    # assert mdt.utils.intersect(b, f)
    intervals = tuple(zip(b.bot, f.top))
    assert r == mdt.to_rec(intervals=intervals)
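# `relative_lo_hi` is a test helper that is not shown in this listing. A
# minimal sketch, assuming i1 and i2 are fractions in [0, 1] that select two
# points lo <= hi inside the rectangle r (an assumption, not the repository's
# actual definition):
import numpy as np

def relative_lo_hi(r, i1, i2):
    bot, top = np.array(r.bot), np.array(r.top)
    lo = bot + min(i1, i2) * (top - bot)
    hi = bot + max(i1, i2) * (top - bot)
    return tuple(lo), tuple(hi)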
def test_staircase_refinement(xys):
    xs, ys = xys
    f = staircase_oracle(xs, ys)

    # Check bounding box is tight
    max_xy = np.array([max(xs), max(ys)])
    unit_rec = mdt.to_rec(((0, 1), (0, 1)))
    bounding = mdt.bounding_box(unit_rec, f)

    assert all(a >= b for a, b in zip(unit_rec.top, bounding.top))
    assert all(a <= b for a, b in zip(unit_rec.bot, bounding.bot))
    np.testing.assert_array_almost_equal(bounding.top, max_xy, decimal=1)

    refiner = mdt.volume_guided_refinement([unit_rec], oracle=f)
    prev = None
    # Test properties until refined to fixed point
    for i, tagged_rec_set in enumerate(refiner):
        rec_set = set(r for _, r in tagged_rec_set)
        # TODO: assert convergence rather than hard coded limit
        if max(r.volume for r in rec_set) < 1e-1:
            break
        assert i <= 2 * len(xs)
        prev = rec_set

    # TODO: check that the recset contains the staircase
    # Check that the recset refines the previous one
    event(f"len {len(rec_set)}")
    event(f"volume {max(r.volume for r in rec_set)}")
    if len(rec_set) > 1:
        assert all(any(r2 in r1 for r2 in rec_set) for r1 in prev)
def test_refine():
    rec = mdt.to_rec([(0, 1), (0, 1)])
    refiner = _refiner(lambda p: p[0] >= 0.5)
    next(refiner)
    subdivided = refiner.send(rec)
    assert min(r.volume for r in subdivided) > 0
    assert max(r.volume for r in subdivided) < rec.volume
    assert all(r2 in rec for r2 in subdivided)
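# `staircase_oracle` is another helper assumed by the tests in this listing.
# A plausible sketch (an assumption, not necessarily the repository's code):
# accept a point iff it dominates at least one staircase corner (x_i, y_i),
# i.e. iff it lies in the upward closure of the corner set.
def staircase_oracle(xs, ys):
    def oracle(p):
        return any(p[0] >= x and p[1] >= y for x, y in zip(xs, ys))
    return oracle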
Example 4
def normalize_bounds(bounds):
    # `bound_limits` comes from the enclosing scope (see find_bb in the
    # example that defines main() below).
    normalized_bounds = []
    for bound in bounds:
        normalized_bounds.append([
            mdt.to_rec(list(np.array(b[0]) / np.array(bound_limits)))
            for b in bound
        ])
    return normalized_bounds
Example 5
def test_rec_bounds(r):
    lb = mdth.dist_rec_lowerbound(r, r)
    ub = mdth.dist_rec_upperbound(r, r)
    assert 0 == lb
    if r.degenerate:
        assert 0 == ub

    bot, top = np.array(r.bot), np.array(r.top)
    diam = np.linalg.norm(top - bot, ord=float('inf'))
    r2 = mdt.to_rec(zip(bot + (diam + 1), top + (diam + 1)))
    ub = mdth.dist_rec_upperbound(r, r2)
    lb = mdth.dist_rec_lowerbound(r, r2)

    assert lb <= ub
Example 6
def test_staircase_hausdorff_bounds_diag(xys):
    (xs, ys) = xys

    f = [Point2d(x, y) for x, y in zip(*(xs, ys))]
    oracle = staircase_oracle(xs, ys)
    unit_rec = mdt.to_rec([(0, 1), (0, 1)])
    d_true = staircase_hausdorff(f, f)
    d_bounds = mdt.oracle_hausdorff_bounds(unit_rec, oracle, oracle)
    for i, (d, _) in enumerate(d_bounds):
        assert d.bot <= d_true <= d.top
        if d.radius < 1e-2:
            break
        elif i > 7:
            # bounds did not converge within 8 refinement steps
            assert False
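# `staircase_hausdorff` (the ground truth above) is not shown in this listing.
# For rough intuition only, a brute-force Hausdorff distance over just the
# corner points can be sketched as follows; the real helper compares the full
# staircase curves, so the two values generally differ.
import numpy as np
from scipy.spatial.distance import directed_hausdorff

def point_set_hausdorff(f1, f2):
    a = np.array([(p.x, p.y) for p in f1])
    b = np.array([(p.x, p.y) for p in f2])
    return max(directed_hausdorff(a, b)[0], directed_hausdorff(b, a)[0])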
Example 7
def test_staircase_hausdorff_bounds2(xys1, xys2):
    (xs1, ys1), (xs2, ys2) = xys1, xys2

    f1 = [Point2d(x, y) for x, y in zip(*(xs1, ys1))]
    f2 = [Point2d(x, y) for x, y in zip(*(xs2, ys2))]

    o1 = staircase_oracle(xs1, ys1)
    o2 = staircase_oracle(xs2, ys2)
    unit_rec = mdt.to_rec([(0, 1), (0, 1)])
    d_true = staircase_hausdorff(f1, f2)
    d_bounds = mdt.oracle_hausdorff_bounds2([unit_rec], [unit_rec], o1, o2)
    for i, d in enumerate(d_bounds):
        # TODO: tighten; figure out why this slack is required.
        assert d.bot < d_true + 1e-1
        assert d_true < d.top + 1e-1
        assert d.bot <= d.top
        if d.radius < 1e-1:
            break
Example 8


Point2d = namedtuple("Point2d", ['x', 'y'])


class Interval(namedtuple("Interval", ['start', 'end'])):
    def __contains__(self, point):
        return (self.start.x <= point.x <= self.end.x
                and self.start.y <= point.y <= self.end.y)
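
# Illustrative usage of the containment check above (not part of the
# original snippet):
_box = Interval(Point2d(0.0, 0.0), Point2d(1.0, 1.0))
assert Point2d(0.5, 0.25) in _box
assert Point2d(1.5, 0.25) not in _box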


@given(st.lists(GEN_RECS, min_size=1), st.lists(GEN_RECS, min_size=1))
@example([mdt.to_rec(((0, 0.4), (0, 0.4)))],
         [mdt.to_rec(((0.5, 1), (0.5, 1)))])
def test_directed_hausdorff(rec_set1, rec_set2):
    d12, req12 = mdth.directed_hausdorff(rec_set1, rec_set2)
    assert len(req12[0]) > 0
    assert len(req12[1]) > 0
    _d12, _req12 = mdth.directed_hausdorff(*req12)
    assert req12 == _req12
    assert len(req12[0]) <= len(rec_set1)
    assert len(req12[1]) <= len(rec_set2)
    assert d12 == _d12
    event(f"d={d12}")


@given(st.lists(GEN_RECS, min_size=1), st.lists(GEN_RECS, min_size=1))
def test_hausdorff(rec_set1, rec_set2):
Example 9
def to_rec(xs):
    bots = [b for b, _ in xs]
    tops = [max(b + d, 1) for b, d in xs]
    intervals = tuple(zip(bots, tops))
    return mdt.to_rec(intervals=intervals)
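# `GEN_RECS` (used by the @given decorators earlier in this listing) is not
# shown. A plausible hypothesis strategy built on the to_rec helper above,
# assuming 2-D rectangles with coordinates drawn from [0, 1], might be:
import hypothesis.strategies as st

GEN_RECS = st.builds(
    to_rec,
    st.lists(st.tuples(st.floats(0, 1), st.floats(0, 1)),
             min_size=2, max_size=2),
)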
Example 10
def test_gen_incomparables(r, i1, i2):
    lo, hi = relative_lo_hi(r, i1, i2)
    subdivison = list(r.subdivide(mdt.to_rec(zip(lo, hi))))
    assert all(i in r for i in subdivison)
Example 11
import operator
from functools import reduce

import funcy as fn
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from fastdtw import fastdtw
from scipy.spatial.distance import euclidean

import multidim_threshold as mdt
import stl
from stl.load import from_pandas

def dtw_dist(x, y):
    x = from_pandas(x)
    y = from_pandas(y)
    return fastdtw(x['Y'].sample(0.1), y['Y'].sample(0.1), dist=euclidean)[0]


rectangle = mdt.to_rec([(0, 1), (0, 19.799)])

@fn.autocurry
def phi(x, params):
    h, tau = params
    x = x['Y'].slice(tau, None)
    return all(map(lambda y: y[1] <= h, x))

def compute_boundary(trace, eps=0.1):
    refinements = mdt.volume_guided_refinement([rectangle], phi(trace))
    return list(fn.pluck(1, fn.first(fn.dropwhile(
        lambda x: -min(fn.pluck(0, x)) > eps, refinements))))

def boundary(trace):
    np_boundary = np.array(trace)
    df = pd.DataFrame(np_boundary, columns=['X', 'Y']).set_index('X')
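
# Illustrative follow-up (not part of the original snippet): plot the lower
# corners of the rectangles returned by compute_boundary as an approximation
# of the satisfaction boundary. Parameter names (h, tau) follow phi above;
# np and plt are numpy and matplotlib.pyplot from the imports at the top.
def plot_boundary(trace):
    recs = compute_boundary(trace)
    pts = np.array([r.bot for r in recs])
    plt.scatter(pts[:, 0], pts[:, 1], marker='.')
    plt.xlabel('h')
    plt.ylabel('tau')
    plt.show()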
Example 12
def main():
    data_path2 = Path('toy_car_speeds/')
    dfs = [pd.read_csv(p, index_col=0) for p in sorted(data_path2.glob("*.csv"))]

    for i, df in enumerate(dfs):
        df['name'] = i
        df['car speed'] = df.Y
        df['speed'] = df.Y
        df['time'] = df.index

    @fn.autocurry
    def phi(x, params):
        h, tau = params
        x = x['Y'].slice(tau, None)
        return all(map(lambda y: y[1] <= h, x))

    rectangle = mdt.to_rec([(0, 1), (0, 19.999)])

    # @profile
    def compute_boundary(trace, eps=0.1):
        refinements = mdt.volume_guided_refinement([rectangle], phi(trace))
        return list(
            fn.pluck(1,
                     fn.first(
                         fn.dropwhile(lambda x: -min(fn.pluck(0, x)) > eps,
                                      refinements))))

    traces = [from_pandas(df) for df in dfs]

    bounds = list(map(compute_boundary, traces))

    def find_bb(bounds):
        bounds_1D = reduce(operator.concat, bounds)
        lbs = [(np.array(
            [k.bot for k in fn.pluck(i, list(fn.pluck(0, bounds_1D)))])).min()
               for i in range(len(bounds_1D[0]))]
        ubs = [(np.array(
            [k.top for k in fn.pluck(i, list(fn.pluck(0, bounds_1D)))])).max()
               for i in range(len(bounds_1D[0]))]
        return np.array([[ubs[i] - lbs[i]] for i in range(len(lbs))]), lbs, ubs

    bound_limits, lbs, ubs = find_bb(bounds)

    def normalize_bounds(bounds):
        normalized_bounds = []
        for bound in bounds:
            normalized_bounds.append([
                mdt.to_rec(list(np.array(b[0]) / np.array(bound_limits)))
                for b in bound
            ])
        return normalized_bounds

    normalized_bounds = normalize_bounds(bounds)

    @fn.autocurry
    def norm_phi(x, params):
        a, tau = params
        a = a * bound_limits[0]
        tau = tau * bound_limits[1]
        return phi(x, (a, tau))

    # @profile
    def stl_dist(i, j):
        if i == j:
            return 0.0
        itvl = fn.first(
            oracle_hausdorff_bounds2(normalized_bounds[i],
                                     normalized_bounds[j],
                                     norm_phi(traces[i]), norm_phi(traces[j])))
        return sum(itvl) / 2.

    M_stl = np.zeros((6, 6))
    for (i, x), (j, y) in product(enumerate(traces), enumerate(traces)):
        M_stl[i, j] = stl_dist(i, j)

    M_stl = M_stl / np.max(M_stl)
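
    # A possible follow-up (not part of the original snippet): visualize the
    # normalized 6x6 distance matrix; plt is assumed to be matplotlib.pyplot.
    plt.imshow(M_stl, cmap='viridis')
    plt.colorbar(label='normalized STL distance')
    plt.title('Pairwise STL-based distances between car-speed traces')
    plt.show()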