# Example #1
# 0
def fruits_vs_chairs_L3(template=None):
    """Evaluate fruits-vs-chairs classification on dataset 1.

    :param template: Template of features to sample from; when ``None``,
        falls back to ``devthor_new_new_params.l3_params``.
    :return: scope evaluation of fruits vs chair classification for dataset 1
    """
    dataset = dataset1()

    def _is_fruit_or_chair(x):
        # Keep only fruit/chair exemplars drawn from obj_set1, and drop
        # anything listed in the module-level Broken_objects set.
        if x['category'] not in frozenset(['fruits', 'chairs']):
            return False
        if x['obj'] not in frozenset(dataset.obj_set1):
            return False
        return x['obj'] not in frozenset(Broken_objects)

    if template is None:
        template = devthor_new_new_params.l3_params

    eval_config = {
        'npc_train': 40,
        'npc_test': 40,
        'npc_validate': 0,
        'num_splits': 4,
        'split_by': 'category',
        'labelfunc': 'category',
        'train_q': _is_fruit_or_chair,
        'test_q': _is_fruit_or_chair,
        'metric_screen': 'classifier',
        'metric_kwargs': {'model_type': 'MCC2'},
    }
    return scope.dp_sym_loss(template, dataset, eval_config)
def get_detection_windows(args):
    """Return indices at which the fast/slow histogram KS statistic exceeds threshold.

    :param args: tuple of ``(burnin, run_length, alpha_count_slow,
        alpha_count_fast, alpha_mu_slow, alpha_mu_fast, buckets_slow,
        buckets_fast, threshold)``.  ``burnin`` and ``run_length`` are
        unpacked but currently unused (the burn-in loop is disabled).
    :return: list of iteration indices whose KS distance exceeds ``threshold``
    """
    (burnin, run_length, alpha_count_slow, alpha_count_fast,
     alpha_mu_slow, alpha_mu_fast, buckets_slow, buckets_fast,
     threshold) = args

    # Per-process seed so parallel workers do not share RNG state.
    np.random.seed((os.getpid() << 16) | (int(time.time()) & 0xFFFF))

    def _make_histogram(alpha_mu, alpha_count, target_buckets):
        # Build and instantiate a rolling-histogram with the given decay rates.
        bucket_cls = create_bucket_class(alpha_mu=alpha_mu,
                                         alpha_count=alpha_count)
        return create_rolling_histogram_class(
            Bucket=bucket_cls, target_buckets=target_buckets)()

    rh_slow = _make_histogram(alpha_mu_slow, alpha_count_slow, buckets_slow)
    rh_fast = _make_histogram(alpha_mu_fast, alpha_count_fast, buckets_fast)

    detections = []
    for i, val in enumerate(datasets.dataset1()):
        rh_slow.update(val)
        rh_fast.update(val)
        # Compare the slowly- and quickly-decaying views of the stream;
        # a large KS distance marks a candidate change point.
        if get_KS(rh_slow.get_CDF(), rh_fast.get_CDF()) > threshold:
            detections.append(i)
    return detections
import os
import sys
import time

import numpy as np
from scipy import stats

import datasets
import decay_equations
from roll import create_rolling_histogram_class, create_bucket_class
from roll import gen_value, get_KS
from show_density import ergodic_chain, simple_histogram


# Reference inputs for the change-detection experiments.
# NOTE(review): `stats` was used here without ever being imported, which
# made the module fail at import time; `from scipy import stats` is now
# pulled in with the other top-of-file imports.
dist0 = stats.norm(0, 1)      # baseline: frozen standard-normal distribution
#dist1 = stats.norm(0, 1.5)
dist1 = datasets.dataset1()   # drifted stream taken from the project dataset


def get_detection_windows(args):
    (burnin, run_length, alpha_count_slow, alpha_count_fast,
     alpha_mu_slow, alpha_mu_fast, buckets_slow, buckets_fast,
     threshold) = args

    np.random.seed((os.getpid() << 16) | (int(time.time()) & 0xFFFF))
    rh_slow = create_rolling_histogram_class(
            Bucket=create_bucket_class(
                alpha_mu=alpha_mu_slow, alpha_count=alpha_count_slow),
            target_buckets=buckets_slow)()
    rh_fast = create_rolling_histogram_class(
            Bucket=create_bucket_class(
                alpha_mu=alpha_mu_fast, alpha_count=alpha_count_fast),
    dists = [stats.expon(-2, 2), stats.norm(4, 2), stats.norm(-6, 1)]
    upper = 10
    for i in range(upper):
        print "Generating data... {0} / {1}\r".format(i, upper),
        sys.stdout.flush()
        data = []
        rh_slow = create_rolling_histogram_class(
                Bucket=create_bucket_class(
                    alpha_mu=0.005, alpha_count=0.005),
                target_buckets=200)()
        rh_fast = create_rolling_histogram_class(
                Bucket=create_bucket_class(
                    alpha_mu=0.005, alpha_count=0.01),
                target_buckets=75)()

        for val in datasets.dataset1():
            rh_slow.update(val)
            rh_fast.update(val)
            cdf_long = rh_slow.get_CDF()
            cdf_short = rh_fast.get_CDF()
            data.append(calc_jaccard_distance(cdf_long, cdf_short))

    #   #for val in gen_value(dists[:2], size):
    #   for val in gen_value([stats.norm(0, 1)], size):
    #       rh_slow.update(val)
    #       rh_fast.update(val)
    #       cdf_long = rh_slow.get_CDF()
    #       cdf_short = rh_fast.get_CDF()
    #       data.append(calc_KS(cdf_long, cdf_short))

    #   #for val in gen_value(dists[2:3], size):