Example #1
def classify(request):
    C = json.loads(request.POST["C"])
    try:
        features, labels = get_multi_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    learn = "No"
    values = []

    try:
        domain = json.loads(request.POST['axis_domain'])
        x, y, z = svm.classify_svm(sg.GMNPSVM, features, labels, kernel,
                                   domain, learn, values, C, False)
    except Exception as e:
        return HttpResponse(json.dumps({"status": repr(e)}))

#    z = z + np.random.rand(*z.shape) * 0.01

    z_max = np.nanmax(z)
    z_min = np.nanmin(z)
    z_delta = 0.1 * (np.nanmax(z) - np.nanmin(z))
    data = {"status": "ok",
            "domain": [z_min-z_delta, z_max+z_delta],
            "max": z_max+z_delta,
            "min": z_min-z_delta,
            "z": z.tolist()}

    return HttpResponse(json.dumps(data))
Example #2
def classify(request):
    C = json.loads(request.POST["C"])
    try:
        features, labels = get_multi_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    learn = "No"
    values = []

    try:
        domain = json.loads(request.POST['axis_domain'])
        x, y, z = svm.classify_svm(sg.GMNPSVM, features, labels, kernel,
                                   domain, learn, values, C, False)
    except Exception as e:
        return HttpResponse(json.dumps({"status": repr(e)}))

#    z = z + np.random.rand(*z.shape) * 0.01

    z_max = np.nanmax(z)
    z_min = np.nanmin(z)
    z_delta = 0.1 * (np.nanmax(z) - np.nanmin(z))
    data = {
        "status": "ok",
        "domain": [z_min - z_delta, z_max + z_delta],
        "max": z_max + z_delta,
        "min": z_min - z_delta,
        "z": z.tolist()
    }

    return HttpResponse(json.dumps(data))
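
Every parse step in the views above ends the same way: on failure the view returns a JSON body of the form {"status": <message>}. A small helper along the following lines (a sketch, not part of the original code) could centralize that envelope:

import json
from django.http import HttpResponse

def json_status(message):
    # Wrap an error or status message in the {"status": ...} JSON envelope
    # used by these views.
    return HttpResponse(json.dumps({"status": message}))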
Example #3
def classify(request):
    try:
        features, labels = get_binary_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        lik = get_likelihood(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        learn = request.POST["learn"]
    except KeyError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        scale = float(request.POST["scale"])
    except (KeyError, ValueError):
        raise ValueError("Scale is not correct")
    try:
        domain = json.loads(request.POST['axis_domain'])
        x, y, z, width, param, best_scale = gaussian_process.classify_gp(
            features, labels, kernel, domain, lik, learn, scale)
    except Exception as e:
        return HttpResponse(json.dumps({"status": repr(e)}))

    return HttpResponse(json.dumps({ 'status': 'ok',
                                     'best_width': float(width),
                                     'best_param': float(param),
                                     'best_scale': float(best_scale),
                                     'domain': [np.min(z), np.max(z)],
                                     'z': z.tolist() }))
Example #4
def classify(request):
    value = []
    try:
        features, labels = get_binary_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    try:
        learn = request.POST["learn"]
    except KeyError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    if kernel.get_name() == 'PolyKernel' and learn == "GridSearch":
        value.append(int(request.POST["polygrid1"]))
        value.append(int(request.POST["polygrid2"]))
        if value[1] <= value[0]:
            return HttpResponse(json.dumps({"status": "Bad values"}))

    try:
        C = float(request.POST["C"])
        domain = json.loads(request.POST['axis_domain'])
        x, y, z = svm.classify_svm(sg.LibSVM, features, labels, kernel,
                                   domain, learn, value, C=C)
    except Exception as e:
        import traceback
        return HttpResponse(json.dumps({"status": repr(traceback.format_exc(0))}))

    return HttpResponse(json.dumps({ 'status': 'ok',
                                     'domain': [np.min(z), np.max(z)],
                                     'z': z.tolist() }))
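
For reference, a minimal sketch of exercising a view like this with Django's test client. The URL path and the "point_set" field consumed by get_binary_features are assumptions; "C", "axis_domain", and "learn" are the fields the view reads explicitly, and extra fields expected by get_kernel would also be required:

import json
from django.test import Client

client = Client()
response = client.post("/classify", {
    "C": "1.0",
    "learn": "No",
    "axis_domain": json.dumps({"horizontal": [-5, 5], "vertical": [-5, 5]}),  # assumed structure
    "point_set": json.dumps([{"x": 0.5, "y": 1.2, "label": 1}]),              # assumed field name
})
print(json.loads(response.content)["status"])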
Example #5
def _read_toy_data(request):
    y_set = []
    x_set = []
    toy_data = json.loads(request.POST['point_set'])
    for pt in toy_data:
        y_set.append(float(pt["y"]))
        x_set.append(float(pt["x"]))
    noise_level = float(request.POST['noise_level'])
    scale = float(request.POST['scale'])
    domain = json.loads(request.POST['axis_domain'])

    labels = np.array(y_set, dtype=np.float64)
    num = len(x_set)
    if num == 0:
        raise Http404
    examples = np.zeros((1, num))
    for i in xrange(num):
        examples[0, i] = x_set[i]
    feat_train = sg.RealFeatures(examples)
    labels = sg.RegressionLabels(labels)
    kernel = get_kernel(request, feat_train)
    try:
        learn = request.POST["learn"]
    except KeyError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    return (feat_train, labels, noise_level, scale, kernel, domain, learn)
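
The element-by-element copy into `examples` in _read_toy_data can be expressed as a single numpy call with the same (1, num) result; a minimal sketch with sample data:

import numpy as np

x_set = [0.1, 0.7, 1.3]                                      # sample x coordinates
examples = np.array(x_set, dtype=np.float64).reshape(1, -1)  # shape (1, len(x_set)), one row of x values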
Example #6
def _read_toy_data(request):
    y_set = []
    x_set = []
    x_set_induc = []
    points = []
    points_induc = []
    model_sel_error = False
    toy_data = json.loads(request.POST['point_set'])
    for pt in toy_data:
        if int(pt['label']) == 1:
            points.append(pt)
        elif int(pt['label']) == -1:
            points_induc.append(pt)

    for pt in points:
        y_set.append(float(pt["y"]))
        x_set.append(float(pt["x"]))

    for pt in points_induc:
        x_set_induc.append(float(pt["x"]))

    noise_level = float(request.POST['noise_level'])
    scale = float(request.POST['scale'])
    inf = request.POST['inf']
    domain = json.loads(request.POST['axis_domain'])

    labels = np.array(y_set, dtype=np.float64)
    num = len(x_set)
    if num == 0:
        raise Http404
    examples = np.zeros((1, num))
    for i in xrange(num):
        examples[0, i] = x_set[i]
    feat_train = sg.RealFeatures(examples)
    labels = sg.RegressionLabels(labels)

    #Get inducing points
    num_induc = len(x_set_induc)

    if num_induc != 0:
        examples_induc = np.zeros((1, num_induc))
        for i in xrange(num_induc):
            examples_induc[0, i] = x_set_induc[i]
        feat_train_induc = sg.RealFeatures(examples_induc)
    elif num_induc == 0:
        feat_train_induc = None

    kernel = get_kernel(request, feat_train)
    try:
        learn = request.POST["learn"]
    except KeyError:
        raise ValueError("Argument Error")

    if int(feat_train.get_num_vectors()) > 100 and learn == "ML2":
        model_sel_error = True

    return (feat_train, labels, noise_level, scale, kernel, domain, learn,
            feat_train_induc, inf), model_sel_error
Example #7
def _read_toy_data(request):
    y_set = []
    x_set = []
    x_set_induc = []
    points = []
    points_induc = []
    model_sel_error = False
    toy_data = json.loads(request.POST['point_set'])
    for pt in toy_data:
        if int(pt['label']) == 1:
            points.append(pt)
        elif int(pt['label']) == -1:
            points_induc.append(pt)

    for pt in points:
        y_set.append(float(pt["y"]))
        x_set.append(float(pt["x"]))

    for pt in points_induc:
        x_set_induc.append(float(pt["x"]))

    noise_level = float(request.POST['noise_level'])
    scale = float(request.POST['scale'])
    inf = request.POST['inf']
    domain = json.loads(request.POST['axis_domain'])

    labels = np.array(y_set, dtype=np.float64)
    num = len(x_set)
    if num == 0:
        raise Http404
    examples = np.zeros((1, num))
    for i in xrange(num):
        examples[0, i] = x_set[i]
    feat_train = sg.RealFeatures(examples)
    labels = sg.RegressionLabels(labels)

    #Get inducing points
    num_induc = len(x_set_induc)

    if num_induc != 0:
        examples_induc = np.zeros((1, num_induc))
        for i in xrange(num_induc):
            examples_induc[0, i] = x_set_induc[i]
        feat_train_induc = sg.RealFeatures(examples_induc)
    elif num_induc == 0:
        feat_train_induc = None

    kernel = get_kernel(request, feat_train)
    try:
        learn = request.POST["learn"]
    except KeyError:
        raise ValueError("Argument Error")

    if int(feat_train.get_num_vectors()) > 100 and learn == "ML2":
        model_sel_error = True

    return (feat_train, labels, noise_level, scale, kernel, domain, learn,
            feat_train_induc, inf), model_sel_error
Example #8
def classify(request):
    value = []
    try:
        features, labels = get_binary_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    try:
        learn = request.POST["learn"]
    except KeyError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    if int(features.get_num_vectors()) > 100 and learn == "GridSearch":
        return HttpResponse(
            json.dumps({
                "status":
                ("Model Selection "
                 "allowed only for less than 100 samples due to computational costs"
                 )
            }))

    if kernel.get_name() == 'PolyKernel' and learn == "GridSearch":
        value.append(int(request.POST["polygrid1"]))
        value.append(int(request.POST["polygrid2"]))
        if value[1] <= value[0]:
            return HttpResponse(json.dumps({"status":
                                            "Bad values for degree"}))

    try:
        C = float(request.POST["C"])
        domain = json.loads(request.POST['axis_domain'])
        x, y, z = svm.classify_svm(sg.LibSVM,
                                   features,
                                   labels,
                                   kernel,
                                   domain,
                                   learn,
                                   value,
                                   C=C)
    except Exception as e:
        import traceback
        return HttpResponse(
            json.dumps({"status": repr(traceback.format_exc(0))}))

    return HttpResponse(
        json.dumps({
            'status': 'ok',
            'domain': [np.min(z), np.max(z)],
            'z': z.tolist()
        }))
Example #9
def classify(request):
    try:
        features, labels = get_binary_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        lik = get_likelihood(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        learn = request.POST["learn"]
    except KeyError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    if int(features.get_num_vectors()) > 100 and learn == "ML2":
        return HttpResponse(
            json.dumps({
                "status":
                ("Model Selection "
                 "allowed only for less than 100 samples due to computational costs"
                 )
            }))

    try:
        scale = float(request.POST["scale"])
    except (KeyError, ValueError):
        raise ValueError("Scale is not correct")
    try:
        domain = json.loads(request.POST['axis_domain'])
        x, y, z, width, param, best_scale = gaussian_process.classify_gp(
            features, labels, kernel, domain, lik, learn, scale)
    except Exception as e:
        return HttpResponse(json.dumps({"status": repr(e)}))

    return HttpResponse(
        json.dumps({
            'status': 'ok',
            'best_width': float(width),
            'best_param': float(param),
            'best_scale': float(best_scale),
            'domain': [np.min(z), np.max(z)],
            'z': z.tolist()
        }))
Example #10
def classify(request):
    try:
        features, labels = get_binary_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    try:
        C = float(request.POST["C"])
        domain = json.loads(request.POST['axis_domain'])
        x, y, z = svm.classify_svm(sg.LibSVM, features, labels, kernel, domain, C=C)
    except Exception as e:
        import traceback
        return HttpResponse(json.dumps({"status": repr(traceback.format_exc())}))

    return HttpResponse(json.dumps({ 'status': 'ok',
                                     'domain': [np.min(z), np.max(z)],
                                     'z': z.tolist() }))
Example #11
def _read_toy_data(request):
    y_set = []
    x_set = []
    toy_data = json.loads(request.POST['point_set'])
    for pt in toy_data:
        y_set.append(float(pt["y"]))
        x_set.append(float(pt["x"]))
    noise_level = float(request.POST['noise_level'])
    domain = json.loads(request.POST['axis_domain'])

    labels = np.array(y_set, dtype=np.float64)
    num = len(x_set)
    if num == 0:
        raise Http404
    examples = np.zeros((1, num))
    for i in xrange(num):
        examples[0, i] = x_set[i]
    feat_train = sg.RealFeatures(examples)
    labels = sg.RegressionLabels(labels)
    kernel = get_kernel(request, feat_train)
    return (feat_train, labels, noise_level, kernel, domain)
Example #12
def classify(request):
    try:
        features, labels = get_binary_features(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        kernel = get_kernel(request, features)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        lik = get_likelihood(request)
    except ValueError as e:
        return HttpResponse(json.dumps({"status": e.message}))
    try:
        learn = request.POST["learn"]
    except KeyError as e:
        return HttpResponse(json.dumps({"status": e.message}))

    if int(features.get_num_vectors()) > 100 and learn == "ML2":
        return HttpResponse(json.dumps({"status": ("Model Selection "
            "allowed only for less than 100 samples due to computational costs")}))

    try:
        scale = float(request.POST["scale"])
    except (KeyError, ValueError):
        raise ValueError("Scale is not correct")
    try:
        domain = json.loads(request.POST['axis_domain'])
        x, y, z, width, param, best_scale = gaussian_process.classify_gp(
            features, labels, kernel, domain, lik, learn, scale)
    except Exception as e:
        return HttpResponse(json.dumps({"status": repr(e)}))

    return HttpResponse(json.dumps({ 'status': 'ok',
                                     'best_width': float(width),
                                     'best_param': float(param),
                                     'best_scale': float(best_scale),
                                     'domain': [np.min(z), np.max(z)],
                                     'z': z.tolist() }))
Example #13
def _read_data(request):
    labels = []
    features = []
    data = json.loads(request.POST['point_set'])
    cost = float(request.POST['C'])
    tubeeps = float(request.POST['tube'])
    kernel_name = request.POST['kernel']
    for pt in data:
        labels.append(float(pt["y"]))
        features.append(float(pt["x"]))
    labels = np.array(labels, dtype=np.float64)
    num = len(features)
    if num == 0:
        raise TypeError
    examples = np.zeros((1, num))

    for i in xrange(num):
        examples[0, i] = features[i]

    lab = sg.RegressionLabels(labels)
    train = sg.RealFeatures(examples)
    kernel = get_kernel(request, train)
    return (cost, tubeeps, lab, kernel)