Example No. 1
from bigml.api import BigML
api = BigML()

# The snippet was truncated above; the missing lines are reconstructed here.
# Fields 000000 and 000001 are assumed to be the iris sepal fields, matching
# the field ids used in the rest of these examples.
source1_file = "iris.csv"
args = \
    {u'fields': {u'000000': {u'name': u'sepal length', u'optype': u'numeric'},
                 u'000001': {u'name': u'sepal width', u'optype': u'numeric'},
                 u'000002': {u'name': u'petal length', u'optype': u'numeric'},
                 u'000003': {u'name': u'petal width', u'optype': u'numeric'},
                 u'000004': {u'name': u'species',
                             u'optype': u'categorical',
                             u'term_analysis': {u'enabled': True}}}}
source2 = api.create_source(source1_file, args)
api.ok(source2)

# Use field 000004 (species) as the dataset's objective field.
args = \
    {u'objective_field': {u'id': u'000004'}}
dataset1 = api.create_dataset(source2, args)
api.ok(dataset1)

# Fixed seeds keep the detector's random sampling reproducible across runs.
args = \
    {u'anomaly_seed': u'bigml', u'seed': u'bigml'}
anomaly1 = api.create_anomaly(dataset1, args)
api.ok(anomaly1)

# fields_map pairs the scoring dataset's fields with the detector's fields;
# output_dataset stores the scores as a new dataset.
args = \
    {u'fields_map': {u'000000': u'000000',
                     u'000001': u'000001',
                     u'000002': u'000002',
                     u'000003': u'000003',
                     u'000004': u'000004'},
     u'output_dataset': True}
batchanomalyscore1 = api.create_batch_anomaly_score(anomaly1, dataset1, args)
api.ok(batchanomalyscore1)

dataset2 = api.get_dataset(batchanomalyscore1['object']['output_dataset_resource'])
api.ok(dataset2)
 
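Example No. 2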
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris dataset'})
api.ok(dataset1)

anomaly1 = api.create_anomaly(dataset1, \
    {'name': u"iris dataset's anomaly detector"})
api.ok(anomaly1)

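# output_dataset: True stores the scores as a new BigML dataset.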
batchanomalyscore1 = api.create_batch_anomaly_score(anomaly1, dataset1, \
    {'name': u"Batch Anomaly Score of iris dataset's anomaly detector with iris dataset",
     'output_dataset': True})
api.ok(batchanomalyscore1)

dataset2 = api.get_dataset(batchanomalyscore1['object']['output_dataset_resource'])
api.ok(dataset2)

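# The default output dataset holds just the score column (field 000000);
# rename it here.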
dataset2 = api.update_dataset(dataset2, \
    {'fields': {u'000000': {'name': u'score'}},
     'name': u'my_dataset_from_batch_anomaly_score_name'})
api.ok(dataset2)
Example No. 3
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris'})
api.ok(dataset1)

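# A fixed anomaly_seed makes the detector's random sampling deterministic.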
anomaly1 = api.create_anomaly(dataset1, \
    {'anomaly_seed': u'2c249dda00fbf54ab4cdd850532a584f286af5b6', 'name': u'iris'})
api.ok(anomaly1)

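# Score a single instance; the input data dict is keyed by field name.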
anomalyscore1 = api.create_anomaly_score(anomaly1, \
    {u'petal length': 0.5,
     u'petal width': 0.5,
     u'sepal length': 1,
     u'sepal width': 1,
     u'species': u'Iris-setosa'}, \
    {'name': u'my_anomaly_score_name'})
api.ok(anomalyscore1)
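
Scores can also be computed locally, without another API round trip. A
minimal sketch using the bindings' local Anomaly class, assuming it accepts
the anomaly1 resource created above:

from bigml.anomaly import Anomaly

# Build a local scorer from the remote detector and score the same instance.
local_anomaly = Anomaly(anomaly1)
local_score = local_anomaly.anomaly_score({u'petal length': 0.5,
                                           u'petal width': 0.5,
                                           u'sepal length': 1,
                                           u'sepal width': 1,
                                           u'species': u'Iris-setosa'})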
Example No. 4
from bigml.api import BigML

api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1)
api.ok(dataset1)

anomaly1 = api.create_anomaly(dataset1)
api.ok(anomaly1)

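# create_anomaly_score takes the input data first, then creation arguments
# (here just a display name).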
anomalyscore1 = api.create_anomaly_score(anomaly1, \
    {u'petal length': 0.5,
     u'petal width': 0.5,
     u'sepal length': 1,
     u'sepal width': 1,
     u'species': u'Iris-setosa'}, \
    {'name': u'my_anomaly_score_name'})
api.ok(anomalyscore1)
Example No. 5
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris dataset'})
api.ok(dataset1)

anomaly1 = api.create_anomaly(dataset1, \
    {'name': u"iris dataset's anomaly detector"})
api.ok(anomaly1)

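# No output_dataset is requested here, so the scores stay inside the batch
# resource itself.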
batchanomalyscore1 = api.create_batch_anomaly_score(anomaly1, dataset1, \
    {'name': u'my_batch_anomaly_score_name'})
api.ok(batchanomalyscore1)
Example No. 6
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1)
api.ok(dataset1)

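# Only the detector's name is set; all other settings keep their defaults.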
anomaly1 = api.create_anomaly(dataset1, \
    {'name': u'my_anomaly_name'})
api.ok(anomaly1)
Example No. 7
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris dataset'})
api.ok(dataset1)

anomaly1 = api.create_anomaly(dataset1, \
    {'anomaly_seed': u'2c249dda00fbf54ab4cdd850532a584f286af5b6',
     'name': u"iris dataset's anomaly detector"})
api.ok(anomaly1)

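# With the seed fixed above, the detector and its scores are reproducible.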
anomalyscore1 = api.create_anomaly_score(anomaly1, \
    {u'petal length': 0.5,
     u'petal width': 0.5,
     u'sepal length': 1,
     u'sepal width': 1,
     u'species': u'Iris-setosa'}, \
    {'name': u'my_anomaly_score_name'})
api.ok(anomalyscore1)
Example No. 8
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris'})
api.ok(dataset1)

anomaly1 = api.create_anomaly(dataset1, \
    {'anomaly_seed': u'2c249dda00fbf54ab4cdd850532a584f286af5b6',
     'name': u'my_anomaly_name'})
api.ok(anomaly1)
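
Example No. 9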
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris dataset'})
api.ok(dataset1)

anomaly1 = api.create_anomaly(dataset1, \
    {'anomaly_seed': u'2c249dda00fbf54ab4cdd850532a584f286af5b6',
     'name': u"iris dataset's anomaly detector"})
api.ok(anomaly1)

batchanomalyscore1 = api.create_batch_anomaly_score(anomaly1, dataset1, \
    {'name': u"Batch Anomaly Score of iris dataset's anomaly detector with iris dataset",
     'output_dataset': True})
api.ok(batchanomalyscore1)

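# The generated dataset's id is stored in the batch score's
# output_dataset_resource attribute.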
dataset2 = api.get_dataset(
    batchanomalyscore1['object']['output_dataset_resource'])
api.ok(dataset2)

dataset2 = api.update_dataset(dataset2, \
    {'fields': {u'000000': {'name': u'score'}},
     'name': u'my_dataset_from_batch_anomaly_score_name'})
api.ok(dataset2)
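
To inspect the scored rows locally, the output dataset can be exported to a
CSV file. A minimal sketch (the local filename is arbitrary):

# Download the scored dataset to a local CSV file.
api.download_dataset(dataset2, filename="iris_scores.csv")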