Example #1
from bigml.api import BigML

api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1)
api.ok(dataset1)

cluster1 = api.create_cluster(dataset1)
api.ok(cluster1)

batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, {"name": u"my_batch_centroid_name"})
api.ok(batchcentroid1)
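Example #1 stops after creating the batch centroid; a minimal follow-up, assuming the batchcentroid1 handle above, could download the scored rows as CSV (the filename is an arbitrary choice):

# Download the batch centroid results to a local CSV file
api.download_batch_centroid(batchcentroid1, filename='batch_centroid.csv')
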
Example #2
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': 'iris'})
api.ok(dataset1)

cluster1 = api.create_cluster(dataset1, \
    {'name': 'iris'})
api.ok(cluster1)

batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, \
    {'name': 'iris dataset with iris', 'output_dataset': True})
api.ok(batchcentroid1)

dataset2 = api.get_dataset(batchcentroid1['object']['output_dataset_resource'])
api.ok(dataset2)

dataset2 = api.update_dataset(dataset2, \
    {'name': 'iris dataset with iris'})
api.ok(dataset2)

dataset3 = api.create_dataset(dataset2, \
    {'name': 'my_dataset_from_dataset_from_batch_centroid_name',
     'new_fields': [{'field': '( integer ( replace ( field "cluster" ) '
                              '"Cluster " "" ) )',
                     'name': 'Cluster'}],
     'objective_field': {'id': '100000'}})
api.ok(dataset3)
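
The new_fields entry above is a Flatline s-expression: it strips the "Cluster " prefix from labels such as "Cluster 2" and casts the remainder to an integer. In plain Python the transformation is roughly:

# Rough Python equivalent of
# ( integer ( replace ( field "cluster" ) "Cluster " "" ) )
def cluster_label_to_int(label):
    return int(label.replace("Cluster ", ""))

assert cluster_label_to_int("Cluster 2") == 2
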
Example #3
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris dataset'})
api.ok(dataset1)

cluster1 = api.create_cluster(dataset1, \
    {'name': u"iris dataset's cluster"})
api.ok(cluster1)

batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, \
    {'name': u"Batch Centroid of iris dataset's cluster with iris dataset",
     'output_dataset': True})
api.ok(batchcentroid1)

dataset2 = api.get_dataset(batchcentroid1['object']['output_dataset_resource'])
api.ok(dataset2)

dataset2 = api.update_dataset(dataset2, \
    {'fields': {u'000000': {'name': u'cluster'}},
     'name': u'iris dataset - batchcentroid'})
api.ok(dataset2)

dataset3 = api.create_dataset(dataset2, \
    {'name': u'my_dataset_from_dataset_from_batch_centroid_name',
     'new_fields': [{'field': u'( integer ( replace ( field "cluster" ) "Cluster " "" ) )',
                     u'name': u'Cluster'}]})
api.ok(dataset3)
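
To confirm that the derived Cluster column exists in dataset3, its field structure can be inspected with the bindings' Fields helper (a sketch; the printed id is whatever BigML assigned):

from bigml.fields import Fields

# Map the finished dataset's fields and look the new column up by name
fields = Fields(dataset3)
print(fields.field_id('Cluster'))
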
Example #4
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris dataset'})
api.ok(dataset1)

cluster1 = api.create_cluster(dataset1, \
    {'name': u"iris dataset's cluster"})
api.ok(cluster1)

batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, \
    {'name': u"Batch Centroid of iris dataset's cluster with iris dataset",
     'output_dataset': True})
api.ok(batchcentroid1)

dataset2 = api.get_dataset(batchcentroid1['object']['output_dataset_resource'])
api.ok(dataset2)

dataset2 = api.update_dataset(dataset2, \
    {'name': u'my_dataset_from_batch_centroid_name'})
api.ok(dataset2)
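
One way to verify the rename took effect is to filter the dataset listing by name (list_datasets is part of the bindings; the filter string below is only an illustration):

# List datasets whose name matches the one set above
listing = api.list_datasets("name=my_dataset_from_batch_centroid_name")
for dataset in listing['objects']:
    print(dataset['resource'], dataset['name'])
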
Example #5
from bigml.api import BigML
api = BigML()

# Assumption: the opening of this example is missing; a plain iris.csv
# source and the dataset args from Example #6 stand in for the originals.
source2 = api.create_source("iris.csv")
api.ok(source2)

args = \
    {u'objective_field': {u'id': u'000004'}}
dataset1 = api.create_dataset(source2, args)
api.ok(dataset1)

args = \
    {u'cluster_seed': u'bigml', u'critical_value': 5}
cluster1 = api.create_cluster(dataset1, args)
api.ok(cluster1)

args = \
    {u'fields_map': {u'000000': u'000000',
                     u'000001': u'000001',
                     u'000002': u'000002',
                     u'000003': u'000003',
                     u'000004': u'000004'},
     u'output_dataset': True}
batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, args)
api.ok(batchcentroid1)

dataset2 = api.get_dataset(batchcentroid1["object"]["output_dataset_resource"])
api.ok(dataset2)

args = \
    {u'fields': {u'100000': {u'name': u'cluster', u'preferred': True}},
     u'objective_field': {u'id': u'100000'}}
dataset3 = api.update_dataset(dataset2, args)
api.ok(dataset3)

args = \
    {u'all_fields': False,
     u'new_fields': [{u'field': u'(all)', u'names': [u'cluster']},
                     {u'field': u'( integer ( replace ( field "cluster" ) "Cluster " "" ) )',
                      u'names': [u'Cluster']}]}
dataset4 = api.create_dataset(dataset3, args)
api.ok(dataset4)
Example #6
from bigml.api import BigML
api = BigML()

# Assumption: the opening of this example is missing; source1_file is
# taken to be the local iris.csv used throughout this page.
source1_file = "iris.csv"

args = \
    {'fields': {'000004': {'name': 'species',
                           'optype': 'categorical',
                           'term_analysis': {'enabled': True}}}}
source2 = api.create_source(source1_file, args)
api.ok(source2)

args = \
    {'objective_field': {'id': '000004'}}
dataset1 = api.create_dataset(source2, args)
api.ok(dataset1)

args = \
    {'cluster_seed': 'bigml', 'critical_value': 5}
cluster1 = api.create_cluster(dataset1, args)
api.ok(cluster1)

args = \
    {'output_dataset': True}
batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, args)
api.ok(batchcentroid1)

dataset2 = api.get_dataset(batchcentroid1["object"]["output_dataset_resource"])
api.ok(dataset2)

args = \
    {'fields': {'100000': {'name': 'cluster', 'preferred': True}},
     'objective_field': {'id': '100000'}}
dataset3 = api.update_dataset(dataset2, args)
api.ok(dataset3)

args = \
    {'all_fields': True,
     'new_fields': [{'field': '( integer ( replace ( field "cluster" ) "Cluster " '
                              '"" ) )',
                     'names': ['Cluster']}],
     'objective_field': {'id': '100000'}}
dataset4 = api.create_dataset(dataset3, args)
api.ok(dataset4)
Example #7
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1, \
    {'name': u'iris dataset'})
api.ok(dataset1)

cluster1 = api.create_cluster(dataset1, \
    {'name': u"iris dataset's cluster"})
api.ok(cluster1)

batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, \
    {'name': u'my_batch_centroid_name'})
api.ok(batchcentroid1)
Example #8
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1)
api.ok(dataset1)

cluster1 = api.create_cluster(dataset1)
api.ok(cluster1)

batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, \
    {'output_dataset': True})
api.ok(batchcentroid1)

dataset2 = api.get_dataset(batchcentroid1['object']['output_dataset_resource'])
api.ok(dataset2)

dataset2 = api.update_dataset(dataset2, \
    {'fields': {u'000000': {'name': u'cluster'}},
     'name': u'my_dataset_from_batch_centroid_name'})
api.ok(dataset2)
Example #9
from bigml.api import BigML
api = BigML()

source1 = api.create_source("iris.csv")
api.ok(source1)

dataset1 = api.create_dataset(source1)
api.ok(dataset1)

cluster1 = api.create_cluster(dataset1)
api.ok(cluster1)

batchcentroid1 = api.create_batch_centroid(cluster1, dataset1, \
    {'output_dataset': True})
api.ok(batchcentroid1)

dataset2 = api.get_dataset(batchcentroid1['object']['output_dataset_resource'])
api.ok(dataset2)

dataset2 = api.update_dataset(dataset2, \
    {'fields': {u'000000': {'name': u'cluster'}}})
api.ok(dataset2)

dataset3 = api.create_dataset(dataset2, \
    {'input_fields': [u'000000'],
     'name': u'my_dataset_from_dataset_from_batch_centroid_name',
     'new_fields': [{'field': u'( integer ( replace ( field "cluster" ) "Cluster " "" ) )',
                     u'name': u'Cluster'}]})
api.ok(dataset3)
Example #10
from bigml.api import BigML
api = BigML()

# source, dataset and model are assumed to hold the IDs of existing resources
source = api.get_source(source)
api.ok(source)

dataset = api.get_dataset(dataset)
api.ok(dataset)

model = api.get_model(model)
api.ok(model)

# cluster = api.create_cluster(dataset, {"name": "my cluster", "k": 8})
cluster = api.create_cluster(dataset, {"name": "my cluster", "critical_value": 1})  # default critical_value is 5 for G-means
api.ok(cluster)

batch_centroid = api.create_batch_centroid(cluster, dataset, {"name": "my batch centroid", "all_fields": True, "header": True})
api.ok(batch_centroid)


api.download_batch_centroid(batch_centroid, filename='my_clusters.csv')
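
Once the download completes, the scored rows can be spot-checked with the standard csv module (header=True above puts column names in the first row; with all_fields=True the assigned centroid is typically the last column):

import csv
from itertools import islice

with open('my_clusters.csv') as handle:
    reader = csv.reader(handle)
    print(next(reader))       # column names
    for row in islice(reader, 3):
        print(row)            # input fields plus the assigned centroid
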

from git import Repo

repo_dir = 'GXsaiL'
repo = Repo(repo_dir)
file_list = [
    'gxsail.py',
    'my_clusters.csv'
]
commit_message = 'Add simple regression analysis'

# Stage the files and record the commit
repo.index.add(file_list)
repo.index.commit(commit_message)
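
To publish the commit, a push would typically follow; this assumes the repo has a remote named origin with push access:

# Push the new commit to the configured remote
origin = repo.remote('origin')
origin.push()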