Example no. 1
def wait_until_source_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    read.i_get_the_source(step, world.source["resource"])
    status = get_status(world.source)
    while status["code"] != int(code1) and status["code"] != int(code2):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        read.i_get_the_source(step, world.source["resource"])
        status = get_status(world.source)
    assert status["code"] == int(code1)
Example no. 2
def wait_until_prediction_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    i_get_the_prediction(step, world.prediction["resource"])
    status = get_status(world.prediction)
    while status["code"] != int(code1) and status["code"] != int(code2):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        i_get_the_prediction(step, world.prediction["resource"])
        status = get_status(world.prediction)
    assert status["code"] == int(code1)
Example no. 3
def wait_until_prediction_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    step.given('I get the prediction "{id}"'.format(id=world.prediction["resource"]))
    status = get_status(world.prediction)
    while status["code"] != int(code1) and status["code"] != int(code2):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        step.given('I get the prediction "{id}"'.format(id=world.prediction["resource"]))
        status = get_status(world.prediction)
    assert status["code"] == int(code1)
Example no. 4
def wait_until_batch_centroid_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    i_get_the_batch_centroid(step, world.batch_centroid['resource'])
    status = get_status(world.batch_centroid)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=int(secs)))
        i_get_the_batch_centroid(step, world.batch_centroid['resource'])
        status = get_status(world.batch_centroid)
    eq_(status['code'], int(code1))
Example no. 5
def wait_until_logistic_model_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    read.i_get_the_logistic_model(step, world.logistic_regression['resource'])
    status = get_status(world.logistic_regression)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=int(secs)))
        read.i_get_the_logistic_model(step, world.logistic_regression['resource'])
        status = get_status(world.logistic_regression)
    eq_(status['code'], int(code1))
Example no. 6
def wait_until_evaluation_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    step.given('I get the evaluation "{id}"'.format(id=world.evaluation['resource']))
    status = get_status(world.evaluation)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        step.given('I get the evaluation "{id}"'.format(id=world.evaluation['resource']))
        status = get_status(world.evaluation)
    assert status['code'] == int(code1)
Example no. 7
def wait_until_batch_centroid_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    step.given('I get the batch centroid "{id}"'.format(id=world.batch_centroid['resource']))
    status = get_status(world.batch_centroid)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        step.given('I get the batch centroid "{id}"'.format(id=world.batch_centroid['resource']))
        status = get_status(world.batch_centroid)
    assert status['code'] == int(code1)
Example no. 8
def wait_until_model_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    read.i_get_the_model(step, world.model['resource'])
    status = get_status(world.model)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        read.i_get_the_model(step, world.model['resource'])
        status = get_status(world.model)
    assert status['code'] == int(code1)
Example no. 9
def wait_until_time_series_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    read.i_get_the_time_series(step, world.time_series['resource'])
    status = get_status(world.time_series)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=int(secs)))
        read.i_get_the_time_series(step, world.time_series['resource'])
        status = get_status(world.time_series)
    eq_(status['code'], int(code1))
Example no. 10
def wait_until_deepnet_model_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    status = get_status(world.deepnet)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=delta))
        read.i_get_the_deepnet_model(step, world.deepnet['resource'])
        status = get_status(world.deepnet)
    eq_(status['code'], int(code1))
Example no. 11
def wait_until_anomaly_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    step.given('I get the anomaly detector "{id}"'.format(id=world.anomaly['resource']))
    status = get_status(world.anomaly)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        step.given('I get the anomaly detector "{id}"'.format(id=world.anomaly['resource']))
        status = get_status(world.anomaly)
    assert status['code'] == int(code1)
Example no. 12
def wait_until_cluster_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    i_get_the_cluster(step, world.cluster['resource'])
    status = get_status(world.cluster)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        i_get_the_cluster(step, world.cluster['resource'])
        status = get_status(world.cluster)
    assert status['code'] == int(code1)
Example no. 13
def wait_until_evaluation_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    i_get_the_evaluation(step, world.evaluation['resource'])
    status = get_status(world.evaluation)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=int(secs)))
        i_get_the_evaluation(step, world.evaluation['resource'])
        status = get_status(world.evaluation)
    eq_(status['code'], int(code1))
Example no. 14
def wait_until_batch_anomaly_score_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    i_get_the_batch_anomaly_score(step, world.batch_anomaly_score['resource'])
    status = get_status(world.batch_anomaly_score)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        i_get_the_batch_anomaly_score(step, world.batch_anomaly_score['resource'])
        status = get_status(world.batch_anomaly_score)
    assert status['code'] == int(code1)
Example no. 15
def wait_until_library_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    library_id = world.library['resource']
    i_get_the_library(step, library_id)
    status = get_status(world.library)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=int(secs)))
        i_get_the_library(step, library_id)
        status = get_status(world.library)
    eq_(status['code'], int(code1))
Example no. 16
def wait_until_configuration_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    i_get_the_configuration(step, world.configuration['resource'])
    status = get_status(world.configuration)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=delta)
        i_get_the_configuration(step, world.configuration['resource'])
        status = get_status(world.configuration)
    assert status['code'] == int(code1)
Example no. 17
def wait_until_tst_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    statistical_test_id = world.statistical_test['resource']
    i_get_the_tst(step, statistical_test_id)
    status = get_status(world.statistical_test)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=int(secs)))
        i_get_the_tst(step, statistical_test_id)
        status = get_status(world.statistical_test)
    eq_(status['code'], int(code1))
Example no. 18
def wait_until_cluster_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    i_get_the_cluster(step, world.cluster['resource'])
    status = get_status(world.cluster)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=delta))
        i_get_the_cluster(step, world.cluster['resource'])
        status = get_status(world.cluster)
    eq_(status['code'], int(code1))
Example no. 19
def wait_until_sample_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    sample_id = world.sample['resource']
    i_get_the_sample(step, sample_id)
    status = get_status(world.sample)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        i_get_the_sample(step, sample_id)
        status = get_status(world.sample)
    assert status['code'] == int(code1)
Example no. 20
def wait_until_execution_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    execution_id = world.execution['resource']
    i_get_the_execution(step, execution_id)
    status = get_status(world.execution)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        i_get_the_execution(step, execution_id)
        status = get_status(world.execution)
    assert status['code'] == int(code1)
Example no. 21
def wait_until_script_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    script_id = world.script['resource']
    i_get_the_script(step, script_id)
    status = get_status(world.script)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=delta))
        i_get_the_script(step, script_id)
        status = get_status(world.script)
    eq_(status['code'], int(code1))
Example no. 22
def wait_until_linear_regression_status_code_is(step, code1, code2, secs):
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    linear_regression_id = world.linear_regression['resource']
    i_get_the_linear_regression(step, linear_regression_id)
    status = get_status(world.linear_regression)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        assert_less(datetime.utcnow() - start, timedelta(seconds=delta))
        i_get_the_linear_regression(step, linear_regression_id)
        status = get_status(world.linear_regression)
    eq_(status['code'], int(code1))
Example no. 23
def the_source_has_been_created_async(step, secs):
    start = datetime.utcnow()
    status = get_status(world.resource)
    while status['code'] == UPLOADING:
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        status = get_status(world.resource)
    assert world.resource['code'] == HTTP_CREATED
    # update status
    world.status = world.resource['code']
    world.location = world.resource['location']
    world.source = world.resource['object']
    # save reference
    world.sources.append(world.resource['resource'])
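The step functions in Examples 1 to 23 all poll a resource with time.sleep(3) and a timeout assertion until its status code settles. As a reading aid, here is a minimal sketch of how that duplication could be collapsed into a single helper; the helper name and its retrieve/current callables are hypothetical, while get_status is assumed to be the same function the snippets above use.

import time
from datetime import datetime, timedelta

def wait_until_status_code_is(code1, code2, secs, retrieve, current):
    # Hypothetical generic helper, not part of the examples above.
    # retrieve(): callable that re-fetches the resource,
    #             e.g. lambda: read.i_get_the_model(step, world.model['resource'])
    # current():  callable returning the refreshed resource dict,
    #             e.g. lambda: world.model
    start = datetime.utcnow()
    retrieve()
    status = get_status(current())
    while status['code'] not in (int(code1), int(code2)):
        time.sleep(3)
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        retrieve()
        status = get_status(current())
    assert status['code'] == int(code1)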
Example no. 24
    def __init__(self, anomaly, api=None):

        self.resource_id = None
        self.sample_size = None
        self.input_fields = None
        self.mean_depth = None
        self.expected_mean_depth = None
        self.iforest = None
        self.top_anomalies = None
        self.id_fields = []
        if not (isinstance(anomaly, dict) and 'resource' in anomaly and
                anomaly['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_anomaly_id(anomaly)
            if self.resource_id is None:
                raise Exception(api.error_message(anomaly,
                                                  resource_type='anomaly',
                                                  method='get'))
            query_string = ONLY_MODEL
            anomaly = retrieve_resource(api, self.resource_id,
                                        query_string=query_string)
        else:
            self.resource_id = get_anomaly_id(anomaly)
        if 'object' in anomaly and isinstance(anomaly['object'], dict):
            anomaly = anomaly['object']
            self.sample_size = anomaly.get('sample_size')
            self.input_fields = anomaly.get('input_fields')
            self.id_fields = anomaly.get('id_fields', [])
        if 'model' in anomaly and isinstance(anomaly['model'], dict):
            ModelFields.__init__(self, anomaly['model'].get('fields'))
            if ('top_anomalies' in anomaly['model'] and
                    isinstance(anomaly['model']['top_anomalies'], list)):
                self.mean_depth = anomaly['model'].get('mean_depth')
                status = get_status(anomaly)
                if 'code' in status and status['code'] == FINISHED:
                    self.expected_mean_depth = None
                    if self.mean_depth is None or self.sample_size is None:
                        raise Exception("The anomaly data is not complete. "
                                        "Score will"
                                        " not be available")
                    else:
                        default_depth = (
                            2 * (DEPTH_FACTOR + \
                            math.log(self.sample_size - 1) - \
                            (float(self.sample_size - 1) / self.sample_size)))
                        self.expected_mean_depth = min(self.mean_depth,
                                                       default_depth)
                    iforest = anomaly['model'].get('trees', [])
                    if iforest:
                        self.iforest = [
                            AnomalyTree(anomaly_tree['root'], self.fields)
                            for anomaly_tree in iforest]
                    self.top_anomalies = anomaly['model']['top_anomalies']
                else:
                    raise Exception("The anomaly isn't finished yet")
            else:
                raise Exception("Cannot create the Anomaly instance. Could not"
                                " find the 'top_anomalies' key in the"
                                " resource:\n\n%s" % anomaly['model'].keys())
Example no. 25
def topic_model_from_shared_key(step):

    username = os.environ.get("BIGML_USERNAME")
    world.topic_model = world.api.get_topic_model( \
        world.topic_model['resource'],
        shared_username=username, shared_api_key=world.sharing_key)
    eq_(get_status(world.topic_model)['code'], FINISHED)
Example no. 26
    def __init__(self, model, api=None):

        if not (isinstance(model, dict) and 'resource' in model and
                model['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_model_id(model)
            if self.resource_id is None:
                raise Exception(error_message(model,
                                              resource_type='model',
                                              method='get'))
            query_string = ONLY_MODEL
            model = retrieve_model(api, self.resource_id,
                                   query_string=query_string)
        BaseModel.__init__(self, model, api=api)
        if ('object' in model and isinstance(model['object'], dict)):
            model = model['object']

        if ('model' in model and isinstance(model['model'], dict)):
            status = get_status(model)
            if ('code' in status and status['code'] == FINISHED):
                self.tree = Tree(
                    model['model']['root'],
                    self.fields,
                    self.objective_field)
            else:
                raise Exception("The model isn't finished yet")
        else:
            raise Exception("Cannot create the Model instance. Could not"
                            " find the 'model' key in the resource:\n\n%s" %
                            model)
Example no. 27
    def __init__(self, model, api=None):

        if (isinstance(model, dict) and 'resource' in model and
                model['resource'] is not None):
            self.resource_id = model['resource']
        else:
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_model_id(model)
            if self.resource_id is None:
                raise Exception(error_message(model,
                                              resource_type='model',
                                              method='get'))
            model = retrieve_model(api, self.resource_id)

        if ('object' in model and isinstance(model['object'], dict)):
            model = model['object']

        if ('model' in model and isinstance(model['model'], dict)):
            status = get_status(model)
            if ('code' in status and status['code'] == FINISHED):
                if 'model_fields' in model['model']:
                    fields = model['model']['model_fields']
                    # pagination or exclusion might cause a field not to
                    # be in available fields dict
                    if not all(key in model['model']['fields']
                               for key in fields.keys()):
                        raise Exception("Some fields are missing"
                                        " to generate a local model."
                                        " Please, provide a model with"
                                        " the complete list of fields.")
                    for field in fields:
                        field_info = model['model']['fields'][field]
                        fields[field]['summary'] = field_info['summary']
                        fields[field]['name'] = field_info['name']
                else:
                    fields = model['model']['fields']
                self.inverted_fields = invert_dictionary(fields)
                self.all_inverted_fields = invert_dictionary(model['model']
                                                             ['fields'])
                self.tree = Tree(
                    model['model']['root'],
                    fields,
                    model['objective_fields'])
                self.description = model['description']
                self.field_importance = model['model'].get('importance',
                                                           None)
                if self.field_importance:
                    self.field_importance = [element for element
                                             in self.field_importance
                                             if element[0] in fields]
                self.locale = model.get('locale', DEFAULT_LOCALE)

            else:
                raise Exception("The model isn't finished yet")
        else:
            raise Exception("Cannot create the Model instance. Could not"
                            " find the 'model' key in the resource:\n\n%s" %
                            model)
Example no. 28
    def __init__(self, association, api=None):

        self.resource_id = None
        self.complement = None
        self.discretization = {}
        self.field_discretizations = {}
        self.items = []
        self.max_k = None
        self.max_lhs = None
        self.min_confidence = None
        self.min_leverage = None
        self.min_support = None
        self.min_lift = None
        self.search_strategy = DEFAULT_SEARCH_STRATEGY
        self.rules = []
        self.significance_level = None

        self.resource_id, association = get_resource_dict( \
            association, "association", api=api)

        if 'object' in association and isinstance(association['object'], dict):
            association = association['object']

        if 'associations' in association and \
                isinstance(association['associations'], dict):
            status = get_status(association)
            if 'code' in status and status['code'] == FINISHED:
                associations = association['associations']
                fields = associations['fields']
                ModelFields.__init__(self, fields)
                self.complement = associations.get('complement', False)
                self.discretization = associations.get('discretization', {})
                self.field_discretizations = associations.get(
                    'field_discretizations', {})
                self.items = [Item(index, item, fields) for index, item in
                              enumerate(associations.get('items', []))]
                self.max_k = associations.get('max_k', 100)
                self.max_lhs = associations.get('max_lhs', 4)
                self.min_confidence = associations.get('min_confidence', 0)
                self.min_leverage = associations.get('min_leverage', -1)
                self.min_support = associations.get('min_support', 0)
                self.min_lift = associations.get('min_lift', 0)
                self.search_strategy = associations.get('search_strategy', \
                    DEFAULT_SEARCH_STRATEGY)
                self.rules = [AssociationRule(rule) for rule in
                              associations.get('rules', [])]
                self.significance_level = associations.get(
                    'significance_level', 0.05)
            else:
                raise Exception("The association isn't finished yet")
        else:
            raise Exception("Cannot create the Association instance. Could not"
                            " find the 'associations' key in the "
                            "resource:\n\n%s" %
                            association)
Example no. 29
    def __init__(self, model, api=None):

        if check_model_structure(model):
            self.resource_id = model["resource"]
        else:
            # If only the model id is provided, the short version of the model
            # resource is used to build a basic summary of the model
            if api is None:
                api = BigML()
            self.resource_id = get_model_id(model)
            if self.resource_id is None:
                raise Exception(api.error_message(model, resource_type="model", method="get"))
            query_string = ONLY_MODEL
            model = retrieve_resource(api, self.resource_id, query_string=query_string)
            # Stored copies of the model structure might lack some necessary
            # keys
            if not check_model_structure(model):
                model = api.get_model(self.resource_id, query_string=query_string)

        if "object" in model and isinstance(model["object"], dict):
            model = model["object"]

        if "model" in model and isinstance(model["model"], dict):
            status = get_status(model)
            if "code" in status and status["code"] == FINISHED:
                if "model_fields" in model["model"] or "fields" in model["model"]:
                    fields = model["model"].get("model_fields", model["model"].get("fields", []))
                    # pagination or exclusion might cause a field not to
                    # be in available fields dict
                    if not all(key in model["model"]["fields"] for key in fields.keys()):
                        raise Exception(
                            "Some fields are missing"
                            " to generate a local model."
                            " Please, provide a model with"
                            " the complete list of fields."
                        )
                    for field in fields:
                        field_info = model["model"]["fields"][field]
                        if "summary" in field_info:
                            fields[field]["summary"] = field_info["summary"]
                        fields[field]["name"] = field_info["name"]
                objective_field = model["objective_fields"]
                ModelFields.__init__(self, fields, objective_id=extract_objective(objective_field))
                self.description = model["description"]
                self.field_importance = model["model"].get("importance", None)
                if self.field_importance:
                    self.field_importance = [element for element in self.field_importance if element[0] in fields]
                self.locale = model.get("locale", DEFAULT_LOCALE)

            else:
                raise Exception("The model isn't finished yet")
        else:
            raise Exception(
                "Cannot create the BaseModel instance. Could not" " find the 'model' key in the resource:\n\n%s" % model
            )
Example no. 30
    def __init__(self, cluster, api=None):

        if not (isinstance(cluster, dict) and 'resource' in cluster and
                cluster['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_cluster_id(cluster)
            if self.resource_id is None:
                raise Exception(api.error_message(cluster,
                                                  resource_type='cluster',
                                                  method='get'))
            query_string = ONLY_MODEL
            cluster = retrieve_resource(api, self.resource_id,
                                        query_string=query_string)
        if 'object' in cluster and isinstance(cluster['object'], dict):
            cluster = cluster['object']

        if 'clusters' in cluster and isinstance(cluster['clusters'], dict):
            status = get_status(cluster)
            if 'code' in status and status['code'] == FINISHED:
                clusters = cluster['clusters']['clusters']
                self.centroids = [Centroid(centroid) for centroid in clusters]
                self.scales = {}
                self.scales.update(cluster['scales'])
                self.term_forms = {}
                self.tag_clouds = {}
                self.term_analysis = {}
                fields = cluster['clusters']['fields']
                for field_id, field in fields.items():
                    if field['optype'] == 'text':

                        self.term_forms[field_id] = {}
                        self.term_forms[field_id].update(field[
                            'summary']['term_forms'])
                        self.tag_clouds[field_id] = {}
                        self.tag_clouds[field_id].update(field[
                            'summary']['tag_cloud'])
                        self.term_analysis[field_id] = {}
                        self.term_analysis[field_id].update(
                            field['term_analysis'])
                ModelFields.__init__(self, fields)
                if not all([field_id in self.fields for
                            field_id in self.scales]):
                    raise Exception("Some fields are missing"
                                    " to generate a local cluster."
                                    " Please, provide a cluster with"
                                    " the complete list of fields.")
            else:
                raise Exception("The cluster isn't finished yet")
        else:
            raise Exception("Cannot create the Cluster instance. Could not"
                            " find the 'clusters' key in the resource:\n\n%s" %
                            cluster)
Example no. 31
    def __init__(self, cluster, api=None):

        self.resource_id = None
        self.centroids = None
        self.cluster_global = None
        self.total_ss = None
        self.within_ss = None
        self.between_ss = None
        self.ratio_ss = None
        self.critical_value = None
        self.default_numeric_value = None
        self.k = None
        self.summary_fields = []
        self.scales = {}
        self.term_forms = {}
        self.tag_clouds = {}
        self.term_analysis = {}
        self.item_analysis = {}
        self.items = {}
        self.datasets = {}
        self.api = api

        if self.api is None:
            self.api = BigML(storage=STORAGE)

        self.resource_id, cluster = get_resource_dict( \
            cluster, "cluster", api=api)

        if 'object' in cluster and isinstance(cluster['object'], dict):
            cluster = cluster['object']

        if 'clusters' in cluster and isinstance(cluster['clusters'], dict):
            status = get_status(cluster)
            if 'code' in status and status['code'] == FINISHED:
                self.default_numeric_value = cluster.get( \
                    "default_numeric_value")
                self.summary_fields = cluster.get("summary_fields", [])
                self.datasets = cluster.get("cluster_datasets", {})
                the_clusters = cluster['clusters']
                cluster_global = the_clusters.get('global')
                clusters = the_clusters['clusters']
                self.centroids = [Centroid(centroid) for centroid in clusters]
                self.cluster_global = cluster_global
                if cluster_global:
                    self.cluster_global = Centroid(cluster_global)
                    # "global" has no "name" and "count" then we set them
                    self.cluster_global.name = GLOBAL_CLUSTER_LABEL
                    self.cluster_global.count = \
                        self.cluster_global.distance['population']
                self.total_ss = the_clusters.get('total_ss')
                self.within_ss = the_clusters.get('within_ss')
                if not self.within_ss:
                    self.within_ss = sum(centroid.distance['sum_squares']
                                         for centroid in self.centroids)
                self.between_ss = the_clusters.get('between_ss')
                self.ratio_ss = the_clusters.get('ratio_ss')
                self.critical_value = cluster.get('critical_value', None)
                self.k = cluster.get('k')
                self.scales.update(cluster['scales'])
                self.term_forms = {}
                self.tag_clouds = {}
                self.term_analysis = {}
                fields = cluster['clusters']['fields']
                summary_fields = cluster['summary_fields']
                for field_id in summary_fields:
                    try:
                        del fields[field_id]
                    except KeyError:
                        # clusters retrieved from API will only contain
                        # model fields
                        pass
                for field_id, field in fields.items():
                    if field['optype'] == 'text':
                        self.term_forms[field_id] = {}
                        self.term_forms[field_id].update(
                            field['summary']['term_forms'])
                        self.tag_clouds[field_id] = {}
                        self.tag_clouds[field_id].update(
                            field['summary']['tag_cloud'])
                        self.term_analysis[field_id] = {}
                        self.term_analysis[field_id].update(
                            field['term_analysis'])
                    if field['optype'] == 'items':
                        self.items[field_id] = {}
                        self.items[field_id].update(
                            dict(field['summary']['items']))
                        self.item_analysis[field_id] = {}
                        self.item_analysis[field_id].update(
                            field['item_analysis'])

                ModelFields.__init__(self, fields)
                if not all(
                    [field_id in self.fields for field_id in self.scales]):
                    raise Exception("Some fields are missing"
                                    " to generate a local cluster."
                                    " Please, provide a cluster with"
                                    " the complete list of fields.")
            else:
                raise Exception("The cluster isn't finished yet")
        else:
            raise Exception("Cannot create the Cluster instance. Could not"
                            " find the 'clusters' key in the resource:\n\n%s" %
                            cluster)
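A similar hedged usage sketch for the local Cluster, under the same assumptions as above (bigml bindings installed, environment credentials, placeholder id and input):

from bigml.api import BigML
from bigml.cluster import Cluster

api = BigML()
local_cluster = Cluster("cluster/000000000000000000000000", api=api)  # placeholder id
# centroid() picks the nearest centroid locally, using the scales and
# fields gathered by the constructor above
print(local_cluster.centroid({"field name": "field value"}))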
Example no. 32
    def __init__(self, logistic_regression, api=None):

        self.resource_id = None
        self.input_fields = []
        self.term_forms = {}
        self.tag_clouds = {}
        self.term_analysis = {}
        self.items = {}
        self.item_analysis = {}
        self.categories = {}
        self.coefficients = {}
        self.data_field_types = {}
        self.field_codings = {}
        self.numeric_fields = {}
        self.bias = None
        self.missing_numerics = None
        self.c = None
        self.eps = None
        self.lr_normalize = None
        self.regularization = None
        old_coefficients = False
        if not (isinstance(logistic_regression, dict)
                and 'resource' in logistic_regression
                and logistic_regression['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_logistic_regression_id(logistic_regression)
            if self.resource_id is None:
                raise Exception(
                    api.error_message(logistic_regression,
                                      resource_type='logistic_regression',
                                      method='get'))
            query_string = ONLY_MODEL
            logistic_regression = retrieve_resource(api,
                                                    self.resource_id,
                                                    query_string=query_string)
        else:
            self.resource_id = get_logistic_regression_id(logistic_regression)

        if 'object' in logistic_regression and \
            isinstance(logistic_regression['object'], dict):
            logistic_regression = logistic_regression['object']
        try:
            self.input_fields = logistic_regression.get("input_fields", [])
            self.dataset_field_types = logistic_regression.get(
                "dataset_field_types", {})
            objective_field = logistic_regression['objective_fields'] if \
                logistic_regression['objective_fields'] else \
                logistic_regression['objective_field']
        except KeyError:
            raise ValueError("Failed to find the logistic regression expected "
                             "JSON structure. Check your arguments.")
        if 'logistic_regression' in logistic_regression and \
            isinstance(logistic_regression['logistic_regression'], dict):
            status = get_status(logistic_regression)
            if 'code' in status and status['code'] == FINISHED:
                logistic_regression_info = logistic_regression[ \
                    'logistic_regression']
                fields = logistic_regression_info.get('fields', {})

                if not self.input_fields:
                    self.input_fields = [ \
                        field_id for field_id, _ in
                        sorted(fields.items(),
                               key=lambda x: x[1].get("column_number"))]
                self.coefficients.update(logistic_regression_info.get( \
                    'coefficients', []))
                if (not isinstance(self.coefficients.values()[0][0], list)):
                    old_coefficients = True
                self.bias = logistic_regression_info.get('bias', 0)
                self.c = logistic_regression_info.get('c')
                self.eps = logistic_regression_info.get('eps')
                self.lr_normalize = logistic_regression_info.get('normalize')
                self.regularization = logistic_regression_info.get( \
                    'regularization')
                self.field_codings = logistic_regression_info.get( \
                     'field_codings', {})
                # old models have no such attribute, so we set it to False in
                # this case
                self.missing_numerics = logistic_regression_info.get( \
                    'missing_numerics', False)
                objective_id = extract_objective(objective_field)
                for field_id, field in fields.items():
                    if field['optype'] == 'text':
                        self.term_forms[field_id] = {}
                        self.term_forms[field_id].update(
                            field['summary']['term_forms'])
                        self.tag_clouds[field_id] = []
                        self.tag_clouds[field_id] = [
                            tag for [tag, _] in field['summary']['tag_cloud']
                        ]
                        self.term_analysis[field_id] = {}
                        self.term_analysis[field_id].update(
                            field['term_analysis'])
                    if field['optype'] == 'items':
                        self.items[field_id] = []
                        self.items[field_id] = [item for item, _ in \
                            field['summary']['items']]
                        self.item_analysis[field_id] = {}
                        self.item_analysis[field_id].update(
                            field['item_analysis'])
                    if field['optype'] == 'categorical':
                        self.categories[field_id] = [category for \
                            [category, _] in field['summary']['categories']]
                    if self.missing_numerics and field['optype'] == 'numeric':
                        self.numeric_fields[field_id] = True
                ModelFields.__init__(self, fields, objective_id=objective_id)
                self.field_codings = logistic_regression_info.get( \
                  'field_codings', {})
                self.format_field_codings()
                for field_id in self.field_codings:
                    if field_id not in fields and \
                            field_id in self.inverted_fields:
                        self.field_codings.update( \
                            {self.inverted_fields[field_id]: \
                             self.field_codings[field_id]})
                        del self.field_codings[field_id]
                if old_coefficients:
                    self.map_coefficients()
            else:
                raise Exception("The logistic regression isn't finished yet")
        else:
            raise Exception("Cannot create the LogisticRegression instance."
                            " Could not find the 'logistic_regression' key"
                            " in the resource:\n\n%s" % logistic_regression)
Example no. 33
    def __init__(self, logistic_regression, api=None, cache_get=None):

        if use_cache(cache_get):
            # using a cache to store the model attributes
            self.__dict__ = load(get_logistic_regression_id( \
                logistic_regression), cache_get)
            return

        self.resource_id = None
        self.class_names = None
        self.input_fields = []
        self.term_forms = {}
        self.tag_clouds = {}
        self.term_analysis = {}
        self.items = {}
        self.item_analysis = {}
        self.categories = {}
        self.coefficients = {}
        self.data_field_types = {}
        self.field_codings = {}
        self.numeric_fields = {}
        self.bias = None
        self.missing_numerics = None
        self.c = None
        self.eps = None
        self.lr_normalize = None
        self.balance_fields = None
        self.regularization = None
        api = get_api_connection(api)

        old_coefficients = False

        self.resource_id, logistic_regression = get_resource_dict( \
            logistic_regression, "logisticregression", api=api)

        if 'object' in logistic_regression and \
            isinstance(logistic_regression['object'], dict):
            logistic_regression = logistic_regression['object']
        try:
            self.input_fields = logistic_regression.get("input_fields", [])
            self.dataset_field_types = logistic_regression.get(
                "dataset_field_types", {})
            self.weight_field = logistic_regression.get("weight_field")
            objective_field = logistic_regression['objective_fields'] if \
                logistic_regression['objective_fields'] else \
                logistic_regression['objective_field']
        except KeyError:
            raise ValueError("Failed to find the logistic regression expected "
                             "JSON structure. Check your arguments.")
        if 'logistic_regression' in logistic_regression and \
            isinstance(logistic_regression['logistic_regression'], dict):
            status = get_status(logistic_regression)
            if 'code' in status and status['code'] == FINISHED:
                logistic_regression_info = logistic_regression[ \
                    'logistic_regression']
                fields = logistic_regression_info.get('fields', {})

                if not self.input_fields:
                    self.input_fields = [ \
                        field_id for field_id, _ in
                        sorted(list(fields.items()),
                               key=lambda x: x[1].get("column_number"))]
                self.coefficients.update(logistic_regression_info.get( \
                    'coefficients', []))
                if not isinstance(
                        list(self.coefficients.values())[0][0], list):
                    old_coefficients = True
                self.bias = logistic_regression_info.get('bias', True)
                self.c = logistic_regression_info.get('c')
                self.eps = logistic_regression_info.get('eps')
                self.lr_normalize = logistic_regression_info.get('normalize')
                self.balance_fields = logistic_regression_info.get( \
                    'balance_fields')
                self.regularization = logistic_regression_info.get( \
                    'regularization')
                self.field_codings = logistic_regression_info.get( \
                     'field_codings', {})
                # old models have no such attribute, so we set it to False in
                # this case
                self.missing_numerics = logistic_regression_info.get( \
                    'missing_numerics', False)
                objective_id = extract_objective(objective_field)
                missing_tokens = logistic_regression_info.get("missing_tokens")
                ModelFields.__init__(self,
                                     fields,
                                     objective_id=objective_id,
                                     terms=True,
                                     categories=True,
                                     numerics=True,
                                     missing_tokens=missing_tokens)
                self.field_codings = logistic_regression_info.get( \
                  'field_codings', {})
                self.format_field_codings()
                for field_id in self.field_codings:
                    if field_id not in self.fields and \
                            field_id in self.inverted_fields:
                        self.field_codings.update( \
                            {self.inverted_fields[field_id]: \
                             self.field_codings[field_id]})
                        del self.field_codings[field_id]
                if old_coefficients:
                    self.map_coefficients()
                categories = self.fields[self.objective_id].get( \
                    "summary", {}).get('categories')
                if len(list(self.coefficients.keys())) > len(categories):
                    self.class_names = [""]
                else:
                    self.class_names = []
                self.class_names.extend(
                    sorted([category[0] for category in categories]))
                # order matters
                self.objective_categories = [
                    category[0] for category in categories
                ]
            else:
                raise Exception("The logistic regression isn't finished yet")
        else:
            raise Exception("Cannot create the LogisticRegression instance."
                            " Could not find the 'logistic_regression' key"
                            " in the resource:\n\n%s" % logistic_regression)
Example no. 34
    def __init__(self, model, api=None):
        """The Model constructor can be given as first argument:
            - a model structure
            - a model id
            - a path to a JSON file containing a model structure

        """
        # the string can be a path to a JSON file
        if isinstance(model, basestring):
            try:
                with open(model) as model_file:
                    model = json.load(model_file)
                    self.resource_id = get_model_id(model)
                    if self.resource_id is None:
                        raise ValueError("The JSON file does not seem"
                                         " to contain a valid BigML model"
                                         " representation.")
            except IOError:
                # if it is not a path, it can be a model id
                self.resource_id = get_model_id(model)
                if self.resource_id is None:
                    if model.find('model/') > -1:
                        raise Exception(
                            api.error_message(model,
                                              resource_type='model',
                                              method='get'))
                    else:
                        raise IOError("Failed to open the expected JSON file"
                                      " at %s" % model)
            except ValueError:
                raise ValueError("Failed to interpret %s."
                                 " JSON file expected.")

        if not (isinstance(model, dict) and 'resource' in model
                and model['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            query_string = ONLY_MODEL
            model = retrieve_resource(api,
                                      self.resource_id,
                                      query_string=query_string)
        BaseModel.__init__(self, model, api=api)
        if 'object' in model and isinstance(model['object'], dict):
            model = model['object']

        if 'model' in model and isinstance(model['model'], dict):
            status = get_status(model)
            if 'code' in status and status['code'] == FINISHED:
                distribution = model['model']['distribution']['training']
                self.ids_map = {}
                self.tree = Tree(model['model']['root'],
                                 self.fields,
                                 objective_field=self.objective_id,
                                 root_distribution=distribution,
                                 parent_id=None,
                                 ids_map=self.ids_map)
                self.terms = {}
            else:
                raise Exception("The model isn't finished yet")
        else:
            raise Exception("Cannot create the Model instance. Could not"
                            " find the 'model' key in the resource:\n\n%s" %
                            model)
        if self.tree.regression:
            try:
                import numpy
                import scipy
                self.regression_ready = True
            except ImportError:
                self.regression_ready = False
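A hedged usage sketch for the local Model built by this constructor (placeholder id and input; numpy and scipy are only needed for the regression case checked at the end):

from bigml.api import BigML
from bigml.model import Model

api = BigML()
local_model = Model("model/000000000000000000000000", api=api)  # placeholder id
# prediction walks the locally stored Tree; no API call is made here
print(local_model.predict({"field name": "field value"}))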
Example no. 35
def model_from_shared_url(step):
    world.model = world.api.get_model("shared/model/%s" % world.shared_hash)
    eq_(get_status(world.model)['code'], FINISHED)
Example no. 36
    def __init__(self, topic_model, api=None):

        self.resource_id = None
        self.stemmer = None
        self.seed = None
        self.case_sensitive = False
        self.bigrams = False
        self.ntopics = None
        self.temp = None
        self.phi = None
        self.term_to_index = None
        self.topics = []

        if not (isinstance(topic_model, dict) and 'resource' in topic_model
                and topic_model['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_topic_model_id(topic_model)
            if self.resource_id is None:
                raise Exception(
                    api.error_message(topic_model,
                                      resource_type='topicmodel',
                                      method='get'))
            query_string = ONLY_MODEL
            topic_model = retrieve_resource(api,
                                            self.resource_id,
                                            query_string=query_string)
        else:
            self.resource_id = get_topic_model_id(topic_model)

        if 'object' in topic_model and isinstance(topic_model['object'], dict):
            topic_model = topic_model['object']

        if 'topic_model' in topic_model \
                and isinstance(topic_model['topic_model'], dict):
            status = get_status(topic_model)
            if 'code' in status and status['code'] == FINISHED:

                model = topic_model['topic_model']
                self.topics = model['topics']

                if 'language' in model and model['language'] is not None:
                    lang = model['language']
                    if lang in CODE_TO_NAME:
                        self.stemmer = Stemmer.Stemmer(CODE_TO_NAME[lang])

                self.term_to_index = {
                    self.stem(term): index
                    for index, term in enumerate(model['termset'])
                }

                self.seed = abs(model['hashed_seed'])
                self.case_sensitive = model['case_sensitive']
                self.bigrams = model['bigrams']

                self.ntopics = len(model['term_topic_assignments'][0])

                self.alpha = model['alpha']
                self.ktimesalpha = self.ntopics * self.alpha

                self.temp = [0] * self.ntopics

                assignments = model['term_topic_assignments']
                beta = model['beta']
                nterms = len(self.term_to_index)

                sums = [
                    sum(n[index] for n in assignments)
                    for index in range(self.ntopics)
                ]

                self.phi = [[0 for _ in range(nterms)]
                            for _ in range(self.ntopics)]

                for k in range(self.ntopics):
                    norm = sums[k] + nterms * beta
                    for w in range(nterms):
                        self.phi[k][w] = (assignments[w][k] + beta) / norm

                ModelFields.__init__(self, model['fields'])
            else:
                raise Exception("The topic model isn't finished yet")
        else:
            raise Exception("Cannot create the topic model instance. Could not"
                            " find the 'topic_model' key in the"
                            " resource:\n\n%s" % topic_model)
Example no. 37
def cluster_from_shared_key(step):
   
    username = os.environ.get("BIGML_USERNAME")
    world.cluster = world.api.get_cluster(world.cluster['resource'],
        shared_username=username, shared_api_key=world.sharing_key)
    assert get_status(world.cluster)['code'] == FINISHED
Example no. 38
    def __init__(self, logistic_regression, api=None):

        self.resource_id = None
        self.class_names = None
        self.input_fields = []
        self.term_forms = {}
        self.tag_clouds = {}
        self.term_analysis = {}
        self.items = {}
        self.item_analysis = {}
        self.categories = {}
        self.coefficients = {}
        self.data_field_types = {}
        self.field_codings = {}
        self.numeric_fields = {}
        self.bias = None
        self.missing_numerics = None
        self.c = None
        self.eps = None
        self.lr_normalize = None
        self.balance_fields = None
        self.regularization = None

        old_coefficients = False

        # checks whether the information needed for local predictions is in
        # the first argument
        if isinstance(logistic_regression, dict) and \
                not check_model_fields(logistic_regression):
            # if the fields used by the logistic regression are not
            # available, use only ID to retrieve it again
            logistic_regression = get_logistic_regression_id( \
                logistic_regression)
            self.resource_id = logistic_regression

        if not (isinstance(logistic_regression, dict)
                and 'resource' in logistic_regression and
                logistic_regression['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_logistic_regression_id(logistic_regression)
            if self.resource_id is None:
                raise Exception(
                    api.error_message(logistic_regression,
                                      resource_type='logistic_regression',
                                      method='get'))
            query_string = ONLY_MODEL
            logistic_regression = retrieve_resource(
                api, self.resource_id, query_string=query_string)
        else:
            self.resource_id = get_logistic_regression_id(logistic_regression)

        if 'object' in logistic_regression and \
            isinstance(logistic_regression['object'], dict):
            logistic_regression = logistic_regression['object']
        try:
            self.input_fields = logistic_regression.get("input_fields", [])
            self.dataset_field_types = logistic_regression.get(
                "dataset_field_types", {})
            objective_field = logistic_regression['objective_fields'] if \
                logistic_regression['objective_fields'] else \
                logistic_regression['objective_field']
        except KeyError:
            raise ValueError("Failed to find the logistic regression expected "
                             "JSON structure. Check your arguments.")
        if 'logistic_regression' in logistic_regression and \
            isinstance(logistic_regression['logistic_regression'], dict):
            status = get_status(logistic_regression)
            if 'code' in status and status['code'] == FINISHED:
                logistic_regression_info = logistic_regression[ \
                    'logistic_regression']
                fields = logistic_regression_info.get('fields', {})

                if not self.input_fields:
                    self.input_fields = [ \
                        field_id for field_id, _ in
                        sorted(fields.items(),
                               key=lambda x: x[1].get("column_number"))]
                self.coefficients.update(logistic_regression_info.get( \
                    'coefficients', []))
                if not isinstance(
                        list(self.coefficients.values())[0][0], list):
                    old_coefficients = True
                self.bias = logistic_regression_info.get('bias', True)
                self.c = logistic_regression_info.get('c')
                self.eps = logistic_regression_info.get('eps')
                self.lr_normalize = logistic_regression_info.get('normalize')
                self.balance_fields = logistic_regression_info.get( \
                    'balance_fields')
                self.regularization = logistic_regression_info.get( \
                    'regularization')
                self.field_codings = logistic_regression_info.get( \
                     'field_codings', {})
                # old models have no such attribute, so we set it to False in
                # this case
                self.missing_numerics = logistic_regression_info.get( \
                    'missing_numerics', False)
                objective_id = extract_objective(objective_field)
                ModelFields.__init__(
                    self, fields,
                    objective_id=objective_id, terms=True, categories=True,
                    numerics=True)
                self.field_codings = logistic_regression_info.get( \
                  'field_codings', {})
                self.format_field_codings()
                for field_id in self.field_codings:
                    if field_id not in fields and \
                            field_id in self.inverted_fields:
                        self.field_codings.update( \
                            {self.inverted_fields[field_id]: \
                             self.field_codings[field_id]})
                        del self.field_codings[field_id]
                if old_coefficients:
                    self.map_coefficients()
                categories = self.fields[self.objective_id].get( \
                    "summary", {}).get('categories')
                if len(self.coefficients.keys()) > len(categories):
                    self.class_names = [""]
                else:
                    self.class_names = []
                self.class_names.extend(sorted([category[0]
                                                for category in categories]))
            else:
                raise Exception("The logistic regression isn't finished yet")
        else:
            raise Exception("Cannot create the LogisticRegression instance."
                            " Could not find the 'logistic_regression' key"
                            " in the resource:\n\n%s" %
                            logistic_regression)
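A minimal usage sketch for a local LogisticRegression like the one above (the module path follows the BigML Python bindings docs; the resource id and the input field name are illustrative assumptions):

from bigml.api import BigML
from bigml.logistic import LogisticRegression

# credentials are read from BIGML_USERNAME / BIGML_API_KEY
api = BigML()
# hypothetical resource id; replace it with one of your own
local_lr = LogisticRegression("logisticregression/5af06df94e17277501000010",
                              api=api)
# local prediction from a dict keyed by field name or field id
print(local_lr.predict({"petal length": 2.45}))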
Example no. 39
0
    def __init__(self, deepnet, api=None):
        """The Deepnet constructor can be given as first argument:
            - a deepnet structure
            - a deepnet id
            - a path to a JSON file containing a deepnet structure

        """
        self.resource_id = None
        self.regression = False
        self.network = None
        self.networks = None
        self.input_fields = []
        self.class_names = []
        self.preprocess = []
        self.optimizer = None
        self.missing_numerics = False
        # the string can be a path to a JSON file
        if isinstance(deepnet, basestring):
            try:
                with open(deepnet) as deepnet_file:
                    deepnet = json.load(deepnet_file)
                    self.resource_id = get_deepnet_id(deepnet)
                    if self.resource_id is None:
                        raise ValueError("The JSON file does not seem"
                                         " to contain a valid BigML deepnet"
                                         " representation.")
            except IOError:
                # if it is not a path, it can be a deepnet id
                self.resource_id = get_deepnet_id(deepnet)
                if self.resource_id is None:
                    if deepnet.find('deepnet/') > -1:
                        raise Exception(
                            api.error_message(deepnet,
                                              resource_type='deepnet',
                                              method='get'))
                    else:
                        raise IOError("Failed to open the expected JSON file"
                                      " at %s" % deepnet)
            except ValueError:
                raise ValueError("Failed to interpret %s."
                                 " JSON file expected.")

        # checks whether the information needed for local predictions is in
        # the first argument
        if isinstance(deepnet, dict) and \
                not check_model_fields(deepnet):
            # if the fields used by the deepnet are not
            # available, use only ID to retrieve it again
            deepnet = get_deepnet_id(deepnet)
            self.resource_id = deepnet

        if not (isinstance(deepnet, dict) and 'resource' in deepnet
                and deepnet['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            query_string = ONLY_MODEL
            deepnet = retrieve_resource(api,
                                        self.resource_id,
                                        query_string=query_string)
        else:
            self.resource_id = get_deepnet_id(deepnet)
        if 'object' in deepnet and isinstance(deepnet['object'], dict):
            deepnet = deepnet['object']
        self.input_fields = deepnet['input_fields']
        if 'deepnet' in deepnet and isinstance(deepnet['deepnet'], dict):
            status = get_status(deepnet)
            objective_field = deepnet['objective_fields']
            deepnet = deepnet['deepnet']
            if 'code' in status and status['code'] == FINISHED:
                self.fields = deepnet['fields']
                ModelFields.__init__(
                    self,
                    self.fields,
                    objective_id=extract_objective(objective_field),
                    terms=True,
                    categories=True)

                self.regression = \
                    self.fields[self.objective_id]['optype'] == NUMERIC
                if not self.regression:
                    self.class_names = [category for category, _ in \
                        self.fields[self.objective_id][ \
                        'summary']['categories']]
                    self.class_names.sort()

                self.missing_numerics = deepnet.get('missing_numerics', False)
                if 'network' in deepnet:
                    network = deepnet['network']
                    self.network = network
                    self.networks = network.get('networks', [])
                    self.preprocess = network.get('preprocess')
                    self.optimizer = network.get('optimizer', {})
            else:
                raise Exception("The deepnet isn't finished yet")
        else:
            raise Exception("Cannot create the Deepnet instance. Could not"
                            " find the 'deepnet' key in the resource:\n\n%s" %
                            deepnet)
Example no. 40
0
    def __init__(self, model, api=None, fields=None, checked=True):

        check_fn = check_local_but_fields if fields is not None else \
            check_local_info
        if isinstance(model, dict) and (checked or check_fn(model)):
            self.resource_id = model['resource']
        else:
            # If only the model id is provided, the short version of the model
            # resource is used to build a basic summary of the model
            self.api = get_api_connection(api)
            self.resource_id = get_model_id(model)
            if self.resource_id is None:
                raise Exception(self.api.error_message(model,
                                                       resource_type='model',
                                                       method='get'))
            if fields is not None and isinstance(fields, dict):
                query_string = EXCLUDE_FIELDS
            else:
                query_string = ONLY_MODEL
            model = retrieve_resource(api, self.resource_id,
                                      query_string=query_string,
                                      no_check_fields=fields is not None)

        if 'object' in model and isinstance(model['object'], dict):
            model = model['object']

        if 'model' in model and isinstance(model['model'], dict):
            status = get_status(model)
            if 'code' in status and status['code'] == FINISHED:
                if (fields is None and ('model_fields' in model['model'] or
                                        'fields' in model['model'])):
                    fields = model['model'].get('model_fields',
                                                model['model'].get('fields',
                                                                   []))
                    # model_fields doesn't contain the datetime fields
                    fields.update(datetime_fields(model['model'].get('fields',
                                                                     {})))
                    # pagination or exclusion might cause a field not to
                    # be in available fields dict
                    if not all(key in model['model']['fields']
                               for key in list(fields.keys())):
                        raise Exception("Some fields are missing"
                                        " to generate a local model."
                                        " Please, provide a model with"
                                        " the complete list of fields.")
                    for field in fields:
                        field_info = model['model']['fields'][field]
                        if 'summary' in field_info:
                            fields[field]['summary'] = field_info['summary']
                        fields[field]['name'] = field_info['name']
                objective_field = model['objective_fields']
                missing_tokens = model['model'].get('missing_tokens')

                ModelFields.__init__(
                    self, fields, objective_id=extract_objective(objective_field),
                    missing_tokens=missing_tokens)
                self.description = model['description']
                self.field_importance = model['model'].get('importance',
                                                           None)
                if self.field_importance:
                    self.field_importance = [element for element
                                             in self.field_importance
                                             if element[0] in fields]
                self.locale = model.get('locale', DEFAULT_LOCALE)
            else:
                raise Exception("The model isn't finished yet")
        else:
            raise Exception("Cannot create the BaseModel instance. Could not"
                            " find the 'model' key in the resource:\n\n%s" %
                            model)
Example no. 41
0
    def __init__(self, model, api=None):

        if check_model_structure(model):
            self.resource_id = model['resource']
        else:
            # If only the model id is provided, the short version of the model
            # resource is used to build a basic summary of the model
            if api is None:
                api = BigML()
            self.resource_id = get_model_id(model)
            if self.resource_id is None:
                raise Exception(api.error_message(model,
                                                  resource_type='model',
                                                  method='get'))
            query_string = ONLY_MODEL
            model = retrieve_resource(api, self.resource_id,
                                      query_string=query_string)
            # Stored copies of the model structure might lack some necessary
            # keys
            if not check_model_structure(model):
                model = api.get_model(self.resource_id,
                                      query_string=query_string)

        if 'object' in model and isinstance(model['object'], dict):
            model = model['object']

        if 'model' in model and isinstance(model['model'], dict):
            status = get_status(model)
            if 'code' in status and status['code'] == FINISHED:
                if ('model_fields' in model['model'] or
                    'fields' in model['model']):
                    fields = model['model'].get('model_fields',
                                                model['model'].get('fields',
                                                                   []))
                    # pagination or exclusion might cause a field not to
                    # be in available fields dict
                    if not all(key in model['model']['fields']
                               for key in fields.keys()):
                        raise Exception("Some fields are missing"
                                        " to generate a local model."
                                        " Please, provide a model with"
                                        " the complete list of fields.")
                    for field in fields:
                        field_info = model['model']['fields'][field]
                        if 'summary' in field_info:
                            fields[field]['summary'] = field_info['summary']
                        fields[field]['name'] = field_info['name']
                objective_field = model['objective_fields']
                ModelFields.__init__(
                    self, fields,
                    objective_id=extract_objective(objective_field))
                self.description = model['description']
                self.field_importance = model['model'].get('importance',
                                                           None)
                if self.field_importance:
                    self.field_importance = [element for element
                                             in self.field_importance
                                             if element[0] in fields]
                self.locale = model.get('locale', DEFAULT_LOCALE)

            else:
                raise Exception("The model isn't finished yet")
        else:
            raise Exception("Cannot create the BaseModel instance. Could not"
                            " find the 'model' key in the resource:\n\n%s" %
                            model)
Example no. 42
0
    def __init__(self, linear_regression, api=None):

        self.resource_id = None
        self.input_fields = []
        self.term_forms = {}
        self.tag_clouds = {}
        self.term_analysis = {}
        self.items = {}
        self.item_analysis = {}
        self.categories = {}
        self.coefficients = []
        self.data_field_types = {}
        self.field_codings = {}
        self.bias = None
        self.xtx_inverse = []
        self.mean_squared_error = None
        self.number_of_parameters = None
        self.number_of_samples = None

        self.resource_id, linear_regression = get_resource_dict( \
            linear_regression, "linearregression", api=api)

        if 'object' in linear_regression and \
            isinstance(linear_regression['object'], dict):
            linear_regression = linear_regression['object']
        try:
            self.input_fields = linear_regression.get("input_fields", [])
            self.dataset_field_types = linear_regression.get(
                "dataset_field_types", {})
            self.weight_field = linear_regression.get("weight_field")
            objective_field = linear_regression['objective_fields'] if \
                linear_regression['objective_fields'] else \
                linear_regression['objective_field']
        except KeyError:
            raise ValueError("Failed to find the linear regression expected "
                             "JSON structure. Check your arguments.")
        if 'linear_regression' in linear_regression and \
            isinstance(linear_regression['linear_regression'], dict):
            status = get_status(linear_regression)
            if 'code' in status and status['code'] == FINISHED:
                linear_regression_info = linear_regression[ \
                    'linear_regression']
                fields = linear_regression_info.get('fields', {})

                if not self.input_fields:
                    self.input_fields = [ \
                        field_id for field_id, _ in
                        sorted(fields.items(),
                               key=lambda x: x[1].get("column_number"))]
                self.coeff_ids = self.input_fields[:]
                self.coefficients = linear_regression_info.get( \
                    'coefficients', [])
                self.bias = linear_regression_info.get('bias', True)
                self.field_codings = linear_regression_info.get( \
                     'field_codings', {})
                self.number_of_parameters = linear_regression_info.get( \
                    "number_of_parameters")
                missing_tokens = linear_regression_info.get("missing_tokens")

                objective_id = extract_objective(objective_field)
                ModelFields.__init__(self,
                                     fields,
                                     objective_id=objective_id,
                                     terms=True,
                                     categories=True,
                                     numerics=True,
                                     missing_tokens=missing_tokens)
                self.field_codings = linear_regression_info.get( \
                  'field_codings', {})
                self.format_field_codings()
                for field_id in self.field_codings:
                    if field_id not in fields and \
                            field_id in self.inverted_fields:
                        self.field_codings.update( \
                            {self.inverted_fields[field_id]: \
                             self.field_codings[field_id]})
                        del self.field_codings[field_id]
                stats = linear_regression_info["stats"]
                if stats is not None and stats.get("xtx_inverse") is not None:
                    self.xtx_inverse = stats["xtx_inverse"][:]
                    self.mean_squared_error = stats["mean_squared_error"]
                    self.number_of_samples = stats["number_of_samples"]
                    # to be used in predictions
                    self.t_crit = student_t.interval( \
                        CONFIDENCE,
                        self.number_of_samples - self.number_of_parameters)[1]
                    self.xtx_inverse = list( \
                        np.linalg.inv(np.array(self.xtx_inverse)))

            else:
                raise Exception("The linear regression isn't finished yet")
        else:
            raise Exception("Cannot create the LinearRegression instance."
                            " Could not find the 'linear_regression' key"
                            " in the resource:\n\n%s" % linear_regression)
Example no. 43
0
    def __init__(self, time_series, api=None):

        self.resource_id = None
        self.input_fields = []
        self.objective_fields = []
        self.all_numeric_objectives = False
        self.period = 1
        self.ets_models = {}
        self.error = None
        self.damped_trend = None
        self.seasonality = None
        self.trend = None
        self.time_range = {}
        self.field_parameters = {}
        self._forecast = {}
        self.api = get_api_connection(api)

        self.resource_id, time_series = get_resource_dict( \
            time_series, "timeseries", api=self.api)

        if 'object' in time_series and \
            isinstance(time_series['object'], dict):
            time_series = time_series['object']
        try:
            self.input_fields = time_series.get("input_fields", [])
            self._forecast = time_series.get("forecast")
            self.objective_fields = time_series.get("objective_fields", [])
            objective_field = time_series['objective_field'] if \
                time_series.get('objective_field') else \
                time_series['objective_fields']
        except KeyError:
            raise ValueError("Failed to find the time series expected "
                             "JSON structure. Check your arguments.")
        if 'time_series' in time_series and \
            isinstance(time_series['time_series'], dict):
            status = get_status(time_series)
            if 'code' in status and status['code'] == FINISHED:
                time_series_info = time_series['time_series']
                fields = time_series_info.get('fields', {})
                self.fields = fields
                if not self.input_fields:
                    self.input_fields = [ \
                        field_id for field_id, _ in
                        sorted(self.fields.items(),
                               key=lambda x: x[1].get("column_number"))]
                self.all_numeric_objectives = time_series_info.get( \
                    'all_numeric_objectives')
                self.period = time_series_info.get('period', 1)
                self.ets_models = time_series_info.get('ets_models', {})
                self.error = time_series_info.get('error')
                self.damped_trend = time_series_info.get('damped_trend')
                self.seasonality = time_series_info.get('seasonality')
                self.trend = time_series_info.get('trend')
                self.time_range = time_series_info.get('time_range')
                self.field_parameters = time_series_info.get( \
                    'field_parameters', {})

                objective_id = extract_objective(objective_field)
                ModelFields.__init__(self, fields, objective_id=objective_id)
            else:
                raise Exception("The time series isn't finished yet")
        else:
            raise Exception("Cannot create the TimeSeries instance."
                            " Could not find the 'time_series' key"
                            " in the resource:\n\n%s" % time_series)
Example no. 44
0
def topic_model_from_shared_url(step):
    world.topic_model = world.api.get_topic_model("shared/topicmodel/%s" %
                                                  world.shared_hash)
    assert get_status(world.topic_model)['code'] == FINISHED
Example no. 45
0
    def __init__(self, model, api=None, fields=None, cache_get=None):
        """The Model constructor can be given as first argument:
            - a model structure
            - a model id
            - a path to a JSON file containing a model structure

        """

        if use_cache(cache_get):
            # using a cache to store the model attributes
            self.__dict__ = load(get_model_id(model), cache_get)
            return

        self.resource_id = None
        self.ids_map = {}
        self.terms = {}
        self.regression = False
        self.boosting = None
        self.class_names = None
        api = get_api_connection(api)
        # retrieving model information from the resource dict or the API
        self.resource_id, model = get_resource_dict( \
            model, "model", api=api, no_check_fields=fields is not None)

        if 'object' in model and isinstance(model['object'], dict):
            model = model['object']

        if 'model' in model and isinstance(model['model'], dict):
            status = get_status(model)
            if 'code' in status and status['code'] == FINISHED:
                # fill boosting info before creating modelfields
                if model.get("boosted_ensemble"):
                    self.boosting = model.get('boosting', False)
                if self.boosting == {}:
                    self.boosting = False

                self.input_fields = model["input_fields"]
                BaseModel.__init__(self, model, api=api, fields=fields)

                root = model['model']['root']
                self.weighted = "weighted_objective_summary" in root

                if self.boosting:
                    # build boosted tree
                    self.tree = b.build_boosting_tree( \
                        model['model']['root'])
                elif self.regression:
                    self.root_distribution = model['model'][ \
                        'distribution']['training']
                    # build regression tree
                    self.tree = r.build_regression_tree(root, \
                        distribution=self.root_distribution, \
                        weighted=self.weighted)
                else:
                    # build classification tree
                    self.root_distribution = model['model'][\
                        'distribution']['training']
                    self.laplacian_term = laplacian_term( \
                        extract_distribution(self.root_distribution)[1],
                        self.weighted)
                    self.tree = c.build_classification_tree( \
                        model['model']['root'], \
                        distribution=self.root_distribution, \
                        weighted=self.weighted)
                    self.class_names = sorted( \
                        [category[0] for category in \
                        self.root_distribution["categories"]])
                    self.objective_categories = [category for \
                        category, _ in self.fields[self.objective_id][ \
                       "summary"]["categories"]]

                if self.boosting:
                    self.tree_type = BOOSTING
                    self.offsets = b.OFFSETS
                elif self.regression:
                    self.tree_type = REGRESSION
                    self.offsets = r.OFFSETS[str(self.weighted)]
                else:
                    self.tree_type = CLASSIFICATION
                    self.offsets = c.OFFSETS[str(self.weighted)]

            else:
                raise Exception("Cannot create the Model instance."
                                " Only correctly finished models can be"
                                " used. The model status is currently:"
                                " %s\n" % STATUSES[status['code']])
        else:
            raise Exception("Cannot create the Model instance. Could not"
                            " find the 'model' key in the resource:"
                            "\n\n%s" % model)
Example no. 46
0
    def __init__(self, time_series, api=None):

        self.resource_id = None
        self.input_fields = []
        self.objective_fields = []
        self.all_numeric_objectives = False
        self.period = 1
        self.ets_models = {}
        self.error = None
        self.damped_trend = None
        self.seasonality = None
        self.trend = None
        self.time_range = {}
        self.field_parameters = {}
        self._forecast = {}

        # checks whether the information needed for local predictions is in
        # the first argument
        if isinstance(time_series, dict) and \
                not check_model_fields(time_series):
            # if the fields used by the time series are not
            # available, use only ID to retrieve it again
            time_series = get_time_series_id( \
                time_series)
            self.resource_id = time_series

        if not (isinstance(time_series, dict)
                and 'resource' in time_series and
                time_series['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_time_series_id(time_series)
            if self.resource_id is None:
                raise Exception(
                    api.error_message(time_series,
                                      resource_type='time_series',
                                      method='get'))
            query_string = ONLY_MODEL
            time_series = retrieve_resource(
                api, self.resource_id, query_string=query_string)
        else:
            self.resource_id = get_time_series_id(time_series)

        if 'object' in time_series and \
            isinstance(time_series['object'], dict):
            time_series = time_series['object']
        try:
            self.input_fields = time_series.get("input_fields", [])
            self._forecast = time_series.get("forecast")
            self.objective_fields = time_series.get(
                "objective_fields", [])
            objective_field = time_series['objective_field'] if \
                time_series.get('objective_field') else \
                time_series['objective_fields']
        except KeyError:
            raise ValueError("Failed to find the time series expected "
                             "JSON structure. Check your arguments.")
        if 'time_series' in time_series and \
            isinstance(time_series['time_series'], dict):
            status = get_status(time_series)
            if 'code' in status and status['code'] == FINISHED:
                time_series_info = time_series['time_series']
                fields = time_series_info.get('fields', {})
                self.fields = fields
                if not self.input_fields:
                    self.input_fields = [ \
                        field_id for field_id, _ in
                        sorted(self.fields.items(),
                               key=lambda x: x[1].get("column_number"))]
                self.all_numeric_objectives = time_series_info.get( \
                    'all_numeric_objectives')
                self.period = time_series_info.get('period', 1)
                self.ets_models = time_series_info.get('ets_models', {})
                self.error = time_series_info.get('error')
                self.damped_trend = time_series_info.get('damped_trend')
                self.seasonality = time_series_info.get('seasonality')
                self.trend = time_series_info.get('trend')
                self.time_range = time_series_info.get('time_range')
                self.field_parameters = time_series_info.get( \
                    'field_parameters', {})

                objective_id = extract_objective(objective_field)
                ModelFields.__init__(
                    self, fields,
                    objective_id=objective_id)
            else:
                raise Exception("The time series isn't finished yet")
        else:
            raise Exception("Cannot create the TimeSeries instance."
                            " Could not find the 'time_series' key"
                            " in the resource:\n\n%s" %
                            time_series)
Example no. 47
0
    def __init__(self, deepnet, api=None, cache_get=None):
        """The Deepnet constructor can be given as first argument:
            - a deepnet structure
            - a deepnet id
            - a path to a JSON file containing a deepnet structure

        """

        if use_cache(cache_get):
            # using a cache to store the model attributes
            self.__dict__ = load(get_deepnet_id(deepnet), cache_get)
            return

        self.resource_id = None
        self.regression = False
        self.network = None
        self.networks = None
        self.input_fields = []
        self.class_names = []
        self.preprocess = []
        self.optimizer = None
        self.missing_numerics = False
        api = get_api_connection(api)
        self.resource_id, deepnet = get_resource_dict( \
            deepnet, "deepnet", api=api)

        if 'object' in deepnet and isinstance(deepnet['object'], dict):
            deepnet = deepnet['object']
        self.input_fields = deepnet['input_fields']
        if 'deepnet' in deepnet and isinstance(deepnet['deepnet'], dict):
            status = get_status(deepnet)
            objective_field = deepnet['objective_fields']
            deepnet = deepnet['deepnet']
            if 'code' in status and status['code'] == FINISHED:
                self.fields = deepnet['fields']
                missing_tokens = deepnet.get('missing_tokens')
                ModelFields.__init__(
                    self, self.fields,
                    objective_id=extract_objective(objective_field),
                    terms=True, categories=True, missing_tokens=missing_tokens)

                self.regression = \
                    self.fields[self.objective_id]['optype'] == NUMERIC
                if not self.regression:
                    self.class_names = [category for category, _ in \
                        self.fields[self.objective_id][ \
                        'summary']['categories']]
                    self.class_names.sort()
                    # order matters
                    self.objective_categories = [category for \
                        category, _ in self.fields[self.objective_id][ \
                       "summary"]["categories"]]

                self.missing_numerics = deepnet.get('missing_numerics', False)
                if 'network' in deepnet:
                    network = deepnet['network']
                    self.network = network
                    self.networks = network.get('networks', [])
                    self.preprocess = network.get('preprocess')
                    self.optimizer = network.get('optimizer', {})
            else:
                raise Exception("The deepnet isn't finished yet")
        else:
            raise Exception("Cannot create the Deepnet instance. Could not"
                            " find the 'deepnet' key in the resource:\n\n%s" %
                            deepnet)
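A hedged usage sketch for the local Deepnet (the resource id and input field are assumptions):

from bigml.api import BigML
from bigml.deepnet import Deepnet

api = BigML()
# hypothetical resource id
local_deepnet = Deepnet("deepnet/5af06df94e17277501000014", api=api)
# local prediction; for classification it returns the most probable class
print(local_deepnet.predict({"petal length": 2.45}))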
Example no. 48
0
    def __init__(self, pca, api=None):

        self.resource_id = None
        self.input_fields = []
        self.term_forms = {}
        self.tag_clouds = {}
        self.dataset_field_types = {}
        self.term_analysis = {}
        self.categories = {}
        self.categories_probabilities = {}
        self.items = {}
        self.fields = {}
        self.item_analysis = {}
        self.standardize = None
        self.famd_j = 1

        self.resource_id, pca = get_resource_dict( \
            pca, "pca", api=api)

        if 'object' in pca and \
            isinstance(pca['object'], dict):
            pca = pca['object']
        try:
            self.input_fields = pca.get("input_fields", [])
            self.dataset_field_types = pca.get("dataset_field_types", {})
            self.famd_j = 1 if (self.dataset_field_types['categorical'] != \
                self.dataset_field_types['total']) else \
                self.dataset_field_types['categorical']

        except KeyError:
            raise ValueError("Failed to find the pca expected "
                             "JSON structure. Check your arguments.")
        if 'pca' in pca and \
            isinstance(pca['pca'], dict):
            status = get_status(pca)
            if 'code' in status and status['code'] == FINISHED:
                pca_info = pca[ \
                    'pca']
                fields = pca_info.get('fields', {})
                self.fields = fields
                if not self.input_fields:
                    self.input_fields = [ \
                        field_id for field_id, _ in
                        sorted(self.fields.items(),
                               key=lambda x: x[1].get("column_number"))]
                missing_tokens = pca_info.get("missing_tokens")
                ModelFields.__init__(self,
                                     fields,
                                     objective_id=None,
                                     terms=True,
                                     categories=True,
                                     numerics=False,
                                     missing_tokens=missing_tokens)

                for field_id in self.categories:
                    field = self.fields[field_id]
                    probabilities = [probability for _, probability in \
                                     field["summary"]["categories"]]
                    if field["summary"].get("missing_count", 0) > 0:
                        probabilities.append(field["summary"]["missing_count"])
                    total = float(sum(probabilities))
                    if total > 0:
                        probabilities = [probability / total for probability \
                            in probabilities]
                    self.categories_probabilities[field_id] = probabilities
                self.components = pca_info.get('components')
                self.eigenvectors = pca_info.get('eigenvectors')
                self.cumulative_variance = pca_info.get('cumulative_variance')
                self.text_stats = pca_info.get('text_stats')
                self.standardized = pca_info.get('standardized')
                self.variance = pca_info.get('variance')

            else:
                raise Exception("The pca isn't finished yet")
        else:
            raise Exception("Cannot create the PCA instance."
                            " Could not find the 'pca' key"
                            " in the resource:\n\n%s" % pca)
Example no. 49
0
def cluster_from_shared_url(step):
    world.cluster = world.api.get_cluster("shared/cluster/%s" % world.shared_hash)
    assert get_status(world.cluster)['code'] == FINISHED
Example no. 50
0
    def __init__(self, association, api=None):

        self.resource_id = None
        self.complement = None
        self.discretization = {}
        self.field_discretizations = {}
        self.items = []
        self.max_k = None
        self.max_lhs = None
        self.min_confidence = None
        self.min_leverage = None
        self.min_support = None
        self.min_lift = None
        self.search_strategy = DEFAULT_SEARCH_STRATEGY
        self.rules = []
        self.significance_level = None
        self.api = get_api_connection(api)

        self.resource_id, association = get_resource_dict( \
            association, "association", api=self.api)

        if 'object' in association and isinstance(association['object'], dict):
            association = association['object']

        if 'associations' in association and \
                isinstance(association['associations'], dict):
            status = get_status(association)
            if 'code' in status and status['code'] == FINISHED:
                self.input_fields = association['input_fields']
                associations = association['associations']
                fields = associations['fields']
                ModelFields.__init__( \
                    self, fields, \
                    missing_tokens=associations.get('missing_tokens'))
                self.complement = associations.get('complement', False)
                self.discretization = associations.get('discretization', {})
                self.field_discretizations = associations.get(
                    'field_discretizations', {})
                self.items = [
                    Item(index, item, fields)
                    for index, item in enumerate(associations.get('items', []))
                ]
                self.max_k = associations.get('max_k', 100)
                self.max_lhs = associations.get('max_lhs', 4)
                self.min_confidence = associations.get('min_confidence', 0)
                self.min_leverage = associations.get('min_leverage', -1)
                self.min_support = associations.get('min_support', 0)
                self.min_lift = associations.get('min_lift', 0)
                self.search_strategy = associations.get('search_strategy', \
                    DEFAULT_SEARCH_STRATEGY)
                self.rules = [
                    AssociationRule(rule)
                    for rule in associations.get('rules', [])
                ]
                self.significance_level = associations.get(
                    'significance_level', 0.05)
            else:
                raise Exception("The association isn't finished yet")
        else:
            raise Exception("Cannot create the Association instance. Could not"
                            " find the 'associations' key in the "
                            "resource:\n\n%s" % association)
Example no. 51
0
    def __init__(self, model, api=None):

        if (isinstance(model, dict) and 'resource' in model and
                model['resource'] is not None):
            self.resource_id = model['resource']
        else:
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_model_id(model)
            if self.resource_id is None:
                raise Exception(error_message(model,
                                              resource_type='model',
                                              method='get'))
            model = retrieve_model(api, self.resource_id)

        if ('object' in model and isinstance(model['object'], dict)):
            model = model['object']

        if ('model' in model and isinstance(model['model'], dict)):
            status = get_status(model)
            if ('code' in status and status['code'] == FINISHED):
                if 'model_fields' in model['model']:
                    fields = model['model']['model_fields']
                    # pagination or exclusion might cause a field not to
                    # be in available fields dict
                    if not all(key in model['model']['fields']
                               for key in fields.keys()):
                        raise Exception("Some fields are missing"
                                        " to generate a local model."
                                        " Please, provide a model with"
                                        " the complete list of fields.")
                    for field in fields:
                        field_info = model['model']['fields'][field]
                        fields[field]['summary'] = field_info['summary']
                        fields[field]['name'] = field_info['name']
                else:
                    fields = model['model']['fields']
                objective_field = model['objective_fields']
                self.objective_field = extract_objective(objective_field)
                self.uniquify_varnames(fields)
                self.inverted_fields = invert_dictionary(fields)
                self.all_inverted_fields = invert_dictionary(model['model']
                                                             ['fields'])
                self.tree = Tree(
                    model['model']['root'],
                    fields,
                    self.objective_field)
                self.description = model['description']
                self.field_importance = model['model'].get('importance',
                                                           None)
                if self.field_importance:
                    self.field_importance = [element for element
                                             in self.field_importance
                                             if element[0] in fields]
                self.locale = model.get('locale', DEFAULT_LOCALE)

            else:
                raise Exception("The model isn't finished yet")
        else:
            raise Exception("Cannot create the Model instance. Could not"
                            " find the 'model' key in the resource:\n\n%s" %
                            model)
Example no. 52
0
def dataset_status_finished(step):
    assert get_status(world.dataset)['code'] == FINISHED
Example no. 53
0
def model_from_public_url(step):
    world.model = world.api.get_model("public/%s" % world.model['resource'])
    eq_(get_status(world.model)['code'], FINISHED)
Example no. 54
0
    def __init__(self, topic_model, api=None):

        self.resource_id = None
        self.stemmer = None
        self.seed = None
        self.case_sensitive = False
        self.bigrams = False
        self.ntopics = None
        self.temp = None
        self.phi = None
        self.term_to_index = None
        self.topics = []
        self.api = get_api_connection(api)

        self.resource_id, topic_model = get_resource_dict( \
            topic_model, "topicmodel", api=self.api)

        if 'object' in topic_model and isinstance(topic_model['object'], dict):
            topic_model = topic_model['object']

        if 'topic_model' in topic_model \
                and isinstance(topic_model['topic_model'], dict):
            status = get_status(topic_model)
            if 'code' in status and status['code'] == FINISHED:
                self.input_fields = topic_model['input_fields']
                model = topic_model['topic_model']
                self.topics = model['topics']

                if 'language' in model and  model['language'] is not None:
                    lang = model['language']
                    if lang in CODE_TO_NAME:
                        self.stemmer = Stemmer.Stemmer(CODE_TO_NAME[lang])

                self.term_to_index = {self.stem(term): index for index, term
                                      in enumerate(model['termset'])}

                self.seed = abs(model['hashed_seed'])
                self.case_sensitive = model['case_sensitive']
                self.bigrams = model['bigrams']

                self.ntopics = len(model['term_topic_assignments'][0])

                self.alpha = model['alpha']
                self.ktimesalpha = self.ntopics * self.alpha

                self.temp = [0] * self.ntopics

                assignments = model['term_topic_assignments']
                beta = model['beta']
                nterms = len(self.term_to_index)

                sums = [sum(n[index] for n in assignments) for index
                        in range(self.ntopics)]

                self.phi = [[0 for _ in range(nterms)]
                            for _ in range(self.ntopics)]

                for k in range(self.ntopics):
                    norm = sums[k] + nterms * beta
                    for w in range(nterms):
                        self.phi[k][w] = (assignments[w][k] + beta) / norm

                missing_tokens = model.get("missing_tokens")
                ModelFields.__init__(self, model['fields'],
                                     missing_tokens=missing_tokens)
            else:
                raise Exception("The topic model isn't finished yet")
        else:
            raise Exception("Cannot create the topic model instance. Could not"
                            " find the 'topic_model' key in the"
                            " resource:\n\n%s" % topic_model)
Example no. 55
0
def model_from_shared_key(step):
    username = os.environ.get("BIGML_USERNAME")
    world.model = world.api.get_model(world.model['resource'],
                                      shared_username=username,
                                      shared_api_key=world.sharing_key)
    eq_(get_status(world.model)['code'], FINISHED)
Example no. 56
0
def dataset_status_finished(step):
    eq_(get_status(world.dataset)['code'], FINISHED)
Example no. 57
0
    def __init__(self, association, api=None):

        self.resource_id = None
        self.complement = None
        self.discretization = {}
        self.field_discretizations = {}
        self.items = []
        self.k = None
        self.max_lhs = None
        self.min_coverage = None
        self.min_leverage = None
        self.min_strength = None
        self.min_support = None
        self.min_lift = None
        self.prune = None
        self.search_strategy = DEFAULT_SEARCH_STRATEGY
        self.rules = []
        self.significance_level = None

        if not (isinstance(association, dict) and 'resource' in association
                and association['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_association_id(association)
            if self.resource_id is None:
                raise Exception(
                    api.error_message(association,
                                      resource_type='association',
                                      method='get'))
            query_string = ONLY_MODEL
            association = retrieve_resource(api,
                                            self.resource_id,
                                            query_string=query_string)
        else:
            self.resource_id = get_association_id(association)
        if 'object' in association and isinstance(association['object'], dict):
            association = association['object']

        if 'associations' in association and \
                isinstance(association['associations'], dict):
            status = get_status(association)
            if 'code' in status and status['code'] == FINISHED:
                associations = association['associations']
                fields = associations['fields']
                ModelFields.__init__(self, fields)
                self.complement = associations.get('complement', False)
                self.discretization = associations.get('discretization', {})
                self.field_discretizations = associations.get(
                    'field_discretizations', {})
                self.items = [
                    Item(index, item, fields)
                    for index, item in enumerate(associations.get('items', []))
                ]
                self.k = associations.get('k', 100)
                self.max_lhs = associations.get('max_lhs', 4)
                self.min_coverage = associations.get('min_coverage', 0)
                self.min_leverage = associations.get('min_leverage', -1)
                self.min_strength = associations.get('min_strength', 0)
                self.min_support = associations.get('min_support', 0)
                self.min_lift = associations.get('min_lift', 0)
                self.prune = associations.get('prune', True)
                self.search_strategy = associations.get('search_strategy', \
                    DEFAULT_SEARCH_STRATEGY)
                self.rules = [
                    AssociationRule(rule)
                    for rule in associations.get('rules', [])
                ]
                self.significance_level = associations.get(
                    'significance_level', 0.05)
            else:
                raise Exception("The association isn't finished yet")
        else:
            raise Exception("Cannot create the Association instance. Could not"
                            " find the 'associations' key in the "
                            "resource:\n\n%s" % association)
Example no. 58
0
    def __init__(self, cluster, api=None):

        self.resource_id = None
        self.centroids = None
        self.scales = {}
        self.term_forms = {}
        self.tag_clouds = {}
        self.term_analysis = {}
        self.item_analysis = {}
        self.items = {}
        if not (isinstance(cluster, dict) and 'resource' in cluster
                and cluster['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_cluster_id(cluster)
            if self.resource_id is None:
                raise Exception(
                    api.error_message(cluster,
                                      resource_type='cluster',
                                      method='get'))
            query_string = ONLY_MODEL
            cluster = retrieve_resource(api,
                                        self.resource_id,
                                        query_string=query_string)
        else:
            self.resource_id = get_cluster_id(cluster)
        if 'object' in cluster and isinstance(cluster['object'], dict):
            cluster = cluster['object']

        if 'clusters' in cluster and isinstance(cluster['clusters'], dict):
            status = get_status(cluster)
            if 'code' in status and status['code'] == FINISHED:
                clusters = cluster['clusters']['clusters']
                self.centroids = [Centroid(centroid) for centroid in clusters]
                self.scales = {}
                self.scales.update(cluster['scales'])
                self.term_forms = {}
                self.tag_clouds = {}
                self.term_analysis = {}
                fields = cluster['clusters']['fields']
                summary_fields = cluster['summary_fields']
                for field_id in summary_fields:
                    del fields[field_id]
                for field_id, field in fields.items():
                    if field['optype'] == 'text':
                        self.term_forms[field_id] = {}
                        self.term_forms[field_id].update(
                            field['summary']['term_forms'])
                        self.tag_clouds[field_id] = {}
                        self.tag_clouds[field_id].update(
                            field['summary']['tag_cloud'])
                        self.term_analysis[field_id] = {}
                        self.term_analysis[field_id].update(
                            field['term_analysis'])
                    if field['optype'] == 'items':
                        self.items[field_id] = {}
                        self.items[field_id].update(
                            dict(field['summary']['items']))
                        self.item_analysis[field_id] = {}
                        self.item_analysis[field_id].update(
                            field['item_analysis'])

                ModelFields.__init__(self, fields)
                if not all(
                    [field_id in self.fields for field_id in self.scales]):
                    raise Exception("Some fields are missing"
                                    " to generate a local cluster."
                                    " Please, provide a cluster with"
                                    " the complete list of fields.")
            else:
                raise Exception("The cluster isn't finished yet")
        else:
            raise Exception("Cannot create the Cluster instance. Could not"
                            " find the 'clusters' key in the resource:\n\n%s" %
                            cluster)
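A hedged sketch for the local Cluster (the resource id and field names are illustrative; centroid generally needs a value for every input field):

from bigml.api import BigML
from bigml.cluster import Cluster

api = BigML()
# hypothetical resource id
local_cluster = Cluster("cluster/5af06df94e17277501000018", api=api)
# nearest centroid for the input row (all input fields supplied)
print(local_cluster.centroid({"petal length": 2.45, "sepal width": 3.0,
                              "petal width": 0.5, "sepal length": 5.1,
                              "species": "Iris-setosa"}))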
Example no. 59
0
    def __init__(self, anomaly, api=None):

        self.resource_id = None
        self.sample_size = None
        self.input_fields = None
        self.mean_depth = None
        self.expected_mean_depth = None
        self.iforest = None
        self.top_anomalies = None
        if not (isinstance(anomaly, dict) and 'resource' in anomaly
                and anomaly['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_anomaly_id(anomaly)
            if self.resource_id is None:
                raise Exception(
                    api.error_message(anomaly,
                                      resource_type='anomaly',
                                      method='get'))
            query_string = ONLY_MODEL
            anomaly = retrieve_resource(api,
                                        self.resource_id,
                                        query_string=query_string)
        else:
            self.resource_id = get_anomaly_id(anomaly)
        if 'object' in anomaly and isinstance(anomaly['object'], dict):
            anomaly = anomaly['object']
            self.sample_size = anomaly.get('sample_size')
            self.input_fields = anomaly.get('input_fields')
        if 'model' in anomaly and isinstance(anomaly['model'], dict):
            ModelFields.__init__(self, anomaly['model'].get('fields'))
            if ('top_anomalies' in anomaly['model']
                    and isinstance(anomaly['model']['top_anomalies'], list)):
                self.mean_depth = anomaly['model'].get('mean_depth')
                status = get_status(anomaly)
                if 'code' in status and status['code'] == FINISHED:
                    self.expected_mean_depth = None
                    if self.mean_depth is None or self.sample_size is None:
                        raise Exception("The anomaly data is not complete. "
                                        "Score will"
                                        " not be available")
                    else:
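                        # Expected average path length of an isolation forest,
                        # c(n) = 2 * H(n - 1) - 2 * (n - 1) / n, with the
                        # harmonic number H(n - 1) approximated by
                        # ln(n - 1) + 0.5772156649 (Euler-Mascheroni constant).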
                        default_depth = (
                            2 *
                            (0.5772156649 + math.log(self.sample_size - 1) -
                             (float(self.sample_size - 1) / self.sample_size)))
                        self.expected_mean_depth = min(self.mean_depth,
                                                       default_depth)
                    iforest = anomaly['model'].get('trees', [])
                    if iforest:
                        self.iforest = [
                            AnomalyTree(anomaly_tree['root'], self.fields)
                            for anomaly_tree in iforest
                        ]
                    self.top_anomalies = anomaly['model']['top_anomalies']
                else:
                    raise Exception("The anomaly isn't finished yet")
            else:
                raise Exception("Cannot create the Anomaly instance. Could not"
                                " find the 'top_anomalies' key in the"
                                " resource:\n\n%s" % anomaly['model'].keys())
Example n. 60
0
    def __init__(self, cluster, api=None):

        self.resource_id = None
        self.centroids = None
        self.cluster_global = None
        self.total_ss = None
        self.within_ss = None
        self.between_ss = None
        self.ratio_ss = None
        self.critical_value = None
        self.k = None
        self.scales = {}
        self.term_forms = {}
        self.tag_clouds = {}
        self.term_analysis = {}
        self.item_analysis = {}
        self.items = {}
        if not (isinstance(cluster, dict) and 'resource' in cluster and
                cluster['resource'] is not None):
            if api is None:
                api = BigML(storage=STORAGE)
            self.resource_id = get_cluster_id(cluster)
            if self.resource_id is None:
                raise Exception(api.error_message(cluster,
                                                  resource_type='cluster',
                                                  method='get'))
            query_string = ONLY_MODEL
            cluster = retrieve_resource(api, self.resource_id,
                                        query_string=query_string)
        else:
            self.resource_id = get_cluster_id(cluster)
        if 'object' in cluster and isinstance(cluster['object'], dict):
            cluster = cluster['object']

        if 'clusters' in cluster and isinstance(cluster['clusters'], dict):
            status = get_status(cluster)
            if 'code' in status and status['code'] == FINISHED:
                the_clusters = cluster['clusters']
                cluster_global = the_clusters.get('global')
                clusters = the_clusters['clusters']
                self.centroids = [Centroid(centroid) for centroid in clusters]
                self.cluster_global = cluster_global
                if cluster_global:
                    self.cluster_global = Centroid(cluster_global)
                    # "global" has no "name" and "count" then we set them
                    self.cluster_global.name = GLOBAL_CLUSTER_LABEL
                    self.cluster_global.count = \
                        self.cluster_global.distance['population']
                self.total_ss = the_clusters.get('total_ss')
                self.within_ss = the_clusters.get('within_ss')
                if not self.within_ss:
                    self.within_ss = sum(centroid.distance['sum_squares'] for
                                         centroid in self.centroids)
                self.between_ss = the_clusters.get('between_ss')
                self.ratio_ss = the_clusters.get('ratio_ss')
                self.critical_value = cluster.get('critical_value')
                self.k = cluster.get('k')
                self.scales.update(cluster['scales'])
                self.term_forms = {}
                self.tag_clouds = {}
                self.term_analysis = {}
                fields = cluster['clusters']['fields']
                summary_fields = cluster['summary_fields']
                for field_id in summary_fields:
                    del fields[field_id]
                for field_id, field in fields.items():
                    if field['optype'] == 'text':
                        self.term_forms[field_id] = {}
                        self.term_forms[field_id].update(
                            field['summary']['term_forms'])
                        self.tag_clouds[field_id] = {}
                        self.tag_clouds[field_id].update(
                            field['summary']['tag_cloud'])
                        self.term_analysis[field_id] = {}
                        self.term_analysis[field_id].update(
                            field['term_analysis'])
                    if field['optype'] == 'items':
                        self.items[field_id] = {}
                        self.items[field_id].update(
                            dict(field['summary']['items']))
                        self.item_analysis[field_id] = {}
                        self.item_analysis[field_id].update(
                            field['item_analysis'])

                ModelFields.__init__(self, fields)
                if not all([field_id in self.fields for
                            field_id in self.scales]):
                    raise Exception("Some fields are missing"
                                    " to generate a local cluster."
                                    " Please, provide a cluster with"
                                    " the complete list of fields.")
            else:
                raise Exception("The cluster isn't finished yet")
        else:
            raise Exception("Cannot create the Cluster instance. Could not"
                            " find the 'clusters' key in the resource:\n\n%s" %
                            cluster)
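Correspondingly, a minimal usage sketch for this local Cluster class, assuming the bindings' centroid method; the cluster id and field name are placeholders:

from bigml.api import BigML
from bigml.cluster import Cluster

api = BigML()  # credentials taken from the environment
# placeholder id; use the id of a finished cluster
local_cluster = Cluster('cluster/012345678901234567890123', api=api)
# assign a centroid to one input row; field names depend on the dataset
centroid = local_cluster.centroid({'field name': 'value'})
print(centroid)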