    def save_entity(self, session_type, session_id):
        '''

        This method overrides the method of the same name in the 'BaseData'
        superclass. Specifically, this method updates an existing entity
        within the corresponding database table, 'tbl_dataset_entity'.

        @session_id, is synonymous with 'entity_id', and provides context to
            update the 'modified_xx' columns within the 'tbl_dataset_entity'
            database table.

        '''

        premodel_settings = self.premodel_data['properties']
        premodel_entity = {
            'title': premodel_settings.get('session_name', None),
            'uid': self.uid,
            'id_entity': session_id,
        }
        db_save = Entity(premodel_entity, session_type)

        # save dataset element
        db_return = db_save.save()

        # return
        if db_return['status']:
            return {'status': True, 'error': None}
        else:
            self.list_error.append(db_return['error'])
            return {'status': False, 'error': self.list_error}
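
A minimal, runnable sketch of the status/error contract returned by the
override above; 'db_return' stands in for the dict returned by
'Entity.save()', and its value is illustrative.

list_error = []
db_return = {'status': False, 'error': 'entity update failed'}

if db_return['status']:
    result = {'status': True, 'error': None}
else:
    list_error.append(db_return['error'])
    result = {'status': False, 'error': list_error}

print(result)  # {'status': False, 'error': ['entity update failed']}
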
def remove_collection():
    '''

    This router function removes a collection, based on the supplied database
    type.

    @collection, indicates the nosql implementation
    @entity, indicates the sql database

    '''

    if request.method == 'POST':
        # local variables
        response = None
        entity = Entity()
        collection = Collection()

        # programmatic-interface
        if request.get_json():
            r = request.get_json()
            uid = r['uid']
            type = r['type']
            cname = r['collection']

            if (cname and type == 'collection'):
                payload = {'properties.uid': uid}
                response = collection.query(cname, 'drop_collection', payload)

            elif (type == 'entity'):
                response = entity.remove_entity(uid, cname)

        # lastrowid returned must be greater than 0
        if response and response['result']:
            return json.dumps({'response': response['result']})
        else:
            return json.dumps({'response': response})
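
For reference, a client-side sketch of the JSON payload the route above
expects; the host and endpoint path are assumptions, only the 'uid', 'type',
and 'collection' keys come from the route.

import requests

# 'type' is either 'collection' (drop the nosql collection), or
# 'entity' (remove the corresponding sql rows)
payload = {
    'uid': 1,
    'type': 'collection',
    'collection': 'my_dataset',
}

# hypothetical endpoint path; adjust to the application's url rules
response = requests.post(
    'http://localhost:5000/remove-collection',
    json=payload,
)
print(response.json())
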
Example #3
def save_info(dataset, session_type, userid):
    '''

    This method saves the current entity into the database, then returns the
    corresponding entity id.

    '''

    list_error = []
    premodel_settings = dataset['data']['settings']
    premodel_entity = {
        'title': premodel_settings.get('session_name', None),
        'uid': userid,
        'id_entity': None
    }
    db_save = Entity(premodel_entity, session_type)

    # save dataset element
    db_return = db_save.save()

    # return error(s)
    if not db_return['status']:
        list_error.append(db_return['error'])
        return {'id': None, 'error': list_error}

    # return session id
    elif db_return['status'] and session_type == 'data_new':
        return {'id': db_return['id'], 'error': None}
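
A short, runnable sketch of the 'dataset' structure that 'save_info' reads;
only the 'data' -> 'settings' -> 'session_name' path is taken from the
function above, the values are illustrative.

dataset = {
    'data': {
        'settings': {
            'session_name': 'iris-measurements',
        },
    },
}

premodel_settings = dataset['data']['settings']
print(premodel_settings.get('session_name', None))  # 'iris-measurements'
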
    def save_entity(self, session_type, session_id):
        '''

        This method overrides the method of the same name in the 'BaseData'
        superclass. Specifically, this method updates an existing entity
        within the corresponding database table, 'tbl_dataset_entity'.

        @session_id, is synonymous with 'entity_id', and provides context to
            update the 'modified_xx' columns within the 'tbl_dataset_entity'
            database table.

        '''

        # local variables
        db_return = None
        entity = Entity()
        cursor = Collection()
        premodel_settings = self.premodel_data['properties']
        collection = premodel_settings['collection']
        collection_adjusted = collection.lower().replace(' ', '_')
        collection_count = entity.get_collection_count(self.uid)
        document_count = cursor.query(collection_adjusted, 'count_documents')

        # define entity properties
        premodel_entity = {
            'title': premodel_settings.get('session_name', None),
            'uid': self.uid,
            'id_entity': session_id,
        }

        # store entity values in database
        if (
            collection_adjusted and
            collection_count and
            collection_count['result'] < self.max_collection and
            document_count and
            document_count['result'] < self.max_document
        ):
            db_save = Entity(premodel_entity, session_type)
            db_return = db_save.save()

            if db_return and db_return['status']:
                return {'status': True, 'error': None}

            else:
                self.list_error.append(db_return['error'])
                return {'status': False, 'error': self.list_error}

        else:
            return {'status': True, 'error': None}
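
A self-contained sketch of the collection-name normalization and the limit
checks used above; the limits and counts below are placeholder values.

collection = 'My Training Data'
collection_adjusted = collection.lower().replace(' ', '_')
print(collection_adjusted)  # 'my_training_data'

max_collection = 10
max_document = 50
collection_count = {'result': 3}
document_count = {'result': 12}

within_limits = (
    collection_adjusted and
    collection_count and
    collection_count['result'] < max_collection and
    document_count and
    document_count['result'] < max_document
)
print(within_limits)  # True
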
Example #6
    def save_premodel_dataset(self):
        '''

        This method saves the entire dataset collection, as a json
        document, into the nosql implementation.

        '''

        # local variables
        entity = Entity()
        cursor = Collection()
        collection = self.premodel_data['properties']['collection']
        collection_adjusted = collection.lower().replace(' ', '_')
        collection_count = entity.get_collection_count(self.uid)
        document_count = cursor.query(collection_adjusted, 'count_documents')

        # enforce collection limit: the oldest collection name is obtained
        #     from the sql database. Then, the corresponding collection
        #     (i.e. target) is removed from the nosql database.
        if (not self.uid and collection_count
                and collection_count['result'] >= self.max_collection
                and collection_adjusted):
            target = entity.get_collections(self.uid)['result'][0]
            cursor.query(target, 'drop_collection')
            entity.remove_entity(self.uid, target)
            collection_count = entity.get_collection_count(self.uid)
            document_count = cursor.query(collection_adjusted,
                                          'count_documents')

        # save dataset
        if (collection_adjusted and collection_count
                and collection_count['result'] < self.max_collection
                and document_count
                and document_count['result'] < self.max_document):
            current_utc = datetime.datetime.utcnow().strftime(
                "%Y-%m-%dT%H:%M:%S")
            self.premodel_data['properties']['datetime_saved'] = current_utc
            self.premodel_data['properties']['uid'] = self.uid
            document = {
                'properties': self.premodel_data['properties'],
                'dataset': self.dataset
            }

            response = cursor.query(collection_adjusted, 'insert_one',
                                    document)

        else:
            response = None

        # return result
        if response and response['error']:
            self.list_error.append(response['error'])
            return {'result': None, 'error': response['error']}

        elif response and response['result']:
            return {'result': response['result'], 'error': None}

        else:
            return {'result': None, 'error': 'no dataset provided'}
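
A runnable sketch of the timestamp format and document layout that
'save_premodel_dataset' inserts into the nosql collection; the 'properties'
and 'dataset' values are illustrative placeholders.

import datetime
import json

current_utc = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
document = {
    'properties': {
        'collection': 'my_training_data',
        'datetime_saved': current_utc,
        'uid': 1,
    },
    'dataset': [{
        'dependent-variable': 'dep-1',
        'independent-variables': {'feat-1': 1.25},
    }],
}

print(json.dumps(document, indent=4))
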
Example #7
    def save_entity(self, session_type, id_entity=None):
        '''

        This method overrides the method of the same name in the 'BaseData'
        superclass. Specifically, this method updates an existing entity
        within the corresponding database table, 'tbl_dataset_entity'.

        @session_id, is synonymous with 'entity_id', and provides context to
            update the 'modified_xx' columns within the 'tbl_dataset_entity'
            database table.

        @numeric_model_type, list indices begin at 0, and need to be corrected
            by adding 1. This allows the numeric representation of the
            'model_type' to relate to another database table, which maps
            integer values to the corresponding 'model_type' name. The
            integer column of the mapping table begins at 1.

        '''

        # assign numerical representation
        numeric_model_type = self.list_model_type.index(self.model_type) + 1

        # store entity values in database
        premodel_settings = self.premodel_data['properties']
        premodel_entity = {
            'title': premodel_settings.get('session_name', None),
            'collection': premodel_settings['collection'],
            'model_type': numeric_model_type,
            'uid': self.uid,
        }
        db_save = Entity(premodel_entity, session_type)

        # save dataset element
        db_return = db_save.save()

        # return
        if db_return['status']:
            return {'status': True, 'error': None, 'id': db_return['id']}
        else:
            self.list_error.append(db_return['error'])
            return {'status': False, 'error': self.list_error}
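
A minimal sketch of the 'numeric_model_type' arithmetic described in the
docstring above; the model-type list is an assumed example value.

list_model_type = ['svm', 'svr']
model_type = 'svr'

# list indices start at 0, while the mapping table starts at 1
numeric_model_type = list_model_type.index(model_type) + 1
print(numeric_model_type)  # 2
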
def collection_count():
    '''

    This router function retrieves the number of collections saved by a
    specified user.

    '''

    if request.method == 'POST':
        # local variables
        count = None
        entity = Entity()

        # programmatic-interface
        if request.get_json():
            r = request.get_json()
            uid = r['uid']
            count = entity.get_collection_count(uid)

        if count and isinstance(count['result'], int):
            return json.dumps({'count': count['result']})
        else:
            return json.dumps({'count': -1})
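
A client-side sketch of the route above; the host and endpoint path are
assumptions, only the 'uid' request key and the 'count' response key come
from the route.

import requests

# hypothetical endpoint path; adjust to the application's url rules
response = requests.post(
    'http://localhost:5000/collection-count',
    json={'uid': 1},
)
print(response.json())  # e.g. {'count': 3}, or {'count': -1} on failure
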
Example #10
    def save_entity(self, session_type, session_id):
        '''

        This method overrides the method of the same name in the 'BaseData'
        superclass. Specifically, this method updates an existing entity
        within the corresponding database table, 'tbl_dataset_entity'.

        @session_id, is synonymous with 'entity_id', and provides context to
            update the 'modified_xx' columns within the 'tbl_dataset_entity'
            database table.

        '''

        # local variables
        db_return = None
        entity = Entity()
        cursor = Collection()
        premodel_settings = self.premodel_data['properties']
        collection = premodel_settings['collection']
        collection_adjusted = collection.lower().replace(' ', '_')
        collection_count = entity.get_collection_count(self.uid)
        document_count = cursor.query(collection_adjusted, 'count_documents')

        # define entity properties
        premodel_entity = {
            'title': premodel_settings.get('session_name', None),
            'uid': self.uid,
            'id_entity': session_id,
        }

        # store entity values in database
        if (collection_adjusted and collection_count
                and collection_count['result'] < self.max_collection
                and document_count
                and document_count['result'] < self.max_document):
            db_save = Entity(premodel_entity, session_type)
            db_return = db_save.save()

            if db_return and db_return['status']:
                return {'status': True, 'error': None}

            else:
                self.list_error.append(db_return['error'])
                return {'status': False, 'error': self.list_error}

        else:
            return {'status': True, 'error': None}
Example #11
def generate(model, kernel_type, session_id, feature_request, list_error):
    '''

    This method generates an sv (i.e. svm, or svr) model using feature data
    retrieved from the database. The generated model is then stored within the
    NoSQL datastore.

    @grouped_features, a matrix of observations, where each nested vector,
        or python list, is a collection of features within the containing
        observation.
    @encoded_labels, observation labels (dependent variable labels),
        encoded into a unique integer representation.

    '''

    # local variables
    dataset = feature_request.get_dataset(session_id, model)
    get_feature_count = feature_request.get_count(session_id)
    label_encoder = preprocessing.LabelEncoder()
    logger = Logger(__name__, 'error', 'error')
    list_model_type = current_app.config.get('MODEL_TYPE')

    # get dataset
    if dataset['error']:
        logger.log(dataset['error'])
        list_error.append(dataset['error'])
        dataset = None
    else:
        dataset = numpy.asarray(dataset['result'])

    # get feature count
    if get_feature_count['error']:
        logger.log(get_feature_count['error'])
        list_error.append(get_feature_count['error'])
        feature_count = None
    else:
        feature_count = get_feature_count['result'][0][0]

    # check dataset integrity, build model
    if (
        dataset is not None and
        feature_count and
        len(dataset) % feature_count == 0
    ):
        features_list = dataset[:, [[0], [2], [1]]]
        current_features = []
        grouped_features = []
        observation_labels = []
        feature_labels = []

        # group features into observation instances, record labels
        for index, feature in enumerate(features_list):
            # svm: observation labels
            if model == list_model_type[0]:
                current_features.append(feature[1][0])

                if (index+1) % feature_count == 0:
                    grouped_features.append(current_features)
                    observation_labels.append(feature[0][0])
                    current_features = []

            # svr: observation labels
            elif model == list_model_type[1]:
                current_features.append(float(feature[1][0]))

                if (index+1) % feature_count == 0:
                    grouped_features.append(current_features)
                    observation_labels.append(float(feature[0][0]))
                    current_features = []

            # general feature labels in every observation
            if not len(feature_labels) == feature_count:
                feature_labels.append(feature[2][0])

        # case 1: svm model
        if model == list_model_type[0]:
            # convert observation labels to a unique integer representation
            label_encoder = preprocessing.LabelEncoder()
            label_encoder.fit(dataset[:, 0])
            encoded_labels = label_encoder.transform(observation_labels)

            # create model
            clf = svm.SVC(kernel=kernel_type, probability=True)

            # cache encoded labels
            Model(label_encoder).cache(model + '_labels', session_id)

            # fit model
            clf.fit(grouped_features, encoded_labels)

        # case 2: svr model
        elif model == list_model_type[1]:
            # create model
            clf = svm.SVR(kernel=kernel_type)

            # fit model
            clf.fit(grouped_features, observation_labels)

            # compute, and cache coefficient of determination
            r2 = clf.score(grouped_features, observation_labels)
            Hset().cache(
                model + '_r2',
                session_id,
                r2
            )

        # get title
        entity = Entity()
        title = entity.get_title(session_id)['result'][0][0]

        # cache model, title
        Model(clf).cache(
            model + '_model',
            str(session_id) + '_' + title
        )
        Hset().cache(model + '_title', session_id, title)

        # cache feature labels, with respect to given session id
        Hset().cache(
            model + '_feature_labels',
            str(session_id),
            json.dumps(feature_labels)
        )

        # return error(s), if any
        return {'error': list_error}

    # dataset integrity check failed: return any accumulated error(s)
    return {'error': list_error}
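
A self-contained sketch of how 'generate' groups the flat feature rows into
observations, then fits an svm classifier; the toy data, labels, and kernel
below are illustrative, not values from the database query used above.

import numpy
from sklearn import preprocessing, svm

feature_count = 2

# each row: (observation label, feature value, feature label)
flat_features = [
    ('dog', 4.0, 'legs'), ('dog', 30.0, 'weight'),
    ('dog', 4.0, 'legs'), ('dog', 25.0, 'weight'),
    ('bird', 2.0, 'legs'), ('bird', 0.5, 'weight'),
    ('bird', 2.0, 'legs'), ('bird', 0.7, 'weight'),
]

current_features = []
grouped_features = []
observation_labels = []

# group every 'feature_count' rows into a single observation
for index, (label, value, name) in enumerate(flat_features):
    current_features.append(value)
    if (index + 1) % feature_count == 0:
        grouped_features.append(current_features)
        observation_labels.append(label)
        current_features = []

# encode string labels into integers, then fit the classifier
label_encoder = preprocessing.LabelEncoder()
encoded_labels = label_encoder.fit_transform(observation_labels)

clf = svm.SVC(kernel='linear')
clf.fit(numpy.asarray(grouped_features), encoded_labels)

prediction = clf.predict([[2.0, 0.6]])
print(label_encoder.inverse_transform(prediction))  # ['bird']
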
Example #12
    def save_entity(self, session_type, id_entity=None):
        '''

        This method overrides the method of the same name in the 'BaseData'
        superclass. Specifically, this method updates an existing entity
        within the corresponding database table, 'tbl_dataset_entity'.

        @session_id, is synonymous with 'entity_id', and provides context to
            update the 'modified_xx' columns within the 'tbl_dataset_entity'
            database table.

        @numeric_model_type, list indices begin at 0, and need to be corrected
            by adding 1. This allows the numeric representation of the
            'model_type' to relate to another database table, which maps
            integer values to the corresponding 'model_type' name. The
            integer column of the mapping table begins at 1.

        '''

        # local variables
        db_return = None
        entity = Entity()
        cursor = Collection()
        premodel_settings = self.premodel_data['properties']
        collection = premodel_settings['collection']
        collection_adjusted = collection.lower().replace(' ', '_')
        collection_count = entity.get_collection_count(self.uid)
        document_count = cursor.query(collection_adjusted, 'count_documents')

        # assign numerical representation
        numeric_model_type = self.list_model_type.index(self.model_type) + 1

        # define entity properties
        premodel_entity = {
            'title': premodel_settings.get('session_name', None),
            'collection': collection,
            'model_type': numeric_model_type,
            'uid': self.uid,
        }

        # store entity values in database
        if (
            collection_adjusted and
            collection_count and
            collection_count['result'] < self.max_collection and
            document_count and
            document_count['result'] < self.max_document
        ):
            entity = Entity(premodel_entity, session_type)
            db_return = entity.save()

        # return
        if db_return and db_return['error']:
            self.list_error.append(db_return['error'])
            return {'status': False, 'error': self.list_error}

        elif db_return and db_return['status']:
            return {'status': True, 'error': None, 'id': db_return['id']}

        else:
            return {'status': True, 'error': 'Entity was not saved', 'id': None}
    def save_premodel_dataset(self):
        '''

        This method saves the entire dataset collection, as a json
        document, into the nosql implementation.

        '''

        # local variables
        response = None
        entity = Entity()
        cursor = Collection()
        collection = self.premodel_data['properties']['collection']
        collection_adjusted = collection.lower().replace(' ', '_')
        collection_count = entity.get_collection_count(self.uid)
        document_count = cursor.query(collection_adjusted, 'count_documents')

        # enforce collection limit: the oldest collection name is obtained
        #     from the sql database. Then, the corresponding collection
        #     (i.e. target) is removed from the nosql database.
        if (
            not self.uid and
            collection_count and
            collection_count['result'] >= self.max_collection and
            collection_adjusted
        ):
            target = entity.get_collections(self.uid)['result'][0]
            cursor.query(target, 'drop_collection')
            entity.remove_entity(self.uid, target)
            collection_count = entity.get_collection_count(self.uid)
            document_count = cursor.query(collection_adjusted, 'count_documents')

        # save dataset
        if (
            collection_adjusted and
            collection_count and
            collection_count['result'] < self.max_collection and
            document_count and
            document_count['result'] < self.max_document
        ):
            current_utc = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
            self.premodel_data['properties']['datetime_saved'] = current_utc

            if self.dataset:
                document = {
                    'properties': self.premodel_data['properties'],
                    'dataset': self.dataset
                }

                response = cursor.query(
                    collection_adjusted,
                    'insert_one',
                    document
                )

        else:
            response = None

        # return result
        if response and response['error']:
            self.list_error.append(response['error'])
            return {'result': None, 'error': response['error']}

        elif response and response['result']:
            return {'result': response['result'], 'error': None}

        else:
            return {'result': None, 'error': 'no dataset provided'}