Exemplo n.º 1
0
    def __init__(self,
                 reduced_database_source="bar",
                 reduced_collection_source="raw_vector01_redu",
                 folder_contains_imgs="/data/bar03/screenshot01/",
                 traind_ipca_model_path="/data/bar03/ipcav08.pkl",
                 index_to_name_file="/data/bar03/moive_name_list_new.txt"):
        """Load the reduced-vector collection, the trained IPCA model and the
        index-to-name mapping used for image lookup.

        Parameters
        ----------
        reduced_database_source : str
            Name of the database holding the reduced vectors.
        reduced_collection_source : str
            Collection inside that database.
        folder_contains_imgs : str
            Directory scanned by self._Img_List() for screenshot images.
        traind_ipca_model_path : str
            Path to a pickled, already-fitted IPCA model.
        index_to_name_file : str
            Text file whose whitespace-separated tokens map index -> name.
        """
        reduced_database = DATABASE()
        reduced_database.database_chose(reduced_database_source)
        reduced_database.collection_chose(reduced_collection_source)
        data_from_database = reduced_database.get_data().astype("float32")

        # Last two columns are the target; everything before is the vector.
        self.compare_data = data_from_database[:, :-2]
        self.compare_target = data_from_database[:, -2:]

        self.folder_contains_imgs = folder_contains_imgs
        self.img_path_many = self._Img_List()
        self.model_path_ipca = traind_ipca_model_path
        # NOTE(review): pickle.load executes code on load — only use trusted
        # model files.
        with open(self.model_path_ipca, 'rb') as file_id:
            self._Ipca_loaded = pickle.load(file_id)

        # Fix: use a context manager instead of manual open()/close() so the
        # file handle is released even if read() raises.
        with open(index_to_name_file, "r") as name_file:
            # str.split() with no argument splits on ANY whitespace
            # (tokens, not lines) — behaviour kept identical to the original.
            self.index_to_name = name_file.read().split()
Exemplo n.º 2
0
def connectdb(thread=False):
    """Initialise the module-level db / cp / ue_data handles.

    NOTE(review): the original comment claimed thread=True connects to
    InfluxDB, but the code does the opposite — thread=True selects the DUMMY
    data source and thread=False uses the live DATABASE. Confirm intent.
    """
    global db
    global cp
    global ue_data
    if thread:
        db = DUMMY()
    else:
        ins.populatedb(
        )  # temporary method to populate db, it will be removed when data will be coming through KPIMON to influxDB

        db = DATABASE('UEData')
        db.read_data("liveUE")
        ue_data = db.data.values.tolist(
        )  # needs to be updated in future when live feed will be coming through KPIMON to influxDB
    cp = CAUSE(db)
Exemplo n.º 3
0
def train(thread=False):
    """Train the isolation-forest anomaly model.

    Sweeps the contamination fraction over 0.01..0.39, keeps the
    best-scoring value, then retrains with it and pushes the model.
    """
    source = DUMMY() if thread else DATABASE('UEData')
    source.read_data('train')

    prep = PREPROCESS(source.data)
    prep.process()

    model = modelling(prep.data)
    model.read_test(source)

    # Score each candidate contamination fraction.
    f1_scores = [model.isoforest(outliers_fraction=frac)
                 for frac in np.arange(0.01, 0.4, 0.01)]
    # Index i corresponds to fraction (i + 1) * 0.01.
    best = f1_scores.index(max(f1_scores)) + 1

    model.isoforest(outliers_fraction=best * 0.01, push_model=True)
    print("Optimum value of contamination : {}".format(best * 0.01))
    print('Training Ends : ')
Exemplo n.º 4
0
def atualizabanco():
    """Insert the request's JSON payload into the database and echo it back."""
    payload = request.get_json()
    DATABASE().insert_DB(**payload)
    return payload
Exemplo n.º 5
0
#         if numpy_data:
#             return np.array(data_list_from_db)
#         else:
#             return data_list_from_db
#
#     def insert_data(self, d2_arrary_data, d2_target):
#         dimension_of_data = d2_arrary_data.shape[1]
#         df_data = pd.DataFrame(data=d2_arrary_data, columns=range(dimension_of_data))
#         df_target = pd.DataFrame(data=d2_target, columns=["movie_name", "second"])
#         data_target = df_data.join(df_target)
#         insert_result = self.collection.insert_many(json.loads(data_target.to_json(orient="records")))
#         return insert_result

# from sklearn.decomposition import PCA, IncrementalPCA

data_base = DATABASE()
# Bare attribute access — presumably a property that prints/refreshes the
# collection listing as a side effect; TODO confirm.
data_base.collections_of_eachdatabase

data_base.database_chose("bar")
data_base.collection_chose("raw_vector01")
print("data_base.collection =", data_base.collection)


def Explain_Ratio(numpy_array, percentage=0.95, first=50):
    numpy_array = np.array(numpy_array).flatten()
    sum_ = 0
    for index in range(len(numpy_array)):
        sum_ += numpy_array[index]
        if sum_ >= percentage:
            return index + 1, (index + 1) / len(numpy_array), len(
                numpy_array), numpy_array[:first].sum(
Exemplo n.º 6
0
from database import DATABASE
import time

print(DATABASE().collections_of_eachdatabase)

bar = DATABASE()
bar.database_chose("bar")
bar.collection_chose("raw_vector01_redu")

# Number of documents currently in the reduced-vector collection.
print(bar.collection.count())

# NOTE(review): this prints the DATABASE class object itself, not an
# instance — confirm that is intended.
print(DATABASE)

# while True:
#     print(  bar.collection.count() )
#     time.sleep(60)
Exemplo n.º 7
0
import sys
sys.path.insert(0, '..')
import constants as c

from database import DATABASE

db = DATABASE()

# Dump the database contents via the DATABASE helper's Print method.
db.Print()

Exemplo n.º 8
0
import numpy as np
import subprocess
import os
# np.set_printoptions(precision=20)

from database import DATABASE

# Module-level connection; used as the default `database` argument of
# MODEL_JPG_VECTOR below.
bar3 = DATABASE()
bar3.database_chose("bar3")
bar3.collection_chose("bar3")


class MODEL_JPG_VECTOR(object):
    """Wraps a Keras Xception model for turning JPG images into feature
    vectors stored through a DATABASE connection.

    Keras is imported lazily inside __init__ so importing this module does
    not pull in TensorFlow.
    """

    def __init__(self,
                 chunk=1500,
                 img_path_many=None,
                 folder_path=None,
                 database=bar3):
        # NOTE(review): `database=bar3` binds the module-level connection at
        # definition time; pass an explicit DATABASE to override.
        # chunk / img_path_many / folder_path are not used in the visible
        # part of __init__ — presumably consumed by methods outside this
        # snippet; TODO confirm.
        self.database = database

        # Lazy keras imports keep heavyweight TF loading out of module import.
        from keras.models import Model
        self._Model = Model
        from keras.preprocessing import image
        self._image = image
        from keras.applications.xception import Xception as key_model
        self._key_model = key_model
        from keras.applications.xception import preprocess_input, decode_predictions
        self._preprocess_input = preprocess_input
        self._decode_predictions = decode_predictions
        # Headless Xception (include_top=False) pretrained on ImageNet.
        base_model_4 = key_model(weights='imagenet', include_top=False)
        self._base_model_4 = base_model_4
Exemplo n.º 9
0
 def __init__(self):
     # Private (name-mangled) DATABASE handle; the name suggests it is used
     # for read access only — TODO confirm against the class's other methods.
     self.__read = DATABASE()
Exemplo n.º 10
0
sys.path.insert(0, '../database')
from database import DATABASE

sys.path.insert(0, '../pyrosim')
import pyrosim

sys.path.insert(0, '../TPR_3')

sys.path.insert(0, '../environments')
from environment0 import ENVIRONMENT0

import constants as c

import pickle

database = DATABASE()

# Pickled robot presumably saved by an earlier run — TODO confirm producer.
filename = '../data/robot0.p'

# Simulator configured from shared constants (evaluation time etc.).
s = pyrosim.Simulator(debug=False,
                      play_paused=False,
                      eval_time=c.evaluationTime)

e = ENVIRONMENT0(s, [0, 0, 0], [0, 0, 0], c.noFade)

e.Send_To_Simulator()

# NOTE(review): unpickling executes code on load — only load trusted files.
# The file handle from open() is never closed here.
r = pickle.load(open(filename, 'rb'))

command = c.defaultCommand
Exemplo n.º 11
0
from database import DATABASE
import pickle
import faiss

import numpy as np
from keras.models import Model
from keras.preprocessing import image
from keras.applications.xception import Xception as key_model
from keras.applications.xception import preprocess_input, decode_predictions

# Headless Xception feature extractor; output taken three layers from the end.
base_model_4 = key_model(weights='imagenet', include_top=False)
model = Model(inputs=base_model_4.input,
              outputs=base_model_4.get_layer(index=-3).output)

reduced_database = DATABASE()
reduced_database.database_chose("bar")
reduced_database.collection_chose("raw_vector01_redu")

# Each row: reduced feature vector followed by two target columns —
# presumably (movie_name, second); confirm against the collection schema.
data_from_database = reduced_database.get_data(movie_name=0).astype("float32")
compare_data = data_from_database[:, :-2]
compare_target = data_from_database[:, -2:]

img_path = "/data/bar03/screenshot01/0_160.jpg"


def Jpg_To_Vector(img_path):
    """Load a JPG and convert it to a 299x299 float array for Xception.

    Accepts a path or a list of paths (only the first element is used).

    NOTE(review): this snippet appears truncated — the original presumably
    continues with preprocess_input / model.predict; as shown it returns None.
    """
    if isinstance(img_path, list):
        img_path = img_path[0]

    img = image.load_img(img_path, target_size=(299, 299))
    x = image.img_to_array(img)
Exemplo n.º 12
0
from database import DATABASE
from model3 import MODEL_JPG_VECTOR


if __name__ == "__main__":
    # Banner marking the start of the vectorisation run.
    print(" begining ".center(60, "="))

    target_folder = "/data/bar04/output"

    # Open the collection that will receive the raw vectors.
    store = DATABASE()
    store.database_chose("bar")
    store.collection_chose("raw_vector02")

    # Vectorise every JPG under target_folder and write the results out.
    vectoriser = MODEL_JPG_VECTOR(chunk=400,
                                  folder_path=target_folder,
                                  database=store)
    vectoriser.Jpg_To_Vector_DataBase(to_database=True)
Exemplo n.º 13
0
import time
from database import DATABASE
import pymongo

# Connect to the raw-vector collection.
data_base_of_raw_data = DATABASE()
data_base_of_raw_data.database_chose("bar")
data_base_of_raw_data.collection_chose("raw_vector01")
print(data_base_of_raw_data.collections_of_eachdatabase)
data_base = data_base_of_raw_data

page_size = 1000
movie_name = 3
# Fetch up to page_size documents for this movie recorded after second 25.1.
cursor = data_base.collection.find({
    "movie_name": movie_name,
    'second': {
        '$gt': 25.1
    }
}).limit(page_size)
t0 = time.time()
cursor_dict = list(cursor)
cursor.close()

# Fix: Cursor.count() is deprecated (removed in PyMongo 4) and by default
# ignores limit(), so it could report more than the fetched page. The
# materialised list already has the real chunk size.
length_of_chunk = len(cursor_dict)

print("len(cursor_dict)) =", length_of_chunk)
print("   read data time = {}  ".format(time.time() - t0).center(60, "*"))
Exemplo n.º 14
0
    def Ipca_Reduced_Model_Load(self, model_path=None):
        """Unpickle a fitted IPCA model into ``self.Ipca_Loaded``.

        Parameters
        ----------
        model_path : str or None
            File to load; when None, falls back to ``self._model_path``.
        """
        # Fix: the two original branches were identical except for the path;
        # collapse the duplication. `is None` (not truthiness) preserves the
        # original behaviour for edge cases like model_path="".
        path = self._model_path if model_path is None else model_path
        # NOTE(review): pickle.load executes code on load — trusted files only.
        with open(path, 'rb') as file_id:
            self.Ipca_Loaded = pickle.load(file_id)


if __name__ == "__main__":

    data_base_of_raw_data = DATABASE()
    data_base_of_raw_data.database_chose("bar")
    data_base_of_raw_data.collection_chose("raw_vector01")

    data_base_reduced = DATABASE()
    data_base_reduced.database_chose("bar")
    data_base_reduced.collection_chose("raw_vector01_redu")
    # data_base_reduced.collection.drop()

    print(data_base_of_raw_data.collections_of_eachdatabase)

    train_ipca = VECTORS_REDUCE(
        data_base_of_raw_data=data_base_of_raw_data,
        data_base_reduced=data_base_reduced,
        folder_containing_movies="/data/bar03",
        # movie_name_list = [3,4],