def showModel(self):
     self.load_file = QFileDialog.getOpenFileName(
         self, 'Open File', '', '(*.pkl *.h5);; (*.pkl);; (*.h5)')
     if self.load_file[0] == '':
         return
     try:
         if self.load_file[0][-3:] == 'pkl':
             self.tabWidget.setCurrentIndex(0)
             self.model = ARIMAResults.load(self.load_file[0])
         elif self.load_file[0][-2:] == 'h5':
             self.tabWidget.setCurrentIndex(1)
             self.model = tf.keras.models.load_model(self.load_file[0])
         # Recover the file name (the part after the last '/') for display
         self.name = self.load_file[0][::-1]
         self.res_name = ''
         for i in self.name:
             if i == '/':
                 break
             else:
                 self.res_name = i + self.res_name
     except Exception as e:
         print(self.load_file, e)
         return
     QMessageBox.about(self, "Alert", "Model loaded successfully.")
     self.model_name.setText(self.res_name)
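The reversed-string loop above only recovers the file name from the selected path. A shorter equivalent using the standard library, shown here on a stand-in path rather than the dialog's return value, is os.path.basename:

import os

# Stand-in for self.load_file[0] returned by QFileDialog.getOpenFileName
file_path = '/some/dir/my_model.pkl'
res_name = os.path.basename(file_path)  # 'my_model.pkl'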
Example #2
def predict_sarimax(**kwargs) -> dict:
    with open('utils/last_exogs.pkl', 'rb') as f:
        exogs = pickle.load(f)
    exogs = pd.DataFrame.from_dict(exogs)
    exogs = exogs[['InTransit', 'Export', 'Import', 'Transit']]
    models = {}
    for col in cols:
        models[col] = ARIMAResults.load(f'utils/models/sarimax_{col}.pkl')
    start = models['Export'].fittedvalues.index.max()
    exogs.index = [start]

    def iteract(start):
        start = start + pd.DateOffset(months=1)
        tmp = {}
        for col in cols:
            tmp[col] = models[col].predict(
                start, exog=exogs.loc[:, exogs.columns != col])[0]
        return start, tmp

    if 'next' in kwargs.keys():
        for _ in range(kwargs['next']):
            start, tmp = iteract(start)
            exogs.loc[start] = tmp
        return exogs.iloc[1:].to_dict('series')
    elif 'start' in kwargs.keys() and 'end' in kwargs.keys():
        while str(start)[:7] != kwargs['end']:
            start, tmp = iteract(start)
            exogs.loc[start] = tmp
        return exogs.loc[kwargs['start']:kwargs['end']].to_dict('series')
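A short usage sketch for predict_sarimax; the utils/ file layout comes from the code above, while the concrete dates and horizon are illustrative assumptions:

# Forecast the next three months for every column
preds = predict_sarimax(next=3)

# Or forecast a calendar window; start/end are 'YYYY-MM' strings,
# matching the str(start)[:7] comparison above
preds = predict_sarimax(start='2021-01', end='2021-06')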
Example #3
 def load(self, model_path, scaler_path=None):
     '''
     Loads the model from disk
     :param model_path: Path to saved model
     :param scaler_path: Optional path to a saved scaler (not used by this loader)
     '''
     if os.path.splitext(model_path)[1] != '.pkl':
         raise Exception('Model file path for ' + self.name +
                         ' must have ".pkl" extension.')
     self.model = ARIMAResults.load(model_path)
     return
Example #4
def predict():
	dataset = read_csv('SmtExpMngr/models/train_data.csv', header=None, index_col=0, parse_dates=True, squeeze=True)
	X = dataset.values.astype('float32')
	history = [x for x in X]
	months_in_year = 1
	validation = read_csv('validation.csv', header=None, index_col=0, parse_dates=True, squeeze=True)
	y = validation.values.astype('float32')
	# load model
	model_fit = ARIMAResults.load('/home/termi/beproject/SmtExpMngr/models/model.pkl')
	bias = numpy.load('/home/termi/beproject/SmtExpMngr/models/model_bias.npy')
	# make first prediction
	predictions = list()
	yhat = float(model_fit.forecast())
	yhat = bias + inverse_difference(history, yhat, months_in_year)
	predictions.append(yhat)
	history.append(y[0])
	return yhat
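Example #4 calls an inverse_difference helper that is not shown in the snippet. The conventional definition, an assumption here that matches the history and months_in_year arguments used above, adds the forecast back onto the observation interval steps earlier:

def inverse_difference(history, yhat, interval=1):
    # Undo differencing by adding back the value `interval` steps in the past
    return yhat + history[-interval]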
Example #5
 def get(self, period):

     print("Period to predict:", period)
     
     # Connect to the HDFS client
     client = InsecureClient(url='http://namenode:9870', user='******')
     
     # Check that the saved model exists on HDFS
     if client.status(model_hdfs_remote_path + model_name, strict=False) is not None:
         
         # load model
         client.download(model_hdfs_remote_path+model_name, model_local_path, overwrite=True)
         model_fit = ARIMAResults.load(model_local_path + model_name)
  
         # Dataset for evaluation
         df = get_data_cassandra()
         print(df.head())
         X = df['total_estimated_load'].values
         
         start_index = len(X)
         end_index = start_index + int(period)
         forecast = model_fit.predict(start=start_index, end=end_index)
         
         #df['date_est_load'] = df['date_est_load'].apply(pd.Timestamp)
         day = df['date_est_load'].values[-1].date()
         print(day)
         print(type(day))
         day += datetime.timedelta(days=1)
         
         res = {}
         for yhat in forecast:
             res[day.strftime("%d/%m/%Y")] = yhat
             day += datetime.timedelta(days=1)
         
         return res
 
 
     return "Service has been stopped"
Example #6
# load the finalized model and make a prediction
from statsmodels.tsa.arima.model import ARIMAResults
from math import exp
from math import log
import numpy


# invert box-cox transform
def boxcox_inverse(value, lam):
    if lam == 0:
        return exp(value)
    return exp(log(lam * value + 1) / lam)


model_fit = ARIMAResults.load('model.pkl')
lam = numpy.load('model_lambda.npy')
yhat = model_fit.forecast()[0]
yhat = boxcox_inverse(yhat, lam)
print('Predicted: %.3f' % yhat)
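The snippet above assumes model.pkl and model_lambda.npy already exist on disk. A minimal sketch of the save side that would produce them; the CSV path, the (0, 1, 2) order, and the strictly positive series are illustrative assumptions:

from pandas import read_csv
from scipy.stats import boxcox
from statsmodels.tsa.arima.model import ARIMA
import numpy

# Fit on a Box-Cox-transformed series and persist both the results and lambda
series = read_csv('dataset.csv', header=None, index_col=0, parse_dates=True)
transformed, lam = boxcox(series.values.flatten())
model_fit = ARIMA(transformed, order=(0, 1, 2)).fit()
model_fit.save('model.pkl')
numpy.save('model_lambda.npy', [lam])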
Example #7
 def predict(self, model: ARIMAResults, start, end):
     return model.predict(start, end)
Example #8
 def forecast(self, model: ARIMAResults, n_steps):
     return model.forecast(n_steps)
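A minimal usage sketch for the two wrapper methods above, calling the underlying ARIMAResults directly; the pickle path and index values are assumptions:

from statsmodels.tsa.arima.model import ARIMAResults

results = ARIMAResults.load('model.pkl')
in_sample = results.predict(0, 10)  # what the predict wrapper in Example #7 delegates to
ahead = results.forecast(5)         # what the forecast wrapper in Example #8 delegates to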
Example #9
        last_exogs[key] = [df.iloc[-1][key]]
    with open('utils/last_exogs.pkl', 'wb') as f:
        pickle.dump(last_exogs, f)

predictions = {}
cols = ['InTransit', 'Export', 'Import', 'Transit']

if args.predict:
    start, end = args.predict.split(':')
    print(f'Forecasting over the range from {start} to {end}.')
    try:
        if args.sarimax:
            predictions = predict_sarimax(start=start, end=end)
        else:
            for col in cols:
                model = ARIMAResults.load(f'utils/models/sarima_{col}.pkl')
                predictions[col] = model.predict(start, end)
    except FileNotFoundError:
        print(
            'A model must be created before forecasting. Run the program with the -r/--retrain flag and '
            'provide the path to the source data table in CSV or XLSX format'
        )
        exit(1)

if args.next:
    print(f'Forecasting for the next {args.next} months.')
    try:
        if args.sarimax:
            predictions = predict_sarimax(next=args.next)
        else:
            for col in cols: