def authenticate_by_token(self):
     """
     Attempt to authenticate GDrive v3 API using saved token (if one exists)
     """
     logger.debug(f'Attempting to authenticate by token @ {self.token_path} ...')
     # The file token.pickle stores the user's access and refresh tokens, and is created automatically when the
     # authorization flow completes for the first time.
     if os.path.isfile(self.token_path):
         with open(self.token_path, 'rb') as token:
             try:
                 credentials = p_load(token)
                 if credentials.expired and credentials.refresh_token:
                     logger.debug('Refreshing expired credentials ...')
                     credentials.refresh(Request())
                 if credentials.valid:
                     self.credentials = credentials
                     return True
             except AttributeError as ex:
                 logger.debug(f'Unable to deserialize {self.token_path} as {Credentials.__qualname__}')
                 raise ex
     elif os.path.isdir(self.token_path):
         raise IsADirectoryError(f'Serialized token file was expected. \'{self.token_path}\' is a directory')
     elif not os.path.exists(self.token_path):
         raise FileNotFoundError(f'Serialized token file was expected. No such file: \'{self.token_path}\'')
     return False
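Note: `p_load` in these snippets appears to be `pickle.load` imported under an alias (the import is shown explicitly in Example #5). The GDrive snippet above presumably also relies on the standard google-auth classes; a minimal sketch of the assumed imports:

from pickle import load as p_load                     # alias used throughout these examples
from google.auth.transport.requests import Request    # used to refresh expired credentials
from google.oauth2.credentials import Credentials     # class expected inside the pickled token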
Example #2
def load_cls(filename):
    """
    Load classifier from file
    """
    with open(filename, 'rb') as model_file:
        cls = p_load(model_file)
    return cls
Example #3
def load_model(file_name):
    """
    Wrapper for pickle.load. Opens the file named by file_name and unpickles the saved model.
    :param file_name: name of file where the model is saved
    :return: the model
    """
    with open(file_name, 'rb') as file:
        classifier = p_load(file)
    return classifier
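The loaders above only show the read side. A minimal, hypothetical counterpart using `pickle.dump` (the name `save_model` and the alias `p_dump` are illustrative, not from the original source):

from pickle import dump as p_dump

def save_model(classifier, file_name):
    """
    Wrapper for pickle.dump. Writes the model that load_model reads back.
    :param classifier: the fitted model to persist
    :param file_name: name of the file to save the model to
    """
    with open(file_name, 'wb') as file:
        p_dump(classifier, file)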
Example #4
 def load(self, path: str) -> None:
     """
     Load the neural network state dictionary (or a pickled Q-function)
     :param path: full path for state dictionary file
     """
     if self._dtype == 'DQN':
         # 'load' here is presumably torch.load; restore the saved state dict
         self.policy_net.load_state_dict(load(path))
         self.policy_net.eval()
     else:
         # non-DQN agents unpickle the Q-function directly
         with open(path, 'rb') as file:
             self._q = p_load(file)
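A hypothetical save counterpart for the agent above, assuming the bare `load` in the DQN branch is `torch.load` (the method name and structure are illustrative): the DQN branch persists the network's state dict with `torch.save`, other agents pickle the Q-function directly.

import torch
from pickle import dump as p_dump

def save(self, path: str) -> None:
    """
    Hypothetical counterpart to load(): persist the agent's state to disk
    :param path: full path for the output file
    """
    if self._dtype == 'DQN':
        torch.save(self.policy_net.state_dict(), path)  # standard state-dict save
    else:
        with open(path, 'wb') as file:
            p_dump(self._q, file)  # pickle the Q-function directly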
Example #5
from flask import request, Flask, render_template
from flask_pymongo import PyMongo
from joblib import dump, load
import numpy as np
from sklearn import preprocessing
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.linear_model import LinearRegression
from pickle import load as p_load

mlr_model = load('mlr_model.joblib')  # 'load' here is joblib.load (imported above)
# load the fitted scaler
with open('scaler.pkl', 'rb') as scaler_file:
    scaler = p_load(scaler_file)

# from flask import Flask
app = Flask(__name__)
 
# model = pickle.load(open('model.pkl', 'rb'))
col = ['neighborhood', 'total_area', 'overallqual', 'garagecars', 'fullbath', 'yearbuilt', 'yearremodadd']
 
@app.route("/")
def index():
    return render_template("index.html")


@app.route('/predict', methods=['POST', 'GET'])
def predict():
    int_features = [int(x) for x in request.form.values()]
    final = np.array(int_features, dtype=float).reshape(1, -1)
    final_scaled = scaler.transform(final)
    prediction = mlr_model.predict(final_scaled)
    prediction = np.exp(prediction)  # invert the log transform applied to the target
    # the original snippet ends here; a minimal response is assumed
    return str(round(float(prediction[0]), 2))
Example #6
	def __init__(self):
	
		#INITIALIZE AND SHOW GUI...
		super(Main, self).__init__()
		self.ui=CC_Main()
		self.ui.setupUi(self)
		self.ui.lineEdit.setText("Programmer")
		self.ui.lineEdit_3.setText("Toronto")
		self.ui.tableWidget.addFuncPointers({"getProfileData": self.getProfileData})
		self.show()
		
		#Used to keep the log from being finalized multiple times...
		self.run_once = 0
		self.widgets_on = True
		
		#Keep track of which positions are valid for automation...
		self.good_list = []
		self.complex_list = []
		self.bad_list = []
		
		#See if saved data exists, and load it if it does...
		try:
			with open("./data/past_app_sessions.pickle", "rb") as fin:
				log.append("Found previous application history data! Loading into memory...", entity="main")
				container = p_load(fin)
			self.good_list = container['good_list']
			self.complex_list = container['complex_list']
			self.bad_list = container['bad_list']
		except OSError:
			log.append("No previous application history data found... skipping load.", entity="main")
		
			
		self.last_row = -1
		
		log.append('Initializing...')
	
		#Redirect STDOUT and STDERR
		self.redir = StdRedir(log)
		self.ui.logWidget.loadConnection(self.redir.stdout_receiver.new_data)
		self.ui.logWidget.setLog(log)
		self.ui.tableWidget.setLog(log)
		self.redir.startThreads()
		
		#SETUP SIGNALS/SLOTS...
		self.ui.pushButton_2.clicked.connect(self.do_search)
		self.ui.pushButton_4.clicked.connect(self.applyToSelected)
		self.ui.lineEdit.returnPressed.connect(self.do_search)
		self.ui.lineEdit_3.returnPressed.connect(self.do_search)
		self.ui.newStyleButton.clicked.connect(self.ui.tableWidget._newStyle)
		app.aboutToQuit.connect(self.closingCode)
		
		#SETUP *CUSTOM* WORKERTHREAD SIGNALS
		self.worker = WorkerThread("*****@*****.**", "unkQRXen9", log)
		self.worker.search_complete.connect(self.process_search)
		self.worker.profiling_complete.connect(self.ui.tableWidget.setRowData)
		self.worker.job_tasks.connect(self.ui.loadBar.prepLoadBar)
		self.worker.task_complete.connect(self.ui.loadBar.updateLoadBar)
		self.worker.finished.connect(self.release_widgets)
		self.worker.submission_complete.connect(self.done_application)
		
		#LOGIN TO SITE!
		self.lockout_widgets()
		self.worker.start()
		
		log.append('Initialization Complete!')
		
		#Set focus to the search button initially...
		self.ui.pushButton_2.setFocus()
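The `__init__` above only restores `good_list`, `complex_list` and `bad_list` from `past_app_sessions.pickle`; the write side is not shown. A hypothetical sketch of how a shutdown slot like the `closingCode` connected above might persist them with `pickle.dump` (the `p_dump` alias and dict layout mirror the load code, but the body is assumed):

	def closingCode(self):
		#Hypothetical: persist the session lists that __init__ reads back on the next run...
		container = {
			'good_list': self.good_list,
			'complex_list': self.complex_list,
			'bad_list': self.bad_list,
		}
		with open("./data/past_app_sessions.pickle", "wb") as fout:
			p_dump(container, fout)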
Example #7
def img_from_pickle(fp: str) -> Image.Image:
    """Rebuild a PIL image from a pickled NumPy array."""
    with open(fp, 'rb') as pf:
        return Image.fromarray(p_load(pf))
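A hypothetical write-side counterpart (the helper name `img_to_pickle` is illustrative): convert the PIL image to a NumPy array and pickle it in the form `img_from_pickle` expects.

import numpy as np
from pickle import dump as p_dump
from PIL import Image


def img_to_pickle(img: Image.Image, fp: str) -> None:
    # Store the image as a pickled NumPy array, matching what img_from_pickle reads.
    with open(fp, 'wb') as pf:
        p_dump(np.asarray(img), pf)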