def exportGephi(self):
    """Ask the user for a destination file, run the Gephi export, and
    show a confirmation dialog when the export succeeds.
    """
    # NOTE(review): tkFileDialog/tkMessageBox are Python 2 module names —
    # confirm the project targets Python 2 (py3 uses tkinter.filedialog).
    from tkFileDialog import asksaveasfilename
    import tkMessageBox

    exporter = Etl()
    target = asksaveasfilename()
    if exporter.exportGephi(target):
        tkMessageBox.showinfo("Info", "File salvato con successo.")
def addRows():
    """Populate the ``viruses`` table only when it contains no data.

    If any ``Virus`` row already exists, this is a no-op; otherwise the
    Etl data frame is bulk-inserted via pandas ``DataFrame.to_sql``.
    """
    # `is None` is the correct null check (PEP 8); `== None` invokes
    # __eq__ and can misbehave on objects with custom equality.
    if Virus.query.first() is None:
        df = Etl().data
        df.to_sql('viruses', engine, if_exists="replace", index=False)
        print("done adding rows")
def predict(self):
    """Run the fetch → ETL → train pipeline and store today's data point
    and the model's suggestion for it on the instance.
    """
    data = Data(self.training_url, self.testing_url)
    data.fetch_data()

    pipeline = Etl(data)
    pipeline.do_etl()

    model = Model(pipeline)
    model.train()
    # model.test(data.testing_data)

    self.today_data = data.testing_data[-1]
    self.today_suggestion = model.predict_one(self.today_data)
def cluster_data(data):
    """Transform raw data through the ETL steps, cluster it, and POST the
    resulting metrics and clusters to the server (best-effort: errors are
    printed, not raised).
    """
    etl = Etl()
    frame = etl.normalize_df(etl.generate_rfm(etl.process_data(data)))

    metrics, clusters = Clustering().generate_cluster(frame)

    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    try:
        requests.post(server_url + '/metrics', headers=headers, json=metrics)
        requests.post(server_url + '/clusters', headers=headers, json=clusters)
    except Exception as e:
        print('Error', e)
def main():
    """Build a Data source from up to two CLI arguments, run the ETL and
    training pipeline, and print the prediction for the latest test row.
    """
    argc = len(sys.argv)
    if argc == 2:
        data = Data(sys.argv[1])
    elif argc == 3:
        data = Data(sys.argv[1], sys.argv[2])
    else:
        data = Data()

    data.fetch_data()
    pipeline = Etl(data)
    pipeline.do_etl()

    model = Model(pipeline)
    model.train()
    # model.test(data.testing_data)

    print(model.predict_one(data.testing_data[-1]))
def lambda_connect(event, context):
    """AWS Lambda entry point: refresh all stats via Etl and return a
    fixed acknowledgement string (event/context are unused).
    """
    Etl().retrieve_all_stats()
    return 'pickle rick'
def __init__(self):
    """Initialise the politician list, the NB classifier, and the ETL helper."""
    self.politicianList = Politician().getList()
    self.classifier = NBClassifier()
    self.e = Etl()
def __init__(self, categories):
    """Remember the requested categories, start with an empty product
    accumulator, and build the Etl product source for those categories.
    """
    self.list_categories = categories
    self.all_prod = []
    self.json_products = Etl(self.list_categories)
def __init__(self):
    """Connect the database and ETL helpers, then record the zero-based
    index of the most recent brainwriting session.
    """
    self.db = Database()
    self.etl = Etl()
    session_count = self.db.get_count(coll='brainwriting_sessions')
    self.last_session = session_count - 1