def ez_auth():
    try:
        global auth_token
        response = eazyml.ez_auth(username, password=None, api_key=api_key)
        auth_token = response["token"]
    except Exception as e:
        print('The function ez_auth was not executed properly', e)
def ez_auth():
    '''
    This function calls the API which allows the user to authenticate
    with EazyML and returns a token.
    '''
    try:
        global auth_token
        # Calling the eazyml library function for authentication
        response = eazyml.ez_auth(username, api_key=api_key)
        auth_token = response["token"]
        print("Output of ez_auth function is", response)
    except Exception as e:
        print('The function ez_auth was not executed properly', e)
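# A minimal sketch of how this helper might be wired up. The module-level
# `username`, `api_key`, and `auth_token` names are assumptions inferred from
# the globals the function references; the placeholder credential values are
# illustrative only.
import eazyml

username = '******'   # assumed module-level credential used by ez_auth()
api_key = '******'    # assumed module-level credential used by ez_auth()
auth_token = None     # populated by ez_auth() via `global`

ez_auth()
print("Token acquired:", auth_token is not None)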
import eazyml
import pandas as pd
import numpy as np

# To start the program, make sure you change the last number
# in the second row to the ID of the county you wish to check.

# Authentication to use the API
username = '******'
password = '******'
train_file_path = 'dataset.csv'

resp = eazyml.ez_auth(username, None, password)
auth_token = resp["token"]

options = {
    "id": "ID",
    "impute": "yes",
    "outlier": "yes",
    "discard": "null",
    "accelerate": "yes",
    "outcome": "Major Incident"
}

ez_model_config = {
    "model_type": "predictive",
    "derive_text": "no",
    "derive_numeric": "no",
    "accelerate": "yes"
}
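# A minimal sketch of the next steps for this predictive setup. It assumes the
# same eazyml.ez_load / ez_init_model / ez_get_models calls and response fields
# ("dataset_id", "dataframe") used in the time-series script below; those
# details are taken from that script, not verified against the EazyML docs.
resp = eazyml.ez_load(auth_token, train_file_path, options)
dataset_id = resp["dataset_id"]

resp = eazyml.ez_init_model(auth_token, dataset_id, ez_model_config)
resp = eazyml.ez_get_models(auth_token)
model_id = resp["dataframe"]["data"][0][4]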
def main_function(county_name):
    big_df = pd.read_csv("webserver\\big_data.csv")
    dates = big_df["Date"]
    county_column = big_df[county_name]
    # Mean of the county column over the dataset's 310 rows
    average = sum(list(big_df[county_name])) / 310
    result = pd.concat([dates, county_column], axis=1)
    result.to_csv("webserver\\dataset.csv", index=False)

    # Authentication to use the API
    username = '******'
    password = '******'
    train_file_path = 'webserver\\dataset.csv'
    resp = eazyml.ez_auth(username, None, password)
    auth_token = resp["token"]

    options = {
        "id": "null",
        "impute": "yes",
        "outlier": "yes",
        "discard": "null",
        "accelerate": "yes",
        "shuffle": "no",
        "outcome": county_name
    }
    ez_model_config = {
        "model_type": "timeseries",
        "derive_text": "no",
        "derive_numeric": "no",
        "accelerate": "yes",
        "date_time_column": "Date"
    }

    # Loading the training data
    resp = eazyml.ez_load(auth_token, train_file_path, options)
    print(resp)
    dataset_id = resp["dataset_id"]

    # Building the model
    resp = eazyml.ez_init_model(auth_token, dataset_id, ez_model_config)
    resp = eazyml.ez_get_models(auth_token)
    print(resp)
    model_id = resp["dataframe"]["data"][0][4]

    # Prediction rows for mid-year and year-end; the column is named after the
    # selected county so it matches the outcome column used in training
    prediction_df = pd.DataFrame({
        'Date': ["6/30/2020", "12/31/2020"],
        county_name: ["", ""]
    })
    prediction_df.to_csv("webserver\\prediction.csv", index=False)

    # Getting the final response with answers and displaying it to the user
    response = eazyml.ez_predict(auth_token, model_id, 'webserver\\prediction.csv')
    half_year = float(response["predictions"]["data"][0][2])
    half_year_statement = half_year > average
    full_year = float(response["predictions"]["data"][1][2])
    full_year_statement = full_year > average
    return half_year, half_year_statement, full_year, full_year_statement
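# Example invocation of main_function(), assuming webserver\big_data.csv has a
# "Date" column plus one column per county; the county name here is illustrative.
half_year, half_up, full_year, full_up = main_function("Monmouth")
print("Mid-year forecast:", half_year, "above historical average:", half_up)
print("Year-end forecast:", full_year, "above historical average:", full_up)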
#!/usr/bin/env python
# coding: utf-8

# In[1]:

import json
import csv
import eazyml as ez

# In[2]:

username = '******'
api_key = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJhYjgyOWQyYi00MGZhLTQ0ZmEtYTVjMy1lODcxOGY2ZThiMmYiLCJleHAiOjE2MDUyNTE5MzgsImZyZXNoIjpmYWxzZSwiaWF0IjoxNjA1MTY1NTM4LCJ0eXBlIjoiYWNjZXNzIiwibmJmIjoxNjA1MTY1NTM4LCJpZGVudGl0eSI6IlJhanZhcmRoYW4gUmF2YXQifQ.936LvLzRPLzekkc14x4N6w4lMMUGZnZmDODJJESgaqQ'
file_path = 'California_Wildfire_Data.csv'
token = ez.ez_auth(username, None, api_key)["token"]

# In[3]:

def train_data(token, file_path):
    options = {
        "id": "null",
        "impute": "yes",
        "outlier": "yes",
        "discard": [
            "started",
            "extinguished",
            "counties",
            "latitude",
            "longitude",
            "acresBurned",
            "visibility",
            "uvIndex"