def main_prediction_one_data(i, model, data, fh, freq):
    """Evaluate one model on a single dataset and report its error metrics.

    :param i: index/identifier forwarded to ``main_resultat``
    :param model: name of the model to evaluate
    :param data: dataset the forecast is computed on
    :param fh: forecast horizon
    :param freq: seasonal frequency of the series
    :return: list ``[model, sMAPE, MASE]``
    """
    evaluator = prediction.Prediction()
    smape, mase = evaluator.main_resultat(data, model, fh, freq, i)
    print("smape: ", smape)
    print(" mase: ", mase)
    return [model, smape, mase]
def periodic_prediction(self):
    """
    Call the functions for new requirement prediction and send insight to Firebase.

    :return: None
    """
    # Imported lazily so the (heavy) prediction module is only loaded
    # when a periodic prediction actually runs.
    import prediction as pd

    predictor = pd.Prediction(
        domain=self.domain,
        intention_id=self.intention_id,
        conditional_keys=self.conditional_keys,
    )
    print("Predicting real time value according to user given data")
    insight = predictor.export_insights_firebase()
def index():
    """Landing-page view.

    On POST: collect the patient's form fields, run the heart-disease
    prediction and redirect to the result page.  On GET: render the form.
    """
    if request.method != "POST":
        return render_template('index.html')

    name = request.form['patientNameInput']
    # Model feature name -> HTML form input name.  The form ids are kept
    # exactly as in the template (note the "Thalac" spelling of thalach).
    form_fields = {
        'age': 'patientAgeInput',
        'sex': 'patientSexInput',
        'cp': 'patientCpInput',
        'trestbps': 'patientTrestbpsInput',
        'chol': 'patientCholInput',
        'fbs': 'patientFbsInput',
        'restecg': 'patientRestecgInput',
        'thalach': 'patientThalacInput',
        'exang': 'patientExangInput',
        'oldpeak': 'patientOldpeakInput',
        'slope': 'patientSlopeInput',
        'ca': 'patientCaInput',
        'thal': 'patientThalInput',
    }
    data = {feature: request.form[field]
            for feature, field in form_fields.items()}

    predict = prediction.Prediction(data)
    temp = process.proc(data)
    return redirect(
        url_for("user", usr=predict, nm=name, sex=temp['sex'],
                age=data['age'], chol=temp['chol'],
                trestbps=temp['trestbps'], thalach=temp['thalach'],
                cp=temp['cp'], tdate=today, ttime=current))
def main_prediction(i, model):
    """Evaluate ``model`` on every dataset frequency and aggregate metrics.

    The original body copy-pasted the same evaluation stanza six times; it
    is now driven by a single table of ``(banner, dataset, fh, freq)``
    entries, preserving the exact call order, arguments and printed output.

    :param i: index/identifier forwarded to ``main_resultat``
    :param model: name of the model to evaluate
    :return: list ``[model, sMAPE per frequency (6), mean sMAPE,
             MASE per frequency (6), mean MASE, mean OWA]``
    """
    # (printed banner, dataset, forecast horizon fh, seasonal frequency)
    configs = [
        ('Dataset_hourly', Dataset_hourly, 48, 24),
        ('### Daily daten ###', Dataset_daily, 14, 1),
        ('### Weekly daten ###', Dataset_weekly, 13, 1),
        ('### Monthly daten ###', Dataset_monthly, 18, 12),
        ('### Quaterly daten ###', Dataset_quaterly, 8, 4),
        ('### Yearly daten ###', Dataset_yearly, 6, 1),
    ]
    T_sMape = []
    T_Mase = []
    for banner, dataset, fh, freq in configs:
        print(banner)
        smape, mase = prediction.Prediction().main_resultat(
            dataset, model, fh, freq, i)
        T_sMape.append(smape)
        T_Mase.append(mase)
    # OWA is the mean of the two averaged error measures.
    OWA = [np.mean(T_sMape), np.mean(T_Mase)]
    # Same flat layout as the original return value.
    return ([model] + T_sMape + [np.mean(T_sMape)]
            + T_Mase + [np.mean(T_Mase), np.mean(OWA)])
def __init__(self, *args, **kwargs):
    """Set up calibration clients and prediction state, load the simulated
    tilt series selected by the 'simu tilt series' setting, install fixed
    simulation settings and start the node.
    """
    leginon.acquisition.Acquisition.__init__(self, *args, **kwargs)
    self.calclients['pixel size'] = \
        leginon.calibrationclient.PixelSizeCalibrationClient(self)
    self.calclients['beam tilt'] = \
        leginon.calibrationclient.BeamTiltCalibrationClient(self)
    self.btcalclient = self.calclients['beam tilt']
    self.tilts = tilts.Tilts()
    self.exposure = exposure.Exposure()
    self.prediction = prediction.Prediction()
    # BUG FIX: the original tested "is ''", which compares object identity,
    # not string equality -- unreliable (interning-dependent) and a
    # SyntaxWarning on modern CPython.  Use equality instead.
    if self.settings['simu tilt series'] == '':
        self.settings['simu tilt series'] = '1'
        self.setSettings(self.settings)
    self.simuseries = int(self.settings['simu tilt series'])
    self.simuseriesdata = self.getTiltSeries()
    if self.simuseriesdata is not None:
        # NOTE(review): original indentation was lost; preset/prediction
        # loading is assumed to require the series data -- confirm.
        self.getTiltImagedata(self.session, self.simuseriesdata)
        self.presetdata = self.getTiltSeriesPreset()
        self.loadPredictionInfo()
    self.first_tilt_direction = 1
    # Fixed settings used while simulating a collection; these override
    # whatever the user configured for the listed keys.
    fake_settings = {
        'equally sloped': False,
        'equally sloped n': 8,
        'xcf bin': 1,
        'run buffer cycle': True,
        'align zero loss peak': True,
        'measure dose': True,
        'dose': 200.0,
        'min exposure': None,
        'max exposure': None,
        'mean threshold': 100.0,
        'collection threshold': 90.0,
        'tilt pause time': 1.0,
        'measure defocus': False,
        'integer': False,
        'intscale': 10,
        'pausegroup': False,
    }
    self.settings.update(fake_settings)
    self.start()
def timeit(f, *args):
    """Run ``f(*args)``, printing the ``mark`` banner before and after and
    the elapsed wall-clock time in seconds."""
    print(mark)
    started = time.time()
    f(*args)
    finished = time.time()
    print(mark)
    print('Operation completed in: ', finished - started, 'seconds.')


if __name__ == '__main__':
    # The accepted query vocabulary: every YF getter plus the module/help
    # pseudo-arguments.
    api = {attr for attr in dir(YF) if attr.startswith('get_')}
    api.update(MODULE_ARGS)
    api.update(HELP_ARGS)

    ts = sys.argv[1:]
    # Split the command line into API queries and ticker symbols.
    queries = [arg for arg in ts if arg in api]
    ts = [arg for arg in ts if arg not in queries] or DEFAULT_ARGS

    if any(h in queries for h in HELP_ARGS):
        helpapi(queries)
    elif queries:
        customapi(queries, ts)
    else:
        tickers = ts[0] if 1 == len(ts) else ts
        collector = collect.Collect(tickers)
        timeit(collector.data_collect)
        timeit(collector.print_console)
        timeit(collector.save_csv)
        model = prediction.Prediction()
        model.training_data()
        model.testing_data()
        model.predict_price()
def run():
    """Main polling loop: track the current position on the course, refresh
    wind data at most every 30 minutes, and recompute the route prediction.

    Runs forever; per-iteration failures are logged and swallowed so the
    loop keeps going.
    """
    import ast  # safe parsing of the coordinate string (see below)

    # load the course data
    course_object = course.Course()
    # get next n segments in a dataframe for prediction
    analysis_window_size = 1000
    # epoch time of the last weather fetch; 0 forces a fetch on first pass
    last_weather_et = 0
    # do stuff
    while True:
        # heartbeat
        data_wrangler.heartbeat()
        # get current location
        current_df = data_wrangler.bucket_csv_to_df()
        # check if there is any data returned
        if current_df.size > 0:
            try:
                # newest sample by timestamp; .loc replaces the long-removed
                # pandas .ix indexer (here the label-based lookup it did).
                most_recent_row = current_df.loc[
                    current_df['timestamp'].idxmax()]
                # BUG FIX: initialise both so a parse failure below cannot
                # raise NameError when read_lat is tested afterwards.
                read_lat = None
                read_lon = None
                try:
                    # SECURITY: literal_eval instead of eval -- the
                    # coordinate string comes from an external CSV.
                    coords = ast.literal_eval(most_recent_row['coordinates'])
                    read_lat = coords[1]
                    read_lon = coords[0]
                except Exception as e:
                    logging.error(
                        'Exception caught trying to parse lat/lon from s3 csv: {}'
                        .format(e))
                if read_lat is not None:
                    # determine course segment
                    current_segment_index = course_object.find_current_course_segment(
                        read_lat, read_lon)
                    # the next analysis_window_size segments ahead of us
                    wind_df = course_object.segment_df.iloc[
                        current_segment_index:current_segment_index +
                        analysis_window_size]
                    # get weather (if necessary) -- if it's been 30 min
                    if ((last_weather_et + 1800) < time.time()):
                        last_weather_et = time.time()
                        logging.info(
                            "getting fresh weather data (this could take a few minutes...)"
                        )
                        wind_data = weather_requests.query_wind_data(
                            analysis_window_size, wind_df)
                    # make predictions
                    if len(wind_data.keys()) != 0:
                        p = prediction.Prediction(course_object,
                                                  analysis_window_size,
                                                  current_segment_index,
                                                  wind_data)
            except Exception as e:
                logging.error('Exception caught in main.run(): {}'.format(e))
        else:
            logging.error("dataframe populated by IoT datastore is empty!!!")
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Tests para comprobar el funcionamiento de los microservicios @author: Álvaro de la Flor Bonilla """ import pytest import prediction import api_v1 import api_v2 """Cargamos las predicciones""" prediction_arima = prediction.Prediction() def test_arima_1(): """Solo pueden indicarse horas en arima, no strings""" with pytest.raises(ValueError): assert prediction_arima.get_predictions_arima('fail') def test_arima_2(): """El objeto devuelto debe ser una lista""" respuesta = prediction_arima.get_predictions_arima(24) assert type(respuesta) == list def test_arima_3(): """Funcionamiento correcto de la API 1""" app_v1 = api_v1.app.test_client() respuesta = app_v1.get('/servicio/v1/prediccion/24horas/')
class ICan:
    """
    Main class of the iCan application that packages the high level logic
    of the device.

    Public Methods:
        checkOrientation()
    """

    # SENSORS
    lidSensor = lid.Lid()
    cameraSensor = camera.Camera('/tmp/trash.jpg')

    # ACTUATORS/OUTPUT
    lightOutput = light.Light(21)  # LED driven via pin 21
    trapdoorOutput = trapdoor.Trapdoor()

    # CLOUD SERVICES AND APIs
    storageService = s3.S3()
    databaseService = database.ICanItems()
    weatherService = local_weather.LocalWeather()
    recognitionService = image_recognition.ImageRecognition()
    predictionService = prediction.Prediction()
    notificationService = notification.Notification()

    # How long to wait to be in "steady" state (seconds)
    WAIT_TIME = 3
    # How often readings are taken from the accelerometer (seconds)
    READING_INTERVAL = 0.2

    # Ring buffer of recent (horizontal, vertical) lid states; built in __init__.
    recentOpenStates = None

    # Take a photo only after the can was just opened and closed (and in steady state)
    photoRecentlyTaken = True

    def __init__(self):
        # Store recent history of states in tuples (horizontal, vertical)
        initialState = (False, False)
        # The buffer holds exactly one waiting period's worth of readings,
        # so counting matches over the whole deque covers WAIT_TIME seconds.
        maxLength = int(self.WAIT_TIME / self.READING_INTERVAL)
        self.recentOpenStates = deque(maxLength * [initialState], maxLength)

    def checkOrientation(self):
        """
        Checks the current orientation of the lid, take a photo and process it.

        Called periodically; records the lid state, and once the lid has
        been closed for the full waiting period after an open/close cycle,
        takes and processes a photo.

        :return: None
        """
        horizontal = self.lidSensor.isHorizontal()
        vertical = self.lidSensor.isVertical()
        print 'H: ', horizontal
        print 'V: ', vertical
        self.recentOpenStates.append((horizontal, vertical))
        if self.isReadyToTakePhoto() and not self.photoRecentlyTaken:
            print 'Taking photo now . . . '
            fileName = self.getFileName()
            self.cameraSensor.setImagePath(fileName)
            self.cameraSensor.takePhoto()
            link = self.uploadPhoto(fileName)
            identifiers = self.recognitionService.getImageIdentifiers(fileName)
            targetPrediction = self.predictionService.getTrashPrediction(identifiers)
            print identifiers
            print targetPrediction
            # Fallback in case nothing is recognized in the image by recognition service
            if len(identifiers) == 0:
                identifiers = ['trash']
                targetPrediction = 'trash'
            self.saveToDatabase(identifiers, targetPrediction, link)
            self.respondToPrediction(identifiers, targetPrediction)
            self.photoRecentlyTaken = True
        if vertical and not horizontal:
            # Lid is open
            self.lightOutput.turnOn()
            self.photoRecentlyTaken = False
        else:
            self.lightOutput.turnOff()

    def saveToDatabase(self, identifiers, targetPrediction, link):
        """
        Save the record of identification to the database.

        :param identifiers: List of identifier strings from the image recognition service
        :param targetPrediction: String prediction from the prediction service
        :param link: Public URL to the image
        :return: Response to save request from the database
        """
        return self.databaseService.addRecord({
            'item_name': ", ".join(identifiers),
            'recyclable': (targetPrediction == 'recyclable'),
            'compostable': (targetPrediction == 'compostable'),
            'timestamp': int(time()),
            'temperature': self.weatherService.getCurrentTemperature(),
            'image': link,
            'user_feedback': False,
        })

    def getFileName(self):
        """
        Return the pseudo unique filename of the next photo to be taken.

        Uniqueness comes from the current epoch timestamp in the name.

        :return: Absolute path to the file as a string
        """
        timestamp = time()
        name = 'trash_' + str(timestamp) + '.jpg'
        path = '/tmp/'
        return path + name

    def isReadyToTakePhoto(self):
        """
        Return if the iCan is ready to take a photo based on current state
        and previous states.

        If the lid has been closed for the entire duration of the waiting
        period, then it is time to take a photo.

        :return: Boolean on whether the iCan is ready to take a photo
        """
        # Check if the queue of states shows it has been closed
        # for the entire waiting period
        closedState = (True, False)
        return self.recentOpenStates.count(closedState) == self.recentOpenStates.maxlen

    def uploadPhoto(self, fileName):
        """
        Upload given file to cloud storage, write the link to a file and return it

        :param fileName: Absolute path to file
        :return: URL to the file on cloud storage
        """
        # Write the public link to a local file
        link = self.storageService.uploadData(fileName)
        with open('/tmp/photos.txt', 'a') as photosFile:
            photosFile.write(link + "\n")
        print 'URL: ' + link
        return link

    def respondToPrediction(self, identifiers, targetPrediction):
        """
        React to the prediction by either opening the trapdoor or sending a notification.

        :param identifiers: List of string identifiers from Image Recognition Service
        :param targetPrediction: Prediction of 'trash', 'compostable', 'recyclable', etc. from the ML model
        :type targetPrediction: str
        """
        if targetPrediction == 'trash':
            print 'Down the hatch!'
            self.trapdoorOutput.open()
            print 'Waiting . . . '
            # Give the item time to fall through before closing.
            sleep(2)
            self.trapdoorOutput.close()
        else:
            print 'Sending Notification'
            # Only the first three identifiers are included in the message.
            identifiersList = ', '.join(identifiers[:3])
            message = 'iCan has detected an item that is: ' + identifiersList
            message = message + "\nCategory " + targetPrediction.upper()
            self.notificationService.sendNotification(message)

    def cleanUp(self):
        """
        Clean up any used I/O pins and close any connections if needed.

        :return: None
        """
        self.trapdoorOutput.cleanUp()
        self.lightOutput.cleanUp()
    'bus_station', 'school', 'hospital', 'subway_station', 'parking',
    'movie_theater', 'price', 'population'
]
# Feature matrix and target column (whether a branch opened).
x = df[cols]
y = df[['branch_o_int']]
if len(sys.argv) < 2:
    print(
        'usage: python main.py [prediction option] option: dt, gnb. ex: python main.py gnb'
    )
else:
    option = sys.argv[1]
    if option == 'dt':
        # Measure decision-tree accuracy
        dt_accu = prediction.Prediction(
            x=x,
            y=y,
            prediction_strategy=prediction.dtree_prediction).get_accuracy()
        print('accuracy(Decision Tree) : ', dt_accu)
    elif option == 'gnb':
        # Gaussian naive Bayes classifier
        gnb_accu = prediction.Prediction(
            x=x,
            y=y,
            prediction_strategy=prediction.gaussian_nb_prediction
        ).get_accuracy()
        print('accuracy(Gaussion Naive Bayes) : ', gnb_accu)
    elif option == 'rf':
        # Random forest
        rf_accu = prediction.Prediction(
            x=x,
            y=y,
            prediction_strategy=prediction.random_forest_prediction
        ).get_accuracy()
        print('accuracy(Random Forest) : ', rf_accu)