Example #1
from influxdb import DataFrameClient as df_client


def connect_to_db2():
    client1 = df_client(host='localhost', port=8086)  # client that reads/writes the db with pandas DataFrames
    client1.switch_database('team_3_test_detect')
    return client1
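For reference, a minimal sketch of how the returned client might be used; the measurement name 'bearing_demo' and the sample DataFrame below are assumptions for illustration, not part of the original example:

import pandas as pd

client = connect_to_db2()

# Queries return a dict-like object mapping measurement name -> pandas DataFrame
results = client.query('select * from bearing_demo limit 5')
if results:
    print(results['bearing_demo'].head())

# DataFrames written back must use a DatetimeIndex
df = pd.DataFrame({'gs': [0.1, 0.2]},
                  index=pd.to_datetime(['2020-01-01', '2020-01-02']))
client.write_points(df, 'bearing_demo')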
Example #2
import torch
from influxdb import DataFrameClient as df_client
import numpy as np
import pandas as pd

client = df_client(host='10.176.67.83', port=8086)  # client that reads/writes the db with pandas DataFrames
client.switch_database('team_3_test_offline')

X_torch = None
Y_torch = None

# batch_no selects the measurement window (mw) to load; it is defined by the enclosing code (not shown)
query = 'select * from bearing where mw = ' + str(batch_no)

results = client.query(query)

if results:
    results_df = results['bearing']
    x_np = results_df.take([0, 2, 5], axis=1).to_numpy()  # currently only looking at gs, sr, and load --> to numpy
    X_torch = torch.tensor(x_np).float()  # --> to pytorch tensor

    y_np = results_df.take([1], axis=1).to_numpy().astype(float)
    Y_torch = torch.from_numpy(y_np).float()
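Once X_torch and Y_torch are populated, a common next step (not shown in the original snippet) is to wrap them for mini-batch training; the batch size below is an assumption:

from torch.utils.data import TensorDataset, DataLoader

if X_torch is not None and Y_torch is not None:
    dataset = TensorDataset(X_torch, Y_torch)                  # pairs each feature row with its label
    loader = DataLoader(dataset, batch_size=10, shuffle=True)  # batch size is an assumption

    for x_batch, y_batch in loader:
        # x_batch has shape (batch, 3) for gs, sr, load; y_batch has shape (batch, 1)
        pass  # forward/backward pass would go here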
Example #3
"""
PyTorch Neural Network for learning model from offline data.
"""

import numpy as np
import pandas as pd
import math, random
from datetime import datetime
import torch
from influxdb import DataFrameClient as df_client
from FaultDetectNet import FaultDetectNet
from sklearn.metrics import accuracy_score
from pywt import downcoef
import sys

client = df_client(host='localhost', port=8086)
client.switch_database('team_3_offline')

DEBUG = False
MW_MAX = 0  # highest measurement-window (mw) index available (placeholder value)
MW = 0  # current measurement-window (mw) index (placeholder value)
SAMPLE_SIZE = 3000  # one input contains 3000 points -- this will be pre-processed with downcoef
BATCH_SIZE = 10  # 10 samples at a time
TRAIN_CUT = 0  # last mw index used for training (placeholder value)
TEST_CUT = MW_MAX  # last mw index used for testing
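
# Illustrative check (added here, not part of the original module): downcoef with 'db4' at level 7
# condenses one SAMPLE_SIZE = 3000 point window to roughly 30 approximation coefficients, which is
# what the SAMPLE_SIZE comment above refers to. The random signal is only a stand-in for gs data.
if DEBUG:
    _demo_signal = np.random.randn(SAMPLE_SIZE)
    print(downcoef('a', _demo_signal, 'db4', level=7).shape)  # -> (30,)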


# Perform partial discrete wavelet transform on gs data to obtain approximate coefficients (condense sample size from 3000 to 30)
def preprocess(x_raw):
    gs = x_raw['gs'].to_numpy().astype(float).flatten()
    gs = downcoef('a', gs, 'db4', level=7)