Example No. 1
	def createBayesianNetwork(self, cause_effect_file):  # Populates the network from the cause-effect file

		# Create a helper object and preprocess (as described in class FileHelper)
		helper = fh.FileHelper()
		helper.preProcess(cause_effect_file)

		# Obtain lists of each attribute of the variable node
		hVars = helper.getAttributes('var')
		hCauseList = helper.getAttributes('causeList')
		hPP = helper.getAttributes('PP')
		hCondProbList = helper.getAttributes('condProbList')
		hNumCauses = helper.getAttributes('numCauses')		

		# create nodes
		for i in range(len(hVars)):
			self[hVars[i]] = VariableNode(hVars[i], hCauseList[i], hPP[i], hCondProbList[i], hNumCauses[i])

		# compute children
		for var in self.keys(): # For each variable node,
			for par in self[var].parents: # for each parent of that variable node,
				self[par].childrenInOrder.append(var) # append the variable node to the ordered child list
				self[par].children.add(var) # add the variable node to the child set
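
This snippet relies on fh.FileHelper returning index-aligned lists from repeated getAttributes calls (one entry per variable node). Below is a minimal sketch of that assumed interface; the actual parsing is left as a placeholder, since the cause-effect file format is not shown.

class FileHelper:
    """Hypothetical interface matching the calls above."""

    def __init__(self):
        self._records = []  # one attribute dict per variable node, in file order

    def preProcess(self, cause_effect_file):
        # Placeholder: parse each line of the file into an attribute dict.
        with open(cause_effect_file) as f:
            for line in f:
                pass  # populate self._records here

    def getAttributes(self, name):
        # File order is preserved, so parallel calls line up by index.
        return [record[name] for record in self._records]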
Example No. 2
import sys

import pytz
import string
from subprocess import call
from dateutil.tz import tzlocal

import FileHelper

import ConfigParser  # Python 2 stdlib module; renamed to configparser in Python 3
config = ConfigParser.RawConfigParser()
config.read(sys.argv[1])


# Dataset config:


dataset = config.get('All', 'dataset')
ftp_server_base_dir = config.get('All', 'ftp_server_base_dir')
fileHelper = FileHelper.FileHelper()
ftp_process_base_dir = config.get('All', 'ftp_process_base_dir')
staging_local_path = config.get('All', 'staging_local_path')
staging_hdfs_path = config.get('All', 'staging_hdfs_path')
unzipToTempDirOnHDFS = config.get('All', 'unzipToTempDirOnHDFS')

# FTP connect details:

ftp_server = config.get('All', 'ftp_server')
username = config.get('All', 'ftp_username')
password = config.get('All', 'ftp_password')
lastFilePullDateStoreFile = ftp_process_base_dir + '/' + dataset + '_' + config.get('All', 'lastFilePullDateStoreFile')
lookback_time_in_minutes = int(config.get('All', 'lookback_time_in_minutes'))
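
Every key is read from a single [All] section, so the config file passed as sys.argv[1] looks roughly like this (all values are placeholders):

[All]
dataset = mydataset
ftp_server_base_dir = /remote/base
ftp_process_base_dir = /local/process
staging_local_path = /local/staging
staging_hdfs_path = /staging
unzipToTempDirOnHDFS = true
ftp_server = ftp.example.com
ftp_username = user
ftp_password = secret
lastFilePullDateStoreFile = last_pull.txt
lookback_time_in_minutes = 30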

# Get the local datetime. This is the time up to which files will be pulled from FTP for the current run.
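
The example is cut off at this point. One way the cutoff described in that last comment could be computed with the imports above (an illustration, not the original continuation):

from datetime import datetime, timedelta
from dateutil.tz import tzlocal

lookback_time_in_minutes = 30  # placeholder; the script reads this from config

# Files are pulled up to the current local time; the lookback window is
# re-scanned so late-arriving files are not missed between runs.
pull_until = datetime.now(tzlocal())
window_start_floor = pull_until - timedelta(minutes=lookback_time_in_minutes)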
Example No. 3
forLoops = 10  # number of runs; one seed per run

# seed = 1
learningRate = 0.1
useBiases = False
trainLoops = 100

seeds = [253, 124, 951, 536, 938, 2, 908, 254, 400, 481]
midActivationFunList = [f.sigmoid, f.tanh, f.relu, f.linear]
endActivationFunList = [f.sigmoid, f.tanh, f.sigmoid, f.sigmoid]
midDeactivationFunList = [f.dsigmoid, f.dtanh, f.drelu, f.dlinear]
endDeactivationFunList = [f.dsigmoid, f.dtanh, f.dsigmoid, f.dsigmoid]
lossFun = f.meanSquareErrorDerivative

# loading data
fileHelper = fh.FileHelper()
trainData = fileHelper.LoadClassificationData()
testData = fileHelper.LoadClassificationData()

maxCls = max(trainData, key=lambda x: x.cls).cls
nodes = [2, 6, maxCls]  # layer sizes: 2 inputs, one hidden layer of 6, maxCls outputs
errors = [0, 0, 0, 0]  # one accumulator per activation-function configuration
for j in range(0, forLoops):
    seed = seeds[j]
    for i in range(0, len(midActivationFunList)):
        # initializing neural network
        midActivationFun = midActivationFunList[i]
        endActivationFun = endActivationFunList[i]
        midDeactivationFun = midDeactivationFunList[i]
        endDeactivationFun = endDeactivationFunList[i]
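
The lists above assume a module f that pairs each activation with its derivative and exposes a loss derivative. A hypothetical NumPy sketch consistent with those names (the real module may differ):

import numpy as np

# Hypothetical implementations; only the names and the (activation,
# derivative) pairings come from the snippet above.
def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def dsigmoid(x):
    s = sigmoid(x)
    return s * (1.0 - s)

def tanh(x):
    return np.tanh(x)

def dtanh(x):
    return 1.0 - np.tanh(x) ** 2

def relu(x):
    return np.maximum(0.0, x)

def drelu(x):
    return (x > 0).astype(float)

def linear(x):
    return x

def dlinear(x):
    return np.ones_like(x)

def meanSquareErrorDerivative(predicted, target):
    # Gradient of 0.5 * ||predicted - target||^2 with respect to `predicted`.
    return predicted - target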