T = FFTN*dt
		
# Do the computation:
A, lags =  autocorr(mu, sigma, tau, R, T, dt, maxS)

################################################################################
# Generating Output:
################################################################################

# Save Results:
if saveResults:
	pt.pickle({
		'mu':mu,
		'sigma':sigma,
		'tau':tau,
		'R':R,
		'A':A,
		'lags':lags,
		'dt':dt,
		'FFTN':FFTN
		}, saveFileName = saveFileName)

# Display results:
if displayResults:
	print A, lags
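
# A minimal sketch of reloading the pickled results in a later analysis
# session. It assumes pbsTools (pt) is importable there as well and that
# saveFileName contains the substring passed below; the helper is defined for
# illustration only and is not called here.
def loadAutocorrResults(fileNameSubString = '.dat'):
	"""Return (A, lags) from the first pickled result dict matching the substring."""
	loaded = pt.getFromPickleJar(loadDir = './', fileNameSubString = fileNameSubString)[0]
	return loaded['A'], loaded['lags']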





#
#  DDMCube_Slave.py
#  DDMCubeTeraGrid
#
#  Created by nicain on 4/29/10.
#  Copyright (c) 2010 __MyCompanyName__. All rights reserved.
#

################################################################################
# Preamble
################################################################################

# Import packages:
import DDMCube
import analysisTools as at
import pbsTools as pt

# Grab the name of the settings for the run:
settings, FD, numberOfJobs, gitVersion = pt.getFromPickleJar(loadDir = './', fileNameSubString = '.settings')[0]

# Run the sims:
(resultsArray, crossTimesArray) = DDMCube.DDMOU(settings, FD, numberOfJobs[0])

# Save output:
pt.pickle((resultsArray, crossTimesArray),saveFileName = 'simResults.dat')
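
# Hedged sketch (defined but not called here): how an analysis script could
# recover the tuple saved above, reusing the same pickle-jar helper this script
# uses for its settings file. It assumes pt.pickle writes a file that
# getFromPickleJar can match on the 'simResults' substring.
def loadSimResults(loadDir = './'):
	"""Return (resultsArray, crossTimesArray) pickled by this slave script."""
	return pt.getFromPickleJar(loadDir = loadDir, fileNameSubString = 'simResults')[0]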



# Import packages (the top of this launcher fragment is truncated; these
# imports are assumed from the calls below):
import os, time, uuid
from os.path import join
import analysisTools as at
import pbsTools as pt

# Python executable on the remote cluster (the branch selecting a local
# interpreter is missing from the truncated fragment):
pythonPath = '/usr/lusers/nicain/epd-7.0-2-rh5-x86_64/bin/'

# Begin the computation: assemble the run name and job counts.
quickName = quickNamePrefix + '-' + quickNameSuffix
# numberOfJobs holds the sims per repetition and the grand total across all
# repetitions, processes, and nodes:
numberOfJobs = [simsPerRep, simsPerRep*repsPerProc*procsPerNode*nodes]

# Write a "settings" file:
myUUID = uuid.uuid4()
gitVersion = 'None'
# Total number of parameter combinations in the settings cube:
totalLength = 1
for parameter in settings:
	thisSetting = settings[parameter]
	totalLength *= len(thisSetting)
settingsFileName = join(os.getcwd(), saveResultDir, quickName + '_' + str(myUUID) + '.settings')
pt.pickle((settings, FD, numberOfJobs, gitVersion), saveFileName = settingsFileName)
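# (DDMCube_Slave.py above reads this settings file back with
# pt.getFromPickleJar, matching on the '.settings' suffix.)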

# Display settings:
at.printSettings(quickName, saveResultDir = saveResultDir)

# Run the job:
tBegin = time.mktime(time.localtime())
pt.runPBS(pythonPath + 'python DDMCube_Slave.py',
          fileList = ['DDMCube_Slave.py', settingsFileName, 'DDMCube.so'],
          nodes=nodes,
          ppn=procsPerNode,
          repspp=repsPerProc,
          outputDir=outputDir,
          runLocation=runLocation,
          runType=runType,
          waitForSims=waitForSims,
          wallTime=wallTime,
          dryRun=dryRun,
          queue=queue,
          wallTimeEstCount=wallTimeEstCount)


################################################################################
# Preamble
################################################################################

# Import packages (the top of this launcher is truncated; the imports below are
# assumed from the calls that follow, and the sweep/PBS parameters such as
# thetaMin, thetaMax, thetaN, nodes, and wallTime are defined in the missing part):
import os
import pylab as pl
import pbsTools as pt

# Motion coherence for this run:
Coh = 6.4

# Initialize: preferred/null input rates and the threshold values to sweep.
rP = 40 + .4*Coh
rN = 40 - .4*Coh
thetaVals = pl.linspace(thetaMin, thetaMax, thetaN)

# Create the settings dictionary:
settingsDict = {}
for i in range(0,len(thetaVals)):
    settingsDict[i+1] = [rP,rN,corr,N,thetaVals[i],dt,nSims,maxY]
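
# Note: the keys 1..len(thetaVals) correspond to the $ID placeholder passed to
# cythonOvershootSlave.py below, which reads its index from sys.argv[1]
# (assuming pt.runPBS substitutes $ID with a 1-based job index).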

# Write out settings file:
settingsFileName = os.path.join(os.getcwd(),'jobSettings.settings')
pt.pickle(settingsDict, saveFileName = settingsFileName)

# Run PBS Job
pt.runPBS(pythonPath + 'python cythonOvershootSlave.py $ID',
          fileList = ['cythonOvershootSlave.py', settingsFileName, 'cythonOvershootSIP.so'],
          nodes=nodes,
          ppn=procsPerNode,
          repspp=repsPerProc,
          outputDir=outputDir,
          runLocation=runLocation,
          runType=runType,
          waitForSims=waitForSims,
          wallTime=wallTime,
          dryRun=dryRun,
          queue=queue,
          wallTimeEstCount=wallTimeEstCount)
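
# Note: fileList above is presumably how the settings file and the compiled
# cythonOvershootSIP.so reach each job's working directory, which is why the
# slave below loads its settings from './' (an assumption about pt.runPBS).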


################################################################################
# Preamble
################################################################################

# Import packages:
import pylab as pl
import sys
import time
import pbsTools as pt
# Assumed import: getOvershootDist from the compiled extension shipped with the
# job (cythonOvershootSIP.so above).
from cythonOvershootSIP import getOvershootDist

# Grab the settings from the file:
settingsDict = pt.getFromPickleJar(loadDir = './', fileNameSubString = '.settings')[0]

# Grab the index for the current job:
thetaInd = int(sys.argv[1])

# Unpack the settings for this job:
rP, rN, corr, N, theta, dt, nSims, maxY = settingsDict[thetaInd]

# Run the sim, recovering the Monte Carlo overshoot data:
tic = time.time()
overShootTemp = getOvershootDist(rP, rN, corr, N, theta-.01, dt, nSims)
print time.time()-tic, 'sec elapsed'

# Make the histogram of overshoot past the (preferred) threshold:
bins = range(0,maxY)
overShootPref = overShootTemp[overShootTemp>0] - theta
mean = overShootPref.mean()
overShootHist, bin_edges = pl.histogram(overShootPref, bins=bins)
# Normalize the counts to a probability mass function:
overShootHist = overShootHist*1.0/overShootHist.sum()

# Save output:
pt.pickle((overShootHist,mean),saveFileName = 'simResults_' + str(theta) + '.dat')
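
# Hedged sketch (defined but not called here): collecting the per-theta result
# files back into one list on the analysis side. It assumes each pickled file
# keeps the 'simResults_' prefix used above and that pt.getFromPickleJar
# returns every match.
def collectOvershootResults(loadDir = './'):
	"""Return the list of (overShootHist, mean) tuples pickled by the slave jobs."""
	return pt.getFromPickleJar(loadDir = loadDir, fileNameSubString = 'simResults_')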