Exemplo n.º 1
0
 def __init__(self,element,options):
   '''Initialize the dataSet from an xml element.
   
   Reads start, end and baseFileName from the element's "fileRange"
   attribute, picks up an optional "eosFile" child node, and builds the
   list of Axis objects from the "axis" child nodes.
   '''
   
   #default state before the xml element is parsed
   self.baseFileName=None
   self.start=None
   self.end=None
   self.axes=[]
   self.nNumFiles=None
   self.fileIndices=[]
   self.hasNonTimeAxis=False
   
   #split the file range into start index, end index and base name
   [self.start,self.end,self.baseFileName]=disect_filename.disectFileName(element.attrib["fileRange"])
   
   #equation of state file is optional; only the first node counts
   eosNodes=element.findall("eosFile")
   if len(eosNodes)>1:
     warnings.warn("more than one \"eosFile\" node, ignoring all but first node")
   self.eosFile=eosNodes[0].text if len(eosNodes)>0 else None
   
   #create an Axis for every "axis" child node, remembering whether any
   #of them is a non-time axis
   for axisNode in element.findall("axis"):
     newAxis=Axis(axisNode,options)
     self.axes.append(newAxis)
     if not newAxis.bTime:
       self.hasNonTimeAxis=True
Exemplo n.º 2
0
def main():
    #parse command line options
    (options, args) = parseOptions()

    #get base file name
    [start, end, baseFileName] = disect_filename.disectFileName(args[0])

    #make sure that all the combined binary files have profiles made
    failedFiles = make_profiles.make_fileSet(args[0], options)

    #get and sort files
    extension = "_pro" + ".txt"
    filesExistProfiles = glob.glob(baseFileName + "*" + extension)
    files = []
    for file in filesExistProfiles:
        intOfFile = int(file[len(baseFileName):len(file) - len(extension)])
        if intOfFile >= start and intOfFile < end:
            files.append(file)

    files.sort()

    if len(files) < 1:
        print __name__+":"+main.__name__+": no files found matching \""+baseFileName\
          +"\" in the range "+str(start)+"-"+str(end)
        return False

    #make list of radial grid velocity, and time
    radialGridVelocity = []
    times = []
    nCount = 0
    for file in files:
        print "reading profile ", file, " ..."
        fileData = datafile.DataFile()
        fileData.readFile(file)
        nNumZones = len(fileData.fColumnValues)
        radialGridVelocity.append(
            fileData.fColumnValues[nNumZones - options.zone -
                                   1][options.column - 1])
        fileHeader = fileData.sHeader.split()
        times.append(float(fileHeader[1]))
        nCount = nCount + 1

    #print data to file
    print "writing to file ..."
    outputFileName = options.outputFile + ".txt"
    f = open(outputFileName, 'w')
    for i in range(0, len(radialGridVelocity) - 1):
        line = str(times[i]) + " " + str(radialGridVelocity[i]) + "\n"
        f.write(line)

    f.close()
    success = os.system(paths.SPHERLSanalPath + ' -tl ' + outputFileName)
    if success != 0:
        print "error computing fourier transform of \"" + outputFileName + "\""

    if __name__ == "__main__":  #keeps from redundently reporting errors
        #report failed files
        for file in failedFiles:
            print file
Exemplo n.º 3
0
 def __init__(self,element):
   """Initialize an fileSet from an xml node.
   
   Reads the "fileRange" and "timeFile" attributes, the variables listed
   under the "dataPerFile" child node, and any "interpVar" child nodes.
   Raises Exception when required attributes/nodes are missing.
   """
   
   #define allowed attributes for nodes
   self.__setSupportedNodeAttributes()
   
   #warn about unsupported attributes in node
   self.__checkSuppotedNodeAttributes(element)
   
   #get file range
   self.fileRange=element.get("fileRange")
   if self.fileRange==None or self.fileRange=="":
     raise Exception("Need a fileRange attribute in fileSet Node")
   [self.start,self.end,self.baseFileName]=disect_filename.disectFileName(self.fileRange)
   
   #get timeFile Name
   self.timeFile=element.get("timeFile")
   
   #get dataPerFile node
   dataPerFile=element.findall("dataPerFile")
   
   #BUGFIX: a missing "dataPerFile" node used to surface as a bare
   #IndexError below; report the actual problem instead
   if len(dataPerFile)==0:
     raise Exception("Need a \"dataPerFile\" node in a \"fileSet\" node")
   
   #check to see if there are more than one dataPerFile node
   if len(dataPerFile)>1:
     message="more than one \"dataPerFile\" node found only using first"\
       +" node and ignoring the rest"
     warnings.warn(message)
   
   #warn about unsupported attributes in node
   self.__checkSuppotedNodeAttributes(dataPerFile[0])
   
   #get list of variables to include in file
   self.variables=[]
   variableElements=dataPerFile[0].findall("variable")
   for variableElement in variableElements:
     
     #check that attributes are supported
     self.__checkSuppotedNodeAttributes(variableElement)
     self.variables.append(variable(variableElement))
   
   #get list of interpolation variables 
   self.interpVars={}
   interpVarElements=element.findall("interpVar")
   for interpVarElement in interpVarElements:
     
     #check that attributes of node are supported
     self.__checkSuppotedNodeAttributes(interpVarElement)
     tmp=interpVarElement.get("name")
     if tmp==None or tmp=="":
       raise Exception("\"interpVar\" must have a \"name\" attribute set")
     
     self.interpVars[tmp]=interpVar(interpVarElement)
Exemplo n.º 4
0
def make_fileSet(fileName,
                 options,
                 makeFileFunction=make_profile,
                 frequency=1):
    """Create a set of files from SPHERLS output with makeFileFunction.

    fileName:         the input file range, BASEFILENAME[start-end] syntax
    options:          command line options
    makeFileFunction: function used to create each new file in the set
    frequency:        only every frequency-th file is processed

    Returns the list of files that failed being made.
    """

    #a file name is required
    if len(fileName) < 1:
        raise Exception("Requires an input file")

    #split the range specification into its parts
    [start, end, baseFileName] = disect_filename.disectFileName(fileName)

    #make sure distributed binary files in the range have been combined
    failedFiles = combine_bins.combine_bin_files(options.keep, fileName,
                                                 options.remakeBins)

    #collect combined binary files whose dump index lies in [start,end)
    candidates = glob.glob(baseFileName +
                           "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]")
    files = sorted(f for f in candidates
                   if start <= int(f[len(baseFileName):]) < end)

    if not files:
        raise NoFilesFound("no combined binary files found in range [" +
                           str(start) + "-" + str(end) + "]")

    #process every frequency-th file, recording (not raising) failures
    nNumFiles = len(files)
    nCount = 1
    for i in range(0, nNumFiles, frequency):
        try:
            makeFileFunction(files[i], options, nCount, nNumFiles)
        except FileCreateFailed as e:
            failedFiles.append(e.message)
        nCount += 1
    return failedFiles
Exemplo n.º 5
0
def main():
  
  #parse command line options
  (options,args)=parseOptions()
  
  #get base file name
  [start,end,baseFileName]=disect_filename.disectFileName(args[0])
  
  #make sure that all the combined binary files have profiles made
  failedFiles=make_profiles.make_fileSet(args[0],options)
  
  #compute the average PKE
  averagePKE(start,end,baseFileName,options)
  
  #report failed files
  for file in failedFiles:
    print file
Exemplo n.º 6
0
def cp_files(fileName,newBaseFileName,options):
  """
  Copies SPHERLS output files using the BASEFILENAME[startIndex-endIndex] syntax.
  
  This is the same syntax that the other scripts use when working with SPHERLS
  output files.
  """
  
  #get base file name
  [start,end,baseFileName]=disect_filename.disectFileName(fileName)
  
  #check for distributed binary files in interation range starting with baseFileName
  filesExist=glob.glob(baseFileName+"[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]*")
  files=[]
  for file in filesExist:
    
    intOfFile=-1
    
    #if base part of file name matches the base file name given
    if file[0:len(baseFileName)]==baseFileName:
      testStr=file[len(baseFileName):len(baseFileName)+8]
      endStr=file[len(baseFileName)+8:len(file)]
      if testStr.isdigit():
        intOfFile=int(testStr)
      
      if intOfFile>=start and intOfFile<end:
        if options.cbOnly and endStr=='' or not options.cbOnly:
          files.append(file)
      
  if(len(files)==0):
    print __name__+":"+cp_files.__name__+": no files found in range "+baseFileName+"["+parts2[0]+"-"+parts2[2]+"]"
    return False #nothing to do
  
  #put them in order, might be nice to have them in order? Certainly not required
  files.sort()
  for file in files:
    parts=file.split('_t')
    
    newFileName=newBaseFileName+'_t'+parts[1]
    print __name__+":"+cp_files.__name__+": copying file \""+file+"\" to file "+newFileName
    
    #remove the file
    cmd='cp '+file+" "+newFileName
    os.system(cmd)
    
  return True
Exemplo n.º 7
0
def make_2DSlices(keep, fileName, nPlane, nPlaneIndex, remake):

    #get base file name
    [start, end, baseFileName] = disect_filename.disectFileName(fileName)

    #make sure that all the distributed binary files in this range have been converted to combined binary files
    combine_bins.combine_bin_files(keep, fileName, False)

    #check for combined binary files in interation range starting with baseFileName
    filesExistCombBin = glob.glob(baseFileName +
                                  "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]")
    files = []
    for file in filesExistCombBin:
        intOfFile = int(file[len(baseFileName):len(file)])
        if intOfFile >= start and intOfFile < end:
            files.append(file)

    extension = "_2D"
    if nPlane == 0:
        extension = extension + "k=" + str(nPlaneIndex) + ".txt"
    if nPlane == 1:
        extension = extension + "i=" + str(nPlaneIndex) + ".txt"
    if nPlane == 2:
        extension = extension + "j=" + str(nPlaneIndex) + ".txt"
    count = 1
    for file in files:
        #if this particular 2D slice doesn't already exsist for this binary file create it
        if not (os.path.exists(file + extension)) or remake:

            #make 2D slice
            print __name__+":"+make_2DSlices.__name__+": creating 2D slice from \""+file+"\" "\
              +str(count)+"/"+str(len(files))+" ..."
            success = os.system(paths.SPHERLSanalPath + ' -s cb ' +
                                str(nPlane) + ' ' + str(nPlaneIndex) + ' ' +
                                file)
            if success == 0:
                pass
            else:

                #say there was an error and return
                print __name__ + ":" + make_2DSlices.__name__ + ": error making 2D slice " + file + extension
                return False
        else:
            print __name__ + ":" + make_2DSlices.__name__ + ": 2D slice \"" + file + extension + "\" already exists"
        count += 1
    return True
Exemplo n.º 8
0
def move_files(fileName, newBaseFileName, options):

    #get base file name
    [start, end, baseFileName] = disect_filename.disectFileName(fileName)

    #check for distributed binary files in interation range starting with baseFileName
    filesExist = glob.glob(baseFileName +
                           "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]*")
    files = []
    for file in filesExist:

        intOfFile = -1

        #if base part of file name matches the base file name given
        if file[0:len(baseFileName)] == baseFileName:
            testStr = file[len(baseFileName):len(baseFileName) + 8]
            endStr = file[len(baseFileName) + 8:len(file)]
            if testStr.isdigit():
                intOfFile = int(testStr)

            if intOfFile >= start and intOfFile < end:
                if options.cbOnly and endStr == '' or not options.cbOnly:
                    files.append(file)

    if (len(files) == 0):
        print __name__ + ":" + move_files.__name__ + ": no files found in range " + baseFileName + "[" + parts2[
            0] + "-" + parts2[2] + "]"
        return False  #nothing to do

    #put them in order, might be nice to have them in order? Certainly not required
    files.sort()
    for file in files:
        parts = file.partition('_t')

        newFileName = newBaseFileName + parts[1] + parts[2]
        print __name__ + ":" + move_files.__name__ + ": moving file \"" + file + "\" to file " + newFileName

        #remove the file
        cmd = 'mv ' + file + " " + newFileName
        os.system(cmd)

    return True
def createSlices(settings,parsed):
  """Run the external mk2DSlice tool for every plane in settings.
  
  settings: dict with 'inputFileName' (range syntax) and 'planes', each
            plane a dict with 'planeType' ("rt","tp","rp") and 'planeIndex'
  parsed:   parsed command line options; parsed.eosFile may name an
            equation of state file to pass through
  """
  #get base file name (also validates the inputFileName range syntax)
  [start,end,baseFileName]=disect_filename.disectFileName(settings['inputFileName'])
  
  for plane in settings['planes']:
    
    #map the plane type onto the integer id mk2DSlice expects
    if plane['planeType']=="rt":
      nPlaneID=0
    if plane['planeType']=="tp":
      nPlaneID=1
    if plane['planeType']=="rp":
      nPlaneID=2

    if(parsed.eosFile!=""):
      cmd = 'mk2DSlice' + ' ' + "\"" + settings['inputFileName'] + "\"" + ' ' + parsed.eosFile + ' ' + str(nPlaneID) + ' ' + str(plane['planeIndex']);
    else:
      #BUGFIX: a space was missing between the quoted file name and the
      #plane id, fusing them into a single shell argument
      cmd = 'mk2DSlice' + ' ' + "\"" + settings['inputFileName'] + "\"" + ' ' + str(nPlaneID) + ' ' + str(plane['planeIndex']);
    print(cmd)
    os.system(cmd);
Exemplo n.º 10
0
def combine_bin_files(keep, fileName, remakeBins):
    """Combines distributed binary dump files created by SPHERLS into a single binary file."""

    [start, end, baseFileName] = disect_filename.disectFileName(fileName)

    #if only doing one file we can make this faster
    filesExist = []
    if end - start == 1:
        filesExist.append(baseFileName + str(start).zfill(8) + "-0")
    else:
        #check for distributed binary files in iteration range starting with baseFileName
        filesExist = glob.glob(baseFileName +
                               "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-0")

    files = []
    for file in filesExist:
        intOfFile = int(file[len(baseFileName):len(file) - 2])
        if intOfFile >= start and intOfFile < end:
            files.append(file)

    if (len(files) == 0):
        return []  #nothing to do, return an empty list of failedFiles

    files.sort()
    failedFiles = []
    for i in range(len(files)):

        #if combined binary file doesn't already exist with this file name
        if not (os.path.exists(files[i][:len(files[i]) - 2])) or remakeBins:

            #make combined binary files
            if not keep:
                print __name__+":"+combine_bin_files.__name__+": combining \""+files[i][:len(files[i])-2]\
                  +"\" and removing distributed binaries ..."
            else:
                print __name__ + ":" + combine_bin_files.__name__ + ": combining \"" + files[
                    i][:len(files[i]) - 2] + "\" ..."

            success = os.system(paths.SPHERLSanalPath + ' -c dbcb ' +
                                files[i][:len(files[i]) - 2])
            if success == 0:
                if not keep:
                    #remove distributed binary files
                    os.system('rm -f ' + files[i][:len(files[i]) - 2] + '-*')
            else:
                #say there was an error and quit
                failedFiles.append(__name__ + ":" +
                                   combine_bin_files.__name__ +
                                   ": error combining binary file " +
                                   files[i][:len(files[i]) - 2])
        else:
            if not keep:
                print __name__+":"+combine_bin_files.__name__+": combined binary \""\
                  +files[i][:len(files[i])-2]+"\" already exists, removing distributed binaries ..."
                #remove distributed binary files
                os.system('rm -f ' + files[i][:len(files[i]) - 2] + '-*')
            else:
                print __name__+":"+combine_bin_files.__name__+": combined binary \""\
                  +files[i][:len(files[i])-2]+"\" already exists"

    if __name__ == "__main__":  #keeps from redundently reporting errors
        #report problem files
        for failedFile in failedFiles:
            print failedFile

    return failedFiles
Exemplo n.º 11
0
    def __init__(self, element):
        """Initialize an fileSet from an xml node.

        Reads the fileRange, timeFile, frequency and outputPath attributes
        plus the radialCutZone, includeBoundaries and numRInterp child
        nodes, raising Exception when a required item is missing.
        """

        #define allowed attributes for nodes
        self.__setSupportedNodeAttributes()

        #warn about unsupported attributes in node
        self.__checkSuppotedNodeAttributes(element)

        #get file range
        self.fileRange = element.get("fileRange")
        if self.fileRange == None or self.fileRange == "":
            raise Exception(
                "Need a \"fileRange\" attribute in \"fileSet\" Node")
        [self.start, self.end,
         self.baseFileName] = disect_filename.disectFileName(self.fileRange)

        #get timeFile Name
        self.timeFile = element.get("timeFile")
        if self.timeFile == None:
            self.timeFile = "timeFile.hdf"

        #get frequency
        #BUGFIX: int() was applied unconditionally, so a missing
        #"frequency" attribute raised TypeError on int(None); keep the
        #default of 1 when the attribute is absent
        self.frequency = 1
        if element.get("frequency") != None:
            self.frequency = int(element.get("frequency"))

        #get output path
        self.outputPath = element.get("outputPath")
        if self.outputPath == None or self.outputPath == "":
            raise Exception(
                "Need a \"outputPath\" attribute in \"fileSet\" Node")

        #get radialCutZone element text
        elementRadialCutZone = element.findall("radialCutZone")
        if len(elementRadialCutZone) > 1:
            warnings.warn(
                "more than one \"radialCutZone\" node ignoring all but first node"
            )
        elif len(elementRadialCutZone) == 1:
            self.__checkSuppotedNodeAttributes(elementRadialCutZone[0])
            #BUGFIX: the condition used "or", which is always true and let
            #int(None) raise on an empty node; use "and"
            if elementRadialCutZone[0].text != None and elementRadialCutZone[
                    0].text != "":
                self.radialCutZone = int(elementRadialCutZone[0].text)
        else:
            raise Exception(
                "must have a \"radialCutZone\" node in a \"fileSet\" node with an integer value"
            )

        #get includeBoundaries element text
        elementIncludeBoundaries = element.findall("includeBoundaries")
        if len(elementIncludeBoundaries) > 1:
            warnings.warn(
                "more than one \"includeBoundaries\" node ignoring all but first node"
            )
        elif len(elementIncludeBoundaries) == 1:
            self.__checkSuppotedNodeAttributes(elementIncludeBoundaries[0])
            if elementIncludeBoundaries[0] != None:
                if elementIncludeBoundaries[0].text in [
                        "true", "yes", "y", "t", "1"
                ]:
                    self.includeBoundaries = True
                #BUGFIX: "n" was listed twice; accept "f" to mirror the
                #"t" accepted by the true list
                elif elementIncludeBoundaries[0].text in [
                        "false", "no", "n", "f", "0"
                ]:
                    self.includeBoundaries = False
                else:
                    raise Exception(
                        "\"includeBoundaries\" node expects \"true\" or \"false\""
                    )
        else:
            raise Exception(
                "must have a \"includeBoundaries\" node in a \"fileSet\" node")

        #get numRInterp element text
        elementnumRInterp = element.findall("numRInterp")
        if len(elementnumRInterp) > 1:
            warnings.warn(
                "more than one \"numRInterp\" node ignoring all but first node"
            )
        elif len(elementnumRInterp) == 1:
            self.__checkSuppotedNodeAttributes(elementnumRInterp[0])
            #BUGFIX: compared the element object (not its text) to "" and
            #used an always-true "or"; check the text with "and"
            if elementnumRInterp[0].text != None and elementnumRInterp[
                    0].text != "":
                self.numRInterp = int(elementnumRInterp[0].text)
        else:
            raise Exception(
                "must have a \"numRInterp\" node in a \"fileSet\" node with an integer value"
            )
Exemplo n.º 12
0
    def __init__(self, element):
        """Initialize a lightCurve from an xml node.

        Reads the boloCorr, inputFiles, eosFile, zonesFromSurf, outputFile
        and period child nodes of element, raising Exception when a
        required node is missing or malformed.
        """

        #get file of bolo metric corrections
        boloCorrElements = element.findall("boloCorr")
        if len(boloCorrElements) > 1:
            warnings.warn(
                "more than one \"boloCorr\" node in \"lightCurve\" node" +
                ", ignoring all but first node")
        if len(boloCorrElements) > 0:
            self.boloCorrFile = boloCorrElements[0].text
        else:
            raise Exception(
                "Must have one \"boloCorr\" input file per \"lightCurve\" node"
            )

        #get column for bolometric correction
        try:
            self.columnBC = int(boloCorrElements[0].get("columnBC"))
        except Exception as e:
            raise Exception(
                e.message +
                "\n Must have a \"columnBC\" attribute set to the column number for"
                +
                " the bolometric correction. The first column in the file is column 0"
            )

        #get whether acceleration should be included
        #BUGFIX: get() returns None for a missing attribute and
        #None.lower() raises AttributeError; treat absent as False
        withAccelText = boloCorrElements[0].get("withAcceleration")
        if withAccelText != None and withAccelText.lower() in [
                "true", "yes", "y", "1", "t"
        ]:
            self.withAcceleration = True
        else:
            self.withAcceleration = False

        #get input set
        inputFilesElements = element.findall("inputFiles")
        if len(inputFilesElements) > 1:
            warnings.warn(
                "more than one \"inputFiles\" node in \"lightCurve\" node" +
                ", ignoring all but first node")
        if len(inputFilesElements) > 0:
            self.inputFileRange = inputFilesElements[0].text
        else:
            raise Exception(
                "Must have one \"inputFiles\" node per \"lightCurve\" node")

        #get frequency
        if inputFilesElements[0].get("frequency") != None:
            try:
                self.frequency = int(inputFilesElements[0].get("frequency"))
            except ValueError as e:
                raise Exception(
                    e.message +
                    "\n \"frequency\" attribute in \"inputFiles\" node must be an "
                    + "integer")
        else:
            self.frequency = 1

        #split up file set string
        [self.start, self.end, self.baseFileName
         ] = disect_filename.disectFileName(self.inputFileRange)

        #get equation of state file if one specified
        eosFileElement = element.findall("eosFile")
        if len(eosFileElement) > 1:
            warnings.warn(
                "more than one \"eosFile\" node, ignoring all but first node")
        if len(eosFileElement) > 0:
            self.eosFile = eosFileElement[0].text
        else:
            self.eosFile = None

        #get get number of zones in from the surface, 0=surface
        zonesFromSurfElement = element.findall("zonesFromSurf")
        if len(zonesFromSurfElement) > 1:
            warnings.warn(
                "more than one \"zonesFromSurf\" node, ignoring all but first node"
            )
        if len(zonesFromSurfElement) > 0:
            try:
                self.zonesFromSurf = int(zonesFromSurfElement[0].text)
            except ValueError as e:
                raise Exception(
                    e.message +
                    "\nExpecting an integer for \"zonesFromSurf\" node")
        else:
            self.zonesFromSurf = 0

        #get outputFile
        outputFileElements = element.findall("outputFile")
        if len(outputFileElements) > 1:
            warnings.warn(
                "more than one \"outputFile\" node in \"lightCurve\" node" +
                ", ignoring all but first node")
        if len(outputFileElements) > 0:
            self.outputFile = outputFileElements[0].text
        else:
            raise Exception(
                "Must have one \"outputFile\" node per \"lightCurve\" node")

        #get period
        periodElements = element.findall("period")
        if len(periodElements) > 1:
            warnings.warn(
                "more than one \"period\" node in \"lightCurve\" node" +
                ", ignoring all but first node")
        if len(periodElements) > 0:
            self.period = float(periodElements[0].text)
        else:
            self.period = None
Exemplo n.º 13
0
def main():
    #parse command line options
    (options, args) = parseOptions()

    import matplotlib
    if not options.show:
        matplotlib.use('Agg')  #needed for saving figures

    import matplotlib.pyplot as plt
    from matplotlib.gridspec import GridSpec

    #get base file name
    fileName = args[0]
    [start, end, baseFileName] = disect_filename.disectFileName(fileName)

    #make sure that all the combined binary files have profiles made
    make_profiles.make_profiles(options.keep, fileName, options.remake,
                                options.remakeBins)

    #get and sort files
    extension = "_pro" + ".txt"
    filesExistProfiles = glob.glob(baseFileName + "*" + extension)
    files = []
    for file in filesExistProfiles:
        intOfFile = int(file[len(baseFileName):len(file) - len(extension)])
        if intOfFile >= start and intOfFile < end:
            files.append(file)

    files.sort()

    #get surface velocity for all files in range, and find max/min of value to plotting
    surfaceLuminosity = []
    fileTimes = []
    nCount = 0
    max = -1e60
    min = 1e60
    nColumn = 55
    nZonesFromSurface = 3
    for file in files:

        print "reading in profile ", file, " ..."
        fileData = datafile.DataFile()
        fileData.readFile(file)
        surfaceLuminosity.append(
            fileData.fColumnValues[len(fileData.fColumnValues) -
                                   nZonesFromSurface - 1][nColumn])
        fileHeader = fileData.sHeader.split()
        fileTimes.append(float(fileHeader[1]))
        nCount = nCount + 1

        if max < fileData.fColumnValues[
                len(fileData.fColumnValues) - nZonesFromSurface -
                1][nColumn] and fileData.fColumnValues[
                    len(fileData.fColumnValues) - nZonesFromSurface -
                    1][nColumn] != None:
            max = fileData.fColumnValues[len(fileData.fColumnValues) -
                                         nZonesFromSurface - 1][nColumn]
        if min > fileData.fColumnValues[
                len(fileData.fColumnValues) - nZonesFromSurface -
                1][nColumn] and fileData.fColumnValues[
                    len(fileData.fColumnValues) - nZonesFromSurface -
                    1][nColumn] != None:
            min = fileData.fColumnValues[len(fileData.fColumnValues) -
                                         nZonesFromSurface - 1][nColumn]

    #make plots
    nCount = 0
    fig = plt.figure(figsize=(13, 8))
    ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=3, rowspan=3)

    if not options.noGrid:
        ax1.grid()
    plotString = '-'
    if options.noLines:
        plotString = 'o'
    if options.points:
        plotString = plotString + 'o'

    if options.ymin != None:
        min = options.ymin
    if options.ymax != None:
        max = options.ymax
    print "min=", min, " max=", max
    ax1.plot(fileTimes, surfaceLuminosity, plotString)
    plt.ylim(min, max)
    ax1.set_xlabel("t [s]")
    ax1.set_ylabel("L [L_sun]")
    ax1.set_title("Ligh Curve")

    if options.show:
        plt.show()
    else:
        sOutFileName = options.outputFile + "." + options.format
        print __name__ + ":" + main.__name__ + ": creating plot \"" + sOutFileName + "\" from file " + file + "..."
        fig.savefig(sOutFileName, format=options.format,
                    transparent=False)  #save to file

    ax1.cla()  #clear plot
Exemplo n.º 14
0
def main():
  
  #parse command line options
  (options,args)=parseOptions()
  
  #get XML settings
  currentSettings=Settings()
  currentSettings.parseXML(args[0])
  
  #get base file name
  [start,end,baseFileName]=disect_filename.disectFileName(currentSettings.files)
  
  #make sure that all the combined binary files have profiles made
  failedFiles=make_profiles.make_profiles(options.keep,currentSettings.files,options.remake,False)
  
  #get period ranges and times for each period from PKE file 
  [periodRange,time]=getPeriodRanges(baseFileName,start,end,options)
  
  dWSum=[]
  dWSumError=[]
  dClosestToTurn=[]
  
  #get all possible file names
  extension="_pro"+".txt"
  filesExistProfiles=glob.glob(baseFileName+"*"+extension)
  filesExistProfiles.sort()
  
  #check for the t=0 model
  if currentSettings.workPlotSettings.temperatureProfileFile!=None:#if set use user set value
    firstFile=currentSettings.workPlotSettings.temperatureProfileFile
  else:
    firstFile=baseFileName+"00000000"
  failedFiles2=make_profiles.make_profiles(options.keep,firstFile,options.remake,False)
  if not os.path.isfile(firstFile+extension):
    warnings.warn("didn't find profile or dump at \""+firstFile+extension+
      "\" using, \""+filesExistProfiles[0]+"\" instead.")
    firstFile=filesExistProfiles[0]
  else:
    firstFile+=extension
  
  fileData=datafile.DataFile()
  fileData.readFile(firstFile)
  Log10T=fileData.fColumnValues[0:len(fileData.fColumnValues)-1,currentSettings.tColumn]
  Log10T=np.array(Log10T,dtype=np.float64)#force double precision
  Log10T=np.log10(Log10T)
  
  #find out how close to the surface to go
  nEndZone=len(fileData.fColumnValues)-1
  for j in range(len(fileData.fColumnValues)-1):#position
    if Log10T[j]<currentSettings.workPlotSettings.minTemp:
      nEndZone=j
      break
  
  for n in range(len(periodRange)):
    
    #get and sort files
    files=[]
    for file in filesExistProfiles:
      intOfFile=int(file[len(baseFileName):len(file)-len(extension)])
      if intOfFile>=periodRange[n][0] and intOfFile<periodRange[n][1]:
        files.append(file)
    
    #check to make sure we have a start and end for the period
    if periodRange[n][1]==None:
      raise Exception("file range index range "+str(start)+"-"+str(end)+" should contain at least"
        +" one period as indicated by PKE peaks, but does not.")
    
    if len(files)<3:
      raise Exception("need more than 3 files to compute the work, likely alot more with file"
        +" indices in the range  ("+str(periodRange[n][0])+","+str(periodRange[n][1])+")")
    
    #for first model dump
    fileData=datafile.DataFile()
    print "reading file ",files[0]," ..."
    fileData.readFile(files[0])
    
    #read in p, and 1/rho for first file
    p=np.zeros( (len(fileData.fColumnValues)-1,len(files)+1) )
    rhoInvert=np.zeros( (len(fileData.fColumnValues)-1,len(files)+1) )
    deltaM=np.zeros( (len(fileData.fColumnValues)-1,len(files)+1) )
    temperature=np.zeros( (len(fileData.fColumnValues)-1,len(files)+1) )
    maxP=np.empty(len(fileData.fColumnValues)-1)
    maxP.fill(-1.0*sys.float_info.max)
    minP=np.empty(len(fileData.fColumnValues)-1)
    minP.fill(sys.float_info.max)
    for i in range(len(fileData.fColumnValues)-1):
      p[i][0]=fileData.fColumnValues[i][currentSettings.pColumn]
      p[i][len(files)]=fileData.fColumnValues[i][currentSettings.pColumn]
      if currentSettings.AV:
        p[i][0]+=fileData.fColumnValues[i][currentSettings.QColumn]
        p[i][len(files)]+=fileData.fColumnValues[i][currentSettings.QColumn]
        
      #get max P
      if p[i][0]>=maxP[i]:
        maxP[i]=p[i][0]
      
      #get min P
      if p[i][0]<=minP[i]:
        maxP[i]=p[i][0]
      
      rhoInvert[i][0]=1.0/fileData.fColumnValues[i][currentSettings.rhoColumn]
      rhoInvert[i][len(files)]=1.0/fileData.fColumnValues[i][currentSettings.rhoColumn]
      deltaM[i][0]=fileData.fColumnValues[i][currentSettings.deltaMColumn]
      deltaM[i][len(files)]=fileData.fColumnValues[i][currentSettings.deltaMColumn]
      temperature[i][0]=fileData.fColumnValues[i][currentSettings.tColumn]
      temperature[i][len(files)]=fileData.fColumnValues[i][currentSettings.tColumn]
    
    #read all files in for current period
    for i in range(1,len(files)):#for each dump
      
      print "reading file ",files[i]," ..."
      
      fileData.readFile(files[i])
      
      #check headers for used columns
      if fileData.sColumnNames[currentSettings.pColumn]!=currentSettings.pColumnHeader:
        warings.warn("file \""+files[i]+" has pressure column header as \""\
          +fileData.sColumnNames[currentSettings.pColumn]+" expected something like \""\
          +currentSettings.pColumnHeader+"\".")
      if currentSettings.AV:
        if fileData.sColumnNames[currentSettings.QColumn]!=currentSettings.QColumnHeader:
          warnings.warn("file \""+files[i]+" has A.V. column header as \""\
            +fileData.sColumnNames[currentSettings.QColumn]+" expected something like \""\
            +currentSettings.QColumnHeader+"\".")
      if fileData.sColumnNames[currentSettings.rhoColumn]!=currentSettings.rhoColumnHeader:
        warnings.warn("file \""+files[i]+" has density column header as \""\
          +fileData.sColumnNames[currentSettings.rhoColumn]+" expected something like \""\
          +currentSettings.rhoColumnHeader+"\".")
      if fileData.sColumnNames[currentSettings.deltaMColumn]!=currentSettings.deltaMColumnHeader:
        warnings.warn("file \""+files[i]+" has delta M_r column header as \""\
          +fileData.sColumnNames[currentSettings.deltaMColumn]+" expected something like \""\
          +currentSettings.deltaMColumnHeader+"\".")
      
      for j in range(len(fileData.fColumnValues)-1):#for each zone
        p[j][i]=fileData.fColumnValues[j][currentSettings.pColumn]
        if currentSettings.AV:
          p[j][i]+=fileData.fColumnValues[j][currentSettings.QColumn]
          
        #get max P
        if p[j][i]>=maxP[j]:
          maxP[j]=p[j][i]
        
        #get min P
        if p[j][i]<=minP[j]:
          maxP[j]=p[j][i]
        
        deltaM[j][i]=fileData.fColumnValues[j][currentSettings.deltaMColumn]
        rhoInvert[j][i]=1.0/fileData.fColumnValues[j][currentSettings.rhoColumn]
    
    #compute work
    dW=np.zeros(len(fileData.fColumnValues)-1)
    for i in range(1,len(files)+1):#time
      for j in range(0,nEndZone):#position
        
        dW[j]+=(0.5*(rhoInvert[j][i]-rhoInvert[j][i-1])*(p[j][i]+p[j][i-1]))*deltaM[j][i]
    
    dWMax=0.0
    dWMin=0.0
    dWError=np.zeros(len(fileData.fColumnValues)-1)
    dClosenessToEdge=np.zeros(len(fileData.fColumnValues)-1)
    dWSum.append(0.0)
    dClosestToTurnThisPeriod=sys.float_info.max
    for j in range(0,nEndZone):#position
      dWSum[n]+=dW[j]
      
      #
      y1=p[j][len(files)]
      y0=p[j][len(files)-1]
      ym1=p[j][len(files)-2]
      x1=rhoInvert[j][len(files)]
      x0=rhoInvert[j][len(files)-1]
      xm1=rhoInvert[j][len(files)-2]
      deltaX=(x1-x0)
      y1py0=y1+y0
      tiny=1e-300#fixes the case where things don't move, e.g. near the core
      m=(y0-ym1)/((x0-xm1)+tiny)
      y1p=y0+m*deltaX
      y1ppy0=y1p+y0
      #how far off is our guess from assuming a linear extrapolation
      dW1=0.5*deltaX*y1py0#work from setting first point equal to last point
      dW1p=0.5*deltaX*y1ppy0#work from linear extrapolation
      dWError[j]=abs(dW1-dW1p)*deltaM[j][len(files)]
      dWMax+=dW[j]+dWError[j]
      dWMin+=dW[j]-dWError[j]
      
      #how close to a turn is the start/end position
      dtotal=maxP[j]-minP[i]
      dmax=max(abs(p[j][0]-maxP[j]),abs(p[j][0]-minP[j]))
      distanceFromEdge=dmax/dtotal#will be 1/2 if near middle, will be near 0 if near edge
      
      #keep value of closest start/end position to turn
      if distanceFromEdge<dClosestToTurnThisPeriod:
        dClosestToTurnThisPeriod=distanceFromEdge
    dClosestToTurn.append(dClosestToTurnThisPeriod)
    dWSumError.append((dWMax-dWMin)*0.5)
    
    #make plots of P and 1/rho for each zone
    fig=plt.figure(figsize=(13,8))
    ax1 = plt.subplot2grid((3,3), (0,0),colspan=3,rowspan=3)
    if currentSettings.plotPdVCurves:
      for j in range(currentSettings.PdVPlotSettings.startZone,len(fileData.fColumnValues)-1):
        #print rhoInvert[j]
        make_PdV_plot(rhoInvert[j],p[j],j,n,fig,ax1,currentSettings.PdVPlotSettings)
    
    #make work plot
    make_Work_plot(Log10T,dW,dWError,fig,ax1
      ,currentSettings.workPlotSettings,n,time[n],"Log10(T)")
    make_Work_plot(range(len(fileData.fColumnValues)-1),dW,dWError,fig,ax1
      ,currentSettings.workPlotSettings,n,time[n],"zone #")
    
  #print out total work done by model
  f=open("work_per_period.txt",'w')
  f.write("period time[s]      work[ergs] work_uncertianty[ergs] relative_distance_from_turn\n")
  for i in range(len(dWSum)):
    f.write(str(i)+" "+str(time[i])+" "+str(dWSum[i])+" "+str(dWSumError[i])+" "
      +str(dClosestToTurn[i])+"\n")
  f.close()