Example #1
# RA (hours, minutes, seconds)
sdgrd.aparm[1] = 19
sdgrd.aparm[2] = 23
sdgrd.aparm[3] = 40
# DEC (degrees, arcminutes, arcseconds)
sdgrd.aparm[4] = 14
sdgrd.aparm[5] = 31
sdgrd.aparm[6] = 30
# Image size (x, y pixels)
sdgrd.imsize[1] = 100
sdgrd.imsize[2] = 110
sdgrd.go()

from Wizardry.AIPSData import AIPSImage as WizAIPSImage
image = WizAIPSImage(AIPSCat()[mydisk][-1].name, \
                     AIPSCat()[mydisk][-1].klass, \
                     mydisk, AIPSCat()[mydisk][-1].seq)

image.header.niter = 1          # Allow downstream IMSTATs to sum correctly
image.header.update()
image.update()

## Write the newest entry in the catalog to disk
fittp.indisk=mydisk
fittp.inname=AIPSCat()[mydisk][-1].name
fittp.inclass=AIPSCat()[mydisk][-1].klass
fittp.inseq=AIPSCat()[mydisk][-1].seq
outimage = os.path.splitext(myfiles[0])[0]+'_cube.fits'
if os.path.exists(outimage):
    os.remove(outimage)
    print 'Removed existing file to make room for the new one:', outimage
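
The aparm values above give the imaging center in sexagesimal form (RA 19h 23m 40s, Dec +14d 31' 30"). A minimal standalone sketch, outside of AIPS, of how those six numbers correspond to decimal degrees (variable names are illustrative only):

# Convert the sexagesimal center used above to decimal degrees
ra_h, ra_m, ra_s = 19, 23, 40.0        # sdgrd.aparm[1], [2], [3]
dec_d, dec_m, dec_s = 14, 31, 30.0     # sdgrd.aparm[4], [5], [6]
ra_deg = 15.0 * (ra_h + ra_m / 60.0 + ra_s / 3600.0)
dec_deg = dec_d + dec_m / 60.0 + dec_s / 3600.0   # assumes a positive declination
print '%.4f %.4f' % (ra_deg, dec_deg)  # 290.9167 14.5250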
Example #2
sdgrd.imsize[2] = imySize
## The above lines set the default image parameters
## Below, override imaging center coordinate
# RA
#sdgrd.aparm[1] = 04    #hours
#sdgrd.aparm[2] = 41    #minutes
#sdgrd.aparm[3] = 15.0  #seconds
# DEC
#sdgrd.aparm[4] = 25    #degrees
#sdgrd.aparm[5] = 50    #arcmins
#sdgrd.aparm[6] = 00    #arcseconds
sdgrd.go()

from Wizardry.AIPSData import AIPSImage as WizAIPSImage
image = WizAIPSImage(AIPSCat()[mydisk][-1].name, \
                     AIPSCat()[mydisk][-1].klass, \
                     mydisk, AIPSCat()[mydisk][-1].seq)

image.header.niter = 1  # Allow downstream IMSTATs to sum correctly
#print image.header
bmaj = image.header.bmaj
#assume no smoothing in convolving function (sdgrd.xtype = -16)
newBmaj = bmaj
if sdgrd.xtype == -12:
    convolveMaj = sdgrd.xparm[2] / 3600.  # convolving function FWHM in degrees
    #Convolved image resolution adds in quadrature
    newBmaj = math.sqrt((bmaj * bmaj) + (convolveMaj * convolveMaj))
    print 'Gaussian Convolving function:'
    print bmaj * 3600., convolveMaj * 3600., '->', newBmaj * 3600.
if sdgrd.xtype == -16:
    #Convolved image resolution adds in quadrature
    pass    # the body of the -16 branch is not shown in this example
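
The newBmaj calculation above uses the fact that convolving the beam with a Gaussian gridding kernel adds the two FWHMs in quadrature. A minimal standalone sketch with hypothetical numbers (a 9 arcsec beam and a 6 arcsec convolving function):

import math

bmaj_arcsec = 9.0        # intrinsic beam FWHM, i.e. image.header.bmaj * 3600.
kernel_arcsec = 6.0      # Gaussian convolving function FWHM, i.e. sdgrd.xparm[2]
new_bmaj = math.sqrt(bmaj_arcsec ** 2 + kernel_arcsec ** 2)
print new_bmaj           # ~10.82 arcsec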
Example #3
def dbcon(sys):
    argc = len(sys.argv)
    if argc < 3:
        print ''
        print 'dbcon: Combine all observations into a single dish fits file'
        print 'usage: doImage dbcon.py <aipsNumber> <feeds>'
        print '                        <average> <channels> <display> <rmsflag> <verbose> <baseline-subtract>'
        print '                        <keeptempfiles> <spectra File 1> [<spectra File n>]'
        print 'where <aipsNumber>     Your *PIPELINE* AIPS number (should always be the same)'
        print '      <spectra File 1> One or more calibrated spectra files (*.fits)'
        print '      Combined spectra are placed in catalog slot 1'
        print ''
        quit()
    
    feeds = sys.argv[2].split(',')
    average = sys.argv[3]
    channels = sys.argv[4]
    display_idlToSdfits = sys.argv[5]
    idlToSdfits_rms_flag = sys.argv[6]
    verbose = sys.argv[7]
    idlToSdfits_baseline_subtract = sys.argv[8]
    keeptempfiles = sys.argv[9]
    imfiles = sys.argv[10:]
    
    if not imfiles:
        return
    
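    # Group the calibrated spectra by feed and convert each group with
    # run_idlToSdfits (defined elsewhere in this pipeline) into a file
    # that UVLOD can load below.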
    sdf_files = []
    for feed in feeds:
        files = []
        for xx in imfiles:
            if 'feed{num}'.format(num=feed) in xx:
                files.append(xx)
        
        if not files:
            continue
            
        sdf = run_idlToSdfits(files, average, channels, display_idlToSdfits,
                   idlToSdfits_rms_flag, verbose, idlToSdfits_baseline_subtract)
        sdf_files.append(sdf)
        
    AIPS.userno=int(sys.argv[1])    # retrieve AIPS pipeline user number
    mydisk=2                        # choose a good default work disk
    baddisk=1                       # list a disk to avoid (0==no avoidance)
    
    AIPSCat().zap()                 # empty the catalog
    
    uvlod=AIPSTask('uvlod')         # Create structures for AIPS tasks
    uvlod.outdisk=mydisk            # write all input data to a select disk
    fittp=AIPSTask('fittp')
    dbcon=AIPSTask('dbcon')
    uvsrt=AIPSTask('uvsrt')
    mandl=AIPSTask('mandl')
    
    # Need a temporary small file to reserve slot 1
    mandl.outdisk = mydisk
    # create an image that will be deleted at end
    mandl.go()
    
    #
    kount = 0                       # init count of similar input files
    
    for thisFile in sdf_files:        # input all AIPS single dish FITS files
        uvlod.datain='PWD:'+thisFile
        print uvlod.datain
        uvlod.outdisk=mydisk
        uvlod.go()
        spectra = AIPSUVData(AIPSCat()[mydisk][-1].name, AIPSCat()[mydisk][-1].klass, mydisk, AIPSCat()[mydisk][-1].seq)
        nuRef    = spectra.header.crval[2]
        if kount == 0:
            firstNu = nuRef
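        # Reject any file whose reference frequency differs from the first
        # file's by more than 1.E5 (100 kHz if the frequency axis is in Hz).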
        if ((firstNu - nuRef) < -1.E5) or ((firstNu - nuRef) > 1.E5):
            print 'Frequencies differ: ',nuRef,' != ',firstNu
            spectra.zap()
        else:
            kount = kount+1
    
    spectra = AIPSUVData(AIPSCat()[mydisk][-1].name, AIPSCat()[mydisk][-1].klass, mydisk, AIPSCat()[mydisk][-1].seq)
    
    # prepare to accumulate source names
    allObjects = [""] * 38          # scratch list for accumulating distinct source names
    allObjects[0] = spectra.header.object
    nObjects = 1
    
    if kount > 1:            # if more than 1 file DBCON them
    
        # always do first 2
        dbcon.indisk=mydisk
        dbcon.outdisk=mydisk
        dbcon.in2disk=mydisk
        dbcon.inname = AIPSCat()[mydisk][1].name
        dbcon.inclass = AIPSCat()[mydisk][1].klass
        dbcon.inseq = AIPSCat()[mydisk][1].seq
        dbcon.in2name = AIPSCat()[mydisk][2].name
        dbcon.in2class = AIPSCat()[mydisk][2].klass
        dbcon.in2seq = AIPSCat()[mydisk][2].seq
        print 'combining 1: ', dbcon.inname, dbcon.inclass, dbcon.inseq
        print 'combining 2: ', dbcon.in2name, dbcon.in2class, dbcon.in2seq
        dbcon.reweight[1] = 0
        dbcon.reweight[2] = 0
        dbcon.go()
    
        # and keep adding in one
        for i in range(2,kount):
            # end of cat is always most recent dbcon result
            dbcon.inname = AIPSCat()[mydisk][-1].name
            dbcon.inclass = 'DBCON'
            dbcon.inseq = i - 1
            dbcon.in2name = AIPSCat()[mydisk][i+1].name
            dbcon.in2class = AIPSCat()[mydisk][i+1].klass
            dbcon.in2seq = AIPSCat()[mydisk][i+1].seq
            print 'combining 1: ', dbcon.inname, dbcon.inclass, dbcon.inseq
            print 'combining 2: ', dbcon.in2name, dbcon.in2class, dbcon.in2seq
            # prepare to zap the previous DBCONed file
            dbcon.go()
            # now zap previous big input file 
            spectra = AIPSUVData(AIPSCat()[mydisk][-1].name, 'DBCON',mydisk, i-1)
            spectra.zap()
    
        # remove input files; remove them in reverse order to preserve the catalog order
        for i in range(1,kount+1):
            j = kount+1-i
            aname = AIPSCat()[mydisk][j].name
            aclass = AIPSCat()[mydisk][j].klass
            aseq = AIPSCat()[mydisk][j].seq
            # print i, j, aname, aclass, aseq
            spectra = AIPSUVData( aname, aclass, mydisk, aseq)
            notFound = True
            # check if this object is already in the list
            for iii in range(0,nObjects):
                if (allObjects[iii] == spectra.header.object):
                    notFound = False
            # if not in the list add to list and increment count
            if (notFound):
                allObjects[nObjects] = spectra.header.object
                nObjects = nObjects+1
            spectra.zap()
    
    #print nObjects,' Object(s) Observed: ', allObjects
    objectName = allObjects[0]
    for iii in range(1,nObjects):
        if len(allObjects[iii]) > 0:
            objectName = objectName + '+' + allObjects[iii]
    
    print nObjects,' Object(s) Observed: ', objectName
    
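    # With more than two distinct sources, shorten the label to
    # '<first source>+<number of additional sources>'.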
    if nObjects > 2:
        objectName = allObjects[0] + '+' + str( nObjects-1)
    
    # Extract the observations summary
    spectra = AIPSUVData(AIPSCat()[mydisk][-1].name, AIPSCat()[mydisk][-1].klass, mydisk, AIPSCat()[mydisk][-1].seq)
    
    # Read parameters passed inside the spectra data header
    nChan    = round(spectra.header.naxis[2])
    cellsize = round(spectra.header.cdelt[4] * 3600.)
    refChan  = spectra.header.crpix[2]
    imxSize  = 2*round(spectra.header.crpix[3]/1.5 )
    imySize  = 2*round(spectra.header.crpix[4]/1.5 )
    raDeg    = spectra.header.crval[3]
    decDeg   = spectra.header.crval[4]
    nuRef    = spectra.header.crval[2]
    dNu      = spectra.header.cdelt[2]
    
    print "Ra,Dec:", raDeg, decDeg, "Image:", imxSize, imySize, cellsize, 
    #print spectra.header
    
    # now delete the placeholder image that reserved the first catalog slot
    image = WizAIPSImage(AIPSCat()[mydisk][0].name, \
                         AIPSCat()[mydisk][0].klass, \
                         mydisk, AIPSCat()[mydisk][0].seq)
    image.zap()
    
    # sort data to prevent downstream problems
    uvsrt.indisk=mydisk
    uvsrt.outdisk=mydisk
    uvsrt.baddisk[1]=baddisk
    uvsrt.outcl='UVSRT'
    uvsrt.sort='TB'
    uvsrt.inname=AIPSCat()[mydisk][-1].name
    if kount < 2:
        uvsrt.inclass=AIPSCat()[mydisk][-1].klass
        uvsrt.inseq=kount    
    else:
        uvsrt.inclass='DBCON'
        uvsrt.inseq=kount - 1
    uvsrt.go()
    
    # now clean up the last of the input files
    spectra.zap()
    
    ## Write the newest entry in the catalog to disk
    fittp.indisk=mydisk
    fittp.inname=AIPSCat()[mydisk][-1].name
    fittp.inclass=AIPSCat()[mydisk][-1].klass
    fittp.inseq=AIPSCat()[mydisk][-1].seq
    outName = os.path.splitext(sdf_files[0])[0]
    # Trim out the source name
    iUnder = outName.find("_")
    if iUnder > 0:
        outName = outName[iUnder+1:]
    # Trim out the beam number
    iUnder = outName.find("_")
    if iUnder > 0:
        outName = outName[iUnder+1:]
    # Trim out the first scan number
    iUnder = outName.find("_")
    if iUnder > 0:
        outName = outName[iUnder+1:]
    # Trim out the sampler number
    iUnder = outName.rfind("_")
    if iUnder > 0:
        outName = outName[0:iUnder]
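    # e.g. a hypothetical first file 'W3OH_feed0_21_25_A13.fits' would reduce
    # to '25' at this point, giving '<objectName>_25_dbcon.fits' below.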
    # Now prepend the object name(s)
    lObjectName = len(objectName)
    if lObjectName > 40:
        objectName = objectName[:40]
    outimage = objectName+'_'+outName+'_dbcon.fits'
    if os.path.exists(outimage):
        os.remove(outimage)
        print 'Removed existing file to make room for the new one:', outimage
    
    fittp.dataout='PWD:'+outimage
    fittp.go()

    if keeptempfiles != '1':
        [os.unlink(xx) for xx in sdf_files]
        if os.path.isdir('summary'):
            [os.unlink('summary/'+xx) for xx in os.listdir('summary')]
            os.rmdir('summary')
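
The usage text in dbcon() shows the script being driven through the pipeline's doImage wrapper with the interpreter's sys module passed straight in. A minimal entry-point sketch under that assumption (the guard itself is not part of the example above, and the ParselTongue imports at the top of the original script are assumed):

if __name__ == '__main__':
    import sys
    dbcon(sys)    # e.g. doImage dbcon.py <aipsNumber> <feeds> ... <spectra files>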