def reset(self):
    """
    Function to reset the current step and remove all generated data.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    nbeams = 37  # NOTE(review): hard-coded beam count; other modules use self.NBEAMS -- confirm they agree
    logger.warning(' Deleting all converted data.')
    for beam in range(nbeams):
        path = self.get_crosscalsubdir_path(str(beam).zfill(2))
        # Bugfix: the original called path.isdir(path) on a plain string, which
        # raises AttributeError; os.path.isdir is the intended existence check
        # (matching reset_all in this module).
        if os.path.isdir(path):
            subs_managefiles.director(self, 'rm', path + '/*')
    logger.warning(' Deleting all parameter file entries for CONVERT module')
    # Drop every CONVERT status keyword, stage by stage, for each dataset type
    # (same order as the original explicit list).
    for stage in ('MSavailable', 'MS2UVFITS', 'UVFITSavailable', 'UVFITS2MIRIAD'):
        for dataset in ('fluxcal', 'polcal', 'targetbeams'):
            subs_param.del_param(self, 'convert_' + dataset + '_' + stage)
def create_param_file(step):
    """
    Create a new parameter file (an empty dictionary saved with numpy) in the
    base directory in case there is none yet.
    """
    subs_setinit.setinitdirs(step)
    target = step.basedir + step.paramfilename
    empty_params = {}
    np.save(target, empty_params)
def checkimagegaussianity(self, image, alpha):
    """
    Subroutine to check if an image has gaussian distribution

    image (string): The path/name of the image to check in FITS-format
    alpha (float): significance level for the normality test
    returns (boolean): True if image is ok, False otherwise

    NOTE(review): the code returns True when p < alpha, i.e. when the
    normality test REJECTS gaussianity. That looks inverted relative to the
    'True if image is ok' contract above -- confirm against the callers.
    """
    setinit.setinitdirs(self)
    char_set = string.ascii_uppercase + string.digits
    if os.path.isdir(image) or os.path.isfile(image):
        with tempfile.TemporaryDirectory() as tempdir:
            if os.path.isdir(image):
                # MIRIAD image (a directory): export it to a randomly named
                # FITS file inside the temporary directory first.
                temp_string = ''.join(random.sample(char_set * 8, 8))
                fits = lib.miriad('fits')
                fits.op = 'xyout'
                fits.in_ = image
                fits.out = tempdir + '/' + temp_string + '.fits'
                fits.go()
                pyfile = pyfits.open(tempdir + '/' + temp_string + '.fits')
            elif os.path.isfile(image):
                # Already a FITS file: open it directly.
                pyfile = pyfits.open(image)
            else:
                error = 'Image format not supported. Only MIRIAD and FITS formats are supported!'
                logger.error(error)
                raise ApercalException(error)
            # First plane of the primary HDU (drops the leading degenerate axes)
            image = pyfile[0].data[0][0]
            pyfile.close()
            # D'Agostino-Pearson normality test over all pixels, ignoring NaNs
            k2, p = scipy.stats.normaltest(image, nan_policy='omit', axis=None)
            if p < alpha:
                return True
            else:
                return False
    else:
        error = 'Image {} does not seem to exist!'.format(image)
        logger.error(error)
        raise ApercalException(error)
def show(config_object, section, showall=False):
    """
    show: Prints the current settings of the pipeline. Only shows keywords,
    which are in the default config file default.cfg

    section (string): name of the config section (step) whose settings are printed
    showall (bool): Set to true if you want to see all current settings
                    instead of only the ones from the current step
    """
    subs_setinit.setinitdirs(config_object)
    config = ConfigParser()
    # read_file replaces the deprecated readfp; the context manager closes the
    # file handle that the original version leaked.
    with open(default_cfg) as fp:
        config.read_file(fp)
    for s in config.sections():
        # showall prints every section, otherwise only the requested one.
        # (The original duplicated the inner loop in both branches and made a
        # dead `o = config.options(s)` assignment.)
        if not (showall or s == section):
            continue
        logger.info(s)
        for o in config.items(s):
            try:
                logger.info('\t' + str(o[0]) + ' = ' + str(config_object.__dict__.__getitem__(o[0])))
            except KeyError:
                # keyword from default.cfg not set on this step object
                pass
def reset(self):
    """
    Function to reset the current step and remove all generated data.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    logger.warning('Deleting all raw data products and their directories.')
    subs_managefiles.director(self, 'ch', self.basedir)
    # Remove the raw subdirectory of every two-digit beam directory.
    for beam_rawdir in glob.glob(self.basedir + '[0-9][0-9]' + '/' + self.rawsubdir):
        subs_managefiles.director(self, 'rm', beam_rawdir)
    logger.warning('Deleting all parameter file entries for PREPARE module')
    # Drop every PREPARE status keyword for each dataset type
    # (same order as the original explicit list).
    for dataset in ('fluxcal', 'polcal', 'targetbeams'):
        for status in ('requested', 'diskstatus', 'altastatus', 'copystatus', 'rejreason'):
            subs_param.del_param(self, 'prepare_' + dataset + '_' + status)
def reset_all(self):
    """
    Function to reset the current step and remove all generated data for all
    beams.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    for beam_nr in range(self.NBEAMS):
        beam_str = str(beam_nr).zfill(2)
        cbeam = 'convert_B' + beam_str
        logger.warning('Beam ' + beam_str + ': Deleting all converted data.')
        beam_path = self.get_crosscalsubdir_path(beam_str)
        if os.path.isdir(beam_path):
            subs_managefiles.director(self, 'rm', beam_path + '/*')
        logger.warning('Beam ' + beam_str + ': Deleting all parameter file entries for CONVERT module')
        # Drop every CONVERT status keyword, stage by stage, for each dataset
        # type (same order as the original explicit list).
        for stage in ('MSavailable', 'MS2UVFITS', 'UVFITSavailable', 'UVFITS2MIRIAD'):
            for dataset in ('fluxcal', 'polcal', 'targetbeams'):
                subs_param.del_param(self, cbeam + '_' + dataset + '_' + stage)
def polarisation(self):
    """
    Produces individual images for Stokes Q,U, cleans them with the mask from
    Stokes I and combines them in a cube. Then uses RM-Synthesis to produce a
    Faraday cube and clean it.
    """
    subs_setinit.setinitdirs(self)
    subs_setinit.setdatasetnamestomiriad(self)
    # Placeholder: polarisation imaging is not implemented yet.
    logger.info(' Polarisation imaging is going to be implemented later')
def director(self, option, dest, file_=None, verbose=True, ignore_nonexistent=False):
    """
    director: Function to move, remove, and copy file_s and directories

    option: 'mk', 'ch', 'mv', 'rn', 'rm', and 'cp' are supported
    dest: Destination of a file or directory to move to
    file_: Which file to move or copy, otherwise None
    verbose: if True, log debug messages when creating/changing directories
    ignore_nonexistent: silently skip 'rm' when dest matches no existing path
    """
    subs_setinit.setinitdirs(self)
    if option == 'mk':
        # Create the directory only if it does not exist yet
        if os.path.exists(dest):
            pass
        else:
            os.makedirs(dest)
            if verbose:
                logger.debug('Creating directory ' + str(dest) + ' #')
    elif option == 'ch':
        # Change working directory, creating dest on demand
        if os.getcwd() == dest:
            pass
        else:
            self.lwd = os.getcwd()  # Save the former working directory in a variable
            try:
                os.chdir(dest)
            except Exception:
                # chdir failed (presumably dest missing): create it, then enter it
                os.makedirs(dest)
                if verbose:
                    logger.debug('Creating directory ' + str(dest) + ' #')
                os.chdir(dest)
            self.cwd = os.getcwd()  # Save the current working directory in a variable
            if verbose:
                logger.debug('Moved to directory ' + str(dest) + ' #')
    elif option == 'mv':  # Move
        # Destination directory is created first when missing
        if os.path.exists(dest):
            lib.basher("mv " + str(file_) + " " + str(dest))
        else:
            os.mkdir(dest)
            lib.basher("mv " + str(file_) + " " + str(dest))
    elif option == 'rn':  # Rename
        lib.basher("mv " + str(file_) + " " + str(dest))
    elif option == 'cp':  # Copy
        lib.basher("cp -r " + str(file_) + " " + str(dest))
    elif option == 'rm':  # Remove
        if ignore_nonexistent and not glob.glob(str(dest)):
            return
        # NOTE(review): paths are interpolated into a shell command string;
        # paths containing spaces or shell metacharacters will misbehave --
        # confirm all callers pass safe paths.
        lib.basher("rm -r " + str(dest))
    else:
        logger.warning(
            ' Option not supported! Only mk, ch, mv, rm, rn, and cp are supported!'
        )
def reset(self):
    """
    Function to reset the current step and remove all generated data.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    subs_setinit.setdatasetnamestomiriad(self)
    logger.warning('Deleting all data products ready for transfer!')
    # Move to the base directory first so the transfer directory can be removed.
    subs_managefiles.director(self, 'ch', self.basedir)
    subs_managefiles.director(self, 'rm', self.transferdir)
def reset(self):
    """
    Function to reset the current step and remove all generated data.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    subs_setinit.setdatasetnamestomiriad(self)
    logger.warning(' Deleting all self-calibrated data.')
    # Enter the selfcal directory, then wipe its entire contents.
    subs_managefiles.director(self, 'ch', self.selfcaldir)
    subs_managefiles.director(self, 'rm', self.selfcaldir + '/*')
def flagline(self):
    """
    Creates an image cube of the different chunks and measures the rms in each
    channel. All channels with an rms outside of a given sigma interval are
    flagged in the continuum calibration, but are still used for line imaging.
    """
    if self.selfcal_flagline:
        subs_setinit.setinitdirs(self)
        subs_setinit.setdatasetnamestomiriad(self)
        logger.info(' Automatic flagging of HI-line/RFI started')
        subs_managefiles.director(self, 'ch', self.selfcaldir)
        for chunk in self.list_chunks():
            subs_managefiles.director(self, 'ch', self.selfcaldir + '/' + str(chunk))
            logger.info('Looking through data chunk ' + str(chunk) + ' #')
            # Make a dirty cube of the chunk to measure per-channel noise
            invert = lib.miriad('invert')
            invert.vis = chunk + '.mir'
            invert.map = 'map'
            invert.beam = 'beam'
            invert.imsize = self.selfcal_image_imsize
            invert.cell = self.selfcal_image_cellsize
            invert.stokes = 'ii'
            invert.slop = 1
            invert.go()
            if os.path.exists('map'):
                # Export the cube to FITS so it can be loaded with pyfits
                fits = lib.miriad('fits')
                fits.in_ = 'map'
                fits.op = 'xyout'
                fits.out = 'map.fits'
                fits.go()
                cube = pyfits.open('map.fits')
                data = cube[0].data
                # Per-channel rms; assumes axis 1 is the channel axis -- TODO confirm
                std = np.nanstd(data, axis=(0, 2, 3))
                median = np.median(std)
                stdall = np.nanstd(std)
                diff = std - median
                # NOTE(review): this flags channels where
                # sigma * |std - median| > std(std), i.e. |diff| > stdall/sigma.
                # Confirm the sigma factor is meant to scale the deviation
                # rather than the threshold.
                detections = np.where(np.abs(self.selfcal_flagline_sigma * diff) > stdall)[0]
                if len(detections) > 0:
                    logger.info('Found high noise in channel(s) ' + str(detections).lstrip('[').rstrip(']') + ' #')
                    # Flag each detected channel in the visibility data
                    for d in detections:
                        uvflag = lib.miriad('uvflag')
                        uvflag.vis = chunk + '.mir'
                        uvflag.flagval = 'flag'
                        uvflag.line = "'" + 'channel,1,' + str(d + 1) + "'"
                        uvflag.go()
                    logger.info(
                        'Flagged channel(s) ' + str(detections).lstrip('[').rstrip(']') + ' in data chunk ' + str(
                            chunk) + ' #')
                else:
                    logger.info('No high noise found in data chunk ' + str(chunk) + ' #')
                # Clean up the intermediate map/beam products
                subs_managefiles.director(self, 'rm', self.selfcaldir + '/' + str(chunk) + '/' + 'map')
                subs_managefiles.director(self, 'rm', self.selfcaldir + '/' + str(chunk) + '/' + 'map.fits')
                subs_managefiles.director(self, 'rm', self.selfcaldir + '/' + str(chunk) + '/' + 'beam')
            else:
                # (message reconstructed from a line-wrapped string in the original)
                logger.info(' No data in chunk ' + str(chunk) + '!')
        logger.info(' Automatic flagging of HI-line/RFI done')
def show_param(step):
    """
    Shows all the entries of the parameter file in a sorted order
    """
    subs_setinit.setinitdirs(step)
    if not os.path.isfile(step.basedir + step.paramfilename):
        logger.info('Parameter file not found!')
    else:
        d = np.load(step.basedir + step.paramfilename).item()
        # Bugfix: the original logger.info(k, v) treated v as a %-format
        # argument with no placeholder in k, so the value was never printed.
        # Also iterate the keys sorted, as the docstring promises.
        for k in sorted(d):
            logger.info('%s = %s', k, d[k])
def add_param(step, parameter, values):
    """
    Check if the param file exists, open it, check if the parameter exists and
    add or overwrite the parameter.

    parameter(string): Name of the parameter in the param file
    values(diverse): The data corresponding to the parameter
    """
    subs_setinit.setinitdirs(step)
    param_path = step.basedir + step.paramfilename
    # Make sure an (empty) parameter file exists before loading it.
    if not os.path.isfile(param_path):
        create_param_file(step)
    params = np.load(param_path).item()
    params[parameter] = values
    np.save(param_path, params)
def reset_all(self):
    """
    Function to reset the current step and remove all generated data.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    logger.warning(
        'Deleting all raw data products and their directories for all beams. You will need to '
        'start with the PREPARE step again!')
    subs_managefiles.director(self, 'ch', self.basedir)
    for b in range(self.NBEAMS):
        beam_str = str(b).zfill(2)
        prebeam = 'prepare_B' + beam_str
        sbeam = 'split_B' + beam_str
        rawdir = self.basedir + beam_str + '/' + self.rawsubdir
        if os.path.isdir(rawdir):
            try:
                logger.warning('Beam ' + beam_str + ': Deleting all raw data products.')
                subs_managefiles.director(self, 'rm', rawdir)
            except Exception:
                # Bugfix: was a bare `except: pass`, which also swallowed
                # SystemExit/KeyboardInterrupt. Deletion stays best-effort,
                # but the failure is now logged.
                logger.warning('Beam ' + beam_str + ': Could not delete raw data products.', exc_info=True)
            logger.warning(
                'Beam ' + beam_str +
                ': Deleting all parameter file entries for PREPARE and SPLIT module.'
            )
            # Drop every PREPARE status keyword for each dataset type
            # (same order as the original explicit list).
            for dataset in ('fluxcal', 'polcal', 'targetbeams'):
                for status in ('requested', 'diskstatus', 'altastatus', 'copystatus', 'rejreason'):
                    subs_param.del_param(self, prebeam + '_' + dataset + '_' + status)
            # Drop the SPLIT status keywords as well.
            for dataset in ('fluxcal', 'polcal', 'targetbeams'):
                subs_param.del_param(self, sbeam + '_' + dataset + '_status')
        else:
            # (message reconstructed from a line-wrapped string in the original)
            logger.warning('Beam ' + beam_str + ': No raw data present.')
def get_param(step, parameter):
    """
    Load a keyword of the parameter file into a variable

    parameter (string): Name of the keyword to load
    returns (various): The variable for the parameter; implicitly None when
                       the parameter file does not exist
    """
    subs_setinit.setinitdirs(step)
    param_path = step.basedir + step.paramfilename
    if not os.path.isfile(param_path):
        # No file: log the problem and fall through (returns None).
        logger.error('Parameter file not found! Cannot load parameter ' + str(parameter))
    else:
        stored = np.load(param_path).item()
        return stored[parameter]
def imagetofits(self, mirimage, fitsimage):
    """
    Converts a MIRIAD image to a FITS image

    mirimage: The MIRIAD image to convert
    fitsimage: The converted FITS image
    """
    subs_setinit.setinitdirs(self)
    exporter = lib.miriad('fits')
    exporter.op = 'xyout'
    exporter.in_ = mirimage
    exporter.out = fitsimage
    exporter.go()
    # Remove the MIRIAD original only once the FITS file is really there.
    if os.path.isfile(fitsimage):
        director(self, 'rm', mirimage)
def list_chunks(self):
    """
    Checks how many chunk directories exist and returns a list of their
    zero-padded two-digit names (e.g. ['00', '01', ...]).
    """
    subs_setinit.setinitdirs(self)
    subs_setinit.setdatasetnamestomiriad(self)
    # Count consecutive chunk directories 00, 01, ... until one is missing.
    # Bugfix: the original for-loop over range(100) left n at 99 when all 100
    # directories existed, silently dropping the last chunk; it also imposed
    # an arbitrary 100-chunk cap.
    n = 0
    while os.path.exists(self.selfcaldir + '/' + str(n).zfill(2)):
        n += 1
    return [str(i).zfill(2) for i in range(n)]
def check_param(step, parameter):
    """
    Check if a parameter exists in the parameter file and return True or False

    parameter (string): The parameter key to search for
    returns (bool): True if parameter exists, otherwise False
    """
    subs_setinit.setinitdirs(step)
    param_path = step.basedir + step.paramfilename
    if os.path.isfile(param_path):
        stored = np.load(param_path).item()
        if parameter in stored:
            return True
    else:
        # No file yet: report it and create an empty one for later use.
        logger.info('Parameter file not found! Cannot load parameter ' + str(parameter))
        create_param_file(step)
    return False
def del_param(step, parameter):
    """
    Delete a parameter from the parameter file.

    parameter(string): Name of the parameter to delete
    """
    subs_setinit.setinitdirs(step)
    param_path = step.basedir + step.paramfilename
    if not os.path.isfile(param_path):
        logger.info('Parameter file not found! Cannot remove parameter ' + str(parameter))
        return
    stored = np.load(param_path).item()
    try:
        del stored[parameter]
    except KeyError:
        logger.info('Parameter file does not have parameter ' + str(parameter))
    else:
        # Only rewrite the file when something was actually removed.
        np.save(param_path, stored)
def check_starting_conditions(self):
    """
    Check that the miriad file from convert exists.

    If it does not exist, none of the subsequent tasks in go need to be
    executed. This seems necessary as not all the tasks do this check and
    they do not have to. A single task is enough.

    Not sure if it is necessary to add all the param variables from selfcal
    and set them False if the check fails. For now, just use the main one.

    Args:
        self
    Return:
        (bool): True if file is found, otherwise False
    """
    logger.info("Beam {}: Checking starting conditions for TRANSFER".format(self.beam))
    # initial setup
    subs_setinit.setinitdirs(self)
    subs_setinit.setdatasetnamestomiriad(self)
    # path to converted miriad file
    mir_file = os.path.join(self.crosscaldir, self.target)
    # Guard clause: bail out early when the miriad file is missing.
    if not os.path.isdir(mir_file):
        logger.warning("Beam {}: Checking starting conditions for TRANSFER ... Done: Failed".format(self.beam))
        logger.warning("Beam {}: Did not find main miriad file in {}".format(self.beam, mir_file))
        return False
    logger.info("Beam {}: Checking starting conditions for TRANSFER ... Done: All good.".format(self.beam))
    return True
def getimagestats(self, image):
    """
    Subroutine to calculate the min, max and rms of an image

    image (string): The absolute path to the image file.
    returns (numpy array): The min, max and rms of the image
    """
    setinit.setinitdirs(self)
    char_set = string.ascii_uppercase + string.digits
    if os.path.isdir(image) or os.path.isfile(image):
        if os.path.isdir(image):
            # MIRIAD image (a directory): export it to a randomly named FITS
            # file inside a temporary directory first.
            temp_string = ''.join(random.sample(char_set * 8, 8))
            fits = lib.miriad('fits')
            fits.op = 'xyout'
            fits.in_ = image
            with tempfile.TemporaryDirectory() as tempdir:
                fits.out = tempdir + '/' + temp_string + '.fits'
                fits.go()
                image_data = pyfits.open(tempdir + '/' + temp_string + '.fits')
            # NOTE(review): the temporary directory is deleted when the
            # with-block exits, but pyfits opens files lazily -- confirm the
            # pixel data is still readable below, or move the statistics
            # inside the with-block.
        elif os.path.isfile(image):
            image_data = pyfits.open(image)
        else:
            error = 'Image format not supported. Only MIRIAD and FITS formats are supported!'
            logger.error(error)
            raise ApercalException(error)
        data = image_data[0].data
        imagestats = np.full(3, np.nan)
        # presumably shape[-3] == 2 indicates a two-plane cube; only the first
        # plane is measured then -- TODO confirm the axis convention
        if data.shape[-3] == 2:
            imagestats[0] = np.nanmin(data[0, 0, :, :])  # Get the minimum of the image
            imagestats[1] = np.nanmax(data[0, 0, :, :])  # Get the maximum of the image
            imagestats[2] = np.nanstd(data[0, 0, :, :])  # Get the standard deviation
        else:
            imagestats[0] = np.nanmin(data)  # Get the minimum of the image
            imagestats[1] = np.nanmax(data)  # Get the maximum of the image
            imagestats[2] = np.nanstd(data)  # Get the standard deviation
        image_data.close()  # Close the image
    else:
        error = 'Image does not seem to exist!'
        logger.error(error)
        raise ApercalException(error)
    return imagestats
def get_param_def(step, parameter, default):
    """
    Load a keyword of the parameter file into a variable, or give a default
    value if the keyword is not in the parameter file.

    TODO: merge this into get_param to avoid loading param.npy too often

    step (object): step for which to do this
    parameter (string): name of the keyword to load
    default (object): value returned when the file or the keyword is missing
    """
    subs_setinit.setinitdirs(step)
    param_path = step.basedir + step.paramfilename
    if os.path.isfile(param_path):
        stored = np.load(param_path).item()
        if parameter in stored:
            return stored[parameter]
    return default
def reset(self):
    """
    Function to reset the current step and remove all generated data.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    logger.warning('Beam ' + self.beam + ': Deleting all raw data and their directories.')
    subs_managefiles.director(self, 'ch', self.basedir)
    try:
        subs_managefiles.director(
            self, 'rm', self.basedir + self.beam + '/' + self.rawsubdir)
    except Exception:
        # Bugfix: was a bare `except: pass`, which also swallowed
        # SystemExit/KeyboardInterrupt. Deletion stays best-effort, but the
        # failure is now logged.
        logger.warning('Beam ' + self.beam + ': Could not delete raw data directory.', exc_info=True)
    logger.warning(
        'Beam ' + self.beam +
        ': Deleting all parameter file entries for SPLIT and PREPARE module'
    )
    prebeam = 'prepare_B' + str(self.beam).zfill(2)
    sbeam = 'split_B' + str(self.beam).zfill(2)
    # Drop every PREPARE status keyword for each dataset type
    # (same order as the original explicit list).
    for dataset in ('fluxcal', 'polcal', 'targetbeams'):
        for status in ('requested', 'diskstatus', 'altastatus', 'copystatus', 'rejreason'):
            subs_param.del_param(self, prebeam + '_' + dataset + '_' + status)
    # Drop the SPLIT status keywords as well.
    for dataset in ('fluxcal', 'polcal', 'targetbeams'):
        subs_param.del_param(self, sbeam + '_' + dataset + '_status')
def reset(self):
    """
    Function to reset the current step and remove all generated data.

    Be careful! Deletes all data generated in this step!
    """
    subs_setinit.setinitdirs(self)
    subs_setinit.setdatasetnamestomiriad(self)
    # Nothing to do when the mosaic directory was never created.
    if not os.path.isdir(self.mosdir):
        logger.warning('Mosaicked data products are not present!')
        return
    logger.warning('Deleting all mosaicked data products.')
    subs_managefiles.director(self, 'ch', self.basedir)
    subs_managefiles.director(self, 'rm', self.mosdir)
    logger.warning('Deleting all parameter file entries for MOSAIC module')
    # Drop every MOSAIC continuum-mf status keyword.
    for keyword in ('status', 'continuumstatus', 'copystatus', 'convolstatus',
                    'continuumbeamparams', 'continuumimagestats'):
        subs_param.del_param(self, 'mosaic_continuum_mf_' + keyword)
def getmaskstats(self, image, size):
    """
    Subroutine to calculate the number of pixels in a mask and its percentage
    of the full image

    image (string): The absolute path to the image file.
    size (int): Number of pixels along an axis of the original image. Assumes square images.
    returns (numpy array): The number of pixels and their percentage of the full image
    """
    setinit.setinitdirs(self)
    char_set = string.ascii_uppercase + string.digits
    if os.path.isdir(image) or os.path.isfile(image):
        if os.path.isdir(image):
            # MIRIAD mask (a directory): export it to a randomly named FITS
            # file inside a temporary directory first.
            temp_string = ''.join(random.sample(char_set * 8, 8))
            fits = lib.miriad('fits')
            fits.op = 'xyout'
            fits.in_ = image
            with tempfile.TemporaryDirectory() as tempdir:
                fits.out = tempdir + '/' + temp_string + '.fits'
                fits.go()
                mask_data = pyfits.open(tempdir + '/' + temp_string + '.fits')
            # NOTE(review): the temporary directory is deleted when the
            # with-block exits, but pyfits opens files lazily -- confirm the
            # pixel data is still readable below, or move the statistics
            # inside the with-block.
        elif os.path.isfile(image):
            mask_data = pyfits.open(image)
        else:
            error = 'Image format not supported. Only MIRIAD and FITS formats are supported!'
            logger.error(error)
            raise ApercalException(error)
        data = mask_data[0].data
        maskstats = np.full(2, np.nan)
        # Masked pixels are the non-NaN values of the exported image
        maskstats[0] = np.count_nonzero(~np.isnan(data))
        # Fraction of the full (size x size) image covered by the mask
        maskstats[1] = maskstats[0] / (size**2)
        mask_data.close()
    else:
        error = 'Image does not seem to exist!'
        logger.error(error)
        raise ApercalException(error)
    return maskstats
def getmodelstats(self, image):
    """
    Subroutine to calculate the number of clean components and their flux

    image (string): The absolute path to the image file.
    returns (numpy array): The number of pixels with clean components and their summed flux in Jy
    """
    setinit.setinitdirs(self)
    char_set = string.ascii_uppercase + string.digits
    if os.path.isdir(image) or os.path.isfile(image):
        if os.path.isdir(image):
            # MIRIAD model (a directory): export it to a randomly named FITS
            # file inside a temporary directory first.
            temp_string = ''.join(random.sample(char_set * 8, 8))
            fits = lib.miriad('fits')
            fits.op = 'xyout'
            fits.in_ = image
            with tempfile.TemporaryDirectory() as tempdir:
                fits.out = tempdir + '/' + temp_string + '.fits'
                fits.go()
                model_data = pyfits.open(tempdir + '/' + temp_string + '.fits')
            # NOTE(review): the temporary directory is deleted when the
            # with-block exits, but pyfits opens files lazily -- confirm the
            # pixel data is still readable below, or move the statistics
            # inside the with-block.
        elif os.path.isfile(image):
            model_data = pyfits.open(image)
        else:
            error = 'Image format not supported. Only MIRIAD and FITS formats are supported!'
            logger.error(error)
            raise ApercalException(error)
        # Drop the degenerate second axis -- presumably Stokes; TODO confirm
        data = model_data[0].data[:, 0, :, :]
        modelstats = np.full(2, np.nan)
        modelstats[0] = np.count_nonzero(data)  # pixels holding clean components
        modelstats[1] = np.sum(data)  # summed clean-component flux in Jy
        model_data.close()
    else:
        error = 'Image does not seem to exist!'
        logger.error(error)
        raise ApercalException(error)
    return modelstats
def ms2miriad(self):
    """
    Converts the data from MS to MIRIAD format via UVFITS using drivecasa.

    Does it for the flux calibrator, polarisation calibrator, and target field
    independently. Progress flags are read from and written back to the
    parameter file so the step can be resumed. Raises ApercalException when
    cross-calibration did not finish or its solutions were not transferred.
    """
    subs_setinit.setinitdirs(self)
    ccalbeam = 'ccal_B' + str(self.beam).zfill(2)
    cbeam = 'convert_B' + str(self.beam).zfill(2)

    # Read the parameters from crosscal and check before doing anything.
    # Status of the solution transfer for the target, flux calibrator and
    # polarisation calibrator.
    ccal_targetbeams_transfer = get_param_def(
        self, ccalbeam + '_targetbeams_transfer', False)
    ccal_calibration_calibrator_finished = get_param_def(
        self, ccalbeam + '_calibration_calibrator_finished', False)

    if not ccal_calibration_calibrator_finished:
        error = "Beam {}: Will not convert files to miriad format because cross-calibration failed.".format(
            str(self.beam).zfill(2))
        logger.error(error)
        raise ApercalException(error)
    elif not ccal_targetbeams_transfer:
        error = "Beam {}: Will not convert files to miriad format because cross-calibration solutions were not successfully applied to target.".format(
            str(self.beam).zfill(2))
        logger.error(error)
        raise ApercalException(error)

    # Create the parameters for the parameter file for converting from MS to
    # UVFITS format (each defaults to False when not yet in the file).

    # Flux calibrator MS dataset available?
    convertfluxcalmsavailable = get_param_def(
        self, cbeam + '_fluxcal_MSavailable', False)
    # Polarised calibrator MS dataset available?
    convertpolcalmsavailable = get_param_def(
        self, cbeam + '_polcal_MSavailable', False)
    # Target beam MS dataset available?
    converttargetbeamsmsavailable = get_param_def(
        self, cbeam + '_targetbeams_MSavailable', False)
    # Flux calibrator MS dataset converted to UVFITS?
    convertfluxcalms2uvfits = get_param_def(
        self, cbeam + '_fluxcal_MS2UVFITS', False)
    # Polarised calibrator MS dataset converted to UVFITS?
    convertpolcalms2uvfits = get_param_def(
        self, cbeam + '_polcal_MS2UVFITS', False)
    # Target beam MS dataset converted to UVFITS?
    converttargetbeamsms2uvfits = get_param_def(
        self, cbeam + '_targetbeams_MS2UVFITS', False)
    # Flux calibrator UVFITS dataset available?
    convertfluxcaluvfitsavailable = get_param_def(
        self, cbeam + '_fluxcal_UVFITSavailable', False)
    # Polarised calibrator UVFITS dataset available?
    convertpolcaluvfitsavailable = get_param_def(
        self, cbeam + '_polcal_UVFITSavailable', False)
    # Target beam UVFITS dataset available?
    converttargetbeamsuvfitsavailable = get_param_def(
        self, cbeam + '_targetbeams_UVFITSavailable', False)
    # Flux calibrator UVFITS dataset converted to MIRIAD?
    convertfluxcaluvfits2miriad = get_param_def(
        self, cbeam + '_fluxcal_UVFITS2MIRIAD', False)
    # Polarised calibrator UVFITS dataset converted to MIRIAD?
    convertpolcaluvfits2miriad = get_param_def(
        self, cbeam + '_polcal_UVFITS2MIRIAD', False)
    # Target beam UVFITS dataset converted to MIRIAD?
    converttargetbeamsuvfits2miriad = get_param_def(
        self, cbeam + '_targetbeams_UVFITS2MIRIAD', False)

    # Check which datasets are available in MS format
    if self.fluxcal != '':
        convertfluxcalmsavailable = path.isdir(self.get_fluxcal_path())
    else:
        logger.warning('Beam ' + self.beam + ': Flux calibrator dataset not specified. Cannot convert flux calibrator!')
    if self.polcal != '':
        convertpolcalmsavailable = path.isdir(self.get_polcal_path())
    else:
        logger.warning('Beam ' + self.beam + ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
    if self.target != '':
        converttargetbeamsmsavailable = path.isdir(self.get_target_path())
    else:
        logger.warning('Beam ' + self.beam + ': Target beam dataset not specified. Cannot convert target beams!')

    # Save the derived parameters for the availability to the parameter file
    subs_param.add_param(self, cbeam + '_fluxcal_MSavailable', convertfluxcalmsavailable)
    subs_param.add_param(self, cbeam + '_polcal_MSavailable', convertpolcalmsavailable)
    subs_param.add_param(self, cbeam + '_targetbeams_MSavailable', converttargetbeamsmsavailable)

    # Convert the flux calibrator from MS to UVFITS
    if self.convert_fluxcal:
        if self.fluxcal != '':
            if not convertfluxcaluvfits2miriad:
                if convertfluxcalmsavailable:
                    logger.debug('Beam ' + self.beam + ': Converting flux calibrator dataset from MS to UVFITS format.')
                    subs_managefiles.director(self, 'mk', self.get_crosscalsubdir_path(), verbose=False)
                    fluxcal_ms = self.get_fluxcal_path()
                    # convert only if corrected data column exists
                    if subs_msutils.has_correcteddata(fluxcal_ms):
                        datacolumn = "corrected"
                        fluxcal_fits = mspath_to_fitspath(self.get_crosscalsubdir_path(), fluxcal_ms)
                        fc_convert = exportuvfits_cmd.format(
                            vis=self.get_fluxcal_path(), fits=fluxcal_fits, datacolumn=datacolumn)
                        lib.run_casa([fc_convert], timeout=3600)
                        if path.isfile(fluxcal_fits):
                            convertfluxcalms2uvfits = True
                            logger.info('Beam ' + self.beam + ': Converted flux calibrator dataset from MS to UVFITS format!')
                        else:
                            convertfluxcalms2uvfits = False
                            logger.warning('Beam ' + self.beam + ': Could not convert flux calibrator dataset {} '
                                           'from MS to UVFITS format!'.format(fluxcal_fits))
                    else:
                        logger.warning('Beam ' + self.beam + ': Flux calibrator does not have a corrected_data column! Not '
                                       'converting flux calibrator dataset!')
                else:
                    logger.warning('Beam ' + self.beam + ': Flux calibrator dataset {} not available!'.format(self.get_fluxcal_path()))
            else:
                logger.info('Beam ' + self.beam + ': Flux calibrator dataset was already converted from MS to UVFITS format')
        else:
            logger.warning('Beam ' + self.beam + ': Flux calibrator dataset not specified. Cannot convert flux calibrator!')
    else:
        logger.warning('Beam ' + self.beam + ': Not converting flux calibrator dataset!')

    # Convert the polarised calibrator from MS to UVFITS
    if self.convert_polcal:
        if self.polcal != '':
            if not convertpolcaluvfits2miriad:
                if convertpolcalmsavailable:
                    logger.debug('Beam ' + self.beam + ': Converting polarised calibrator dataset from MS to UVFITS format.')
                    subs_managefiles.director(self, 'mk', self.get_crosscalsubdir_path(), verbose=False)
                    polcal_ms = self.get_polcal_path()
                    # convert only if corrected data column exists
                    if subs_msutils.has_correcteddata(polcal_ms):
                        datacolumn = "corrected"
                        polcal_fits = mspath_to_fitspath(self.get_crosscalsubdir_path(), polcal_ms)
                        pc_convert = exportuvfits_cmd.format(
                            vis=polcal_ms, fits=polcal_fits, datacolumn=datacolumn)
                        lib.run_casa([pc_convert], timeout=3600)
                        if path.isfile(polcal_fits):
                            convertpolcalms2uvfits = True
                            logger.info('Beam ' + self.beam + ': Converted polarised calibrator dataset from MS to UVFITS format!')
                        else:
                            convertpolcalms2uvfits = False
                            logger.warning('Beam ' + self.beam + ': Could not convert polarised calibrator dataset from MS to UVFITS format!')
                    else:
                        logger.warning('Beam ' + self.beam + ': Polarised calibrator does not have a corrected_data column! Not '
                                       'converting polarised calibrator dataset!')
                else:
                    logger.warning('Beam ' + self.beam + ': Polarised calibrator dataset not available!')
            else:
                logger.info('Beam ' + self.beam + ': Polarised calibrator dataset was already converted from MS to UVFITS format')
        else:
            logger.warning('Beam ' + self.beam + ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
    else:
        logger.warning('Beam ' + self.beam + ': Not converting polarised calibrator dataset!')

    # Convert the target beams from MS to UVFITS
    if self.convert_target:
        if self.target != '':
            logger.info('Beam ' + self.beam + ': Converting target beam dataset from MS to UVFITS format.')
            if not converttargetbeamsuvfits2miriad:
                if converttargetbeamsmsavailable:
                    subs_managefiles.director(self, 'mk', self.get_crosscalsubdir_path(), verbose=False)
                    target_ms = self.get_target_path()
                    target_fits = mspath_to_fitspath(self.get_crosscalsubdir_path(), target_ms)
                    # only convert if corrected data column exists
                    if subs_msutils.has_correcteddata(target_ms):
                        datacolumn = "corrected"
                        tg_convert = exportuvfits_cmd.format(
                            vis=target_ms, fits=target_fits, datacolumn=datacolumn)
                        lib.run_casa([tg_convert], timeout=10000)
                        if path.isfile(target_fits):
                            converttargetbeamsms2uvfits = True
                            logger.debug('Beam ' + self.beam + ': Converted dataset of target beam from MS to UVFITS format!')
                        else:
                            converttargetbeamsms2uvfits = False
                            logger.warning('Beam ' + self.beam + ': Could not convert dataset for target beam from MS to UVFITS format!')
                    else:
                        logger.warning('Beam ' + self.beam + ': Target beam dataset does not have a corrected_data column! Not '
                                       'converting target beam dataset!')
                else:
                    logger.warning('Beam ' + self.beam + ': Target beam dataset not available!')
            else:
                logger.info('Beam ' + self.beam + ': Target beam dataset was already '
                            'converted from MS to UVFITS format')
        else:
            logger.warning('Beam ' + self.beam + ': Target beam dataset not specified. Cannot convert target beam dataset!')
    else:
        logger.warning('Beam ' + self.beam + ': Not converting target beam dataset!')

    # Save the derived parameters for the MS to UVFITS conversion to the parameter file
    subs_param.add_param(self, cbeam + '_fluxcal_MS2UVFITS', convertfluxcalms2uvfits)
    subs_param.add_param(self, cbeam + '_polcal_MS2UVFITS', convertpolcalms2uvfits)
    subs_param.add_param(self, cbeam + '_targetbeams_MS2UVFITS', converttargetbeamsms2uvfits)

    # Check which datasets are available in UVFITS format
    if self.fluxcal != '':
        crosscal_fluxcal = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.fluxcal)
        convertfluxcaluvfitsavailable = path.isfile(crosscal_fluxcal)
    else:
        logger.warning('Beam ' + self.beam + ': Flux calibrator dataset not specified. Cannot convert flux calibrator!')
    if self.polcal != '':
        crosscal_polcal = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.polcal)
        convertpolcaluvfitsavailable = path.isfile(crosscal_polcal)
    else:
        logger.warning('Beam ' + self.beam + ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
    if self.target != '':
        crosscal_target = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.target)
        converttargetbeamsuvfitsavailable = path.isfile(crosscal_target)
    else:
        logger.warning('Beam ' + self.beam + ': Target beam dataset not specified. Cannot convert target beam!')

    # Save the derived parameters for the availability to the parameter file
    subs_param.add_param(self, cbeam + '_fluxcal_UVFITSavailable', convertfluxcaluvfitsavailable)
    subs_param.add_param(self, cbeam + '_polcal_UVFITSavailable', convertpolcaluvfitsavailable)
    subs_param.add_param(self, cbeam + '_targetbeams_UVFITSavailable', converttargetbeamsuvfitsavailable)

    # Convert the available UVFITS-datasets to MIRIAD format
    # Convert the flux calibrator
    if self.convert_fluxcal:
        if self.fluxcal != '':
            if not convertfluxcaluvfits2miriad:
                if convertfluxcaluvfitsavailable:
                    logger.debug('Beam ' + self.beam + ': Converting flux calibrator dataset from UVFITS to MIRIAD format.')
                    subs_managefiles.director(self, 'ch', self.get_crosscalsubdir_path(), verbose=False)
                    fits = lib.miriad('fits')
                    fits.op = 'uvin'
                    fits.in_ = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.fluxcal)
                    fits.out = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.fluxcal, ext='mir')
                    fits.go()
                    if path.isdir(fits.out):
                        convertfluxcaluvfits2miriad = True
                        logger.info('Beam ' + self.beam + ': Converted flux calibrator dataset from UVFITS to MIRIAD format!')
                    else:
                        convertfluxcaluvfits2miriad = False
                        logger.warning('Beam ' + self.beam + ': Could not convert flux calibrator dataset {} from UVFITS to '
                                       'MIRIAD format!'.format(fits.out))
                else:
                    logger.warning('Beam ' + self.beam + ': Flux calibrator dataset not available!')
            else:
                logger.info('Beam ' + self.beam + ': Flux calibrator dataset was already converted from UVFITS to MIRIAD format')
        else:
            logger.warning('Beam ' + self.beam + ': Flux calibrator dataset not specified. Cannot convert flux calibrator!')
    else:
        logger.warning('Beam ' + self.beam + ': Not converting flux calibrator dataset!')

    # Convert the polarised calibrator
    if self.convert_polcal:
        if self.polcal != '':
            if not convertpolcaluvfits2miriad:
                if convertpolcaluvfitsavailable:
                    logger.debug('Beam ' + self.beam + ': Converting polarised calibrator dataset from UVFITS to MIRIAD format.')
                    subs_managefiles.director(self, 'ch', self.get_crosscalsubdir_path(), verbose=False)
                    fits = lib.miriad('fits')
                    fits.op = 'uvin'
                    fits.in_ = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.polcal)
                    fits.out = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.polcal, ext='mir')
                    fits.go()
                    if path.isdir(fits.out):
                        convertpolcaluvfits2miriad = True
                        logger.info('Beam ' + self.beam + ': Converted polarised calibrator dataset from UVFITS to MIRIAD format!')
                    else:
                        convertpolcaluvfits2miriad = False
                        logger.warning('Beam ' + self.beam + ': Could not convert polarised calibrator dataset from UVFITS to MIRIAD format!')
                else:
                    logger.warning('Beam ' + self.beam + ': Polarised calibrator dataset not available!')
            else:
                logger.info('Beam ' + self.beam + ': Polarised calibrator dataset was already converted from UVFITS to MIRIAD format')
        else:
            logger.warning('Beam ' + self.beam + ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
    else:
        logger.warning('Beam ' + self.beam + ': Not converting polarised calibrator dataset!')

    # Convert the target beams
    if self.convert_target:
        if self.target != '':
            logger.info('Beam ' + self.beam + ': Converting target beam dataset from UVFITS to MIRIAD format.')
            if not converttargetbeamsuvfits2miriad:
                if converttargetbeamsuvfitsavailable:
                    subs_managefiles.director(self, 'ch', self.get_crosscalsubdir_path(), verbose=False)
                    fits = lib.miriad('fits')
                    fits.op = 'uvin'
                    fits.in_ = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.target)
                    fits.out = mspath_to_fitspath(self.get_crosscalsubdir_path(), self.target, ext='mir')
                    fits.go()
                    if path.isdir(fits.out):
                        converttargetbeamsuvfits2miriad = True
                        logger.debug('Beam ' + self.beam + ': Converted target beam dataset from '
                                     'UVFITS to MIRIAD format!')
                    else:
                        converttargetbeamsuvfits2miriad = False
                        logger.warning('Beam ' + self.beam + ': Could not convert target beam dataset '
                                       '{} from UVFITS to MIRIAD format!'.format(fits.out))
                else:
                    logger.warning('Beam ' + self.beam + ': Target beam dataset not available!')
            else:
                logger.info('Beam ' + self.beam + ': Target beam dataset was already converted '
                            'from MS to UVFITS format')
        else:
            logger.warning('Beam ' + self.beam + ': Target beam dataset not specified. Cannot convert target beam datasets!')
    else:
        logger.warning('Beam ' + self.beam + ': Not converting target beam dataset!')

    # Save the derived parameters for the UVFITS to MIRIAD conversion to the parameter file
    subs_param.add_param(self, cbeam + '_fluxcal_UVFITS2MIRIAD', convertfluxcaluvfits2miriad)
    subs_param.add_param(self, cbeam + '_polcal_UVFITS2MIRIAD', convertpolcaluvfits2miriad)
    subs_param.add_param(self, cbeam + '_targetbeams_UVFITS2MIRIAD', converttargetbeamsuvfits2miriad)

    # Average down the target measurement set if wanted
    if self.convert_averagems and self.subdirification:
        logger.info('Beam ' + self.beam + ': Averaging down target measurement set')
        average_cmd = 'mstransform(vis="{vis}", outputvis="{outputvis}", chanaverage=True, chanbin=64)'
        vis = self.get_target_path()
        outputvis = vis.replace(".MS", "_avg.MS")
        lib.run_casa([average_cmd.format(vis=vis, outputvis=outputvis)], timeout=10000)

    # Remove measurement sets if wanted
    if self.convert_removems and self.subdirification:
        logger.info('Beam ' + self.beam + ': Removing measurement sets')
        vis = self.get_target_path()
        if path.exists(vis):
            subs_managefiles.director(self, 'rm', vis)

    # Remove the UVFITS files if wanted
    if self.convert_removeuvfits and self.subdirification:
        logger.info('Beam ' + self.beam + ': Removing all UVFITS files')
        if self.fluxcal != '' and path.exists(
                mspath_to_fitspath(self.get_crosscalsubdir_path(), self.fluxcal)) and convertfluxcalms2uvfits:
            subs_managefiles.director(self, 'rm', mspath_to_fitspath(self.get_crosscalsubdir_path(), self.fluxcal))
            logger.info('Beam ' + self.beam + ': Removed fluxcal UVFITS files')
        else:
            logger.warning('Beam ' + self.beam + ': No fluxcal UVFITS file available for removing')
        if self.polcal != '' and path.exists(
                mspath_to_fitspath(self.get_crosscalsubdir_path(), self.polcal)) and convertpolcalms2uvfits:
            subs_managefiles.director(self, 'rm', mspath_to_fitspath(self.get_crosscalsubdir_path(), self.polcal))
            logger.info('Beam ' + self.beam + ': Removed polcal UVFITS files')
        else:
            logger.warning('Beam ' + self.beam + ': No polcal UVFITS file available for removing')
        # Bug fix: the original gated target removal on convertfluxcalms2uvfits
        # (copy-paste from the fluxcal branch); use the target conversion flag.
        if self.target != '' and path.exists(
                mspath_to_fitspath(self.get_crosscalsubdir_path(), self.target)) and converttargetbeamsms2uvfits:
            subs_managefiles.director(self, 'rm', mspath_to_fitspath(self.get_crosscalsubdir_path(), self.target))
            logger.info('Beam ' + self.beam + ': Removed target UVFITS files')
        else:
            logger.warning('Beam ' + self.beam + ': No target UVFITS file available for removing')
def __init__(self, file_=None, **kwargs):
    """
    Initialise the module: load the configuration (built-in defaults plus an
    optional config file) and set up the directory attributes.

    file_ (string): optional path to a configuration file; None keeps defaults
    """
    config = lib.load_config(self, file_)
    self.default = config
    subs_setinit.setinitdirs(self)
def copyobs(self):
    """
    Prepares the directory structure and copies over the needed data from ALTA.

    Checks for data in the current working directories and copies only missing
    data. For the flux calibrator, the polarisation calibrator and each
    requested target beam it tracks four booleans (requested / on disk /
    on ALTA / copied) plus a rejection-reason string, and writes all of them
    back to the parameter file at the end of each section so the step can be
    resumed and inspected later.
    """
    subs_setinit.setinitdirs(self)

    # Check if the parameter is already in the parameter file and load it,
    # otherwise create the needed arrays with their default values
    if not os.path.isdir(self.basedir):
        os.mkdir(self.basedir)

    # Is the fluxcal data requested?
    preparefluxcalrequested = get_param_def(self, 'prepare_fluxcal_requested', False)
    # Is the polcal data requested?
    preparepolcalrequested = get_param_def(self, 'prepare_polcal_requested', False)
    # Is the target data requested? One entry per beam
    preparetargetbeamsrequested = get_param_def(
        self, 'prepare_targetbeams_requested', np.full(self.NBEAMS, False))
    # Is the fluxcal data already on disk?
    preparefluxcaldiskstatus = get_param_def(self, 'prepare_fluxcal_diskstatus', False)
    # Is the polcal data already on disk?
    preparepolcaldiskstatus = get_param_def(self, 'prepare_polcal_diskstatus', False)
    # Is the target data already on disk? One entry per beam
    preparetargetbeamsdiskstatus = get_param_def(
        self, 'prepare_targetbeams_diskstatus', np.full(self.NBEAMS, False))
    # Is the fluxcal data on ALTA?
    preparefluxcalaltastatus = get_param_def(self, 'prepare_fluxcal_altastatus', False)
    # Is the polcal data on ALTA?
    preparepolcalaltastatus = get_param_def(self, 'prepare_polcal_altastatus', False)
    # Is the target data on ALTA? One entry per beam
    preparetargetbeamsaltastatus = get_param_def(
        self, 'prepare_targetbeams_altastatus', np.full(self.NBEAMS, False))
    # Is the fluxcal data copied?
    preparefluxcalcopystatus = get_param_def(self, 'prepare_fluxcal_copystatus', False)
    # Is the polcal data copied?
    preparepolcalcopystatus = get_param_def(self, 'prepare_polcal_copystatus', False)
    # Is the target data copied? One entry per beam
    preparetargetbeamscopystatus = get_param_def(
        self, 'prepare_targetbeams_copystatus', np.full(self.NBEAMS, False))
    # Reason for flux calibrator dataset not being there
    preparefluxcalrejreason = get_param_def(self, 'prepare_fluxcal_rejreason',
                                            np.full(1, '', dtype='U50'))
    # Reason for polarisation calibrator dataset not being there
    preparepolcalrejreason = get_param_def(self, 'prepare_polcal_rejreason',
                                           np.full(1, '', dtype='U50'))
    # Reason for a beam dataset not being there
    preparetargetbeamsrejreason = get_param_def(
        self, 'prepare_targetbeams_rejreason', np.full(self.NBEAMS, '', dtype='U50'))

    ################################################
    # Start the preparation of the flux calibrator #
    ################################################

    if self.fluxcal != '':  # If the flux calibrator is requested
        preparefluxcalrejreason[0] = ''  # Empty the comment string
        preparefluxcalrequested = True
        fluxcal = self.get_fluxcal_path()
        preparefluxcaldiskstatus = os.path.isdir(fluxcal)
        if preparefluxcaldiskstatus:
            logger.debug('Flux calibrator dataset found on disk ({})'.format(fluxcal))
        else:
            logger.debug('Flux calibrator dataset not on disk ({})'.format(fluxcal))
        if hasattr(self, 'prepare_bypass_alta') and self.prepare_bypass_alta:
            logger.debug("Skipping fetching dataset from ALTA")
        else:
            # Check if the flux calibrator dataset is available on ALTA
            preparefluxcalaltastatus = getstatus_alta(self.prepare_date,
                                                      self.prepare_obsnum_fluxcal,
                                                      self.beam)
            if preparefluxcalaltastatus:
                logger.debug('Flux calibrator dataset available on ALTA')
            else:
                logger.warning('Flux calibrator dataset not available on ALTA')
            # Copy the flux calibrator data from ALTA if needed
            if preparefluxcaldiskstatus and preparefluxcalaltastatus:
                preparefluxcalcopystatus = True
            elif preparefluxcaldiskstatus and not preparefluxcalaltastatus:
                # Data on disk is good enough; only warn about the ALTA gap
                preparefluxcalcopystatus = True
                logger.warning('Flux calibrator data available on disk, but not in ALTA!')
            elif not preparefluxcaldiskstatus and preparefluxcalaltastatus:
                subs_managefiles.director(self, 'mk',
                                          self.basedir + self.beam + '/' + self.rawsubdir,
                                          verbose=False)
                # NOTE(review): beam argument is hard-coded to 0 here while the
                # status check above uses self.beam — confirm this is intended
                # (calibrator scans may always live in beam 0).
                getdata_alta(int(self.prepare_date),
                             int(self.prepare_obsnum_fluxcal), 0,
                             targetdir=self.rawdir + '/' + self.fluxcal)
                if os.path.isdir(self.get_fluxcal_path()):
                    preparefluxcalcopystatus = True
                    logger.debug('Flux calibrator dataset successfully copied from ALTA')
                else:
                    preparefluxcalcopystatus = False
                    preparefluxcalrejreason[0] = 'Copy from ALTA not successful'
                    logger.error('Flux calibrator dataset available on ALTA, but NOT successfully copied!')
                if self.prepare_flip_ra:
                    flip_ra(self.rawdir + '/' + self.fluxcal, logger=logger)
            elif not preparefluxcaldiskstatus and not preparefluxcalaltastatus:
                preparefluxcalcopystatus = False
                preparefluxcalrejreason[0] = 'Dataset not on ALTA or disk'
                logger.error('Flux calibrator dataset not available on disk nor in ALTA! The next steps will not work!')
    else:  # In case the flux calibrator is not specified meaning the parameter is empty.
        preparefluxcalrequested = False
        preparefluxcaldiskstatus = False
        preparefluxcalaltastatus = False
        preparefluxcalcopystatus = False
        preparefluxcalrejreason[0] = 'Dataset not specified'
        logger.error('No flux calibrator dataset specified. The next steps will not work!')

    # Save the derived parameters for the fluxcal to the parameter file
    subs_param.add_param(self, 'prepare_fluxcal_requested', preparefluxcalrequested)
    subs_param.add_param(self, 'prepare_fluxcal_diskstatus', preparefluxcaldiskstatus)
    subs_param.add_param(self, 'prepare_fluxcal_altastatus', preparefluxcalaltastatus)
    subs_param.add_param(self, 'prepare_fluxcal_copystatus', preparefluxcalcopystatus)
    subs_param.add_param(self, 'prepare_fluxcal_rejreason', preparefluxcalrejreason)

    ########################################################
    # Start the preparation of the polarisation calibrator #
    ########################################################

    if self.polcal != '':  # If the polarised calibrator is requested
        preparepolcalrejreason[0] = ''  # Empty the comment string
        preparepolcalrequested = True
        preparepolcaldiskstatus = os.path.isdir(self.get_polcal_path())
        if preparepolcaldiskstatus:
            logger.debug('Polarisation calibrator dataset found on disk')
        else:
            logger.debug('Polarisation calibrator dataset not on disk')
        if hasattr(self, 'prepare_bypass_alta') and self.prepare_bypass_alta:
            logger.debug("Skipping fetching dataset from ALTA")
        else:
            # Check if the polarisation calibrator dataset is available on ALTA
            preparepolcalaltastatus = getstatus_alta(self.prepare_date,
                                                     self.prepare_obsnum_polcal,
                                                     self.beam)
            if preparepolcalaltastatus:
                logger.debug('Polarisation calibrator dataset available on ALTA')
            else:
                logger.warning('Polarisation calibrator dataset not available on ALTA')
            # Copy the polarisation calibrator data from ALTA if needed
            if preparepolcaldiskstatus and preparepolcalaltastatus:
                preparepolcalcopystatus = True
            elif preparepolcaldiskstatus and not preparepolcalaltastatus:
                # Data on disk is good enough; only warn about the ALTA gap
                preparepolcalcopystatus = True
                logger.warning('Polarisation calibrator data available on disk, but not in ALTA!')
            elif not preparepolcaldiskstatus and preparepolcalaltastatus:
                subs_managefiles.director(self, 'mk',
                                          self.basedir + self.beam + '/' + self.rawsubdir,
                                          verbose=False)
                # NOTE(review): beam argument hard-coded to 0, as in the
                # fluxcal branch — confirm intended.
                getdata_alta(int(self.prepare_date),
                             int(self.prepare_obsnum_polcal), 0,
                             targetdir=self.rawdir + '/' + self.polcal)
                if os.path.isdir(self.get_polcal_path()):
                    preparepolcalcopystatus = True
                    logger.debug('Polarisation calibrator dataset successfully copied from ALTA')
                else:
                    preparepolcalcopystatus = False
                    preparepolcalrejreason[0] = 'Copy from ALTA not successful'
                    logger.error('Polarisation calibrator dataset available on ALTA, but NOT successfully copied!')
                if self.prepare_flip_ra:
                    flip_ra(self.rawdir + '/' + self.polcal, logger=logger)
            elif not preparepolcaldiskstatus and not preparepolcalaltastatus:
                # Only a warning: polarisation calibration is optional
                preparepolcalcopystatus = False
                preparepolcalrejreason[0] = 'Dataset not on ALTA or disk'
                logger.warning('Polarisation calibrator dataset not available on disk nor in ALTA! Polarisation calibration will not work!')
    else:  # In case the polarisation calibrator is not specified meaning the parameter is empty.
        preparepolcalrequested = False
        preparepolcaldiskstatus = False
        preparepolcalaltastatus = False
        preparepolcalcopystatus = False
        preparepolcalrejreason[0] = 'Dataset not specified'
        logger.warning('No polarisation calibrator dataset specified. Polarisation calibration will not work!')

    # Save the derived parameters for the polcal to the parameter file
    subs_param.add_param(self, 'prepare_polcal_requested', preparepolcalrequested)
    subs_param.add_param(self, 'prepare_polcal_diskstatus', preparepolcaldiskstatus)
    subs_param.add_param(self, 'prepare_polcal_altastatus', preparepolcalaltastatus)
    subs_param.add_param(self, 'prepare_polcal_copystatus', preparepolcalcopystatus)
    subs_param.add_param(self, 'prepare_polcal_rejreason', preparepolcalrejreason)

    ################################################
    # Start the preparation of the target datasets #
    ################################################

    if self.prepare_obsnum_target and self.prepare_obsnum_target != '':
        if self.prepare_target_beams == 'all':  # if all beams are requested
            reqbeams_int = range(self.NBEAMS)  # create a list of numbers for the beams
            reqbeams = [str(b).zfill(2) for b in reqbeams_int]  # Add the leading zeros
        else:  # if only certain beams are requested
            reqbeams = self.prepare_target_beams.split(",")
            reqbeams_int = [int(b) for b in reqbeams]
            reqbeams = [str(b).zfill(2) for b in reqbeams_int]  # Add leading zeros
        for beam in reqbeams:
            preparetargetbeamsrequested[int(beam)] = True
        for b in reqbeams_int:
            # Check which target beams are already on disk
            preparetargetbeamsrejreason[int(b)] = ''  # Empty the comment string
            preparetargetbeamsdiskstatus[b] = os.path.isdir(
                self.basedir + str(b).zfill(2) + '/' + self.rawsubdir + '/' + self.target)
            if preparetargetbeamsdiskstatus[b]:
                logger.debug('Target dataset for beam ' + str(b).zfill(2) + ' found on disk')
            else:
                logger.debug('Target dataset for beam ' + str(b).zfill(2) + ' NOT found on disk')
            if hasattr(self, 'prepare_bypass_alta') and self.prepare_bypass_alta:
                logger.debug("Skipping fetching dataset from ALTA")
            else:
                # Check which target datasets are available on ALTA
                preparetargetbeamsaltastatus[b] = getstatus_alta(
                    self.prepare_date, self.prepare_obsnum_target, str(b).zfill(2))
                if preparetargetbeamsaltastatus[b]:
                    logger.debug('Target dataset for beam ' + str(b).zfill(2) + ' available on ALTA')
                else:
                    logger.debug('Target dataset for beam ' + str(b).zfill(2) + ' NOT available on ALTA')
        if hasattr(self, 'prepare_bypass_alta') and self.prepare_bypass_alta:
            logger.debug("Skipping fetching dataset from ALTA")
        else:
            # Set the copystatus of the beams and copy beams which are
            # requested but not on disk
            for c in reqbeams_int:
                if preparetargetbeamsdiskstatus[c] and preparetargetbeamsaltastatus[c]:
                    preparetargetbeamscopystatus[c] = True
                elif preparetargetbeamsdiskstatus[c] and not preparetargetbeamsaltastatus[c]:
                    preparetargetbeamscopystatus[c] = True
                    logger.warning('Target dataset for beam ' + str(c).zfill(2) +
                                   ' available on disk, but not in ALTA!')
                elif not preparetargetbeamsdiskstatus[c] and preparetargetbeamsaltastatus[c] and str(c).zfill(2) in reqbeams:
                    # if target dataset is requested, but not on disk
                    subs_managefiles.director(self, 'mk',
                                              self.basedir + str(c).zfill(2) + '/' + self.rawsubdir,
                                              verbose=False)
                    getdata_alta(int(self.prepare_date),
                                 int(self.prepare_obsnum_target),
                                 int(str(c).zfill(2)),
                                 targetdir=self.basedir + str(c).zfill(2) + '/' +
                                 self.rawsubdir + '/' + self.target)
                    # Check if copy was successful
                    if os.path.isdir(self.basedir + str(c).zfill(2) + '/' +
                                     self.rawsubdir + '/' + self.target):
                        preparetargetbeamscopystatus[c] = True
                    else:
                        preparetargetbeamscopystatus[c] = False
                        preparetargetbeamsrejreason[int(c)] = 'Copy from ALTA not successful'
                        logger.error('Target beam dataset available on ALTA, but NOT successfully copied!')
                    if self.prepare_flip_ra:
                        flip_ra(self.basedir + str(c).zfill(2) + '/' +
                                self.rawsubdir + '/' + self.target, logger=logger)
                elif not preparetargetbeamsdiskstatus[c] and not preparetargetbeamsaltastatus[c] and str(c).zfill(2) in reqbeams:
                    preparetargetbeamscopystatus[c] = False
                    preparetargetbeamsrejreason[int(c)] = 'Dataset not on ALTA or disk'
                    logger.error('Target beam dataset not available on disk nor in ALTA! Requested beam cannot be processed!')
    else:  # If no target dataset is requested meaning the parameter is empty
        logger.warning('No target datasets specified!')
        for b in range(self.NBEAMS):
            preparetargetbeamsrequested[b] = False
            preparetargetbeamsdiskstatus[b] = False
            preparetargetbeamsaltastatus[b] = False
            preparetargetbeamscopystatus[b] = False
            preparetargetbeamsrejreason[int(b)] = 'Dataset not specified'

    # Save the derived parameters for the target beams to the parameter file
    subs_param.add_param(self, 'prepare_targetbeams_requested', preparetargetbeamsrequested)
    subs_param.add_param(self, 'prepare_targetbeams_diskstatus', preparetargetbeamsdiskstatus)
    subs_param.add_param(self, 'prepare_targetbeams_altastatus', preparetargetbeamsaltastatus)
    subs_param.add_param(self, 'prepare_targetbeams_copystatus', preparetargetbeamscopystatus)
    subs_param.add_param(self, 'prepare_targetbeams_rejreason', preparetargetbeamsrejreason)
def splitdata(self):
    """
    Apply the calibrator corrections to the target data, then split it into
    frequency chunks of roughly ``selfcal_splitdata_chunkbandwidth`` and bin
    the channels to ``selfcal_splitdata_channelbandwidth`` for the
    self-calibration.

    Reads from self: selfcal_splitdata, selfcal_flagantenna, crosscaldir,
    selfcaldir, target, selfcal_splitdata_chunkbandwidth,
    selfcal_splitdata_channelbandwidth.

    Side effects: creates one averaged MIRIAD dataset per frequency chunk in
    ``selfcaldir/NN/NN.mir`` (NN = zero-padded running chunk counter).

    Raises:
        ApercalException: if no target data is found in the selfcal directory.
    """
    if not self.selfcal_splitdata:
        return
    subs_setinit.setinitdirs(self)
    subs_setinit.setdatasetnamestomiriad(self)
    subs_managefiles.director(self, 'ch', self.selfcaldir)
    logger.info(' Splitting of target data into individual frequency chunks started')
    if os.path.exists(self.selfcaldir + '/' + self.target):
        logger.info('Calibrator corrections already seem to have been applied #')
    else:
        # uvaver applies the calibrator gain/bandpass tables while copying the data
        logger.info('Applying calibrator solutions to target data before averaging #')
        uvaver = lib.miriad('uvaver')
        uvaver.vis = self.crosscaldir + '/' + self.target
        uvaver.out = self.selfcaldir + '/' + self.target
        uvaver.go()
        logger.info('Calibrator solutions to target data applied #')
    if self.selfcal_flagantenna != '':
        # Optionally flag user-specified antennas before splitting
        uvflag = lib.miriad('uvflag')
        uvflag.vis = self.selfcaldir + '/' + self.target
        uvflag.flagval = 'flag'
        uvflag.select = 'antenna(' + str(self.selfcal_flagantenna) + ')'
        uvflag.go()
    try:
        uv = aipy.miriad.UV(self.selfcaldir + '/' + self.target)
    except RuntimeError:
        raise ApercalException(' No data in your selfcal directory!')
    try:
        nsubband = len(uv['nschan'])  # Number of subbands in data
    except TypeError:
        nsubband = 1  # Only one subband in data since exception was triggered
    logger.info('Found ' + str(nsubband) + ' subband(s) in target data #')
    counter = 0  # Running counter for naming the chunks and directories
    for subband in range(nsubband):
        logger.info('Started splitting of subband ' + str(subband) + ' #')
        if nsubband == 1:
            # 'nschan'/'sdf' are scalars when only one subband is present
            numchan = uv['nschan']
            finc = np.fabs(uv['sdf'])
        else:
            numchan = uv['nschan'][subband]  # Number of channels per subband
            finc = np.fabs(uv['sdf'][subband])  # Frequency increment for each channel (GHz)
        subband_bw = numchan * finc  # Bandwidth of one subband
        subband_chunks = round(subband_bw / self.selfcal_splitdata_chunkbandwidth)
        # Round up to the closest power of 2 so the chunks grid regularly over
        # the frequency range of a subband
        subband_chunks = int(np.power(2, np.ceil(np.log(subband_chunks) / np.log(2))))
        if subband_chunks == 0:
            subband_chunks = 1
        chunkbandwidth = (numchan / subband_chunks) * finc
        logger.info('Adjusting chunk size to ' + str(
            chunkbandwidth) + ' GHz for regular gridding of the data chunks over frequency #')
        # BUGFIX: use floor division so chan_per_chunk stays an int under
        # Python 3. With true division a float leaked into the MIRIAD
        # line='channel,...' string (e.g. start=65.0) and made the bin-search
        # loop below spin forever (a fractional value never gives remainder 0).
        chan_per_chunk = numchan // subband_chunks
        for chunk in range(subband_chunks):
            logger.info(
                'Starting splitting of data chunk ' + str(chunk) + ' for subband ' + str(subband) + ' #')
            # Number of channels per frequency bin; clamp to >= 1 so the
            # modulo below cannot divide by zero when the requested channel
            # bandwidth is smaller than half a native channel.
            binchan = max(1, int(round(self.selfcal_splitdata_channelbandwidth / finc)))
            if chan_per_chunk % binchan == 0:  # Check if the frequency bin exactly fits
                logger.info('Using frequency binning of ' + str(
                    self.selfcal_splitdata_channelbandwidth) + ' for all subbands #')
            else:
                # Increase the frequency bin until it divides the chunk evenly,
                # keeping a regular grid for the chunks
                while chan_per_chunk % binchan != 0:
                    binchan = binchan + 1
                # Safety cap: the bin can never exceed the channels in a chunk
                if chan_per_chunk < binchan:
                    binchan = chan_per_chunk
                logger.info('Increasing frequency bin of data chunk ' + str(
                    chunk) + ' to keep bandwidth of chunks equal over the whole bandwidth #')
                logger.info('New frequency bin is ' + str(binchan * finc) + ' GHz #')
            nchan = int(chan_per_chunk / binchan)  # Total number of output channels per chunk
            start = 1 + chunk * chan_per_chunk  # MIRIAD channel numbering starts at 1
            width = int(binchan)
            step = int(width)
            chunkdir = self.selfcaldir + '/' + str(counter).zfill(2)
            subs_managefiles.director(self, 'mk', chunkdir)
            uvaver = lib.miriad('uvaver')
            uvaver.vis = self.selfcaldir + '/' + self.target
            uvaver.out = chunkdir + '/' + str(counter).zfill(2) + '.mir'
            uvaver.select = "'" + 'window(' + str(subband + 1) + ')' + "'"
            uvaver.line = "'" + 'channel,' + str(nchan) + ',' + str(start) + ',' + str(width) + ',' + str(
                step) + "'"
            uvaver.go()
            counter = counter + 1
            logger.info('Splitting of data chunk ' + str(chunk) + ' for subband ' + str(subband) + ' done #')
        logger.info('Splitting of data for subband ' + str(subband) + ' done #')
    logger.info(' Splitting of target data into individual frequency chunks done')