def load_directions(parset_file): """ Return directions for a run and any checkfactor-specific options """ # Read parset parset = factor.parset.parset_read(parset_file, use_log_file=False) options = parset['checkfactor'] # Figure out whether reimaging is going to be done and if so how # many images are to be made. imaging_parset = parset['imaging_specific'] reimages = len(imaging_parset['facet_cellsize_arcsec']) options['reimages'] = reimages # Load directions. First check for user-supplied directions file then for # Factor-generated file from a previous run direction_list = [] dir_parset = parset['direction_specific'] if 'directions_file' in dir_parset: directions = factor.directions.directions_read( dir_parset['directions_file'], parset['dir_working']) elif os.path.exists( os.path.join(parset['dir_working'], 'factor_directions.txt')): directions = factor.directions.directions_read( os.path.join(parset['dir_working'], 'factor_directions.txt'), parset['dir_working']) else: log.error('No directions found. Please run this tool after ' 'the directions have been defined') sys.exit(1) # Add the target to the directions list if desired target_ra = dir_parset['target_ra'] target_dec = dir_parset['target_dec'] target_radius_arcmin = dir_parset['target_radius_arcmin'] target_has_own_facet = dir_parset['target_has_own_facet'] if target_has_own_facet: if target_ra is not None and target_dec is not None and target_radius_arcmin is not None: # Make target object target = Direction('target', target_ra, target_dec, factor_working_dir=parset['dir_working']) # Add target to directions list directions.append(target) else: log.critical( 'target_has_own_facet = True, but target RA, Dec, or radius not found in parset' ) sys.exit(1) for direction in directions: has_state = direction.load_state() if has_state: direction_list.append(direction) return direction_list, options
def load_directions(parset_file): """ Return directions for a run """ # Read parset orig_dir = os.path.abspath('.') parset = factor.parset.parset_read(parset_file, use_log_file=False) os.chdir(orig_dir) # Load directions. First check for user-supplied directions file then for # Factor-generated file from a previous run direction_list = [] dir_parset = parset['direction_specific'] if 'directions_file' in dir_parset: directions = factor.directions.directions_read( dir_parset['directions_file'], parset['dir_working']) elif os.path.exists( os.path.join(parset['dir_working'], 'factor_directions.txt')): directions = factor.directions.directions_read( os.path.join(parset['dir_working'], 'factor_directions.txt'), parset['dir_working']) else: log.error('No directions found. Please run this tool after ' 'the directions have been defined') sys.exit(1) # Add the target to the directions list if desired target_ra = dir_parset['target_ra'] target_dec = dir_parset['target_dec'] target_radius_arcmin = dir_parset['target_radius_arcmin'] target_has_own_facet = dir_parset['target_has_own_facet'] if target_has_own_facet: if target_ra is not None and target_dec is not None and target_radius_arcmin is not None: # Make target object target = Direction('target', target_ra, target_dec, factor_working_dir=parset['dir_working']) # Add target to directions list directions.append(target) else: log.critical( 'target_has_own_facet = True, but target RA, Dec, or radius not found in parset' ) sys.exit(1) for direction in directions: has_state = direction.load_state() if has_state: direction_list.append(direction) return direction_list, parset
def plot_state(directions_list, trim_names=True):
    """
    Plots the facets of a run
    """
    global midRA, midDec, fig, at, selected_direction
    selected_direction = None

    # Set up coordinate system and figure
    points, midRA, midDec = factor.directions.getxy(directions_list)
    fig = plt.figure(1, figsize=(10, 9))
    if hasWCSaxes:
        wcs = factor.directions.makeWCS(midRA, midDec)
        ax = WCSAxes(fig, [0.16, 0.1, 0.8, 0.8], wcs=wcs)
        fig.add_axes(ax)
    else:
        ax = plt.gca()

    field_x = min(points[0])
    field_y = max(points[1])
    adjust_xy = True
    while adjust_xy:
        adjust_xy = False
        for xy in points:
            dist = np.sqrt((xy[0] - field_x)**2 + (xy[1] - field_y)**2)
            if dist < 10.0:
                field_x -= 1
                field_y += 1
                adjust_xy = True
                break
    field_ra, field_dec = factor.directions.xy2radec([field_x], [field_y],
                                                     refRA=midRA, refDec=midDec)
    field = Direction('field', field_ra[0], field_dec[0],
                      factor_working_dir=directions_list[0].working_dir)
    directions_list.append(field)

    ax.set_title('Overview of FACTOR run in\n{}'.format(
        directions_list[0].working_dir))

    # Plot facets
    markers = []
    for direction in directions_list:
        if direction.name != 'field':
            vertices = read_vertices(direction.vertices_file)
            RAverts = vertices[0]
            Decverts = vertices[1]
            xverts, yverts = factor.directions.radec2xy(RAverts, Decverts,
                                                        refRA=midRA, refDec=midDec)
            xyverts = [np.array([xp, yp]) for xp, yp in zip(xverts, yverts)]
            mpl_poly = Polygon(np.array(xyverts), edgecolor='#a9a9a9',
                               facecolor='#F2F2F2', clip_box=ax.bbox,
                               picker=3.0, linewidth=2)
        else:
            xverts = [field_x]
            yverts = [field_y]
            mpl_poly = Circle((field_x, field_y), radius=5.0, edgecolor='#a9a9a9',
                              facecolor='#F2F2F2', clip_box=ax.bbox,
                              picker=3.0, linewidth=2)
        mpl_poly.facet_name = direction.name
        mpl_poly.completed_ops = get_completed_ops(direction)
        mpl_poly.started_ops = get_started_ops(direction)
        mpl_poly.current_op = get_current_op(direction)
        set_patch_color(mpl_poly, direction)
        ax.add_patch(mpl_poly)

        # Add facet names
        if direction.name != 'field':
            poly_tuple = tuple([(xp, yp) for xp, yp in zip(xverts, yverts)])
            xmid = SPolygon(poly_tuple).centroid.x
            ymid = SPolygon(poly_tuple).centroid.y
        else:
            xmid = field_x
            ymid = field_y
        if trim_names:
            name = direction.name.split('_')[-1]
        else:
            name = direction.name
        marker = ax.text(xmid, ymid, name, color='k', clip_on=True,
                         clip_box=ax.bbox, ha='center', va='bottom')
        marker.set_zorder(1001)
        markers.append(marker)

    # Add info box
    at = AnchoredText("Selected direction: None", prop=dict(size=12),
                      frameon=True, loc=3)
    at.patch.set_boxstyle("round,pad=0.,rounding_size=0.2")
    at.set_zorder(1002)
    ax.add_artist(at)

    ax.relim()
    ax.autoscale()
    ax.set_aspect('equal')

    if hasWCSaxes:
        RAAxis = ax.coords['ra']
        RAAxis.set_axislabel('RA', minpad=0.75)
        RAAxis.set_major_formatter('hh:mm:ss')
        DecAxis = ax.coords['dec']
        DecAxis.set_axislabel('Dec', minpad=0.75)
        DecAxis.set_major_formatter('dd:mm:ss')
        ax.coords.grid(color='black', alpha=0.5, linestyle='solid')
    else:
        plt.xlabel("RA (arb. units)")
        plt.ylabel("Dec (arb. units)")

    # Define coordinate formatter to show RA and Dec under mouse pointer
    ax.format_coord = formatCoord

    # Show legend
    not_processed_patch = plt.Rectangle((0, 0), 1, 1, edgecolor='#a9a9a9',
                                        facecolor='#F2F2F2', linewidth=2)
    processing_patch = plt.Rectangle((0, 0), 1, 1, edgecolor='#a9a9a9',
                                     facecolor='#F2F5A9', linewidth=2)
    selfcal_ok_patch = plt.Rectangle((0, 0), 1, 1, edgecolor='#a9a9a9',
                                     facecolor='#A9F5A9', linewidth=2)
    selfcal_not_ok_patch = plt.Rectangle((0, 0), 1, 1, edgecolor='#a9a9a9',
                                         facecolor='#F5A9A9', linewidth=2)
    l = ax.legend([not_processed_patch, processing_patch, selfcal_ok_patch,
                   selfcal_not_ok_patch],
                  ['Unprocessed', 'Processing', 'Completed', 'Failed'])
    l.set_zorder(1002)

    # Add check for mouse clicks and key presses
    fig.canvas.mpl_connect('pick_event', on_pick)
    fig.canvas.mpl_connect('key_press_event', on_press)

    # Add timer to update the plot every 60 seconds
    timer = fig.canvas.new_timer(interval=60000)
    timer.add_callback(update_plot)
    timer.start()

    # Show plot
    plt.show()
    plt.close(fig)

    # Clean up any temp pyrap images
    if os.path.exists('/tmp/tempimage'):
        shutil.rmtree('/tmp/tempimage')
def run(parset_file, logging_level='info', dry_run=False, test_run=False,
        reset_directions=[], reset_operations=[]):
    """
    Processes a dataset using facet calibration

    This function runs the operations in the correct order and handles all the
    bookkeeping for the processing: e.g., which operations are needed for each
    direction (depending on success of selfcal, the order in which they were
    processed, etc.). It also handles the set up of the computing parameters and
    the generation of DDE calibrators and facets.

    Parameters
    ----------
    parset_file : str
        Filename of parset containing processing parameters
    logging_level : str, optional
        One of 'debug', 'info', 'warning' in decreasing order of verbosity
    dry_run : bool, optional
        If True, do not run pipelines. All parsets, etc. are made as normal
    test_run : bool, optional
        If True, use test settings. These settings are for testing purposes
        only and will not produce useful results
    reset_directions : list of str, optional
        List of names of directions to be reset
    reset_operations : list of str, optional
        List of operations to be reset
    """
    # Read parset
    parset = factor.parset.parset_read(parset_file)

    # Set up logger
    parset['logging_level'] = logging_level
    factor._logging.set_level(logging_level)

    # Set up clusterdesc, node info, scheduler, etc.
    scheduler = _set_up_compute_parameters(parset, dry_run)

    # Prepare vis data
    bands = _set_up_bands(parset, test_run)

    # Define directions and groups
    directions, direction_groups = _set_up_directions(parset, bands, dry_run,
                                                      test_run, reset_directions,
                                                      reset_operations)

    # Run peeling operations on outlier directions and any facets
    # for which the calibrator is to be peeled
    set_sub_data_colname = True
    peel_directions = [d for d in directions if d.is_outlier]
    peel_directions.extend([d for d in directions if d.peel_calibrator])
    if len(peel_directions) > 0:
        # Combine the nodes and cores for the peeling operation
        outlier_directions = factor.cluster.combine_nodes(peel_directions,
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))

        # Do the peeling
        for d in peel_directions:
            # Reset if needed. Note that proper reset of the subtract steps in
            # outlierpeel and facetsub is not currently supported
            d.reset_state(['outlierpeel', 'facetpeel', 'facetpeelimage', 'facetsub'])

            if d.is_outlier:
                op = OutlierPeel(parset, bands, d)
            else:
                op = FacetPeel(parset, bands, d)
            scheduler.run(op)

            # Check whether direction went through selfcal successfully. If
            # not, exit
            if d.selfcal_ok:
                # Set the name of the subtracted data column for all directions
                if set_sub_data_colname:
                    for direction in directions:
                        direction.subtracted_data_colname = 'SUBTRACTED_DATA_ALL_NEW'
                    set_sub_data_colname = False
            else:
                log.error('Peeling verification failed for direction {0}.'.format(d.name))
                log.info('Exiting...')
                sys.exit(1)

            if d.peel_calibrator:
                # Do the imaging of the facet if calibrator was peeled and
                # subtract the improved model
                op = FacetPeelImage(parset, bands, d)
                scheduler.run(op)

                op = FacetSub(parset, bands, d)
                scheduler.run(op)

    # Run selfcal and subtract operations on direction groups
    for gindx, direction_group in enumerate(direction_groups):
        log.info('Processing {0} direction(s) in Group {1}'.format(
            len(direction_group), gindx+1))

        # Set up reset of any directions that need it. If the direction has
        # already been through the facetsub operation, we must undo the
        # changes with the facetsubreset operation before we reset facetselfcal
        # (otherwise the model data required to reset facetsub will be deleted)
        direction_group_reset = [d for d in direction_group if d.do_reset]
        direction_group_reset_facetsub = [d for d in direction_group_reset if
                                          'facetsub' in d.completed_operations]
        if len(direction_group_reset_facetsub) > 0:
            for d in direction_group_reset_facetsub:
                d.reset_state('facetsubreset')
            direction_group_reset_facetsub = factor.cluster.combine_nodes(
                direction_group_reset_facetsub,
                parset['cluster_specific']['node_list'],
                parset['cluster_specific']['nimg_per_node'],
                parset['cluster_specific']['ncpu'],
                parset['cluster_specific']['fmem'],
                len(bands))
            ops = [FacetSubReset(parset, bands, d) for d in direction_group_reset_facetsub]
            for op in ops:
                scheduler.run(op)
        for d in direction_group_reset:
            d.reset_state(['facetselfcal', 'facetsub'])

        # Divide up the nodes and cores among the directions for the parallel
        # selfcal operations
        direction_group = factor.cluster.divide_nodes(direction_group,
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['ndir_per_node'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))

        # Check for any directions within transfer radius that have successfully
        # gone through selfcal
        dirs_with_selfcal = [d for d in directions if d.selfcal_ok]
        if len(dirs_with_selfcal) > 0:
            for d in direction_group:
                nearest, sep = factor.directions.find_nearest(d, dirs_with_selfcal)
                if sep < parset['direction_specific']['transfer_radius']:
                    log.debug('Initializing selfcal for direction {0} with solutions '
                              'from direction {1}.'.format(d.name, nearest.name))
                    d.dir_dep_parmdb_mapfile = nearest.dir_dep_parmdb_mapfile
                    d.save_state()
                    d.transfer_nearest_solutions = True

        # Do selfcal or peeling on calibrator only
        to_peel = [d for d in direction_group if d.peel_calibrator]
        to_selfcal = [d for d in direction_group if not d.peel_calibrator]
        ops = [FacetSelfcal(parset, bands, d) for d in direction_group]
        scheduler.run(ops)
        if dry_run:
            # For dry run, skip check
            for d in direction_group:
                d.selfcal_ok = True
        direction_group_ok = [d for d in direction_group if d.selfcal_ok]
        if set_sub_data_colname:
            # Set the name of the subtracted data column for remaining
            # directions (if needed)
            if len(direction_group_ok) > 0:
                for d in directions:
                    if d.name != direction_group_ok[0].name:
                        d.subtracted_data_colname = 'SUBTRACTED_DATA_ALL_NEW'
                set_sub_data_colname = False

        # Combine the nodes and cores for the serial subtract operations
        direction_group_ok = factor.cluster.combine_nodes(direction_group_ok,
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))

        # Subtract final model(s) for directions for which selfcal went OK
        ops = [FacetSub(parset, bands, d) for d in direction_group_ok]
        for op in ops:
            scheduler.run(op)

        # Handle directions in this group for which selfcal failed
        selfcal_ok = [d.selfcal_ok for d in direction_group]
        for d in direction_group:
            if not d.selfcal_ok:
                log.warn('Selfcal verification failed for direction {0}.'.format(d.name))
        if not all(selfcal_ok) and parset['exit_on_selfcal_failure']:
            log.info('Exiting...')
            sys.exit(1)

    # Check that at least one direction went through selfcal successfully. If
    # not, exit
    if len([d for d in directions if d.selfcal_ok]) == 0:
        log.error('Selfcal verification failed for all directions. Exiting...')
        sys.exit(1)

    # Make final facet images (from final empty datasets) if desired. Also image
    # any facets for which selfcal failed or no selfcal was done
    dirs_to_image = [d for d in directions if d.make_final_image and
                     d.selfcal_ok and not d.is_patch and not d.is_outlier]
    if len(dirs_to_image) > 0:
        log.info('Reimaging the following direction(s):')
        log.info('{0}'.format([d.name for d in dirs_to_image]))

    # Add directions without selfcal to those that will be imaged
    dirs_to_transfer = [d for d in directions if not d.selfcal_ok and
                        not d.is_patch and not d.is_outlier]
    if len(dirs_to_transfer) > 0:
        log.info('Imaging the following direction(s) with nearest selfcal solutions:')
        log.info('{0}'.format([d.name for d in dirs_to_transfer]))
    dirs_with_selfcal = [d for d in directions if d.selfcal_ok]
    for d in dirs_to_transfer:
        # Search for nearest direction with successful selfcal
        nearest, sep = factor.directions.find_nearest(d, dirs_with_selfcal)
        log.debug('Using solutions from direction {0} for direction {1}.'.format(
            nearest.name, d.name))
        d.dir_dep_parmdb_mapfile = nearest.dir_dep_parmdb_mapfile
        d.save_state()
    dirs_to_image.extend(dirs_to_transfer)

    if len(dirs_to_image) > 0:
        # Set up reset of any directions that need it
        directions_reset = [d for d in dirs_to_image if d.do_reset]
        for d in directions_reset:
            d.reset_state('facetimage')

        # Group directions. This is done to ensure that multiple directions
        # aren't competing for the same resources
        ndir_simul = (len(parset['cluster_specific']['node_list']) *
                      parset['cluster_specific']['ndir_per_node'])
        for i in range(int(np.ceil(len(dirs_to_image)/float(ndir_simul)))):
            dir_group = dirs_to_image[i*ndir_simul:(i+1)*ndir_simul]

            # Divide up the nodes and cores among the directions for the
            # parallel imaging operations
            dir_group = factor.cluster.divide_nodes(dir_group,
                parset['cluster_specific']['node_list'],
                parset['cluster_specific']['ndir_per_node'],
                parset['cluster_specific']['nimg_per_node'],
                parset['cluster_specific']['ncpu'],
                parset['cluster_specific']['fmem'],
                len(bands))

            # Do facet imaging
            ops = [FacetImage(parset, bands, d) for d in dir_group]
            scheduler.run(ops)

    # Mosaic the final facet images together
    if parset['make_mosaic']:
        # Make direction object for the field and load previous state (if any)
        field = Direction('field', bands[0].ra, bands[0].dec,
                          factor_working_dir=parset['dir_working'])
        field.load_state()

        # Reset the field direction if specified
        if 'field' in reset_directions:
            field.reset_state('makemosaic')

        field.facet_image_filenames = []
        field.facet_vertices_filenames = []
        for d in directions:
            if not d.is_patch:
                facet_image = DataMap.load(d.facet_image_mapfile)[0].file
                field.facet_image_filenames.append(facet_image)
                field.facet_vertices_filenames.append(d.save_file)

        # Combine the nodes and cores for the mosaic operation
        field = factor.cluster.combine_nodes([field],
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))[0]

        # Do mosaicking
        op = MakeMosaic(parset, bands, field)
        scheduler.run(op)

    log.info("Factor has finished :)")
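# The imaging stage above processes dirs_to_image in slices of ndir_simul
# directions so that simultaneous pipelines do not compete for the same nodes.
# The same slicing pattern is isolated here as a standalone, runnable sketch
# with made-up numbers (3 nodes, 2 directions per node).
import numpy as np

def batch(items, node_list, ndir_per_node):
    """Yield successive groups of items sized to the available compute slots"""
    ndir_simul = len(node_list) * ndir_per_node
    for i in range(int(np.ceil(len(items) / float(ndir_simul)))):
        yield items[i * ndir_simul:(i + 1) * ndir_simul]

if __name__ == '__main__':
    facets = ['facet_{0}'.format(i) for i in range(14)]
    for group in batch(facets, node_list=['node1', 'node2', 'node3'], ndir_per_node=2):
        print(group)  # groups of at most 6 facet names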
def directions_read(directions_file, factor_working_dir):
    """
    Read a Factor-formatted directions file and return list of Direction objects

    Parameters
    ----------
    directions_file : str
        Filename of Factor-formatted directions file
    factor_working_dir : str
        Full path of working directory

    Returns
    -------
    directions : list of Direction objects
        List of Direction objects
    """
    from astropy.coordinates import Angle

    if not os.path.isfile(directions_file):
        log.critical("Directions file (%s) not found." % (directions_file))
        sys.exit(1)

    log.info("Reading directions file: %s" % (directions_file))
    try:
        types = np.dtype({
            'names': ['name', 'radec', 'atrous_do', 'mscale_field_do',
                      'cal_imsize', 'solint_p', 'solint_a', 'dynamic_range',
                      'region_selfcal', 'region_field', 'peel_skymodel',
                      'outlier_source', 'cal_size_deg', 'cal_flux_mjy'],
            'formats': ['S255', 'S255', 'S5', 'S5', int, int, int, 'S2',
                        'S255', 'S255', 'S255', 'S5', float, float]})
        directions = np.genfromtxt(directions_file, comments='#', dtype=types)
    except ValueError:
        types = np.dtype({
            'names': ['name', 'radec', 'atrous_do', 'mscale_field_do',
                      'cal_imsize', 'solint_p', 'solint_a', 'dynamic_range',
                      'region_selfcal', 'region_field', 'peel_skymodel',
                      'outlier_source'],
            'formats': ['S255', 'S255', 'S5', 'S5', int, int, int, 'S2',
                        'S255', 'S255', 'S255', 'S5']})
        directions = np.genfromtxt(directions_file, comments='#', dtype=types)

    data = []
    for direction in directions:
        RAstr, Decstr = direction['radec'].split(',')
        ra = Angle(RAstr).to('deg').value
        dec = Angle(Decstr).to('deg').value

        # Check coordinates
        if np.isnan(ra) or ra < 0 or ra > 360:
            log.error('RA %s is wrong for direction: %s. Ignoring direction.'
                      % (direction['radec'], direction['name']))
            continue
        if np.isnan(dec) or dec < -90 or dec > 90:
            log.error('Dec %s is wrong for direction: %s. Ignoring direction.'
                      % (direction['radec'], direction['name']))
            continue

        # Check atrous_do (wavelet) setting
        if direction['atrous_do'].lower() == 'empty':
            atrous_do = None
        elif direction['atrous_do'].lower() == 'true':
            atrous_do = True
        else:
            atrous_do = False

        # Check mscale_field_do (multi-scale) setting
        if direction['mscale_field_do'].lower() == 'empty':
            mscale_field_do = None
        elif direction['mscale_field_do'].lower() == 'true':
            mscale_field_do = True
        else:
            mscale_field_do = False

        # Check outlier_source (peeling) setting
        if direction['outlier_source'].lower() == 'empty':
            outlier_source = None
        elif direction['outlier_source'].lower() == 'true':
            outlier_source = True
        else:
            outlier_source = False

        # Set defaults
        if direction['solint_a'] < 0:
            direction['solint_a'] = 0  # 0 => set internally
        if direction['solint_p'] < 0:
            direction['solint_p'] = 0  # 0 => set internally

        if len(direction) > 13:
            if direction['cal_size_deg'] < 0.0 or np.isnan(direction['cal_size_deg']):
                cal_size_deg = None
            else:
                cal_size_deg = direction['cal_size_deg']
            if np.isnan(direction['cal_flux_mjy']):
                cal_flux_jy = None
            else:
                cal_flux_jy = direction['cal_flux_mjy'] / 1000.0
        else:
            cal_size_deg = None
            cal_flux_jy = None

        data.append(Direction(direction['name'], ra, dec, atrous_do,
                              mscale_field_do, direction['cal_imsize'],
                              direction['solint_p'], direction['solint_a'],
                              direction['dynamic_range'],
                              direction['region_selfcal'],
                              direction['region_field'],
                              direction['peel_skymodel'], outlier_source,
                              factor_working_dir, False, cal_size_deg,
                              cal_flux_jy))

    return data
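# directions_read() expects a whitespace-delimited text file whose columns
# follow the dtype above. The two rows below are purely illustrative (names,
# coordinates, sizes, and fluxes are made up, and the dynamic_range values are
# only examples of 2-character codes): 'empty' entries become None in the
# checks above, and cal_flux_mjy is converted from mJy to Jy. A string such as
# this could be written to disk and passed to directions_read().
EXAMPLE_DIRECTIONS_FILE = """\
# name radec atrous_do mscale_field_do cal_imsize solint_p solint_a dynamic_range region_selfcal region_field peel_skymodel outlier_source cal_size_deg cal_flux_mjy
src1 14h41m01.88,+35d30m31.5 empty empty 512 1 30 LD empty empty empty False 0.08 250.0
src2 14h45m12.03,+34d55m10.2 True False 1024 2 60 HD empty empty empty False -1 1200.0
"""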
def run(parset_file, logging_level='info', dry_run=False, test_run=False, reset_directions=[], reset_operations=[]): """ Processes a dataset using facet calibration This function runs the operations in the correct order and handles all the bookkeeping for the processing: e.g., which operations are needed for each direction (depending on success of selfcal, the order in which they were processed, etc.). It also handles the set up of the computing parameters and the generation of DDE calibrators and facets. Parameters ---------- parset_file : str Filename of parset containing processing parameters logging_level : str, optional One of 'degug', 'info', 'warning' in decreasing order of verbosity dry_run : bool, optional If True, do not run pipelines. All parsets, etc. are made as normal test_run : bool, optional If True, use test settings. These settings are for testing purposes only and will not produce useful results reset_directions : list of str, optional List of names of directions to be reset reset_operations : list of str, optional Llist of operations to be reset """ # Read parset parset = factor.parset.parset_read(parset_file) # Set up logger parset['logging_level'] = logging_level factor._logging.set_level(logging_level) # Set up clusterdesc, node info, scheduler, etc. scheduler = _set_up_compute_parameters(parset, dry_run) # Prepare vis data bands = _set_up_bands(parset, test_run) # Set up directions and groups directions, direction_groups = _set_up_directions(parset, bands, dry_run, test_run, reset_directions, reset_operations) # Run peeling operations on outlier directions and any facets # for which the calibrator is to be peeled set_sub_data_colname = True peel_directions = [d for d in directions if d.is_outlier] peel_directions.extend([d for d in directions if d.peel_calibrator]) if len(peel_directions) > 0: log.info('Peeling {0} direction(s)'.format(len(peel_directions))) # Do the peeling for d in peel_directions: # Reset if needed. Note that proper reset of the subtract steps in # outlierpeel and facetsub is not currently supported d.reset_state(['outlierpeel', 'facetpeel', 'facetpeelimage', 'facetsub']) if d.is_outlier: op = OutlierPeel(parset, bands, d) else: op = FacetPeel(parset, bands, d) scheduler.run(op) # Check whether direction went through selfcal successfully. If # not, exit if d.selfcal_ok: # Set the name of the subtracted data column for all directions if set_sub_data_colname: for direction in directions: direction.subtracted_data_colname = 'SUBTRACTED_DATA_ALL_NEW' set_sub_data_colname = False else: log.error('Peeling failed for direction {0}.'.format(d.name)) log.info('Exiting...') sys.exit(1) if d.peel_calibrator: # Do the imaging of the facet if calibrator was peeled and # subtract the improved model op = FacetPeelImage(parset, bands, d) scheduler.run(op) op = FacetSub(parset, bands, d) scheduler.run(op) # Run selfcal and subtract operations on direction groups for gindx, direction_group in enumerate(direction_groups): log.info('Self calibrating {0} direction(s) in Group {1}'.format( len(direction_group), gindx+1)) # Set up reset of any directions that need it. 
If the direction has # already been through the facetsub operation, we must undo the # changes with the facetsubreset operation direction_group_reset = [d for d in direction_group if d.do_reset] direction_group_reset_facetsub = [d for d in direction_group_reset if 'facetsub' in d.completed_operations] if (len(direction_group_reset_facetsub) > 0 and ('facetselfcal' in reset_operations or 'facetsub' in reset_operations or 'facetsubreset' in reset_operations)): for d in direction_group_reset_facetsub: if ('facetsubreset' in d.completed_operations or 'facetsubreset' in reset_operations): # Reset a previous reset, but only if it completed successfully # or is explicitly specified for reset (to allow one to resume # facetsubreset instead of always resetting and restarting it) d.reset_state('facetsubreset') ops = [FacetSubReset(parset, bands, d) for d in direction_group_reset_facetsub] for op in ops: scheduler.run(op) for d in direction_group_reset: d.reset_state(['facetselfcal', 'facetsub']) # Do selfcal or peeling on calibrator only ops = [FacetSelfcal(parset, bands, d) for d in direction_group] scheduler.run(ops) if dry_run: # For dryrun, skip selfcal verification for d in direction_group: d.selfcal_ok = True direction_group_ok = [d for d in direction_group if d.selfcal_ok] if set_sub_data_colname: # Set the name of the subtracted data column for remaining # directions (if needed) if len(direction_group_ok) > 0: for d in directions: if d.name != direction_group_ok[0].name: d.subtracted_data_colname = 'SUBTRACTED_DATA_ALL_NEW' set_sub_data_colname = False # Subtract final model(s) for directions for which selfcal went OK ops = [FacetSub(parset, bands, d) for d in direction_group_ok] for op in ops: scheduler.run(op) # Handle directions in this group for which selfcal failed selfcal_ok = [d.selfcal_ok for d in direction_group] for d in direction_group: if not d.selfcal_ok: log.warn('Self calibration failed for direction {0}.'.format(d.name)) if not all(selfcal_ok) and parset['calibration_specific']['exit_on_selfcal_failure']: log.info('Exiting...') sys.exit(1) # Check that at least one direction went through selfcal successfully. If # not, exit if len([d for d in directions if d.selfcal_ok]) == 0: log.error('Self calibration failed for all directions. 
Exiting...') sys.exit(1) # (Re)image facets for each set of cellsize, robust, taper, and uv cut settings dirs_with_selfcal = [d for d in directions if d.selfcal_ok] dirs_with_selfcal_to_reimage = [d for d in dirs_with_selfcal if not d.is_patch and not d.is_outlier] dirs_without_selfcal = [d for d in directions if not d.selfcal_ok and not d.is_patch and not d.is_outlier] if len(dirs_without_selfcal) > 0: log.info('Imaging the following direction(s) with nearest self calibration solutions:') log.info('{0}'.format([d.name for d in dirs_without_selfcal])) for d in dirs_without_selfcal: # Search for nearest direction with successful selfcal nearest, sep = factor.directions.find_nearest(d, dirs_with_selfcal) log.debug('Using solutions from direction {0} for direction {1} ' '(separation = {2} deg).'.format(nearest.name, d.name, sep)) d.dir_dep_parmdb_mapfile = nearest.dir_dep_parmdb_mapfile d.save_state() if len(dirs_with_selfcal_to_reimage + dirs_without_selfcal) > 0: cellsizes = parset['imaging_specific']['facet_cellsize_arcsec'] tapers = parset['imaging_specific']['facet_taper_arcsec'] robusts = parset['imaging_specific']['facet_robust'] min_uvs = parset['imaging_specific']['facet_min_uv_lambda'] nimages = len(cellsizes) for image_indx, (cellsize_arcsec, taper_arcsec, robust, min_uv_lambda) in enumerate( zip(cellsizes, tapers, robusts, min_uvs)): # Always image directions that did not go through selfcal dirs_to_image = dirs_without_selfcal[:] # Only reimage facets with selfcal imaging parameters if reimage_selfcal flag is set if parset['imaging_specific']['facet_imager'] == 'wsclean': selfcal_robust = parset['imaging_specific']['selfcal_robust_wsclean'] else: selfcal_robust = parset['imaging_specific']['selfcal_robust'] if (cellsize_arcsec == parset['imaging_specific']['selfcal_cellsize_arcsec'] and robust == selfcal_robust and taper_arcsec == 0.0): if parset['imaging_specific']['reimage_selfcaled']: dirs_to_image += dirs_with_selfcal_to_reimage else: dirs_to_image += dirs_with_selfcal_to_reimage if len(dirs_to_image) > 0: log.info('Imaging with cellsize = {0} arcsec, robust = {1}, ' 'taper = {2} arcsec, min_uv = {3} lambda'.format(cellsize_arcsec, robust, taper_arcsec, min_uv_lambda)) log.info('Imaging the following direction(s):') log.info('{0}'.format([d.name for d in dirs_to_image])) # Reset facetimage op for any directions that need it directions_reset = [d for d in dirs_to_image if d.do_reset] for d in directions_reset: op = FacetImage(parset, bands, d, cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) d.reset_state(op.name) # Do facet imaging ops = [FacetImage(parset, bands, d, cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) for d in dirs_to_image] scheduler.run(ops) # Mosaic the final facet images together for i, (cellsize_arcsec, taper_arcsec, robust, min_uv_lambda) in enumerate( zip(cellsizes, tapers, robusts, min_uvs)): if parset['imaging_specific']['make_mosaic']: # Make direction object for the field and load previous state (if any) field = Direction('field', bands[0].ra, bands[0].dec, factor_working_dir=parset['dir_working']) field.load_state() # Set averaging for primary beam generation field.avgpb_freqstep = bands[0].nchan field.avgpb_timestep = int(120.0 / bands[0].timepersample) # Reset the field direction if specified if 'field' in reset_directions: op = FieldMosaic(parset, bands, field, cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) field.reset_state(op.name) field.facet_image_filenames = [] field.facet_vertices_filenames = [] for d in directions: if not 
d.is_patch: facet_image = DataMap.load(d.facet_image_mapfile)[0].file field.facet_image_filenames.append(facet_image) field.facet_vertices_filenames.append(d.save_file) # Do mosaicking op = FieldMosaic(parset, bands, field, cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) scheduler.run(op) log.info("Factor has finished :)")
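# In the reimaging loop above, a facet that already went through selfcal is
# only reimaged at the selfcal resolution (matching cellsize and robust, zero
# taper) when reimage_selfcaled is set; for any other cellsize/robust/taper
# combination it is always imaged. That decision is isolated here as a
# standalone sketch; the parameter names mirror the parset keys used above,
# and the selfcal robust value itself depends on the chosen facet imager, as
# shown in the code above.
def needs_reimaging(cellsize_arcsec, robust, taper_arcsec,
                    selfcal_cellsize_arcsec, selfcal_robust, reimage_selfcaled):
    """Return True if a selfcal-ed facet should be (re)imaged with these settings"""
    matches_selfcal = (cellsize_arcsec == selfcal_cellsize_arcsec and
                       robust == selfcal_robust and
                       taper_arcsec == 0.0)
    if matches_selfcal:
        return reimage_selfcaled
    return True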
def _initialize_directions(parset, initial_skymodel, ref_band,
                           max_radius_deg=None, dry_run=False):
    """
    Read in directions file and initialize resulting directions

    Parameters
    ----------
    parset : dict
        Parset containing processing parameters
    initial_skymodel : SkyModel object
        Local sky model
    ref_band : Band object
        Reference band
    max_radius_deg : float, optional
        Maximum radius in degrees from the phase center within which to include
        sources. If None, it is set to the FWHM (i.e., a diameter of 2 * FWHM)
    dry_run : bool, optional
        If True, do not run pipelines. All parsets, etc. are made as normal

    Returns
    -------
    directions : List of Direction instances
        All directions to be used
    """
    dir_parset = parset['direction_specific']
    s = initial_skymodel.copy()

    # First check for user-supplied directions file, then for Factor-generated
    # file from a previous run, then for parameters needed to generate it internally
    if 'directions_file' in dir_parset:
        directions = factor.directions.directions_read(dir_parset['directions_file'],
                                                       parset['dir_working'])
    elif os.path.exists(os.path.join(parset['dir_working'], 'factor_directions.txt')):
        directions = factor.directions.directions_read(
            os.path.join(parset['dir_working'], 'factor_directions.txt'),
            parset['dir_working'])
    else:
        if dir_parset['flux_min_jy'] is None or \
                dir_parset['size_max_arcmin'] is None or \
                dir_parset['separation_max_arcmin'] is None:
            log.critical('If no directions file is specified, you must '
                         'give values for flux_min_Jy, size_max_arcmin, and '
                         'separation_max_arcmin')
            sys.exit(1)
        else:
            # Make directions from dir-indep sky model of highest-frequency
            # band, as it has the smallest field of view
            log.info("No directions file given. Selecting directions internally...")

            # Filter out sources that lie outside of the maximum specified
            # radius from the phase center
            if max_radius_deg is None:
                max_radius_deg = ref_band.fwhm_deg  # means a diameter of 2 * FWHM
            log.info('Removing sources beyond a radius of {0} degrees (corresponding to '
                     'a diameter of {1} * FWHM of the primary beam at {2} MHz)...'.format(
                     max_radius_deg, round(2.0*max_radius_deg/ref_band.fwhm_deg, 1),
                     ref_band.freq/1e6))
            dist = s.getDistance(ref_band.ra, ref_band.dec, byPatch=True)
            s.remove(dist > max_radius_deg, aggregate=True)

            # Generate the directions file
            if dir_parset['minimize_nonuniformity']:
                dir_parset['directions_file'] = factor.directions.make_directions_file_from_skymodel_uniform(
                    s, dir_parset['flux_min_jy'], dir_parset['size_max_arcmin'],
                    dir_parset['separation_max_arcmin'],
                    directions_max_num=dir_parset['ndir_max'],
                    interactive=parset['interactive'],
                    ncpu=parset['cluster_specific']['ncpu'],
                    flux_min_for_merging_Jy=dir_parset['flux_min_for_merging_jy'])
            else:
                dir_parset['directions_file'] = factor.directions.make_directions_file_from_skymodel(
                    s, dir_parset['flux_min_jy'], dir_parset['size_max_arcmin'],
                    dir_parset['separation_max_arcmin'],
                    directions_max_num=dir_parset['ndir_max'],
                    interactive=parset['interactive'],
                    flux_min_for_merging_Jy=dir_parset['flux_min_for_merging_jy'])
            directions = factor.directions.directions_read(dir_parset['directions_file'],
                                                           parset['dir_working'])

    # Add the target to the directions list if desired
    target_ra = dir_parset['target_ra']
    target_dec = dir_parset['target_dec']
    target_radius_arcmin = dir_parset['target_radius_arcmin']
    target_has_own_facet = dir_parset['target_has_own_facet']
    if target_ra is not None and target_dec is not None and target_radius_arcmin is not None:
        # Make target object
        target = Direction('target', target_ra, target_dec,
                           factor_working_dir=parset['dir_working'])
        if target_has_own_facet:
            target.contains_target = True

            # Check if target is already in directions list because it was
            # selected as a DDE calibrator. If so, remove the duplicate
            nearest, dist = factor.directions.find_nearest(target, directions)
            if dist < dir_parset['target_radius_arcmin']/60.0:
                directions.remove(nearest)

            # Add target to directions list
            directions.append(target)
        else:
            # Find direction that contains target
            nearest, dist = factor.directions.find_nearest(target, directions)
            nearest.contains_target = True
    else:
        if target_has_own_facet:
            log.critical('target_has_own_facet = True, but target RA, Dec, or '
                         'radius not found in parset')
            sys.exit(1)

    # Set calibrator size (must be done before faceting below is done)
    for d in directions:
        d.set_cal_size(parset['imaging_specific']['selfcal_cellsize_arcsec'])

    # Create facets and patches
    faceting_radius_deg = dir_parset['faceting_radius_deg']
    if faceting_radius_deg is None:
        faceting_radius_deg = 1.25 * ref_band.fwhm_deg / 2.0
    beam_ratio = 1.0 / np.sin(ref_band.mean_el_rad)  # ratio of N-S to E-W beam
    factor.directions.thiessen(directions, ref_band.ra, ref_band.dec,
                               faceting_radius_deg, s=s,
                               check_edges=dir_parset['check_edges'],
                               target_ra=target_ra, target_dec=target_dec,
                               target_radius_arcmin=target_radius_arcmin,
                               beam_ratio=beam_ratio)

    # Make DS9 region files so user can check the facets, etc.
    ds9_facet_reg_file = os.path.join(parset['dir_working'], 'regions', 'facets_ds9.reg')
    factor.directions.make_ds9_region_file(directions, ds9_facet_reg_file)
    ds9_calimage_reg_file = os.path.join(parset['dir_working'], 'regions', 'calimages_ds9.reg')
    factor.directions.make_ds9_calimage_file(directions, ds9_calimage_reg_file)

    return directions
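# When faceting_radius_deg is not given, the function above defaults it to
# 1.25 * FWHM / 2 and stretches the faceting region north-south by
# 1 / sin(elevation). A small worked example with assumed numbers (a primary
# beam FWHM of 4.0 deg and a mean elevation of 50 deg):
import numpy as np

fwhm_deg = 4.0
mean_el_rad = np.radians(50.0)
faceting_radius_deg = 1.25 * fwhm_deg / 2.0   # 2.5 deg
beam_ratio = 1.0 / np.sin(mean_el_rad)        # ~1.31 (N-S relative to E-W)
print(faceting_radius_deg, round(beam_ratio, 2))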
def run(parset_file, logging_level='info', dry_run=False, test_run=False, reset_directions=[], reset_operations=[], stop_after=0): """ Processes a dataset using facet calibration This function runs the operations in the correct order and handles all the bookkeeping for the processing: e.g., which operations are needed for each direction (depending on success of selfcal, the order in which they were processed, etc.). It also handles the set up of the computing parameters and the generation of DDE calibrators and facets. Parameters ---------- parset_file : str Filename of parset containing processing parameters logging_level : str, optional One of 'degug', 'info', 'warning' in decreasing order of verbosity dry_run : bool, optional If True, do not run pipelines. All parsets, etc. are made as normal test_run : bool, optional If True, use test settings. These settings are for testing purposes only and will not produce useful results reset_directions : list of str, optional List of names of directions to be reset reset_operations : list of str, optional Llist of operations to be reset stop_after : int, optional Stop after processing so many facetselfcal groups. """ # Read parset parset = factor.parset.parset_read(parset_file) # Set up logger parset['logging_level'] = logging_level factor._logging.set_level(logging_level) # Set up clusterdesc, node info, scheduler, etc. scheduler = _set_up_compute_parameters(parset, dry_run) # Prepare vis data bands = _set_up_bands(parset, test_run) # Set up directions and groups directions, direction_groups = _set_up_directions(parset, bands, dry_run, test_run, reset_directions, reset_operations) # Run peeling operations on outlier directions and any facets # for which the calibrator is to be peeled set_sub_data_colname = True set_preapply_flag = True peel_directions = [d for d in directions if d.is_outlier] peel_directions.extend([d for d in directions if d.peel_calibrator]) if len(peel_directions) > 0: log.info('Peeling {0} direction(s)'.format(len(peel_directions))) # Set flag for first non-outlier direction (if any) to create preapply parmdb for d in peel_directions: if not d.is_outlier: d.create_preapply_h5parm = True break # Reset if needed direction_group_reset = [d for d in peel_directions if d.do_reset] direction_group_reset_facetsub = [d for d in direction_group_reset if 'facetsub' in d.reset_operations] if len(direction_group_reset_facetsub) > 0: for d in direction_group_reset_facetsub: if ('facetsubreset' in d.completed_operations or 'facetsubreset' in reset_operations): # Reset a previous reset, but only if it completed successfully # or is explicitly specified for reset (to allow one to resume # facetsubreset instead of always resetting and restarting it) d.reset_state('facetsubreset') ops = [FacetSubReset(parset, bands, d) for d in direction_group_reset_facetsub] for op in ops: scheduler.run(op) for d in direction_group_reset: d.reset_state(['outlierpeel', 'facetpeel', 'facetsub']) # Do the peeling for d in peel_directions: if d.is_outlier: op = OutlierPeel(parset, bands, d) else: op = FacetPeel(parset, bands, d) scheduler.run(op) # Check whether direction went through peeling successfully. If so, # subtract and set various flags if needed. 
If not successful, exit if d.selfcal_ok: # Subtract improved model(s) with DDE calibration op = FacetSub(parset, bands, d) scheduler.run(op) # Set the name of the subtracted data column for subsequent directions if set_sub_data_colname: for direction in directions: direction.subtracted_data_colname = 'CORRECTED_DATA' set_sub_data_colname = False # Set the flag for preapplication of selfcal solutions, but only # if this direction is not an outlier (as its solutions are likely # too different to be useful) if (set_preapply_flag and parset['calibration_specific']['preapply_first_cal_phases'] and not d.is_outlier): for direction in directions: if direction.name != d.name: direction.preapply_phase_cal = True direction.preapply_parmdb_mapfile = d.preapply_parmdb_mapfile set_preapply_flag = False else: log.error('Peeling failed for direction {0}.'.format(d.name)) log.info('Exiting...') sys.exit(1) # Run selfcal and subtract operations on direction groups if stop_after: log.debug('Will stop after processing {} selfcal-groups.'.format(stop_after)) for gindx, direction_group in enumerate(direction_groups): if stop_after and gindx >= stop_after: log.warn('Stopping after having processed {} groups.'.format(stop_after)) log.info('Exiting...') sys.exit(0) log.info('Self calibrating {0} direction(s) in Group {1}'.format( len(direction_group), gindx+1)) # Set up reset of any directions that need it. If the direction has # already been through the facetsub operation, we must undo the # changes with the facetsubreset operation direction_group_reset = [d for d in direction_group if d.do_reset] direction_group_reset_facetsub = [d for d in direction_group_reset if 'facetsub' in d.reset_operations] if len(direction_group_reset_facetsub) > 0: for d in direction_group_reset_facetsub: # Set subtracted data column to ensure we are using the new one d.subtracted_data_colname = 'CORRECTED_DATA' if ('facetsubreset' in d.completed_operations or 'facetsubreset' in reset_operations): # Reset a previous reset, but only if it completed successfully # or is explicitly specified for reset (to allow one to resume # facetsubreset instead of always resetting and restarting it) d.reset_state('facetsubreset') ops = [FacetSubReset(parset, bands, d) for d in direction_group_reset_facetsub] for op in ops: scheduler.run(op) for d in direction_group_reset: d.reset_state(['facetselfcal', 'facetsub']) # Set flag for first direction to create preapply parmdb if set_preapply_flag: direction_group[0].create_preapply_h5parm = True # Do selfcal on calibrator only ops = [FacetSelfcal(parset, bands, d) for d in direction_group] scheduler.run(ops) if dry_run: # For dryrun, skip selfcal verification for d in direction_group: d.selfcal_ok = True direction_group_ok = [d for d in direction_group if d.selfcal_ok] if set_sub_data_colname: # Set the name of the subtracted data column for remaining # directions (if needed) if len(direction_group_ok) > 0: for d in directions: if d.name != direction_group_ok[0].name: d.subtracted_data_colname = 'CORRECTED_DATA' set_sub_data_colname = False if set_preapply_flag: # Set the flag for preapplication of selfcal solutions (if needed) if len(direction_group_ok) > 0: if parset['calibration_specific']['preapply_first_cal_phases']: for d in directions: if d.name != direction_group_ok[0].name: d.preapply_phase_cal = True d.preapply_h5parm_mapfile = direction_group_ok[0].preapply_h5parm_mapfile set_preapply_flag = False # Subtract final model(s) for directions for which selfcal went OK ops = [FacetSub(parset, bands, 
d) for d in direction_group_ok] for op in ops: scheduler.run(op) # Handle directions in this group for which selfcal failed selfcal_ok = [d.selfcal_ok for d in direction_group] for d in direction_group: if not d.selfcal_ok: log.warn('Self calibration failed for direction {0}.'.format(d.name)) if not all(selfcal_ok) and parset['calibration_specific']['exit_on_selfcal_failure']: log.info('Exiting...') sys.exit(1) # Check that at least one direction went through selfcal successfully. If # not, exit if len([d for d in directions if d.selfcal_ok]) == 0: log.warn('Self calibration failed for all directions. Exiting...') sys.exit(1) # Image facets for each set of cellsize, robust, taper, and uv cut settings cellsizes = parset['imaging_specific']['facet_cellsize_arcsec'] tapers = parset['imaging_specific']['facet_taper_arcsec'] robusts = parset['imaging_specific']['facet_robust'] min_uvs = parset['imaging_specific']['facet_min_uv_lambda'] selfcal_robust = parset['imaging_specific']['selfcal_robust'] nimages = len(cellsizes) dirs_with_selfcal = [d for d in directions if d.selfcal_ok] if parset['imaging_specific']['image_target_only']: dirs_with_selfcal_to_image = [d for d in dirs_with_selfcal if not d.is_patch and not d.is_outlier and d.contains_target] dirs_without_selfcal_to_image = [d for d in directions if not d.selfcal_ok and not d.is_patch and not d.is_outlier and d.contains_target] else: dirs_with_selfcal_to_image = [d for d in dirs_with_selfcal if not d.is_patch and not d.is_outlier] dirs_without_selfcal_to_image = [d for d in directions if not d.selfcal_ok and not d.is_patch and not d.is_outlier] if len(dirs_without_selfcal_to_image) > 0: log.info('Imaging the following direction(s) with nearest self calibration solutions:') log.info('{0}'.format([d.name for d in dirs_without_selfcal_to_image])) for d in dirs_without_selfcal_to_image: # Search for nearest direction with successful selfcal nearest, sep = factor.directions.find_nearest(d, dirs_with_selfcal) log.debug('Using solutions from direction {0} for direction {1} ' '(separation = {2} deg).'.format(nearest.name, d.name, sep)) d.dir_dep_h5parm_mapfile = nearest.dir_dep_h5parm_mapfile d.preapply_h5parm_mapfile = nearest.preapply_h5parm_mapfile if nearest.create_preapply_h5parm: # Nearest is calibrator for which preapply solutions were made, so don't preapply them d.preapply_phase_cal = False d.save_state() if len(dirs_with_selfcal_to_image + dirs_without_selfcal_to_image) > 0: for image_indx, (cellsize_arcsec, taper_arcsec, robust, min_uv_lambda) in enumerate( zip(cellsizes, tapers, robusts, min_uvs)): # Always image directions that did not go through selfcal dirs_to_image = dirs_without_selfcal_to_image[:] # Only reimage facets with selfcal imaging parameters if reimage_selfcal flag is set full_res_im, opname = _get_image_type_and_name(cellsize_arcsec, taper_arcsec, robust, selfcal_robust, min_uv_lambda, parset) if full_res_im: dirs_to_image += dirs_with_selfcal_to_image else: dirs_to_image += dirs_with_selfcal_to_image if len(dirs_to_image) > 0: log.info('Imaging with cellsize = {0} arcsec, robust = {1}, ' 'taper = {2} arcsec, min_uv = {3} lambda'.format(cellsize_arcsec, robust, taper_arcsec, min_uv_lambda)) log.info('Imaging the following direction(s):') log.info('{0}'.format([d.name for d in dirs_to_image])) # Reset facetimage op for any directions that need it directions_reset = [d for d in dirs_to_image if d.do_reset] for d in directions_reset: d.reset_state(opname) # Do facet imaging ops = [FacetImage(parset, bands, d, 
cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) for d in dirs_to_image] scheduler.run(ops) # Mosaic the final facet images together if parset['imaging_specific']['make_mosaic']: # Make direction object for the field and load previous state (if any) field = Direction('field', bands[0].ra, bands[0].dec, factor_working_dir=parset['dir_working']) field.load_state() if len(reset_operations) > 0: field.reset_operations = reset_operations else: field.reset_operations = (field.completed_operations[:] + field.started_operations[:]) # Set averaging for primary beam generation field.avgpb_freqstep = bands[0].nchan field.avgpb_timestep = int(120.0 / bands[0].timepersample) for i, (cellsize_arcsec, taper_arcsec, robust, min_uv_lambda) in enumerate( zip(cellsizes, tapers, robusts, min_uvs)): # Reset the field direction if specified full_res_im, opname = _get_image_type_and_name(cellsize_arcsec, taper_arcsec, robust, selfcal_robust, min_uv_lambda, parset, opbase='fieldmosaic') if 'field' in reset_directions: field.reset_state(opname) # Specify appropriate image, mask, and vertices file field.facet_image_filenames = [] field.facet_vertices_filenames = [] full_res_im, opname = _get_image_type_and_name(cellsize_arcsec, taper_arcsec, robust, selfcal_robust, min_uv_lambda, parset) for d in dirs_to_image: if not d.is_patch: facet_image = DataMap.load(d.facet_image_mapfile[opname])[0].file field.facet_image_filenames.append(facet_image) field.facet_vertices_filenames.append(d.save_file) # Do mosaicking op = FieldMosaic(parset, bands, field, cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) scheduler.run(op) log.info("Factor has finished :)")
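# In the later versions of run() above, the first successfully calibrated
# non-outlier direction provides phase solutions that are pre-applied to every
# other direction (preapply_first_cal_phases). The bookkeeping is sketched here
# with a minimal stand-in class; the attribute names mirror those used above,
# but this is only an illustration, not the pipeline's own implementation.
class _Dir(object):
    def __init__(self, name, is_outlier=False):
        self.name = name
        self.is_outlier = is_outlier
        self.create_preapply_h5parm = False
        self.preapply_phase_cal = False
        self.preapply_h5parm_mapfile = None

def flag_preapply(directions, first_ok):
    """Point all other directions at the first calibrator's preapply solutions.

    The caller only invokes this when first_ok is not an outlier, since an
    outlier's solutions are likely too different to be useful.
    """
    for d in directions:
        if d.name != first_ok.name:
            d.preapply_phase_cal = True
            d.preapply_h5parm_mapfile = first_ok.preapply_h5parm_mapfile

if __name__ == '__main__':
    dirs = [_Dir('facet_1'), _Dir('facet_2'), _Dir('outlier_1', is_outlier=True)]
    dirs[0].create_preapply_h5parm = True
    dirs[0].preapply_h5parm_mapfile = 'facet_1_preapply.mapfile'  # hypothetical name
    flag_preapply(dirs, dirs[0])
    print([(d.name, d.preapply_phase_cal) for d in dirs])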
FacetImage(parset, bands, d, cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) for d in dirs_to_image ] scheduler.run(ops) # Mosaic the final facet images together if parset['imaging_specific']['make_mosaic']: # Make direction object for the field and load previous state (if any) field = Direction('field', bands[0].ra, bands[0].dec, factor_working_dir=parset['dir_working']) field.load_state() if len(reset_operations) > 0: field.reset_operations = reset_operations else: field.reset_operations = (field.completed_operations[:] + field.started_operations[:]) # Set averaging for primary beam generation field.avgpb_freqstep = bands[0].nchan field.avgpb_timestep = int(120.0 / bands[0].timepersample) for i, (cellsize_arcsec, taper_arcsec, robust, min_uv_lambda) in enumerate( zip(cellsizes, tapers, robusts, min_uvs)): # Reset the field direction if specified full_res_im, opname = _get_image_type_and_name( cellsize_arcsec, taper_arcsec, robust, selfcal_robust, min_uv_lambda, parset, opbase='fieldmosaic') if 'field' in reset_directions: field.reset_state(opname) # Specify appropriate image, mask, and vertices file field.facet_image_filenames = [] field.facet_vertices_filenames = [] full_res_im, opname = _get_image_type_and_name( cellsize_arcsec, taper_arcsec, robust, selfcal_robust, min_uv_lambda, parset) for d in dirs_to_image: if not d.is_patch: facet_image = DataMap.load( d.facet_image_mapfile[opname])[0].file field.facet_image_filenames.append(facet_image) field.facet_vertices_filenames.append(d.save_file) # Do mosaicking op = FieldMosaic(parset, bands, field, cellsize_arcsec, robust, taper_arcsec, min_uv_lambda) scheduler.run(op) log.info("Factor has finished :)")
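# Illustrative sketch (not part of Factor): the "borrow solutions from the
# nearest successfully self-calibrated direction" step in run() relies on
# factor.directions.find_nearest(). The stand-alone helpers below show the
# idea with plain (name, ra, dec) tuples; the function names and tuple-based
# directions are hypothetical stand-ins for Factor's Direction objects.
import numpy as np


def _angular_separation_deg(ra1, dec1, ra2, dec2):
    """Great-circle separation in degrees between two (RA, Dec) positions."""
    ra1, dec1, ra2, dec2 = [np.radians(v) for v in (ra1, dec1, ra2, dec2)]
    cos_sep = (np.sin(dec1) * np.sin(dec2) +
               np.cos(dec1) * np.cos(dec2) * np.cos(ra1 - ra2))
    return np.degrees(np.arccos(np.clip(cos_sep, -1.0, 1.0)))


def _find_nearest_selfcal_ok(direction, dirs_with_selfcal):
    """
    Return (nearest direction, separation in deg) among directions that
    passed selfcal. Each direction is a (name, ra_deg, dec_deg) tuple here.
    """
    seps = [_angular_separation_deg(direction[1], direction[2], d[1], d[2])
            for d in dirs_with_selfcal]
    idx = int(np.argmin(seps))
    return dirs_with_selfcal[idx], seps[idx]


# Example usage:
#   failed = ('facet_patch_7', 123.4, 45.6)
#   ok = [('facet_patch_2', 122.9, 45.1), ('facet_patch_5', 125.0, 47.0)]
#   nearest, sep = _find_nearest_selfcal_ok(failed, ok)
#   -> nearest[0] == 'facet_patch_2', sep ~= 0.61 deg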
def run(parset_file, logging_level='info', dry_run=False, test_run=False,
        reset_directions=[], reset_operations=[]):
    """
    Processes a dataset using facet calibration

    This function runs the operations in the correct order and handles all
    the bookkeeping for the processing: e.g., which operations are needed
    for each direction (depending on success of selfcal, the order in which
    they were processed, etc.). It also handles the set up of the computing
    parameters and the generation of DDE calibrators and facets.

    Parameters
    ----------
    parset_file : str
        Filename of parset containing processing parameters
    logging_level : str, optional
        One of 'debug', 'info', 'warning' in decreasing order of verbosity
    dry_run : bool, optional
        If True, do not run pipelines. All parsets, etc. are made as normal
    test_run : bool, optional
        If True, use test settings. These settings are for testing purposes
        only and will not produce useful results
    reset_directions : list of str, optional
        List of names of directions to be reset
    reset_operations : list of str, optional
        List of operations to be reset
    """
    # Read parset
    parset = factor.parset.parset_read(parset_file)

    # Set up logger
    parset['logging_level'] = logging_level
    factor._logging.set_level(logging_level)

    # Set up clusterdesc, node info, scheduler, etc.
    scheduler = _set_up_compute_parameters(parset, dry_run)

    # Prepare vis data
    bands = _set_up_bands(parset, test_run)

    # Define directions and groups
    directions, direction_groups = _set_up_directions(parset, bands, dry_run,
        test_run, reset_directions, reset_operations)

    # Run peeling operations on outlier directions and any facets
    # for which the calibrator is to be peeled
    set_sub_data_colname = True
    peel_directions = [d for d in directions if d.is_outlier]
    peel_directions.extend([d for d in directions if d.peel_calibrator])
    if len(peel_directions) > 0:
        # Combine the nodes and cores for the peeling operation
        outlier_directions = factor.cluster.combine_nodes(peel_directions,
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))

        # Do the peeling
        for d in peel_directions:
            # Reset if needed. Note that proper reset of the subtract steps in
            # outlierpeel and facetsub is not currently supported
            d.reset_state(['outlierpeel', 'facetpeel', 'facetpeelimage',
                'facetsub'])

            if d.is_outlier:
                op = OutlierPeel(parset, bands, d)
            else:
                op = FacetPeel(parset, bands, d)
            scheduler.run(op)

            # Check whether direction went through selfcal successfully. If
            # not, exit
            if d.selfcal_ok:
                # Set the name of the subtracted data column for all directions
                if set_sub_data_colname:
                    for direction in directions:
                        direction.subtracted_data_colname = 'SUBTRACTED_DATA_ALL_NEW'
                    set_sub_data_colname = False
            else:
                log.error('Peeling verification failed for direction {0}.'.format(d.name))
                log.info('Exiting...')
                sys.exit(1)

            if d.peel_calibrator:
                # Do the imaging of the facet if the calibrator was peeled and
                # subtract the improved model
                op = FacetPeelImage(parset, bands, d)
                scheduler.run(op)

                op = FacetSub(parset, bands, d)
                scheduler.run(op)

    # Run selfcal and subtract operations on direction groups
    for gindx, direction_group in enumerate(direction_groups):
        log.info('Processing {0} direction(s) in Group {1}'.format(
            len(direction_group), gindx + 1))

        # Set up reset of any directions that need it. If the direction has
        # already been through the facetsub operation, we must undo the
        # changes with the facetsubreset operation before we reset facetselfcal
        # (otherwise the model data required to reset facetsub will be deleted)
        direction_group_reset = [d for d in direction_group if d.do_reset]
        direction_group_reset_facetsub = [d for d in direction_group_reset if
            'facetsub' in d.completed_operations]
        if len(direction_group_reset_facetsub) > 0:
            for d in direction_group_reset_facetsub:
                d.reset_state('facetsubreset')
            direction_group_reset_facetsub = factor.cluster.combine_nodes(
                direction_group_reset_facetsub,
                parset['cluster_specific']['node_list'],
                parset['cluster_specific']['nimg_per_node'],
                parset['cluster_specific']['ncpu'],
                parset['cluster_specific']['fmem'],
                len(bands))
            ops = [FacetSubReset(parset, bands, d) for d in
                direction_group_reset_facetsub]
            for op in ops:
                scheduler.run(op)
        for d in direction_group_reset:
            d.reset_state(['facetselfcal', 'facetsub'])

        # Divide up the nodes and cores among the directions for the parallel
        # selfcal operations
        direction_group = factor.cluster.divide_nodes(direction_group,
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['ndir_per_node'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))

        # Check for any directions within the transfer radius that have
        # successfully gone through selfcal
        dirs_with_selfcal = [d for d in directions if d.selfcal_ok]
        if len(dirs_with_selfcal) > 0:
            for d in direction_group:
                nearest, sep = factor.directions.find_nearest(d, dirs_with_selfcal)
                if sep < parset['direction_specific']['transfer_radius']:
                    log.debug('Initializing selfcal for direction {0} with '
                        'solutions from direction {1}.'.format(d.name, nearest.name))
                    d.dir_dep_parmdb_mapfile = nearest.dir_dep_parmdb_mapfile
                    d.save_state()
                    d.transfer_nearest_solutions = True

        # Do selfcal or peeling on calibrator only
        to_peel = [d for d in direction_group if d.peel_calibrator]
        to_selfcal = [d for d in direction_group if not d.peel_calibrator]
        ops = [FacetSelfcal(parset, bands, d) for d in direction_group]
        scheduler.run(ops)
        if dry_run:
            # For a dry run, skip the check
            for d in direction_group:
                d.selfcal_ok = True
        direction_group_ok = [d for d in direction_group if d.selfcal_ok]
        if set_sub_data_colname:
            # Set the name of the subtracted data column for remaining
            # directions (if needed)
            if len(direction_group_ok) > 0:
                for d in directions:
                    if d.name != direction_group_ok[0].name:
                        d.subtracted_data_colname = 'SUBTRACTED_DATA_ALL_NEW'
                set_sub_data_colname = False

        # Combine the nodes and cores for the serial subtract operations
        direction_group_ok = factor.cluster.combine_nodes(direction_group_ok,
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))

        # Subtract final model(s) for directions for which selfcal went OK
        ops = [FacetSub(parset, bands, d) for d in direction_group_ok]
        for op in ops:
            scheduler.run(op)

        # Handle directions in this group for which selfcal failed
        selfcal_ok = [d.selfcal_ok for d in direction_group]
        for d in direction_group:
            if not d.selfcal_ok:
                log.warn('Selfcal verification failed for direction {0}.'.format(d.name))
        if not all(selfcal_ok) and parset['exit_on_selfcal_failure']:
            log.info('Exiting...')
            sys.exit(1)

    # Check that at least one direction went through selfcal successfully. If
    # not, exit
    if len([d for d in directions if d.selfcal_ok]) == 0:
        log.error('Selfcal verification failed for all directions. Exiting...')
        sys.exit(1)

    # Make final facet images (from final empty datasets) if desired. Also image
    # any facets for which selfcal failed or no selfcal was done
    dirs_to_image = [d for d in directions if d.make_final_image and
        d.selfcal_ok and not d.is_patch and not d.is_outlier]
    if len(dirs_to_image) > 0:
        log.info('Reimaging the following direction(s):')
        log.info('{0}'.format([d.name for d in dirs_to_image]))

    # Add directions without selfcal to those that will be imaged
    dirs_to_transfer = [d for d in directions if not d.selfcal_ok and
        not d.is_patch and not d.is_outlier]
    if len(dirs_to_transfer) > 0:
        log.info('Imaging the following direction(s) with nearest selfcal solutions:')
        log.info('{0}'.format([d.name for d in dirs_to_transfer]))
    dirs_with_selfcal = [d for d in directions if d.selfcal_ok]
    for d in dirs_to_transfer:
        # Search for nearest direction with successful selfcal
        nearest, sep = factor.directions.find_nearest(d, dirs_with_selfcal)
        log.debug('Using solutions from direction {0} for direction {1}.'.format(
            nearest.name, d.name))
        d.dir_dep_parmdb_mapfile = nearest.dir_dep_parmdb_mapfile
        d.save_state()
    dirs_to_image.extend(dirs_to_transfer)

    if len(dirs_to_image) > 0:
        # Set up reset of any directions that need it
        directions_reset = [d for d in dirs_to_image if d.do_reset]
        for d in directions_reset:
            d.reset_state('facetimage')

        # Group directions. This is done to ensure that multiple directions
        # aren't competing for the same resources
        ndir_simul = (len(parset['cluster_specific']['node_list']) *
            parset['cluster_specific']['ndir_per_node'])
        for i in range(int(np.ceil(len(dirs_to_image) / float(ndir_simul)))):
            dir_group = dirs_to_image[i*ndir_simul:(i+1)*ndir_simul]

            # Divide up the nodes and cores among the directions for the
            # parallel imaging operations
            dir_group = factor.cluster.divide_nodes(dir_group,
                parset['cluster_specific']['node_list'],
                parset['cluster_specific']['ndir_per_node'],
                parset['cluster_specific']['nimg_per_node'],
                parset['cluster_specific']['ncpu'],
                parset['cluster_specific']['fmem'],
                len(bands))

            # Do facet imaging
            ops = [FacetImage(parset, bands, d) for d in dir_group]
            scheduler.run(ops)

    # Mosaic the final facet images together
    if parset['make_mosaic']:
        # Make direction object for the field and load previous state (if any)
        field = Direction('field', bands[0].ra, bands[0].dec,
            factor_working_dir=parset['dir_working'])
        field.load_state()

        # Reset the field direction if specified
        if 'field' in reset_directions:
            field.reset_state('makemosaic')

        field.facet_image_filenames = []
        field.facet_vertices_filenames = []
        for d in directions:
            if not d.is_patch:
                facet_image = DataMap.load(d.facet_image_mapfile)[0].file
                field.facet_image_filenames.append(facet_image)
                field.facet_vertices_filenames.append(d.save_file)

        # Combine the nodes and cores for the mosaic operation
        field = factor.cluster.combine_nodes([field],
            parset['cluster_specific']['node_list'],
            parset['cluster_specific']['nimg_per_node'],
            parset['cluster_specific']['ncpu'],
            parset['cluster_specific']['fmem'],
            len(bands))[0]

        # Do mosaicking
        op = MakeMosaic(parset, bands, field)
        scheduler.run(op)

    log.info("Factor has finished :)")
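# Illustrative sketch (not Factor's factor.cluster API): the imaging loop in
# run() above batches directions so that at most ndir_simul of them run at
# once, where ndir_simul = number of nodes * ndir_per_node. The helper name
# below and the plain-string "directions" are hypothetical; it only
# demonstrates the batching arithmetic used in that loop.
import numpy as np


def _batch_directions(dirs_to_image, ndir_simul):
    """Yield successive batches of at most ndir_simul directions, in order."""
    nbatches = int(np.ceil(len(dirs_to_image) / float(ndir_simul)))
    for i in range(nbatches):
        yield dirs_to_image[i * ndir_simul:(i + 1) * ndir_simul]


# Example usage: 7 facets on 2 nodes with ndir_per_node = 2 -> batches of 4 and 3
#   facets = ['facet_patch_{0}'.format(i) for i in range(7)]
#   [len(b) for b in _batch_directions(facets, ndir_simul=2 * 2)] -> [4, 3]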
def _set_up_directions(parset, bands, dry_run=False, test_run=False,
    reset_directions=[], reset_operations=[]):
    """
    Sets up directions (facets)

    Parameters
    ----------
    parset : dict
        Parset containing processing parameters
    bands : list of Band instances
        Vis data
    dry_run : bool, optional
        If True, do not run pipelines. All parsets, etc. are made as normal
    test_run : bool, optional
        If True, use test settings. These settings are for testing purposes
        only and will not produce useful results
    reset_directions : list of str, optional
        List of direction names to be reset
    reset_operations : list of str, optional
        List of operations to be reset

    Returns
    -------
    directions : List of Direction instances
        All directions to be used by the run() function
    direction_groups : List of lists of Direction instances
        Groups of directions to be selfcal-ed
    """
    dir_parset = parset['direction_specific']

    log.info("Building local sky model...")
    ref_band = bands[-1]
    max_radius_deg = dir_parset['max_radius_deg']
    initial_skymodel = factor.directions.make_initial_skymodel(ref_band,
        max_radius_deg=max_radius_deg)

    log.info('Setting up directions...')

    # First check for a user-supplied directions file, then for a Factor-generated
    # file from a previous run, then for parameters needed to generate it internally
    if 'directions_file' in dir_parset:
        directions = factor.directions.directions_read(dir_parset['directions_file'],
            parset['dir_working'])
    elif os.path.exists(os.path.join(parset['dir_working'], 'factor_directions.txt')):
        directions = factor.directions.directions_read(os.path.join(
            parset['dir_working'], 'factor_directions.txt'), parset['dir_working'])
    else:
        if dry_run:
            # Stop here if dry_run is True but no directions file was given
            log.warn('No directions file given. Cannot proceed beyond the '
                'initsubtract operation. Exiting...')
            sys.exit(0)
        elif dir_parset['flux_min_jy'] is None or \
                dir_parset['size_max_arcmin'] is None or \
                dir_parset['separation_max_arcmin'] is None:
            log.critical('If no directions file is specified, you must '
                'give values for flux_min_Jy, size_max_arcmin, and '
                'separation_max_arcmin')
            sys.exit(1)
        else:
            # Make directions from the dir-indep sky model of the highest-frequency
            # band, as it has the smallest field of view
            log.info("No directions file given. Selecting directions internally...")
            dir_parset['directions_file'] = factor.directions.make_directions_file_from_skymodel(
                initial_skymodel.copy(), dir_parset['flux_min_jy'],
                dir_parset['size_max_arcmin'],
                dir_parset['separation_max_arcmin'],
                directions_max_num=dir_parset['max_num'],
                interactive=parset['interactive'],
                flux_min_for_merging_Jy=dir_parset['flux_min_for_merging_jy'])
            directions = factor.directions.directions_read(dir_parset['directions_file'],
                parset['dir_working'])

    # Add the target to the directions list if desired
    target_ra = dir_parset['target_ra']
    target_dec = dir_parset['target_dec']
    target_radius_arcmin = dir_parset['target_radius_arcmin']
    target_has_own_facet = dir_parset['target_has_own_facet']
    if target_has_own_facet:
        if target_ra is not None and target_dec is not None and target_radius_arcmin is not None:
            # Make target object
            target = Direction('target', target_ra, target_dec,
                factor_working_dir=parset['dir_working'])

            # Check if the target is already in the directions list because it
            # was selected as a DDE calibrator. If so, remove the duplicate
            nearest, dist = factor.directions.find_nearest(target, directions)
            if dist < dir_parset['target_radius_arcmin']/60.0:
                directions.remove(nearest)

            # Add target to directions list
            directions.append(target)
        else:
            log.critical('target_has_own_facet = True, but target RA, Dec, or '
                'radius not found in parset')
            sys.exit(1)

    # Create facets and patches
    faceting_radius_deg = dir_parset['faceting_radius_deg']
    if faceting_radius_deg is None:
        faceting_radius_deg = 1.25 * ref_band.fwhm_deg / 2.0
    beam_ratio = 1.0 / np.sin(ref_band.mean_el_rad)  # ratio of N-S to E-W beam
    factor.directions.thiessen(directions, ref_band.ra, ref_band.dec,
        faceting_radius_deg, s=initial_skymodel.copy(),
        check_edges=dir_parset['check_edges'], target_ra=target_ra,
        target_dec=target_dec, target_radius_arcmin=target_radius_arcmin,
        beam_ratio=beam_ratio)

    # Warn the user if they've specified a direction to reset that does not exist
    direction_names = [d.name for d in directions]
    for name in reset_directions:
        if name not in direction_names and name != 'field':
            log.warn('Direction {} was specified for resetting but does not '
                'exist in the current list of directions'.format(name))

    # Make DS9 region files so the user can check the facets, etc.
    ds9_facet_reg_file = os.path.join(parset['dir_working'], 'regions', 'facets_ds9.reg')
    factor.directions.make_ds9_region_file(directions, ds9_facet_reg_file)
    ds9_calimage_reg_file = os.path.join(parset['dir_working'], 'regions', 'calimages_ds9.reg')
    factor.directions.make_ds9_calimage_file(directions, ds9_calimage_reg_file)

    # Check with user
    if parset['interactive']:
        print("Facet and DDE calibrator regions saved. Please check that they "
            "are OK before continuing.")
        prompt = "Continue processing (y/n)? "
        answ = raw_input(prompt)
        while answ.lower() not in ['y', 'n', 'yes', 'no']:
            answ = raw_input(prompt)
        if answ.lower() in ['n', 'no']:
            log.info('Exiting...')
            sys.exit()

    # Load previously completed operations (if any) and save the state
    for direction in directions:
        direction.load_state()
        direction.save_state()

    # Select subset of directions to process
    if dir_parset['ndir_total'] is not None:
        if dir_parset['ndir_total'] < len(directions):
            directions = directions[:dir_parset['ndir_total']]

            # Make sure the target is still included
            direction_names = [d.name for d in directions]
            if target_has_own_facet and 'target' not in direction_names:
                directions.append(target)

    # Set various direction attributes
    log.info("Determining imaging parameters for each direction...")
    mean_freq_mhz = np.mean([b.freq for b in bands]) / 1e6
    min_peak_smearing_factor = 1.0 - parset['max_peak_smearing']
    for i, direction in enumerate(directions):
        # Set imaging and calibration parameters
        direction.set_imcal_parameters(parset['wsclean_nbands'],
            bands[0].chan_width_hz, bands[0].nchan, bands[0].timepersample,
            bands[0].minSamplesPerFile, len(bands), mean_freq_mhz,
            initial_skymodel, parset['preaverage_flux_jy'],
            min_peak_smearing_factor=min_peak_smearing_factor)

        # Set field center to that of the first band (all bands have the same
        # phase center)
        direction.field_ra = bands[0].ra
        direction.field_dec = bands[0].dec

        # Set global re-image flag
        direction.make_final_image = dir_parset['reimage']

        # Reset state if specified
        if direction.name in reset_directions:
            direction.do_reset = True
        else:
            direction.do_reset = False

    # Select directions to selfcal, excluding outliers and the target
    if target_has_own_facet:
        # Make sure the target is not a DDE calibrator and is at the end of the
        # directions list
        selfcal_directions = [d for d in directions if d.name != target.name and
            not d.is_outlier]
        directions = [d for d in directions if d.name != target.name] + [target]
    else:
        selfcal_directions = [d for d in directions if not d.is_outlier]

    if dir_parset['ndir_selfcal'] is not None:
        if dir_parset['ndir_selfcal'] <= len(selfcal_directions):
            selfcal_directions = selfcal_directions[:dir_parset['ndir_selfcal']]

    # Divide directions into groups for selfcal
    direction_groups = factor.directions.group_directions(selfcal_directions,
        n_per_grouping=dir_parset['groupings'],
        allow_reordering=dir_parset['allow_reordering'])

    return directions, direction_groups
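# Worked example (all input values are hypothetical): the default faceting
# radius and the N-S/E-W beam ratio computed in _set_up_directions() before
# calling factor.directions.thiessen().
import numpy as np


def _default_faceting_geometry(fwhm_deg, mean_el_rad):
    """
    Return (faceting_radius_deg, beam_ratio) using the same expressions as
    _set_up_directions(): 1.25 * FWHM / 2 and 1 / sin(mean elevation).
    """
    faceting_radius_deg = 1.25 * fwhm_deg / 2.0
    beam_ratio = 1.0 / np.sin(mean_el_rad)
    return faceting_radius_deg, beam_ratio


# Example usage: a 4-deg FWHM reference band observed at a mean elevation of
# 55 deg gives a 2.5-deg faceting radius and a beam elongated N-S by ~1.22:
#   _default_faceting_geometry(4.0, np.radians(55.0)) -> (2.5, 1.22...)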