Example #1
    def print_HAcov(self, png=None):
        """
        some info on the MSs
        """
        telescope = self.mssListObj[0].getTelescope()
        if telescope == 'LOFAR':
            telescope_coords = EarthLocation(lat=52.90889*u.deg, lon=6.86889*u.deg, height=0*u.m)
        elif telescope == 'GMRT':
            telescope_coords = EarthLocation(lat=19.0948*u.deg, lon=74.0493*u.deg, height=0*u.m)
        else:
            raise ValueError('Unknown telescope: %s.' % telescope)
        
        has = []; elevs = []
        for ms in self.mssListObj:
            time = np.mean(ms.getTimeRange())
            time = Time( time/86400, format='mjd')
            time.delta_ut1_utc = 0. # no need to download precise table for leap seconds
            logger.info('%s (%s): Hour angle: %.1f hrs - Elev: %.2f (Sun distance: %.0f)' % (ms.nameMS,time.iso,ms.ha.deg/15.,ms.elev.deg,ms.sun_dist.deg))
            has.append(ms.ha.deg/15.)
            elevs.append(ms.elev.deg)

        if png is not None:
            import matplotlib.pyplot as pl
            pl.figure(figsize=(6,6))
            ax1 = pl.gca()
            ax1.plot(has, elevs, 'ko')
            ax1.set_xlabel('HA [hrs]')
            ax1.set_ylabel('Elevation [deg]')
            logger.debug('Save plot: %s' % png)
            pl.savefig(png)
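The TIME values returned by getTimeRange() are in MJD seconds, which is why they are divided by 86400 before being handed to astropy's Time. A minimal sketch of that conversion (the sample value is made up):

    from astropy.time import Time

    mjd_sec = 5.0e9  # hypothetical MS TIME value, in MJD seconds
    t = Time(mjd_sec / 86400., format='mjd')
    t.delta_ut1_utc = 0.  # skip the precise IERS table download, as above
    print(t.iso)  # -> '2017-04-27 08:53:20.000'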
Example #2
    def plot_HAcov(self, plotname='HAcov.png'):
        """
        Show the coverage in HA
        """
        from astropy.coordinates import get_sun, SkyCoord, EarthLocation, AltAz
        from astropy.time import Time
        from astropy import units as u

        telescope = self.mssListObj[0].getTelescope()
        if telescope == 'LOFAR':
            telescope_coords = EarthLocation(lat=52.90889 * u.deg,
                                             lon=6.86889 * u.deg,
                                             height=0 * u.m)
        elif telescope == 'GMRT':
            telescope_coords = EarthLocation(lat=19.0948 * u.deg,
                                             lon=74.0493 * u.deg,
                                             height=0 * u.m)
        else:
            raise ValueError('Unknown telescope: %s.' % telescope)

        for ms in self.mssListObj:
            time = np.mean(ms.getTimeRange())
            time = Time(time / 86400, format='mjd')
            coord_sun = get_sun(time)
            ra, dec = ms.getPhaseCentre()
            coord = SkyCoord(ra * u.deg, dec * u.deg)
            elev = coord.transform_to(
                AltAz(obstime=time, location=telescope_coords)).alt
            sun_dist = coord.separation(coord_sun)
            lst = time.sidereal_time('mean', telescope_coords.longitude)
            ha = lst - coord.ra  # hour angle
            logger.info(
                'Hour angle: %.1f hrs - Elev: %.2f (Sun distance: %.0f)' %
                (ha.deg / 15., elev.deg, sun_dist.deg))
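The same computation works as a standalone snippet, which is handy for checking a single pointing; the site, epoch and phase centre below are made-up values:

    import astropy.units as u
    from astropy.coordinates import get_sun, SkyCoord, EarthLocation, AltAz
    from astropy.time import Time

    loc = EarthLocation(lat=52.90889 * u.deg, lon=6.86889 * u.deg, height=0 * u.m)  # LOFAR core
    time = Time(59000.0, format='mjd')  # hypothetical mid-observation time
    time.delta_ut1_utc = 0.  # avoid downloading the precise IERS table
    coord = SkyCoord(150.0 * u.deg, 45.0 * u.deg)  # hypothetical phase centre

    elev = coord.transform_to(AltAz(obstime=time, location=loc)).alt
    sun_dist = coord.separation(get_sun(time))
    ha = time.sidereal_time('mean', loc.lon) - coord.ra  # hour angle
    # .hourangle is equivalent to .deg / 15.
    print('HA: %.1f hrs - Elev: %.2f (Sun distance: %.0f)'
          % (ha.wrap_at(180 * u.deg).hourangle, elev.deg, sun_dist.deg))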
Example #3
    def run(self):
        """
        Run the algorithm
        """

        for it in range(self.n_iterations):
            logger.info("Grouper: Starting iteration %i" % it)
            for i, x in enumerate(self.coords):
                ### Step 1. For each datapoint x in X, find the neighbouring points N(x) of x.
                idx_neighbours = self.neighbourhood_points(x, self.coords, max_distance = self.look_distance)
                
                ### Step 2. For each datapoint x in X, calculate the mean shift m(x).
                distances = self.euclid_distance(self.coords[idx_neighbours], x)
                weights = self.gaussian_kernel(distances)
                weights *= self.fluxes[idx_neighbours]**2 # weight by flux**2 to make bright sources more important
                numerator = np.sum(weights[:,np.newaxis] * self.coords[idx_neighbours], axis=0)
                denominator = np.sum(weights)
                new_x = numerator / denominator
                
                ### Step 3. For each datapoint x in X, update x <- m(x).
                self.coords[i] = new_x

            self.past_coords.append(np.copy(self.coords))

            #if it>1: 
            #    print (np.max(self.euclid_distance(self.coords,self.past_coords[-2])))

            # break early if the coordinates barely moved in this iteration
            if it > 1 and np.max(self.euclid_distance(self.coords, self.past_coords[-2])) < self.grouping_distance/2.:
                break
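neighbourhood_points, euclid_distance and gaussian_kernel are not shown in this snippet; a plausible sketch of what they compute, following the standard mean-shift formulation (written as free functions, whereas in the class they are methods and the kernel bandwidth is presumably an instance attribute):

    import numpy as np

    def euclid_distance(points, x):
        # distance of each row of `points` from the point (or matching array) `x`
        return np.sqrt(np.sum((np.atleast_2d(points) - x) ** 2, axis=1))

    def neighbourhood_points(x, coords, max_distance):
        # indices of all coords closer than max_distance to x
        return np.flatnonzero(euclid_distance(coords, x) < max_distance)

    def gaussian_kernel(distances, bandwidth=1.0):
        # Gaussian weights, so nearby points dominate the mean shift
        return np.exp(-0.5 * (distances / bandwidth) ** 2)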
Example #4
    def grouping(self):
        """
        Take the last coords set, group nearby sources and return a list of lists.
        Each inner list holds the source indices of one cluster.
        """
        coords_to_check = np.copy(self.coords)
        while len(coords_to_check) > 0:
            idx_cluster = self.neighbourhood_points(
                coords_to_check[0],
                self.coords,
                max_distance=self.grouping_distance)
            idx_cluster_to_remove = self.neighbourhood_points(
                coords_to_check[0],
                coords_to_check,
                max_distance=self.grouping_distance)

            # remove all coords of this cluster from the global list
            mask = np.ones(coords_to_check.shape[0], dtype=bool)
            mask[idx_cluster_to_remove] = False
            coords_to_check = coords_to_check[mask]

            # save this cluster's indices
            self.clusters.append(idx_cluster)

        logger.info('Grouper: Creating %i groups.' % len(self.clusters))
        return self.clusters
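Putting run() and grouping() together, a typical call sequence could look like this; the constructor signature is an assumption, since it is not part of these snippets:

    import numpy as np

    # hypothetical inputs: source positions [deg] and fluxes [Jy]
    coords = np.array([[150.01, 45.00], [150.02, 45.01], [151.50, 44.50]])
    fluxes = np.array([1.0, 0.5, 2.0])

    g = Grouper(coords, fluxes, look_distance=0.1, grouping_distance=0.05)  # assumed signature
    g.run()                  # mean-shift iterations (Example #3)
    clusters = g.grouping()  # e.g. [array([0, 1]), array([2])]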
Example #5
def repoint(h5parmFile, dirname, solsetname='sol000'):
    """
    rename the pointing direction of an h5parm from 'pointing' to 'dirname'
    """

    dirname = str(dirname)  # make sure it is a string

    # open h5parm
    h5 = h5parm(h5parmFile, readonly=False)
    ss = h5.getSolset(solsetname)

    # rename the dir axis in each soltab (the array must be re-created, otherwise the new name is truncated to the previous maximum string length)
    for tab in ss.getSoltabs():
        if 'dir' in tab.getAxesNames():
            tab.obj._v_file.remove_node('/' + tab.getAddress(), 'dir')
            tab.obj._v_file.create_array('/' + tab.getAddress(),
                                         'dir',
                                         obj=[dirname.encode()])

    # rename directions table
    sourceTable = ss.obj.source
    direction = sourceTable[0][1]
    logger.info('%s: update dir name "%s" -> "%s".' %
                (h5parmFile, sourceTable[0][0], dirname))
    sourceTable[0] = (dirname, direction)

    # write h5parm
    sourceTable.close()
    h5.close()
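A one-line usage sketch, with made-up file and direction names:

    repoint('cal-dd.h5', 'Isl_patch_12')  # 'pointing' -> 'Isl_patch_12' in sol000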
Example #6
    def __init__(self,
                 qsub=None,
                 maxThreads=None,
                 max_processors=None,
                 log_dir='logs',
                 dry=False):
        """
        qsub:           if true, call a shell script which calls qsub and then waits
                        for the process to finish before returning
        maxThreads:     max number of parallel processes
        max_processors: max number of processors in a node (ignored if qsub=False)
        dry:            don't schedule jobs
        """
        self.cluster = self.get_cluster()
        self.log_dir = log_dir
        self.qsub = qsub
        # if qsub/maxThreads/max_processors are not set, guess them from the cluster;
        # if they are set, double-check that the numbers are reasonable
        if (self.qsub is None):
            if (self.cluster == "Hamburg"):
                self.qsub = True
            else:
                self.qsub = False
        else:
            if ((self.qsub is False and self.cluster == "Hamburg") or
                (self.qsub is True and
                 (self.cluster == "Leiden" or self.cluster == "CEP3"
                  or self.cluster == "Hamburg_fat" or self.cluster == "IRA"
                  or self.cluster == "Herts"))):
                logger.critical('Qsub set to %s and cluster is %s.' %
                                (str(qsub), self.cluster))
                sys.exit(1)

        if (maxThreads is None):
            if (self.cluster == "Hamburg"):
                self.maxThreads = 32
            else:
                self.maxThreads = multiprocessing.cpu_count()
        else:
            self.maxThreads = maxThreads

        if (max_processors is None):
            if (self.cluster == "Hamburg"):
                self.max_processors = 6
            else:
                self.max_processors = multiprocessing.cpu_count()
        else:
            self.max_processors = max_processors

        self.dry = dry
        logger.info("Scheduler initialised for cluster " + self.cluster +
                    " (maxThreads: " + str(self.maxThreads) +
                    ", qsub (multinode): " + str(self.qsub) +
                    ", max_processors: " + str(self.max_processors) + ").")

        self.action_list = []
        self.log_list = []  # list of 2-tuples: (log filename, type of action)
Example #7
def run_losoto(s, c, h5s, parsets, plots_dir=None):
    """
    s : scheduler
    c : cycle name, e.g. "final"
    h5s : list of h5parm files, or a string for a single h5parm
    parsets : list of parsets to execute
    """

    logger.info("Running LoSoTo...")

    h5out = 'cal-' + c + '.h5'

    if isinstance(h5s, str): h5s = [h5s]

    # convert from killMS
    for i, h5 in enumerate(h5s):
        if h5.endswith('.npz'):
            newh5 = h5.replace('.npz', '.h5')
            s.add('killMS2H5parm.py -V --nofulljones %s %s ' % (newh5, h5),
                  log='losoto-' + c + '.log',
                  commandType="python",
                  processors='max')
            s.run(check=True)
            h5s[i] = newh5

    # concat/move
    if len(h5s) > 1:
        check_rm(h5out)
        s.add('H5parm_collector.py -V -s sol000 -o ' + h5out + ' ' +
              ' '.join(h5s),
              log='losoto-' + c + '.log',
              commandType="python",
              processors='max')
        s.run(check=True)
    else:
        os.system('cp -r %s %s' % (h5s[0], h5out))

    check_rm('plots')
    os.makedirs('plots')

    for parset in parsets:
        logger.debug('-- executing ' + parset + '...')
        s.add('losoto -V ' + h5out + ' ' + parset,
              log='losoto-' + c + '.log',
              logAppend=True,
              commandType="python",
              processors='max')
        s.run(check=True)

    if plots_dir is None:
        check_rm('plots-' + c)
        os.system('mv plots plots-' + c)
    else:
        if not os.path.exists(plots_dir): os.makedirs(plots_dir)
        os.system('mv plots/* ' + plots_dir)
        check_rm('plots')
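A hedged usage sketch; Scheduler stands in for whatever class provides add()/run() (its __init__ is Example #6), and all file names are made up:

    s = Scheduler(qsub=False, maxThreads=4)  # assumed class name
    run_losoto(s, 'final',
               ['cal-tec.h5', 'cal-amp.npz'],  # the .npz file is converted from killMS first
               ['losoto-flag.parset', 'losoto-plot.parset'],
               plots_dir='plots-final')
    # result: cal-final.h5, with plots collected in plots-final/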
Example #8
    def plot(self):
        """
        Plot the status of the distribution
        """
        import matplotlib as mpl
        mpl.use("Agg")
        import matplotlib.pyplot as plt

        # decent colors
        import cycler, random
        color_idx = np.linspace(0, 1, len(self.clusters))
        random.shuffle(color_idx)
        color = plt.cm.rainbow(color_idx)
        mpl.rcParams['axes.prop_cycle'] = cycler.cycler('color', color)

        logger.info('Plotting grouped sources: grouping_xxx.png')
        for i, X in enumerate(self.past_coords):
            fig = plt.figure(figsize=(8, 8))
            fig.subplots_adjust(wspace=0)
            ax = fig.add_subplot(111)

            initial_x = self.past_coords[0][:, 0]
            initial_y = self.past_coords[0][:, 1]

            ax.plot(initial_x, initial_y, 'k.')
            ax.plot(X[:, 0], X[:, 1], 'ro')

            ax.set_xlim(np.min(initial_x), np.max(initial_x))
            ax.set_ylim(np.min(initial_y), np.max(initial_y))

            ax.set_xlim(ax.get_xlim()[::-1])  # reverse RA
            fig.savefig('grouping_%03i.png' % i, bbox_inches='tight')  # zero-padded frame index

        # plot clustering
        fig = plt.figure(figsize=(8, 8))
        fig.subplots_adjust(wspace=0)
        ax = fig.add_subplot(111)
        for cluster in self.clusters:
            ax.plot(initial_x[cluster],
                    initial_y[cluster],
                    marker='.',
                    linestyle='')

        ax.set_xlim(np.min(initial_x), np.max(initial_x))
        ax.set_ylim(np.min(initial_y), np.max(initial_y))
        ax.set_xlim(ax.get_xlim()[::-1])  # reverse RA

        logger.info('Plotting: grouping_clusters.png')
        fig.savefig('grouping_clusters.png', bbox_inches='tight')
Example #9
    def merge_ids(self, ids):
        """ Merge groups containing ids """
        if len(ids) < 2:
            return None
        clusters = self.clusters
        contains_id = []  # indices of clusters which contain one or more of the ids
        for id in ids:
            isin = [id in cluster for cluster in clusters]  # one boolean per cluster
            contains_id.append(np.nonzero(isin))
        contains_id = np.unique(contains_id)
        if len(contains_id) == 1:  # all sources are already in the same cluster!
            return None
        else:
            merged = np.concatenate([clusters[id] for id in contains_id])  # this will be the merged cluster
            clusters = list(np.delete(clusters, contains_id))  # delete the merged clusters so they don't appear twice
            logger.info('Merge groups in same mask island: {}'.format(merged))
            clusters.append(merged.astype(int))
            self.clusters = clusters
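The merge logic is easiest to see on a toy input; the following self-contained re-implementation mirrors the method above (all names and values are hypothetical):

    import numpy as np

    clusters = [np.array([0, 1]), np.array([2]), np.array([3, 4])]
    ids = [1, 3]  # sources that must end up in the same group

    # clusters 0 and 2 each contain one of the ids
    contains_id = np.unique([i for i, cl in enumerate(clusters) for _id in ids if _id in cl])
    if len(contains_id) > 1:
        merged = np.concatenate([clusters[i] for i in contains_id])
        clusters = [cl for i, cl in enumerate(clusters) if i not in contains_id]
        clusters.append(merged.astype(int))

    print(clusters)  # [array([2]), array([0, 1, 3, 4])]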
Example #10
    def plot(self):
        """
        Plot the status of the distribution
        """
        import matplotlib as mpl
        mpl.use("Agg")
        import matplotlib.pyplot as plt

        logger.info('Plotting grouped sources: grouping_xxx.png')
        for i, X in enumerate(self.past_coords):
            fig = plt.figure(figsize=(8, 8))
            fig.subplots_adjust(wspace=0)
            ax = fig.add_subplot(111)

            initial_x = self.past_coords[0][:, 0]
            initial_y = self.past_coords[0][:, 1]

            ax.plot(initial_x, initial_y, 'k.')
            ax.plot(X[:, 0], X[:, 1], 'ro')

            ax.set_xlim(np.min(initial_x), np.max(initial_x))
            ax.set_ylim(np.min(initial_y), np.max(initial_y))

            fig.savefig('grouping_%03i.png' % i, bbox_inches='tight')  # zero-padded frame index

        # plot clustering
        fig = plt.figure(figsize=(8, 8))
        fig.subplots_adjust(wspace=0)
        ax = fig.add_subplot(111)
        for cluster in self.clusters:
            ax.plot(initial_x[cluster],
                    initial_y[cluster],
                    marker='.',
                    linestyle='')

        ax.set_xlim(np.min(initial_x), np.max(initial_x))
        ax.set_ylim(np.min(initial_y), np.max(initial_y))

        logger.info('Plotting: grouping_clusters.png')
        fig.savefig('grouping_clusters.png', bbox_inches='tight')
Example #11
    def selectCC(self, keepInBeam=True):
        """
        Remove clean components (cc) from a skymodel according to masks.
        keepInBeam: if beamReg is present and is True: remove sources outside the beam
                    if beamReg is present and is False: remove sources inside the beam
        """
        self.makeMask()

        if self.beamReg is not None:
            logger.info('Predict (apply beam reg %s)...' % self.beamReg)
            blank_image_reg(
                self.maskname, self.beamReg, inverse=keepInBeam, blankval=0
            )  # if keepInBeam, blank (set to 0) everything outside beam.reg

        # apply mask
        logger.info('%s: Apply mask on skymodel...' % self.imagename)
        lsm = lsmtool.load(self.skymodel)
        lsm.select('%s == True' % self.maskname)
        lsm.group('single')  # group to 1 patch
        lsm.write(self.skymodel_cut, format='makesourcedb', clobber=True)
        del lsm

        # convert from txt to blob
        logger.info('%s: Make skydb...' % self.imagename)
        lib_util.check_rm(self.skydb)
        os.system('makesourcedb outtype="blob" format="<" in="' +
                  self.skymodel_cut + '" out="' + self.skydb + '"')
Example #12
def run_losoto(s, c, h5s, parsets):
    """
    s : scheduler
    c : cycle name, e.g. "final"
    h5s : list of h5parm files, or a string for a single h5parm
    parsets : list of parsets to execute
    """

    logger.info("Running LoSoTo...")

    h5 = 'cal-' + c + '.h5'

    if isinstance(h5s, str): h5s = [h5s]

    # concat/move
    if len(h5s) > 1:
        check_rm("cal-" + c + ".h5")
        s.add('H5parm_collector.py -V -s sol000 -o ' + h5 + ' ' +
              ' '.join(h5s),
              log='losoto-' + c + '.log',
              commandType="python",
              processors='max')
        s.run(check=True)
    else:
        os.system('cp -r %s %s' % (h5s[0], h5))

    check_rm('plots')
    os.makedirs('plots')

    for parset in parsets:
        logger.debug('-- executing ' + parset + '...')
        s.add('losoto -V ' + h5 + ' ' + parset,
              log='losoto-' + c + '.log',
              logAppend=True,
              commandType="python",
              processors='max')
        s.run(check=True)

    check_rm('plots-' + c)
    os.system('mv plots plots-' + c)
Example #13
    def selectCC(self, checkBeam=True, keepInBeam=True, maskname=None):
        """
        Remove clean components (cc) from a skymodel according to masks.
        checkBeam: remove according to the beam (see keepInBeam)
        keepInBeam: if beamReg is present and is True: remove sources outside the beam
                    if beamReg is present and is False: remove sources inside the beam
        maskname: optional mask to use instead of the standard one
        """
        if maskname is None: maskname = self.maskname
        if not os.path.exists(maskname):
            raise("Missing mask in selectCC: %s." % maskname)

        if checkBeam:
            if self.beamReg is None:
                raise ValueError('Missing beam in selectCC.')
            logger.info('Predict (apply beam reg %s)...' % self.beamReg)
            blank_image_reg(maskname, self.beamReg, inverse=keepInBeam, blankval=0) # if keepInBeam, blank (set to 0) everything outside beam.reg

        # apply mask
        logger.info('%s: Apply mask (%s) on skymodel...' % (self.imagename,maskname))
        lsm = lsmtool.load(self.skymodel)
        lsm.select('%s == True' % maskname)
        lsm.group('single') # group to 1 patch
        lsm.write(self.skymodel_cut, format='makesourcedb', clobber=True)
        del lsm

        # convert from txt to blob
        logger.info('%s: Make skydb...' % self.imagename)
        lib_util.check_rm(self.skydb)
        os.system('makesourcedb outtype="blob" format="<" in="'+self.skymodel_cut+'" out="'+self.skydb+'"')
Example #14
    def makeMask(self,
                 threshisl=5,
                 atrous_do=True,
                 rmsbox=(100, 30),
                 remove_extended_cutoff=0.,
                 only_beam=False,
                 maskname=None):
        """
        Create a mask of the image where only believable flux is

        remove_extended_cutoff: if >0 then remove all islands where sum(brightness_pixels)/(#pixels^2) < remove_extended_cutoff;
        this is useful to remove extended sources from the mask. The higher this number, the more compact the source must be.
        A good value is 0.001 for DIE cal images.

        maskname: if given, use this specific mask name
        only_beam: blank (set to 0) everything outside the beam
        """
        if maskname is None: maskname = self.maskname

        if not os.path.exists(maskname):
            logger.info('%s: Making mask...' % self.imagename)
            make_mask.make_mask(image_name=self.imagename,
                                mask_name=maskname,
                                threshisl=threshisl,
                                atrous_do=atrous_do,
                                rmsbox=rmsbox)
        if self.userReg is not None:
            logger.info('%s: Adding user mask (%s)...' %
                        (self.imagename, self.userReg))
            blank_image_reg(maskname, self.userReg, inverse=False, blankval=1)
        if only_beam and self.beamReg is not None:
            logger.info('%s: Restricting to the beam (%s)...' %
                        (self.imagename, self.beamReg))
            blank_image_reg(maskname, self.beamReg, inverse=True, blankval=0)

        if remove_extended_cutoff > 0:

            # get data
            with pyfits.open(self.imagename) as fits:
                data = fits[0].data
            # get mask
            with pyfits.open(maskname) as fits:
                mask = fits[0].data
                # for each island calculate the cutoff
                blobs, number_of_blobs = label(mask.astype(int).squeeze(),
                                               structure=[[1, 1, 1], [1, 1, 1],
                                                          [1, 1, 1]])
                for i in range(1, number_of_blobs + 1):  # labels run from 1 to number_of_blobs
                    this_blob = blobs == i
                    max_pix = np.max(data[0, 0, this_blob])
                    ratio = np.sum(data[0, 0, this_blob]) / np.sum(
                        mask[0, 0, this_blob])**2
                    if max_pix < 1. and ratio < remove_extended_cutoff:
                        mask[0, 0, this_blob] = False
                    #mask[0,0,this_blob] = ratio # debug

                # write mask back
                fits[0].data = mask
                fits.writeto(maskname, overwrite=True)
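The remove_extended_cutoff criterion, sum(brightness)/(#pixels)^2 per island, can be checked on a toy island; scipy's label is the same routine used above, and the brightness value is made up:

    import numpy as np
    from scipy.ndimage import label

    mask = np.zeros((10, 10), dtype=int)
    mask[4:6, 4:6] = 1        # one 4-pixel island
    data = np.zeros((10, 10))
    data[4:6, 4:6] = 0.002    # hypothetical faint brightness per pixel

    blobs, n = label(mask, structure=np.ones((3, 3), dtype=int))
    island = blobs == 1
    ratio = np.sum(data[island]) / np.sum(mask[island]) ** 2
    print(ratio)  # 0.008 / 16 = 0.0005 -> blanked if remove_extended_cutoff > 0.0005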
Example #15
def addpol(h5parmFile, soltabname, solsetname='sol000'):
    """
    Add a pol axis to a soltab.
    """
    # open h5parm
    logger.info('%s: add pol axis to %s.' % (h5parmFile, soltabname))
    h5 = h5parm(h5parmFile, readonly=False)
    ss = h5.getSolset(solsetname)
    st = ss.getSoltab(soltabname)

    if 'pol' in st.getAxesNames():
        h5.close()
        logger.warning('%s: polarisation axis already present in %s.' %
                       (h5parmFile, soltabname))
        return

    # create values for new soltab
    typ = st.getType()
    axesNames = st.getAxesNames() + ['pol']
    axesVals = [st.getAxisValues(axisName)
                for axisName in st.getAxesNames()] + [np.array(['XX', 'YY'])]
    vals = st.getValues(retAxesVals=False)
    vals = np.array([vals, vals])
    vals = np.moveaxis(vals, 0, -1)
    weights = st.getValues(weight=True, retAxesVals=False)
    weights = np.array([weights, weights])
    weights = np.moveaxis(weights, 0, -1)

    # remove old soltab
    st.delete()

    # make new soltab
    soltabout = ss.makeSoltab(soltype=typ, soltabName=soltabname, axesNames=axesNames,
                              axesVals=axesVals, vals=vals, weights=weights)

    # write h5parm
    h5.close()
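The stacking trick above (duplicate the values, then move the new axis to the back) is plain numpy; a quick shape check on a toy array:

    import numpy as np

    vals = np.zeros((2, 3, 4))     # e.g. (time, ant, freq)
    vals = np.array([vals, vals])  # (2, time, ant, freq)
    vals = np.moveaxis(vals, 0, -1)
    print(vals.shape)              # (2, 3, 4, 2): the XX and YY copies share the old values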
Example #16
def columnAddSimilar(pathMS, columnNameNew, columnNameSimilar, dataManagerInfoNameNew, overwrite = False, fillWithOnes = True, comment = "", verbose = False):
    # TODO: move to lib_ms
    """
    Add a column to an MS that is similar to a pre-existing column (in shape, but not in values).
    pathMS:                 path of the MS
    columnNameNew:          name of the column to be added
    columnNameSimilar:      name of the column from which properties are copied (e.g. "DATA")
    dataManagerInfoNameNew: string value for the data manager info (DMI) keyword "NAME" (should be unique in the MS)
    overwrite:              whether or not to overwrite column 'columnNameNew' if it already exists
    fillWithOnes:           whether or not to fill the newly-made column with ones
    comment:                comment string stored with the new column
    verbose:                whether or not to produce abundant output
    """
    t = tables.table(pathMS, readonly = False)

    if (columnExists(t, columnNameNew) and not overwrite):
        logger.warning("Attempt to add column '" + columnNameNew + "' aborted, as it already exists and 'overwrite = False' in columnAddSimilar(...).")
    else: # Either the column does not exist yet, or it does but overwriting is allowed.

        # Remove column if necessary.
        if (columnExists(t, columnNameNew)):
            logger.info("Removing column '" + columnNameNew + "'...")
            t.removecols(columnNameNew)

        # Add column.
        columnDescription       = t.getcoldesc(columnNameSimilar)
        dataManagerInfo         = t.getdminfo(columnNameSimilar)

        if (verbose):
            logger.debug("columnDescription:")
            logger.debug(columnDescription)
            logger.debug("dataManagerInfo:")
            logger.debug(dataManagerInfo)

        columnDescription["comment"] = ""
        # What about adding something here like:
        #columnDescription["dataManagerGroup"] = ...?
        dataManagerInfo["NAME"]      = dataManagerInfoNameNew

        if (verbose):
            logger.debug("columnDescription (updated):")
            logger.debug(columnDescription)
            logger.debug("dataManagerInfo (updated):")
            logger.debug(dataManagerInfo)

        logger.info("Adding column '" + columnNameNew + "'...")
        t.addcols(tables.makecoldesc(columnNameNew, columnDescription), dataManagerInfo)

        # Fill with ones if desired.
        if (fillWithOnes):
            logger.info("Filling column '" + columnNameNew + "' with ones...")
            columnDataSimilar = t.getcol(columnNameSimilar)
            t.putcol(columnNameNew, np.ones_like(columnDataSimilar))

    # Close the table to avoid that it is locked for further use.
    t.close()
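A possible call, assuming python-casacore is available; the MS path and data-manager name are made up:

    columnAddSimilar('/data/obs.MS', 'MODEL_DATA', 'DATA', 'TiledMODEL_DATA',
                     overwrite=False, fillWithOnes=True, verbose=True)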
Example #17
def make_voronoi_reg(directions,
                     fitsfile,
                     outdir_reg='regions',
                     out_mask='facet.fits',
                     png=None):
    """
    Take a list of coordinates and an image and Voronoi tessellate the sky.
    It saves ds9 regions + a fits mask of the facets.

    directions : array of Direction objects
    fitsfile : mask fits file to tessellate (used for coordinates and as template for the out_mask)
    outdir_reg : directory where the regions are saved
    out_mask : output mask with a different number in each facet
    png : output png file that shows the tessellation
    """
    def closest_node(node, nodes):
        """
        Return closest values to node from nodes
        """
        nodes = np.asarray(nodes)
        dist_2 = np.sum((nodes - node)**2, axis=1)
        return np.argmin(dist_2)

    logger.debug("Image used for tasselation reference: " + fitsfile)
    fits = pyfits.open(fitsfile)
    hdr, data = lib_img.flatten(fits)
    w = pywcs.WCS(hdr)
    pixsize = np.abs(hdr['CDELT1'])

    # Get facets central pixels
    ras = np.array([d.position_cal[0] for d in directions])
    decs = np.array([d.position_cal[1] for d in directions])
    x_fs, y_fs = w.all_world2pix(ras, decs, 0, ra_dec_order=True)
    # keep track of the numbers in the direction names, to correctly name the patches in the fits file
    # in this way Isl_patch_12 will have "12" in the fits for that patch.
    nums = [d.isl_num for d in directions]

    x_c = data.shape[0] / 2.
    y_c = data.shape[1] / 2.

    # Check if dir is in img, otherwise drop
    idx_for_facet = []
    for i, direction in enumerate(directions):
        x, y = w.all_world2pix(ras[i], decs[i], 0, ra_dec_order=True)
        if x < 0 or x > data.shape[1] or y < 0 or y > data.shape[0]:  # note: data.shape is (y, x)
            logger.info(
                'Direction %s is outside the primary beam and will not have a facet (it will still be a calibrator).'
                % direction.name)
        else:
            idx_for_facet.append(i)

    # convert to pixel space (Voronoi must be in euclidean space)
    x1 = 0
    y1 = 0
    x2 = data.shape[1]  # note that y is before x in fits.data
    y2 = data.shape[0]

    # do tessellation
    vor = Voronoi(
        np.array((x_fs[idx_for_facet], y_fs[idx_for_facet])).transpose())
    box = np.array([[x1, y1], [x2, y2]])
    impoly = voronoi_finite_polygons_2d_box(vor, box)

    # create fits mask (each region one number)
    x, y = np.meshgrid(np.arange(x2),
                       np.arange(y2))  # make a canvas with coordinates
    x, y = x.flatten(), y.flatten()
    pixels = np.vstack((x, y)).T
    data_facet = np.zeros(shape=data.shape)
    for num, poly in zip(nums, impoly):
        p = Path(poly)
        pixels_region = p.contains_points(pixels)
        data_facet[pixels_region.reshape(y2, x2)] = num

    # set every pixel of each masked island to the value of the closest facet
    struct = generate_binary_structure(2, 2)
    data = binary_dilation(data, structure=struct,
                           iterations=3).astype(data.dtype)  # expand masks
    blobs, number_of_blobs = label(data.astype(int).squeeze(),
                                   structure=[[1, 1, 1], [1, 1, 1], [1, 1, 1]])
    center_of_masses = center_of_mass(data, blobs,
                                      list(range(number_of_blobs + 1)))
    for blob in range(1, number_of_blobs + 1):
        # find the closest facet
        facet_num = closest_node(
            center_of_masses[blob],
            np.array([y_fs[idx_for_facet], x_fs[idx_for_facet]]).T)
        # set all pixels of that blob to that facet value
        data_facet[blobs == blob] = nums[facet_num]

    # save fits mask
    pyfits.writeto(out_mask, data_facet, hdr, overwrite=True)

    # save regions
    if not os.path.isdir(outdir_reg): os.makedirs(outdir_reg)

    all_s = []
    for i, poly in enumerate(impoly):
        ra, dec = w.all_pix2world(poly[:, 0], poly[:, 1], 0, ra_dec_order=True)
        coords = np.array([ra, dec]).T.flatten()

        s = Shape('Polygon', None)
        s.coord_format = 'fk5'
        s.coord_list = coords  # flattened list of ra,dec vertices
        s.attr = ([], {
            'width': '2',
            'point': 'cross',
            'font': '"helvetica 16 normal roman"'
        })
        s.comment = 'color=red'
        all_s.append(s)

        regions = pyregion.ShapeList([s])
        regionfile = outdir_reg + '/' + directions[
            idx_for_facet[i]].name + '.reg'
        regions.write(regionfile)

    # add names for all.reg
    for d in directions:
        s = Shape('circle', None)
        s.coord_format = 'fk5'
        s.coord_list = [d.position_cal[0], d.position_cal[1],
                        0.01]  # ra, dec, radius
        s.attr = ([], {
            'width': '1',
            'point': 'cross',
            'font': '"helvetica 16 normal roman"'
        })
        s.comment = 'color=white text="%s"' % d.name
        all_s.append(s)

    regions = pyregion.ShapeList(all_s)
    regionfile = outdir_reg + '/all.reg'
    regions.write(regionfile)
    logger.debug(
        'There are %i regions within the PB and %i outside (no facet).' %
        (len(idx_for_facet), len(directions) - len(idx_for_facet)))

    # plot tesselization
    if png is not None:
        import matplotlib.pyplot as pl
        pl.figure(figsize=(8, 8))
        ax1 = pl.gca()
        voronoi_plot_2d(vor,
                        ax1,
                        show_vertices=True,
                        line_colors='black',
                        line_width=2,
                        point_size=4)
        for i, d in enumerate(directions):
            ax1.text(x_fs[i], y_fs[i], d.name, fontsize=15)
        ax1.plot([x1, x1, x2, x2, x1], [y1, y2, y2, y1, y1])
        ax1.set_xlabel('RA (pixel)')
        ax1.set_ylabel('Dec (pixel)')
        ax1.set_xlim(x1, x2)
        ax1.set_ylim(y1, y2)
        logger.debug('Save plot: %s' % png)
        pl.savefig(png)
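closest_node() above is a standard nearest-neighbour lookup; in isolation, with made-up points:

    import numpy as np

    node = np.array([1.0, 1.0])
    nodes = np.array([[0.0, 0.0], [2.0, 1.0], [5.0, 5.0]])
    dist_2 = np.sum((nodes - node) ** 2, axis=1)  # squared distances
    print(np.argmin(dist_2))  # -> 1, the closest of the three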