Example #1
    def InitFromCatalog(self):

        FileCoords = self.FileCoords
        # dtype is declared for reference but unused in this variant:
        # np.load restores the dtype saved in the file itself
        dtype = [('Name', 'S200'), ("ra", np.float64), ("dec", np.float64),
                 ('Type', 'S200')]
        #FileCoords="Transient_LOTTS.csv"

        self.PosArray = np.load(FileCoords)
        self.PosArray = self.PosArray.view(np.recarray)

        self.NDirSelected = self.PosArray.shape[0]

        self.NDir = self.PosArray.shape[0]

        self.DicoDATA = shared_dict.create("DATA")
        self.DicoGrids = shared_dict.create("Grids")
        self.DicoGrids["GridSTD"] = np.zeros((self.na, self.NTimes),
                                             np.float64)

        self.DoJonesCorr_kMS = False
        self.DicoJones = None
        if self.SolsName:
            self.DoJonesCorr_kMS = True
            self.DicoJones_kMS = shared_dict.create("DicoJones_kMS")

        self.DoJonesCorr_Beam = False
        if self.BeamModel:
            self.DoJonesCorr_Beam = True
            self.DicoJones_Beam = shared_dict.create("DicoJones_Beam")

        APP.registerJobHandlers(self)
        AsyncProcessPool.init(ncpu=self.NCPU, affinity=0)
        APP.startWorkers()
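
All of these examples revolve around DDFacet's shared_dict module. As a reading aid, here is a minimal sketch of the shared pattern, using only calls that appear in the examples (create, item assignment, addSubdict, addSharedArray, reload, delete); the import path assumes a standard DDFacet installation:

    import numpy as np
    from DDFacet.Array import shared_dict  # assumed import path

    # Create a named dict backed by shared memory; worker processes can attach to it.
    D = shared_dict.create("Demo")
    D["Grid"] = np.zeros((4, 8), np.float64)         # array is copied into shared memory
    sub = D.addSubdict("per_direction")              # nested shared dict
    sub.addSharedArray("SumJJsq", (8,), np.float64)  # allocated directly in shared memory
    D.reload()                                       # pick up keys written by other processes
    D.delete()                                       # free the underlying shared segments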
Example #2
    def InitFromCatalog(self):

        FileCoords = self.FileCoords
        dtype = [('Name', 'S200'), ("ra", np.float64), ("dec", np.float64),
                 ('Type', 'S200')]
        #FileCoords="Transient_LOTTS.csv"

        self.PosArray = np.load(FileCoords)
        self.PosArray = self.PosArray.view(np.recarray)

        self.NDirSelected = self.PosArray.shape[0]

        self.NDir = self.PosArray.shape[0]

        self.DicoDATA = shared_dict.create("DATA")
        self.DicoGrids = shared_dict.create("Grids")

        dChan = np.min([self.StepFreq, self.NChan])
        self.DicoGrids["DomainEdges_Freq"] = np.int64(
            np.linspace(0, self.NChan,
                        int(self.NChan / dChan) + 1))
        #self.DicoGrids["DomainEdges_Time"] = np.int64(np.linspace(0,self.NTimes-1,int(self.NTimes/self.StepTime)+1))
        DT = self.times.max() - self.times.min()
        DTSol = np.min([self.StepTime * self.dt, DT])
        self.DicoGrids["DomainEdges_Time"] = np.linspace(
            self.times.min() - 1e-6,
            self.times.max() + 1e-6,
            int(DT / DTSol) + 1)

        self.DicoGrids["GridC2"] = np.zeros(
            (self.DicoGrids["DomainEdges_Time"].size - 1,
             self.DicoGrids["DomainEdges_Freq"].size - 1, self.na, self.na),
            np.complex128)
        self.DicoGrids["GridC"] = np.zeros(
            (self.DicoGrids["DomainEdges_Time"].size - 1,
             self.DicoGrids["DomainEdges_Freq"].size - 1, self.na, self.na),
            np.complex128)

        log.print("  DomainEdges_Freq: %s" %
                  (str(self.DicoGrids["DomainEdges_Freq"])))
        log.print("  DomainEdges_Time: %s" %
                  (str(self.DicoGrids["DomainEdges_Time"])))

        self.DoJonesCorr_kMS = False
        self.DicoJones = None
        if self.SolsName:
            self.DoJonesCorr_kMS = True
            self.DicoJones_kMS = shared_dict.create("DicoJones_kMS")

        self.DoJonesCorr_Beam = False
        if self.BeamModel:
            self.DoJonesCorr_Beam = True
            self.DicoJones_Beam = shared_dict.create("DicoJones_Beam")

        APP.registerJobHandlers(self)
        AsyncProcessPool.init(ncpu=self.NCPU, affinity=0)
        APP.startWorkers()
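
To make the frequency binning in Example #2 concrete, here is a worked run of the DomainEdges_Freq computation with illustrative values (NChan=16, StepFreq=5):

    import numpy as np

    NChan, StepFreq = 16, 5                  # illustrative values
    dChan = np.min([StepFreq, NChan])        # 5
    edges = np.int64(np.linspace(0, NChan, int(NChan / dChan) + 1))
    print(edges)                             # -> [ 0  5 10 16]

Because the evenly spaced edges are truncated to integers, the last frequency block can end up slightly wider than StepFreq (here 6 channels instead of 5).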
Example #3
    def startWorkers(self):
        """Starts worker threads. All job handlers and events must be registered *BEFORE* calling this."""
        self._shared_state = shared_dict.create("APP")
        self._job_counters.finalize(self._shared_state)
        if self.ncpu > 1:
            self._taras_bulba.start()
        self._started = True
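
Both InitFromCatalog examples above finish with the same three calls, and startWorkers shows why the order matters: handlers must be registered before the workers fork. A condensed lifecycle sketch, assuming the DDFacet AsyncProcessPool API used throughout these examples (the handler class, job names, and import paths are illustrative assumptions):

    import numpy as np
    from DDFacet.Array import shared_dict
    from DDFacet.Other import AsyncProcessPool
    from DDFacet.Other.AsyncProcessPool import APP  # assumed import paths

    class Squarer(object):
        def _worker(self, subdict, i):
            subdict["result"] = np.float64(i * i)   # runs inside a worker process

    sq = Squarer()
    APP.registerJobHandlers(sq)                # 1. register handlers first
    AsyncProcessPool.init(ncpu=4, affinity=0)  # 2. configure the pool
    APP.startWorkers()                         # 3. only then start the workers
    D = shared_dict.create("SquareResults")
    sub = D.addSubdict(0)
    APP.runJob("sq:0", sq._worker, args=(sub.writeonly(), 0))
    APP.awaitJobResults("sq:*")                # block until matching jobs finish
    D.reload()                                 # re-attach results written by workers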
Example #4
    def computeSmearMappingInBackground(self, base_job_id, MS, DATA, radiusDeg, Decorr, channel_mapping, mode):
        l = radiusDeg * np.pi / 180   # field radius in radians
        dPhi = np.sqrt(6. * (1. - Decorr))
        # create new empty shared dicts for results
        self._outdict = shared_dict.create("%s:%s:tmp" % (DATA.path, self.name))
        blockdict = self._outdict.addSubdict("blocks")
        sizedict = self._outdict.addSubdict("sizes")
        self._nbl = 0
        for a0 in range(MS.na):
            for a1 in range(MS.na):
                if a0 != a1:
                    self._nbl += 1
                    APP.runJob("%s:%s:%d:%d" % (base_job_id, self.name, a0, a1), self._smearmapping_worker,
                               counter=self._job_counter, collect_result=False,
                               args=(DATA.readonly(), blockdict.writeonly(), sizedict.writeonly(), a0, a1, dPhi, l,
                                     channel_mapping, mode))
Example #5
    def __init__(self, VS):
        logger.setSilent(["ClassJones", "ClassLOFARBeam"])
        self.VS = VS

        self.GD = copy.deepcopy(self.VS.GD)
        self.DoCentralNorm = self.GD["Beam"]["CenterNorm"]
        self.SmoothBeam = None
        self.CheckCache()
        if self.CacheValid:
            logger.setLoud(["ClassJones", "ClassLOFARBeam"])
            return

        #self.GD["Beam"]["CenterNorm"]=0

        self.ListMS = self.VS.ListMS
        self.MS = self.ListMS[0]
        rac, decc = self.MS.radec
        self.CoordMachine = ModCoord.ClassCoordConv(rac, decc)
        self.CalcGrid()
        #self.Padding=Padding

        #self.SumJJsq=np.zeros((self.npix,self.npix,self.MS.Nchan),np.float64)
        #self.SumWsq=np.zeros((1,self.MS.Nchan),np.float64)

        self.StackedBeamDict = shared_dict.create("StackedBeamDict")
        for iDir in range(self.NDir):
            sd = self.StackedBeamDict.addSubdict(iDir)
            sd.addSharedArray("SumJJsq", (self.VS.NFreqBands, ), np.float64)
            sd.addSharedArray("SumWsq", (self.VS.NFreqBands, ), np.float64)

        self.DicoJonesMachine = {}
        for iMS, MS in enumerate(self.ListMS):
            JonesMachine = ClassJones.ClassJones(self.GD, MS,
                                                 self.VS.FacetMachine)
            JonesMachine.InitBeamMachine()
            self.DicoJonesMachine[iMS] = JonesMachine
        logger.setLoud(["ClassJones", "ClassLOFARBeam"])
Example #6
    def DeconvListIsland(self,
                         ListIslands,
                         ParallelMode="OverIsland",
                         ListInitIslands=None):
        # ================== Parallel part

        NIslands = len(ListIslands)
        if NIslands == 0: return
        if ParallelMode == "OverIslands":
            NCPU = self.NCPU
            NCPU = np.min([NCPU, NIslands])
            Parallel = True
            ParallelPerIsland = False
            StopWhenQueueEmpty = True
        elif ParallelMode == "PerIsland":
            NCPU = 1  #self.NCPU
            Parallel = True
            ParallelPerIsland = True
            StopWhenQueueEmpty = True

        # ######### Debug
        # ParallelPerIsland=False
        # Parallel=False
        # NCPU=1
        # StopWhenQueueEmpty=True
        # ##################

        work_queue = multiprocessing.Queue()

        # shared dict to hold inputs and outputs to workers (each island number is a key)
        deconv_dict = shared_dict.create("DeconvListIslands")

        NJobs = NIslands
        T = ClassTimeIt.ClassTimeIt("    ")
        T.disable()
        for iIsland, ThisPixList in enumerate(ListIslands):
            island_dict = deconv_dict.addSubdict(iIsland)

            # print "%i/%i"%(iIsland,self.NIslands)
            island_dict["Island"] = np.array(ThisPixList)

            XY = np.array(ThisPixList, dtype=np.float32)
            xm, ym = np.mean(np.float32(XY), axis=0).astype(int)
            T.timeit("xm,ym")
            nchan, npol, _, _ = self._Dirty.shape
            JonesNorm = (self.DicoDirty["JonesNorm"][:, :, xm, ym]).reshape(
                (nchan, npol, 1, 1))
            W = self.DicoDirty["WeightChansImages"]
            JonesNorm = np.sum(JonesNorm * W.reshape((nchan, 1, 1, 1)),
                               axis=0).reshape((1, npol, 1, 1))
            T.timeit("JonesNorm")

            IslandBestIndiv = self.ModelMachine.GiveIndividual(ThisPixList)
            T.timeit("GiveIndividual")
            FacetID = self.PSFServer.giveFacetID2(xm, ym)
            T.timeit("FacetID")

            island_dict["BestIndiv"] = IslandBestIndiv

            ListOrder = [
                iIsland, FacetID, JonesNorm.flat[0], self.RMS**2,
                island_dict.path
            ]

            work_queue.put(ListOrder)
            T.timeit("Put")

        # ListArrayIslands=[np.array(ListIslands[iIsland]) for iIsland in range(NIslands)]
        # NpShared.PackListArray(SharedListIsland,ListArrayIslands)
        # T.timeit("Pack0")
        # SharedBestIndiv="%s.ListBestIndiv"%(self.IdSharedMem)
        # NpShared.PackListArray(SharedBestIndiv,ListBestIndiv)
        # T.timeit("Pack1")

        workerlist = []

        # List_Result_queue=[]
        # for ii in range(NCPU):
        #     List_Result_queue.append(multiprocessing.JoinableQueue())

        result_queue = multiprocessing.Queue()
        Title = " Evolve pop."
        if self.DeconvMode == "MetroClean":
            Title = " Running chain"

        pBAR = ProgressBar(Title=Title)
        #pBAR.disable()
        pBAR.render(0, NJobs)
        for ii in range(NCPU):
            W = WorkerDeconvIsland(work_queue,
                                   result_queue,
                                   self.GD,
                                   self._Dirty,
                                   self.DicoVariablePSF["CubeVariablePSF"],
                                   IdSharedMem=self.IdSharedMem,
                                   FreqsInfo=self.PSFServer.DicoMappingDesc,
                                   ParallelPerIsland=ParallelPerIsland,
                                   StopWhenQueueEmpty=StopWhenQueueEmpty,
                                   DeconvMode=self.DeconvMode,
                                   NChains=self.NChains,
                                   ListInitIslands=ListInitIslands)
            workerlist.append(W)

            if Parallel:
                workerlist[ii].start()
            else:
                workerlist[ii].run()

        iResult = 0
        #print "!!!!!!!!!!!!!!!!!!!!!!!!",iResult,NJobs
        while iResult < NJobs:
            DicoResult = None
            # for result_queue in List_Result_queue:
            #     if result_queue.qsize()!=0:
            #         try:
            #             DicoResult=result_queue.get_nowait()

            #             break
            #         except:

            #             pass
            #         #DicoResult=result_queue.get()
            #print "!!!!!!!!!!!!!!!!!!!!!!!!! Qsize",result_queue.qsize()
            if result_queue.qsize() != 0:
                try:
                    DicoResult = result_queue.get_nowait()
                except Exception:
                    pass
                    #DicoResult=result_queue.get()

            if DicoResult is None:
                time.sleep(0.05)
                continue

            iResult += 1
            NDone = iResult
            intPercent = int(100 * NDone / float(NJobs))
            pBAR.render(NDone, NJobs)

            if DicoResult["Success"]:
                iIsland = DicoResult["iIsland"]
                island_dict = deconv_dict[iIsland]
                island_dict.reload()

                self.ModelMachine.AppendIsland(ListIslands[iIsland],
                                               island_dict["Model"].copy())

                if DicoResult["HasError"]:
                    self.ErrorModelMachine.AppendIsland(
                        ThisPixList, ListIslands[iIsland],
                        island_dict["sModel"].copy())

        deconv_dict.delete()

        for ii in range(NCPU):
            try:
                workerlist[ii].shutdown()
                workerlist[ii].terminate()
                workerlist[ii].join()
            except Exception:
                pass
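
Unlike the APP-based examples, Example #6 drives its own pool of WorkerDeconvIsland processes through a work queue and a result queue. Stripped of the deconvolution machinery, the control flow reduces to the standalone sketch below; it swaps the qsize() polling loop for blocking gets plus per-worker sentinels, and all names are illustrative:

    import multiprocessing

    def worker(work_queue, result_queue):
        while True:
            iJob = work_queue.get()          # blocks until a job (or sentinel) arrives
            if iJob is None:                 # sentinel: queue is drained, exit
                return
            result_queue.put({"Success": True, "iJob": iJob})

    if __name__ == "__main__":
        work_queue, result_queue = multiprocessing.Queue(), multiprocessing.Queue()
        NJobs, NCPU = 8, 2
        for i in range(NJobs):
            work_queue.put(i)                # enqueue all jobs up front
        for _ in range(NCPU):
            work_queue.put(None)             # one sentinel per worker
        workers = [multiprocessing.Process(target=worker,
                                           args=(work_queue, result_queue))
                   for _ in range(NCPU)]
        for w in workers:
            w.start()
        for _ in range(NJobs):
            DicoResult = result_queue.get()  # collect exactly NJobs results
            assert DicoResult["Success"]
        for w in workers:
            w.join()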
Example #7
    def giveDicoInitIndiv(self,
                          ListIslands,
                          ModelImage,
                          DicoDirty,
                          ListDoIsland=None):
        DicoInitIndiv = shared_dict.create("DicoInitIsland")
        ParmDict = shared_dict.create("InitSSDModelHMP")
        ParmDict["ModelImage"] = ModelImage
        ParmDict["GridFreqs"] = self.GridFreqs
        ParmDict["DegridFreqs"] = self.DegridFreqs

        #         ListBigIslands=[]
        #         ListSmallIslands=[]
        #         ListDoBigIsland=[]
        #         ListDoSmallIsland=[]
        #         NParallel=0
        #         for iIsland,Island in enumerate(ListIslands):
        #             if len(Island)>self.GD["SSDClean"]["ConvFFTSwitch"]:
        #                 ListBigIslands.append(Island)
        #                 ListDoBigIsland.append(ListDoIsland[iIsland])
        #                 if ListDoIsland or ListDoIsland[iIsland]:
        #                     NParallel+=1
        #             else:
        #                 ListSmallIslands.append(Island)
        #                 ListDoSmallIsland.append(ListDoIsland[iIsland])
        #         print>>log,"Initialise big islands (parallelised per island)"
        #         pBAR= ProgressBar(Title="Init islands")
        #         pBAR.render(0, NParallel)
        #         nDone=0
        #         for iIsland,Island in enumerate(ListBigIslands):
        #             if not ListDoIsland or ListDoBigIsland[iIsland]:
        #                 subdict = DicoInitIndiv.addSubdict(iIsland)
        #                 # APP.runJob("InitIsland:%d" % iIsland, self._initIsland_worker,
        #                 #            args=(subdict.writeonly(), iIsland, Island,
        #                 #                  self.DicoVariablePSF.readonly(), DicoDirty.readonly(),
        #                 #                  ParmDict.readonly(), self.InitMachine.DeconvMachine.facetcache.readonly(),self.NCPU),serial=True)
        #                 self._initIsland_worker(subdict, iIsland, Island,
        #                                         self.DicoVariablePSF, DicoDirty,
        #                                         ParmDict, self.InitMachine.DeconvMachine.facetcache,
        #                                         self.NCPU)
        #                 pBAR.render(nDone+1, NParallel)
        #                 nDone+=1
        # #        APP.awaitJobResults("InitIsland:*", progress="Init islands")
        #         print>>log,"Initialise small islands (parallelised over islands)"
        #         for iIsland,Island in enumerate(ListSmallIslands):
        #             if not ListDoIsland or ListDoSmallIsland[iIsland]:
        #                 subdict = DicoInitIndiv.addSubdict(iIsland)
        #                 APP.runJob("InitIsland:%d" % iIsland, self._initIsland_worker,
        #                            args=(subdict.writeonly(), iIsland, Island,
        #                                  self.DicoVariablePSF.readonly(), DicoDirty.readonly(),
        #                                  ParmDict.readonly(), self.InitMachine.DeconvMachine.facetcache.readonly(),1))
        #         APP.awaitJobResults("InitIsland:*", progress="Init islands")
        #         DicoInitIndiv.reload()

        print >> log, "Initialise islands (parallelised over islands)"
        if not self.GD["GAClean"]["ParallelInitHMP"]:
            pBAR = ProgressBar(Title="  Init islands")
            for iIsland, Island in enumerate(ListIslands):
                if not ListDoIsland or ListDoIsland[iIsland]:
                    subdict = DicoInitIndiv.addSubdict(iIsland)
                    self._initIsland_worker(
                        subdict, iIsland, Island, self.DicoVariablePSF,
                        DicoDirty, ParmDict,
                        self.InitMachine.DeconvMachine.facetcache, 1)
                pBAR.render(iIsland, len(ListIslands))
        else:
            for iIsland, Island in enumerate(ListIslands):
                if not ListDoIsland or ListDoIsland[iIsland]:
                    subdict = DicoInitIndiv.addSubdict(iIsland)
                    APP.runJob("InitIsland:%d" % iIsland,
                               self._initIsland_worker,
                               args=(subdict.writeonly(), iIsland, Island,
                                     self.DicoVariablePSF.readonly(),
                                     DicoDirty.readonly(), ParmDict.readonly(),
                                     self.InitMachine.DeconvMachine.facetcache.
                                     readonly(), 1))
            APP.awaitJobResults("InitIsland:*", progress="Init islands")
            DicoInitIndiv.reload()

        ParmDict.delete()

        return DicoInitIndiv
Example #8
    def InitMSMF(self, approx=False, cache=True, facetcache=None):
        """Initializes MSMF basis functions. If approx is True, then uses the central facet's PSF for
        all facets.
        Populates the self.facetcache dict, unless facetcache is supplied
        """
        self.DicoMSMachine = {}
        valid = True
        if facetcache is not None:
            print>> log, "HMP basis functions pre-initialized"
            self.facetcache = facetcache
        else:
            cachehash = dict(
                [(section, self.GD[section]) for section in (
                    "Data", "Beam", "Selection", "Freq",
                    "Image", "Facets", "Weight", "RIME","DDESolutions",
                    "Comp", "CF",
                    "HMP")])
            cachepath, valid = self.maincache.checkCache(self.CacheFileName, cachehash, reset=not cache or self.PSFHasChanged)
            # do not use cache in approx mode
            if approx or not cache:
                valid = False
            if valid:
                print>>log,"Initialising HMP basis functions from cache %s"%cachepath
                self.facetcache = shared_dict.create(self.CacheFileName)
                self.facetcache.restore(cachepath)
            else:
                self.facetcache = None


        init_cache = self.facetcache is None
        if init_cache:
            self.facetcache = shared_dict.create(self.CacheFileName)

        # in any mode, start by initializing a MS machine for the central facet. This will precompute the scale
        # functions
        centralFacet = self.PSFServer.DicoVariablePSF["CentralFacet"]

        self.DicoMSMachine[centralFacet] = MSM0 = \
            self._initMSM_facet(centralFacet,
                                self.facetcache.addSubdict(centralFacet) if init_cache else self.facetcache[centralFacet],
                                None, self.SideLobeLevel, self.OffsetSideLobe, verbose=True)
        if approx:
            print>>log, "HMP approximation mode: using PSF of central facet (%d)" % centralFacet
            for iFacet in xrange(self.PSFServer.NFacets):
                self.DicoMSMachine[iFacet] = MSM0
        elif (self.GD["Facets"]["NFacets"]==1)&(not self.GD["DDESolutions"]["DDSols"]):
            self.DicoMSMachine[0] = MSM0
            
        else:
            # if no facet cache, init in parallel
            if init_cache:
                for iFacet in range(self.PSFServer.NFacets):
                    if iFacet != centralFacet:
                        fcdict = self.facetcache.addSubdict(iFacet)
                        if self.ParallelMode:
                            args=(fcdict.writeonly(), MSM0.ScaleFuncs.readonly(), self.DicoVariablePSF.readonly(),
                                  iFacet, self.SideLobeLevel, self.OffsetSideLobe, False)
                            APP.runJob("InitHMP:%d"%iFacet, self._initMSM_handler,
                                       args=args)
                        else:
                            self.DicoMSMachine[iFacet] = \
                                self._initMSM_facet(iFacet, fcdict, None,
                                                    self.SideLobeLevel, self.OffsetSideLobe, MSM0=MSM0, verbose=False)

                if self.ParallelMode:
                    APP.awaitJobResults("InitHMP:*", progress="Init HMP")
                    self.facetcache.reload()

            #        t = ClassTimeIt.ClassTimeIt()
            # now reinit from cache (since cache was computed by subprocesses)
            for iFacet in range(self.PSFServer.NFacets):
                if iFacet not in self.DicoMSMachine:
                    self.DicoMSMachine[iFacet] = \
                        self._initMSM_facet(iFacet, self.facetcache[iFacet], None,
                                            self.SideLobeLevel, self.OffsetSideLobe, MSM0=MSM0, verbose=False)

            # write cache to disk, unless in a mode where we explicitly don't want it
            if facetcache is None and not valid and cache and not approx:
                try:
                    #MyPickle.DicoNPToFile(facetcache,cachepath)
                    #cPickle.dump(facetcache, file(cachepath, 'w'), 2)
                    print>>log,"  saving HMP cache to %s"%cachepath
                    self.facetcache.save(cachepath)
                    #self.maincache.saveCache("HMPMachine")
                    self.maincache.saveCache(self.CacheFileName)
                    self.PSFHasChanged=False
                    print>>log,"  HMP init done"
                except:
                    print>>log, traceback.format_exc()
                    print >>log, ModColor.Str(
                        "WARNING: HMP cache could not be written, see error report above. Proceeding anyway.")
Example #9
    def InitFromCatalog(self):

        FileCoords = self.FileCoords
        dtype = [('Name', 'S200'), ("ra", np.float64), ("dec", np.float64),
                 ('Type', 'S200')]
        # should we use the surveys DB?
        if 'DDF_PIPELINE_DATABASE' in os.environ:
            print("Using the surveys database", file=log)
            from surveys_db import SurveysDB
            with SurveysDB() as sdb:
                sdb.cur.execute('select * from transients')
                result = sdb.cur.fetchall()
            # convert to a list, then to ndarray, then to recarray
            l = []
            for r in result:
                l.append((r['id'], r['ra'], r['decl'], r['type']))
            if FileCoords is not None:
                print('Adding data from file ' + FileCoords, file=log)
                additional = np.genfromtxt(FileCoords,
                                           dtype=dtype,
                                           delimiter=",")[()]
                if not additional.shape:
                    # deal with a one-line input file
                    additional = np.array([additional], dtype=dtype)
                for r in additional:
                    l.append(tuple(r))
            self.PosArray = np.asarray(l, dtype=dtype)
            print("Created an array with %i records" % len(result), file=log)

        else:

            #FileCoords="Transient_LOTTS.csv"
            if FileCoords is None:
                FileCoords = "Transient_LOTTS.csv"
                if not os.path.isfile(FileCoords):
                    ssExec = "wget -q --user=anonymous ftp://ftp.strw.leidenuniv.nl/pub/tasse/%s -O %s" % (
                        FileCoords, FileCoords)
                    print("Downloading %s" % FileCoords, file=log)
                    print("   Executing: %s" % ssExec, file=log)
                    os.system(ssExec)
            log.print("Reading cvs file: %s" % FileCoords)
            #self.PosArray=np.genfromtxt(FileCoords,dtype=dtype,delimiter=",")[()]
            self.PosArray = np.genfromtxt(FileCoords,
                                          dtype=dtype,
                                          delimiter=",")

        self.PosArray = self.PosArray.view(np.recarray)
        self.PosArray.ra *= np.pi / 180.
        self.PosArray.dec *= np.pi / 180.
        Radius = self.Radius
        NOrig = self.PosArray.Name.shape[0]
        Dist = AngDist(self.ra0, self.PosArray.ra, self.dec0,
                       self.PosArray.dec)
        ind = np.where(Dist < (Radius * np.pi / 180))[0]
        self.PosArray = self.PosArray[ind]
        self.NDirSelected = self.PosArray.shape[0]

        print("Selected %i target [out of the %i in the original list]" %
              (self.NDirSelected, NOrig),
              file=log)
        if self.NDirSelected == 0:
            print(ModColor.Str("   Have found no sources - returning"),
                  file=log)
            self.killWorkers()
            return

        NOff = self.NOff

        if NOff == -1:
            NOff = self.PosArray.shape[0] * 2
        if NOff is not None:
            print("Including %i off targets" % (NOff), file=log)
            self.PosArray = np.concatenate(
                [self.PosArray, self.GiveOffPosArray(NOff)])
            self.PosArray = self.PosArray.view(np.recarray)
        self.NDir = self.PosArray.shape[0]
        print("For a total of %i targets" % (self.NDir), file=log)

        self.DicoDATA = shared_dict.create("DATA")
        self.DicoGrids = shared_dict.create("Grids")
        self.DicoGrids["GridLinPol"] = np.zeros(
            (self.NDir, self.NChan, self.NTimes, 4), np.complex128)
        self.DicoGrids["GridWeight"] = np.zeros(
            (self.NDir, self.NChan, self.NTimes, 4), np.complex128)

        self.DoJonesCorr_kMS = False
        self.DicoJones = None
        if self.SolsName:
            self.DoJonesCorr_kMS = True
            self.DicoJones_kMS = shared_dict.create("DicoJones_kMS")

        self.DoJonesCorr_Beam = False
        if self.BeamModel:
            self.DoJonesCorr_Beam = True
            self.DicoJones_Beam = shared_dict.create("DicoJones_Beam")

        APP.registerJobHandlers(self)
        AsyncProcessPool.init(ncpu=self.NCPU, affinity=0)
        APP.startWorkers()
Example #10
    def InitMSMF(self, approx=False, cache=True, facetcache=None):
        """Initializes MSMF basis functions. If approx is True, then uses the central facet's PSF for
        all facets.
        Populates the self.facetcache dict, unless facetcache is supplied
        """
        self.DicoMSMachine = {}
        valid = True
        if facetcache is not None:
            print >> log, "HMP basis functions pre-initialized"
            self.facetcache = facetcache
        else:
            cachehash = dict([
                (section, self.GD[section])
                for section in ("Data", "Beam", "Selection", "Freq", "Image",
                                "Facets", "Weight", "RIME", "Comp", "CF",
                                "HMP")
            ])
            cachepath, valid = self.maincache.checkCache(self.CacheFileName,
                                                         cachehash,
                                                         reset=not cache
                                                         or self.PSFHasChanged)
            # do not use cache in approx mode
            if approx or not cache:
                valid = False
            if valid:
                print >> log, "Initialising HMP basis functions from cache %s" % cachepath
                self.facetcache = shared_dict.create(self.CacheFileName)
                self.facetcache.restore(cachepath)
            else:
                self.facetcache = None

        centralFacet = self.PSFServer.DicoVariablePSF["CentralFacet"]
        if approx:
            print >> log, "HMP approximation mode: using PSF of central facet (%d)" % centralFacet
            self.PSFServer.setFacet(centralFacet)
            MSMachine = ClassMultiScaleMachine.ClassMultiScaleMachine(
                self.GD,
                self.facetcache.addSubdict(0),
                self.GainMachine,
                NFreqBands=self.NFreqBands)
            MSMachine.setModelMachine(self.ModelMachine)
            MSMachine.setSideLobeLevel(self.SideLobeLevel, self.OffsetSideLobe)
            MSMachine.SetFacet(centralFacet)
            MSMachine.SetPSF(self.PSFServer)  # ThisPSF,ThisMeanPSF)
            MSMachine.FindPSFExtent(verbose=True)
            MSMachine.MakeMultiScaleCube(verbose=True)
            MSMachine.MakeBasisMatrix()
            for iFacet in range(self.PSFServer.NFacets):
                self.DicoMSMachine[iFacet] = MSMachine
        else:
            # if no facet cache, init in parallel
            if self.facetcache is None:
                self.facetcache = shared_dict.create(self.CacheFileName)
                # breakout = False
                for iFacet in range(self.PSFServer.NFacets):
                    fcdict = self.facetcache.addSubdict(iFacet)
                    if self.ParallelMode:
                        args = (fcdict.writeonly(),
                                self.DicoVariablePSF.readonly(), iFacet,
                                self.SideLobeLevel, self.OffsetSideLobe,
                                centralFacet)
                        APP.runJob("InitHMP:%d" % iFacet,
                                   self._initMSM_handler,
                                   args=args)
                    else:
                        args = (fcdict, self.DicoVariablePSF, iFacet,
                                self.SideLobeLevel, self.OffsetSideLobe,
                                centralFacet)
                        self._initMSM_handler(*args)
                        # import pdb;
                        # pdb.set_trace()
                        # if breakout:
                        #     raise RuntimeError("exiting")

                if self.ParallelMode:
                    APP.awaitJobResults("InitHMP:*", progress="Init HMP")

                self.facetcache.reload()
            #        t = ClassTimeIt.ClassTimeIt()
            for iFacet in range(self.PSFServer.NFacets):
                self.PSFServer.setFacet(iFacet)
                MSMachine = ClassMultiScaleMachine.ClassMultiScaleMachine(
                    self.GD,
                    self.facetcache[iFacet],
                    self.GainMachine,
                    NFreqBands=self.NFreqBands)
                MSMachine.setModelMachine(self.ModelMachine)
                MSMachine.setSideLobeLevel(self.SideLobeLevel,
                                           self.OffsetSideLobe)
                MSMachine.SetFacet(iFacet)
                MSMachine.SetPSF(self.PSFServer)  # ThisPSF,ThisMeanPSF)
                MSMachine.FindPSFExtent(
                    verbose=(iFacet == centralFacet
                             ))  # only print to log for central facet
                MSMachine.MakeMultiScaleCube(verbose=(iFacet == centralFacet))
                MSMachine.MakeBasisMatrix()
                self.DicoMSMachine[iFacet] = MSMachine

            # write cache to disk, unless in a mode where we explicitly don't want it
            if facetcache is None and not valid and cache and not approx:
                try:
                    #MyPickle.DicoNPToFile(facetcache,cachepath)
                    #cPickle.dump(facetcache, file(cachepath, 'w'), 2)
                    print >> log, "  saving HMP cache to %s" % cachepath
                    self.facetcache.save(cachepath)
                    #self.maincache.saveCache("HMPMachine")
                    self.maincache.saveCache(self.CacheFileName)
                    self.PSFHasChanged = False
                    print >> log, "  HMP init done"
                except:
                    print >> log, traceback.format_exc()
                    print >> log, ModColor.Str(
                        "WARNING: HMP cache could not be written, see error report above. Proceeding anyway."
                    )
Example #11
    def __init__(self,
                 ListMSName,
                 ColName="DATA",
                 ModelName="PREDICT_KMS",
                 UVRange=[1., 1000.],
                 SolsName=None,
                 FileCoords=None,
                 Radius=3.,
                 NOff=-1,
                 Image=None,
                 SolsDir=None,
                 NCPU=1):
        self.ListMSName = sorted(ListMSName)  #[0:2]
        self.nMS = len(self.ListMSName)
        self.ColName = ColName
        self.ModelName = ModelName
        self.OutName = self.ListMSName[0].split("/")[-1].split("_")[0]
        self.UVRange = UVRange
        self.ReadMSInfos()
        self.Radius = Radius
        self.Image = Image
        self.SolsDir = SolsDir
        #self.PosArray=np.genfromtxt(FileCoords,dtype=[('Name','S200'),("ra",np.float64),("dec",np.float64),('Type','S200')],delimiter="\t")

        # identify version in logs
        print >> log, "DynSpecMS version %s starting up" % version()

        # should we use the surveys DB?
        if 'DDF_PIPELINE_DATABASE' in os.environ:
            print >> log, "Using the surveys database"
            from surveys_db import SurveysDB
            with SurveysDB() as sdb:
                sdb.cur.execute('select * from transients')
                result = sdb.cur.fetchall()
            # convert to a list, then to ndarray, then to recarray
            l = []
            for r in result:
                l.append((r['id'], r['ra'], r['decl'], r['type']))
            self.PosArray = np.asarray(l,
                                       dtype=[('Name', 'S200'),
                                              ("ra", np.float64),
                                              ("dec", np.float64),
                                              ('Type', 'S200')])
            print >> log, "Created an array with %i records" % len(result)
        else:
            if FileCoords is None:
                FileCoords = "Transient_LOTTS.csv"
                if not os.path.isfile(FileCoords):
                    ssExec = "wget -q --user=anonymous ftp://ftp.strw.leidenuniv.nl/pub/tasse/%s -O %s" % (
                        FileCoords, FileCoords)
                    print >> log, "Downloading %s" % FileCoords
                    print >> log, "   Executing: %s" % ssExec
                    os.system(ssExec)
            self.PosArray = np.genfromtxt(FileCoords,
                                          dtype=[('Name', 'S200'),
                                                 ("ra", np.float64),
                                                 ("dec", np.float64),
                                                 ('Type', 'S200')],
                                          delimiter=",")[()]
        self.PosArray = self.PosArray.view(np.recarray)
        self.PosArray.ra *= np.pi / 180.
        self.PosArray.dec *= np.pi / 180.

        NOrig = self.PosArray.shape[0]
        Dist = AngDist(self.ra0, self.PosArray.ra, self.dec0,
                       self.PosArray.dec)
        ind = np.where(Dist < Radius * np.pi / 180)[0]
        self.PosArray = self.PosArray[ind]
        self.NDirSelected = self.PosArray.shape[0]

        print >> log, "Selected %i target [out of the %i in the original list]" % (
            self.NDirSelected, NOrig)
        if self.NDirSelected == 0:
            print >> log, ModColor.Str("   Have found no sources - returning")
            self.killWorkers()
            return

        if NOff == -1:
            NOff = self.PosArray.shape[0] * 2
        if NOff is not None:
            print >> log, "Including %i off targets" % (NOff)
            self.PosArray = np.concatenate(
                [self.PosArray, self.GiveOffPosArray(NOff)])
            self.PosArray = self.PosArray.view(np.recarray)
        self.NDir = self.PosArray.shape[0]
        print >> log, "For a total of %i targets" % (self.NDir)

        self.DicoDATA = shared_dict.create("DATA")
        self.DicoGrids = shared_dict.create("Grids")
        self.DicoGrids["GridLinPol"] = np.zeros(
            (self.NDir, self.NChan, self.NTimes, 4), np.complex128)
        self.DicoGrids["GridWeight"] = np.zeros(
            (self.NDir, self.NChan, self.NTimes, 4), np.complex128)

        self.SolsName = SolsName
        self.DoJonesCorr = False
        if self.SolsName:
            self.DoJonesCorr = True
            self.DicoJones = shared_dict.create("DicoJones")

        APP.registerJobHandlers(self)
        AsyncProcessPool.init(ncpu=NCPU, affinity=0)
        APP.startWorkers()
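
The target selection in Examples #9 and #11 is a plain angular cut on a recarray. A tiny self-contained illustration of the same cut, using a flat-sky stand-in for the AngDist helper (all values illustrative):

    import numpy as np

    dtype = [('Name', 'S200'), ("ra", np.float64), ("dec", np.float64), ('Type', 'S200')]
    Pos = np.array([("near", 0.01, 0.00, "P"),
                    ("far", 0.30, 0.10, "P")], dtype=dtype).view(np.recarray)
    ra0, dec0, Radius = 0., 0., 3.                    # phase centre (rad) and radius (deg)
    Dist = np.hypot(Pos.ra - ra0, Pos.dec - dec0)     # flat-sky stand-in for AngDist
    ind = np.where(Dist < (Radius * np.pi / 180))[0]  # keep targets within Radius
    Pos = Pos[ind]
    print(Pos.Name)                                   # -> [b'near']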