Example #1
    def get(self, singleSnapshot=False):
        """
        *generate the pyephem positions*

        **Key Arguments:**
            -  ``singleSnapshot`` -- just extract positions for a single pyephem snapshot (used for unit testing)

        **Return:**
            - ``None``
        """
        self.log.info('starting the ``get`` method')

        global xephemOE
        global tileSide
        global magLimit

        # GRAB PARAMETERS FROM SETTINGS FILE
        tileSide = float(self.settings["pyephem"]["atlas exposure match side"])
        magLimit = float(self.settings["pyephem"]["magnitude limit"])

        snapshotsRequired = 1
        while snapshotsRequired > 0:
            nextMjds, exposures, snapshotsRequired = self._get_exposures_requiring_pyephem_positions(
                concurrentSnapshots=int(self.settings["pyephem"]
                                        ["batch size"]))
            print "There are currently %(snapshotsRequired)s more pyephem snapshots required " % locals(
            )
            if snapshotsRequired == 0:
                return

            if len(xephemOE) == 0:
                xephemOE = self._get_xephem_orbital_elements()

            magLimit = float(self.settings["pyephem"]["magnitude limit"])
            pyephemDB = fmultiprocess(log=self.log,
                                      function=_generate_pyephem_snapshot,
                                      timeout=300,
                                      inputArray=nextMjds,
                                      magLimit=magLimit)

            matchedObjects = []
            for p, e, m in zip(pyephemDB, exposures, nextMjds):
                matchedObjects.append(
                    self._match_pyephem_snapshot_to_atlas_exposures(p, e, m))

            self._add_matched_objects_to_database(matchedObjects)
            self._update_database_flag(exposures)

            if singleSnapshot:
                snapshotsRequired = 0

        self.log.info('completed the ``get`` method')
        return None
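From the call above, `fmultiprocess` appears to map `_generate_pyephem_snapshot` over `nextMjds` and forward the extra `magLimit` keyword to every call. A minimal sketch of the implied worker contract (the body is hypothetical; only the signature follows from the call site):

def _generate_pyephem_snapshot(mjd, magLimit=None):
    """Return a snapshot of pyephem positions for a single MJD.

    `mjd` is one element of inputArray; `magLimit` arrives as the forwarded
    keyword. The worker reads the module-level `xephemOE` global, which
    forked worker processes inherit from the parent.
    """
    snapshot = {"mjd": mjd, "positions": []}
    # ... compute a position for every orbital-element set brighter than
    # magLimit and append it to snapshot["positions"] ...
    return snapshot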
Example #2
    def test_multiprocess_function_exception(self):

        from fundamentals import fmultiprocess
        try:
            this = fmultiprocess(log=log,
                                 settings=settings,
                                 fakeKey="break the code")
            this.get()
            assert False
        except Exception as e:
            assert True
            print(str(e))
Example #3
    def test_multiprocess_function_exception(self):

        from fundamentals import fmultiprocess
        try:
            this = fmultiprocess(
                log=log,
                settings=settings,
                fakeKey="break the code"
            )
            this.get()
            assert False
        except Exception as e:
            assert True
            print(str(e))
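Both versions assert inside a bare try/except; the same check reads more directly with `pytest.raises` (a sketch, assuming pytest is available to the test suite):

    def test_multiprocess_function_exception(self):

        import pytest
        from fundamentals import fmultiprocess
        # AN UNEXPECTED KEYWORD SHOULD PROPAGATE OUT AS AN EXCEPTION
        with pytest.raises(Exception):
            this = fmultiprocess(log=log,
                                 settings=settings,
                                 fakeKey="break the code")
            this.get()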
Example #4
    def get(self):
        """
        *match the orbfit predicted positions of known asteroids against the positions recorded in the local cache of dophot files*

        **Return:**
            - None

        **Usage:**

            See class docstring
        """
        self.log.info('starting the ``get`` method')

        cachePath = self.settings["atlas archive path"]

        global exposureIds
        exposureIds, remaining = self._select_exposures_requiring_dophot_extraction(
            batch=int(self.settings["dophot"]["batch size"]))
        if remaining == 0:
            print "%(remaining)s locally cached dophot files remain needing to be parsed for orbfit predicted known asteroid positions" % locals(
            )
            return None

        # SELECT 100 EXPOSURES REQUIRING DOPHOT EXTRACTION
        while remaining > 0:
            exposureIds, remaining = self._select_exposures_requiring_dophot_extraction(
                batch=int(self.settings["dophot"]["batch size"]))
            print "%(remaining)s locally cached dophot files remain needing to be parsed for orbfit predicted known asteroid positions" % locals(
            )
            if remaining == 0:
                continue
            dophotMatches = fmultiprocess(log=self.log,
                                          function=_extract_phot_from_exposure,
                                          inputArray=list(range(len(exposureIds))),
                                          poolSize=5,
                                          timeout=300,
                                          cachePath=cachePath,
                                          settings=self.settings)
            self._add_dophot_matches_to_database(dophotMatches=dophotMatches,
                                                 exposureIds=exposureIds)
            dophotMatches = None

        self._add_value_to_dophot_table()

        self.log.info('completed the ``get`` method')
        return None
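A minimal sketch of the dophot worker handed to `fmultiprocess` above (the body is hypothetical); it receives an index rather than an exposure ID so it can look the ID up in the module-level `exposureIds` global that forked worker processes inherit:

def _extract_phot_from_exposure(index, cachePath=False, settings=False):
    """Parse one locally cached dophot file for orbfit-predicted asteroid positions."""
    expId = exposureIds[index]
    matches = []
    # ... open the dophot file for expId under cachePath and collect the
    # photometry rows sitting on predicted known-asteroid positions ...
    return matches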
Example #5
    def test_multiprocess_function(self):

        from fundamentals import fmultiprocess

        # DEFINE AN INPUT ARRAY
        inputArray = list(range(1000))
        t1 = time.time()
        result = fmultiprocess(log=log, function=f,
                               inputArray=inputArray, anotherKeyword="cheese")

        took = time.time() - t1
        print "Multiprocessing took: %(took)s" % locals()

        t2 = time.time()

        result = []
        for i in inputArray:
            result.append(f(i))
        took = time.time() - t2
        print "Serial processing took: %(took)s" % locals()
Example #6
    def test_multiprocess_function(self):

        from fundamentals import fmultiprocess

        # DEFINE AN INPUT ARRAY
        inputArray = list(range(1000))
        t1 = time.time()
        result = fmultiprocess(log=log,
                               function=f,
                               inputArray=inputArray,
                               anotherKeyword="cheese")

        took = time.time() - t1
        print("Multiprocessing took: %(took)s" % locals())

        t2 = time.time()

        result = []
        for i in inputArray:
            result.append(f(i))
        took = time.time() - t2
        print("Serial processing took: %(took)s" % locals())
Example #7
    def _match_pyephem_snapshot_to_atlas_exposures(self, pyephemDB, exposures,
                                                   mjd):
        """*match pyephem snapshot to atlas exposures*

        **Key Arguments:**
            - ``pyephemDB`` -- the pyephem solar-system snapshot database
            - ``exposures`` -- the atlas exposures to match against the snapshot
            - ``mjd`` -- the MJD of the pyephem snapshot

        **Return:**
            - ``matchedObjects`` -- these objects matched in the neighbourhood of the ATLAS exposures (list of dictionaries)
        """
        self.log.info(
            'starting the ``_match_pyephem_snapshot_to_atlas_exposures`` method'
        )

        global DEG_TO_RAD_FACTOR
        global RAD_TO_DEG_FACTOR
        global moversDict

        e = len(exposures)

        print "Matching %(e)s ATLAS exposures against the pyephem snapshot for MJD = %(mjd)s" % locals(
        )

        # MAKE SURE HEALPIX SMALL ENOUGH TO MATCH FOOTPRINTS CORRECTLY
        global nside

        # GRAB PARAMETERS FROM SETTINGS FILE
        tileSide = float(self.settings["pyephem"]["atlas exposure match side"])
        magLimit = float(self.settings["pyephem"]["magnitude limit"])

        # EXPLODE OUT THE PYEPHEM DATABASE
        ra = pyephemDB["ra_deg"]
        dec = pyephemDB["dec_deg"]
        healpix = pyephemDB["healpix"]
        objects = pyephemDB["object_name"]
        mpc_numbers = pyephemDB["mpc_number"]
        mag = pyephemDB["mag"]

        # INDEX PYEPHEM MOVERS IN DICTIONARY BY HEALPIX ID
        moversDict = defaultdict(list)
        for p, r, d, o, m, g in zip(healpix, ra, dec, objects, mpc_numbers, mag):
            moversDict[p].append({
                "object_name": o,
                "ra_deg": r,
                "dec_deg": d,
                "mpc_number": m,
                "mag": g
            })

        # MATCH THE PYEPHEM MOVERS AGAINST THE ATLAS EXPOSURES
        matchedObjects = []
        results = fmultiprocess(
            log=self.log,
            function=_match_single_exposure_against_pyephem_db,
            timeout=120,
            inputArray=exposures)
        for r in results:
            matchedObjects += r

        self.log.info(
            'completed the ``_match_pyephem_snapshot_to_atlas_exposures`` method'
        )
        return matchedObjects
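A minimal sketch of the per-exposure worker handed to `fmultiprocess` above (names and footprint maths are hypothetical); it relies on the module-level globals (`moversDict`, `nside`, `tileSide`, `DEG_TO_RAD_FACTOR`) being inherited by forked worker processes:

import healpy as hp

def _match_single_exposure_against_pyephem_db(exposure):
    """Return the pyephem movers indexed under the healpix pixels covering one exposure."""
    # CENTRE OF THE EXPOSURE AS A UNIT VECTOR (hypothetical exposure keys)
    theta = (90. - exposure["dec"]) * DEG_TO_RAD_FACTOR
    phi = exposure["ra"] * DEG_TO_RAD_FACTOR
    vec = hp.ang2vec(theta, phi)
    # A DISC COVERING THE SQUARE TILE'S HALF-DIAGONAL
    radius = (tileSide / 2.) * 1.42 * DEG_TO_RAD_FACTOR
    pixels = hp.query_disc(nside, vec, radius, inclusive=True)
    # COLLECT THE MOVERS PRE-INDEXED BY HEALPIX ID
    matched = []
    for p in pixels:
        matched += moversDict.get(p, [])
    return matched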
Example #8
    def plot(self):
        """*generate a batch of lightcurves using multiprocessing given their transientBucketIds*

        **Return**

        - ``filepath`` -- path to the last generated plot file


        **Usage**

        ```python
        from marshallEngine.lightcurves import marshall_lightcurves
        lc = marshall_lightcurves(
            log=log,
            dbConn=dbConn,
            settings=settings,
            transientBucketIds=[28421489, 28121353, 4637952, 27409808]
        )
        lc.plot()
        ```

        """
        self.log.debug('starting the ``plot`` method')

        # DEFINE AN INPUT ARRAY
        total = len(self.transientBucketIds)

        thisDict = {"database settings": self.settings["database settings"]}

        if total:
            print("updating lightcurves for %(total)s transients" % locals())
            print()

        # USE IF ISSUES IN _plot_one FUNCTION
        # for transientBucketId in self.transientBucketIds:
        #     _plot_one(
        #         transientBucketId=transientBucketId,
        #         log=self.log,
        #         settings=self.settings
        #     )

        results = fmultiprocess(log=self.log,
                                function=_plot_one,
                                inputArray=self.transientBucketIds,
                                poolSize=False,
                                timeout=3600,
                                settings=self.settings)

        sqlQuery = ""
        updatedTransientBucketIds = []
        for t, r in zip(self.transientBucketIds, results):
            if not r[0]:
                # LIGHTCURVE NOT GENERATED
                continue
            updatedTransientBucketIds.append(t)
            filepath = r[0]
            currentMagnitude = r[1]
            gradient = r[2]
            sqlQuery += """update transientBucketSummaries set currentMagnitudeEstimate = %(currentMagnitude)s, currentMagnitudeEstimateUpdated = NOW(), recentSlopeOfLightcurve = %(gradient)s where transientBucketId = %(t)s;
            """ % locals()
        ids = []
        ids[:] = [str(i) for i in updatedTransientBucketIds]
        updatedTransientBucketIds = (",").join(ids)
        sqlQuery += "update pesstoObjects set master_pessto_lightcurve = 1 where transientBucketId in (%(updatedTransientBucketIds)s);" % locals(
        )

        if len(updatedTransientBucketIds):
            writequery(
                log=self.log,
                sqlQuery=sqlQuery,
                dbConn=self.dbConn,
            )
        else:
            filepath = False

        self.log.debug('completed the ``plot`` method')

        return filepath
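The unpacking of `results` above implies each `_plot_one` call returns a three-element tuple. A minimal sketch of that contract (the signature comes from the commented-out serial loop; the body is hypothetical):

def _plot_one(transientBucketId, log=None, settings=False):
    """Generate one lightcurve and return its vitals.

    Returns (filepath, currentMagnitude, gradient); filepath is False when
    no lightcurve could be generated for this transient.
    """
    filepath, currentMagnitude, gradient = False, None, None
    # ... query the photometry for transientBucketId, render the plot and
    # measure the latest magnitude and the recent slope ...
    return filepath, currentMagnitude, gradient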
Example #9
def generate_atlas_lightcurves(dbConn, log, settings):
    """generate all atlas FP lightcurves (clipped and stacked)

    **Key Arguments**

    - ``dbConn`` -- mysql database connection
    - ``log`` -- logger
    - ``settings`` -- settings for the marshall.

    ```python
    from marshallEngine.feeders.atlas.lightcurve import generate_atlas_lightcurves
    generate_atlas_lightcurves(
        log=log,
        dbConn=dbConn,
        settings=settings
    )
    ```
    """
    log.debug('starting the ``generate_atlas_lightcurves`` function')

    # SELECT SOURCES THAT NEED THEIR ATLAS FP LIGHTCURVES CREATED/UPDATED
    sqlQuery = u"""
        SELECT
                t.transientBucketId
            FROM
                transientBucket t ,pesstoObjects p
            WHERE
                p.transientBucketId=t.transientBucketId
                and t.survey = 'ATLAS FP' and t.limitingMag = 0
                and ((p.atlas_fp_lightcurve < t.dateCreated and p.atlas_fp_lightcurve != 0) or p.atlas_fp_lightcurve is null)
            GROUP BY t.transientBucketId;
    """
    rows = readquery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)
    transientIds = [r["transientBucketId"] for r in rows]

    total = len(transientIds)
    if total > 1000:
        print(
            "ATLAS lightcurves need to be generated for %(total)s sources - generating next 1000"
            % locals())
        transientIds = transientIds[:1000]
        total = len(transientIds)
    else:
        print("Generating ATLAS lightcurves for %(total)s sources" % locals())

    # SETUP THE INITIAL FIGURE FOR THE PLOT (ONLY ONCE)
    fig = plt.figure(num=None,
                     figsize=(10, 10),
                     dpi=100,
                     facecolor=None,
                     edgecolor=None,
                     frameon=True)
    mpl.rc('ytick', labelsize=18)
    mpl.rc('xtick', labelsize=18)
    mpl.rcParams.update({'font.size': 22})

    # FORMAT THE AXES
    ax = fig.add_axes([0.1, 0.1, 0.8, 0.8], polar=False, frameon=True)
    ax.set_xlabel('MJD', labelpad=20)
    ax.set_yticks([2.2])

    # RHS AXIS TICKS
    plt.setp(ax.xaxis.get_majorticklabels(),
             rotation=45,
             horizontalalignment='right')
    ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

    y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
    ax.yaxis.set_major_formatter(y_formatter)
    ax.xaxis.grid(False)

    # ADD SECOND Y-AXIS
    ax2 = ax.twinx()
    ax2.yaxis.set_major_formatter(y_formatter)
    ax2.set_ylabel(r'Flux ($\mu$Jy)', rotation=-90., labelpad=27)
    ax2.grid(False)

    # ADD SECOND X-AXIS
    ax3 = ax.twiny()
    ax3.grid(True)
    plt.setp(ax3.xaxis.get_majorticklabels(),
             rotation=45,
             horizontalalignment='left')

    # CONVERTER TO CONVERT MJD TO DATE
    converter = conversions(log=log)

    if len(transientIds) < 3:
        plotPaths = []
        for transientBucketId in transientIds:
            plotPaths.append(
                plot_single_result(log=log,
                                   transientBucketId=transientBucketId,
                                   fig=fig,
                                   converter=converter,
                                   ax=ax,
                                   settings=settings))
    else:
        log.info("""starting multiprocessing""")
        plotPaths = fmultiprocess(log=log,
                                  function=plot_single_result,
                                  inputArray=transientIds,
                                  poolSize=False,
                                  timeout=7200,
                                  fig=fig,
                                  converter=converter,
                                  ax=ax,
                                  settings=settings)
        log.info("""finished multiprocessing""")

    # REMOVE MISSING PLOTS
    transientIdGood = [t for p, t in zip(plotPaths, transientIds) if p]
    transientIdBad = [t for p, t in zip(plotPaths, transientIds) if p is None]

    # UPDATE THE atlas_fp_lightcurve DATE FOR TRANSIENTS WE HAVE JUST
    # GENERATED PLOTS FOR
    if len(transientIdGood):
        transientIdGood = (",").join([str(t) for t in transientIdGood])
        sqlQuery = f"""update pesstoObjects set atlas_fp_lightcurve = NOW() where transientBucketID in ({transientIdGood})"""
        writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    # RESET THE atlas_fp_lightcurve FLAG FOR TRANSIENTS WHOSE PLOTS FAILED TO
    # GENERATE
    if len(transientIdBad):
        transientIdBad = (",").join([str(t) for t in transientIdBad])
        sqlQuery = f"""update pesstoObjects set atlas_fp_lightcurve = 0 where transientBucketID in ({transientIdBad})"""
        writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    log.debug('completed the ``generate_atlas_lightcurves`` function')
    return None
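Because a single figure and set of axes are built once and passed to every call, each `plot_single_result` invocation has to draw onto the shared axes and then tidy up after itself. A minimal sketch of that reuse pattern (the signature comes from the serial branch above; the body, including the output path, is hypothetical):

def plot_single_result(transientBucketId, log=None, fig=None, converter=None, ax=None, settings=False):
    """Plot one transient onto the shared axes and save the figure.

    Returns the plot's filepath, or None when no photometry is found.
    """
    # ... fetch the photometry for transientBucketId ...
    lines = ax.plot([], [])  # placeholder for the real photometry plot
    filepath = f"/tmp/{transientBucketId}_atlas_fp_lightcurve.png"  # hypothetical path
    fig.savefig(filepath)
    # REMOVE THIS TRANSIENT'S ARTISTS SO THE NEXT CALL STARTS FROM CLEAN AXES
    for line in lines:
        line.remove()
    return filepath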
Example #10
def orbfit_ephemeris(log,
                     objectId,
                     mjd,
                     settings,
                     obscode=500,
                     verbose=False,
                     astorbPath=False):
    """Given a known solar-system object ID (human-readable name or MPC number but *NOT* an MPC packed format) or list of names and one or more specific epochs, return the calculated ephemerides

    **Key Arguments:**
        - ``log`` -- logger
        - ``objectId`` -- human-readable name or MPC number of a solar-system object, or a list of such IDs
        - ``mjd`` -- a single MJD, or a list of MJDs, to generate ephemerides for
        - ``settings`` -- the settings dictionary for rockfinder
        - ``obscode`` -- the observatory code for the ephemeris generation. Default **500** (geocentric)
        - ``verbose`` -- return extra information with each ephemeris
        - ``astorbPath`` -- override the default path to astorb.dat orbital elements file

    **Return:**
        - ``resultList`` -- a list of ordered dictionaries containing the returned ephemerides

    **Usage:**

        To generate an ephemeris for a single epoch, using ATLAS Haleakala as your observatory:

        .. code-block:: python

            from rockfinder import orbfit_ephemeris
            eph = orbfit_ephemeris(
                log=log,
                objectId=1,
                obscode="T05"
                mjd=57916.,
            )

        or to generate an ephemeris for multiple epochs:

        .. code-block:: python

            from rockfinder import orbfit_ephemeris
            eph = orbfit_ephemeris(
                log=log,
                objectId="ceres",
                mjd=[57916.1,57917.234,57956.34523],
                verbose=True
            )

        Note that by passing `verbose=True` the essential ephemeris data is supplemented with some extra data.

        It's also possible to pass in an array of object IDs:

        .. code-block:: python

            from rockfinder import orbfit_ephemeris
            eph = orbfit_ephemeris(
                log=log,
                objectId=[1,5,3547,"Shikoku"],
                mjd=[57916.1,57917.234,57956.34523]
            )

        And finally, you can override the default path to the astorb.dat orbital elements file by passing in a custom path (useful for passing in a trimmed orbital elements database):

        .. code-block:: python

            from rockfinder import orbfit_ephemeris
            eph = orbfit_ephemeris(
                log=log,
                objectId=[1,5,3547,"Shikoku"],
                mjd=[57916.1,57917.234,57956.34523],
                astorbPath="/path/to/astorb.dat"
            )

    """
    log.debug('starting the ``orbfit_ephemeris`` function')

    global cmdList

    # MAKE SURE MJDs ARE IN A LIST
    if not isinstance(mjd, list):
        mjdList = [str(mjd)]
    else:
        mjdList = mjd

    if not isinstance(objectId, list):
        objectList = [objectId]
    else:
        objectList = objectId

    ephem = settings["path to ephem binary"]
    home = expanduser("~")

    results = []
    tmpCmdList = []

    for o in objectList:
        for m in mjdList:
            if not isinstance(o, int) and "'" in o:
                cmd = """%(ephem)s %(obscode)s %(m)s "%(o)s" """ % locals()
            else:
                cmd = """%(ephem)s %(obscode)s %(m)s '%(o)s'""" % locals()

            if astorbPath:
                cmd += " '%(astorbPath)s'" % locals()

            tmpCmdList.append((cmd, o))

    def chunks(seq, n):
        """Yield successive n-sized chunks from seq."""
        for i in range(0, len(seq), n):
            yield seq[i:i + n]

    # BATCH INTO 10s
    cmdList = [c for c in chunks(tmpCmdList, 10)]

    # DEFINE AN INPUT ARRAY
    results = fmultiprocess(log=log,
                            function=_generate_one_ephemeris,
                            inputArray=list(range(len(cmdList))))

    if verbose:
        order = [
            "object_name", "mjd", "ra_deg", "dec_deg", "apparent_mag",
            "observer_distance", "heliocentric_distance", "phase_angle",
            "obscode", "sun_obs_target_angle", "galactic_latitude",
            "ra_arcsec_per_hour", "dec_arcsec_per_hour"
        ]
    else:
        order = [
            "object_name", "mjd", "ra_deg", "dec_deg", "apparent_mag",
            "observer_distance", "heliocentric_distance", "phase_angle"
        ]

    # ORDER THE RESULTS
    resultList = []
    for r2 in results:
        if not r2:
            continue
        for r in r2:
            if not r:
                continue

            orderDict = collections.OrderedDict({})
            for i in order:
                orderDict[i] = r[i]

            resultList.append(orderDict)

    log.debug('completed the ``orbfit_ephemeris`` function')
    return resultList
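A minimal sketch of the batched worker (hypothetical implementation); each call receives an index into the module-level `cmdList` global and shells out to the ephem binary for its batch of ten pre-built commands:

import subprocess

def _generate_one_ephemeris(index):
    """Run one batch of ephem commands; return a list of ephemeris dicts."""
    ephemerides = []
    for cmd, objectId in cmdList[index]:
        try:
            output = subprocess.check_output(cmd, shell=True, universal_newlines=True)
        except subprocess.CalledProcessError:
            ephemerides.append(None)
            continue
        # ... parse `output` into a dictionary keyed by the fields in the
        # `order` lists above (object_name, mjd, ra_deg, dec_deg, ...) ...
        ephemerides.append({"object_name": objectId})
    return ephemerides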
Example #11
    def get(self, days):
        """
        *download a cache of nightly ATLAS data*

        **Key Arguments:**
            - ``days`` -- the number of days of data to cache locally

        **Return:**
            - None

        **Usage:**

            See class docstring
        """
        self.log.info('starting the ``get`` method')

        self._remove_processed_data()

        archivePath = self.settings["atlas archive path"]
        self._update_day_tracker_table()
        mjds = self._determine_mjds_to_download(days=days)

        if len(mjds) == 0:
            return

        dbConn = self.atlasMoversDBConn

        # DOWNLOAD THE DATA IN PARALLEL
        results = fmultiprocess(log=self.log,
                                function=_download_one_night_of_atlas_data,
                                timeout=3600,
                                inputArray=mjds,
                                archivePath=archivePath)

        global dbSettings

        dbSettings = self.settings["database settings"]["atlasMovers"]

        for d in results:
            if d and len(d[0]):
                insert_list_of_dictionaries_into_database_tables(
                    dbConn=dbConn,
                    log=self.log,
                    dictList=d[0],
                    dbTableName="atlas_exposures",
                    dateModified=True,
                    batchSize=10000,
                    replace=True,
                    dbSettings=dbSettings)

        # UPDATE BOOKKEEPING
        mjds = []
        mjds[:] = [r[1] for r in results if (r and r[1] is not None)]
        mjds = (',').join(mjds)

        if len(mjds):
            sqlQuery = """update atlas_exposures set local_data = 1 where floor(mjd) in (%(mjds)s);
        update day_tracker set processed = 1 where mjd in (%(mjds)s);""" % locals(
            )
            writequery(
                log=self.log,
                sqlQuery=sqlQuery,
                dbConn=self.atlasMoversDBConn,
            )

        bk = bookkeeper(log=self.log, settings=self.settings, fullUpdate=False)
        bk.clean_all()

        self.log.info('completed the ``get`` method')
        return None
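The result handling above implies each download worker returns a two-element tuple: a list of exposure dictionaries for the `atlas_exposures` table and the night's MJD as a string (or None on failure). A minimal sketch of that contract (the body is hypothetical):

def _download_one_night_of_atlas_data(mjd, archivePath=False):
    """Sync one night of ATLAS data into the local archive.

    Returns (exposureDicts, mjdStr); mjdStr is None when the download fails,
    so the bookkeeping update skips that night.
    """
    exposureDicts = []
    # ... rsync the night's exposure metadata into archivePath and convert
    # each exposure into a dict for the atlas_exposures table ...
    return exposureDicts, str(int(mjd))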