def test_mjd_conversions_function(self):

        from astrocalc.times import conversions
        converter = conversions(
            log=log,
        )
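        # MJD 57504.6144 SHOULD CONVERT TO ROUGHLY 2016-04-26 14:44:44 UT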
        print(converter.mjd_to_ut_datetime(mjd=57504.61440))
    def test_decimal_day_to_day_hour_min_sec_function(self):

        from astrocalc.times import conversions
        converter = conversions(
            log=log,
        )
        daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
            daysFloat=24.2453)
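        # 0.2453 DAYS IS ROUGHLY 5.887 HOURS, SO EXPECT 24 days, 5 hours, 53 mins, 13.9 sec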
        print("%(daysInt)s days, %(hoursInt)s hours, %(minsInt)s mins, %(secFloat)s sec" % locals())

        daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
            daysFloat=24.12345)
        print("%(daysInt)s days, %(hoursInt)s hours, %(minsInt)s mins, %(secFloat)s sec" % locals())

        daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
            daysFloat=24.2)
        print("%(daysInt)s days, %(hoursInt)s hours, %(minsInt)s mins, %(secFloat)s sec" % locals())

        daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
            daysFloat=24.1232435454)
        print("%(daysInt)s days, %(hoursInt)s hours, %(minsInt)s mins, %(secFloat)s sec" % locals())
Example #3
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    from astrocalc.coords import unit_conversion
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="CRITICAL",
               options_first=True,
               projectName="astrocalc",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS INTO A DICT, STRIPPING THE DOCOPT
    # DECORATION (e.g. "--hc" BECOMES hcFlag AND "<ra>" BECOMES ra)
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except IOError:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    coordflip = a["coordflip"]
    sep = a["sep"]
    timeflip = a["timeflip"]
    trans = a["trans"]
    now = a["now"]
    dist = a["dist"]
    ra = a["ra"]
    ra1 = a["ra1"]
    ra2 = a["ra2"]
    dec = a["dec"]
    dec1 = a["dec1"]
    dec2 = a["dec2"]
    datetime = a["datetime"]
    north = a["north"]
    east = a["east"]
    distVal = a["distVal"]
    hcFlag = a["hcFlag"]
    wmFlag = a["wmFlag"]
    wvFlag = a["wvFlag"]
    mpcFlag = a["mpcFlag"]
    redshiftFlag = a["redshiftFlag"]
    cartesianFlag = a["cartesianFlag"]

    # CALL FUNCTIONS/OBJECTS
    if coordflip:

        if cartesianFlag:
            converter = unit_conversion(log=log)
            x, y, z = converter.ra_dec_to_cartesian(ra=ra,
                                                    dec=dec)
            print(x, y, z)
            return

        try:
            ra = float(ra)
            dec = float(dec)
            degree = True
        except Exception as e:
            degree = False

        if degree is True:
            converter = unit_conversion(log=log)
            try:
                ra = converter.ra_decimal_to_sexegesimal(ra=ra, delimiter=":")
                dec = converter.dec_decimal_to_sexegesimal(dec=dec,
                                                           delimiter=":")
            except Exception as e:
                print(e)
                sys.exit(0)

            print(ra, dec)
        else:
            converter = unit_conversion(log=log)
            try:
                ra = converter.ra_sexegesimal_to_decimal(ra=ra)
                dec = converter.dec_sexegesimal_to_decimal(dec=dec)
            except Exception as e:
                print(e)
                sys.exit(0)
            print(ra, dec)
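            # e.g. ra="23:45:21.23", dec="+01:58:05.45" GIVES ROUGHLY 356.33846, 1.96818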

    if sep:
        from astrocalc.coords import separations
        calculator = separations(
            log=log,
            ra1=ra1,
            dec1=dec1,
            ra2=ra2,
            dec2=dec2,
        )
        angularSeparation, north, east = calculator.get()
        print("""%(angularSeparation)s arcsec (%(north)s N, %(east)s E)""" %
              locals())

    if timeflip:
        # DECIDE WHETHER THE INPUT IS AN MJD OR A UT DATETIME. A VALUE THAT
        # PARSES AS A FLOAT AND DOES NOT START WITH 0, 1 OR 2 IS TREATED AS AN
        # MJD (e.g. 57504.6144 -> 2016-04-26 14:44:44 UT), OTHERWISE AS A UT
        # DATETIME (e.g. "2016-04-26 14:44:44" -> 57504.6144)
        try:
            float(datetime)
            inputMjd = datetime[0] not in ["0", "1", "2"]
        except (ValueError, TypeError):
            inputMjd = False
        from astrocalc.times import conversions
        converter = conversions(log=log)

        if not inputMjd:
            try:
                mjd = converter.ut_datetime_to_mjd(utDatetime=datetime)
                print(mjd)
            except Exception as e:
                print(e)
        else:
            try:
                utDate = converter.mjd_to_ut_datetime(mjd=datetime)
                print(utDate)
            except Exception as e:
                print(e)

    if trans:
        # TRANSLATE COORDINATES ACROSS SKY
        from astrocalc.coords import translate
        newRa, newDec = translate(log=log,
                                  ra=ra,
                                  dec=dec,
                                  northArcsec=float(north),
                                  eastArcsec=float(east)).get()
        from astrocalc.coords import unit_conversion
        converter = unit_conversion(log=log)
        ra = converter.ra_decimal_to_sexegesimal(ra=newRa, delimiter=":")
        dec = converter.dec_decimal_to_sexegesimal(dec=newDec, delimiter=":")

        print("%(newRa)s, %(newDec)s (%(ra)s, %(dec)s)" % locals())

    if now:
        from astrocalc.times import now
        mjd = now(log=log).get_mjd()
        print(mjd)

    if dist and redshiftFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        if not hcFlag:
            hcFlag = 70.
        if not wmFlag:
            wmFlag = 0.3
        if not wvFlag:
            wvFlag = 0.7
        dists = c.redshift_to_distance(z=float(distVal),
                                       WM=float(wmFlag),
                                       WV=float(wvFlag),
                                       H0=float(hcFlag))
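        # FOR z=0.1 WITH THE DEFAULT COSMOLOGY THE DISTANCE MODULUS IS ROUGHLY 38.3 mag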
        print("Distance Modulus: " + str(dists["dmod"]) + " mag")
        print("Luminousity Distance: " + str(dists["dl_mpc"]) + " Mpc")
        print("Angular Size Scale: " + str(dists["da_scale"]) + " kpc/arcsec")
        print("Angular Size Distance: " + str(dists["da_mpc"]) + " Mpc")
        print("Comoving Radial Distance: " + str(dists["dcmr_mpc"]) + " Mpc")

    if dist and mpcFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        z = c.distance_to_redshift(mpc=float(distVal))
        print("z = %(z)s" % locals())

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
    def parse_panstarrs_nightlogs(
            self,
            updateAll=False):
        """*download and parse the ps1 night logs from the range of time a wave survey campaign is active*

        The night-log data is added to the ps1_nightlogs table

        **Key Arguments:**
            - ``updateAll`` -- update all of the PS1 nightlogs. This will take some time; by default only the logs from the last 7 days are parsed. Default *False*.

        **Return:**
            - None

        **Usage:**
            ..  todo::

                - add usage info
                - create a sublime snippet for usage
                - update package tutorial if needed

            .. code-block:: python

                usage code

        """
        self.log.debug('starting the ``parse_panstarrs_nightlogs`` method')

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(
            log=self.log
        )

        createStatement = """
CREATE TABLE `ps1_nightlogs` (
  `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
  `airm` double DEFAULT NULL,
  `comments` varchar(200) DEFAULT NULL,
  `decDeg` double DEFAULT NULL,
  `etime` double DEFAULT NULL,
  `f` varchar(10) DEFAULT NULL,
  `filesetID` varchar(100) DEFAULT NULL,
  `raDeg` double DEFAULT NULL,
  `telescope_pointing` varchar(200) DEFAULT NULL,
  `time_registered` datetime DEFAULT NULL,
  `type` varchar(100) DEFAULT NULL,
  `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
  `dateLastModified` datetime DEFAULT CURRENT_TIMESTAMP,
  `updated` varchar(45) DEFAULT '0',
  PRIMARY KEY (`primaryId`),
  UNIQUE KEY `filesetid` (`filesetID`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
"""

        from astrocalc.times import now
        mjdNow = now(
            log=self.log
        ).get_mjd()

        # WAVE METADATA FOUND IN SETTINGS FILE
        for wave in self.settings["gravitational waves"]:
            # GIVE A 3 DAY WINDOW EITHER SIDE OF WAVE TIME-RANGE
            mjdLower = int(self.settings["gravitational waves"][
                wave]["mjd"] - 21. - 3.)
            mjdUpper = int(self.settings["gravitational waves"][
                wave]["mjd"] + 31. + 3.)

            # UNLESS UPDATING EVERYTHING, ONLY REFRESH LOGS FROM THE LAST 7
            # NIGHTS
            if not updateAll:
                if mjdUpper < mjdNow - 7.:
                    continue
                if mjdUpper > mjdNow:
                    mjdUpper = int(mjdNow)
                if mjdLower < mjdNow - 7.:
                    mjdLower = int(mjdNow - 7.)

            # METRIC NIGHT LOGS FOR EACH NIGHT FOUND AT A URL SIMILAR TO :
            # "http://ipp0022.ifa.hawaii.edu/ps1sc/metrics/2016-12-14/index.html"
            urls = []
            for i in range(mjdUpper - mjdLower + 3):
                mjd = i + mjdLower
                utDate = converter.mjd_to_ut_datetime(
                    mjd=mjd,
                    sqlDate=False,
                    datetimeObject=True
                )
                utDate = utDate.strftime("%Y-%m-%d")
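                # e.g. MJD 57736 -> "2016-12-14" -> .../metrics/2016-12-14/index.html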
                urls.append("http://ipp0022.ifa.hawaii.edu/ps1sc/metrics/%(utDate)s/index.html" % locals(
                ))

            localUrls = multiobject_download(
                urlList=urls,
                downloadDirectory="/tmp",
                log=self.log,
                timeStamp=True,
                timeout=180,
                concurrentDownloads=2,
                resetFilename=False,
                credentials=False,  # { 'username' : "...", "password", "..." }
                longTime=True,
                indexFilenames=False
            )

            for url in localUrls:
                if not url:
                    continue
                pathToReadFile = url
                try:
                    self.log.debug("attempting to open the file %s" %
                                   (pathToReadFile,))
                    readFile = codecs.open(
                        pathToReadFile, encoding='utf-8', mode='r')
                    thisData = readFile.read()
                    readFile.close()
                except IOError as e:
                    message = 'could not open the file %s' % (pathToReadFile,)
                    self.log.critical(message)
                    raise IOError(message)

                regex = re.compile(r'<pre>\s*# (filesetID.*?)</pre>', re.S)
                matchObject = re.finditer(
                    regex,
                    thisData
                )

                for match in matchObject:
                    csvReader = csv.DictReader(
                        io.StringIO(match.group(1)), delimiter='|')
                    nightLog = []
                    for row in csvReader:
                        cleanDict = {}
                        for k, v in row.items():
                            cleanDict[k.strip().replace(" ", "_")] = v.strip()
                        if "telescope_pointing" in cleanDict:
                            cleanDict["raDeg"] = cleanDict["telescope_pointing"].split()[
                                0]
                            cleanDict["decDeg"] = cleanDict["telescope_pointing"].split()[
                                1]
                        if "time_registered" in cleanDict:
                            cleanDict["time_registered"] = cleanDict[
                                "time_registered"].replace("Z", "")
                        nightLog.append(cleanDict)

                dataSet = list_of_dictionaries(
                    log=self.log,
                    listOfDictionaries=nightLog
                )
                # Recursively create missing directories
                if not os.path.exists("/tmp/ps1_nightlogs"):
                    os.makedirs("/tmp/ps1_nightlogs")
                mysqlData = dataSet.mysql(
                    tableName="ps1_nightlogs", filepath="/tmp/ps1_nightlogs/ps1_nightlog_%(utDate)s.sql" % locals(), createStatement=createStatement)

                directory_script_runner(
                    log=self.log,
                    pathToScriptDirectory="/tmp/ps1_nightlogs",
                    databaseName=self.settings["database settings"][
                        "ligo_virgo_waves"]["db"],
                    loginPath=self.settings["database settings"][
                        "ligo_virgo_waves"]["loginPath"],
                    successRule="delete",
                    failureRule="failed"
                )
Example #5
    def _create_lightcurve_plot_file(self, dataset, flatdata, flatLimits,
                                     objectNames, saveLocation, saveFileName):
        """*Generate the lightcurve and save to file*

        **Key Arguments**

        - ``log`` -- logger
        - ``dataset`` -- the observational dataset split into filters (and then mags, limits etc)
        - ``flatdata`` -- a flattened dataset to determine current magnitude
        - ``flatLimits`` -- a flattened dataset of non-detection limits
        - ``objectNames`` -- a single name or a list of names
        - ``saveLocation`` -- the folder to save the plot file to
        - ``saveFileName`` -- the filename to give the plot file (without extension)


        **Return**

        - ``filepath`` -- path to the lightcurve file
        - ``currentMag`` -- a prediction of the current magnitude if there is enough recent data
        - ``gradient`` -- a prediction of the gradient of recent data (on rise or decline?)

        """
        self.log.debug('starting the ``_create_lightcurve_plot_file`` method')

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(log=self.log)

        # INITIATE THE PLOT FIGURE - SQUARE
        fig = plt.figure(num=None,
                         figsize=(10, 10),
                         dpi=100,
                         facecolor=None,
                         edgecolor=None,
                         frameon=True)
        ax = fig.add_subplot(1, 1, 1)

        # TICK LABEL SIZE
        mpl.rc('ytick', labelsize=25)
        mpl.rc('xtick', labelsize=25)
        mpl.rcParams.update({'font.size': 25})

        # INITIAL RESTRICTIONS
        currentMag = -9999
        gradient = -9999

        # WHAT IS TODAY'S MJD (FOR CURRENT MAG ESTIMATE)
        todayMjd = now(log=self.log).get_mjd()

        # MAKE ARRAYS OF TIME AND MAG FOR PLOTS
        bigTimeArray, bigMagArray = np.array(flatdata["mjd"]), np.array(
            flatdata["mag"])
        # SORT THE TWO LISTS BASED ON THE FIRST (TIME)
        bigTimeArray, bigMagArray = zip(
            *[(x, y) for x, y in sorted(zip(bigTimeArray, bigMagArray))])

        # BIN DATA FOR POLYNOMIALS
        binData = True
        if binData is True:
            distinctMjds = {}
            for mjd, mag in zip(bigTimeArray, bigMagArray):
                # DICT KEY IS THE UNIQUE INTEGER MJD
                key = str(int(math.floor(mjd / 1.0)))
                # FIRST DATA POINT OF THE NIGHT? CREATE A NEW DATA SET
                if key not in distinctMjds:
                    distinctMjds[key] = {"mjds": [mjd], "mags": [mag]}
                # OR NOT THE FIRST? APPEND TO ALREADY CREATED LIST
                else:
                    distinctMjds[key]["mjds"].append(mjd)
                    distinctMjds[key]["mags"].append(mag)

            # ALL DATA NOW IN MJD SUBSETS. SO FOR EACH SUBSET (I.E. INDIVIDUAL
            # NIGHTS) ...
            summedMagnitudes = {'mjds': [], 'mags': []}
            for k, v in list(distinctMjds.items()):
                # GIVE ME THE MEAN MJD
                meanMjd = sum(v["mjds"]) / len(v["mjds"])
                summedMagnitudes["mjds"].append(meanMjd)
                # GIVE ME THE MEAN MAG
                meanMag = sum(v["mags"]) / len(v["mags"])
                summedMagnitudes["mags"].append(meanMag)

            bigTimeArray = summedMagnitudes["mjds"]
            bigMagArray = summedMagnitudes["mags"]
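            # e.g. THREE POINTS AT MJD 57504.1, 57504.5 AND 57504.9 COLLAPSE TO
            # A SINGLE POINT AT 57504.5 WITH THE MEAN MAGNITUDE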

        bigTimeArray = np.array(bigTimeArray)
        bigMagArray = np.array(bigMagArray)

        # DETERMINE SENSIBLE AXIS LIMITS FROM FLATTENED DATA
        # LIMITS HERE ARE LOWER AND UPPER MJDS FOR X-AXIS
        xLowerLimit = bigTimeArray.min()
        xUpperLimit = bigTimeArray.max()
        latestTime = xUpperLimit
        xBorder = math.fabs((xUpperLimit - xLowerLimit)) * 0.1
        if xBorder < 5:
            xBorder = 5.
        xLowerLimit -= xBorder
        xUpperLimit += xBorder
        fixedXUpperLimit = xUpperLimit
        timeRange = xUpperLimit - xLowerLimit

        # POLYNOMIAL CONSTRAINTS USING COMBINED DATASETS
        # POLYNOMIAL/LINEAR SETTINGS
        # SETTINGS FILE
        polyOrder = 5
        # EITHER USE DATA IN THESE LAST NUMBER OF DAYS OR ...
        lastNumDays = 10.
        # ... IF NOT ENOUGH DATA USE THE LAST NUMBER OF DATA POINTS
        predictCurrentMag = True
        lastNumDataPoints = 3
        numAnchors = 3
        anchorSeparation = 70
        latestMag = bigMagArray[0]
        anchorPointMag = latestMag + 0.5
        polyTimeArray, polyMagArray = [], []

        # QUIT IF NOT ENOUGH DATA FOR POLYNOMIAL
        if len(bigTimeArray) <= lastNumDataPoints or timeRange < 3.:
            predictCurrentMag = False

        if max(bigTimeArray) < todayMjd - 120:
            predictCurrentMag = False

        if predictCurrentMag:
            # USE ONLY THE LAST N DAYS OF DATA FOR LINEAR FIT
            mask = np.where(bigTimeArray - bigTimeArray.max() < -lastNumDays,
                            False, True)

            # DETERMINE GRADIENT OF SLOPE FROM LAST `LASTNUMDAYS` DAYS
            linearTimeArray = bigTimeArray[mask]
            linearMagArray = bigMagArray[mask]
            # FIT AND PLOT THE POLYNOMIAL ASSOCIATED WITH ALL DATA SETS
            thisLinear = chebfit(linearTimeArray, linearMagArray, 1)
            gradient = thisLinear[1]
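            # A POSITIVE GRADIENT MEANS THE MAGNITUDE IS INCREASING WITH TIME,
            # I.E. THE SOURCE IS FADING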

            firstAnchorPointTime = anchorSeparation + latestTime

            # CREATE THE ARRAY OF DATA USED TO GENERATE THE POLYNOMIAL
            polyTimeArray = bigTimeArray
            polyMagArray = bigMagArray

            # ANCHOR THE POLYNOMIAL IN THE FUTURE SO THAT ALL PREDICTED LIGHTCURVES
            # EVENTUALLY FADE TO NOTHING
            extraTimes = np.arange(0, numAnchors) * \
                anchorSeparation + firstAnchorPointTime
            extraMags = np.ones(numAnchors) * anchorPointMag
            polyTimeArray = np.append(polyTimeArray, extraTimes)
            polyMagArray = np.append(polyMagArray, extraMags)

            # POLYNOMIAL LIMITS
            xPolyLowerLimit = min(polyTimeArray) - 2.0
            xPolyUpperLimit = max(polyTimeArray) + 2.0

        # SET AXIS LIMITS
        xUpperLimit = 5
        yLowerLimit = min(bigMagArray) - 0.3
        yUpperLimit = max(bigMagArray) + 0.5
        yBorder = math.fabs((yUpperLimit - yLowerLimit)) * 0.1
        yLowerLimit -= yBorder
        yUpperLimit += yBorder

        # EXTEND LOWER X-LIMIT FOR NON-DETECTIONS
        xLowerTmp = xLowerLimit
        for t, m in zip(flatLimits["mjd"], flatLimits["mag"]):
            if m > yLowerLimit and t < xLowerTmp + 2 and t > xLowerLimit - 40:
                xLowerTmp = t - 2
        xLowerLimit = xLowerTmp

        if predictCurrentMag:
            thisPoly = chebfit(polyTimeArray, polyMagArray, polyOrder)
            # FLATTEN INTO A FUNCTION TO MAKE PLOTTING EASIER
            xData = np.arange(xPolyLowerLimit, todayMjd + 50, 1)
            flatLinear = chebval(xData, thisLinear)
            flatPoly = chebval(xData, thisPoly)
            plt.plot(xData, flatPoly, label="poly")
            plt.plot(xData, flatLinear, label="linear")

            # PREDICT A CURRENT MAGNITUDE FROM THE PLOT

            currentMag = chebval(todayMjd, thisPoly)
            self.log.debug('currentMag: %(currentMag)0.2f, m=%(gradient)s' %
                           locals())

            ls = "*g" % locals()
            currentMagArray = np.array([currentMag])
            nowArray = np.array([todayMjd])
            line = ax.plot(nowArray,
                           currentMagArray,
                           ls,
                           label="current estimate")

            lineExtras = ax.plot(extraTimes, extraMags, "+")

            # SET THE AXES / VIEWPORT FOR THE PLOT
            # if currentMag < yLowerLimit:
            #     yLowerLimit = currentMag - 0.4

        if currentMag > 23:
            currentMag = -9999.

        plt.clf()
        plt.cla()
        ax = fig.add_subplot(1, 1, 1)
        # print(currentMag)
        # print(bigTimeArray)
        # print(bigMagArray)

        # PLOT DATA VIA FILTER. MAGS AND LIMITS
        filterColor = {
            "r": "#29a329",
            "g": "#268bd2",
            "G": "#859900",
            "o": "#cb4b16",
            "c": "#2aa198",
            "U": "#6c71c4",
            "B": "blue",
            "V": "#008000",
            "R": "#e67300",
            "I": "#dc322f",
            "w": "#cc2900",
            "y": "#ff6666",
            "z": "#990000",
        }
        i = 0
        handles = []
        handlesAdded = []
        for k, v in list(dataset.items()):
            mag = v["mag"]
            magErr = v["magErr"]
            magMjd = v["magMjd"]
            limit = v["limit"]
            limitMjd = v["limitMjd"]
            magNoErr = v["magNoErr"]
            magNoErrMjd = v["magNoErrMjd"]
            magNoErrFudge = v["magNoErrFudge"]

            if k in filterColor:
                color = filterColor[k]
            else:
                color = "black"

            if len(limit):
                for l, m in zip(limit, limitMjd):
                    plt.text(m,
                             l,
                             u"\u21A7",
                             fontname='STIXGeneral',
                             size=30,
                             va='top',
                             ha='center',
                             clip_on=True,
                             color=color,
                             zorder=1)
            if len(magNoErr):
                theseMags = ax.errorbar(magNoErrMjd,
                                        magNoErr,
                                        yerr=magNoErrFudge,
                                        color=color,
                                        fmt='o',
                                        mfc=color,
                                        mec=color,
                                        zorder=2,
                                        ms=12.,
                                        alpha=0.8,
                                        linewidth=1.2,
                                        label=k,
                                        capsize=0)
                theseMags[-1][0].set_linestyle('--')

            if len(mag):
                theseMags = ax.errorbar(magMjd,
                                        mag,
                                        yerr=magErr,
                                        color=color,
                                        fmt='o',
                                        mfc=color,
                                        mec=color,
                                        zorder=3,
                                        ms=12.,
                                        alpha=0.8,
                                        linewidth=1.2,
                                        label=k,
                                        capsize=10)

            if not len(mag):
                theseMags = ax.errorbar([-500], [20],
                                        yerr=[0.2],
                                        color=color,
                                        fmt='o',
                                        mfc=color,
                                        mec=color,
                                        zorder=3,
                                        ms=12.,
                                        alpha=0.8,
                                        linewidth=1.2,
                                        label=k,
                                        capsize=10)

            if k not in handlesAdded:
                handles.append(theseMags)
                handlesAdded.append(k)

        # ADD LEGEND
        plt.legend(handles=handles,
                   prop={'size': 13.5},
                   bbox_to_anchor=(1., 1.25),
                   loc=0,
                   borderaxespad=0.,
                   ncol=18,
                   scatterpoints=1)

        # RHS AXIS TICKS
        plt.setp(ax.xaxis.get_majorticklabels(),
                 rotation=45,
                 horizontalalignment='right')
        ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

        # CHANGE PLOT TO FIXED TIME
        # SETUP THE AXES
        xUpperLimit = fixedXUpperLimit
        ax.set_xlabel('MJD', labelpad=20, fontsize=30)
        ax.set_ylabel('Magnitude', labelpad=20, fontsize=30)
        ax.set_title('')
        ax.set_xlim([xLowerLimit, xUpperLimit])
        ax.set_ylim([yUpperLimit, yLowerLimit])
        ax.xaxis.set_major_formatter(ticker.FormatStrFormatter('%d'))

        # GENERATE UT DATE AXIS FOR TOP OF PLOT
        lower, upper = ax.get_xlim()
        utLower = converter.mjd_to_ut_datetime(mjd=lower, datetimeObject=True)
        utUpper = converter.mjd_to_ut_datetime(mjd=upper, datetimeObject=True)
        ax3 = ax.twiny()
        ax3.set_xlim([utLower, utUpper])
        ax3.grid(True)
        ax.xaxis.grid(False)
        plt.setp(ax3.xaxis.get_majorticklabels(),
                 rotation=45,
                 horizontalalignment='left',
                 fontsize=14)
        ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d, %y'))

        # Y TICK FORMAT
        y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
        ax.yaxis.set_major_formatter(y_formatter)

        # PRINT CURRENT MAG AS SANITY CHECK
        # fig.text(0.1, 1.02, currentMag, ha="left", fontsize=40)

        # RECURSIVELY CREATE MISSING DIRECTORIES
        if not os.path.exists(saveLocation):
            try:
                os.makedirs(saveLocation)
            except:
                pass
        # SAVE THE PLOT
        filepath = """%(saveLocation)s%(saveFileName)s.png""" % locals()
        plt.savefig(filepath,
                    format='PNG',
                    bbox_inches='tight',
                    transparent=False,
                    pad_inches=0.4)
        # plt.show()
        plt.clf()  # clear figure
        plt.close()

        # TEST THAT PLOT FILE HAS ACTUALLY BEEN GENERATED
        try:
            with open(filepath):
                pass
            fileExists = True
        except IOError:
            raise IOError(
                "the path --pathToFile-- %s does not exist on this machine" %
                (filepath, ))
            filepath = False

        self.log.debug('completed the ``_create_lightcurve_plot_file`` method')

        return filepath, currentMag, gradient
    def test_utdatetime_conversions_function(self):

        from astrocalc.times import conversions
        converter = conversions(
            log=log,
        )

        print("\nUT = 20160426t144643.033433")
        print(converter.ut_datetime_to_mjd(
            utDatetime="20160426t144643.033433"))
        print(converter.mjd_to_ut_datetime(mjd=57504.61577585013))

        print("\nUT = 20160426t144643.033433")
        print(converter.ut_datetime_to_mjd(
            utDatetime="20160426t144643.033433"))
        print(converter.mjd_to_ut_datetime(mjd=57504.61577585013))

        print("\nUT = 20160426t144643.033433")
        print(converter.ut_datetime_to_mjd(
            utDatetime="20160426t144643.033433"))
        print(converter.mjd_to_ut_datetime(mjd=57504.61577585013))

        print("\nUT = 20160426t1446")
        print(converter.ut_datetime_to_mjd(utDatetime="20160426t1446"))
        print(converter.mjd_to_ut_datetime(mjd=57504.6153))
        print("\nUT = 20160426t144643")
        print(converter.ut_datetime_to_mjd(utDatetime="20160426t144643"))
        print(converter.mjd_to_ut_datetime(mjd=57504.61578))
        print("\nUT = 20160426t144643.033433")
        print(converter.ut_datetime_to_mjd(
            utDatetime="20160426t144643.033433"))
        print(converter.mjd_to_ut_datetime(mjd=57504.61577585013))
        print("\nUT = 20161231t234643.033433")
        print(converter.ut_datetime_to_mjd(
            utDatetime="20161231t234643.033433"))
        print(converter.mjd_to_ut_datetime(mjd="57753.99077585013"))
        print("\nUT = 201604261444")
        print(converter.ut_datetime_to_mjd(utDatetime="201604261444"))
        print(converter.mjd_to_ut_datetime(mjd=57504.6139))
        print("\nUT = 20160426")
        print(converter.ut_datetime_to_mjd(utDatetime="20160426"))
        print(converter.mjd_to_ut_datetime(mjd=57504.0))
        print("\nUT = 2016-04-26.33433")
        print(converter.ut_datetime_to_mjd(utDatetime="2016-04-26.33433"))
        print(converter.mjd_to_ut_datetime(mjd=57504.33411))
        print("\nUT = 20160426144444.5452")
        print(converter.ut_datetime_to_mjd(utDatetime="20160426144444.5452"))
        print(converter.mjd_to_ut_datetime(mjd=57504.614404459))
        print("\nUT = 2016-04-26 14:44:44.234")
        print(converter.ut_datetime_to_mjd(
            utDatetime="2016-04-26 14:44:44.234"))
        print(converter.mjd_to_ut_datetime(mjd=57504.61440086))
        print("\nUT = 20160426 14h44m44.432s")
        print(converter.ut_datetime_to_mjd(
            utDatetime="20160426 14h44m44.432s"))
        print(converter.mjd_to_ut_datetime(mjd=57504.61440315))
        print("\nUT = 2016-04-26T14:44:44.234")
        print(converter.ut_datetime_to_mjd(
            utDatetime="2016-04-26T14:44:44.234"))
        print(converter.mjd_to_ut_datetime(
            mjd=57504.61440086,
            sqlDate=True
        ))
Example #7
def create_lc_depreciated(
        log,
        cacheDirectory,
        epochs):
    """*create the atlas lc for one transient*

    **Key Arguments**

    - ``cacheDirectory`` -- the directory to add the lightcurve to
    - ``log`` -- logger
    - ``epochs`` -- dictionary of lightcurve data-points
    

    **Return**

    - None
    

    **Usage**

    .. todo::

        add usage info
        create a sublime snippet for usage

    ```python
    usage code
    ```
    
    """
    log.debug('starting the ``create_lc`` function')

    # c = cyan, o = orange
    magnitudes = {
        'c': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
        'o': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
        'I': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
    }

    limits = {
        'c': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
        'o': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
        'I': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
    }

    discoveryMjd = False
    for epoch in epochs:
        if epoch["filter"] not in ["c", "o", "I"]:
            continue
        objectName = epoch["atlas_designation"]
        if epoch["limiting_mag"] == 1:
            limits[epoch["filter"]]["mjds"].append(epoch["mjd_obs"])
            limits[epoch["filter"]]["mags"].append(epoch["mag"])
            limits[epoch["filter"]]["magErrs"].append(epoch["dm"])
            limits[epoch["filter"]]["zp"].append(epoch["zp"])
            flux = 10**(old_div((float(epoch["zp"]) -
                                 float(epoch["mag"])), 2.5))
            limits[epoch["filter"]]["flux"].append(flux)
        else:
            if not discoveryMjd or discoveryMjd > epoch["mjd_obs"]:
                discoveryMjd = epoch["mjd_obs"]
            magnitudes[epoch["filter"]]["mjds"].append(epoch["mjd_obs"])
            magnitudes[epoch["filter"]]["mags"].append(epoch["mag"])
            magnitudes[epoch["filter"]]["magErrs"].append(epoch["dm"])
            magnitudes[epoch["filter"]]["zp"].append(epoch["zp"])
            flux = 10**(old_div((float(epoch["zp"]) -
                                 float(epoch["mag"])), 2.5))
            magnitudes[epoch["filter"]]["flux"].append(flux)

    # GENERATE THE FIGURE FOR THE PLOT
    fig = plt.figure(
        num=None,
        figsize=(10, 10),
        dpi=100,
        facecolor=None,
        edgecolor=None,
        frameon=True)

    mpl.rc('ytick', labelsize=20)
    mpl.rc('xtick', labelsize=20)
    mpl.rcParams.update({'font.size': 22})

    # FORMAT THE AXES
    ax = fig.add_axes(
        [0.1, 0.1, 0.8, 0.8],
        polar=False,
        frameon=True)
    ax.set_xlabel('MJD', labelpad=20)
    ax.set_ylabel('Apparent Magnitude', labelpad=15)

    # fig.text(0.1, 1.0, "ATLAS", ha="left", color="#2aa198", fontsize=40)
    # fig.text(0.275, 1.0, objectName.replace("ATLAS", ""),
    #          color="#FFA500", ha="left", fontsize=40)
    fig.text(0.1, 1.02, objectName, ha="left", fontsize=40)

    # ax.set_title(objectName, y=1.10, ha='left', position=(0, 1.11))
    plt.setp(ax.xaxis.get_majorticklabels(),
             rotation=45, horizontalalignment='right')
    import matplotlib.ticker as mtick
    ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

    # ADD MAGNITUDES AND LIMITS FOR EACH FILTER
    # plt.scatter(magnitudes['o']['mjds'], magnitudes['o']['mags'], s=20., c=None, alpha=0.9,
    # edgecolors='#FFA500', linewidth=1.0, facecolors='#FFA500')
    handles = []

    # SET AXIS LIMITS FOR MAGNITUDES
    upperMag = -99
    lowerMag = 99

    # DETERMINE THE TIME-RANGE OF DETECTION FOR THE SOURCE
    mjdList = magnitudes['o']['mjds'] + \
        magnitudes['c']['mjds'] + magnitudes['I']['mjds']

    if len(mjdList) == 0:
        return

    lowerDetectionMjd = min(mjdList)
    upperDetectionMjd = max(mjdList)
    mjdLimitList = limits['o']['mjds'] + \
        limits['c']['mjds'] + limits['I']['mjds']
    priorLimitsFlavour = None
    for l in sorted(mjdLimitList):
        if l < lowerDetectionMjd and l > lowerDetectionMjd - 30.:
            priorLimitsFlavour = 1
    if not priorLimitsFlavour:
        for l in mjdLimitList:
            if l < lowerDetectionMjd - 30.:
                priorLimitsFlavour = 2
                lowerMJDLimit = l - 2

    if not priorLimitsFlavour:
        fig.text(0.1, -0.08, "* no recent pre-discovery detection limit > $5\\sigma$",
                 ha="left", fontsize=16)

    postLimitsFlavour = None

    for l in sorted(mjdLimitList):
        if l > upperDetectionMjd and l < upperDetectionMjd + 10.:
            postLimitsFlavour = 1
    if not postLimitsFlavour:
        for l in reversed(mjdLimitList):
            if l > upperDetectionMjd + 10.:
                postLimitsFlavour = 2
                upperMJDLimit = l + 2

    if priorLimitsFlavour or postLimitsFlavour:
        limits = {
            'c': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
            'o': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
            'I': {'mjds': [], 'mags': [], 'magErrs': [], 'flux': [], 'zp': []},
        }
        for epoch in epochs:
            objectName = epoch["atlas_designation"]
            if (epoch["limiting_mag"] == 1 and ((priorLimitsFlavour == 1 and epoch["mjd_obs"] > lowerDetectionMjd - 30.) or (priorLimitsFlavour == 2 and epoch["mjd_obs"] > lowerMJDLimit) or priorLimitsFlavour == None) and ((postLimitsFlavour == 1 and epoch["mjd_obs"] < upperDetectionMjd + 10.) or (postLimitsFlavour == 2 and epoch["mjd_obs"] < upperMJDLimit) or postLimitsFlavour == None)):
                limits[epoch["filter"]]["mjds"].append(epoch["mjd_obs"])
                limits[epoch["filter"]]["mags"].append(epoch["mag"])
                limits[epoch["filter"]]["magErrs"].append(epoch["dm"])
                limits[epoch["filter"]]["zp"].append(epoch["zp"])
                flux = 10**(old_div((float(epoch["zp"]) -
                                     float(epoch["mag"])), 2.5))
                limits[epoch["filter"]]["flux"].append(flux)

    allMags = limits['o']['mags'] + limits['c']['mags'] + \
        magnitudes['o']['mags'] + magnitudes['c']['mags']
    magRange = max(allMags) - min(allMags)
    if magRange < 4.:
        deltaMag = 0.1
    else:
        deltaMag = magRange * 0.08
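        # e.g. A 5 MAG RANGE GIVES 0.4 MAG OF PADDING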

    if len(limits['o']['mjds']):
        limitLeg = plt.scatter(limits['o']['mjds'], limits['o']['mags'], s=170., c=None, alpha=0.8,
                               edgecolors='#FFA500', linewidth=1.0, facecolors='none', label="$5\\sigma$ limit  ")
        handles.append(limitLeg)
        if max(limits['o']['mags']) > upperMag:
            upperMag = max(limits['o']['mags'])
            upperMagIndex = np.argmax(limits['o']['mags'])
            # MAG PADDING
            upperFlux = limits['o']['flux'][
                upperMagIndex] - 10**(old_div(deltaMag, 2.5))

        # if min(limits['o']['mags']) < lowerMag:
        #     lowerMag = min(limits['o']['mags'])
    if len(limits['c']['mjds']):
        limitLeg = plt.scatter(limits['c']['mjds'], limits['c']['mags'], s=170., c=None, alpha=0.8,
                               edgecolors='#2aa198', linewidth=1.0, facecolors='none', label="$5\\sigma$ limit  ")
        if len(handles) == 0:
            handles.append(limitLeg)
        if max(limits['c']['mags']) > upperMag:
            upperMag = max(limits['c']['mags'])
            upperMagIndex = np.argmax(limits['c']['mags'])
            # MAG PADDING
            upperFlux = limits['c']['flux'][
                upperMagIndex] - 10**(old_div(deltaMag, 2.5))
        # if min(limits['c']['mags']) < lowerMag:
        #     lowerMag = min(limits['c']['mags'])

    if len(limits['I']['mjds']):
        limitLeg = plt.scatter(limits['I']['mjds'], limits['I']['mags'], s=170., c=None, alpha=0.8,
                               edgecolors='#dc322f', linewidth=1.0, facecolors='none', label="$5\\sigma$ limit  ")
        if len(handles) == 0:
            handles.append(limitLeg)
        if max(limits['I']['mags']) > upperMag:
            upperMag = max(limits['I']['mags'])
            upperMagIndex = np.argmax(limits['I']['mags'])
            # MAG PADDING
            upperFlux = limits['I']['flux'][
                upperMagIndex] - 10**(old_div(deltaMag, 2.5))
    if len(magnitudes['o']['mjds']):
        orangeMag = plt.errorbar(magnitudes['o']['mjds'], magnitudes['o']['mags'], yerr=magnitudes[
            'o']['magErrs'], color='#FFA500', fmt='o', mfc='#FFA500', mec='#FFA500', zorder=1, ms=12., alpha=0.8, linewidth=1.2,  label='o-band mag ', capsize=10)

        # ERRORBAR STYLE
        orangeMag[-1][0].set_linestyle('--')
        # ERRORBAR CAP THICKNESS
        orangeMag[1][0].set_markeredgewidth('0.7')
        orangeMag[1][1].set_markeredgewidth('0.7')
        handles.append(orangeMag)
        if max(np.array(magnitudes['o']['mags']) + np.array(magnitudes['o']['magErrs'])) > upperMag:
            upperMag = max(
                np.array(magnitudes['o']['mags']) + np.array(magnitudes['o']['magErrs']))
            upperMagIndex = np.argmax((
                magnitudes['o']['mags']) + np.array(magnitudes['o']['magErrs']))
            # MAG PADDING
            upperFlux = magnitudes['o']['flux'][
                upperMagIndex] - 10**(old_div(deltaMag, 2.5))

        if min(np.array(magnitudes['o']['mags']) - np.array(magnitudes['o']['magErrs'])) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['o']['mags']) - np.array(magnitudes['o']['magErrs']))
            lowerMagIndex = np.argmin((
                magnitudes['o']['mags']) - np.array(magnitudes['o']['magErrs']))
            # MAG PADDING
            lowerFlux = magnitudes['o']['flux'][
                lowerMagIndex] + 10**(old_div(deltaMag, 2.5))
    if len(magnitudes['c']['mjds']):
        cyanMag = plt.errorbar(magnitudes['c']['mjds'], magnitudes['c']['mags'], yerr=magnitudes[
            'c']['magErrs'], color='#2aa198', fmt='o', mfc='#2aa198', mec='#2aa198', zorder=1, ms=12., alpha=0.8, linewidth=1.2, label='c-band mag ', capsize=10)
        # ERRORBAR STYLE
        cyanMag[-1][0].set_linestyle('--')
        # ERRORBAR CAP THICKNESS
        cyanMag[1][0].set_markeredgewidth('0.7')
        cyanMag[1][1].set_markeredgewidth('0.7')
        handles.append(cyanMag)
        if max(np.array(magnitudes['c']['mags']) + np.array(magnitudes['c']['magErrs'])) > upperMag:
            upperMag = max(
                np.array(magnitudes['c']['mags']) + np.array(magnitudes['c']['magErrs']))
            upperMagIndex = np.argmax((
                magnitudes['c']['mags']) + np.array(magnitudes['c']['magErrs']))
            # MAG PADDING
            upperFlux = magnitudes['c']['flux'][
                upperMagIndex] - 10**(old_div(deltaMag, 2.5))
        if min(np.array(magnitudes['c']['mags']) - np.array(magnitudes['c']['magErrs'])) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['c']['mags']) - np.array(magnitudes['c']['magErrs']))
            lowerMagIndex = np.argmin(
                (magnitudes['c']['mags']) - np.array(magnitudes['c']['magErrs']))
            # MAG PADDING
            lowerFlux = magnitudes['c']['flux'][
                lowerMagIndex] + 10**(old_div(deltaMag, 2.5))
    if len(magnitudes['I']['mjds']):
        cyanMag = plt.errorbar(magnitudes['I']['mjds'], magnitudes['I']['mags'], yerr=magnitudes[
            'I']['magErrs'], color='#dc322f', fmt='o', mfc='#dc322f', mec='#dc322f', zorder=1, ms=12., alpha=0.8, linewidth=1.2, label='I-band mag ', capsize=10)
        # ERRORBAR STYLE
        cyanMag[-1][0].set_linestyle('--')
        # ERRORBAR CAP THICKNESS
        cyanMag[1][0].set_markeredgewidth('0.7')
        cyanMag[1][1].set_markeredgewidth('0.7')
        handles.append(cyanMag)
        if max(np.array(magnitudes['I']['mags']) + np.array(magnitudes['I']['magErrs'])) > upperMag:
            upperMag = max(
                np.array(magnitudes['I']['mags']) + np.array(magnitudes['I']['magErrs']))
            upperMagIndex = np.argmax((
                magnitudes['I']['mags']) + np.array(magnitudes['I']['magErrs']))
            # MAG PADDING
            upperFlux = magnitudes['I']['flux'][
                upperMagIndex] - 10**(old_div(deltaMag, 2.5))
        if min(np.array(magnitudes['I']['mags']) - np.array(magnitudes['I']['magErrs'])) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['I']['mags']) - np.array(magnitudes['I']['magErrs']))
            lowerMagIndex = np.argmin(
                (magnitudes['I']['mags']) - np.array(magnitudes['I']['magErrs']))
            # MAG PADDING
            lowerFlux = magnitudes['I']['flux'][
                lowerMagIndex] + 10**(old_div(deltaMag, 2.5))

    plt.legend(handles=handles, prop={
               'size': 13.5}, bbox_to_anchor=(1., 1.2), loc=0, borderaxespad=0., ncol=4, scatterpoints=1)

    # SET THE TEMPORAL X-RANGE
    allMjd = limits['o']['mjds'] + limits['c']['mjds'] + \
        magnitudes['o']['mjds'] + magnitudes['c']['mjds']
    xmin = min(allMjd) - 2.
    xmax = max(allMjd) + 2.
    ax.set_xlim([xmin, xmax])

    ax.set_ylim([lowerMag - deltaMag, upperMag + deltaMag])
    # FLIP THE MAGNITUDE AXIS
    plt.gca().invert_yaxis()

    # ADD SECOND Y-AXIS
    ax2 = ax.twinx()
    ax2.set_yscale('log')
    ax2.set_ylim([upperFlux, lowerFlux])
    y_formatter = mpl.ticker.FormatStrFormatter("%d")
    ax2.yaxis.set_major_formatter(y_formatter)

    # RELATIVE TIME SINCE DISCOVERY
    lower, upper = ax.get_xlim()
    from astrocalc.times import conversions
    # CONVERTER TO CONVERT MJD TO DATE
    converter = conversions(
        log=log
    )
    utLower = converter.mjd_to_ut_datetime(mjd=lower, datetimeObject=True)
    utUpper = converter.mjd_to_ut_datetime(mjd=upper, datetimeObject=True)

    # ADD SECOND X-AXIS
    ax3 = ax.twiny()
    ax3.set_xlim([utLower, utUpper])
    ax3.grid(True)
    ax.xaxis.grid(False)
    plt.setp(ax3.xaxis.get_majorticklabels(),
             rotation=45, horizontalalignment='left')
    ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d'))
    # ax3.set_xlabel('Since Discovery (d)',  labelpad=10,)

    # # Put a legend on plot
    # box = ax.get_position()
    # ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    # ax.legend(loc='top right', bbox_to_anchor=(1.1, 0.5), prop={'size': 8})

    from matplotlib.ticker import LogLocator
    minorLocator = LogLocator(base=10, subs=[2.0, 5.0])
    if magRange < 1.5:
        minorLocator = LogLocator(
            base=10, subs=[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0])
    ax2.yaxis.set_minor_locator(minorLocator)
    ax2.yaxis.set_minor_formatter(y_formatter)
    ax2.tick_params(axis='y', which='major', pad=5)
    ax2.tick_params(axis='y', which='minor', pad=5)
    ax2.set_ylabel('Approx. Counts', rotation=-90.,  labelpad=27)

    ax2.grid(False)
    # SAVE PLOT TO FILE
    pathToOutputPlotFolder = ""
    title = objectName + " forced photometry lc"
    # Recursively create missing directories
    if not os.path.exists(cacheDirectory):
        os.makedirs(cacheDirectory)
    fileName = cacheDirectory + "/atlas_fp_lightcurve.png"
    plt.savefig(fileName, bbox_inches='tight', transparent=False,
                pad_inches=0.1)

    # CLEAR FIGURE
    plt.clf()

    log.debug('completed the ``create_lc`` function')
    return None
def generate_atlas_lightcurves(dbConn, log, settings):
    """generate all atlas FP lightcurves (clipped and stacked)

    **Key Arguments**

    - ``dbConn`` -- mysql database connection
    - ``log`` -- logger
    - ``settings`` -- settings for the marshall.

    ```python
    from marshallEngine.feeders.atlas.lightcurve import generate_atlas_lightcurves
    generate_atlas_lightcurves(
        log=log,
        dbConn=dbConn,
        settings=settings
    )
    ```
    """
    log.debug('starting the ``generate_atlas_lightcurves`` function')

    # SELECT SOURCES THAT NEED THEIR ATLAS FP LIGHTCURVES CREATED/UPDATED
    sqlQuery = u"""
        SELECT
                t.transientBucketId
            FROM
                transientBucket t ,pesstoObjects p
            WHERE
                p.transientBucketId=t.transientBucketId
                and t.survey = 'ATLAS FP' and t.limitingMag = 0
                and ((p.atlas_fp_lightcurve < t.dateCreated and p.atlas_fp_lightcurve != 0) or p.atlas_fp_lightcurve is null)
            GROUP BY t.transientBucketId;
    """
    rows = readquery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)
    transientIds = [r["transientBucketId"] for r in rows]

    total = len(transientIds)
    if total > 1000:
        print(
            "ATLAS lightcurves need to be generated for %(total)s sources - generating the next 1000"
            % locals())
        transientIds = transientIds[:1000]
        total = len(transientIds)
    else:
        print("Generating ATLAS lightcurves for %(total)s sources" % locals())

    # SETUP THE INITIAL FIGURE FOR THE PLOT (ONLY ONCE)
    fig = plt.figure(num=None,
                     figsize=(10, 10),
                     dpi=100,
                     facecolor=None,
                     edgecolor=None,
                     frameon=True)
    mpl.rc('ytick', labelsize=18)
    mpl.rc('xtick', labelsize=18)
    mpl.rcParams.update({'font.size': 22})

    # FORMAT THE AXES
    ax = fig.add_axes([0.1, 0.1, 0.8, 0.8], polar=False, frameon=True)
    ax.set_xlabel('MJD', labelpad=20)
    ax.set_yticks([2.2])

    # RHS AXIS TICKS
    plt.setp(ax.xaxis.get_majorticklabels(),
             rotation=45,
             horizontalalignment='right')
    ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

    y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
    ax.yaxis.set_major_formatter(y_formatter)
    ax.xaxis.grid(False)

    # ADD SECOND Y-AXIS
    ax2 = ax.twinx()
    ax2.yaxis.set_major_formatter(y_formatter)
    ax2.set_ylabel(r'Flux ($\mu$Jy)', rotation=-90., labelpad=27)
    ax2.grid(False)

    # ADD SECOND X-AXIS
    ax3 = ax.twiny()
    ax3.grid(True)
    plt.setp(ax3.xaxis.get_majorticklabels(),
             rotation=45,
             horizontalalignment='left')

    # CONVERTER TO CONVERT MJD TO DATE
    converter = conversions(log=log)

    if len(transientIds) < 3:
        plotPaths = []
        for transientBucketId in transientIds:
            plotPaths.append(
                plot_single_result(log=log,
                                   transientBucketId=transientBucketId,
                                   fig=fig,
                                   converter=converter,
                                   ax=ax,
                                   settings=settings))
    else:
        log.info("""starting multiprocessing""")
        plotPaths = fmultiprocess(log=log,
                                  function=plot_single_result,
                                  inputArray=transientIds,
                                  poolSize=False,
                                  timeout=7200,
                                  fig=fig,
                                  converter=converter,
                                  ax=ax,
                                  settings=settings)
        log.info("""finished multiprocessing""")

    # REMOVE MISSING PLOTS
    transientIdGood = [t for p, t in zip(plotPaths, transientIds) if p]
    transientIdBad = [t for p, t in zip(plotPaths, transientIds) if p is None]

    # UPDATE THE atlas_fp_lightcurve DATE FOR TRANSIENTS WE HAVE JUST
    # GENERATED PLOTS FOR
    if len(transientIdGood):
        transientIdGood = (",").join([str(t) for t in transientIdGood])
        sqlQuery = f"""update pesstoObjects set atlas_fp_lightcurve = NOW() where transientBucketID in ({transientIdGood})"""
        writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    # UPDATE THE atlas_fp_lightcurve DATE FOR TRANSIENTS WE HAVE JUST
    # GENERATED PLOTS FOR
    if len(transientIdBad):
        transientIdBad = (",").join([str(t) for t in transientIdBad])
        sqlQuery = f"""update pesstoObjects set atlas_fp_lightcurve = 0 where transientBucketID in ({transientIdBad})"""
        writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    log.debug('completed the ``generate_atlas_lightcurves`` function')
    return None
Example #9
def create_lc(
        log,
        cacheDirectory,
        epochs):
    """*create the atlas lc for one transient*

    **Key Arguments**

    - ``cacheDirectory`` -- the directory to add the lightcurve to
    - ``log`` -- logger
    - ``epochs`` -- dictionary of lightcurve data-points
    

    **Return**

    - None
    

    **Usage**

    .. todo::

        add usage info
        create a sublime snippet for usage

    ```python
    usage code
    ```
    
    """
    log.debug('starting the ``create_lc`` function')

    from astrocalc.times import conversions
    # CONVERTER TO CONVERT MJD TO DATE
    converter = conversions(
        log=log
    )

    # c = cyan, o = orange
    magnitudes = {
        'c': {'mjds': [], 'mags': [], 'magErrs': []},
        'o': {'mjds': [], 'mags': [], 'magErrs': []},
        'I': {'mjds': [], 'mags': [], 'magErrs': []},
    }

    summedMagnitudes = {
        'c': {'mjds': [], 'mags': [], 'magErrs': []},
        'o': {'mjds': [], 'mags': [], 'magErrs': []},
        'I': {'mjds': [], 'mags': [], 'magErrs': []},
    }

    limits = {
        'c': {'mjds': [], 'mags': [], 'magErrs': []},
        'o': {'mjds': [], 'mags': [], 'magErrs': []},
        'I': {'mjds': [], 'mags': [], 'magErrs': []},
    }

    discoveryMjd = False
    for epoch in epochs:
        objectName = epoch["atlas_designation"]
        if not epoch["fnu"]:
            continue

        if epoch["mjd_obs"] < 50000.:
            continue

        if not epoch["snr"] <= 5 and (not discoveryMjd or discoveryMjd > epoch["mjd_obs"]):
            discoveryMjd = epoch["mjd_obs"]

        if epoch["snr"] <= 3 and epoch["filter"] in ["c", "o", "I"]:
            limits[epoch["filter"]]["mjds"].append(epoch["mjd_obs"])
            limits[epoch["filter"]]["mags"].append(epoch["fnu"])
            limits[epoch["filter"]]["magErrs"].append(epoch["fnu_error"])
        elif epoch["filter"] in ["c", "o", "I"]:
            magnitudes[epoch["filter"]]["mjds"].append(epoch["mjd_obs"])
            magnitudes[epoch["filter"]]["mags"].append(epoch["fnu"])
            magnitudes[epoch["filter"]]["magErrs"].append(epoch["fnu_error"])

    for fil, d in list(magnitudes.items()):
        distinctMjds = {}
        for m, f, e in zip(d["mjds"], d["mags"], d["magErrs"]):
            key = str(int(math.floor(m)))
            if key not in distinctMjds:
                distinctMjds[key] = {
                    "mjds": [m],
                    "mags": [f],
                    "magErrs": [e]
                }
            else:
                distinctMjds[key]["mjds"].append(m)
                distinctMjds[key]["mags"].append(f)
                distinctMjds[key]["magErrs"].append(e)

        for k, v in list(distinctMjds.items()):
            summedMagnitudes[fil]["mjds"].append(
                old_div(sum(v["mjds"]), len(v["mjds"])))
            summedMagnitudes[fil]["mags"].append(
                old_div(sum(v["mags"]), len(v["mags"])))
            summedMagnitudes[fil]["magErrs"].append(sum(v["magErrs"]) / len(v["magErrs"]
                                                                            ) / math.sqrt(len(v["magErrs"])))

    if not discoveryMjd:
        return

    # COMMENT THIS LINE OUT TO PLOT ALL MAGNITUDE MEASUREMENTS INSTEAD OF
    # SUMMED
    magnitudes = summedMagnitudes

    # DUMP OUT SUMMED ATLAS MAGNITUDE
    # for m, l, e in zip(limits['o']["mjds"], limits['o']["mags"], limits['o']["magErrs"]):
    #     print "%(m)s, o, %(l)s, %(e)s, <3" % locals()
    # for m, l, e in zip(limits['c']["mjds"], limits['c']["mags"], limits['c']["magErrs"]):
    #     print "%(m)s, c, %(l)s, %(e)s, <3" % locals()

    # for m, l, e in zip(magnitudes['o']["mjds"], magnitudes['o']["mags"], magnitudes['o']["magErrs"]):
    #     print "%(m)s, o, %(l)s, %(e)s," % locals()
    # for m, l, e in zip(magnitudes['c']["mjds"], magnitudes['c']["mags"], magnitudes['c']["magErrs"]):
    #     print "%(m)s, c, %(l)s, %(e)s," % locals()

    discoveryUT = converter.mjd_to_ut_datetime(
        mjd=discoveryMjd, datetimeObject=True)

    discoveryUT = discoveryUT.strftime("%Y %m %d %H:%M")

    summedMagnitudes = {}

    # GENERATE THE FIGURE FOR THE PLOT
    fig = plt.figure(
        num=None,
        figsize=(10, 10),
        dpi=100,
        facecolor=None,
        edgecolor=None,
        frameon=True)

    mpl.rc('ytick', labelsize=20)
    mpl.rc('xtick', labelsize=20)
    mpl.rcParams.update({'font.size': 22})

    # FORMAT THE AXES
    ax = fig.add_axes(
        [0.1, 0.1, 0.8, 0.8],
        polar=False,
        frameon=True)
    ax.set_xlabel('MJD', labelpad=20)
    ax.set_ylabel('Apparent Magnitude', labelpad=15)

    # ax.set_yscale('log')

    # ATLAS OBJECT NAME LABEL AS TITLE
    fig.text(0.1, 1.02, objectName, ha="left", fontsize=40)

    # RHS AXIS TICKS
    plt.setp(ax.xaxis.get_majorticklabels(),
             rotation=45, horizontalalignment='right')
    import matplotlib.ticker as mtick
    ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

    # ADD MAGNITUDES AND LIMITS FOR EACH FILTER
    handles = []

    # SET AXIS LIMITS FOR MAGNITUDES
    upperMag = -99
    lowerMag = 99

    # DETERMINE THE TIME-RANGE OF DETECTION FOR THE SOURCE
    mjdList = magnitudes['o']['mjds'] + \
        magnitudes['c']['mjds'] + magnitudes['I']['mjds']

    if len(mjdList) == 0:
        return

    lowerDetectionMjd = min(mjdList)
    upperDetectionMjd = max(mjdList)
    mjdLimitList = limits['o']['mjds'] + \
        limits['c']['mjds'] + limits['I']['mjds']
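    # CLASSIFY PRE- AND POST-DETECTION LIMITS. FLAVOUR 1: A NON-DETECTION
    # LIMIT EXISTS WITHIN 30 DAYS BEFORE THE FIRST DETECTION (OR 10 DAYS
    # AFTER THE LAST). FLAVOUR 2: ONLY MORE DISTANT LIMITS EXIST, SO A CUTOFF
    # MJD IS RECORDED AND USED BELOW TO DECIDE WHICH LIMITS TO RE-SELECT.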
    priorLimitsFlavour = None
    for l in sorted(mjdLimitList):
        if l < lowerDetectionMjd and l > lowerDetectionMjd - 30.:
            priorLimitsFlavour = 1
    if not priorLimitsFlavour:
        for l in mjdLimitList:
            if l < lowerDetectionMjd - 30.:
                priorLimitsFlavour = 2
                lowerMJDLimit = l - 2

    if not priorLimitsFlavour:
        fig.text(0.1, -0.08, "* no recent pre-discovery detection limit > $5\\sigma$",
                 ha="left", fontsize=16)

    postLimitsFlavour = None

    for l in sorted(mjdLimitList):
        if l > upperDetectionMjd and l < upperDetectionMjd + 10.:
            postLimitsFlavour = 1
    if not postLimitsFlavour:
        for l in reversed(mjdLimitList):
            if l > upperDetectionMjd + 10.:
                postLimitsFlavour = 2
                upperMJDLimit = l + 2

    if priorLimitsFlavour or postLimitsFlavour:
        limits = {
            'c': {'mjds': [], 'mags': [], 'magErrs': []},
            'o': {'mjds': [], 'mags': [], 'magErrs': []},
            'I': {'mjds': [], 'mags': [], 'magErrs': []},
        }
        for epoch in epochs:
            if epoch["filter"] not in ["c", "o", "I"]:
                continue
            objectName = epoch["atlas_designation"]
            if not epoch["fnu"]:
                continue

            if epoch["mjd_obs"] < 50000.:
                continue

            if (epoch["snr"] <= 3 and ((priorLimitsFlavour == 1 and epoch["mjd_obs"] > lowerDetectionMjd - 30.) or (priorLimitsFlavour == 2 and epoch["mjd_obs"] > lowerMJDLimit) or priorLimitsFlavour == None) and ((postLimitsFlavour == 1 and epoch["mjd_obs"] < upperDetectionMjd + 10.) or (postLimitsFlavour == 2 and epoch["mjd_obs"] < upperMJDLimit) or postLimitsFlavour == None)):
                limits[epoch["filter"]]["mjds"].append(epoch["mjd_obs"])
                limits[epoch["filter"]]["mags"].append(epoch["fnu"])
                # 000 limits[epoch["filter"]]["magErrs"].append(epoch["dm"])
                limits[epoch["filter"]]["magErrs"].append(epoch["fnu_error"])

    allMags = magnitudes['o']['mags'] + magnitudes['c']['mags']
    magRange = max(allMags) - min(allMags)

    deltaMag = magRange * 0.1

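    # PLOT THE NON-DETECTION LIMITS AS OPEN CIRCLES AND THE DETECTIONS AS
    # FILLED CIRCLES, COLOURED BY FILTER (ORANGE FOR o, TEAL FOR c, RED FOR
    # I).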
    if len(limits['o']['mjds']):
        limitLeg = ax.errorbar(limits['o']['mjds'], limits['o']['mags'], yerr=limits[
            'o']['magErrs'], color='#FFA500', fmt='o', mfc='white', mec='#FFA500', zorder=1, ms=12., alpha=0.8, linewidth=0.4,  label='<3$\\sigma$ ', capsize=10, markeredgewidth=1.2)

        # ERRORBAR CAP THICKNESS
        handles.append(limitLeg)
        limitLeg[1][0].set_markeredgewidth('0.4')
        limitLeg[1][1].set_markeredgewidth('0.4')

        # if min(limits['o']['mags']) < lowerMag:
        #     lowerMag = min(limits['o']['mags'])
    if len(limits['c']['mjds']):
        limitLeg = ax.errorbar(limits['c']['mjds'], limits['c']['mags'], yerr=limits[
            'c']['magErrs'], color='#2aa198', fmt='o', mfc='white', mec='#2aa198', zorder=1, ms=12., alpha=0.8, linewidth=0.4, label='<3$\\sigma$ ', capsize=10, markeredgewidth=1.2)
        # ERRORBAR CAP THICKNESS
        limitLeg[1][0].set_markeredgewidth('0.4')
        limitLeg[1][1].set_markeredgewidth('0.4')
        if not len(handles):
            handles.append(limitLeg)

    if len(limits['I']['mjds']):
        limitLeg = ax.errorbar(limits['I']['mjds'], limits['I']['mags'], yerr=limits[
            'I']['magErrs'], color='#dc322f', fmt='o', mfc='white', mec='#dc322f', zorder=1, ms=12., alpha=0.8, linewidth=0.4, label='<3$\\sigma$ ', capsize=10, markeredgewidth=1.2)
        # ERRORBAR CAP THICKNESS
        limitLeg[1][0].set_markeredgewidth('0.4')
        limitLeg[1][1].set_markeredgewidth('0.4')
        if not len(handles):
            handles.append(limitLeg)

    if len(magnitudes['o']['mjds']):
        orangeMag = ax.errorbar(magnitudes['o']['mjds'], magnitudes['o']['mags'], yerr=magnitudes[
            'o']['magErrs'], color='#FFA500', fmt='o', mfc='#FFA500', mec='#FFA500', zorder=1, ms=12., alpha=0.8, linewidth=1.2,  label='o-band mag ', capsize=10)

        # ERRORBAR CAP THICKNESS
        orangeMag[1][0].set_markeredgewidth('0.7')
        orangeMag[1][1].set_markeredgewidth('0.7')
        handles.append(orangeMag)
        if max(np.array(magnitudes['o']['mags']) + np.array(magnitudes['o']['magErrs'])) > upperMag:
            upperMag = max(
                np.array(magnitudes['o']['mags']) + np.array(magnitudes['o']['magErrs']))
            upperMagIndex = np.argmax((
                magnitudes['o']['mags']) + np.array(magnitudes['o']['magErrs']))

        if min(np.array(magnitudes['o']['mags']) - np.array(magnitudes['o']['magErrs'])) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['o']['mags']) - np.array(magnitudes['o']['magErrs']))
            lowerMagIndex = np.argmin((
                magnitudes['o']['mags']) - np.array(magnitudes['o']['magErrs']))

    if len(magnitudes['c']['mjds']):
        cyanMag = ax.errorbar(magnitudes['c']['mjds'], magnitudes['c']['mags'], yerr=magnitudes[
            'c']['magErrs'], color='#2aa198', fmt='o', mfc='#2aa198', mec='#2aa198', zorder=1, ms=12., alpha=0.8, linewidth=1.2, label='c-band mag ', capsize=10)
        # ERRORBAR CAP THICKNESS
        cyanMag[1][0].set_markeredgewidth('0.7')
        cyanMag[1][1].set_markeredgewidth('0.7')
        handles.append(cyanMag)
        if max(np.array(magnitudes['c']['mags']) + np.array(magnitudes['c']['magErrs'])) > upperMag:
            upperMag = max(
                np.array(magnitudes['c']['mags']) + np.array(magnitudes['c']['magErrs']))
            upperMagIndex = np.argmax((
                magnitudes['c']['mags']) + np.array(magnitudes['c']['magErrs']))

        if min(np.array(magnitudes['c']['mags']) - np.array(magnitudes['c']['magErrs'])) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['c']['mags']) - np.array(magnitudes['c']['magErrs']))
            lowerMagIndex = np.argmin(
                (magnitudes['c']['mags']) - np.array(magnitudes['c']['magErrs']))

    if len(magnitudes['I']['mjds']):
        cyanMag = ax.errorbar(magnitudes['I']['mjds'], magnitudes['I']['mags'], yerr=magnitudes[
            'I']['magErrs'], color='#dc322f', fmt='o', mfc='#dc322f', mec='#dc322f', zorder=1, ms=12., alpha=0.8, linewidth=1.2, label='I-band mag ', capsize=10)
        # ERRORBAR CAP THICKNESS
        cyanMag[1][0].set_markeredgewidth('0.7')
        cyanMag[1][1].set_markeredgewidth('0.7')
        handles.append(cyanMag)
        if max(np.array(magnitudes['I']['mags']) + np.array(magnitudes['I']['magErrs'])) > upperMag:
            upperMag = max(
                np.array(magnitudes['I']['mags']) + np.array(magnitudes['I']['magErrs']))
            upperMagIndex = np.argmax((
                magnitudes['I']['mags']) + np.array(magnitudes['I']['magErrs']))

        if min(np.array(magnitudes['I']['mags']) - np.array(magnitudes['I']['magErrs'])) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['I']['mags']) - np.array(magnitudes['I']['magErrs']))
            lowerMagIndex = np.argmin(
                (magnitudes['I']['mags']) - np.array(magnitudes['I']['magErrs']))

    plt.legend(handles=handles, prop={
               'size': 13.5}, bbox_to_anchor=(0.95, 1.2), loc=0, borderaxespad=0., ncol=4, scatterpoints=1)

    # SET THE TEMPORAL X-RANGE
    allMjd = magnitudes['o']['mjds'] + magnitudes['c']['mjds']
    xmin = min(allMjd) - 5.
    xmax = max(allMjd) + 5.
    ax.set_xlim([xmin, xmax])

    ax.set_ylim([0. - deltaMag, upperMag + deltaMag])
    y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
    ax.yaxis.set_major_formatter(y_formatter)

    # PLOT THE MAGNITUDE SCALE
    axisUpperFlux = upperMag
    axisLowerFlux = 1e-29

    axisLowerMag = -2.5 * math.log10(axisLowerFlux) - 48.6
    axisUpperMag = -2.5 * math.log10(axisUpperFlux) - 48.6

    ax.set_yticks([2.2])
    import matplotlib.ticker as ticker

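    # RELABEL THE LEFT FLUX AXIS IN AB MAGNITUDES: EACH TICK IS PLACED AT
    # THE VALUE 10^(-(m + 48.6)/2.5) * 1e27 FOR A SET OF ROUND MAGNITUDES m,
    # SO THE LEFT AXIS READS IN MAGNITUDES WHILE THE RIGHT-HAND TWIN AXIS
    # (ADDED BELOW) KEEPS THE SCALED FLUX VALUES.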
    magLabels = [20., 19.5, 19.0, 18.5,
                 18.0, 17.5, 17.0, 16.5, 16.0, 15.5, 15.0]
    magFluxes = [pow(10, old_div(-(m + 48.6), 2.5)) * 1e27 for m in magLabels]

    ax.yaxis.set_major_locator(ticker.FixedLocator((magFluxes)))
    ax.yaxis.set_major_formatter(ticker.FixedFormatter((magLabels)))
    # FLIP THE MAGNITUDE AXIS
    # plt.gca().invert_yaxis()

    # ADD SECOND Y-AXIS
    ax2 = ax.twinx()
    ax2.set_ylim([0. - deltaMag, upperMag + deltaMag])
    ax2.yaxis.set_major_formatter(y_formatter)

    # RELATIVE TIME SINCE DISCOVERY
    lower, upper = ax.get_xlim()
    utLower = converter.mjd_to_ut_datetime(mjd=lower, datetimeObject=True)
    utUpper = converter.mjd_to_ut_datetime(mjd=upper, datetimeObject=True)

    # ADD SECOND X-AXIS
    ax3 = ax.twiny()
    ax3.set_xlim([utLower, utUpper])
    ax3.grid(True)
    ax.xaxis.grid(False)
    plt.setp(ax3.xaxis.get_majorticklabels(),
             rotation=45, horizontalalignment='left')
    ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d'))
    # ax3.set_xlabel('Since Discovery (d)',  labelpad=10,)

    # # Put a legend on plot
    # box = ax.get_position()
    # ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    # ax.legend(loc='top right', bbox_to_anchor=(1.1, 0.5), prop={'size': 8})

    # from matplotlib.ticker import LogLocator
    # minorLocator = LogLocator(base=10, subs=[2.0, 5.0])
    # if magRange < 1.5:
    #     minorLocator = LogLocator(
    #         base=10, subs=[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0])
    # ax2.yaxis.set_minor_locator(minorLocator)
    # ax2.yaxis.set_minor_formatter(y_formatter)
    # ax2.tick_params(axis='y', which='major', pad=5)
    # ax2.tick_params(axis='y', which='minor', pad=5)
    ax2.set_ylabel('$F_{\\nu} \\times 10^{27}$', rotation=-90.,  labelpad=27)

    discoveryText = "discovery epoch\nmjd %(discoveryMjd)2.2f\n%(discoveryUT)s UT" % locals(
    )
    ax.text(0.05, 0.95, discoveryText,
            verticalalignment='top', horizontalalignment='left',
            transform=ax.transAxes,
            color='black', fontsize=12, linespacing=1.5)

    ax2.grid(False)
    # SAVE PLOT TO FILE
    pathToOutputPlotFolder = ""
    title = objectName + " forced photometry lc"
    # Recursively create missing directories
    if not os.path.exists(cacheDirectory):
        os.makedirs(cacheDirectory)
    fileName = cacheDirectory + "/atlas_fp_lightcurve.png"
    plt.savefig(fileName, bbox_inches='tight', transparent=False,
                pad_inches=0.1)

    # CLEAR FIGURE
    plt.clf()

    log.debug('completed the ``create_lc`` function')
    return None
Example #10
    def _create_lightcurve_plot_file(
            self,
            dataset,
            flatdata,
            flatLimits,
            objectNames,
            saveLocation,
            saveFileName):
        """*Generate the lightcurve and save to file*

        **Key Arguments**

        - ``log`` -- logger
        - ``dataset`` -- the observational dataset split into filters (and then mags, limits etc)
        - ``flatdata`` -- a flattened dataset to determine current magnitude
        - ``flatLimits`` -- a flattened dataset of non-detection limits
        - ``objectNames`` -- a single name or a list of names
        - ``saveLocation`` -- the folder to save the plot file to
        - ``saveFileName`` -- the filename to give the plot file (without extension)


        **Return**

        - ``filepath`` -- path to the lightcurve file
        - ``currentMag`` -- a prediction of the current magnitude if there is enough recent data
        - ``gradient`` -- a prediction of the gradient of recent data (on rise or decline?)

        """
        self.log.debug('starting the ``_create_lightcurve_plot_file`` method')

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(
            log=self.log
        )

        # INITIATE THE PLOT FIGURE - SQUARE
        fig = plt.figure(
            num=None,
            figsize=(10, 10),
            dpi=100,
            facecolor=None,
            edgecolor=None,
            frameon=True)
        ax = fig.add_subplot(1, 1, 1)

        # TICK LABEL SIZE
        mpl.rc('ytick', labelsize=25)
        mpl.rc('xtick', labelsize=25)
        mpl.rcParams.update({'font.size': 25})

        # INITIAL RESTRICTIONS
        currentMag = -9999
        gradient = -9999

        # WORK OUT RELATIVE DATES - NEEDED FOR CURRENT MAG ESTIMATES
        fixedTimeDataList = flatdata["mjd"]

        todayMjd = now(
            log=self.log
        ).get_mjd()

        timeList = []
        timeList[:] = [t - todayMjd for t in flatdata["mjd"]]

        # DETERMINE SENSIBLE AXIS LIMITS FROM FLATTENED DATA
        bigTimeArray, bigMagArray = np.array(
            flatdata["mjd"]), np.array(flatdata["mag"])
        xLowerLimit = min(bigTimeArray)
        xUpperLimit = max(bigTimeArray)
        latestTime = xUpperLimit
        xBorder = math.fabs((xUpperLimit - xLowerLimit)) * 0.1
        if xBorder < 5:
            xBorder = 5.
        xLowerLimit -= xBorder
        xUpperLimit += xBorder
        fixedXUpperLimit = xUpperLimit

        # RELATIVE TIMES - TO PREDICT CURRENT MAG
        relativeTimeArray = []
        relativeTimeArray[:] = [r - todayMjd for r in bigTimeArray]
        rxLowerLimit = min(relativeTimeArray)
        rxUpperLimit = max(relativeTimeArray)
        rlatestTime = xUpperLimit

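        # CURRENT-MAGNITUDE PREDICTION STRATEGY: FIT A STRAIGHT LINE TO THE
        # MOST RECENT POINTS (ROUGHLY THE LAST `lastNumDays` DAYS, OR THE
        # LAST FEW DATA POINTS) TO GET A GRADIENT, THEN FIT A LOW-ORDER
        # POLYNOMIAL TO THOSE SAME POINTS PLUS FAINT ANCHOR POINTS SO THE
        # EXTRAPOLATED LIGHTCURVE ALWAYS FADES; EVALUATING THE POLYNOMIAL AT
        # RELATIVE TIME ZERO (I.E. TODAY) GIVES THE CURRENT-MAGNITUDE
        # ESTIMATE.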
        # POLYNOMIAL CONSTRAINTS USING COMBINED DATASETS
        # POLYNOMIAL/LINEAR SETTINGS
        # SETTINGS FILE
        polyOrder = 3
        # EITHER USE DATA IN THESE LAST NUMBER OF DAYS OR ...
        lastNumDays = 10.
        # ... IF NOT ENOUGH DATA USE THE LAST NUMBER OF DATA POINTS
        predictCurrentMag = True
        lastNumDataPoints = 3
        numAnchors = 2
        anchorSeparation = 30
        latestMag = bigMagArray[0]
        anchorPointMag = latestMag + 20.
        polyTimeArray, polyMagArray = [], []
        newArray = np.array([])

        # QUIT IF NOT ENOUGH DATA FOR POLYNOMIAL
        if len(bigTimeArray) <= lastNumDataPoints:
            predictCurrentMag = False
        while predictCurrentMag and lastNumDataPoints < 6:
            if len(bigTimeArray) <= lastNumDataPoints:
                predictCurrentMag = False
            elif predictCurrentMag and bigTimeArray[-1] - bigTimeArray[-lastNumDataPoints] < 5:
                lastNumDataPoints += 1
            else:
                break
        if predictCurrentMag and bigTimeArray[-1] - bigTimeArray[-lastNumDataPoints] < 5:
            predictCurrentMag = False

        # FIND THE MOST RECENT OBSERVATION TAKEN > LASTNUMDAYS DAYS BEFORE THE LAST
        # OBSERVATION
        breakpoint = 0
        for thisIndex, v in enumerate(relativeTimeArray):
            if breakpoint:
                break
            if v < max(relativeTimeArray) - lastNumDays:
                breakpoint = 1
        else:
            if breakpoint == 0:
                predictCurrentMag = False

        if predictCurrentMag:
            # DETERMINE GRADIENT OF SLOPE FROM LAST `LASTNUMDAYS` DAYS
            linearTimeArray = relativeTimeArray[0:thisIndex]
            linearMagArray = bigMagArray[0:thisIndex].tolist()
            # FIT AND PLOT THE POLYNOMIAL ASSOCIATED WITH ALL DATA SETS
            thisLinear = np.polyfit(linearTimeArray, linearMagArray, 1)
            gradient = thisLinear[0]

            # FROM GRADIENT DETERMINE WHERE ANCHOR POINTS ARE PLACED
            if gradient > 0.1:
                firstAnchorPointTime = 120.
            elif gradient < -0.5:
                firstAnchorPointTime = 50
            elif gradient > -0.5:
                firstAnchorPointTime = 120 - (np.abs(gradient) - 0.1) * 300.
            else:
                firstAnchorPointTime = 120

            if firstAnchorPointTime > 120.:
                firstAnchorPointTime = 120.

            firstAnchorPointTime = firstAnchorPointTime + latestTime
            if firstAnchorPointTime < 30.:
                firstAnchorPointTime = 30.

            # CREATE THE ARRAY OF DATA USED TO GENERATE THE POLYNOMIAL
            polyTimeArray = relativeTimeArray[0:thisIndex]
            polyMagArray = bigMagArray[0:thisIndex].tolist()

            printArray = []
            printArray[:] = [float("%(i)0.1f" % locals())
                             for i in polyTimeArray]
            infoText = "time array : %(printArray)s" % locals()
            warningColor = "#dc322f"

            # ANCHOR THE POLYNOMIAL IN THE FUTURE SO THAT ALL PREDICTED LIGHTCURVES
            # EVENTUALLY FADE TO NOTHING
            for i in range(numAnchors):
                polyTimeArray.insert(0, firstAnchorPointTime + i *
                                     anchorSeparation)
                polyMagArray.insert(0, anchorPointMag)

            # POLYNOMIAL LIMITS
            xPolyLowerLimit = min(polyTimeArray) - 2.0
            xPolyUpperLimit = max(polyTimeArray) + 2.0

        # SET AXIS LIMITS
        xUpperLimit = 5
        yLowerLimit = min(bigMagArray) - 0.3
        yUpperLimit = max(bigMagArray) + 0.5
        yBorder = math.fabs((yUpperLimit - yLowerLimit)) * 0.1
        yLowerLimit -= yBorder
        yUpperLimit += yBorder

        # EXTEND LOWER X-LIMIT FOR NON-DETECTIONS
        xLowerTmp = xLowerLimit
        for t, m in zip(flatLimits["mjd"], flatLimits["mag"]):
            if m > yLowerLimit and t < xLowerTmp + 2 and t > xLowerLimit - 40:
                xLowerTmp = t - 2
        xLowerLimit = xLowerTmp

        if predictCurrentMag:
            thisPoly = np.polyfit(polyTimeArray, polyMagArray, polyOrder)
            # FLATTEN INTO A FUNCTION TO MAKE PLOTTING EASIER
            flatLinear = np.poly1d(thisLinear)
            flatPoly = np.poly1d(thisPoly)
            xData = np.arange(xPolyLowerLimit, xPolyUpperLimit, 1)
            plt.plot(xData, flatPoly(xData), label="poly")
            plt.plot(xData, flatLinear(xData), label="linear")

            # PREDICT A CURRENT MAGNITUDE FROM THE PLOT
            currentMag = flatPoly(0.)
            if currentMag < latestMag:
                currentMag = currentMag + 0.2
            self.log.debug(
                'currentMag: %(currentMag)0.2f, m=%(gradient)s' % locals())

            ls = "*g" % locals()
            currentMagArray = np.array([currentMag])
            nowArray = np.array([todayMjd])
            line = ax.plot(nowArray, currentMagArray,
                           ls, label="current estimate")

            # SET THE AXES / VIEWPORT FOR THE PLOT
            if currentMag < yLowerLimit:
                yLowerLimit = currentMag - 0.4

        plt.clf()
        plt.cla()
        ax = fig.add_subplot(1, 1, 1)

        # PLOT DATA VIA FILTER. MAGS AND LIMITS
        filterColor = {
            "r": "#29a329",
            "g": "#268bd2",
            "G": "#859900",
            "o": "#cb4b16",
            "c": "#2aa198",
            "U": "#6c71c4",
            "B": "blue",
            "V": "#008000",
            "R": "#e67300",
            "I": "#dc322f",
            "w": "#cc2900",
            "y": "#ff6666",
            "z": "#990000",
        }
        i = 0
        handles = []
        handlesAdded = []
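        # PLOT EACH FILTER IN TURN: NON-DETECTION LIMITS ARE DRAWN AS
        # DOWNWARD ARROWS, MAGNITUDES WITHOUT ERRORS ARE PLOTTED WITH A
        # FUDGED, DASHED ERROR BAR, AND FILTERS WITH NO REAL MAGNITUDES GET A
        # DUMMY OFF-PLOT POINT SO THEY STILL APPEAR IN THE LEGEND.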
        for k, v in list(dataset.items()):
            mag = v["mag"]
            magErr = v["magErr"]
            magMjd = v["magMjd"]
            limit = v["limit"]
            limitMjd = v["limitMjd"]
            magNoErr = v["magNoErr"]
            magNoErrMjd = v["magNoErrMjd"]
            magNoErrFudge = v["magNoErrFudge"]

            if k in filterColor:
                color = filterColor[k]
            else:
                color = "black"

            if len(limit):
                for l, m in zip(limit, limitMjd):
                    plt.text(m, l, u"\u21A7", fontname='STIXGeneral',
                             size=30, va='top', ha='center', clip_on=True, color=color, zorder=1)
            if len(magNoErr):
                theseMags = ax.errorbar(magNoErrMjd, magNoErr, yerr=magNoErrFudge, color=color, fmt='o', mfc=color,
                                        mec=color, zorder=2, ms=12., alpha=0.8, linewidth=1.2,  label=k, capsize=0)
                theseMags[-1][0].set_linestyle('--')

            if len(mag):
                theseMags = ax.errorbar(magMjd, mag, yerr=magErr, color=color, fmt='o', mfc=color,
                                        mec=color, zorder=3, ms=12., alpha=0.8, linewidth=1.2,  label=k, capsize=10)

            if not len(mag):
                theseMags = ax.errorbar([-500], [20], yerr=[0.2], color=color, fmt='o', mfc=color,
                                        mec=color, zorder=3, ms=12., alpha=0.8, linewidth=1.2,  label=k, capsize=10)

            if k not in handlesAdded:
                handles.append(theseMags)
                handlesAdded.append(k)

        # ADD LEGEND
        plt.legend(handles=handles, prop={
                   'size': 13.5}, bbox_to_anchor=(1., 1.25), loc=0, borderaxespad=0., ncol=18, scatterpoints=1)

        # RHS AXIS TICKS
        plt.setp(ax.xaxis.get_majorticklabels(),
                 rotation=45, horizontalalignment='right')
        ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

        # CHANGE PLOT TO FIXED TIME
        # SETUP THE AXES
        xUpperLimit = fixedXUpperLimit
        ax.set_xlabel('MJD',  labelpad=20)
        ax.set_ylabel('Magnitude',  labelpad=20)
        ax.set_title('')
        ax.set_xlim([xLowerLimit, xUpperLimit])
        ax.set_ylim([yUpperLimit, yLowerLimit])
        ax.xaxis.set_major_formatter(ticker.FormatStrFormatter('%d'))

        # GENERATE UT DATE AXIS FOR TOP OF PLOT
        lower, upper = ax.get_xlim()
        utLower = converter.mjd_to_ut_datetime(mjd=lower, datetimeObject=True)
        utUpper = converter.mjd_to_ut_datetime(mjd=upper, datetimeObject=True)
        ax3 = ax.twiny()
        ax3.set_xlim([utLower, utUpper])
        ax3.grid(True)
        ax.xaxis.grid(False)
        plt.setp(ax3.xaxis.get_majorticklabels(),
                 rotation=45, horizontalalignment='left', fontsize=14)
        ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d, %y'))

        # Y TICK FORMAT
        y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
        ax.yaxis.set_major_formatter(y_formatter)

        # PRINT CURRENT MAG AS SANITY CHECK
        # fig.text(0.1, 1.02, currentMag, ha="left", fontsize=40)

        # RECURSIVELY CREATE MISSING DIRECTORIES
        if not os.path.exists(saveLocation):
            try:
                os.makedirs(saveLocation)
            except:
                pass
        # SAVE THE PLOT
        filepath = """%(saveLocation)s%(saveFileName)s.png""" % locals()
        plt.savefig(filepath, format='PNG', bbox_inches='tight', transparent=False,
                    pad_inches=0.4)
        # plt.show()
        plt.clf()  # clear figure
        plt.close()

        # TEST THAT PLOT FILE HAS ACTUALLY BEEN GENERATED
        try:
            with open(filepath):
                pass
            fileExists = True
        except IOError:
            raise IOError(
                "the path --pathToFile-- %s does not exist on this machine" %
                (filepath,))

        self.log.debug('completed the ``_create_lightcurve_plot_file`` method')

        return filepath, currentMag, gradient
Example #11
    def _update_day_tracker_table(self):
        """* update day tracker table*

        **Key Arguments:**
            # -

        **Return:**
            - None

        **Usage:**
            ..  todo::

                - add usage info
                - create a sublime snippet for usage
                - write a command-line tool for this method
                - update package tutorial with command-line tool info if needed

            .. code-block:: python 

                usage code 

        """
        self.log.info('starting the ``_update_day_tracker_table`` method')

        # YESTERDAY MJD
        mjd = now(log=self.log).get_mjd()
        yesterday = int(math.floor(mjd - 1))

        sqlQuery = u"""
            SELECT mjd FROM atlas_moving_objects.day_tracker order by mjd desc limit 1
        """ % locals()
        rows = readquery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.atlasMoversDBConn,
        )
        highestMjd = int(rows[0]["mjd"])

        converter = conversions(log=self.log)

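        # FOR EVERY MJD BETWEEN THE LAST RECORDED DAY AND YESTERDAY, CONVERT
        # THE MJD TO A UT DATE AND QUEUE A ROW FOR THE `day_tracker` TABLE
        # (EXISTING ROWS WITH THE SAME MJD ARE REPLACED ON INGEST).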
        sqlData = []
        for m in range(highestMjd, yesterday):
            # CONVERTER TO CONVERT MJD TO DATE
            utDate = converter.mjd_to_ut_datetime(mjd=m,
                                                  sqlDate=True,
                                                  datetimeObject=False)
            sqlData.append({"mjd": m, "ut_date": utDate})

        insert_list_of_dictionaries_into_database_tables(
            dbConn=self.atlasMoversDBConn,
            log=self.log,
            dictList=sqlData,
            dbTableName="day_tracker",
            uniqueKeyList=["mjd"],
            dateModified=False,
            batchSize=10000,
            replace=True)

        self.atlasMoversDBConn.commit()

        self.log.info('completed the ``_update_day_tracker_table`` method')
        return None
Example #12
    def _clean_data_pre_ingest(self, surveyName, withinLastDays=False):
        """*clean up the list of dictionaries containing the ATLAS data, pre-ingest*

        **Key Arguments**

        - ``surveyName`` -- the ATLAS survey name
        - ``withinLastDays`` -- the lower limit of observations to include (within the last N days from now). Default *False*, i.e. no limit
        

        **Return**

        - ``dictList`` -- the cleaned list of dictionaries ready for ingest
        

        **Usage**

        To clean the data from the ATLAS survey:

        ```python
        dictList = ingesters._clean_data_pre_ingest(surveyName="ATLAS")
        ```

        Note you will also be able to access the data via ``ingester.dictList``
        
        """
        self.log.debug('starting the ``_clean_data_pre_ingest`` method')

        self.dictList = []

        # CALC MJD LIMIT
        if withinLastDays:
            mjdLimit = now(log=self.log).get_mjd() - float(withinLastDays)

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(log=self.log)

        for row in self.csvDicts:
            # IF NOW IN THE LAST N DAYS - SKIP
            flagMjd = converter.ut_datetime_to_mjd(
                utDatetime=row["followup_flag_date"])

            if withinLastDays and (float(row["earliest_mjd"]) < mjdLimit
                                   and float(flagMjd) < mjdLimit):
                continue

            # MASSAGE THE DATA IN THE INPUT FORMAT TO WHAT IS NEEDED IN THE
            # FEEDER SURVEY TABLE IN THE DATABASE
            target = row["target"]
            diff = row["diff"]
            ref = row["ref"]
            targetImageURL = None
            refImageURL = None
            diffImageURL = None
            objectURL = None

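            # BUILD THE POSTAGE-STAMP IMAGE URLS AND THE CANDIDATE-PAGE URL
            # FROM THE STAMP FILENAMES; THE FIRST UNDERSCORE-DELIMITED TOKEN
            # IS THE CANDIDATE ID AND THE SECOND IS THE MJD OF THE EXPOSURE.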
            if target:
                iid, mjdString, diffId, ippIdet, imgType = target.split('_')
                mjdStr = str(int(float(mjdString)))
                targetImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/site_media/images/data/atlas4/" + \
                    mjdStr + '/' + target + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if ref:
                iid, mjdString, diffId, ippIdet, imgType = ref.split('_')
                mjdStr = str(int(float(mjdString)))
                refImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/site_media/images/data/atlas4/" + \
                    mjdStr + '/' + ref + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if diff:
                iid, mjdString, diffId, ippIdet, imgType = diff.split('_')
                mjdStr = str(int(float(mjdString)))
                diffImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/site_media/images/data/atlas4/" + \
                    mjdStr + '/' + diff + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            discDate = converter.mjd_to_ut_datetime(mjd=row["earliest_mjd"],
                                                    sqlDate=True)

            thisDictionary = {}
            thisDictionary["candidateID"] = row["name"]
            thisDictionary["ra_deg"] = row["ra"]
            thisDictionary["dec_deg"] = row["dec"]
            thisDictionary["mag"] = row["earliest_mag"]
            thisDictionary["observationMJD"] = row["earliest_mjd"]
            thisDictionary["filter"] = row["earliest_filter"]
            thisDictionary["discDate"] = discDate
            thisDictionary["discMag"] = row["earliest_mag"]
            thisDictionary["suggestedType"] = row["object_classification"]
            thisDictionary["targetImageURL"] = targetImageURL
            thisDictionary["refImageURL"] = refImageURL
            thisDictionary["diffImageURL"] = diffImageURL
            thisDictionary["objectURL"] = objectURL

            self.dictList.append(thisDictionary)

        self.log.debug('completed the ``_clean_data_pre_ingest`` method')
        return self.dictList
Example #13
        )
        angularSeparation, north, east = calculator.get()
        print """%(angularSeparation)s arcsec (%(north)s N, %(east)s E)""" % locals()

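    # DECIDE WHETHER THE INPUT IS AN MJD OR A UT DATETIME: IF THE VALUE
    # PARSES AS A FLOAT AND DOES NOT BEGIN WITH 0, 1 OR 2 (I.E. IT DOES NOT
    # LOOK LIKE A CALENDAR YEAR) IT IS TREATED AS AN MJD, OTHERWISE AS A UT
    # DATETIME STRING.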
    if timeflip:
        try:
            inputMjd = float(datetime)
            if datetime[0] not in ["0", "1", "2"]:
                inputMjd = True
            else:
                inputMjd = False
        except:
            inputMjd = False
        from astrocalc.times import conversions
        converter = conversions(
            log=log
        )

        if inputMjd == False:
            try:
                mjd = converter.ut_datetime_to_mjd(utDatetime=datetime)
                print(mjd)
            except Exception as e:
                print(e)
        else:
            try:
                utDate = converter.mjd_to_ut_datetime(mjd=datetime)
                print(utDate)
            except Exception as e:
                print(e)