Example #1
    def _clean_data_pre_ingest(
            self,
            surveyName,
            withinLastDays=False):
        """*clean up the list of dictionaries containing the useradded data, pre-ingest*

        **Key Arguments**

        - ``surveyName`` -- the useradded survey name
        -  ``withinLastDays`` -- the lower limit of observations to include (within the last N days from now). Default *False*, i.e. no limit


        **Return**

        - ``dictList`` -- the cleaned list of dictionaries ready for ingest


        **Usage**

        To clean the data from the useradded survey:

        ```python
        dictList = ingesters._clean_data_pre_ingest(surveyName="useradded")
        ```

        Note you will also be able to access the data via ``ingester.dictList``

        """
        self.log.info('starting the ``_clean_data_pre_ingest`` method')

        self.dictList = []

        # CALC MJD LIMIT
        if withinLastDays:
            mjdLimit = now(
                log=self.log
            ).get_mjd() - float(withinLastDays)

        for row in self.csvDicts:
            # IF NOT IN THE LAST N DAYS - SKIP
            if withinLastDays and float(row["mjd_obs"]) < mjdLimit:
                continue

            # MASSAGE THE DATA IN THE INPUT FORMAT TO WHAT IS NEEDED IN THE
            # FEEDER SURVEY TABLE IN THE DATABASE
            thisDictionary = {}
            # thisDictionary["candidateID"] = row["ps1_designation"]
            # ...

            self.dictList.append(thisDictionary)

        self.log.info('completed the ``_clean_data_pre_ingest`` method')
        return self.dictList
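The `withinLastDays` cut above can be illustrated with a standalone sketch (a hypothetical helper, not part of the class; `nowMjd` stands in for `now(log=...).get_mjd()`):

```python
# Hypothetical, self-contained version of the withinLastDays cut.
def filter_recent_rows(csvDicts, nowMjd, withinLastDays=False):
    """Keep only rows observed within the last N days of `nowMjd`."""
    if not withinLastDays:
        return list(csvDicts)
    mjdLimit = nowMjd - float(withinLastDays)
    return [row for row in csvDicts if float(row["mjd_obs"]) >= mjdLimit]

rows = [{"mjd_obs": "58800.1"}, {"mjd_obs": "58990.5"}]
print(filter_recent_rows(rows, nowMjd=59000.0, withinLastDays=30))
# [{'mjd_obs': '58990.5'}]
```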
    def _get_ps1_pointings(
            self,
            gwid,
            inPastDays,
            inFirstDays):
        """
        *get ps1 pointings to add to the plot*

        **Key Arguments:**
            - ``gwid`` -- the unique ID of the gravitational wave to plot
            - ``inPastDays`` -- used for the `history` plots (looking back from today)
            - ``inFirstDays`` -- used in the `timeline` plots (looking forward from wave detection)

        **Return:**
            - ``ps1Pointings`` -- the pointings to place on the plot
        """
        self.log.debug('starting the ``_get_ps1_pointings`` method')

        # DETERMINE THE TEMPORAL CONSTRAINTS FOR MYSQL QUERY
        if inPastDays != False or inPastDays == 0:
            nowMjd = now(
                log=self.log
            ).get_mjd()
            mjdStart = nowMjd - inPastDays
            mjdEnd = 10000000000
            if inPastDays == 0:
                mjdStart = 0.0
        else:
            print(inPastDays)

        if inFirstDays:
            mjdStart = self.settings["gravitational waves"][
                gwid]["mjd"] + inFirstDays[0]
            mjdEnd = self.settings["gravitational waves"][
                gwid]["mjd"] + inFirstDays[1]
            if inFirstDays[1] == 0:
                mjdEnd = 10000000000

        sqlQuery = u"""
            SELECT raDeg, decDeg FROM ps1_pointings where gw_id = "%(gwid)s" and mjd between %(mjdStart)s and %(mjdEnd)s
        """ % locals()
        ps1Pointings = readquery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.ligo_virgo_wavesDbConn
        )

        self.log.debug('completed the ``_get_ps1_pointings`` method')
        return ps1Pointings
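A minimal sketch of the temporal-window logic above, with `waveMjd` standing in for `self.settings["gravitational waves"][gwid]["mjd"]` (hypothetical helper, for illustration only):

```python
# Hypothetical standalone version of the MJD-window calculation.
def mjd_window(nowMjd, waveMjd, inPastDays=False, inFirstDays=False):
    mjdStart, mjdEnd = 0.0, 10000000000
    if inPastDays:
        # `history` plots: look back N days from today
        mjdStart = nowMjd - inPastDays
    if inFirstDays:
        # `timeline` plots: look forward from the wave detection time
        mjdStart = waveMjd + inFirstDays[0]
        mjdEnd = waveMjd + inFirstDays[1]
        if inFirstDays[1] == 0:
            mjdEnd = 10000000000
    return mjdStart, mjdEnd

print(mjd_window(nowMjd=57800.0, waveMjd=57279.0, inFirstDays=(0, 31)))
# (57279.0, 57310.0)
```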
Example #3
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    from astrocalc.coords import unit_conversion
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="CRITICAL",
               options_first=True,
               projectName="astrocalc",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS INTO A DICTIONARY, MAPPING EACH ARGUMENT
    # TO A VARIABLE NAME AUTOMATICALLY
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except IOError:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    coordflip = a["coordflip"]
    sep = a["sep"]
    timeflip = a["timeflip"]
    trans = a["trans"]
    now = a["now"]
    dist = a["dist"]
    ra = a["ra"]
    ra1 = a["ra1"]
    ra2 = a["ra2"]
    dec = a["dec"]
    dec1 = a["dec1"]
    dec2 = a["dec2"]
    datetime = a["datetime"]
    north = a["north"]
    east = a["east"]
    distVal = a["distVal"]
    hcFlag = a["hcFlag"]
    wmFlag = a["wmFlag"]
    wvFlag = a["wvFlag"]
    mpcFlag = a["mpcFlag"]
    redshiftFlag = a["redshiftFlag"]
    cartesianFlag = a["cartesianFlag"]

    # CALL FUNCTIONS/OBJECTS
    if coordflip:

        if cartesianFlag:
            converter = unit_conversion(log=log)
            x, y, z = converter.ra_dec_to_cartesian(ra="23 45 21.23232",
                                                    dec="+01:58:5.45341")
            print(x, y, z)
            return

        try:
            ra = float(ra)
            dec = float(dec)
            degree = True
        except Exception as e:
            degree = False

        if degree is True:
            converter = unit_conversion(log=log)
            try:
                ra = converter.ra_decimal_to_sexegesimal(ra=ra, delimiter=":")
                dec = converter.dec_decimal_to_sexegesimal(dec=dec,
                                                           delimiter=":")
            except Exception as e:
                print(e)
                sys.exit(0)

            print(ra, dec)
        else:
            converter = unit_conversion(log=log)
            try:
                ra = converter.ra_sexegesimal_to_decimal(ra=ra)
                dec = converter.dec_sexegesimal_to_decimal(dec=dec)
            except Exception as e:
                print(e)
                sys.exit(0)
            print(ra, dec)

    if sep:
        from astrocalc.coords import separations
        calculator = separations(
            log=log,
            ra1=ra1,
            dec1=dec1,
            ra2=ra2,
            dec2=dec2,
        )
        angularSeparation, north, east = calculator.get()
        print("""%(angularSeparation)s arcsec (%(north)s N, %(east)s E)""" %
              locals())

    if timeflip:
        # IF THE DATETIME PARSES AS A NUMBER AND DOES NOT LOOK LIKE A
        # DATE-STRING (I.E. DOES NOT START WITH 0, 1 OR 2) ASSUME IT IS AN MJD
        try:
            float(datetime)
            if datetime[0] not in ["0", "1", "2"]:
                inputMjd = True
            else:
                inputMjd = False
        except (ValueError, TypeError):
            inputMjd = False
        from astrocalc.times import conversions
        converter = conversions(log=log)

        if inputMjd == False:
            try:
                mjd = converter.ut_datetime_to_mjd(utDatetime=datetime)
                print(mjd)
            except Exception as e:
                print(e)
        else:
            try:
                utDate = converter.mjd_to_ut_datetime(mjd=datetime)
                print(utDate)
            except Exception as e:
                print(e)

    if trans:
        # TRANSLATE COORDINATES ACROSS SKY
        from astrocalc.coords import translate
        newRa, newDec = translate(log=log,
                                  ra=ra,
                                  dec=dec,
                                  northArcsec=float(north),
                                  eastArcsec=float(east)).get()
        from astrocalc.coords import unit_conversion
        converter = unit_conversion(log=log)
        ra = converter.ra_decimal_to_sexegesimal(ra=newRa, delimiter=":")
        dec = converter.dec_decimal_to_sexegesimal(dec=newDec, delimiter=":")

        print("%(newRa)s, %(newDec)s (%(ra)s, %(dec)s)" % locals())

    if now:
        from astrocalc.times import now
        mjd = now(log=log).get_mjd()
        print(mjd)

    if dist and redshiftFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        if not hcFlag:
            hcFlag = 70.
        if not wmFlag:
            wmFlag = 0.3
        if not wvFlag:
            wvFlag = 0.7
        dists = c.redshift_to_distance(z=float(distVal),
                                       WM=float(wmFlag),
                                       WV=float(wvFlag),
                                       H0=float(hcFlag))
        print("Distance Modulus: " + str(dists["dmod"]) + " mag")
        print("Luminousity Distance: " + str(dists["dl_mpc"]) + " Mpc")
        print("Angular Size Scale: " + str(dists["da_scale"]) + " kpc/arcsec")
        print("Angular Size Distance: " + str(dists["da_mpc"]) + " Mpc")
        print("Comoving Radial Distance: " + str(dists["dcmr_mpc"]) + " Mpc")

    if dist and mpcFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        z = c.distance_to_redshift(mpc=float(distVal))
        print("z = %(z)s" % locals())

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
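The argument-unpacking loop near the top of `main()` can be seen in isolation with a hand-built, docopt-style dictionary (illustrative values only):

```python
# Hypothetical docopt-style arguments dictionary, for illustration.
arguments = {"--interactive": True, "<ra>": "23:45:21.2", "coordflip": True}
a = {}
for arg, val in list(arguments.items()):
    if arg[0] == "-":
        # option flags: "--interactive" -> "interactiveFlag"
        varname = arg.replace("-", "") + "Flag"
    else:
        # positional/command arguments: "<ra>" -> "ra"
        varname = arg.replace("<", "").replace(">", "")
    a[varname] = val
print(a)
# {'interactiveFlag': True, 'ra': '23:45:21.2', 'coordflip': True}
```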
    def parse_panstarrs_nightlogs(
            self,
            updateAll=False):
        """*download and parse the ps1 night logs from the range of time a wave survey campaign is active*

        The night-log data is added to the ps1_nightlogs table

        **Key Arguments:**
            - ``updateAll`` -- update all of the PS1 nightlogs. This will take some time; by default only the logs from the last 7 days are lifted. Default *False*.

        **Return:**
            - None

        **Usage:**
            ..  todo::

                - add usage info
                - create a sublime snippet for usage
                - update package tutorial if needed

            .. code-block:: python

                usage code

        """
        self.log.debug('starting the ``parse_panstarrs_nightlogs`` method')

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(
            log=self.log
        )

        createStatement = """
CREATE TABLE `ps1_nightlogs` (
  `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
  `airm` double DEFAULT NULL,
  `comments` varchar(200) DEFAULT NULL,
  `decDeg` double DEFAULT NULL,
  `etime` double DEFAULT NULL,
  `f` varchar(10) DEFAULT NULL,
  `filesetID` varchar(100) DEFAULT NULL,
  `raDeg` double DEFAULT NULL,
  `telescope_pointing` varchar(200) DEFAULT NULL,
  `time_registered` datetime DEFAULT NULL,
  `type` varchar(100) DEFAULT NULL,
  `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
  `dateLastModified` datetime DEFAULT CURRENT_TIMESTAMP,
  `updated` varchar(45) DEFAULT '0',
  PRIMARY KEY (`primaryId`),
  UNIQUE KEY `filesetid` (`filesetID`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
"""

        from astrocalc.times import now
        mjdNow = now(
            log=self.log
        ).get_mjd()

        # WAVE METADATA FOUND IN SETTINGS FILE
        for wave in self.settings["gravitational waves"]:
            # GIVE A 3 DAY WINDOW EITHER SIDE OF WAVE TIME-RANGE
            mjdLower = int(self.settings["gravitational waves"][
                wave]["mjd"] - 21. - 3.)
            mjdUpper = int(self.settings["gravitational waves"][
                wave]["mjd"] + 31. + 3.)

            if updateAll == False:
                if mjdUpper < mjdNow - 7.:
                    continue
                if mjdUpper > mjdNow:
                    mjdUpper = int(mjdNow)
                if mjdLower < mjdNow - 7.:
                    mjdLower = int(mjdNow - 7.)

            # METRIC NIGHT LOGS FOR EACH NIGHT FOUND AT A URL SIMILAR TO :
            # "http://ipp0022.ifa.hawaii.edu/ps1sc/metrics/2016-12-14/index.html"
            urls = []
            for i in range(mjdUpper - mjdLower + 3):
                mjd = i + mjdLower
                utDate = converter.mjd_to_ut_datetime(
                    mjd=mjd,
                    sqlDate=False,
                    datetimeObject=True
                )
                utDate = utDate.strftime("%Y-%m-%d")
                urls.append("http://ipp0022.ifa.hawaii.edu/ps1sc/metrics/%(utDate)s/index.html" % locals(
                ))

            localUrls = multiobject_download(
                urlList=urls,
                downloadDirectory="/tmp",
                log=self.log,
                timeStamp=True,
                timeout=180,
                concurrentDownloads=2,
                resetFilename=False,
                credentials=False,  # { 'username' : "...", 'password' : "..." }
                longTime=True,
                indexFilenames=False
            )

            for url in localUrls:
                if not url:
                    continue
                pathToReadFile = url
                try:
                    self.log.debug("attempting to open the file %s" %
                                   (pathToReadFile,))
                    readFile = codecs.open(
                        pathToReadFile, encoding='utf-8', mode='r')
                    thisData = readFile.read()
                    readFile.close()
                except IOError as e:
                    message = 'could not open the file %s' % (pathToReadFile,)
                    self.log.critical(message)
                    raise IOError(message)

                regex = re.compile(r'<pre>\s*# (filesetID.*?)</pre>', re.S)
                matchObject = re.finditer(
                    regex,
                    thisData
                )

                for match in matchObject:
                    csvReader = csv.DictReader(
                        io.StringIO(match.group(1)), delimiter='|')
                    nightLog = []
                    for row in csvReader:
                        cleanDict = {}
                        for k, v in row.items():
                            cleanDict[k.strip().replace(" ", "_")] = v.strip()
                        if "telescope_pointing" in cleanDict:
                            cleanDict["raDeg"] = cleanDict["telescope_pointing"].split()[
                                0]
                            cleanDict["decDeg"] = cleanDict["telescope_pointing"].split()[
                                1]
                        if "time_registered" in cleanDict:
                            cleanDict["time_registered"] = cleanDict[
                                "time_registered"].replace("Z", "")
                        nightLog.append(cleanDict)

                dataSet = list_of_dictionaries(
                    log=self.log,
                    listOfDictionaries=nightLog
                )
                # Recursively create missing directories
                if not os.path.exists("/tmp/ps1_nightlogs"):
                    os.makedirs("/tmp/ps1_nightlogs")
                mysqlData = dataSet.mysql(
                    tableName="ps1_nightlogs", filepath="/tmp/ps1_nightlogs/ps1_nightlog_%(utDate)s.sql" % locals(), createStatement=createStatement)

                directory_script_runner(
                    log=self.log,
                    pathToScriptDirectory="/tmp/ps1_nightlogs",
                    databaseName=self.settings["database settings"][
                        "ligo_virgo_waves"]["db"],
                    loginPath=self.settings["database settings"][
                        "ligo_virgo_waves"]["loginPath"],
                    successRule="delete",
                    failureRule="failed"
                )
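The regex/CSV parsing step in `parse_panstarrs_nightlogs` can be exercised against a made-up `<pre>` block (the column names below are assumptions, chosen to mimic a metrics page):

```python
import csv
import io
import re

# Made-up night-log snippet standing in for a downloaded metrics page.
html = """<pre>
# filesetID | airm | telescope_pointing
o7777g0123o | 1.20 | 150.1 +2.3
o7777g0124o | 1.35 | 151.0 +2.4
</pre>"""

match = re.search(r'<pre>\s*# (filesetID.*?)</pre>', html, re.S)
nightLog = []
for row in csv.DictReader(io.StringIO(match.group(1)), delimiter='|'):
    cleanDict = {k.strip().replace(" ", "_"): v.strip() for k, v in row.items()}
    if "telescope_pointing" in cleanDict:
        cleanDict["raDeg"], cleanDict["decDeg"] = cleanDict["telescope_pointing"].split()
    nightLog.append(cleanDict)
print(nightLog[0]["raDeg"], nightLog[0]["decDeg"])
# 150.1 +2.3
```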
Example #5
    def _create_lightcurve_plot_file(self, dataset, flatdata, flatLimits,
                                     objectNames, saveLocation, saveFileName):
        """*Generate the lightcurve and save to file*

        **Key Arguments**

        - ``log`` -- logger
        - ``dataset`` -- the observational dataset split into filters (and then mags, limits etc)
        - ``flatdata`` -- a flattened dataset to determine current magnitude
        - ``flatLimits`` -- a flattened dataset of non-detection limits
        - ``objectNames`` -- a single name or a list of names
        - ``saveLocation`` -- the folder to save the plot file to
        - ``saveFileName`` -- the filename to give the plot file (without extension)


        **Return**

        - ``filepath`` -- path to the lightcurve file
        - ``currentMag`` -- a prediction of the current magnitude if there is enough recent data
        - ``gradient`` -- a prediction of the gradient of recent data (on rise or decline?)

        """
        self.log.debug('starting the ``_create_lightcurve_plot_file`` method')

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(log=self.log)

        # INITIATE THE PLOT FIGURE - SQUARE
        fig = plt.figure(num=None,
                         figsize=(10, 10),
                         dpi=100,
                         facecolor=None,
                         edgecolor=None,
                         frameon=True)
        ax = fig.add_subplot(1, 1, 1)

        # TICK LABEL SIZE
        mpl.rc('ytick', labelsize=25)
        mpl.rc('xtick', labelsize=25)
        mpl.rcParams.update({'font.size': 25})

        # INITIAL RESTRICTIONS
        currentMag = -9999
        gradient = -9999

        # WHAT IS TODAY'S MJD (FOR CURRENT MAG ESTIMATE)
        todayMjd = now(log=self.log).get_mjd()

        # MAKE ARRAYS OF TIME AND MAG FOR PLOTS
        bigTimeArray, bigMagArray = np.array(flatdata["mjd"]), np.array(
            flatdata["mag"])
        # SORT TWO LIST BASED ON FIRST
        bigTimeArray, bigMagArray = zip(
            *[(x, y) for x, y in sorted(zip(bigTimeArray, bigMagArray))])

        # BIN DATA FOR POLYNOMIALS
        binData = True
        if binData is True:
            distinctMjds = {}
            for mjd, mag in zip(bigTimeArray, bigMagArray):
                # DICT KEY IS THE UNIQUE INTEGER MJD
                key = str(int(math.floor(mjd / 1.0)))
                # FIRST DATA POINT OF THE NIGHTS? CREATE NEW DATA SET
                if key not in distinctMjds:
                    distinctMjds[key] = {"mjds": [mjd], "mags": [mag]}
                # OR NOT THE FIRST? APPEND TO ALREADY CREATED LIST
                else:
                    distinctMjds[key]["mjds"].append(mjd)
                    distinctMjds[key]["mags"].append(mag)

            # ALL DATA NOW IN MJD SUBSETS. SO FOR EACH SUBSET (I.E. INDIVIDUAL
            # NIGHTS) ...
            summedMagnitudes = {'mjds': [], 'mags': []}
            for k, v in list(distinctMjds.items()):
                # GIVE ME THE MEAN MJD
                meanMjd = sum(v["mjds"]) / len(v["mjds"])
                summedMagnitudes["mjds"].append(meanMjd)
                # GIVE ME THE MEAN MAG
                meanMag = sum(v["mags"]) / len(v["mags"])
                summedMagnitudes["mags"].append(meanMag)

            bigTimeArray = summedMagnitudes["mjds"]
            bigMagArray = summedMagnitudes["mags"]

        bigTimeArray = np.array(bigTimeArray)
        bigMagArray = np.array(bigMagArray)

        # DETERMINE SENSIBLE AXIS LIMITS FROM FLATTENED DATA
        # LIMITS HERE ARE LOWER AND UPPER MJDS FOR X-AXIS
        xLowerLimit = bigTimeArray.min()
        xUpperLimit = bigTimeArray.max()
        latestTime = xUpperLimit
        xBorder = math.fabs((xUpperLimit - xLowerLimit)) * 0.1
        if xBorder < 5:
            xBorder = 5.
        xLowerLimit -= xBorder
        xUpperLimit += xBorder
        fixedXUpperLimit = xUpperLimit
        timeRange = xUpperLimit - xLowerLimit

        # POLYNOMIAL CONSTRAINTS USING COMBINED DATASETS
        # POLYNOMIAL/LINEAR SETTINGS
        # SETTINGS FILE
        polyOrder = 5
        # EITHER USE DATA IN THESE LAST NUMBER OF DAYS OR ...
        lastNumDays = 10.
        # ... IF NOT ENOUGH DATA USE THE LAST NUMBER OF DATA POINTS
        predictCurrentMag = True
        lastNumDataPoints = 3
        numAnchors = 3
        anchorSeparation = 70
        latestMag = bigMagArray[0]
        anchorPointMag = latestMag + 0.5
        polyTimeArray, polyMagArray = [], []

        # QUIT IF NOT ENOUGH DATA FOR POLYNOMIAL
        if len(bigTimeArray) <= lastNumDataPoints or timeRange < 3.:
            predictCurrentMag = False

        if max(bigTimeArray) < todayMjd - 120:
            predictCurrentMag = False

        if predictCurrentMag:
            # USE ONLY THE LAST N DAYS OF DATA FOR LINEAR FIT
            mask = np.where(bigTimeArray - bigTimeArray.max() < -lastNumDays,
                            False, True)

            # DETERMINE GRADIENT OF SLOPE FROM LAST `LASTNUMDAYS` DAYS
            linearTimeArray = bigTimeArray[mask]
            linearMagArray = bigMagArray[mask]
            # FIT AND PLOT THE POLYNOMIAL ASSOCIATED WITH ALL DATA SETS
            thisLinear = chebfit(linearTimeArray, linearMagArray, 1)
            gradient = thisLinear[1]

            firstAnchorPointTime = anchorSeparation + latestTime

            # CREATE THE ARRAY OF DATA USED TO GENERATE THE POLYNOMIAL
            polyTimeArray = bigTimeArray
            polyMagArray = bigMagArray

            # ANCHOR THE POLYNOMIAL IN THE FUTURE SO THAT ALL PREDICTED LIGHTCURVES
            # EVENTUALLY FADE TO NOTHING
            extraTimes = np.arange(0, numAnchors) * \
                anchorSeparation + firstAnchorPointTime
            extraMags = np.ones(numAnchors) * anchorPointMag
            polyTimeArray = np.append(polyTimeArray, extraTimes)
            polyMagArray = np.append(polyMagArray, extraMags)

            # POLYNOMIAL LIMITS
            xPolyLowerLimit = min(polyTimeArray) - 2.0
            xPolyUpperLimit = max(polyTimeArray) + 2.0

        # SET AXIS LIMITS
        xUpperLimit = 5
        yLowerLimit = min(bigMagArray) - 0.3
        yUpperLimit = max(bigMagArray) + 0.5
        yBorder = math.fabs((yUpperLimit - yLowerLimit)) * 0.1
        yLowerLimit -= yBorder
        yUpperLimit += yBorder

        # EXTEND LOWER X-LIMIT FOR NON-DETECTIONS
        xLowerTmp = xLowerLimit
        for t, m in zip(flatLimits["mjd"], flatLimits["mag"]):
            if m > yLowerLimit and t < xLowerTmp + 2 and t > xLowerLimit - 40:
                xLowerTmp = t - 2
        xLowerLimit = xLowerTmp

        if predictCurrentMag:
            thisPoly = chebfit(polyTimeArray, polyMagArray, polyOrder)
            # FLATTEN INTO A FUNCTION TO MAKE PLOTTING EASIER
            xData = np.arange(xPolyLowerLimit, todayMjd + 50, 1)
            flatLinear = chebval(xData, thisLinear)
            flatPoly = chebval(xData, thisPoly)
            plt.plot(xData, flatPoly, label="poly")
            plt.plot(xData, flatLinear, label="linear")

            # PREDICT A CURRENT MAGNITUDE FROM THE PLOT

            currentMag = chebval(todayMjd, thisPoly)
            self.log.debug('currentMag: %(currentMag)0.2f, m=%(gradient)s' %
                           locals())

            ls = "*g" % locals()
            currentMagArray = np.array([currentMag])
            nowArray = np.array([todayMjd])
            line = ax.plot(nowArray,
                           currentMagArray,
                           ls,
                           label="current estimate")

            lineExtras = ax.plot(extraTimes, extraMags, "+")

            # SET THE AXES / VIEWPORT FOR THE PLOT
            # if currentMag < yLowerLimit:
            #     yLowerLimit = currentMag - 0.4

        if currentMag > 23:
            currentMag = -9999.

        plt.clf()
        plt.cla()
        ax = fig.add_subplot(1, 1, 1)
        # print(currentMag)
        # print(bigTimeArray)
        # print(bigMagArray)

        # PLOT DATA VIA FILTER. MAGS AND LIMITS
        filterColor = {
            "r": "#29a329",
            "g": "#268bd2",
            "G": "#859900",
            "o": "#cb4b16",
            "c": "#2aa198",
            "U": "#6c71c4",
            "B": "blue",
            "V": "#008000",
            "R": "#e67300",
            "I": "#dc322f",
            "w": "#cc2900",
            "y": "#ff6666",
            "z": "#990000",
        }
        i = 0
        handles = []
        handlesAdded = []
        for k, v in list(dataset.items()):
            mag = v["mag"]
            magErr = v["magErr"]
            magMjd = v["magMjd"]
            limit = v["limit"]
            limitMjd = v["limitMjd"]
            magNoErr = v["magNoErr"]
            magNoErrMjd = v["magNoErrMjd"]
            magNoErrFudge = v["magNoErrFudge"]

            if k in filterColor:
                color = filterColor[k]
            else:
                color = "black"

            if len(limit):
                for l, m in zip(limit, limitMjd):
                    plt.text(m,
                             l,
                             u"\u21A7",
                             fontname='STIXGeneral',
                             size=30,
                             va='top',
                             ha='center',
                             clip_on=True,
                             color=color,
                             zorder=1)
            if len(magNoErr):
                theseMags = ax.errorbar(magNoErrMjd,
                                        magNoErr,
                                        yerr=magNoErrFudge,
                                        color=color,
                                        fmt='o',
                                        mfc=color,
                                        mec=color,
                                        zorder=2,
                                        ms=12.,
                                        alpha=0.8,
                                        linewidth=1.2,
                                        label=k,
                                        capsize=0)
                theseMags[-1][0].set_linestyle('--')

            if len(mag):
                theseMags = ax.errorbar(magMjd,
                                        mag,
                                        yerr=magErr,
                                        color=color,
                                        fmt='o',
                                        mfc=color,
                                        mec=color,
                                        zorder=3,
                                        ms=12.,
                                        alpha=0.8,
                                        linewidth=1.2,
                                        label=k,
                                        capsize=10)

            if not len(mag):
                theseMags = ax.errorbar([-500], [20],
                                        yerr=[0.2],
                                        color=color,
                                        fmt='o',
                                        mfc=color,
                                        mec=color,
                                        zorder=3,
                                        ms=12.,
                                        alpha=0.8,
                                        linewidth=1.2,
                                        label=k,
                                        capsize=10)

            if k not in handlesAdded:
                handles.append(theseMags)
                handlesAdded.append(k)

        # ADD LEGEND
        plt.legend(handles=handles,
                   prop={'size': 13.5},
                   bbox_to_anchor=(1., 1.25),
                   loc=0,
                   borderaxespad=0.,
                   ncol=18,
                   scatterpoints=1)

        # RHS AXIS TICKS
        plt.setp(ax.xaxis.get_majorticklabels(),
                 rotation=45,
                 horizontalalignment='right')
        ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

        # CHANGE PLOT TO FIXED TIME
        # SETUP THE AXES
        xUpperLimit = fixedXUpperLimit
        ax.set_xlabel('MJD', labelpad=20, fontsize=30)
        ax.set_ylabel('Magnitude', labelpad=20, fontsize=30)
        ax.set_title('')
        ax.set_xlim([xLowerLimit, xUpperLimit])
        ax.set_ylim([yUpperLimit, yLowerLimit])
        ax.xaxis.set_major_formatter(ticker.FormatStrFormatter('%d'))

        # GENERATE UT DATE AXIS FOR TOP OF PLOT
        lower, upper = ax.get_xlim()
        utLower = converter.mjd_to_ut_datetime(mjd=lower, datetimeObject=True)
        utUpper = converter.mjd_to_ut_datetime(mjd=upper, datetimeObject=True)
        ax3 = ax.twiny()
        ax3.set_xlim([utLower, utUpper])
        ax3.grid(True)
        ax.xaxis.grid(False)
        plt.setp(ax3.xaxis.get_majorticklabels(),
                 rotation=45,
                 horizontalalignment='left',
                 fontsize=14)
        ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d, %y'))

        # Y TICK FORMAT
        y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
        ax.yaxis.set_major_formatter(y_formatter)

        # PRINT CURRENT MAG AS SANITY CHECK
        # fig.text(0.1, 1.02, currentMag, ha="left", fontsize=40)

        # RECURSIVELY CREATE MISSING DIRECTORIES
        if not os.path.exists(saveLocation):
            try:
                os.makedirs(saveLocation)
            except:
                pass
        # SAVE THE PLOT
        filepath = """%(saveLocation)s%(saveFileName)s.png""" % locals()
        plt.savefig(filepath,
                    format='PNG',
                    bbox_inches='tight',
                    transparent=False,
                    pad_inches=0.4)
        # plt.show()
        plt.clf()  # clear figure
        plt.close()

        # TEST THAT PLOT FILE HAS ACTUALLY BEEN GENERATED
        try:
            with open(filepath):
                pass
            fileExists = True
        except IOError:
            raise IOError(
                "the path --pathToFile-- %s does not exist on this machine" %
                (filepath, ))
            filepath = False

        self.log.debug('completed the ``_create_lightcurve_plot_file`` method')

        return filepath, currentMag, gradient
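The nightly binning used for the polynomial fit above reduces each integer MJD to a single mean epoch and magnitude; a self-contained sketch:

```python
import math

# Group magnitudes by the integer part of their MJD and average each night.
def bin_by_night(mjds, mags):
    nights = {}
    for mjd, mag in zip(mjds, mags):
        nights.setdefault(int(math.floor(mjd)), []).append((mjd, mag))
    binnedMjds, binnedMags = [], []
    for night, points in sorted(nights.items()):
        binnedMjds.append(sum(p[0] for p in points) / len(points))
        binnedMags.append(sum(p[1] for p in points) / len(points))
    return binnedMjds, binnedMags

print(bin_by_night([58000.25, 58000.75, 58002.5], [18.0, 18.5, 19.0]))
# ([58000.5, 58002.5], [18.25, 19.0])
```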
Example #6
    def _clean_data_pre_ingest(self, surveyName, withinLastDays=False):
        """*clean up the list of dictionaries containing the PS data, pre-ingest*

        **Key Arguments**

        - ``surveyName`` -- the PS survey name
        -  ``withinLastDays`` -- the lower limit of observations to include (within the last N days from now). Default *False*, i.e. no limit


        **Return**

        - ``dictList`` -- the cleaned list of dictionaries ready for ingest


        **Usage**

        To clean the data from the PS 3pi survey:

        ```python
        dictList = ingesters._clean_data_pre_ingest(surveyName="3pi")
        ```

        Note you will also be able to access the data via ``ingester.dictList``

        """
        self.log.debug('starting the ``_clean_data_pre_ingest`` method')

        self.dictList = []

        # CALC MJD LIMIT
        if withinLastDays:
            mjdLimit = now(log=self.log).get_mjd() - float(withinLastDays)

        for row in self.csvDicts:
            # IF NOT IN THE LAST N DAYS - SKIP
            if withinLastDays and float(row["mjd_obs"]) < mjdLimit:
                continue
            if float(row["ra_psf"]) < 0:
                row["ra_psf"] = 360. + float(row["ra_psf"])
            thisDictionary = {}

            thisDictionary["candidateID"] = row["ps1_designation"]
            thisDictionary["ra_deg"] = row["ra_psf"]
            thisDictionary["dec_deg"] = row["dec_psf"]
            thisDictionary["mag"] = row["cal_psf_mag"]
            thisDictionary["magerr"] = row["psf_inst_mag_sig"]
            thisDictionary["observationMJD"] = row["mjd_obs"]
            thisDictionary["filter"] = row["filter"]

            try:
                thisDictionary["discDate"] = row["followup_flag_date"]
            except:
                pass
            thisDictionary["discMag"] = row["cal_psf_mag"]

            if "transient_object_id" in list(row.keys()):
                thisDictionary[
                    "objectURL"] = "http://star.pst.qub.ac.uk/sne/%(surveyName)s/psdb/candidate/" % locals(
                    ) + row["transient_object_id"]
            else:
                thisDictionary[
                    "objectURL"] = "http://star.pst.qub.ac.uk/sne/%(surveyName)s/psdb/candidate/" % locals(
                    ) + row["id"]

            # CLEAN UP IMAGE URLS
            target = row["target"]
            if target:
                id, mjdString, diffId, ippIdet, type = target.split('_')
                thisDictionary["targetImageURL"] = "http://star.pst.qub.ac.uk/sne/%(surveyName)s/site_media/images/data/%(surveyName)s" % locals() + '/' + \
                    str(int(float(mjdString))) + '/' + target + '.jpeg'

            ref = row["ref"]
            if ref:
                id, mjdString, diffId, ippIdet, type = ref.split('_')
                thisDictionary["refImageURL"]  = "http://star.pst.qub.ac.uk/sne/%(surveyName)s/site_media/images/data/%(surveyName)s" % locals() + '/' + \
                    str(int(float(mjdString))) + '/' + ref + '.jpeg'

            diff = row["diff"]
            if diff:
                id, mjdString, diffId, ippIdet, type = diff.split('_')
                thisDictionary["diffImageURL"] = "http://star.pst.qub.ac.uk/sne/%(surveyName)s/site_media/images/data/%(surveyName)s" % locals() + '/' + \
                    str(int(float(mjdString))) + '/' + diff + '.jpeg'

            self.dictList.append(thisDictionary)

        self.log.debug('completed the ``_clean_data_pre_ingest`` method')
        return self.dictList
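The QUB image-URL construction above splits the `target`/`ref`/`diff` identifier and folds the integer MJD into the path; a sketch with a made-up identifier:

```python
# Made-up values: surveyName and target are assumptions for illustration.
surveyName = "3pi"
target = "1130220261104671300_57000.123_2099173_118_diff"
iid, mjdString, diffId, ippIdet, imageType = target.split('_')
targetImageURL = (
    "http://star.pst.qub.ac.uk/sne/%(surveyName)s/site_media/images/data/%(surveyName)s" % locals()
    + '/' + str(int(float(mjdString))) + '/' + target + '.jpeg')
print(targetImageURL)
# http://star.pst.qub.ac.uk/sne/3pi/site_media/images/data/3pi/57000/1130220261104671300_57000.123_2099173_118_diff.jpeg
```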
Example #7
    def _create_lightcurve_plot_file(
            self,
            dataset,
            flatdata,
            flatLimits,
            objectNames,
            saveLocation,
            saveFileName):
        """*Generate the lightcurve and save to file*

        **Key Arguments**

        - ``log`` -- logger
        - ``dataset`` -- the observational dataset split into filters (and then mags, limits etc)
        - ``flatdata`` -- a flattened dataset to determine current magnitude
        - ``flatLimits`` -- a flattened dataset of non-detection limits
        - ``objectNames`` -- a single name or a list of names
        - ``saveLocation`` -- the folder to save the plot file to
        - ``saveFileName`` -- the filename to give the plot file (without extension)


        **Return**

        - ``filepath`` -- path to the lightcurve file
        - ``currentMag`` -- a prediction of the current magnitude if there is enough recent data
        - ``gradient`` -- a prediction of the gradient of recent data (on rise or decline?)

        """
        self.log.debug('starting the ``_create_lightcurve_plot_file`` method')

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(
            log=self.log
        )

        # INITIATE THE PLOT FIGURE - SQUARE
        fig = plt.figure(
            num=None,
            figsize=(10, 10),
            dpi=100,
            facecolor=None,
            edgecolor=None,
            frameon=True)
        ax = fig.add_subplot(1, 1, 1)

        # TICK LABEL SIZE
        mpl.rc('ytick', labelsize=25)
        mpl.rc('xtick', labelsize=25)
        mpl.rcParams.update({'font.size': 25})

        # INITIAL RESTRICTIONS
        currentMag = -9999
        gradient = -9999

        # WORK OUT RELATIVE DATES - NEEDED FOR CURRENT MAG ESTIMATES
        fixedTimeDataList = flatdata["mjd"]

        todayMjd = now(
            log=self.log
        ).get_mjd()

        timeList = []
        timeList[:] = [t - todayMjd for t in flatdata["mjd"]]

        # DETERMINE SENSIBLE AXIS LIMITS FROM FLATTENED DATA
        bigTimeArray, bigMagArray = np.array(
            flatdata["mjd"]), np.array(flatdata["mag"])
        xLowerLimit = min(bigTimeArray)
        xUpperLimit = max(bigTimeArray)
        latestTime = xUpperLimit
        xBorder = math.fabs((xUpperLimit - xLowerLimit)) * 0.1
        if xBorder < 5:
            xBorder = 5.
        xLowerLimit -= xBorder
        xUpperLimit += xBorder
        fixedXUpperLimit = xUpperLimit

        # RELATIVE TIMES - TO PREDICT CURRENT MAG
        relativeTimeArray = []
        relativeTimeArray[:] = [r - todayMjd for r in bigTimeArray]
        rxLowerLimit = min(relativeTimeArray)
        rxUpperLimit = max(relativeTimeArray)
        rlatestTime = xUpperLimit

        # POLYNOMIAL CONSTRAINTS USING COMBINED DATASETS
        # POLYNOMIAL/LINEAR SETTINGS
        # SETTINGS FILE
        polyOrder = 3
        # EITHER USE DATA IN THESE LAST NUMBER OF DAYS OR ...
        lastNumDays = 10.
        # ... IF NOT ENOUGH DATA USE THE LAST NUMBER OF DATA POINTS
        predictCurrentMag = True
        lastNumDataPoints = 3
        numAnchors = 2
        anchorSeparation = 30
        latestMag = bigMagArray[0]
        anchorPointMag = latestMag + 20.
        polyTimeArray, polyMagArray = [], []
        newArray = np.array([])

        # QUIT IF NOT ENOUGH DATA FOR POLYNOMIAL
        if len(bigTimeArray) <= lastNumDataPoints:
            predictCurrentMag = False
        while predictCurrentMag and lastNumDataPoints < 6:
            if len(bigTimeArray) <= lastNumDataPoints:
                predictCurrentMag = False
            elif predictCurrentMag and bigTimeArray[-1] - bigTimeArray[-lastNumDataPoints] < 5:
                lastNumDataPoints += 1
            else:
                break
        if predictCurrentMag and bigTimeArray[-1] - bigTimeArray[-lastNumDataPoints] < 5:
            predictCurrentMag = False

        # FIND THE MOST RECENT OBSERVATION TAKEN > LASTNUMDAYS DAYS BEFORE THE LAST
        # OBSERVATION
        breakpoint = 0
        for thisIndex, v in enumerate(relativeTimeArray):
            if breakpoint:
                break
            if v < max(relativeTimeArray) - lastNumDays:
                breakpoint = 1
        else:
            if breakpoint == 0:
                predictCurrentMag = False

        if predictCurrentMag:
            # DETERMINE GRADIENT OF SLOPE FROM LAST `LASTNUMDAYS` DAYS
            linearTimeArray = relativeTimeArray[0:thisIndex]
            linearMagArray = bigMagArray[0:thisIndex].tolist()
            # FIT AND PLOT THE POLYNOMIAL ASSOCIATED WITH ALL DATA SETS
            thisLinear = np.polyfit(linearTimeArray, linearMagArray, 1)
            gradient = thisLinear[0]

            # FROM GRADIENT DETERMINE WHERE ANCHOR POINTS ARE PLACED
            if gradient > 0.1:
                firstAnchorPointTime = 120.
            elif gradient < -0.5:
                firstAnchorPointTime = 50
            elif gradient > -0.5:
                firstAnchorPointTime = 120 - (np.abs(gradient) - 0.1) * 300.
            else:
                firstAnchorPointTime = 120

            if firstAnchorPointTime > 120.:
                firstAnchorPointTime = 120.

            firstAnchorPointTime = firstAnchorPointTime + latestTime
            if firstAnchorPointTime < 30.:
                firstAnchorPointTime = 30.

            # CREATE THE ARRAY OF DATA USED TO GENERATE THE POLYNOMIAL
            polyTimeArray = relativeTimeArray[0:thisIndex]
            polyMagArray = bigMagArray[0:thisIndex].tolist()

            printArray = []
            printArray[:] = [float("%(i)0.1f" % locals())
                             for i in polyTimeArray]
            infoText = "time array : %(printArray)s" % locals()
            warningColor = "#dc322f"

            # ANCHOR THE POLYNOMIAL IN THE FUTURE SO THAT ALL PREDICTED LIGHTCURVES
            # EVENTUALLY FADE TO NOTHING
            for i in range(numAnchors):
                polyTimeArray.insert(0, firstAnchorPointTime + i *
                                     anchorSeparation)
                polyMagArray.insert(0, anchorPointMag)

            # POLYNOMIAL LIMITS
            xPolyLowerLimit = min(polyTimeArray) - 2.0
            xPolyUpperLimit = max(polyTimeArray) + 2.0

        # SET AXIS LIMITS
        xUpperLimit = 5
        yLowerLimit = min(bigMagArray) - 0.3
        yUpperLimit = max(bigMagArray) + 0.5
        yBorder = math.fabs((yUpperLimit - yLowerLimit)) * 0.1
        yLowerLimit -= yBorder
        yUpperLimit += yBorder

        # EXTEND LOWER X-LIMIT FOR NON-DETECTIONS
        xLowerTmp = xLowerLimit
        for t, m in zip(flatLimits["mjd"], flatLimits["mag"]):
            if m > yLowerLimit and t < xLowerTmp + 2 and t > xLowerLimit - 40:
                xLowerTmp = t - 2
        xLowerLimit = xLowerTmp

        if predictCurrentMag:
            thisPoly = np.polyfit(polyTimeArray, polyMagArray, polyOrder)
            # FLATTEN INTO A FUNCTION TO MAKE PLOTTING EASIER
            flatLinear = np.poly1d(thisLinear)
            flatPoly = np.poly1d(thisPoly)
            xData = np.arange(xPolyLowerLimit, xPolyUpperLimit, 1)
            plt.plot(xData, flatPoly(xData), label="poly")
            plt.plot(xData, flatLinear(xData), label="linear")

            # PREDICT A CURRENT MAGNITUDE FROM THE PLOT
            currentMag = flatPoly(0.)
            if currentMag < latestMag:
                currentMag = currentMag + 0.2
            self.log.debug(
                'currentMag: %(currentMag)0.2f, m=%(gradient)s' % locals())

            ls = "*g" % locals()
            currentMagArray = np.array([currentMag])
            nowArray = np.array([todayMjd])
            line = ax.plot(nowArray, currentMagArray,
                           ls, label="current estimate")

            # SET THE AXES / VIEWPORT FOR THE PLOT
            if currentMag < yLowerLimit:
                yLowerLimit = currentMag - 0.4

        plt.clf()
        plt.cla()
        ax = fig.add_subplot(1, 1, 1)

        # PLOT DATA VIA FILTER. MAGS AND LIMITS
        filterColor = {
            "r": "#29a329",
            "g": "#268bd2",
            "G": "#859900",
            "o": "#cb4b16",
            "c": "#2aa198",
            "U": "#6c71c4",
            "B": "blue",
            "V": "#008000",
            "R": "#e67300",
            "I": "#dc322f",
            "w": "#cc2900",
            "y": "#ff6666",
            "z": "#990000",
        }
        i = 0
        handles = []
        handlesAdded = []
        for k, v in list(dataset.items()):
            mag = v["mag"]
            magErr = v["magErr"]
            magMjd = v["magMjd"]
            limit = v["limit"]
            limitMjd = v["limitMjd"]
            magNoErr = v["magNoErr"]
            magNoErrMjd = v["magNoErrMjd"]
            magNoErrFudge = v["magNoErrFudge"]

            if k in filterColor:
                color = filterColor[k]
            else:
                color = "black"

            if len(limit):
                for l, m in zip(limit, limitMjd):
                    plt.text(m, l, u"\u21A7", fontname='STIXGeneral',
                             size=30, va='top', ha='center', clip_on=True, color=color, zorder=1)
            if len(magNoErr):
                theseMags = ax.errorbar(magNoErrMjd, magNoErr, yerr=magNoErrFudge, color=color, fmt='o', mfc=color,
                                        mec=color, zorder=2, ms=12., alpha=0.8, linewidth=1.2,  label=k, capsize=0)
                theseMags[-1][0].set_linestyle('--')

            if len(mag):
                theseMags = ax.errorbar(magMjd, mag, yerr=magErr, color=color, fmt='o', mfc=color,
                                        mec=color, zorder=3, ms=12., alpha=0.8, linewidth=1.2,  label=k, capsize=10)

            if not len(mag):
                theseMags = ax.errorbar([-500], [20], yerr=[0.2], color=color, fmt='o', mfc=color,
                                        mec=color, zorder=3, ms=12., alpha=0.8, linewidth=1.2,  label=k, capsize=10)

            if k not in handlesAdded:
                handles.append(theseMags)
                handlesAdded.append(k)

        # ADD LEGEND
        plt.legend(handles=handles, prop={
                   'size': 13.5}, bbox_to_anchor=(1., 1.25), loc=0, borderaxespad=0., ncol=18, scatterpoints=1)

        # RHS AXIS TICKS
        plt.setp(ax.xaxis.get_majorticklabels(),
                 rotation=45, horizontalalignment='right')
        ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

        # CHANGE PLOT TO FIXED TIME
        # SETUP THE AXES
        xUpperLimit = fixedXUpperLimit
        ax.set_xlabel('MJD',  labelpad=20)
        ax.set_ylabel('Magnitude',  labelpad=20)
        ax.set_title('')
        ax.set_xlim([xLowerLimit, xUpperLimit])
        ax.set_ylim([yUpperLimit, yLowerLimit])
        ax.xaxis.set_major_formatter(ticker.FormatStrFormatter('%d'))

        # GENERATE UT DATE AXIS FOR TOP OF PLOT
        lower, upper = ax.get_xlim()
        utLower = converter.mjd_to_ut_datetime(mjd=lower, datetimeObject=True)
        utUpper = converter.mjd_to_ut_datetime(mjd=upper, datetimeObject=True)
        ax3 = ax.twiny()
        ax3.set_xlim([utLower, utUpper])
        ax3.grid(True)
        ax.xaxis.grid(False)
        plt.setp(ax3.xaxis.get_majorticklabels(),
                 rotation=45, horizontalalignment='left', fontsize=14)
        ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d, %y'))

        # Y TICK FORMAT
        y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
        ax.yaxis.set_major_formatter(y_formatter)

        # PRINT CURRENT MAG AS SANITY CHECK
        # fig.text(0.1, 1.02, currentMag, ha="left", fontsize=40)

        # RECURSIVELY CREATE MISSING DIRECTORIES
        if not os.path.exists(saveLocation):
            try:
                os.makedirs(saveLocation)
            except:
                pass
        # SAVE THE PLOT
        filepath = """%(saveLocation)s%(saveFileName)s.png""" % locals()
        plt.savefig(filepath, format='PNG', bbox_inches='tight', transparent=False,
                    pad_inches=0.4)
        # plt.show()
        plt.clf()  # clear figure
        plt.close()

        # TEST THAT PLOT FILE HAS ACTUALLY BEEN GENERATED
        try:
            with open(filepath):
                pass
            fileExists = True
        except IOError:
            raise IOError(
                "the path --pathToFile-- %s does not exist on this machine" %
                (filepath,))
            filepath = False

        self.log.debug('completed the ``_create_lightcurve_plot_file`` method')

        return filepath, currentMag, gradient
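The current-magnitude prediction above anchors the polynomial at faint magnitudes in the future so every fitted lightcurve eventually fades; a toy sketch with made-up photometry (times in days relative to today):

```python
import numpy as np

# Made-up, fading photometry: days relative to today (negative = past).
relativeDays = [-12., -9., -6., -3.]
mags = [18.0, 18.3, 18.6, 18.9]
anchorDays, anchorMag = [60., 90.], 38.9   # faint anchors in the future

polyTimeArray = anchorDays + relativeDays
polyMagArray = [anchorMag] * len(anchorDays) + mags

thisPoly = np.polyfit(polyTimeArray, polyMagArray, 3)
flatPoly = np.poly1d(thisPoly)
currentMag = float(flatPoly(0.))           # evaluate the fit at t = 0 (today)
print(round(currentMag, 2))
```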
Example #8
    def test_now_function(self):

        from astrocalc.times import now
        nowTime = now(
            log=log
        ).get_mjd()
Example #9
    def _update_day_tracker_table(self):
        """* update day tracker table*

        **Key Arguments:**
            # -

        **Return:**
            - None

        **Usage:**
            ..  todo::

                - add usage info
                - create a sublime snippet for usage
                - write a command-line tool for this method
                - update package tutorial with command-line tool info if needed

            .. code-block:: python 

                usage code 

        """
        self.log.info('starting the ``_update_day_tracker_table`` method')

        # YESTERDAY MJD
        mjd = now(log=self.log).get_mjd()
        yesterday = int(math.floor(mjd - 1))

        sqlQuery = u"""
            SELECT mjd FROM atlas_moving_objects.day_tracker order by mjd desc limit 1
        """ % locals()
        rows = readquery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.atlasMoversDBConn,
        )
        highestMjd = int(rows[0]["mjd"])

        converter = conversions(log=self.log)

        sqlData = []
        for m in range(highestMjd, yesterday):
            # CONVERTER TO CONVERT MJD TO DATE
            utDate = converter.mjd_to_ut_datetime(mjd=m,
                                                  sqlDate=True,
                                                  datetimeObject=False)
            sqlData.append({"mjd": m, "ut_date": utDate})

        insert_list_of_dictionaries_into_database_tables(
            dbConn=self.atlasMoversDBConn,
            log=self.log,
            dictList=sqlData,
            dbTableName="day_tracker",
            uniqueKeyList=["mjd"],
            dateModified=False,
            batchSize=10000,
            replace=True)

        self.atlasMoversDBConn.commit()

        self.log.info('completed the ``_update_day_tracker_table`` method')
        return None
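The day-tracker back-fill can be sketched without the `astrocalc` converter, using the fact that MJD 0 corresponds to 1858-11-17 00:00 UT (bounds below are illustrative):

```python
import datetime

mjdEpoch = datetime.datetime(1858, 11, 17)

def mjd_to_ut_date(mjd):
    # MJD 0 = 1858-11-17 00:00 UT
    return (mjdEpoch + datetime.timedelta(days=mjd)).strftime("%Y-%m-%d")

highestMjd, yesterday = 59000, 59003       # made-up bounds
sqlData = [{"mjd": m, "ut_date": mjd_to_ut_date(m)}
           for m in range(highestMjd, yesterday)]
print(sqlData[0])
# {'mjd': 59000, 'ut_date': '2020-05-31'}
```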
Example #10
    def _clean_data_pre_ingest(self, surveyName, withinLastDays=False):
        """*clean up the list of dictionaries containing the ATLAS data, pre-ingest*

        **Key Arguments**

        - ``surveyName`` -- the ATLAS survey name
        -  ``withinLastDays`` -- the lower limit of observations to include (within the last N days from now). Default *False*, i.e. no limit
        

        **Return**

        - ``dictList`` -- the cleaned list of dictionaries ready for ingest
        

        **Usage**

        To clean the data from the ATLAS survey:

        ```python
        dictList = ingesters._clean_data_pre_ingest(surveyName="ATLAS")
        ```

        Note you will also be able to access the data via ``ingester.dictList``
        
        """
        self.log.debug('starting the ``_clean_data_pre_ingest`` method')

        self.dictList = []

        # CALC MJD LIMIT
        if withinLastDays:
            mjdLimit = now(log=self.log).get_mjd() - float(withinLastDays)

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(log=self.log)

        for row in self.csvDicts:
            # IF NOT IN THE LAST N DAYS - SKIP
            flagMjd = converter.ut_datetime_to_mjd(
                utDatetime=row["followup_flag_date"])

            if withinLastDays and (float(row["earliest_mjd"]) < mjdLimit
                                   and float(flagMjd) < mjdLimit):
                continue

            # MASSAGE THE DATA IN THE INPUT FORMAT TO WHAT IS NEEDED IN THE
            # FEEDER SURVEY TABLE IN THE DATABASE
            target = row["target"]
            diff = row["diff"]
            ref = row["ref"]
            targetImageURL = None
            refImageURL = None
            diffImageURL = None

            if target:
                mjdStr = str(int(float(target.split("_")[1])))
                iid, mjdString, diffId, ippIdet, type = target.split('_')
                targetImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/site_media/images/data/atlas4" + '/' + \
                    mjdStr + '/' + target + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if ref:
                mjdStr = str(int(float(ref.split("_")[1])))
                iid, mjdString, diffId, ippIdet, type = ref.split('_')
                refImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/site_media/images/data/atlas4" + '/' + \
                    mjdStr + '/' + ref + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if diff:
                mjdStr = str(int(float(diff.split("_")[1])))
                iid, mjdString, diffId, ippIdet, type = diff.split('_')
                diffImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/site_media/images/data/atlas4" + '/' + \
                    mjdStr + '/' + diff + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            discDate = converter.mjd_to_ut_datetime(mjd=row["earliest_mjd"],
                                                    sqlDate=True)

            thisDictionary = {}
            thisDictionary["candidateID"] = row["name"]
            thisDictionary["ra_deg"] = row["ra"]
            thisDictionary["dec_deg"] = row["dec"]
            thisDictionary["mag"] = row["earliest_mag"]
            thisDictionary["observationMJD"] = row["earliest_mjd"]
            thisDictionary["filter"] = row["earliest_filter"]
            thisDictionary["discDate"] = discDate
            thisDictionary["discMag"] = row["earliest_mag"]
            thisDictionary["suggestedType"] = row["object_classification"]
            thisDictionary["targetImageURL"] = targetImageURL
            thisDictionary["refImageURL"] = refImageURL
            thisDictionary["diffImageURL"] = diffImageURL
            thisDictionary["objectURL"] = objectURL

            self.dictList.append(thisDictionary)

        self.log.debug('completed the ``_clean_data_pre_ingest`` method')
        return self.dictList
Example #11
        )
        ra = converter.ra_decimal_to_sexegesimal(
            ra=newRa,
            delimiter=":"
        )
        dec = converter.dec_decimal_to_sexegesimal(
            dec=newDec,
            delimiter=":"
        )

        print "%(newRa)s, %(newDec)s (%(ra)s, %(dec)s)" % locals()

    if now:
        from astrocalc.times import now
        mjd = now(
            log=log
        ).get_mjd()
        print(mjd)

    if dist and redshiftFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        if not hcFlag:
            hcFlag = 70.
        if not wmFlag:
            wmFlag = 0.3
        if not wvFlag:
            wvFlag = 0.7
        dists = c.redshift_to_distance(
            z=float(distVal),
            WM=float(wmFlag),