Example 1
    def process_reduced_data(self, target, alert=None):
        if not alert:
            try:
                target_datum = ReducedDatum.objects.filter(
                    target=target,
                    data_type='photometry',  # lowercase, to match the value written below
                    source_name=self.name).first()
                if not target_datum:
                    return
                alert = self.fetch_alert(target_datum.source_location)
            except HTTPError:
                raise Exception(
                    'Unable to retrieve alert information from broker')
        for prv_candidate in alert.get('prv_candidate') or []:
            if all([
                    key in prv_candidate['candidate']
                    for key in ['jd', 'magpsf', 'fid']
            ]):
                jd = Time(prv_candidate['candidate']['jd'],
                          format='jd',
                          scale='utc')
                value = {
                    'magnitude': prv_candidate['candidate']['magpsf'],
                    'filter': filters[prv_candidate['candidate']['fid']]
                }
                rd, created = ReducedDatum.objects.get_or_create(
                    timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                    value=json.dumps(value),
                    source_name=self.name,
                    source_location=alert['lco_id'],
                    data_type='photometry',
                    target=target)
                rd.save()
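This broker method assumes a module-level `filters` dict mapping ZTF `fid` codes to filter names; a minimal sketch, matching the mapping defined in Examples 24 and 26 below:

    filters = {1: 'g_ZTF', 2: 'r_ZTF', 3: 'i_ZTF'}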
Example 2
    def find_and_ingest_photometry(self, targets):

        ftp_tunnel = ftplib.FTP(BROKER_URL)
        ftp_tunnel.login()
        ftp_file_path = os.path.join('ogle', 'ogle4', 'ews')
        ftp_tunnel.cwd(ftp_file_path)

        previous_year = targets[0].name.split('-')[1]
        ftp_tunnel.cwd(previous_year)
        for target in targets:

            year = target.name.split('-')[1]
            event = target.name.split('-')[2] + '-' + target.name.split('-')[3]

            if year != previous_year:
                # move back up to the ews directory before entering the new year
                # (and remember it; the original never updated previous_year)
                ftp_tunnel.cwd('../')
                ftp_tunnel.cwd(year)
                previous_year = year

            ftp_tunnel.cwd(event.lower())
            with open('./data/ogle_phot.dat', 'wb') as phot_file:
                ftp_tunnel.retrbinary('RETR phot.dat', phot_file.write)
            photometry = np.loadtxt('./data/ogle_phot.dat')
            # iterate newest-first so ingestion can stop at the first point already stored
            photometry = photometry[photometry[:, 0].argsort()[::-1]]
            ftp_tunnel.cwd('../')
            for index, point in enumerate(photometry):
                jd = Time(point[0], format='jd', scale='utc')
                data = {
                    'magnitude': point[1],
                    'filter': 'OGLE_I',
                    'error': point[2]
                }
                rd, created = ReducedDatum.objects.get_or_create(
                    timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                    value=data,
                    source_name='OGLE',
                    source_location=target.name,
                    data_type='photometry',
                    target=target)

                if not created:
                    # this point (and everything older) is already ingested
                    break
            os.remove('./data/ogle_phot.dat')
Example 3
    def process_reduced_data(self, target, alert=None):
        if not alert:
            try:
                alert = self.fetch_alert(target.name)
            except HTTPError:
                raise Exception('Unable to retrieve alert information from broker')

        if alert is not None:
            alert_name = alert['name']
            alert_link = alert.get('per_alert', {}).get('link', '')
            lc_url = f'{BASE_BROKER_URL}/alerts/alert/{alert_name}/lightcurve.csv'
            alert_url = f'{BASE_BROKER_URL}/{alert_link}'
        elif target:
            lc_url = f'{BASE_BROKER_URL}/{target.name}/lightcurve.csv'
            alert_url = f'{BASE_BROKER_URL}/alerts/alert/{target.name}/'
        else:
            return

        response = requests.get(lc_url)
        response.raise_for_status()
        html_data = response.text.split('\n')

        try:
            times = [Time(i.timestamp).jd
                     for i in ReducedDatum.objects.filter(target=target)
                     if i.data_type == 'photometry']
        except Exception:
            times = []

        for entry in html_data[2:]:
            phot_data = entry.split(',')

            if len(phot_data) == 3:
                jd = Time(float(phot_data[1]), format='jd', scale='utc')

                if ('untrusted' not in phot_data[2]) and ('null' not in phot_data[2]) and (jd.value not in times):
                    value = {
                        'magnitude': float(phot_data[2]),
                        'filter': 'G'
                    }

                    rd, _ = ReducedDatum.objects.get_or_create(
                        timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                        value=value,
                        source_name=self.name,
                        source_location=alert_url,
                        data_type='photometry',
                        target=target)

                    rd.save()

        return
Example 4
    def process_reduced_data(self, target, alert=None):

        base_url = BROKER_URL.replace('/alertsindex', '/alert')

        if not alert:
            try:
                alert = self.fetch_alert(target.name)

            except HTTPError:
                raise Exception('Unable to retrieve alert information from broker')

        if alert:
            # only dereference the alert once we know we have one
            alert_url = BROKER_URL.replace('/alerts/alertsindex',
                                           alert['per_alert']['link'])
            lc_url = path.join(base_url, alert['name'], 'lightcurve.csv')
        elif target:
            # assumed URL shape for this fallback; the original left alert_url unset on this path
            alert_url = path.join(base_url, target.name)
            lc_url = path.join(base_url, target.name, 'lightcurve.csv')
        else:
            return

        response = requests.get(lc_url)
        response.raise_for_status()
        html_data = response.text.split('\n')

        for entry in html_data[2:]:
            phot_data = entry.split(',')

            if len(phot_data) == 3:
                if 'untrusted' not in phot_data[2] and 'null' not in phot_data[2]:
                    jd = Time(float(phot_data[1]), format='jd', scale='utc')

                    value = {
                        'magnitude': float(phot_data[2]),
                        'filter': 'G'
                    }

                    rd, created = ReducedDatum.objects.get_or_create(
                        timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                        value=json.dumps(value),
                        source_name=self.name,
                        source_location=alert_url,
                        data_type='photometry',
                        target=target)
                    rd.save()

        return
Example 5
    def to_generic_alert(self, alert):
        if alert['lastmjd']:
            timestamp = Time(alert['lastmjd'], format='mjd',
                             scale='utc').to_datetime(timezone=TimezoneInfo())
        else:
            timestamp = ''
        url = f'{ALERCE_URL}/object/{alert["oid"]}'

        # Use the smaller value between r and g if both are present, else use the value that is present
        mag = None
        if alert['mean_magpsf_r'] is not None and alert['mean_magpsf_g'] is not None:
            mag = (alert['mean_magpsf_g']
                   if alert['mean_magpsf_r'] > alert['mean_magpsf_g']
                   else alert['mean_magpsf_r'])
        elif alert['mean_magpsf_r'] is not None:
            mag = alert['mean_magpsf_r']
        elif alert['mean_magpsf_g'] is not None:
            mag = alert['mean_magpsf_g']

        if alert['pclassrf'] is not None:
            score = alert['pclassrf']
        elif alert['pclassearly'] is not None:
            score = alert['pclassearly']
        else:
            score = None

        return GenericAlert(timestamp=timestamp,
                            url=url,
                            id=alert['oid'],
                            name=alert['oid'],
                            ra=alert['meanra'],
                            dec=alert['meandec'],
                            mag=mag,
                            score=score)
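The comment above asks for the smaller of the r and g magnitudes when both are present. Assuming that intent, the whole branch collapses to a short sketch:

    present = [m for m in (alert['mean_magpsf_r'], alert['mean_magpsf_g'])
               if m is not None]
    mag = min(present) if present else None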
Example 6
    def _process_photometry_from_plaintext(self, data_product, extras):

        photometry = []

        with default_storage.open(data_product.data.name, 'r') as data_aws:
            data = ascii.read(data_aws.read(),
                              names=['time', 'filter', 'magnitude', 'error'])

        if len(data) < 1:
            raise InvalidFileFormatException('Empty table or invalid file type')

        for datum in data:
            time = Time(float(datum['time']), format='mjd')
            utc = TimezoneInfo(utc_offset=0*units.hour)
            time.format = 'datetime'
            value = {
                'timestamp': time.to_datetime(timezone=utc),
                'magnitude': datum['magnitude'],
                'filter': datum['filter'],
                'error': datum['error']
            }
            value.update(extras)

            photometry.append(value)

        return photometry
Example 7
    def _process_photometry_from_plaintext(self, data_product):
        """
        Processes the photometric data from a plaintext file into a dict, which can then be stored as a ReducedDatum
        for further processing or display. File is read using astropy as specified in the below documentation. The file
        is expected to be a multi-column delimited file, with headers for time, magnitude, filter, and error.
        # http://docs.astropy.org/en/stable/io/ascii/read.html

        Parameters
        ----------
        data_product : tom_dataproducts.models.DataProduct
            Photometric DataProduct which will be processed into a dict

        Returns
        -------
        dict
            python dict containing the data from the DataProduct
        """

        photometry = {}

        data = ascii.read(data_product.data.path)
        for datum in data:
            time = Time(float(datum['time']), format='mjd')
            utc = TimezoneInfo(utc_offset=0*units.hour)
            time.format = 'datetime'
            value = {
                'magnitude': datum['magnitude'],
                'filter': datum['filter'],
                'error': datum['error']
            }
            photometry.setdefault(time.to_datetime(timezone=utc), []).append(value)

        return photometry
Example 8
    def _process_photometry_from_plaintext(self, data_product):
        """
        Processes the photometric data from a plaintext file into a list of dicts. File is read using astropy as
        specified in the below documentation. The file is expected to be a multi-column delimited file, with headers for
        time, magnitude, filter, and error.
        # http://docs.astropy.org/en/stable/io/ascii/read.html

        :param data_product: Photometric DataProduct which will be processed into a list of dicts
        :type data_product: DataProduct

        :returns: python list containing the photometric data from the DataProduct
        :rtype: list
        """

        photometry = []

        data = ascii.read(data_product.data.path)
        if len(data) < 1:
            raise InvalidFileFormatException('Empty table or invalid file type')

        for datum in data:
            time = Time(float(datum['time']), format='mjd')
            utc = TimezoneInfo(utc_offset=0*units.hour)
            time.format = 'datetime'
            value = {
                'timestamp': time.to_datetime(timezone=utc),
                'magnitude': datum['magnitude'],
                'filter': datum['filter'],
                'error': datum['error']
            }
            photometry.append(value)

        return photometry
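Examples 6, 7, and 8 are variants of the same plaintext processor. For reference, astropy's `ascii.read` auto-detects simple delimited text, so a hypothetical input file could look like:

    time magnitude filter error
    58899.20 17.34 r 0.05
    58900.11 17.28 r 0.04

Each row then yields one dict with a timezone-aware timestamp plus the magnitude, filter, and error columns.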
Example 9
    def to_generic_alert(self, alert):
        if alert['lastmjd']:
            timestamp = Time(alert['lastmjd'], format='mjd',
                             scale='utc').to_datetime(timezone=TimezoneInfo())
        else:
            timestamp = ''
        url = '{0}/{1}/{2}'.format(ALERCE_URL, 'object', alert['oid'])

        r_only = (alert['mean_magpsf_g'] is None
                  and alert['mean_magpsf_r'] is not None)
        both_exist = (alert['mean_magpsf_g'] is not None
                      and alert['mean_magpsf_r'] is not None)
        # note: the original `r < g is not None` chained the comparison with an
        # `is not None` test; the intended condition is simply r < g
        r_is_smaller = (both_exist
                        and alert['mean_magpsf_r'] < alert['mean_magpsf_g'])
        is_r = r_only or r_is_smaller

        max_mag = alert['mean_magpsf_r'] if is_r else alert['mean_magpsf_g']

        if alert['pclassrf']:
            score = alert["pclassrf"]
        elif alert['pclassearly']:
            score = alert['pclassearly']
        else:
            score = None

        return GenericAlert(timestamp=timestamp,
                            url=url,
                            id=alert['oid'],
                            name=alert['oid'],
                            ra=alert['meanra'],
                            dec=alert['meandec'],
                            mag=max_mag,
                            score=score)
Example 10
    def _process_photometry_from_plaintext(self, data_product):
        """
        Processes the photometric data from a plaintext file into a dict, which can then be stored as a ReducedDatum
        for further processing or display. File is read using astropy as specified in the below documentation. The file
        is expected to be a multi-column delimited file, with headers for time, magnitude, filter, and error.
        # http://docs.astropy.org/en/stable/io/ascii/read.html

        :param data_product: Photometric DataProduct which will be processed into a dict
        :type data_product: DataProduct

        :returns: python dict containing the data from the DataProduct
        :rtype: dict
        """
        photometry = {}

        data = ascii.read(data_product.data.name, format='fixed_width')
        if len(data) < 1:
            raise InvalidFileFormatException(
                'Empty table or invalid file type')

        utc = TimezoneInfo(tzname='UTC')
        for datum in data:
            time = Time(datum['MJD'], format='mjd')
            value = {
                'magnitude': datum['mag'],
                'filter': datum['filt'],
                'error': datum['dmag']
            }
            photometry.setdefault(time.to_datetime(timezone=utc),
                                  []).append(value)
        return photometry
Example 11
def settzone():
    global t, ttemp, tzone, utc_offset
    tzone = str(tvar.get())
    if tzone == 'EST':
        utc_offset = TimezoneInfo(utc_offset=-5 * u.hour)  # EST
    elif tzone == 'EDT':
        utc_offset = TimezoneInfo(utc_offset=-4 * u.hour)  # EDT
    elif tzone == 'UTC':
        utc_offset = TimezoneInfo(utc_offset=0 * u.hour)  # UTC

    datelbl.configure(text=t.to_datetime(
        timezone=utc_offset).strftime("%B %d, %Y at %H:%M " + tzone))
    jdlbl.configure(text="JD " + str('% 11.3f' % t.jd))
    deletemoons()
    updatemoons()
    moonlabel.configure(text="Click a moon to measure its position!")
    poslabel.configure(text="")
Example 12
    def process_reduced_data(self, target, alert=None):
        if not alert:
            try:
                target_datum = ReducedDatum.objects.filter(
                    target=target,
                    data_type='photometry',
                    source_name=self.name).first()
                if not target_datum:
                    return
                alert = self.fetch_alert(target_datum.source_location)
            except HTTPError:
                raise Exception(
                    'Unable to retrieve alert information from broker')
        if not alert.get('prv_candidate'):
            alert = self.fetch_alert(alert['lco_id'])

        candidates = [{
            'candidate': alert.get('candidate')
        }] + (alert.get('prv_candidate') or [])
        for candidate in candidates:
            if all([
                    key in candidate['candidate']
                    for key in ['jd', 'magpsf', 'fid', 'sigmapsf']
            ]):
                nondetection = False
            elif all(key in candidate['candidate']
                     for key in ['jd', 'diffmaglim', 'fid']):
                nondetection = True
            else:
                continue
            jd = Time(candidate['candidate']['jd'], format='jd', scale='utc')
            value = {'filter': filters[candidate['candidate']['fid']]}
            if nondetection:
                value['limit'] = candidate['candidate']['diffmaglim']
            else:
                value['magnitude'] = candidate['candidate']['magpsf']
                value['error'] = candidate['candidate']['sigmapsf']
            rd, _ = ReducedDatum.objects.get_or_create(
                timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                value=value,
                source_name=self.name,
                source_location=alert['lco_id'],
                data_type='photometry',
                target=target)
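To illustrate the branch above with hypothetical data: the first candidate carries `magpsf`/`sigmapsf` and is stored as a detection, the second carries only `diffmaglim` and is stored as a limit:

    detection = {'candidate': {'jd': 2459000.5, 'magpsf': 18.2,
                               'sigmapsf': 0.1, 'fid': 1}}
    nondetection = {'candidate': {'jd': 2459001.5, 'diffmaglim': 20.5, 'fid': 2}}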
Example 13
    def __init__(self) -> None:
        # Variables holding the site information
        self.location: EarthLocation = EarthLocation.from_geodetic(
            lon=Longitude("00d00m00.0s"),
            lat=Latitude("00d00m00.0s"),
            height=0.0 * u.meter,
        )
        self.name: str = "La Serena"
        self.tz: TimezoneInfo = TimezoneInfo(utc_offset=-4 * u.hour)
Example 14
def calculate_ephem(ra, dec, date, tzoffset=0, site=["MWA"], show_sun=False, draw_peaks=False):
    """Compute the ephemeris for a target on a given day at a target position."""

    # first, let's set up the times we want to evaluate the source position for
    year, month, day = date.split("/")
    hours = np.arange(0, 24, 0.01)
    hr = [int(i) for i in hours]
    mi = [int((hours[i]-hr[i]) * 60) for i in range(len(hours))]
    se = [0]*len(hours)
    t = [datetime(int(year), int(month), int(day), h, m, s) for h, m, s in zip(hr, mi, se)]

    times = Time(t, scale='utc', format='datetime')  # list of Time objects at which the ephemeris is computed
    tz = TimezoneInfo(utc_offset=tzoffset * u.hour)  # timezone object to convert times
    plttimes = date2num([t for t in times.datetime])  # times in plottable format

    # set up figure for plot
    fig = plt.figure(figsize=(10, 8))
    ax = fig.add_subplot(111)

    # site can be a list, so we need to create an Observatory for each
    # and calculate the ephemeris for each location
    site_max = []
    coords = SkyCoord(ra, dec, unit=(u.hourangle, u.deg))

    logger.info("Target: {0}".format(coords.to_string('hmsdms')))
    for s in site:
        o = Observatory(s)
        o.compute_target_position(coords, times, tz)
        logger.info("    max. target elevation  = {0}".format(o.altmax))
        logger.info("    time of max. elevation = {0} UTC{1}".format(o.maxtimeLocalStr, o.utcoffsetStr))

        if show_sun:
            o.compute_sun_position(times[::30])
        site_max.append(plot_ephem(ax, plttimes, o, plot_sun=show_sun, draw_peaks=draw_peaks))

    # plotting aesthetics
    ax.set_xlim(min(plttimes), max(plttimes))
    if ax.get_ylim()[1] > 90:
        ax.set_ylim(-10, 91)
    else:
        ax.set_ylim(-10, None)

    ys = [ax.get_ylim()[0]] * len(plttimes)
    ax.fill_between(plttimes, ys, interpolate=True, color='gray')  # fill from axis lower limit to 0

    title_str = "Target :: {0}\n{1}".format(coords.to_string('hmsdms'), "\n".join(site_max))
    ax.set_title(title_str)
    ax.set_xlabel("Time (UTC)", fontsize=14)
    ax.set_ylabel("Elevation  [deg]", fontsize=14)

    ax.xaxis.set_major_formatter(DateFormatter("%m/%d %H:%M"))
    plt.xticks(rotation=30, ha="right")
    ax.legend()
    ax.grid()
    plt.tight_layout()
    plt.show()
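A hypothetical call (coordinates, date, and timezone offset are placeholders):

    calculate_ephem('05:34:31.94', '+22:00:52.2', '2021/03/31',
                    tzoffset=8, site=['MWA'], show_sun=True)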
Example 15
def ingest_photometry_to_an_event(target, times_in_jd, mags, emags, filters, origin='', force_ingest=False):

    for index, time in enumerate(times_in_jd):
        jd = Time(time, format='jd', scale='utc')
        data = {
            'magnitude': mags[index],
            'filter': filters[index],
            'error': emags[index]
        }
        rd, created = ReducedDatum.objects.get_or_create(
            timestamp=jd.to_datetime(timezone=TimezoneInfo()),
            value=data,
            source_name=target.name,
            # 'origin' assumed here; the original referenced an undefined 'alert'
            source_location=origin,
            data_type='photometry',
            target=target)

        rd.save()
Example 16
    def _compute_night(self, dateobs, timezone):
        t = Time(dateobs, format='isot', scale='utc')
        if timezone is not None:
            timezone = TimezoneInfo(utc_offset=timezone * units.hour)
        # Assume all images were taken in the same night
        dat = t.to_datetime(timezone)
        # Assume one night runs from noon on day dd to noon on day dd + 1:
        # before noon, a frame belongs to the previous calendar day.
        # datetime.timedelta handles month/year boundaries, which the
        # original plain `dd -= 1` did not.
        if dat.hour < 12:
            dat -= timedelta(days=1)
        return '{:04d}-{:02d}-{:02d}'.format(dat.year, dat.month, dat.day)
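A quick check of the noon-to-noon convention, assuming `obs` is an instance exposing this method (hypothetical):

    obs._compute_night('2021-03-31T03:00:00', None)  # -> '2021-03-30' (before noon)
    obs._compute_night('2021-03-31T15:00:00', None)  # -> '2021-03-31' (after noon)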
Example 17
    def __init__(self,year,month,day):
        """
            Setup the nightlog framework for a given obsday.
        """
        self.obsday = year+month+day
        self.root_dir = os.path.join(os.environ['NL_DIR'],self.obsday)
        self.image_dir = os.path.join(self.root_dir,"images")
        self.os_dir = os.path.join(self.root_dir,"OperationsScientist")
        self.dqs_dir = os.path.join(self.root_dir,"DataQualityAssessment")
        self.other_dir = os.path.join(self.root_dir,"OtherInput")
        self.os_startcal_dir = os.path.join(self.os_dir,'StartCal')
        self.os_obs_dir = os.path.join(self.os_dir,'Observations')
        self.other_obs_dir = os.path.join(self.other_dir,'Observations')
        self.dqs_exp_dir = os.path.join(self.dqs_dir, 'Exposures')
        self.os_pb_dir = os.path.join(self.os_dir,'Problem')
        self.dqs_pb_dir = os.path.join(self.dqs_dir,'Problem')
        self.other_pb_dir = os.path.join(self.other_dir,'Problem')
        self.nightplan_file = os.path.join(self.os_dir,'objectives.pkl')
        self.milestone_file = os.path.join(self.os_dir,'milestones.pkl')
        self.os_cl = os.path.join(self.os_dir,'checklist')
        self.dqs_cl = os.path.join(self.dqs_dir,'checklist')
        self.exp_file_pkl = os.path.join(self.dqs_dir,'exposures.pkl')
        self.dqs_exp_file = os.path.join(self.dqs_dir,'exposures')
        self.weather_file = os.path.join(self.os_dir,'weather.csv')
        self.meta_json = os.path.join(self.root_dir,'nightlog_meta.json')
        self.image_file = os.path.join(self.image_dir, 'image_list')
        self.upload_image_file = os.path.join(self.image_dir, 'upload_image_list')
        self.contributer_file = os.path.join(self.root_dir, 'contributer_file')
        self.summary_file = os.path.join(self.root_dir, 'summary_file')
        self.explist_file = os.path.join(self.root_dir, 'exposures.csv')
        self.telem_plots_file = os.path.join(self.root_dir, 'telem_plots.png')

        # Set this if you want to allow for replacing lines or not
        self.replace = True

        self.utc = TimezoneInfo()
        self.kp_zone = TimezoneInfo(utc_offset=-7*u.hour)
Example 18
    def __init__(self, latitude, longitude, altitude, utc_offset=-6):

        self.lat = latitude
        self.lon = longitude
        if isinstance(latitude, str):
            self.lat = parse_latlon_string(latitude)

        if isinstance(longitude, str):
            # the original re-tested 'latitude' here
            self.lon = parse_latlon_string(longitude)

        self.location = EarthLocation(lat=self.lat * u.deg,
                                      lon=self.lon * u.deg,
                                      # use the 'altitude' argument; the original hard-coded 290 m
                                      height=altitude * u.m)
        self.utc_offset = utc_offset
        self.tz = TimezoneInfo(utc_offset=self.utc_offset * u.hour)
Example 19
    def to_generic_alert(self, alert):
        if alert['lastmjd']:
            timestamp = Time(alert['lastmjd'], format='mjd',
                             scale='utc').to_datetime(timezone=TimezoneInfo())
        else:
            timestamp = ''
        url = '{0}/{1}/{2}'.format(ALERCE_URL, 'vue/object', alert['oid'])

        return GenericAlert(timestamp=timestamp,
                            url=url,
                            id=alert['oid'],
                            name=alert['oid'],
                            ra=alert['meanra'],
                            dec=alert['meandec'],
                            mag=alert['mean_magpsf_g'],
                            score=None)
Example 20
def extract_obstime_from_name(filename, tz=13):
    """
    Takes a filename in XSC convention and generates a Time object based on it.

    filename: the XSC convention filename from which time and date will be extracted
    tz: the timezone that the date and time are referenced to (+13 for NZT)
    """
    name = filename.split("/")[-1]
    datebits = name.split("--")
    (Y,M,D) = datebits[0].split("-")
    (h,m,s) = datebits[1].split("-")
    ms = datebits[2].split(".")[0]
    tz = TimezoneInfo(utc_offset=tz*u.hour)
    t = datetime(int(Y), int(M), int(D), int(h), int(m), int(s), 1000*int(ms), tzinfo=tz)
    obstime = Time(t)
    obstime.format = 'unix'
    return obstime
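From the parsing logic, the expected filename convention is `YYYY-MM-DD--hh-mm-ss--ms.ext`; a hypothetical usage:

    obstime = extract_obstime_from_name('2021-03-31--21-15-07--250.fits', tz=13)
    print(obstime.unix)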
Example 21
def result(request):
    long = request.POST.get('obsLon')
    lat = request.POST.get('obsLat')
    obsdate = request.POST.get('obsDate')
    obsTime = request.POST.get('obsTime')
    ob = ephem.Observer()
    ob.long = long
    ob.lat = lat
    ob.date = ephem.localtime(ephem.Date(str(obsdate) + " " + str(obsTime)))
    ob.elevation = 100
    stars = [["65:7:30", "45:38:42"], ["25:5:31.2", "50:29:27.6"],
             ["91:17:27.6", "19:15:14.4"], ["158:13:1.2", "25:13:19.2"],
             ["224:24:7.2", "73:4:40.8"]]
    mandiobserver = ephem.Observer()
    mandiobserver.long = "76:59:9.96"
    mandiobserver.lat = "31:46:31.44"  # Mandi sits near 31.8 N, 77.0 E; the original had lat/long swapped and dot-separated
    mandiobserver.elevation = 1000
    mandiobserver.date = ephem.localtime(ephem.Date("2018/03/30 03:00:00"))
    new_values = []
    for star in stars:
        ra, dec = mandiobserver.radec_of(star[0], star[1])
        star1 = ephem.FixedBody()
        star1._ra = ra
        star1._dec = dec
        star1.compute(ob)
        new_values.append(star1)
    new_values1 = []
    for value in new_values:
        s = [str(value.alt), str(value.az)]
        new_values1.append(s)
    context = {'values': new_values1, 'long': long, 'lat': lat}
    newt = TimezoneInfo(utc_offset=5.5 * u.hour)
    ob1 = Observer(latitude=float(ob.lat) * u.rad,  # pyephem angles are radians, not degrees
                   longitude=float(ob.long) * u.rad,
                   elevation=ob.elevation * u.m,
                   timezone=newt)
    for value in new_values:
        starCoord = SkyCoord(ra=float(value.ra) * u.rad,
                             dec=float(value.dec) * u.rad)
        starObj = FixedTarget(coord=starCoord)
        plot_sky(starObj, ob1, Time(["2018-03-30 12:00:00"]))
    plt.show()
    # plt.savefig('static/astro/images/plot.jpg')
    return render(request, 'astro/result.html', context)
Example 22
    def to_generic_alert(self, alert):
        if alert['lastmjd']:
            timestamp = Time(alert['lastmjd'], format='mjd',
                             scale='utc').to_datetime(timezone=TimezoneInfo())
        else:
            timestamp = ''
        url = f'{ALERCE_URL}/object/{alert["oid"]}'

        mag = None  # mag is no longer returned in the object list

        score = alert['probability']

        return GenericAlert(timestamp=timestamp,
                            url=url,
                            id=alert['oid'],
                            name=alert['oid'],
                            ra=alert['meanra'],
                            dec=alert['meandec'],
                            mag=mag,
                            score=score)
Example 23
def obs_info(lat_str, lon_str, elev, utc_offset):
    """
    Create 'astropy.coord.EarthLocation' and 'astropy.time.TimezoneInfo' 
    objects from observatory geographical coordinates and timezone offset.

    Parameters
    ----------
    lat_str : str
        Observatory latitude.
    lon_str : str
        Observatory longitude.
    elev : float
        Observatory elevation (in meters).
    utc_offset : float
        Observatory timezone offset (in hours).

    Returns
    -------
    tp_info : tuple
        Tuple with 'astropy.coord.EarthLocation' (first element) and
        'astropy.time.TimezoneInfo' objects (second element).

    """

    # get observatory location and timezone
    la = coord.Latitude(lat_str, unit=u.deg)
    lo = coord.Longitude(lon_str, unit=u.deg)
    el = u.Quantity(elev, unit=u.m)
    uo = u.Quantity(utc_offset, unit=u.hour)  # utc offset (hours)

    # create earthlocation and timezoneinfo objects
    oloc = coord.EarthLocation(lat=la, lon=lo, height=el)
    otmz = TimezoneInfo(utc_offset=uo)

    tp_info = (oloc, otmz)
    return tp_info
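A hypothetical usage, converting a UTC time to the observatory's local time (coordinates are placeholders):

    loc, tz = obs_info('-30:10:10', '-70:48:23', 2200.0, -4.0)
    local = Time('2021-03-31T03:00:00', scale='utc').to_datetime(timezone=tz)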
Example 24
    def fetch_alerts(self):
        mars = MARSBroker()

        #download from ZTF web server (i.e. Przemek webpage)
        ztf_ipac = urllib.request.urlopen(BROKER_URL).readlines()

        list_of_events = [
            str(i)[6:-8] for i in ztf_ipac if '<td>ZTF' in str(i)
        ]
        list_of_mars_links = [
            str(i).split('"')[1] for i in ztf_ipac
            if '<td><a href="https://mars.lco.global/' in str(i)
        ]

        for index, event in enumerate(list_of_events):

            MARS_candidates = requests.get(list_of_mars_links[index] +
                                           '&format=json').json()
            if len(MARS_candidates['results']) == 0:
                pass

            else:

                cone_search = str(MARS_candidates['results'][0]['candidate']
                                  ['ra']) + ',' + str(
                                      MARS_candidates['results'][0]
                                      ['candidate']['dec']) + ',' + str(0.0003)
                mars_form = MARSQueryForm({
                    'cone': cone_search,
                    'query_name': 'Query ZTF IPAC : ' + event,
                    'broker': 'MARS'
                })
                mars_form.is_valid()
                query = BrokerQuery.objects.create(
                    name='Query ZTF IPAC : ' + event,
                    broker=mars.name,
                    parameters=mars_form.cleaned_data)
                alerts = mars.fetch_alerts(query.parameters)
                alerts = [*alerts]

                name = event
                ra = np.median([alert['candidate']['ra'] for alert in alerts])
                dec = np.median(
                    [alert['candidate']['dec'] for alert in alerts])

                try:
                    target = Target.objects.get(name=name)
                except Target.DoesNotExist:
                    target, created = Target.objects.get_or_create(
                        name=name, ra=ra, dec=dec, type='SIDEREAL', epoch=2000)

                    if created:
                        target.save()

                filters = {1: 'g_ZTF', 2: 'r_ZTF', 3: 'i_ZTF'}
                try:
                    times = [
                        Time(i.timestamp).jd
                        for i in ReducedDatum.objects.filter(target=target)
                        if i.data_type == 'photometry'
                    ]
                except Exception:
                    times = []

                for alert in alerts:

                    if all([
                            key in alert['candidate']
                            for key in ['jd', 'magpsf', 'fid', 'sigmapsf']
                    ]):
                        jd = Time(alert['candidate']['jd'],
                                  format='jd',
                                  scale='utc')

                        if alert['candidate']['isdiffpos'] == 't':
                            signe = 1
                        else:
                            signe = -1

                        flux = 10**(
                            -0.4 * alert['candidate']['magnr']
                        ) + signe * 10**(-0.4 * alert['candidate']['magpsf'])
                        eflux = ((10**(-0.4 * alert['candidate']['magnr']) *
                                  alert['candidate']['sigmagnr'])**2 +
                                 (signe *
                                  10**(-0.4 * alert['candidate']['magpsf']) *
                                  alert['candidate']['sigmapsf'])**2)**0.5

                        mag = -2.5 * np.log10(flux)
                        emag = eflux / flux

                        value = {
                            'magnitude': mag,
                            'filter': filters[alert['candidate']['fid']],
                            'error': emag
                        }

                        if jd.value not in times:
                            rd, _ = ReducedDatum.objects.get_or_create(
                                timestamp=jd.to_datetime(
                                    timezone=TimezoneInfo()),
                                value=value,
                                source_name='ZTF IPAC',
                                source_location='IRSA',
                                data_type='photometry',
                                target=target)

                            rd.save()
Example 25
@author: cook
"""
from astropy.table import Table
from astropy.time import Time, TimezoneInfo
from astropy import units as uu
from datetime import datetime
import matplotlib.pyplot as plt
import numpy as np
import os
import glob
from tqdm import tqdm

# =============================================================================
# Define variables
# =============================================================================
EST = TimezoneInfo(utc_offset=-4 * uu.hour)  # note: -4 h is the EDT offset; EST proper is UTC-5

TODAY = '2021-03-31'

WORKSPACE = '/spirou/cook/db_test'

LOG_DIR = '/spirou/drs-data/mini-data-07000/msg/processing/APEROG*'

LOG_STR = '-!|'

# =============================================================================
# Define functions
# =============================================================================

# =============================================================================
# Start of code
Example 26
def target_post_save(target, created):
  def get(objectId):
    url = 'https://mars.lco.global/'
    request = {'queries':
      [
        {'objectId': objectId}
      ]
      }
  
    try:
      r = requests.post(url, json=request)
      results = r.json()['results'][0]['results']
      return results
    
    except Exception as e:
      return [None,'Error message : \n'+str(e)]
 
  logger.info('Target post save hook: %s created: %s', target, created)

  if target_extra_field(target=target, name='tweet'):
    #Post to Twitter!
    twitter_url = 'https://api.twitter.com/1.1/statuses/update.json'

    api_key = os.environ['TWITTER_APIKEY']
    api_secret = os.environ['TWITTER_SECRET']
    access_token = os.environ['TWITTER_ACCESSTOKEN']
    access_secret = os.environ['TWITTER_ACCESSSECRET']
    auth = OAuth1(api_key, api_secret, access_token, access_secret)

    coords = SkyCoord(target.ra, target.dec, unit=u.deg)
    coords = coords.to_string('hmsdms', sep=':',precision=1,alwayssign=True)

    #Explosion emoji
    tweet = ''.join([u'\U0001F4A5 New target alert! \U0001F4A5\n',
        'Name: {name}\n'.format(name=target.name),
        'Coordinates: {coords}\n'.format(coords=coords)])
    status = {
            'status': tweet
    }

    response = requests.post(twitter_url, params=status, auth=auth)
 

  if 'ZTF' in target.name:
    objectId = target.name 
    alerts = get(objectId)
    
    filters = {1: 'g_ZTF', 2: 'r_ZTF', 3: 'i_ZTF'}
    for alert in alerts:
        # skip the error payload returned by get() on failure
        if not alert or 'candidate' not in alert:
            continue
        if all([key in alert['candidate'] for key in ['jd', 'magpsf', 'fid', 'sigmapsf']]):
            jd = Time(alert['candidate']['jd'], format='jd', scale='utc')
            value = {
                'magnitude': alert['candidate']['magpsf'],
                'filter': filters[alert['candidate']['fid']],
                'error': alert['candidate']['sigmapsf']
            }
            rd, created = ReducedDatum.objects.get_or_create(
                timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                value=json.dumps(value),
                source_name=target.name,
                source_location=alert['lco_id'],
                data_type='photometry',
                target=target)
            rd.save()
Example 27
    def handle(self, *args, **options):

        username = os.getenv('IRSA_USERNAME')
        password = os.getenv('IRSA_PASSWORD')
        filters = {'zg': 'g_ZTF', 'zr': 'r_ZTF'}
        all_events = options['events_to_harvest']

        list_of_targets = Target.objects.filter(name=all_events)

        if all_events == 'all':
            list_of_targets = Target.objects.filter()
        if all_events == 'alive':
            list_of_targets = Target.objects.filter(
                targetextra__in=TargetExtra.objects.filter(key='Alive',
                                                           value=True))
        if all_events[0] == '[':

            years = all_events[1:-1].split(',')
            events = Target.objects.filter()
            list_of_targets = []
            for year in years:
                # accumulate across years; plain assignment kept only the last year
                list_of_targets += [i for i in events if year in i.name]

        list_of_targets = list(list_of_targets)
        random.shuffle(list_of_targets)

        for target in list_of_targets:

            ra = target.ra
            dec = target.dec
            radius = 0.0001  # search radius in degrees (the IRSA query below takes degrees)

            try:
                times = [
                    Time(i.timestamp).jd
                    for i in ReducedDatum.objects.filter(target=target)
                    if i.data_type == 'photometry'
                ]
            except Exception:
                times = []

            try:
                url = 'https://irsa.ipac.caltech.edu/cgi-bin/ZTF/nph_light_curves?POS=CIRCLE ' + str(
                    ra) + ' ' + str(dec) + ' ' + str(radius) + '&FORMAT=CSV'
                response = requests.get(url,
                                        timeout=20,
                                        auth=(username, password))

                content = list(
                    csv.reader(response.content.decode('utf-8').splitlines(),
                               delimiter=','))
                light = np.array(content)

                if len(light) > 1:
                    #mjd, mag, magerr, filter
                    lightcurve = np.c_[light[1:, 3], light[1:, 4],
                                       light[1:, 5], light[1:, 7]]

                    for line in lightcurve:
                        try:
                            jd = Time(float(line[0]) + 2400000.5,
                                      format='jd',
                                      scale='utc')

                            mag = float(line[1])
                            emag = float(line[2])

                            filt = filters[line[-1]]
                            value = {
                                'magnitude': mag,
                                'filter': filt,
                                'error': emag
                            }

                            if jd.value not in times:

                                rd, _ = ReducedDatum.objects.get_or_create(
                                    timestamp=jd.to_datetime(
                                        timezone=TimezoneInfo()),
                                    value=value,
                                    source_name='ZTFDR3',
                                    source_location='IRSA',
                                    data_type='photometry',
                                    target=target)

                                rd.save()

                        except (ValueError, KeyError):
                            # skip malformed rows and unknown filters
                            pass

            except Exception:
                print('Cannot connect to IRSA')
Example 28
    def find_and_ingest_photometry(self, events, targets):
        '''
        Searches the ASAS-SN photometry database using RA and Dec of photometry candidates and a 2 arcminute radius
        Creates and saves a ReducedDatum object of the given Target and its associated photometry data
        '''
        i = 0
        lightcurvelinks = []
        lightcurvepartlinks = []
        indices_with_photometry_data = []
        rd_list = []

        while(i < len(events)):
            samplera = events[i][2]
            sampledec = events[i][3]
            sampleralist = samplera.split(':')
            sampledeclist = sampledec.split(':')
            photometryurl = os.path.join("https://asas-sn.osu.edu/photometry?utf8=%E2%9C%93&ra="
                                            + sampleralist[0] + "%3A"+sampleralist[1] + "%3A" + sampleralist[2] + "&dec=" + sampledeclist[0]
                                            + "%3A"+sampledeclist[1] + "%3A"+sampledeclist[2]
                                            + "&radius=.033333&vmag_min=&vmag_max=&epochs_min=&epochs_max=&rms_min=&rms_max=&sort_by=raj2000")
            html_page = urllib.request.urlopen(photometryurl)
            soup = BeautifulSoup(html_page, "lxml")

            for link in soup.findAll('a'):
                s = str(link.get('href'))
                if('/photometry/' in s):
                    lightcurvepartlinks.append(link.get('href'))
                    indices_with_photometry_data.append(i)
            i = i + 1

        for partlink in lightcurvepartlinks:
            lightcurvelinks.append(os.path.join('https://asas-sn.osu.edu' + partlink))

        '''
        Reads links with photometry data
        '''
        k = 0
        for link in lightcurvelinks:
            running = True
            hjd = []
            ut_date = []
            camera = []
            myfilter = []
            mag = []
            mag_error = []
            flux = []
            flux_error = []
            '''
            Parses through each page of data, starting at page 1
            '''
            i = 1
            while(running == True):
                functional_link = os.path.join(link+"?page=" + str(i))
                table = []
                try:

                    page = requests.get(functional_link)
                    doc = lh.fromstring(page.content)
                    tr_elements = doc.xpath('//tr')
                    h = 0
                    for t in tr_elements[0]:
                        h += 1
                        content = t.text_content()
                        table.append((content, []))
                    for m in range(1, len(tr_elements)):
                        row = tr_elements[m]
                        '''
                        If row is not of size 8, the data is not from the right table
                        '''
                        if len(row) != 8:
                            break
                        h = 0
                        for t in row.iterchildren():
                            data = t.text_content()
                            if h>0:
                                try:
                                    data = int(data)
                                except:
                                    pass
                            table[h][1].append(data)
                            h += 1
                    # collect the parsed columns once per page (the original
                    # re-appended them after every row, duplicating data)
                    for element in table[0][1]:
                        hjd.append(element)
                    for element in table[1][1]:
                        ut_date.append(element)
                    for element in table[2][1]:
                        camera.append(element)
                    for element in table[3][1]:
                        myfilter.append(element)
                    for element in table[4][1]:
                        mag.append(float(element))
                    for element in table[5][1]:
                        mag_error.append(float(element))
                    for element in table[6][1]:
                        flux.append(element)
                    for element in table[7][1]:
                        flux_error.append(element)

                except IndexError:
                    running = False
                    break
                i = i + 1
            n = 0
            while(n < len(hjd)):
                data = {'magnitude': mag[n], 'filter': myfilter[n],
                        'error': mag_error[n]}
                jd = Time(float(hjd[n]), format='jd', scale='utc')
                index = indices_with_photometry_data[k]
                target = targets[index]
                try:
                    times = [Time(i.timestamp).jd
                             for i in ReducedDatum.objects.filter(target=target)
                             if i.data_type == 'photometry']
                except Exception:
                    times = []
                if jd.value not in times:
                    rd, _ = ReducedDatum.objects.get_or_create(
                            timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                            value=data,
                            source_name='ASAS-SN',
                            source_location='ASAS-SN',
                            data_type='photometry',
                            target=target)
                    rd.save()
                    rd_list.append(rd)

                n = n + 1  # repeats for all of the data points on the link for a specific target
            k = k + 1  # repeats for all targets
        return rd_list
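The per-point duplicate check above (also used in Examples 24 and 27) rebuilds the `times` list for every data point. A sketch of the same idea hoisted into a helper with a set lookup, assuming the same models as above:

    def existing_jds(target):
        # one query per target instead of one per data point
        return {Time(rd.timestamp).jd
                for rd in ReducedDatum.objects.filter(target=target,
                                                      data_type='photometry')}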
Example 29
    Generates a table of the last exposure for every unique TILEID.
    Mainly serves as a helper function for get_surveyprogress()

    Args:
        exposures: Table of exposures

    Returns Table object
    '''
    def last(arr):
        return arr[-1]

    return exposures.group_by("TILEID").groups.aggregate(last)


utc_offset = -7 * u.hour
tzone = TimezoneInfo(utc_offset=utc_offset)
t1 = Time(58821, format='mjd', scale='utc')
t = t1.to_datetime(timezone=tzone)


def get_progress(exposures, tiles, program):
    '''Get survey and tile progress for a given program

    Args:
        exposures: Table with columns PROGRAM, TILEID, MJD
        tiles: Table with columns TILEID, EXPOSEFAC
        program: str program name to filter

    Returns (time, survey_progress, tile_progress)

    time = array of datetime objects;
Example 30
# create moon animation frame
moonframe = tk.Frame(window, width=framewidth, height=frameheight / 4)
moonframe.grid_propagate(0)
moonframe.place(x=0, y=0)

# create time control frame
timeframe = tk.Frame(window, width=framewidth, height=frameheight * 3 / 4)
timeframe.grid_propagate(0)
timeframe.place(x=0, y=frameheight / 4)

# Get current time.  Default timezone set to EST.
tzonelist = ['UTC', 'EST', 'EDT', 'Others coming soon']
tvar = tk.StringVar(timeframe)
tvar.set(tzonelist[1])  # 'EST', matching the default offset below
tzone = 'EST'
utc_offset = TimezoneInfo(utc_offset=-5 * u.hour)  # EST by default
ttemp = dt.datetime.now(tz=utc_offset)
t = Time(ttemp)

# plot the moons
directory = './'
moons = load_file(directory + 'jup365.bsp')
io = moons['IO']
europa = moons['EUROPA']
ganymede = moons['GANYMEDE']
callisto = moons['CALLISTO']
jupiter = moons['JUPITER BARYCENTER']
#ts = load.timescale()
ts = load.timescale(builtin=True)
sky = tk.Canvas(moonframe, width=framewidth, height=frameheight / 4)
sky.config(bg="black")