Example #1
	def main(self):
		
		recentmjd = date_to_mjd(datetime.datetime.utcnow() - datetime.timedelta(14))
		survey_obs = SurveyObservation.objects.filter(obs_mjd__gt=recentmjd)
		field_pk = survey_obs.values('survey_field').distinct()
		survey_fields = SurveyField.objects.filter(pk__in = field_pk).select_related()
		
		for s in survey_fields:

			# dec_cen is in degrees, so convert to radians before taking the cosine
			width_corr = 3.1/np.abs(np.cos(np.radians(s.dec_cen)))
			ra_offset = Angle(width_corr/2., unit=u.deg)
			dec_offset = Angle(3.1/2., unit=u.deg)
			sc = SkyCoord(s.ra_cen,s.dec_cen,unit=u.deg)
			ra_min = sc.ra - ra_offset
			ra_max = sc.ra + ra_offset
			dec_min = sc.dec - dec_offset
			dec_max = sc.dec + dec_offset
			result_set = []
			for page in range(500):
				print(s,page)
				marsurl = '%s/?format=json&sort_value=jd&sort_order=desc&ra__gt=%.7f&ra__lt=%.7f&dec__gt=%.7f&dec__lt=%.7f&jd__gt=%i&rb__gt=0.5&page=%i'%(
					self.options.ztfurl,ra_min.deg,ra_max.deg,dec_min.deg,dec_max.deg,recentmjd+2400000.5,page+1)
				client = coreapi.Client()
				try:
					schema = client.get(marsurl)
					if 'results' in schema.keys():
						result_set = np.append(result_set,schema['results'])
					else: break
				except: break
				#break

			transientdict,nsn = self.parse_data(result_set,date_to_mjd(datetime.datetime.utcnow() - datetime.timedelta(2)))
			print('uploading %i transient detections'%nsn)
			self.send_data(transientdict)
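A minimal, self-contained sketch of the same box query: it reproduces the RA window widening by 1/cos(dec) and the MJD-to-JD conversion (JD = MJD + 2400000.5) used above. The base URL and field values are placeholders; the query parameters simply mirror the ones in the snippet.

    import numpy as np
    from astropy import units as u
    from astropy.coordinates import Angle, SkyCoord

    def mars_box_url(base_url, ra_cen, dec_cen, min_mjd, page=1, fov_deg=3.1, rb_min=0.5):
        """Build a MARS-style box-search URL around a field centre given in degrees."""
        # widen the RA window so the box covers a fixed area on the sky
        width_corr = fov_deg / np.abs(np.cos(np.radians(dec_cen)))
        ra_offset = Angle(width_corr / 2., unit=u.deg)
        dec_offset = Angle(fov_deg / 2., unit=u.deg)
        sc = SkyCoord(ra_cen, dec_cen, unit=u.deg)
        return ('%s/?format=json&sort_value=jd&sort_order=desc'
                '&ra__gt=%.7f&ra__lt=%.7f&dec__gt=%.7f&dec__lt=%.7f'
                '&jd__gt=%.5f&rb__gt=%.2f&page=%i' % (
                    base_url,
                    (sc.ra - ra_offset).deg, (sc.ra + ra_offset).deg,
                    (sc.dec - dec_offset).deg, (sc.dec + dec_offset).deg,
                    min_mjd + 2400000.5,  # MJD -> JD
                    rb_min, page))

    # e.g. mars_box_url('https://mars.lco.global', 150.0, 2.2, 59000.0)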
Example #2
def execute_after_save(sender, instance, created, *args, **kwargs):

	tag_K2 = False

	if created:
		print("Transient Created: %s" % instance.name)
		print("Internal Survey: %s" % instance.internal_survey)

		if tag_K2:
			is_k2_C16_validated, C16_msg = IsK2Pixel(instance.ra, instance.dec, "16")
			is_k2_C17_validated, C17_msg = IsK2Pixel(instance.ra, instance.dec, "17")
			is_k2_C19_validated, C19_msg = IsK2Pixel(instance.ra, instance.dec, "19")

			print("K2 C16 Val: %s; K2 Val Msg: %s" % (is_k2_C16_validated, C16_msg))
			print("K2 C17 Val: %s; K2 Val Msg: %s" % (is_k2_C17_validated, C17_msg))
			print("K2 C19 Val: %s; K2 Val Msg: %s" % (is_k2_C19_validated, C19_msg))

			if is_k2_C16_validated:
				k2c16tag = TransientTag.objects.get(name='K2 C16')
				instance.k2_validated = True
				instance.k2_msg = C16_msg
				instance.tags.add(k2c16tag)
			
			elif is_k2_C17_validated:
				k2c17tag = TransientTag.objects.get(name='K2 C17')
				instance.k2_validated = True
				instance.k2_msg = C17_msg
				instance.tags.add(k2c17tag)

			elif is_k2_C19_validated:
				k2c19tag = TransientTag.objects.get(name='K2 C19')
				instance.k2_validated = True
				instance.k2_msg = C19_msg
				instance.tags.add(k2c19tag)

		print('Checking TESS')
		if instance.disc_date:
			TESSFlag = tess_obs(instance.ra,instance.dec,date_to_mjd(instance.disc_date)+2400000.5)
			if TESSFlag:
				try:
					tesstag = TransientTag.objects.get(name='TESS')
					instance.tags.add(tesstag)
				except: pass
		else:
			TESSFlag = tess_obs(instance.ra,instance.dec,date_to_mjd(instance.modified_date)+2400000.5)
			if TESSFlag:
				try:
					tesstag = TransientTag.objects.get(name='TESS')
					instance.tags.add(tesstag)
				except: pass

		print('Checking Thacher')
		if thacher_transient_search(instance.ra,instance.dec):
			try:
				thachertag = TransientTag.objects.get(name='Thacher')
				instance.tags.add(thachertag)
			except: pass
			
		instance.save()
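For this handler to run, it must be connected to the model's post_save signal; a minimal sketch of the wiring, assuming Transient is the model whose creation should trigger it:

    from django.db.models.signals import post_save

    # connect the receiver defined above to Transient saves
    post_save.connect(execute_after_save, sender=Transient)

Note that the handler ends with instance.save(), which fires post_save again; because all of the tagging work sits under the "if created:" guard and the second save is not a creation, this does not recurse.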
Example #3
    def main(self):

        recentmjd = date_to_mjd(datetime.datetime.utcnow() -
                                datetime.timedelta(7))
        survey_obs = SurveyObservation.objects.filter(obs_mjd__gt=recentmjd)
        field_pk = survey_obs.values('survey_field').distinct()
        survey_fields = SurveyField.objects.filter(
            pk__in=field_pk).select_related()

        for s in survey_fields:

            sc = SkyCoord(s.ra_cen, s.dec_cen, unit=u.deg)

            total = 10000
            records_per_page = 10000
            page = 1
            sortBy = "firstmjd"
            classearly = 19
            pclassearly = 0.5

            params = {
                #"total": total,
                "records_per_pages": records_per_page,
                "page": page,
                "sortBy": sortBy,
                "query_parameters": {
                    "filters": {
                        "pclassearly": pclassearly,
                        "classearly": classearly,
                    },
                    "dates": {
                        "firstmjd": {
                            "min": date_to_mjd(datetime.datetime.utcnow()) - 3,
                        }
                    },
                    "coordinates": {
                        "ra": sc.ra.deg,
                        "dec": sc.dec.deg,
                        "sr": 1.65 * 3600
                    }
                }
            }

            client = AlerceAPI()
            try:
                result_set = client.query(params)  #, format='pandas')
            except:
                continue

            if not len(result_set): continue

            transientdict, nsn = self.parse_data(result_set)
            print('uploading %i transients' % nsn)
            self.send_data(transientdict)
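date_to_mjd is not shown in these examples; a minimal stand-in using astropy, assuming it simply converts a UTC datetime to Modified Julian Date (consistent with the "+ 2400000.5" JD offsets used elsewhere):

    import datetime
    from astropy.time import Time

    def date_to_mjd(date):
        """Convert a datetime (or ISO string) to Modified Julian Date."""
        return Time(date, scale='utc').mjd

    # e.g. date_to_mjd(datetime.datetime.utcnow()) - 3 is "three days ago" in MJD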
Example #4
    def LikelyYSEField(self):
        d = self.dec * np.pi / 180
        width_corr = 3.3 / np.abs(np.cos(d))
        # Define the tile offsets:
        ra_offset = cd.Angle(width_corr / 2., unit=u.deg)
        dec_offset = cd.Angle(3.3 / 2., unit=u.deg)

        sf = SurveyField.objects.filter(~Q(obs_group__name='ZTF')).\
          filter((Q(ra_cen__gt = self.ra-ra_offset.degree) &
            Q(ra_cen__lt = self.ra+ra_offset.degree) &
            Q(dec_cen__gt = self.dec-dec_offset.degree) &
            Q(dec_cen__lt = self.dec+dec_offset.degree)))

        if len(sf):
            so = SurveyObservation.objects.filter(
                survey_field__field_id=sf[0].field_id).filter(
                    obs_mjd__isnull=False).order_by('-obs_mjd')
            if len(so):
                time_since_last_obs = date_to_mjd(
                    datetime.datetime.utcnow()) - so[0].obs_mjd
            else:
                time_since_last_obs = None
            return sf[0].field_id, sf[0].ra_cen, sf[
                0].dec_cen, time_since_last_obs
        else:
            return None, None, None, None
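A short, hypothetical usage sketch based only on the method's return signature (here t stands in for whatever object defines LikelyYSEField):

    field_id, ra_cen, dec_cen, days_since_obs = t.LikelyYSEField()
    if field_id is None:
        print('not inside an active survey field')
    elif days_since_obs is not None:
        print('field %s, last observed %.1f days ago' % (field_id, days_since_obs))
    else:
        print('field %s has no recorded observations yet' % field_id)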
Example #5
	def main(self):
		
		recentmjd = date_to_mjd(datetime.datetime.utcnow() - datetime.timedelta(7))
		survey_obs = SurveyObservation.objects.filter(obs_mjd__gt=recentmjd)
		field_pk = survey_obs.values('survey_field').distinct()
		survey_fields = SurveyField.objects.filter(pk__in = field_pk).select_related()
		
		for s in survey_fields:

			# dec_cen is in degrees, so convert to radians before taking the cosine
			width_corr = 3.1/np.abs(np.cos(np.radians(s.dec_cen)))
			ra_offset = Angle(width_corr/2., unit=u.deg)
			dec_offset = Angle(3.1/2., unit=u.deg)
			sc = SkyCoord(s.ra_cen,s.dec_cen,unit=u.deg)
			ra_min = sc.ra - ra_offset
			ra_max = sc.ra + ra_offset
			dec_min = sc.dec - dec_offset
			dec_max = sc.dec + dec_offset
			
			query = query_template.copy()
			query['query']['bool']['must'][0]['range']['ra']['gte'] = ra_min.deg
			query['query']['bool']['must'][0]['range']['ra']['lte'] = ra_max.deg
			query['query']['bool']['must'][1]['range']['dec']['gte'] = dec_min.deg
			query['query']['bool']['must'][1]['range']['dec']['lte'] = dec_max.deg
			query['query']['bool']['must'][2]['range']['properties.ztf_rb']['gte'] = 0.5
			query['query']['bool']['must'][3]['range']['properties.ztf_jd']['gte'] = recentmjd+2400000.5
			result_set = search(query)
			
			transientdict,nsn = self.parse_data(result_set)
			print('uploading %i transients'%nsn)
			self.send_data(transientdict)
			
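query_template is not defined in the snippet, but the index-based assignments imply an Elasticsearch-style bool/must query with four range clauses in a fixed order. A hypothetical template consistent with that usage (the field names come from the assignments above; everything else is assumed):

    query_template = {
        'query': {
            'bool': {
                'must': [
                    {'range': {'ra': {'gte': None, 'lte': None}}},
                    {'range': {'dec': {'gte': None, 'lte': None}}},
                    {'range': {'properties.ztf_rb': {'gte': None}}},
                    {'range': {'properties.ztf_jd': {'gte': None}}},
                ]
            }
        }
    }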
Example #6
    def main(self, transient_name=None, update_forced=False):

        # candidate transients
        min_date = datetime.datetime.utcnow() - datetime.timedelta(
            minutes=self.options.max_time_minutes)
        nowmjd = date_to_mjd(datetime.datetime.utcnow())
        #transient_name='2020sck'
        if transient_name is None and not update_forced:
            transients = Transient.objects.filter(
                created_date__gte=min_date).filter(
                    ~Q(tags__name='YSE')
                    & ~Q(tags__name='YSE Stack')).order_by('-created_date')
        elif update_forced:
            min_date_forcedphot = datetime.datetime.utcnow(
            ) - datetime.timedelta(days=7)
            transients = Transient.objects.filter(
                ~Q(tags__name='YSE') & ~Q(tags__name='YSE Stack')
                & Q(transientphotometry__transientphotdata__obs_date__gt=
                    min_date_forcedphot)
                & Q(disc_date__gt=datetime.datetime.utcnow() -
                    datetime.timedelta(days=1000))).distinct()
        else:
            transients = Transient.objects.filter(name=transient_name)

        # candidate survey images
        survey_images = SurveyObservation.objects.filter(status__name='Successful').\
         filter(obs_mjd__gt=nowmjd-self.options.max_days_yseimage).filter(diff_id__isnull=False)

        transient_list,ra_list,dec_list,diff_id_list,warp_id_list,mjd_list,filt_list = \
         [],[],[],[],[],[],[]
        for t in transients:

            sit = survey_images.filter(
                Q(survey_field__ra_cen__gt=t.ra - 1.55)
                | Q(survey_field__ra_cen__lt=t.ra + 1.55)
                | Q(survey_field__dec_cen__gt=t.dec - 1.55)
                | Q(survey_field__dec_cen__lt=t.dec + 1.55))

            if len(sit):
                sct = SkyCoord(t.ra, t.dec, unit=u.deg)
            for s in sit:
                sc = SkyCoord(s.survey_field.ra_cen,
                              s.survey_field.dec_cen,
                              unit=u.deg)
                if sc.separation(sct).deg < 1.65:
                    transient_list += [t.name]
                    ra_list += [t.ra]
                    dec_list += [t.dec]
                    diff_id_list += [s.diff_id]
                    warp_id_list += [s.warp_id]
                    mjd_list += [s.obs_mjd]
                    filt_list += [s.photometric_band.name]
        nt = len(np.unique(transient_list))
        print('{} transients to upload!'.format(nt))
        if nt == 0: return 0
        print('trying to upload transients:')
        for t in np.unique(transient_list):
            print(t)

        stamp_request_name, skycelldict = self.stamp_request(
            transient_list, ra_list, dec_list, diff_id_list, warp_id_list, [])
        print('submitted stamp request {}'.format(stamp_request_name))

        phot_request_names = self.forcedphot_request(transient_list, ra_list,
                                                     dec_list, mjd_list,
                                                     filt_list, diff_id_list,
                                                     skycelldict)
        print('submitted phot requests:')
        for prn in phot_request_names:
            print(prn)

        print(
            'jobs were submitted, waiting up to 10 minutes for them to finish')
        # wait until the jobs are done
        jobs_done = False
        tstart = time.time()
        while not jobs_done and time.time() - tstart < 600:
            print('waiting 60 seconds to check status...')
            time.sleep(60)
            done_stamp, success_stamp = self.get_status(stamp_request_name)
            doneall_phot = True
            for phot_request_name in phot_request_names:
                done_phot, success_phot = self.get_status(phot_request_name)
                if not done_phot: doneall_phot = False
            if done_stamp and doneall_phot: jobs_done = True

        if not jobs_done:
            raise RuntimeError('job timeout!')

        # get the data from the jobs
        img_dict = self.get_stamps(stamp_request_name, transient_list)

        # save to data model
        phot_dict = self.get_phot(phot_request_names, transient_list, ra_list,
                                  dec_list, img_dict)

        #write the stack jobs
        transient_list, ra_list, dec_list, stack_id_list = [], [], [], []
        for t in phot_dict.keys():
            for s, r, d in zip(phot_dict[t]['stack_id'], phot_dict[t]['ra'],
                               phot_dict[t]['dec']):
                stack_id_list += [s]
                ra_list += [r]
                dec_list += [d]
                transient_list += [t]

        stack_request_name, skycelldict = self.stamp_request(
            transient_list,
            ra_list,
            dec_list, [], [],
            stack_id_list,
            skycelldict=skycelldict)
        print('submitted stack request {}'.format(stack_request_name))

        # submit the stack jobs
        tstart = time.time()
        jobs_done = False
        while not jobs_done and time.time() - tstart < 600:
            print('waiting 60 seconds to check status...')
            time.sleep(60)
            done_stamp, success_stamp = self.get_status(stack_request_name)
            if done_stamp: jobs_done = True
        if not success_stamp:
            raise RuntimeError('jobs failed!')
        if not jobs_done:
            raise RuntimeError('job timeout!')

        # get the data from the stack job
        stack_img_dict = self.get_stamps(stack_request_name, transient_list)

        # save to data model
        self.write_to_db(phot_dict, img_dict, stack_img_dict)

        print('uploaded transients:')
        for t in img_dict.keys():
            print(t)
        return len(list(img_dict.keys()))
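The field matching in the loop above ultimately reduces to an angular-separation cut; a minimal sketch of that check with astropy, assuming coordinates in degrees and the same 1.65-degree matching radius:

    from astropy import units as u
    from astropy.coordinates import SkyCoord

    def in_field(t_ra, t_dec, field_ra_cen, field_dec_cen, radius_deg=1.65):
        """True if the transient lies within radius_deg of the field centre."""
        sct = SkyCoord(t_ra, t_dec, unit=u.deg)
        scf = SkyCoord(field_ra_cen, field_dec_cen, unit=u.deg)
        return sct.separation(scf).deg < radius_deg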
Example #7
	def do(self,debug=False):
		
		# run this under Admin
		user = User.objects.get(username='******')

		# get New, Watch, FollowupRequested, Following
		transients_to_classify = \
			Transient.objects.filter(Q(status__name = 'New') |
									 Q(status__name = 'Watch') |
									 Q(status__name = 'FollowupRequested') |
									 Q(status__name = 'Following'))

		light_curve_list_z,light_curve_list_noz,transient_list_z,transient_list_noz = [],[],[],[]
		for t in transients_to_classify: #.filter(Q(name = '2019np') | Q(name = '2019gf')):
			ra, dec, objid, redshift = t.ra, t.dec, t.name,t.z_or_hostz()

			if t.mw_ebv is not None:
				mwebv = t.mw_ebv
			else:
				mwebv = 0.0


			photdata = get_all_phot_for_transient(user, t.id)
			if not photdata: continue
			gobs = photdata.filter(band__name = 'g-ZTF')
			robs = photdata.filter(band__name = 'r-ZTF')
			if not len(gobs) and not len(robs): continue
			mjd, passband, flux, fluxerr, mag, magerr, zeropoint, photflag = \
				np.array([]),np.array([]),[],[],[],[],[],[]
			
			if redshift: transient_list_z += [t]
			else: transient_list_noz += [t]
				
			first_detection_set = False
			for obs,filt in zip([gobs.order_by('obs_date'),robs.order_by('obs_date')],['g','r']):
				for p in obs:
					if p.data_quality: continue
					if len(np.where((filt == passband) & (np.abs(mjd - date_to_mjd(p.obs_date)) < 0.001))[0]): continue

					mag += [p.mag]
					if p.mag_err:
						magerr += [p.mag_err]
						mag_err_obs = p.mag_err
					else:
						magerr += [0.001]
						mag_err_obs = 0.001

					flux_obs = 10**(-0.4*(p.mag-27.5))
					# propagate the magnitude error to flux: sigma_f = 0.4*ln(10)*f*sigma_m
					fluxerr_obs = 0.4*np.log(10)*flux_obs*mag_err_obs
					mjd = np.append(mjd,[date_to_mjd(p.obs_date)])
					flux += [flux_obs]
					fluxerr += [fluxerr_obs]
					zeropoint += [27.5]
					passband = np.append(passband,[filt])
					
					if flux_obs/fluxerr_obs > 5 and not first_detection_set:
						photflag += [6144]
						first_detection_set = True
					elif flux_obs/fluxerr_obs > 5:
						photflag += [4096]
					else: photflag += [0]
					#except: import pdb; pdb.set_trace()

			try:
				if redshift:
					light_curve_info = (mjd, flux, fluxerr, passband,
										zeropoint, photflag, ra, dec, objid, redshift, mwebv)
					light_curve_list_z += [light_curve_info,]
				else:
					light_curve_info = (mjd, flux, fluxerr, passband, zeropoint, photflag, ra, dec, objid, None, mwebv)			
					light_curve_list_noz += [light_curve_info,]
			except:
				import pdb; pdb.set_trace()

		if len(light_curve_list_noz):
			classification_noz = Classify(light_curve_list_noz, known_redshift=False, bcut=False, zcut=None)
			predictions_noz = classification_noz.get_predictions()
		if len(light_curve_list_z):
			classification_z = Classify(light_curve_list_z, known_redshift=True, bcut=False, zcut=None)
			predictions_z = classification_z.get_predictions()
			
		if debug:
			import matplotlib
			matplotlib.use('MacOSX')
			import matplotlib.pyplot as plt
			plt.ion()
			classification_z.plot_light_curves_and_classifications()

		# use the classifier/predictions that match each list (with vs. without redshift)
		for tl,classification,predictions in [
				(transient_list_z,
				 classification_z if len(light_curve_list_z) else None,
				 predictions_z if len(light_curve_list_z) else None),
				(transient_list_noz,
				 classification_noz if len(light_curve_list_noz) else None,
				 predictions_noz if len(light_curve_list_noz) else None)]:
			if predictions is None: continue
			for t,i in zip(tl,range(len(tl))):
				best_predictions = predictions[0][i][-1,:]

				adjusted_best_predictions = np.zeros(10)
				idx,outclassnames,PIa = 0,[],0
				for j in range(len(classification.class_names)):
					if classification.class_names[j] == 'Pre-explosion': continue
					elif classification.class_names[j].startswith('SNIa'): PIa += best_predictions[j]
					else:
						outclassnames += [classification.class_names[j]]
						adjusted_best_predictions[idx] = best_predictions[j]
						idx += 1
				outclassnames += ['SN Ia']
				outclassnames = np.array(outclassnames)
				adjusted_best_predictions[9] = PIa

				print(t.name,outclassnames[adjusted_best_predictions == np.max(adjusted_best_predictions)][0])
				transient_class = outclassnames[adjusted_best_predictions == np.max(adjusted_best_predictions)][0]
				photo_class = TransientClass.objects.filter(name = classdict[transient_class])

				if len(photo_class):
					t.photo_class = photo_class[0]
					t.save()
				else:
					print('class %s not in DB'%classdict[transient_class])
					raise RuntimeError('class %s not in DB'%classdict[transient_class])
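The magnitude-to-flux conversion used here (and in the parse_data examples below) is the standard zero-point relation; a small self-contained version, assuming the fixed 27.5 mag zero point used throughout:

    import numpy as np

    def mag_to_flux(mag, mag_err, zeropoint=27.5):
        """Convert a magnitude and its error to flux on the given zero point."""
        flux = 10 ** (-0.4 * (mag - zeropoint))
        # standard error propagation: sigma_f = 0.4 * ln(10) * f * sigma_m
        flux_err = 0.4 * np.log(10) * flux * mag_err
        return flux, flux_err

    # e.g. mag_to_flux(19.0, 0.1) -> (about 2512, about 231)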
Example #8
    def parse_data(self, result_set):
        client = AlerceAPI()

        try:
            transientdict = {}
            obj, ra, dec = [], [], []
            nsn = 0
            for i, s in enumerate(result_set):
                print(s['oid'])
                sc = SkyCoord(s['meanra'], s['meandec'], unit=u.deg)
                try:
                    ps_prob = get_ps_score(sc.ra.deg, sc.dec.deg)
                except:
                    ps_prob = None

                mw_ebv = float('%.3f' % (sfd(sc) * 0.86))

                if s['oid'] not in transientdict.keys():
                    tdict = {
                        'name': s['oid'],
                        'status': 'New',
                        'ra': s['meanra'],
                        'dec': s['meandec'],
                        'obs_group': 'ZTF',
                        'tags': ['ZTF in YSE Fields'],
                        'disc_date': mjd_to_date(s['firstmjd']),
                        'mw_ebv': mw_ebv,
                        'point_source_probability': ps_prob
                    }
                    obj += [s['oid']]
                    ra += [s['meanra']]
                    dec += [s['meandec']]

                    PhotUploadAll = {"mjdmatchmin": 0.01, "clobber": False}
                    photometrydict = {
                        'instrument': 'ZTF-Cam',
                        'obs_group': 'ZTF',
                        'photdata': {}
                    }

                else:
                    tdict = transientdict[s['oid']]
                    if s['firstmjd'] < date_to_mjd(tdict['disc_date']):
                        tdict['disc_date'] = mjd_to_date(s['firstmjd'])

                    PhotUploadAll = transientdict[
                        s['oid']]['transientphotometry']
                    photometrydict = PhotUploadAll['ZTF']

                SN_det = client.get_detections(s['oid'])  #, format='pandas')
                filtdict = {1: 'g', 2: 'r'}
                for p in SN_det:
                    flux = 10**(-0.4 * (p['magpsf'] - 27.5))
                    flux_err = np.log(10) * 0.4 * flux * p['sigmapsf']

                    phot_upload_dict = {
                        'obs_date': mjd_to_date(p['mjd']),
                        'band': '%s-ZTF' % filtdict[p['fid']],
                        'groups': [],
                        'mag': p['magpsf'],
                        'mag_err': p['sigmapsf'],
                        'flux': flux,
                        'flux_err': flux_err,
                        'data_quality': 0,
                        'forced': 0,
                        'flux_zero_point': 27.5,
                        # might need to fix this later
                        'discovery_point': 0,  #disc_point,
                        'diffim': 1
                    }
                    photometrydict['photdata']['%s_%i' % (mjd_to_date(
                        p['mjd']), i)] = phot_upload_dict

                PhotUploadAll['ZTF'] = photometrydict
                transientdict[s['oid']] = tdict
                transientdict[s['oid']]['transientphotometry'] = PhotUploadAll

                nsn += 1

                #if s['properties']['ztf_object_id'] == 'ZTF18abrfjdh':
                #	import pdb; pdb.set_trace()
                #if s['properties']['passband'] == 'R' and s['properties']['ztf_object_id'] == 'ZTF18abrfjdh':
                #	import pdb; pdb.set_trace()
        except Exception as e:
            print(e)
            exc_type, exc_obj, exc_tb = sys.exc_info()
            print(exc_tb.tb_lineno)

        print(nsn)
        return transientdict, nsn
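The sfd(sc) * 0.86 line assumes a callable SFD dust map; with the dustmaps package (an assumption, not confirmed by the snippet) it would look like the sketch below, 0.86 being the Schlafly & Finkbeiner (2011) recalibration of the SFD E(B-V) values:

    from astropy import units as u
    from astropy.coordinates import SkyCoord
    from dustmaps.sfd import SFDQuery

    sfd = SFDQuery()  # requires the SFD maps to have been fetched by dustmaps

    sc = SkyCoord(150.0, 2.2, unit=u.deg)      # example coordinates
    mw_ebv = float('%.3f' % (sfd(sc) * 0.86))  # recalibrated Milky Way E(B-V)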
Example #9
    def parse_data(self, result_set):
        transientdict = {}
        obj, ra, dec = [], [], []
        nsn = 0
        for i, s in enumerate(result_set):
            #if 'astrorapid_skipped' in s['properties'].keys(): continue
            if 'streams' not in s or 'yse_candidate_test' not in s['streams']:
                continue

            #if s['properties']['ztf_object_id'] == 'ZTF20aaykvgb': import pdb; pdb.set_trace()
            #print(s['properties']['snfilter_known_exgal'])
            hostdict = {}  # ensure hostdict is defined even if no catalog match is found below
            if s['properties']['snfilter_known_exgal'] == 1:
                # name, ra, dec, redshift
                # print(s['properties']['ztf_object_id'])
                antareslink = '{}/loci/{}/catalog-matches'.format(
                    self.options.antaresapi, s['locus_id'])
                r = requests.get(antareslink)
                data = json.loads(r.text)

                for k in _allowed_galaxy_catalogs.keys():
                    if k in data['result'].keys():
                        hostdict = {
                            'name':
                            k + '_' + str(data['result'][k][0][
                                _allowed_galaxy_catalogs[k]['name_key']]),
                            'ra':
                            data['result'][k][0][_allowed_galaxy_catalogs[k]
                                                 ['ra_key']],
                            'dec':
                            data['result'][k][0][_allowed_galaxy_catalogs[k]
                                                 ['dec_key']]
                        }
                        if _allowed_galaxy_catalogs[k][
                                'redshift_key'] is not None:
                            hostdict['redshift'] = data['result'][k][0][
                                _allowed_galaxy_catalogs[k]['redshift_key']]

            else:
                hostdict = {}

            sc = SkyCoord(s['properties']['ztf_ra'],
                          s['properties']['ztf_dec'],
                          unit=u.deg)
            try:
                ps_prob = get_ps_score(sc.ra.deg, sc.dec.deg)
            except:
                ps_prob = None

            mw_ebv = float('%.3f' % (sfd(sc) * 0.86))

            if s['properties']['ztf_object_id'] not in transientdict.keys():
                tdict = {
                    'name': s['properties']['ztf_object_id'],
                    'status': 'New',
                    'ra': s['properties']['ztf_ra'],
                    'dec': s['properties']['ztf_dec'],
                    'obs_group': 'ZTF',
                    'tags': ['ZTF in YSE Fields'],
                    'disc_date':
                    mjd_to_date(s['properties']['ztf_jd'] - 2400000.5),
                    'mw_ebv': mw_ebv,
                    'point_source_probability': ps_prob,
                    'host': hostdict
                }
                obj += [s['properties']['ztf_object_id']]
                ra += [s['properties']['ztf_ra']]
                dec += [s['properties']['ztf_dec']]

                PhotUploadAll = {"mjdmatchmin": 0.01, "clobber": False}
                photometrydict = {
                    'instrument': 'ZTF-Cam',
                    'obs_group': 'ZTF',
                    'photdata': {}
                }

            else:
                tdict = transientdict[s['properties']['ztf_object_id']]
                if s['properties']['ztf_jd'] - 2400000.5 < date_to_mjd(
                        tdict['disc_date']):
                    tdict['disc_date'] = mjd_to_date(
                        s['properties']['ztf_jd'] - 2400000.5)

                PhotUploadAll = transientdict[
                    s['properties']['ztf_object_id']]['transientphotometry']
                photometrydict = PhotUploadAll['ZTF']

            flux = 10**(-0.4 * (s['properties']['ztf_magpsf'] - 27.5))
            flux_err = np.log(
                10) * 0.4 * flux * s['properties']['ztf_sigmapsf']

            phot_upload_dict = {
                'obs_date': mjd_to_date(s['properties']['ztf_jd'] - 2400000.5),
                'band': '%s-ZTF' % s['properties']['passband'].lower(),
                'groups': [],
                'mag': s['properties']['ztf_magpsf'],
                'mag_err': s['properties']['ztf_sigmapsf'],
                'flux': flux,
                'flux_err': flux_err,
                'data_quality': 0,
                'forced': 0,
                'flux_zero_point': 27.5,
                # might need to fix this later
                'discovery_point': 0,  #disc_point,
                'diffim': 1
            }
            photometrydict['photdata']['%s_%i' %
                                       (mjd_to_date(s['properties']['ztf_jd'] -
                                                    2400000.5),
                                        i)] = phot_upload_dict

            PhotUploadAll['ZTF'] = photometrydict
            transientdict[s['properties']['ztf_object_id']] = tdict
            transientdict[s['properties']['ztf_object_id']][
                'transientphotometry'] = PhotUploadAll

            nsn += 1

            #if s['properties']['ztf_object_id'] == 'ZTF18abrfjdh':
            #	import pdb; pdb.set_trace()
            #if s['properties']['passband'] == 'R' and s['properties']['ztf_object_id'] == 'ZTF18abrfjdh':
            #	import pdb; pdb.set_trace()

        return transientdict, nsn
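_allowed_galaxy_catalogs is not shown; from the way it is indexed, it must map a catalog name to the keys that hold a host's identifier, RA, Dec, and (optionally) redshift in the ANTARES catalog-match payload. A hypothetical entry with that shape (the catalog and key names are placeholders):

    _allowed_galaxy_catalogs = {
        # catalog name as it appears in data['result']
        'example_catalog': {
            'name_key': 'objid',   # column holding the host identifier
            'ra_key': 'ra',        # column holding the host RA (deg)
            'dec_key': 'dec',      # column holding the host Dec (deg)
            'redshift_key': 'z',   # or None if the catalog has no redshift
        },
    }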
Example #10
    def do(self, tag_K2=False):

        try:
            nowdate = datetime.datetime.utcnow() - datetime.timedelta(1)
            transients = Transient.objects.filter(created_date__gt=nowdate)
            for t in transients:
                print('checking transient %s' % t)
                if tag_K2:
                    is_k2_C16_validated, C16_msg = IsK2Pixel(t.ra, t.dec, "16")
                    is_k2_C17_validated, C17_msg = IsK2Pixel(t.ra, t.dec, "17")
                    is_k2_C19_validated, C19_msg = IsK2Pixel(t.ra, t.dec, "19")

                    print("K2 C16 Val: %s; K2 Val Msg: %s" %
                          (is_k2_C16_validated, C16_msg))
                    print("K2 C17 Val: %s; K2 Val Msg: %s" %
                          (is_k2_C17_validated, C17_msg))
                    print("K2 C19 Val: %s; K2 Val Msg: %s" %
                          (is_k2_C19_validated, C19_msg))

                    if is_k2_C16_validated:
                        k2c16tag = TransientTag.objects.get(name='K2 C16')
                        t.k2_validated = True
                        t.k2_msg = C16_msg
                        t.tags.add(k2c16tag)

                    elif is_k2_C17_validated:
                        k2c17tag = TransientTag.objects.get(name='K2 C17')
                        t.k2_validated = True
                        t.k2_msg = C17_msg
                        t.tags.add(k2c17tag)

                    elif is_k2_C19_validated:
                        k2c19tag = TransientTag.objects.get(name='K2 C19')
                        t.k2_validated = True
                        t.k2_msg = C19_msg
                        t.tags.add(k2c19tag)

                tag_TESS, tag_Thacher = True, True  #False,False
                print('Checking TESS')
                if tag_TESS and t.disc_date:
                    TESSFlag = tess_obs(t.ra, t.dec,
                                        date_to_mjd(t.disc_date) + 2400000.5)
                    if TESSFlag:
                        print('tagging %s' % t)
                        try:
                            tesstag = TransientTag.objects.get(name='TESS')
                            t.tags.add(tesstag)
                        except:
                            pass
                else:
                    TESSFlag = tess_obs(
                        t.ra, t.dec,
                        date_to_mjd(t.modified_date) + 2400000.5)
                    if TESSFlag:
                        print('tagging %s' % t)
                        try:
                            tesstag = TransientTag.objects.get(name='TESS')
                            t.tags.add(tesstag)
                        except:
                            pass

                print('Checking Thacher')
                if tag_Thacher and thacher_transient_search(t.ra, t.dec):
                    try:
                        thachertag = TransientTag.objects.get(name='Thacher')
                        t.tags.add(thachertag)
                    except:
                        pass

                t.save()

        except Exception as e:
            print(e)
Example #11
	def parse_data(self,result_set):
		transientdict = {}
		obj,ra,dec = [],[],[]
		nsn = 0
		for i,s in enumerate(result_set):
			if 'astrorapid_skipped' in s['properties'].keys(): continue
			
			sc = SkyCoord(s['properties']['ztf_ra'],s['properties']['ztf_dec'],unit=u.deg)
			try:
				ps_prob = get_ps_score(sc.ra.deg,sc.dec.deg)
			except:
				ps_prob = None

			mw_ebv = float('%.3f'%(sfd(sc)*0.86))

			if s['properties']['ztf_object_id'] not in transientdict.keys():
				tdict = {'name':s['properties']['ztf_object_id'],
						 'status':'New',
						 'ra':s['properties']['ztf_ra'],
						 'dec':s['properties']['ztf_dec'],
						 'obs_group':'ZTF',
						 'tags':['ZTF'],
						 'disc_date':mjd_to_date(s['properties']['ztf_jd']-2400000.5),
						 'mw_ebv':mw_ebv,
						 'point_source_probability':ps_prob}
				obj += [s['properties']['ztf_object_id']]
				ra += [s['properties']['ztf_ra']]
				dec += [s['properties']['ztf_dec']]

				PhotUploadAll = {"mjdmatchmin":0.01,
								 "clobber":False}
				photometrydict = {'instrument':'ZTF-Cam',
								  'obs_group':'ZTF',
								  'photdata':{}}

			else:
				tdict = transientdict[s['properties']['ztf_object_id']]
				if s['properties']['ztf_jd']-2400000.5 < date_to_mjd(tdict['disc_date']):
					tdict['disc_date'] = mjd_to_date(s['properties']['ztf_jd']-2400000.5)

				PhotUploadAll = transientdict[s['properties']['ztf_object_id']]['transientphotometry']
				photometrydict = PhotUploadAll['ZTF']
								
			flux = 10**(-0.4*(s['properties']['ztf_magpsf']-27.5))
			flux_err = np.log(10)*0.4*flux*s['properties']['ztf_sigmapsf']

			phot_upload_dict = {'obs_date':mjd_to_date(s['properties']['ztf_jd']-2400000.5),
								'band':'%s-ZTF'%s['properties']['passband'].lower(),
								'groups':[],
								'mag':s['properties']['ztf_magpsf'],
								'mag_err':s['properties']['ztf_sigmapsf'],
								'flux':flux,
								'flux_err':flux_err,
								'data_quality':0,
								'forced':0,
								'flux_zero_point':27.5,
								# might need to fix this later
								'discovery_point':0, #disc_point,
								'diffim':1}
			photometrydict['photdata']['%s_%i'%(mjd_to_date(s['properties']['ztf_jd']-2400000.5),i)] = phot_upload_dict

			PhotUploadAll['ZTF'] = photometrydict
			transientdict[s['properties']['ztf_object_id']] = tdict
			transientdict[s['properties']['ztf_object_id']]['transientphotometry'] = PhotUploadAll

			nsn += 1

			#if s['properties']['ztf_object_id'] == 'ZTF18abrfjdh':
			#	import pdb; pdb.set_trace()
			#if s['properties']['passband'] == 'R' and s['properties']['ztf_object_id'] == 'ZTF18abrfjdh':
			#	import pdb; pdb.set_trace()

		return transientdict,nsn