Example #1
def stealStuff(file_name, file_mode, base_url, timeout=20, verbose=False):
    from urllib2 import Request, urlopen, URLError, HTTPError

    #create the url and the request
    url = base_url + file_name
    req = Request(url)
    successful_download = False
    count = 0

    # Open the url:
    while not successful_download and count < 6:
        count += 1
        trys_left = 5 - count
        try:
            f = urlopen(req, timeout=timeout)
            if verbose:
                print "downloading " + url

            # Open our local file for writing
            local_file = open(file_name, "w" + file_mode)
            #Write to our local file
            local_file.write(f.read())
            local_file.close()
            successful_download = True

        #handle errors
        except HTTPError, e:
            print "HTTP Error:", e.code, url
            print "Trying again: %i attempts remaining" % (trys_left + 1)
            if trys_left <= -1: qErr.qErr()
        except URLError, e:
            print "URL Error:", e.reason, url
            print "Trying again: %i attempts remaining" % (trys_left + 1)
            if trys_left <= -1: qErr.qErr()
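
A minimal usage sketch (hypothetical host and file names; per the downloadImage comment in Example #10, pass an empty string as file_mode for text/ascii files, or "b" for binary):

stealStuff('notice.txt', '', 'http://example.com/data/', timeout=10, verbose=True)  # text file
stealStuff('logo.png', 'b', 'http://example.com/images/', timeout=10)  # binary file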
Example #2
def _mail_html(gcn,
               mail_to,
               clobber=False,
               tweet=True,
               out_url_path='http://swift.qmorgan.com/',
               grbhtml=None):
    if not hasattr(gcn, 'mailed_web'):
        gcn.mailed_web = False
    if not gcn.mailed_web:
        email_subject = 'New Web Page for Swift trigger %i' % int(
            gcn.triggerid)
        email_body = '''Please visit %s%i/
        for information on this event, updated as more information arrives.''' % (
            out_url_path, int(gcn.triggerid))

        # Crap, the following doesn't work because of clobber=True when reloading new GCNs
        # EVENTUALLY UPDATE SO WE DON'T RELOAD THE ENTIRE GCN EACH TIME, JUST ADD THE NEW NOTICES?
        # gcn.mailed_web = True
        # send_gmail.domail(mail_to,email_subject,email_body)
        # LoadGCN.SaveGCN(gcn)
        #
        # Temporary hack
        mailchkpath = storepath + '/.mlchk%i' % int(gcn.triggerid)
        if not os.path.exists(mailchkpath) or clobber == True:
            cmd = "echo y > %s" % mailchkpath
            os.system(cmd)
            print 'Email with web link has not been sent yet; doing that now...'
            send_gmail.domail(mail_to, email_subject, email_body)
            if tweet:
                try:
                    # python-twitter requires some kind of oAuth authentication now which is a pain
                    # so just use tumblr, linked to the q.mailbot account.
                    tumblrmail = '*****@*****.**'
                    # import twitter # requires http://code.google.com/p/python-twitter/
                    #import tinyurl # requires http://pypi.python.org/pypi/TinyUrl/0.1.0
                    bigurl = '%s%i/' % (out_url_path, int(gcn.triggerid))
                    #littleurl = tinyurl.create_one(bigurl)
                    if grbhtml:
                        ra = str(gcn.best_pos[0]).rstrip('0')
                        dec = str(gcn.best_pos[1]).rstrip('0')
                        uncertainty = str(gcn.best_pos[2]).rstrip('0')
                        pos_label = gcn.best_pos_type
                        twitsub = "New GRB! Swift Trigger %i" % (int(
                            gcn.triggerid))
                        twittext = ''' *  RA = %s<br> *  Dec = %s<br> *  Uncertainty = %s %s<br> *  Visit %s for more info''' % (
                            ra, dec, uncertainty, pos_label, bigurl)

                    else:
                        twitsub = ''
                        twittext = 'New GRB! Swift Trigger %i. Visit %s for more info.' % (
                            int(gcn.triggerid), bigurl)
                    # api = twitter.Api(username='******', password='******')
                    # status = api.PostUpdate(twittext)
                    print 'Sending Tweet - %s' % (twittext)
                    send_gmail.domail(tumblrmail, twitsub, twittext, sig=False)
                except:
                    qErr.qErr()

        else:
            print 'Email has already been sent for this trigger.'
Example #3
def LoadGCN(triggerid, clobber=False, redownload_gcn=False):
    ### LOAD or CREATE PICKLE STORAGE FILE
    # Attempt to load pickle file
    pklpath = storepath + 'sw' + str(triggerid) + 'GCN.pkl'
    loadedgcn = qPickle.load(pklpath)
    # If couldn't load, or clobber == True, create a new instance of the class
    if clobber or not loadedgcn:
        # Create new instance of GCN Notice
        loadedgcn = GCNNotice(triggerid, clobber=redownload_gcn)
        try:
            # Extract values from GCN Notice
            loadedgcn.extract_values()
            loadedgcn.get_positions()
            if loadedgcn.successful_load:
                # Save new Pickle file
                qPickle.save(loadedgcn, pklpath, clobber=True)
            else:
                errtitle = 'Could not successfully load GCN for trigger %s' % (
                    str(triggerid))
                qErr.qErr(errtitle=errtitle)
                return
        except:
            errtitle = 'Could not extract GCN values for trigger %s' % (
                str(triggerid))
            qErr.qErr(errtitle=errtitle)
    return loadedgcn
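
A usage sketch (hypothetical trigger id): LoadGCN returns the cached pickle when one exists, and only re-parses when clobber=True:

gcn = LoadGCN(531415)                    # load from the pickle cache if present
gcn = LoadGCN(531415, clobber=True)      # force re-parsing of the stored notice
gcn = LoadGCN(531415, clobber=True, redownload_gcn=True)  # also re-grab the notice from the web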
Example #4
 def grabgcnfromweb(self):
     """
     Based on trigger number, goes to the web and grabs
     the GCN information and puts it in a list of strings.
     """
     import urllib2
     gcnpath = storepath + 'gcn_notices/' + str(self.triggerid) + '.swift'
     gcnappendpath = storepath + 'gcn_notices/' + str(
         self.triggerid) + '.swift.append'
     if self.clobber or not os.path.exists(gcnpath):
         try:
             gcnaddress = 'http://gcn.gsfc.nasa.gov/gcn/other/%s.swift' % str(
                 self.triggerid)
             gcnwebsite = urllib2.urlopen(gcnaddress)
             gcnstring = gcnwebsite.read()
             f = file(gcnpath, 'w')
             f.write(gcnstring)
             f.close()
             thedelimiter = '//////////////////////////////////////////////////////////////////////'
             if os.path.exists(gcnappendpath):
                 g = file(gcnappendpath, 'r')
                 gcn_append = g.read()
                 gcnstring += '\n' + thedelimiter + '\n' + gcn_append
                 g.close()
             # Split up the text file by GCN Notice - make it a list of strings
             gcn_notices = gcnstring.split(thedelimiter)
             num_of_gcns = len(gcn_notices)
             self.gcn_notices = gcn_notices
             self.num_of_gcns = num_of_gcns
             print "Finished loading GCN Notices from web for trigger %s" % self.triggerid
         except:
             errtitle = "Cannot load GCN Notice from web."
             errtext = "Cannot load GCN Notice %s" % self.triggerid
             qErr.qErr(errtitle=errtitle, errtext=errtext)
     else:
         try:
             f = file(gcnpath, 'r')
             gcnstring = f.read()
             thedelimiter = '//////////////////////////////////////////////////////////////////////'
             # Split up the text file by GCN Notice - make it a list of strings
             if os.path.exists(gcnappendpath):
                 g = file(gcnappendpath, 'r')
                 gcn_append = g.read()
                 gcnstring += '\n' + thedelimiter + '\n' + gcn_append
                 g.close()
             gcn_notices = gcnstring.split(thedelimiter)
             num_of_gcns = len(gcn_notices)
             self.gcn_notices = gcn_notices
             self.num_of_gcns = num_of_gcns
             f.close()
             print "Finished loading GCN Notices from web for trigger %s" % self.triggerid
         except:
             errtitle = "Cannot read GCN Notice from file %s" % (gcnpath)
             qErr.qErr(errtitle=errtitle)
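
The delimiter splitting above, in isolation (hypothetical notice text):

thedelimiter = '//////////////////////////////////////////////////////////////////////'
gcnstring = 'NOTICE ONE ...' + '\n' + thedelimiter + '\n' + 'NOTICE TWO ...'
gcn_notices = gcnstring.split(thedelimiter)
print len(gcn_notices)  # -> 2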
Example #5
def PSNFlow(email=True, email_to='*****@*****.**'):
    while True:
        feed_url = "http://www.cbat.eps.harvard.edu/unconf/tocp.xml"
        new_rss_list = Monitor_PSN_RSS(feed_url)
        if len(new_rss_list) > 10 and email == True:
            errmsg = 'new_rss_list is huge; not doing trigger actions'
            qErr.qErr(errtext=errmsg)
        elif new_rss_list != []:
            for entry in new_rss_list:
                _do_new_entry_actions(entry, email=email, email_to=email_to)

        print time.ctime()
        time.sleep(1800)
Example #6
def _download_and_obtain_psn_string(followup_url):
    try:
        sock = urllib2.urlopen(followup_url)
    except:
        errmsg = '%s does not exist. Returning None.' % (followup_url)
        qErr.qErr(errtext=errmsg)
        return None
    html = sock.read()
    sock.close()
    soup = BeautifulSoup(html)
    psn = soup.find('pre')
    if psn is None:
        errmsg = 'Cannot find PSN string in %s. Returning None.' % (
            followup_url)
        qErr.qErr(errtext=errmsg)
        return None
    elif len(psn.contents) > 1:
        errmsg = '%s has more than one PSN String. Look into this.' % (
            followup_url)
        qErr.qErr(errtext=errmsg)
    psn_string = psn.contents[0]
    psn_string = psn_string.strip()
    if len(psn_string) != 99:
        errmsg = 'PSN string is of the wrong length. Expected 99, got %i' % len(
            psn_string)
        errmsg += ' %s\n%s\n' % (followup_url, psn_string)
        qErr.qErr(errtext=errmsg)
        return None
    return str(psn_string)
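
The <pre> extraction above, in isolation (hypothetical HTML; assumes the BeautifulSoup 3 import style consistent with this Python 2 code):

from BeautifulSoup import BeautifulSoup
soup = BeautifulSoup('<html><body><pre> PSN text here </pre></body></html>')
psn = soup.find('pre')
print psn.contents[0].strip()  # -> PSN text here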
Example #7
 def copy_file(self, file_path):
     try:
         cmd = "cp %s %s" % (file_path, self.out_dir)
         # having problems with shutil.copy
         # shutil.copy(file_path,self.out_dir)
         os.system(cmd)
         newpath = self.out_dir + '/' + os.path.basename(file_path)
         return newpath
     except:
         if not os.path.exists(file_path):
             errmesg = "File %s does not exist. Could not copy to %s" % (
                 file_path, self.out_dir)
             qErr.qErr(errtitle=errmesg)
         else:
             errmesg = "Could not copy file %s to %s" % (file_path,
                                                         self.out_dir)
             qErr.qErr(errtitle=errmesg)
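
The comment above notes problems with shutil.copy; for reference, a standard-library sketch of the same copy-and-return-path behavior (an assumption, not the author's method):

import os
import shutil

def copy_file_stdlib(file_path, out_dir):
    # copy file_path into out_dir and return the resulting path
    shutil.copy(file_path, out_dir)
    return os.path.join(out_dir, os.path.basename(file_path))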
Example #8
 def __init__(self, triggerid, filetype="NoType", clobber=False):
     self.filetype = filetype
     self.triggerid = triggerid
     self.clobber = clobber
     self.parsed_types = []
     self.available_types = []
     # Be sure to update if the web version has changed!
     # If not already saved on disk, grab the gcn from web
     try:
         self.grabgcnfromweb()
         # Once grabbed from web, create the dictionary
         self.createdict()
         self.successful_load = True
     except ValueError:
         errtitle = "Cannot Load GCN Notice for trigger %s" % str(triggerid)
         qErr.qErr(errtitle=errtitle)
         self.successful_load = False
     except AttributeError:
         errtitle = "ATTRIBUTE ERROR. Cannot Create dictionary for trigger %s?" % str(
             triggerid)
         qErr.qErr(errtitle=errtitle)
         self.successful_load = False
Example #9
def HeartBeatCheck(rssurl='http://swift.qmorgan.com/heartbeat.xml',
                   deadtime=1800):

    #Open logging file
    f = open(storepath + 'heartbeat.log', 'a')

    deadtime = datetime.timedelta(
        0, deadtime)  # Convert deadtime into timedelta object
    rssinst = feedparser.parse(rssurl)
    if len(rssinst['entries']) > 1:
        raise ValueError('Your RSS feed should only have one entry.')

    try:
        assert 'bozo_exception' not in rssinst, 'Server might be dead! Cannot load xml page.'
    except:
        errtitle = 'Cannot load RSS URL %s. Server down?' % (rssurl)
        f.write(errtitle + '\n')
        qErr.qErr(errtitle=errtitle)

    updatedtime = datetime.datetime.strptime(rssinst.entries[0]['updated'],
                                             '%a, %d %b %Y %H:%M:%S %Z')
    nowtime = datetime.datetime.now()

    delta = nowtime - updatedtime

    print delta
    f.write('At ' + str(nowtime) + ' it has been ' + str(delta) +
            ' since heartbeat\n')

    # add a four-minute buffer to account for clock differences between computers
    fourminutes = datetime.timedelta(0, 240)
    comparetime = nowtime + fourminutes

    try:
        asserttext = 'WARNING: updated time seems to be > 4 minutes in the future. Clocks out of sync?'
        assert updatedtime < comparetime, asserttext
    except:
        f.write(asserttext + '\n')
        qErr.qErr(errtitle='Server/Client Clocks out of Sync!')

    try:
        asserttext = 'Server might be dead; has been ' + str(
            delta) + ' since last heartbeat'
        assert delta < deadtime, asserttext
    except:
        f.write(asserttext + '\n')
        qErr.qErr(errtitle='Server might be dead!')
    f.close()
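
Note that datetime.timedelta(0, n) treats n as seconds, so the default deadtime=1800 is a 30-minute threshold:

import datetime
print datetime.timedelta(0, 1800)  # -> 0:30:00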
Example #10
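# (Fragment: the tail of stealStuff's retry loop from Example #1, here with an added
# catch-all handler, followed by the downloadImage wrapper.)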
            local_file.close()
            successful_download = True

        #handle errors
        except HTTPError, e:
            print "HTTP Error:", e.code, url
            print "Trying again: %i attempts remaining" % (trys_left + 1)
            if trys_left <= -1: qErr.qErr()
        except URLError, e:
            print "URL Error:", e.reason, url
            print "Trying again: %i attempts remaining" % (trys_left + 1)
            if trys_left <= -1: qErr.qErr()
        except:
            print "Couldn't Download!"
            print "Trying again: %i attempts remaining" % (trys_left + 1)
            if trys_left <= -1: qErr.qErr()


def downloadImage(img_url, out_name=None, timeout=20):

    #create file name based on known pattern
    # Now download the image. If these were text files,
    # or other ascii types, just pass an empty string
    # for the second param ala stealStuff(file_name,'',base_url)
    if not out_name:
        try:
            out_name = img_url.split('/')[-1]
        except:
            out_name = 'qImage.jpg'
    stealStuff(out_name, "b", img_url, timeout=timeout)
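
A usage sketch (hypothetical URL; out_name defaults to the last path segment, 'logo.png' here):

downloadImage('http://example.com/images/logo.png', timeout=10)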
Example #11
def Monitor_PSN_RSS(feed_url="http://www.cbat.eps.harvard.edu/unconf/tocp.xml",
                    save_latest=True):
    '''
    Check whether a particular RSS entry has already been seen by recording
    each entry in a sqlite database.

    To keep checking this feed, call this in an infinite while loop with a set delay time:
    #
    # while(True):
    #     sql_tuple_list = Monitor_PSN_RSS("http://feedurl.xml")
    #     time.sleep(60)
    '''
    from time import strftime
    import sqlite3

    try:
        import feedparser
    except:
        print "feedparser module not installed"
        print "visit http://www.feedparser.org/"
        sys.exit(1)

    # Database management code stolen from http://www.halotis.com/2009/07/01/rss-twitter-bot-in-python/
    DATABASE = storepath + 'psn_rss_feed.sqlite'
    conn = sqlite3.connect(DATABASE)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    # Create the table if it doesn't exist
    c.execute(
        'CREATE TABLE IF NOT EXISTS RSSContent (`updated`, `title`, `dateAdded`, `id`, `content`, `url`)'
    )

    sql_entry_list = []
    new_rss_entry_list = []

    rssinst = feedparser.parse(feed_url)
    if save_latest:
        try:
            last_entry = rssinst['entries'][0]  # saving this for testing purposes
            last_entry_outpath = storepath + 'psn_last_entry.pkl'
            qPickle.save(last_entry, last_entry_outpath, clobber=True)
        except:
            qErr.qErr("Could not save last_entry")
    duplicate_count = 0
    for entry in rssinst['entries']:
        if duplicate_count < 3:
            # check for duplicates
            c.execute('select * from RSSContent where updated=?',
                      (entry.updated, ))  #should be unique
            if not c.fetchall():
                xml_file = entry.link  # the entry.link is the address we want
                # print xml_file
                shortened_link = xml_file

                if 'link' not in entry:
                    errtitle = 'link value not in RSS entry'
                    qErr.qErr(errtitle=errtitle)
                if 'title' not in entry:
                    errtitle = 'title value not in RSS entry'
                    qErr.qErr(errtitle=errtitle)
                if 'summary' not in entry:
                    errtitle = 'summary value not in RSS entry; using blank value'
                    print errtitle
                    summary = 'unknown'
                else:
                    summary = entry.summary
                if 'id' not in entry:
                    errtitle = 'id value not in RSS entry; using blank value'
                    print errtitle
                    entryid = 'unknown'
                else:
                    entryid = entry.id

                try:
                    sql_entry = (entry.updated, entry.title, entryid,
                                 summary, entry.link)
                    print sql_entry
                    c.execute(
                        'insert into RSSContent (`updated`, `title`, `id`, `content`, `url`) values (?,?,?,?,?)',
                        sql_entry)
                    sql_entry_list.append(sql_entry)
                    new_rss_entry_list.append(entry)
                except:
                    qErr.qErr()
                    print "Could not update RSS database for entry %s" % (
                        entry.updated)
            else:
                duplicate_count += 1
            conn.commit()
        else:
            # break the loop if more than 3 duplicates; really only need to
            # see one duplicate to break the loop, but adding this just in case
            # (since newer feed entries are at the top, no need to loop through
            # every single one. if there are no new ones, you should know immediately)
            break
    return new_rss_entry_list
Example #12
def _do_new_entry_actions(new_entry, email=True, email_to='*****@*****.**'):
    # being fed a parsed rss entry
    try:
        psn_id_full = new_entry.id.split('/followups/')[1].strip('"').split('.html')[0]
        # for some reason, the URL has a space when PSN label gets added
        # http://cbat.eps.harvard.edu/unconf/followups/PSN J15111485+4609115
        #u'PSN J15111485+4609115'
        psn_id = str(psn_id_full.split()[-1])
        #u'J15111485+4609115'
    except:
        qErr.qErr(errtitle="PSN ID URL malformed", errtext=new_entry.id)
        psn_id = "Unknown"
        psn_id_full = "Unknown"
    # check if it's in the pickle file
    # if so, update it - add to summary list
    
    all_pkl_path = storepath + 'psn_parsed_entries.pkl'
    
    all_entries = qPickle.load(all_pkl_path)
    if all_entries is None:
        all_entries = {}
    is_new_id = False
    if psn_id not in all_entries:
        is_new_id = True
        all_entries.update({psn_id: {}})  # add a new empty dict with the proper id
    
    # load and parse the PSN string
    psn_url = "http://cbat.eps.harvard.edu/unconf/followups/%s" % (psn_id)
    psn_string = _download_and_obtain_psn_string(psn_url)
    if psn_string is not None:
        psn_dict = _parse_psn_format(psn_string)
    else:
        psn_dict = None
    
    ## Make html
    if psn_dict:
        all_entries[psn_id].update(psn_dict)  # add/update the dictionary values; though should they change?

        dss_url = "http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2&err_shape=combo&incl_scale=yes&size=4&src_name=%s&pos_label=Pos&cont_str=&survey=dss2red" % (psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation'])
        dss_html = "<a href='%s'>DSS Finding Chart</a><br>" % (dss_url)
        sdss_url = "http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2&err_shape=combo&incl_scale=yes&size=4&src_name=%s&pos_label=Pos&cont_str=&survey=sdss" % (psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation'])
        sdss_html = "<a href='%s'>SDSS Finding Chart</a> (may not be available)<br>" % (sdss_url)

        pretty_output = '''
<br><br>
<table border="0">
<tr><td>Object:</td><td>%s</td></tr>
<tr><td>Designation:</td><td>%s</td></tr>
<tr><td>Discovery date:</td><td>%s</td></tr>
<tr><td>Mag at date:</td><td>%s</td></tr>
<tr><td>Filter:</td><td>%s</td></tr>
<tr><td>RA:</td><td>%s (= %f)</td></tr>
<tr><td>Dec:</td><td>%s (= %f)</td></tr>
<tr><td>Presumed host:</td><td>%s</td></tr>
<tr><td>Offset from host:</td><td>%s, %s (arcsec)</td></tr>
<tr><td>Discoverer:</td><td>%s</td></tr>
<tr><td>Obs. arc:</td><td>%s</td></tr>
</table>
<br>
        ''' % (psn_dict['obj_type'], psn_dict['designation'],
               psn_dict['date_string'].replace(' ', '-').replace('2013', 'UT2013'),
               psn_dict['mag'], psn_dict['filter'],
               psn_dict['ra'], psn_dict['ra_deg'], psn_dict['dec'], psn_dict['dec_deg'],
               psn_dict['locale'], psn_dict['ra_offset'], psn_dict['dec_offset'],
               psn_dict['discoverer'], psn_dict['arc'])
    else:
        pretty_output = 'Cannot parse PSN Message.'
    
    print pretty_output
    
    html_body = '''<html><body>
    <a href="%s">%s</a>''' % (psn_url,psn_id)
    if is_new_id:
        html_body += ' (First report of this transient)'
    else:
        html_body += ' (Update)'
    html_body += '<br><br>'
    if psn_dict:
        html_body += dss_html
        html_body += sdss_html
        html_body += pretty_output
    html_body += new_entry.summary
    html_body += '<br><br><br></body></html>'
    
    if 'summary_list' in all_entries[psn_id]:
        summary_list = all_entries[psn_id]['summary_list']
        summary_list.append(new_entry.summary)
    else:
        summary_list = [new_entry.summary]
    all_entries[psn_id].update({'summary_list': summary_list})
    
    # do email if new
        
    if email == True:
        if is_new_id:
            subject = "New Transient %s" % (psn_id_full)
        else:
            subject = "Update to Transient %s" % (psn_id_full)
        print "Sending email: '%s'" % (subject)
        send_gmail.domail(email_to, subject, html_body, html=True)
    
    # do separate/no email if updated?
    
    # save the updated pickle file
    qPickle.save(all_entries, all_pkl_path, clobber=True)
    return is_new_id
Example #13
def MonitorRSS(feed_url):
    '''
    Check whether a particular RSS entry has already been seen by recording
    each entry in a sqlite database.

    To keep checking this feed, call this in an infinite while loop with a set delay time:
    #
    # while(True):
    #     sql_tuple_list = MonitorRSS("http://feedurl.xml")
    #     time.sleep(60)
    '''
    from time import strftime
    import sqlite3

    try:
        import feedparser
    except:
        print "feedparser module not installed"
        print "visit http://www.feedparser.org/"
        sys.exit(1)

    # Database management code stolen from http://www.halotis.com/2009/07/01/rss-twitter-bot-in-python/
    DATABASE = storepath + 'gcn_rss_feed.sqlite'
    conn = sqlite3.connect(DATABASE)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    # Create the table if it doesn't exist
    c.execute(
        'CREATE TABLE IF NOT EXISTS RSSContent (`url`, `title`, `dateAdded`, `id`, `content`, `xml_location`)'
    )

    sql_entry_list = []

    rssinst = feedparser.parse(feed_url)
    for entry in rssinst['entries']:
        # check for duplicates
        c.execute('select * from RSSContent where url=?', (entry.link, ))
        if not c.fetchall():
            xml_file = entry.link  # the entry.link is the address we want
            # print xml_file
            shortened_link = xml_file

            if 'link' not in entry:
                errtitle = 'link value not in RSS entry'
                qErr.qErr(errtitle=errtitle)
            if 'title' not in entry:
                errtitle = 'title value not in RSS entry'
                qErr.qErr(errtitle=errtitle)
            if 'summary' not in entry:
                errtitle = 'summary value not in RSS entry; using blank value'
                print errtitle
                summary = 'unknown'
            else:
                summary = entry.summary
            if 'id' not in entry:
                errtitle = 'id value not in RSS entry; using blank value'
                print errtitle
                entryid = 'unknown'
            else:
                entryid = entry.id

            try:
                sql_entry = (entry.link, entry.title, entryid, summary,
                             shortened_link)
                print sql_entry
                c.execute(
                    'insert into RSSContent (`url`, `title`, `id`, `content`, `xml_location`) values (?,?,?,?,?)',
                    sql_entry)
                sql_entry_list.append(sql_entry)
            except:
                qErr.qErr()
                print "Could not update RSS database for entry %s" % (
                    entry.title)
        conn.commit()

    return sql_entry_list
Example #14
    def createdict(self):
        '''Creates the dictionary from the web-based GCN notices; this function
        grabs the keys and string values from each GCN notice and puts them
        into the dictionary self.dict.
        '''
        # If we don't already have the gcn list loaded, grab it from the web
        if not hasattr(self, 'gcn_notices'):
            self.grabgcnfromweb()
        commentstring = ''
        # for gcn in gcn_list:
        # 	# Search through each notice to determine type
        # 	gcnsplitlist = gcn.splitlines()
        self.dict = {}
        gcn_type_list = []
        type_already_found = []
        gcndict = {}
        add_to_where = 0
        self.good_gcn_notices = []
        for gcn in self.gcn_notices:
            partialdict = {}

            # Pre-July 2005 there was a change in the format of the GCN Notices.
            # However, these values are all the same in the TDRSS. The true
            # units are c/s according to the TDRSS help, even though it says otherwise.
            if '[cnts/sec]' in gcn:
                gcn = gcn.replace('cnts/sec', 'image_cnts')
                gcn = gcn.replace(' Peak=', ' Image_Peak=')

            # Make sure it is not an empty string and check that it is long enough
            # Q Edits 8/24/09

            gcnsplit = gcn.splitlines()
            if '' in gcnsplit: gcnsplit.remove('')
            if ' ' in gcnsplit: gcnsplit.remove(' ')
            if len(gcnsplit) > 2:
                # Find what the notice type is  - 3rd line
                typeline = gcnsplit[2]
                typesplit = typeline.split(':     ')
                if typesplit[0] != 'NOTICE_TYPE':
                    print 'THIRD LINE IS NOT NOTICE_TYPE!'
                    print gcnsplit[0:5]
                    qErr.qErr(errtitle='Third line is not notice type!',
                              errtext=gcn)
                    raise Exception
                else:
                    # Append the GCN to the list of well-formatted notices
                    self.good_gcn_notices.append(gcn)
                    # Append the type of the GCN to the gcn type list
                    gcn_type_list.append(typesplit[1])
            else:
                print "Split Line in GCN web Page is not long enough."
        # DETERMINE WHAT THE LATEST OF THAT TYPE IS
        # for gcn_type in gcn_type_list:
        for gcn_type in gcn_type_list:
            typecount = gcn_type_list.count(gcn_type)
            # Clearing out strings and sub-dictionaries
            partialdict = {}
            subdict = {}
            commentstring = ''
            if where(type_already_found, gcn_type) == []:
                # Use my defined 'where' function to return a list of indices
                gcn_wherelist = where(gcn_type_list, gcn_type)
                # Grab the latest index from the list; + add_to_where because first is ''
                gcn_index = gcn_wherelist[-1]  #+ add_to_where
                if typecount > 1:
                    print "%s instances of %s found; choosing the latest" % (
                        typecount, gcn_type)
                    type_already_found.append(gcn_type)
                else:
                    print "1 instance of %s found" % gcn_type
                gcnstring = self.good_gcn_notices[gcn_index]
                gcnlines = gcnstring.splitlines()
                for line in gcnlines:
                    # Strip to avoid splitting issues with ':  '
                    line = line.strip()
                    linelist = line.split(':  ')
                    if len(linelist) > 2:
                        print 'SOMETHING IS WRONG - we have two instances of ":  " in this line of the GCN'
                        print line
                    if len(linelist) == 2:
                        # Add to dictionary
                        if linelist[0] != 'COMMENTS':
                            subdict = {linelist[0]: linelist[1].strip()}
                            partialdict.update(subdict)
                        if linelist[0] == 'COMMENTS':
                            commentstring += linelist[1].strip() + ';'
                            subdict = {'COMMENTS': commentstring}
                            partialdict.update(subdict)

                ########### Error checking############
                print(partialdict['NOTICE_TYPE'], gcn_type)
                if not partialdict['NOTICE_TYPE'] == gcn_type:
                    qErr.qErr(errtitle='Notice Types do not match!')
                    raise Exception
                ######################################

                subdict = {gcn_type: partialdict}
                self.dict.update(subdict)

                self.last_notice_loaded = gcn_type
        print "Finished populating dictionary for trigger %s" % self.triggerid
Example #15
def _do_all_trigger_actions(triggerid, incl_reg=True, incl_fc=True,
                            mail_reg=False, mail_to='*****@*****.**',
                            make_html=True, html_path='/home/amorgan/www/swift/',
                            mail_html=True, feed_type='talons', tweet=True, force_mail=False,
                            feed_url="http://www.thinkingtelescopes.lanl.gov/rss/talons_swift.xml",
                            update_rss=True, rss_path='/home/amorgan/www/swift/rss.xml',
                            out_url_path='http://swift.qmorgan.com/',
                            update_database='GRB_full', grb_name=None):
    # out_url_path used to be 'http://astro.berkeley.edu/~amorgan/Swift/'
    
    if update_database:
        db = LoadDB(update_database)

    triggerid = triggerid.lstrip('0')
    print 'Loading GCN for trigger %s' % (triggerid)
    gcn = LoadGCN.LoadGCN(triggerid, clobber=True)
    if not gcn.successful_load:
        return  # if we didn't load successfully, don't try to do trigger actions

    # From the date of the GRB, we can take a guess as to what the GRB name will be.
    # Note this follows the new naming convention of appending A to each new burst.
    # With this info, we can add it to the database.
    if 'grb_date_str' in gcn.pdict:
        grb_name_guess_A = gcn.pdict['grb_date_str'].translate(None, '/') + 'A'
        grb_name_guess_B = gcn.pdict['grb_date_str'].translate(None, '/') + 'B'
        grb_name_guess_C = gcn.pdict['grb_date_str'].translate(None, '/') + 'C'
        grb_name_guess_D = gcn.pdict['grb_date_str'].translate(None, '/') + 'D'
        # Look for the latest instances of the possible grb names for this trigger date
        new_grb = False
        if update_database and not grb_name:
            if grb_name_guess_D in db.dict:
                grb_name = grb_name_guess_D
            elif grb_name_guess_C in db.dict:
                grb_name = grb_name_guess_C
            elif grb_name_guess_B in db.dict:
                grb_name = grb_name_guess_B
            elif grb_name_guess_A in db.dict:
                grb_name = grb_name_guess_A
            else:
                grb_name = grb_name_guess_A
                new_grb = True
                errtitle = 'New GRB %s added to database! Trigger %s' % (grb_name, str(triggerid))
                print errtitle
                qErr.qErr(errtitle=errtitle)
            if not new_grb:
                # if not a new grb, double check that our name guess was correct by comparing triggerids
                if not 'triggerid_str' in db.dict[grb_name].keys():
                    # this means that it hasn't been parsed by the swift online table, so the grb_name is not confirmed correct
                    if 'gcn_triggerid' not in db.dict[grb_name].keys():
                        update_database = None
                        errtitle = 'Unidentifiable GRB! Not adding to database'
                        errtext = """Attempting to update the database entry for GRB %s
                        with triggerid %s failed. There is no triggerid_str nor
                        gcn_triggerid in the corresponding dictionary for this GRB
                        entry in the database, so a cross-check could not be performed.
                        Update code and re-check accordingly.""" % (grb_name, str(triggerid))
                        qErr.qErr(errtitle=errtitle, errtext=errtext)
                    elif not str(db.dict[grb_name]['gcn_triggerid']) == str(triggerid):
                        update_database = None
                        errtitle = 'GRB Triggerid/name mismatch! Not adding to database'
                        errtext = """Attempting to update the database entry for GRB %s
                        with triggerid %s failed. The correct triggerid in the database for
                        that GRB name is %s according to GCN notices. This may indicate
                        a mismatch in the database; manually check what the correct GRB/id pair
                        is. The correct GRB name needs to be determined
                        for this GRB to be added to the database.""" % (grb_name, str(triggerid), str(db.dict[grb_name]['gcn_triggerid']))
                        qErr.qErr(errtitle=errtitle, errtext=errtext)
                    else:
                        errtitle = 'Updated GRB %s in the database! Trigger %s; double check GRB/ID match is correct.' % (grb_name, str(triggerid))
                        print errtitle
                        qErr.qErr(errtitle=errtitle)
                elif not db.dict[grb_name]['triggerid_str'] == str(triggerid):
                    update_database = None
                    errtitle = 'GRB Triggerid/name mismatch! Not adding to database'
                    errtext = """Attempting to update the database entry for GRB %s
                    with triggerid %s failed. The correct triggerid in the database for
                    that GRB name is %s. The correct GRB name needs to be determined
                    for this GRB to be added to the database.""" % (grb_name, str(triggerid), db.dict[grb_name]['triggerid_str'])
                    qErr.qErr(errtitle=errtitle, errtext=errtext)
                else:
                    errtitle = 'Updated GRB %s in the database! Trigger %s' % (grb_name, str(triggerid))
                    print errtitle
                    qErr.qErr(errtitle=errtitle)
    
    newdict = {}

    # Eventually want to deprecate the following function
    # and make a generic ds9 region file creating function
    # reg_file_path = gcn.get_positions(create_reg_file=True)
    if incl_reg:
        try:
            reg_path = _incl_reg(gcn)
            if not reg_path:
                print '\nCOULDNT FIND REG PATH\n'
                qErr.qErr(errtitle='COULDNT FIND REG PATH')
            newdict.update({"reg_path": reg_path})
        except: qErr.qErr()
    if incl_fc:
        try:
            fc_path = _incl_fc(gcn,last_pos_check=True)
            if not fc_path: 
                print '\nCOULDNT FIND FC PATH\n'
                qErr.qErr(errtitle='COULDNT FIND FC PATH')
            newdict.update({"fc_path":fc_path})
        except: qErr.qErr()
    if mail_reg:
        try:
            mail_grb_region(gcn,mail_to,reg_path)
        except: qErr.qErr()
    if make_html:
        try:
            grbhtml = make_grb_html(gcn, html_path=html_path, reg_path=reg_path, fc_path=fc_path)
            newdict.update({"out_dir":grbhtml.out_dir})
            if mail_html and grbhtml.successful_export:
                _mail_html(gcn,mail_to,clobber=force_mail,tweet=tweet,out_url_path=out_url_path,grbhtml=grbhtml)
        except: qErr.qErr()
    if update_rss:
        try:
            _update_rss(gcn, rss_path=rss_path, out_url_path=out_url_path)
            print "Updating RSS Feed"
        except: qErr.qErr()

    if update_database:
        db.update_db_info_for_single_key(grb_name,newdict,add_key_if_not_exist=new_grb,Reload=False)
        gcn.extract_values()
        gcn.get_positions()
        db.update_db_info_for_single_key(grb_name,gcn.pdict,add_key_if_not_exist=new_grb,Reload=False)
        SaveDB(db)
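The cross-check above compares the trigger ID carried by the GCN against whichever ID field the database entry already has, preferring triggerid_str (parsed from the Swift online table) over gcn_triggerid (from the notices). A minimal standalone sketch of that decision ladder, as a hypothetical helper simplified from the code above:

def crosscheck_triggerid(entry, triggerid):
    '''Return True/False if `entry` (a per-GRB dict from the database) is
    consistent with `triggerid`; return None if no cross-check is possible.'''
    if 'triggerid_str' in entry:
        return str(entry['triggerid_str']) == str(triggerid)
    elif 'gcn_triggerid' in entry:
        return str(entry['gcn_triggerid']) == str(triggerid)
    return None  # neither ID field present; cannot cross-check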
Example #27
def _do_new_entry_actions(new_entry,
                          email=True,
                          email_to='*****@*****.**'):
    # new_entry is a parsed RSS entry
    try:
        psn_id_full = new_entry.id.split('/followups/')[1].strip('"').split(
            '.html')[0]
        # for some reason, the URL has a space when PSN label gets added
        # http://cbat.eps.harvard.edu/unconf/followups/PSN J15111485+4609115
        #u'PSN J15111485+4609115'
        psn_id = str(psn_id_full.split()[-1])
        #u'J15111485+4609115'
    except:
        qErr.qErr(errtitle="PSN ID URL malformed", errtext=new_entry.id)
        psn_id = "Unknown"
        psn_id_full = "Unknown"
    # check if it's in the pickle file
    # if so, update it - add to summary list

    all_pkl_path = storepath + 'psn_parsed_entries.pkl'

    all_entries = qPickle.load(all_pkl_path)
    if all_entries is None:
        all_entries = {}
    is_new_id = False
    if psn_id not in all_entries:
        is_new_id = True
        all_entries.update({psn_id:
                            {}})  #update with a new empty dict with proper id

    # load and parse the PSN string
    psn_url = "http://cbat.eps.harvard.edu/unconf/followups/%s" % (psn_id)
    psn_string = _download_and_obtain_psn_string(psn_url)
    if psn_string is not None:
        psn_dict = _parse_psn_format(psn_string)
    else:
        psn_dict = None

    ## Make html
    if psn_dict:
        all_entries[psn_id].update(
            psn_dict
        )  #add/update the dictionary values; though should they change?

        dss_url = "http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2&err_shape=combo&incl_scale=yes&size=4&src_name=%s&pos_label=Pos&cont_str=&survey=dss2red" % (
            psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation'])
        dss_html = "<a href='%s'>DSS Finding Chart</a><br>" % (dss_url)
        sdss_url = "http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2&err_shape=combo&incl_scale=yes&size=4&src_name=%s&pos_label=Pos&cont_str=&survey=sdss" % (
            psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation'])
        sdss_html = "<a href='%s'>SDSS Finding Chart</a> (May not be available)<br>" % (
            sdss_url)

        pretty_output = '''
<br><br>
<table border="0">
<tr><td>Object:</td><td>%s</td></tr>
<tr><td>Designation:</td><td>%s</td></tr>
<tr><td>Discovery date:</td><td>%s</td></tr>
<tr><td>Mag at date:</td><td>%s</td></tr>
<tr><td>Filter:</td><td>%s</td></tr>
<tr><td>RA:</td><td>%s (= %f)</td></tr>
<tr><td>Dec:</td><td>%s (= %f)</td></tr>
<tr><td>Presumed host:</td><td>%s</td></tr>
<tr><td>Offset from host:</td><td>%s, %s (arcsec)</td></tr>
<tr><td>Discoverer:</td><td>%s</td></tr>
<tr><td>Obs. arc:</td><td>%s</td></tr>
</table>
<br>
        ''' % (psn_dict['obj_type'], psn_dict['designation'],
               psn_dict['date_string'].replace(' ', '-').replace(
                   '2013', 'UT2013'), psn_dict['mag'], psn_dict['filter'],
               psn_dict['ra'], psn_dict['ra_deg'], psn_dict['dec'],
               psn_dict['dec_deg'], psn_dict['locale'], psn_dict['ra_offset'],
               psn_dict['dec_offset'], psn_dict['discoverer'], psn_dict['arc'])
    else:
        pretty_output = 'Cannot parse PSN Message.'

    print pretty_output

    html_body = '''<html><body>
    <a href="%s">%s</a>''' % (psn_url, psn_id)
    if is_new_id:
        html_body += ' (First report of this transient)'
    else:
        html_body += ' (Update)'
    html_body += '<br><br>'
    if psn_dict:
        html_body += dss_html
        html_body += sdss_html
        html_body += pretty_output
    html_body += new_entry.summary
    html_body += '<br><br><br></body></html>'

    if 'summary_list' in all_entries[psn_id]:
        summary_list = all_entries[psn_id]['summary_list']
        summary_list.append(new_entry.summary)
    else:
        summary_list = [new_entry.summary]
    all_entries[psn_id].update({'summary_list': summary_list})

    # do email if new

    if email:
        if is_new_id:
            subject = "New Transient %s" % (psn_id_full)
        else:
            subject = "Update to Transient %s" % (psn_id_full)
        print "Sending email: '%s'" % (subject)
        send_gmail.domail(email_to, subject, html_body, html=True)

    # do separate/no email if updated?

    # save the updated pickle file
    qPickle.save(all_entries, all_pkl_path, clobber=True)
    return is_new_id
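For reference, the ID extraction at the top of this example walks the entry URL down to the bare designation. A worked sketch using the sample URL from the code comment:

entry_id = 'http://cbat.eps.harvard.edu/unconf/followups/PSN J15111485+4609115.html'
psn_id_full = entry_id.split('/followups/')[1].strip('"').split('.html')[0]
# -> 'PSN J15111485+4609115'
psn_id = str(psn_id_full.split()[-1])
# -> 'J15111485+4609115'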
Example #28
    def createdict(self):
        '''Create the dictionary from the web-based GCN notices; this function
        grabs the keys and string values from each GCN notice and puts them
        into the dictionary self.dict.
        '''
        # If we don't already have the gcn list loaded, grab it from the web
        if not hasattr(self, 'gcn_notices'):
            self.grabgcnfromweb()
        commentstring = ''
        # for gcn in gcn_list:
        # 	# Search through each notice to determine type
        # 	gcnsplitlist = gcn.splitlines()
        self.dict={}
        gcn_type_list = []
        type_already_found = []
        gcndict = {}
        add_to_where = 0
        self.good_gcn_notices = []
        for gcn in self.gcn_notices:
            partialdict = {}
            
            # Pre-July 2005 there was a change in the format of the GCN notices.
            # However, these values are all the same in the TDRSS. The true
            # units are c/s according to the TDRSS help, even though it says otherwise.
            if '[cnts/sec]' in gcn:
                gcn = gcn.replace('cnts/sec','image_cnts')
                gcn = gcn.replace(' Peak=',' Image_Peak=')
            
            # Make sure it is not an empty string and check that it is long enough
            # Q Edits 8/24/09
            
            gcnsplit = gcn.splitlines()
            if '' in gcnsplit: gcnsplit.remove('')
            if ' ' in gcnsplit: gcnsplit.remove(' ')
            if len(gcnsplit) > 2:
                # Find what the notice type is  - 3rd line
                typeline = gcnsplit[2]
                typesplit = typeline.split(':     ')
                if typesplit[0] != 'NOTICE_TYPE':
                    print 'THIRD LINE IS NOT NOTICE_TYPE!' 
                    print gcnsplit[0:5]
                    qErr.qErr(errtitle='Third line is not notice type!',errtext=gcn)
                    raise Exception
                else:
                    # Append the GCN to the list of well-formatted notices
                    self.good_gcn_notices.append(gcn)
                    # Append the type of the GCN to the gcn type list
                    gcn_type_list.append(typesplit[1])
            else: 
                print "Split Line in GCN web Page is not long enough."
        # DETERMINE WHAT THE LATEST OF THAT TYPE IS
        for gcn_type in gcn_type_list:
            typecount = gcn_type_list.count(gcn_type)
            # Clear out strings and sub-dictionaries
            partialdict={}
            subdict={}
            commentstring=''
            if where(type_already_found,gcn_type) == []:
                # Use my defined 'where' function to return a list of indices
                gcn_wherelist = where(gcn_type_list,gcn_type)
                # Grab the latest index from the list; + add_to_where because first is ''
                gcn_index = gcn_wherelist[-1] #+ add_to_where 
                if typecount > 1:
                    print "%s instances of %s found; choosing the latest" % (typecount, gcn_type)
                    type_already_found.append(gcn_type)
                else:
                    print "1 instance of %s found" % gcn_type
                gcnstring = self.good_gcn_notices[gcn_index]
                gcnlines = gcnstring.splitlines()
                for line in gcnlines:
                    # Strip to avoid splitting issues with ':  '
                    line = line.strip()
                    linelist = line.split(':  ')
                    if len(linelist) > 2:
                        print 'SOMETHING\'S WRONG - we have two instances of ":  " in this line of the GCN'
                        print line
                    if len(linelist) == 2:
                        # Add to dictionary
                        if linelist[0] != 'COMMENTS':
                            subdict = {linelist[0]:linelist[1].strip()}
                            partialdict.update(subdict)
                        if linelist[0] == 'COMMENTS':
                            commentstring += linelist[1].strip() + ';'
                            subdict = {'COMMENTS':commentstring}
                            partialdict.update(subdict)
                
                ########### Error checking ############
                print partialdict['NOTICE_TYPE'], gcn_type
                if not partialdict['NOTICE_TYPE'] == gcn_type:
                    qErr.qErr(errtitle='Notice Types do not match!')
                    raise Exception
                ######################################
                    
                subdict = {gcn_type:partialdict}
                self.dict.update(subdict)

                self.last_notice_loaded = gcn_type
        print "Finished populating dictionary for trigger %s" % self.triggerid
Example #29
def _do_all_trigger_actions(triggerid,  incl_reg=True,incl_fc=True,\
                        mail_reg=False, mail_to='*****@*****.**',\
                        make_html=True, html_path='/home/amorgan/www/swift/',\
                        mail_html=True, feed_type = 'talons', tweet = True, force_mail=False,\
                        feed_url="http://www.thinkingtelescopes.lanl.gov/rss/talons_swift.xml",
                        update_rss=True, rss_path='/home/amorgan/www/swift/rss.xml',
                        out_url_path='http://swift.qmorgan.com/',
                        update_database='GRB_full',grb_name=None):
    #out_url_path used to be 'http://astro.berkeley.edu/~amorgan/Swift/'

    if update_database:
        db = LoadDB(update_database)

    triggerid = triggerid.lstrip('0')
    print 'Loading GCN for trigger %s' % (triggerid)
    gcn = LoadGCN.LoadGCN(triggerid, clobber=True)
    if not gcn.successful_load:
        return  # if we didn't load successfully, don't try to do trigger actions

    # From the date of the GRB, we can take a guess as to what the GRB name will be.
    # Note this follows the new naming convention of appending a letter (A, B, ...) to each new burst.
    # With this info, we can add it to the database.
    if 'grb_date_str' in gcn.pdict:
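        # str.translate(None, '/') deletes the slashes (Python 2 str), e.g. '13/06/07' -> '130607'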
        grb_name_guess_A = gcn.pdict['grb_date_str'].translate(None, '/') + 'A'
        grb_name_guess_B = gcn.pdict['grb_date_str'].translate(None, '/') + 'B'
        grb_name_guess_C = gcn.pdict['grb_date_str'].translate(None, '/') + 'C'
        grb_name_guess_D = gcn.pdict['grb_date_str'].translate(None, '/') + 'D'
        # Look for the latest instances of the possible grb names for this trigger date
        new_grb = False
        if update_database and not grb_name:
            if grb_name_guess_D in db.dict:
                grb_name = grb_name_guess_D
            elif grb_name_guess_C in db.dict:
                grb_name = grb_name_guess_C
            elif grb_name_guess_B in db.dict:
                grb_name = grb_name_guess_B
            elif grb_name_guess_A in db.dict:
                grb_name = grb_name_guess_A
            else:
                grb_name = grb_name_guess_A
                new_grb = True
                errtitle = 'New GRB %s added to database! Trigger %s' % (
                    grb_name, str(triggerid))
                print errtitle
                qErr.qErr(errtitle=errtitle)
            if not new_grb:
                # if not a new grb, double check that our name guess was correct by comparing triggerids
                if not 'triggerid_str' in db.dict[grb_name].keys():
                    # this means that it hasn't been parsed by the swift online table, so the grb_name is not confirmed correct
                    if not 'gcn_triggerid' in db.dict[grb_name].keys():
                        update_database = None
                        errtitle = 'Unidentifiable GRB! Not adding to database'
                        errtext = """Attempting to update the database entry for GRB %s
                        with triggerid %s failed. There is neither a triggerid_str nor a
                        gcn_triggerid in the corresponding dictionary for this GRB
                        entry in the database, so a cross-check could not be performed.
                        Update code and re-check accordingly.""" % (
                            grb_name, str(triggerid))
                        qErr.qErr(errtitle=errtitle, errtext=errtext)
                    elif not str(db.dict[grb_name]['gcn_triggerid']) == str(
                            triggerid):
                        update_database = None
                        errtitle = 'GRB Triggerid/name mismatch! not adding to database'
                        errtext = """Attempting to update the database entry for GRB %s
                        with triggerid %s failed. The correct triggerid in the database for
                        that GRB name is %s according to GCN notices. This may indicate
                        a mismatch in the database; manually check what the correct
                        GRB/id pair is. The correct GRB name needs to be determined
                        for this GRB to be added to the database.""" % (
                            grb_name, str(triggerid),
                            str(db.dict[grb_name]['gcn_triggerid']))
                        qErr.qErr(errtitle=errtitle, errtext=errtext)
                    else:
                        errtitle = 'Updated GRB %s in the database! Trigger %s; double check GRB/ID match is correct.' % (
                            grb_name, str(triggerid))
                        print errtitle
                        qErr.qErr(errtitle=errtitle)
                elif not db.dict[grb_name]['triggerid_str'] == str(triggerid):
                    update_database = None
                    errtitle = 'GRB Triggerid/name mismatch! not adding to database'
                    errtext = """Attempting to update the database entry for GRB %s
                    with triggerid %s failed. The correct triggerid in the database for
                    that GRB name is %s. The correct GRB name needs to be determined
                    for this GRB to be added to the database.""" % (
                        grb_name, str(triggerid),
                        db.dict[grb_name]['triggerid_str'])
                    qErr.qErr(errtitle=errtitle, errtext=errtext)
                else:
                    errtitle = 'Updated GRB %s in the database! Trigger %s' % (
                        grb_name, str(triggerid))
                    print errtitle
                    qErr.qErr(errtitle=errtitle)

    newdict = {}

    # Eventually want to deprecate the following function
    # and make a generic ds9 region-file-creation function
    #reg_file_path = gcn.get_positions(create_reg_file=True)
    if incl_reg:
        try:
            reg_path = _incl_reg(gcn)
            if not reg_path:
                print '\nCOULD NOT FIND REG PATH\n'
                qErr.qErr(errtitle='COULD NOT FIND REG PATH')
            newdict.update({"reg_path": reg_path})
        except:
            qErr.qErr()
    if incl_fc:
        try:
            fc_path = _incl_fc(gcn, last_pos_check=True)
            if not fc_path:
                print '\nCOULD NOT FIND FC PATH\n'
                qErr.qErr(errtitle='COULD NOT FIND FC PATH')
            newdict.update({"fc_path": fc_path})
        except:
            qErr.qErr()
    if mail_reg:
        try:
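            # assumes incl_reg above was True so that reg_path is defined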
            mail_grb_region(gcn, mail_to, reg_path)
        except:
            qErr.qErr()
    if make_html:
        try:
            grbhtml = make_grb_html(gcn,
                                    html_path=html_path,
                                    reg_path=reg_path,
                                    fc_path=fc_path)
            newdict.update({"out_dir": grbhtml.out_dir})
            if mail_html and grbhtml.successful_export:
                _mail_html(gcn,
                           mail_to,
                           clobber=force_mail,
                           tweet=tweet,
                           out_url_path=out_url_path,
                           grbhtml=grbhtml)
        except:
            qErr.qErr()
    if update_rss:
        try:
            _update_rss(gcn,
                        rss_path=rss_path,
                        out_url_path=out_url_path)
            print "Updating RSS Feed"
        except:
            qErr.qErr()

    if update_database:
        db.update_db_info_for_single_key(grb_name,
                                         newdict,
                                         add_key_if_not_exist=new_grb,
                                         Reload=False)
        gcn.extract_values()
        gcn.get_positions()
        db.update_db_info_for_single_key(grb_name,
                                         gcn.pdict,
                                         add_key_if_not_exist=new_grb,
                                         Reload=False)
        SaveDB(db)
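A hypothetical invocation of the entry point above, assuming the surrounding pipeline modules (LoadGCN, LoadDB, qErr, and friends) are importable and configured; the trigger ID is passed as a zero-padded string, which the function strips before loading:

_do_all_trigger_actions('00555555',
                        mail_reg=False,
                        make_html=True,
                        update_database='GRB_full')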