Example #1
def vsBuild():
    global redo
    global tryredo
    global passed
    global parsed
    #Please ensure that the current working directory is $autopsy/testing/script
    oldpath = os.getcwd()
    os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32"))
    vs = []
    vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe")
    vs.append("Tsk-win.sln")
    vs.append("/p:configuration=release")
    vs.append("/p:platform=x64")
    vs.append("/t:clean")
    vs.append("/t:rebuild")
    print(vs)
    VSpth = make_local_path("..", "VSOutput.txt")
    VSout = open(VSpth, 'a')
    subprocess.call(vs, stdout=VSout)
    VSout.close()
    os.chdir(oldpath)
    chk = os.path.join("..", "..", "..","sleuthkit", "win32", "x64", "Release", "libtsk_jni.dll")
    if not os.path.exists(chk):
        print("path doesn't exist")
        global errorem
        global attachl
        global email_enabled
        if(not tryredo):
            errorem += "LIBTSK C++ failed to build.\n"
            attachl.append(VSpth)
            if email_enabled: 
                Emailer.send_email(parsed, errorem, attachl, False)
        tryredo = True
        passed = False
        redo = True
Example #2
    def run(self):

        stratBuilder = StrategyBuilder()
        evaluator = Evaluator()

        listNum = 0
        for watchlist in watchlists:

            listNum += 1
            print('\nWatchlist', listNum)

            #Retrieve stock data
            tickers = self.getStocks(watchlist)

            if self.SORT_BY_PERF:
                tickers = evaluator.run(interval=self.INTERVAL,
                                        stocks=tickers,
                                        strat=stratBuilder.Strategy1)

            i = 14  #Start at 14 to retrieve indicator data
            #Loop through each day
            numDays = tickers[0].closeList.size
            while i < numDays:
                self.runningDate = tickers[0].closeList.axes[0].date[i]

                for stock in tickers:

                    if i < stock.closeList.size:

                        stock.setCurrentPrice(stock.closeList[i])

                        #Get Buy/Sell result from Strategy
                        stratResult = stratBuilder.Strategy1(stock, i)

                        if stratResult == 'Buy':
                            #append to list to buy
                            self.buyList.append([self.runningDate, stock.code])
                        elif (stratResult == 'Sell'):
                            #append to list to sell
                            self.sellList.append(
                                [self.runningDate, stock.code])
                i += 1

            print('Buy:')
            for stock in self.buyList:
                print(stock[0], stock[1])

            print('Sell:')
            for stock in self.sellList:
                print(stock[0], stock[1])

            Emailer.send(watchlist, self.buyList, self.sellList)

            self.buyList.clear()
            self.sellList.clear()
Example #3
	def run(self):
		#Load password from local file (see the note after this example for the expected data.xtxt layout)
		with open('data.xtxt', 'r') as f:
			# strip stray carriage returns/newlines; str.translate() returns a new string,
			# so the original loop that discarded its result was a no-op
			lines = [line.strip('\r\n') for line in f.read().split('\n')]

		emailer = Emailer()
		emailer.setup(lines[0], lines[1])
		emailer.send_command('buy', 'RVT 10')
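Example #3 (and Examples #10 and #15 below) assumes data.xtxt is a plain two-line credentials file. A minimal sketch of that layout and the matching setup call; the account values are placeholders, not taken from any of the source projects:

# data.xtxt is assumed to contain exactly two lines (placeholder values shown):
#   bot.account@example.com
#   s3cret-password
with open('data.xtxt', 'r') as f:
    username, password = [line.strip() for line in f.read().splitlines()[:2]]

emailer = Emailer()                # the Emailer class used throughout these examples
emailer.setup(username, password)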
Example #4
def compile(errore, attachli, parsedin, branch):
    global to
    global server
    global subj
    global email_enabled 
    global redo
    global tryredo
    global failedbool
    global errorem
    errorem = errore
    global attachl
    attachl = attachli
    global passed
    global parsed
    parsed = parsedin
    passed = True
    tryredo = False
    redo = True
    while(redo):
        passed = True
        if(passed):
            gitPull("sleuthkit", branch)
        if(passed):
            vsBuild()
            print("TSK") 
        if(passed):
            gitPull("autopsy", branch)
        if(passed):
            antBuild("datamodel", False, branch)
            print("DataModel")
        if(passed):
            antBuild("autopsy", True, branch)
            print("Aut")
        if(passed):
            redo = False
        else:
            print("Compile Failed")
            time.sleep(3600)
    attachl = []
    errorem = "The test standard didn't match the gold standard.\n"
    failedbool = False
    if(tryredo):
        errorem = ""
        errorem += "Rebuilt properly.\n"
        if email_enabled: 
            Emailer.send_email(to, server, subj, errorem, attachl)
        attachl = []
        passed = True
Example #5
def antBuild(which, Build, branch):
    print("building: ", which)
    global redo
    global passed
    global tryredo
    global parsed
    directory = os.path.join("..", "..")
    ant = []
    if which == "datamodel":
        directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java")
    ant.append("ant")
    ant.append("-f")
    ant.append(directory)
    ant.append("clean")
    if(Build):
        ant.append("build")
    else:
        ant.append("dist")
    antpth = make_local_path("..", "ant" + which + "Output.txt")
    antout = open(antpth, 'a')
    succd = subprocess.call(ant, stdout=antout)
    antout.close()
    global errorem
    global attachl
    global email_enabled
    global to
    global subj
    global server
    if which == "datamodel":
        chk = os.path.join("..", "..", "..","sleuthkit",  "bindings", "java", "dist", "TSK_DataModel.jar")
        try:
            open(chk).close()  # probe for the built jar; close the handle immediately
        except IOError as e:
            if(not tryredo):
                errorem += "DataModel Java build failed on branch " + branch + ".\n"
                attachl.append(antpth)
                if email_enabled: 
                    Emailer.send_email(to, server, subj, errorem, attachl)
            passed = False
            tryredo = True
    elif (succd != 0 and (not tryredo)):
        errorem += "Autopsy build failed on branch " + branch + ".\n"
        attachl.append(antpth)
        Emailer.send_email(to, server, subj, errorem, attachl)
        tryredo = True
    elif (succd != 0):
        passed = False
Example #6
def index():
    username = None
    password = None
    email = None
    form = loginForm()
    if form.validate_on_submit():
        try:
            course_info = GetHTML.htmlHandle(form.username.data,form.password.data)

            timesold = course_info[2]
            names = course_info[1]
            course_info[2] = Formatter.formatTimes(course_info[2])
            sdates = {'M':'26','T':'20','W':'21','R':'22','F':'23'}

            snames = {}

            for i in range(len(timesold)):
                snames[names[i]] = len(timesold[i])

            dnames = {}

            k = 0
            for j in range(7):
                for i in range(snames[names[j]]):
                    dnames[k] = names[j]
                    k += 1

            filename = 'OliniCalendar.ics'

            iCalCreation.iCalWrite(course_info[2],"201501","20150430T000000",sdates,dnames,filename)

            ical = open('OliniCalendar.ics', 'r')

            email = form.email.data
            Emailer.iCalCreator(email, ical)
            ical.close()

            os.remove('OliniCalendar.ics')
            # return render_template(html_sched)
            
            form.username.data = ''
            form.password.data = ''
            form.email.data = ''
        except Exception:
            print("Oops")

    return render_template('index.html', form=form)
Example #7
    def __init__(self):
        self.parser = HTML_parser.HTML_parser()
        self.bets_DB = BettingDB.BettingDB()
        self.emailer = Emailer.Emailer()
        self.games = {}
        self.date = datetime.datetime.now()

        random.seed(int(time.time()))
Example #8
def main():
    fieldConfigFile, config_inputdir, jobType = parse_opts()
    configItems = ConfigUtils.setConfigs(config_inputdir, fieldConfigFile)
    lg = pyLogger(configItems)
    logger = lg.setConfig()
    sc = SocrataClient(config_inputdir, configItems, logger)
    client = sc.connectToSocrata()
    clientItems = sc.connectToSocrataConfigItems()
    scrud = SocrataCRUD(client, clientItems, configItems, logger)
    sQobj = SocrataQueries(clientItems, configItems, logger)
    email_list = DatasetUtils.getDatasetAsDictListPageThrough(
        configItems['dd']['index']['fbf'], sQobj,
        configItems['dd']['index']['qryCols'])

    e = Emailer(configItems)
    ec = EmailComposer(configItems, e)
    base_email_txt = ec.getBaseMsgText('systems_updt')
    subject_line = e._emailConfigs['email_situations']['systems_updt'][
        'subject_line']
    wkbks_dir = configItems['wkbk_dir'] + '/blank_wkbks/'
    for item in email_list:
        msgBody = base_email_txt % (item['coordinator_name'],
                                    item['template_file_name'])
        attachment_dictList = [{
            item['template_file_name']:
            wkbks_dir + item['template_file_name']
        }, {
            'InventoryUpdateGuidance.pdf':
            wkbks_dir + 'InventoryUpdateGuidance.pdf'
        }]
        recipientsActual = {
            'To': item['primary_data_coordinator'],
            'bcc': '*****@*****.**'
        }
        if 'secondary_data_coordinator' in item.keys():
            recipientsActual['cc'] = item['secondary_data_coordinator']
        print recipientsActual
        #recipientsFake =  { 'To': '*****@*****.**', 'cc':'*****@*****.**', 'bcc': '*****@*****.**'}
        mail_recipient = e.sendEmails(subject_line,
                                      msgBody,
                                      fname_attachment=None,
                                      fname_attachment_fullpath=None,
                                      recipients=recipientsActual,
                                      attachment_dictList=attachment_dictList,
                                      isETL=False)
Example #9
    def __init__(self, league, sleep_time):
        self.league = league
        self.parser = HTML_Parser.HTML_Parser(self.league)
        self.bets_DB = BettingDB.BettingDB()
        self.emailer = Emailer.Emailer()
        self.games = {}
        self.date = datetime.datetime.now()
        self.sleep_time = sleep_time

        random.seed(int(time.time()))
Example #10
    def setup(self):
        #Load password from local file
        with open('data.xtxt', 'r') as f:
            # strip stray carriage returns/newlines; the original translate() loop discarded its result
            lines = [line.strip('\r\n') for line in f.read().split('\n')]

        self.emailer = Emailer.Emailer()
        self.emailer.setup(lines[0], lines[1])
Example #11
def compile(errore, attachli, parsedin):
    global redo
    global tryredo
    global failedbool
    global errorem
    errorem = errore
    global attachl
    attachl = attachli
    global passed
    global parsed
    parsed = parsedin
    passed = True
    tryredo = False
    redo = True
    while (redo):
        passed = True
        if (passed):
            gitPull("sleuthkit")
        if (passed):
            vsBuild()
        if (passed):
            gitPull("autopsy")
        if (passed):
            antBuild("datamodel", False)
        if (passed):
            antBuild("autopsy", True)
        if (passed):
            redo = False
        else:
            print("Compile Failed")
            time.sleep(3600)
    attachl = []
    errorem = "The test standard didn't match the gold standard.\n"
    failedbool = False
    if (tryredo):
        errorem = ""
        errorem += "Rebuilt properly.\n"
        Emailer.send_email(parsed, errorem, attachl, True)
        attachl = []
        passed = True
Example #12
def compile(errore, attachli, parsedin):
	global redo
	global tryredo
	global failedbool
	global errorem
	errorem = errore
	global attachl
	attachl = attachli
	global passed
	global parsed
	parsed = parsedin
	passed = True
	tryredo = False
	redo = True
	while(redo):
		passed = True
		if(passed):
			gitPull("sleuthkit")
		if(passed):
			vsBuild()
		if(passed):
			gitPull("autopsy")
		if(passed):
			antBuild("datamodel", False)
		if(passed):
			antBuild("autopsy", True)
		if(passed):
			redo = False
		else:
			print("Compile Failed")
			time.sleep(3600)
	attachl = []
	errorem = "The test standard didn't match the gold standard.\n"
	failedbool = False
	if(tryredo):
		errorem = ""
		errorem += "Rebuilt properly.\n"
		Emailer.send_email(parsed, errorem, attachl, True)
		attachl = []
		passed = True
Example #13
    def generateEmailMsg(configItems, activity, email_content):
        notification_datsetids = []
        msg_body = configItems['email_msg_template']['header'] + configItems[
            'activity'][activity]['email_msg']['msg_header']
        attachment_list = []
        for content in email_content['content']:
            msg_body = msg_body + "<p>" + content['report_html'] + "</p>"
            notification_datsetids = notification_datsetids + content[
                'datasetids']

        subject_line = configItems['activity'][activity]['email_msg'][
            'subject_line'] + ": " + str(
                email_content['number_of_actions']) + " " + configItems[
                    'activity'][activity]['email_msg']['subject_line_action']
        msg_body = msg_body + configItems['email_msg_template']['footer']
        em = Emailer(configItems)
        em.sendEmails(subject_line,
                      msg_body.encode('utf-8').strip(),
                      fname_attachment=email_content['report_attachment_name'],
                      fname_attachment_fullpath=email_content[
                          'report_attachment_fullpath'])
        return notification_datsetids
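generateEmailMsg is driven entirely by the keys it looks up in configItems and email_content. The shapes below are inferred from those lookups alone; the activity name, HTML fragments, and paths are placeholders, Emailer(configItems) would additionally need its own mail settings, and the call assumes the function is reachable as a plain function or staticmethod:

# Illustrative input shapes for generateEmailMsg, inferred only from the keys used above.
configItems = {
    'email_msg_template': {'header': '<html><body>', 'footer': '</body></html>'},
    'activity': {
        'dataset_update': {                      # hypothetical activity name
            'email_msg': {
                'msg_header': '<h2>Dataset updates</h2>',
                'subject_line': 'Open data alert',
                'subject_line_action': 'datasets updated',
            }
        }
    },
}
email_content = {
    'number_of_actions': 2,
    'content': [
        {'report_html': '<table>...</table>', 'datasetids': ['abcd-1234', 'efgh-5678']},
    ],
    'report_attachment_name': 'update_report.csv',
    'report_attachment_fullpath': '/tmp/update_report.csv',
}
dataset_ids = generateEmailMsg(configItems, 'dataset_update', email_content)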
Example #14
def gitPull(TskOrAutopsy):
	global SYS
	global errorem
	global attachl
	ccwd = ""
	gppth = Emailer.make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt")
	attachl.append(gppth)
	gpout = open(gppth, 'a')
	toPull = "https://www.github.com/sleuthkit/" + TskOrAutopsy
	call = ["git", "pull", toPull]
	if TskOrAutopsy == "sleuthkit":
		ccwd = os.path.join("..", "..", "..", "sleuthkit")
	else:
		ccwd = os.path.join("..", "..")
	subprocess.call(call, stdout=gpout, cwd=ccwd)  # write pull output to the attached log file
	gpout.close()
Example #15
    def run(self):
        #Load password from local file
        with open('data.xtxt', 'r') as f:
            # strip stray carriage returns/newlines; the original translate() loop discarded its result
            lines = [line.strip('\r\n') for line in f.read().split('\n')]

        emailer = Emailer()
        emailer.setup(lines[0], lines[1])
        emailer.send_command('buy', 'RVT 10')
Example #16
def gitPull(TskOrAutopsy):
    global SYS
    global errorem
    global attachl
    ccwd = ""
    gppth = Emailer.make_local_path("..",
                                    "GitPullOutput" + TskOrAutopsy + ".txt")
    attachl.append(gppth)
    gpout = open(gppth, 'a')
    toPull = "http://www.github.com/sleuthkit/" + TskOrAutopsy
    call = ["git", "pull", toPull]
    if TskOrAutopsy == "sleuthkit":
        ccwd = os.path.join("..", "..", "..", "sleuthkit")
    else:
        ccwd = os.path.join("..", "..")
    subprocess.call(call, stdout=gpout, cwd=ccwd)
    gpout.close()
Example #17
    def sendJobStatusEmail(self, finishedDataSets):
        msgBody = ""
        for i in range(len(finishedDataSets)):
            # remove the column definitions, check if records were inserted
            dataset = self.sucessStatus(DictUtils.removeKeys(finishedDataSets[i], self.keysToRemove))
            msg = self.makeJobStatusMsg(finishedDataSets[i])
            msgBody = msgBody + msg
        subject_line = self.getJobStatus()
        email_attachment = self.makeJobStatusAttachment(finishedDataSets)
        e = Emailer(self.configItems)
        emailconfigs = e.setConfigs()
        if os.path.isfile(self.logfile_fullpath):
            e.sendEmails(subject_line, msgBody, self.logfile_fname, self.logfile_fullpath)
        else:
            e.sendEmails(subject_line, msgBody)
        print "****************JOB STATUS******************"
        print subject_line
        print "Email Sent!"
        self._logger.info("****************JOB STATUS******************")
        self._logger.info(subject_line)
        self._logger.info("Email Sent!")
        self._logger.info("****************JOB END******************")
Example #18
def process_instr(inst, year, doy, do_DB=True):
    # Function to process a single day of GPS data.  A
    # movie and a keogram will be generated.  If requested, the information
    # will be added into the airglow database
    #
    # INPUTS:
    #   inst - instrument code to process
    #   year, doy - year and day of year of date to be processed
    #
    # OPTIONAL INPUTS:
    #   do_DB - put information into the airglow SQL database (default=True)
    #
    # HISTORY:
    #   Written by Daniel J. Fisher on 05 Feb 2014

    # Create the process_dn date
    process_dn = datetime.datetime(year, 1,
                                   1) + datetime.timedelta(days=doy - 1)

    instr = inst[0:-2]

    # Find out where the instrument was
    site = gpsinfo.get_site_of(inst, process_dn)

    # Get the info for the instrument on this date
    inst_info = gpsinfo.get_instr_info(inst, process_dn)
    site_info = gpsinfo.get_site_info(site)
    inst_id = inst_info['sql_inst_id']
    horz = inst_info['horizon']
    site_id = site_info['sql_id']
    site_name = site_info['Name']
    timezone = site_info['Timezone']

    # Create needed string variables
    year = process_dn.strftime('%Y')
    month = process_dn.strftime('%m')
    day = process_dn.strftime('%d')
    date = process_dn.strftime('%y%m%d')

    # Create the data directory name for the data to be processed
    data_dir = '/rdata/airglow/gps/' + inst + '/' + site + '/' + year + '/'
    os.chdir(data_dir)

    # Do Scintmon S4 Processing
    if instr == 'scintmo':
        # Move data to data folder
        raw_dir = data_dir + 'raw_data/'
        os.system('mv ' + raw_dir + date + '*.[f,o,n]* ' + data_dir)
        files = glob.glob(date + '*.fsl')
        files.sort()
        # Go through all s4 data per day
        for f in files:
            s4filename = inst + f[
                -5] + '_' + site + '_' + year + month + day + '_s4.png'
            os.system('chmod 777 ' + data_dir + f[0:-4] + '*')
            print 'Getting lsum4...'
            os.system('/usr/local/bin/lsum4 -n ' + f[0:-4])
            print 'Move raw data...'
            os.system('chmod 750 ' + data_dir + f[0:-4] + '*')

            sumfile = glob.glob(f[0:-4] + '.sum')
            if (sumfile):
                # change ownership
                os.system('chmod 750 ' + sumfile[0])
                os.system('chown airglow.gps ' + sumfile[0])
                # get output image and times
                startut, stoput, s4fig = ScintMonDisplay.PlotDay(
                    sumfile[0], horz)
                s4fig.savefig(data_dir + s4filename)
                # Add info to the database, if requested
                if do_DB:
                    print 'Do databasing...'
                    database([s4filename], data_dir, startut, stoput,
                             [inst_id], site_id)
                os.system('rm -f ' + data_dir + s4filename)
            else:
                subject = "!!! Processing Error (\'" + inst + '\',' + year + ',' + str(
                    doy) + ') @ ' + site
                print subject
                Emailer.emailerror(
                    subject,
                    'lsum4 problem - move rawdata back and reprocess?')

    # Do Scinda S4/TEC Processing
    elif instr == 'scinda':
        try:
            print 'Running wintec-p...'
            os.system('/usr/local/bin/wintec-p-daily.pl ' + date + '*.scn')
            print 'Move raw data...'
            # Standardize filenames
            s4filename = inst + '_' + site + '_' + year + month + day + '_s4.png'
            tecfilename = inst + '_' + site + '_' + year + month + day + '_tec.png'
            os.rename(data_dir + 'plots/S' + date + '.PNG',
                      data_dir + s4filename)
            os.rename(data_dir + 'plots/T' + date + '.PNG',
                      data_dir + tecfilename)
            # Get times
            files = glob.glob(data_dir + date + '*.scn')
            files.sort()
            local = pytz.timezone(timezone)
            # find start time
            data = open(files[0], 'r')
            r = re.compile('[ \t\n\r:]+')
            line = r.split(data.readline())
            dtime = datetime.datetime(2000 + int(line[1]), int(
                line[2]), int(line[3])) + datetime.timedelta(seconds=float(line[4]))
            dtime = local.localize(dtime)
            startut = dtime.astimezone(utc).strftime('%Y-%m-%d %H:%M:%S')
            data.close()
            # find end time
            r = re.compile('[ \t\n\r:]+')
            for zelda in reversed(open(files[-1]).readlines()):
                line = r.split(zelda)
                if line[0] == 'T':
                    dtime = datetime.datetime(2000 + int(line[1]), int(
                        line[2]), int(
                            line[3])) + datetime.timedelta(seconds=float(line[4]))
                    dtime = local.localize(dtime)
                    stoput = dtime.astimezone(utc).strftime('%Y-%m-%d %H:%M:%S')
                    break
            # Add info to the database, if requested
            if do_DB:
                print 'Do databasing...'
                database([s4filename, tecfilename], data_dir, startut, stoput,
                         inst_id, site_id)
            os.system('rm -f ' + data_dir + s4filename)
            os.system('rm -f ' + data_dir + tecfilename)
        except:
            subject = "!!! Processing Error (\'" + inst + '\',' + year + ',' + str(
                doy) + ') @ ' + site
            print subject
            Emailer.emailerror(subject, 'wintec problem')

    # Do Cases Processing
    elif instr == 'cases':

        # NOTE: Python code (originally from /programs/plot_CASES_day.py
        try:
            # Paths to use
            name = inst + '_' + site
            datapath = '/rdata/airglow/gps/' + inst + '/' + site + '/streaming/'
            pngpath = '/rdata/airglow/gps/results/'
            log_fname = '/rdata/airglow/gps/results/' + inst + '_' + site + '.log'
            #log_cmd = '/usr/bin/perl /usr/local/share/Python/programs/load_'+inst+'_'+site+'_logfile.pl'

            # Create the filename of the data to be parsed
            fname = '{:s}dataout_{:s}_{:03d}.bin'.format(datapath, year, doy)

            # Run binflate
            os.popen('/rdata/airglow/gps/cases01/hka/streaming/binflate -i ' +
                     fname)

            # Load txinfo.log file
            txfname = 'txinfo.log'
            txprn, txdn, el, az, txsystem = cases.load_txinfo(txfname)

            # Load scint.log file
            s4fname = 'scint.log'
            s4prn, s4dn, s4, s4system = cases.load_scint(s4fname)
            startut = s4dn[0].strftime('%Y-%m-%d %H:%M:%S')
            stoput = s4dn[-1].strftime('%Y-%m-%d %H:%M:%S')

            # Create plots
            dn = datetime.date(int(year), 1,
                               1) + datetime.timedelta(days=doy - 1)
            s4fname = '{:s}_{:s}H.png'.format(name, dn.strftime('%y%m%d'))
            cases.plot_s4summary(txprn, txdn, el, az, txsystem, s4prn, s4dn,
                                 s4, s4system, pngpath + s4fname)

            # Write the logfile
            fid = open(log_fname, 'w')

            fid.writelines(
                'Site,Instrument,StartUTTime,StopUTTime,SummaryImage,MovieFile\n'
            )
            fid.writelines('{:d},{:d},{:s},{:s},{:s}_{:s}H.png'.format(
                site_id, inst_id, startut, stoput, name,
                dn.strftime('%y%m%d')))
            fid.close()

            # Load the log file into the database
            #os.popen(log_cmd)
            if do_DB:
                database([s4fname], pngpath, startut, stoput, [inst_id],
                         site_id)

            # Move the data
            os.popen('mv {:s}dataout*_{:s}_{:03d}.bin .'.format(
                datapath, year, doy))
            tar = tarfile.open(
                '{:s}_{:s}.tgz'.format(name, dn.strftime('%y%m%d')), 'w:gz')

            tar.add('dataout_{:s}_{:03d}.bin'.format(year, doy))

            if os.path.exists('dataoutiq_{:s}_{:03d}.bin'.format(year, doy)):
                tar.add('dataoutiq_{:s}_{:03d}.bin'.format(year, doy))

            tar.close()

            # Clean up files
            #os.popen('tar czvf {:s}.tgz dataout*_{:s}_{:03d}.bin'.format(dn.strftime('%y%m%d'), year, DOY))
            os.popen('rm dataout*_{:s}_{:03d}.bin'.format(year, doy))
            #os.popen('mv {:s}.tgz {:s}{:s}'.format(dn.strftime('%y%m%d'),data_dir,year))
            os.popen('rm channel.log')
            os.popen('rm iono.log')
            os.popen('rm navsol.log')
            os.popen('rm scint.log')
            os.popen('rm txinfo.log')

        except:
            subject = "!!! Processing Error (\'" + inst + '\',' + year + ',' + str(
                doy) + ') @ ' + site
            print subject
            Emailer.emailerror(subject, 'Cases problem')

    # Send Error if Neither
    else:
        subject = "!!! GPS Problem " + date
        print subject
        Emailer.emailerror(subject, 'Something weird has happened: ' + inst)

    os.chdir('/rdata/airglow/rx/')
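The header comment spells out the calling convention: an instrument code plus a year and day of year, with do_DB controlling the database step. A hypothetical driver, assuming the function lives in the GPSprocess module that Examples #21 and #24 reference; the instrument code and date are placeholders:

# Hypothetical one-off driver for process_instr; the values below are illustrative only.
import GPSprocess

# Process Scintmon unit 01 for 2014, day-of-year 36, skipping the airglow SQL database.
GPSprocess.process_instr('scintmo01', 2014, 36, do_DB=False)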
Example #19
                # Remove older files
                for f in files[:-1]:
                    os.system('rm -f ' + f)
                if 'x3t' == instr:
                    # Ignore missing x3t warnings so leave loop
                    break
                # Append checksums not yet sorted
                os.chdir(rx)
                files = files + glob(name+'*.txt')
                files.sort()

                # Send email if no files exist
                if files == []:
                    subject = "!!! No data received:" + name
                    print subject
                    Emailer.emailerror(emails,subject,'There are no checkfiles for this site/instrument.\nIs it active? Did you set up the scripts properly?')

                else:
                    # Check file stats
                    try:
                        info = open(tracking+files[-1], 'r')
                    except:
                        info = open(rx+files[-1], 'r')
                    info.readline()
                    info.readline()
                    sz = float(info.readline())
                    time = dt.datetime.strptime(info.readline()[:19],'%Y-%m-%d %H:%M:%S')
                    df = float(info.readline())
                    info.close()

                    # Send email if file is too old
Example #20
    wrkSpcData = dataUtil.getFMWs(scheduledRepoName)
    notScheduled = schedsEval.compareRepositorySchedule(wrkSpcData)
    unschedFMWsStr = emailReporter.getUnsheduledRepoFMWsStr(notScheduled,
                                                            scheduledRepoName)
    dataCache.setString(unschedFMWsStr)
    logger.info(f'unschedFMWsStr: {unschedFMWsStr}')

    # schedules that reference data on the E: drive
    embedData = schedsEval.getEmbeddedData()
    embedDataEmailStr = emailReporter.getEmbeddedDataEmailStr(embedData)
    dataCache.setString(embedDataEmailStr)
    logger.info(f'embedDataEmailStr: {embedDataEmailStr}')

    # now non prod or non OTHR replications
    nonProd = schedsEval.getNonProdSchedules()
    nonProdEmailStr = emailReporter.getNonProdSchedulesEmailStr(nonProd)
    dataCache.setString(nonProdEmailStr)
    logger.info(f'nonProd: {nonProd}')

    # get destinations with 0 records
    nonProd = schedsEval.getAllBCGWDestinations()
    db = DBEvaluation.DBScheduleQueries(nonProd)
    schedsZeroRecords = db.getZeroRecordDestinations()
    zeroRecords = emailReporter.getZeroRecordsSchedule(schedsZeroRecords)
    dataCache.setString(zeroRecords)
    logger.info(f'zeroRecords: {zeroRecords}')

    # now send the email
    emailer = Emailer.EmailCoorindator(dataCache)
    emailer.sendEmail()
Example #21
def sorter(san, pgm):
    '''
    Summary
    -------
        sorter(san,core):
        program that runs through all rx data for one site to sort it.

    Inputs
    ------
        san = site abbreviated name (e.g. uao)
        pgm = program running (for pid checking)

    History
    -------
        2/13/14 -- Written by DJF ([email protected])
        6/11/15 -- Modified for site-by-site multicore
    '''

    # The location of programs (needed since running in crontab)
    dir_local = '/rdata/airglow/'
    #dir_script = '/usr/local/share/airglowrsss/Python/Programs/'
    #python = '/usr/local/python/'
    dir_share = '/rdata/airglow/share/'

    # Close Program if already running (just in case...)
    pid = str(os.getpid())
    pidfile = "/tmp/Sorter_%s.pid" % pgm
    if os.path.isfile("/tmp/Sorterall.pid"):
        print "Sorterall exists, must finish before calling new batch"
        sys.exit()
    elif os.path.isfile(pidfile):
        print "%s already exists, exiting" % pidfile
        sys.exit()
    else:
        file(pidfile, 'w').write(pid)

    # Start Write to Log File
    print "\n!!!!!!!!!!!!!!!!!!!!"
    print '!!! BEGIN TIMESTAMP:', dt.datetime.now()

    # Load instrument dictionary
    code = instrumentcode()
    # Set order so bwc & x3t process first
    ids = ['Cloud', 'TempL']
    ids.extend(code.keys())

    # TRY YOUR HARDEST
    try:
        # Get Data in RX folder
        os.chdir(dir_local + 'rx/')
        #os.system('chmod 774 *') No longer have permissions, tx sends as 774.
        # Get info files for non-standard (Zip->Send->Sort) data
        rxfiles = ["fpi04_kaf", "cas01_hka"]
        for x in rxfiles:
            for files in glob(x + '*'):
                makeinfo(files)
        # Go through txt files to Sort data
        for i in ids:
            # do everything if all, else limit to a site.
            if pgm == 'all':
                search = '*.txt'
            else:
                search = '*' + san + '*.txt'
            for f in glob(i + search):
                # Get information for assembling & sorting this file
                name = f[0:18]  # name         = IIIII_SSS_YYYYMMDD
                instrument = f[0:5].lower()  # instrument   = IIIII
                instr = f[0:3].lower()  # instrument   = III__
                inum = f[3:5]  # instrument # = ___II
                site = f[6:9].lower()  # site         = SSS
                # FOR OLDER FILES THAT DID DOY, ELSE STANDARD DAY
                if f[17] in ['.']:
                    date = f[10:17]  # date         = YYYYDDD
                    dates = f[12:17]  # dates        = YYDDD
                    year = int(f[10:14])  # year         = YYYY
                    doy = int(f[14:17])  # doy          = DDD
                    dn = dt.datetime(year, 1, 1) + dt.timedelta(days=doy - 1)
                    month = dn.timetuple().tm_mon
                    day = dn.timetuple().tm_mday
                else:
                    date = f[10:18]  # date         = YYYYMMDD
                    dates = f[12:18]  # dates        = YYMMDD
                    year = int(f[10:14])  # year         = YYYY
                    month = int(f[14:16])  # month        = MM
                    day = int(f[16:18])  # day          = DD
                    dn = dt.datetime(year, month, day)
                    doy = dn.timetuple().tm_yday
                print "\n!!! For", name
                # Fix inum for Letters
                if inum[1].isalpha():
                    inum = inum[1]
                if not (inum[0].isalpha()):
                    emails = activeinstruments()[site][instr][inum]['email']

                ##### TEMPLOG CASE: #####
                if instrument in ['cloud', 'templ']:
                    ### Part 1: Sorting Data
                    print "!!! Begin Sorting..."
                    # Create fake checksum for tracker
                    checkname = code[instrument] + '00' + f[5:]
                    os.system('cp ' + f + ' ' + checkname)
                    makeinfo(checkname)
                    os.rename(checkname, 'tracking/' + checkname)
                    # Move data into directory
                    dir_data = dir_local + 'templogs/' + code[
                        code[instrument]] + '/' + site + '/'
                    os.rename(f, dir_data + f)
                    os.system('chmod 744 ' + dir_data + f)
                    #os.system('chown airglow.airglow ' + dir_data + r)
                    print "!!! Success Sorting"

                elif instr in ['bwc', 'x3t']:
                    ### Send Error if checkfile
                    print "!!! Begin Sorting..."
                    sortinghat([], f)

                ##### FPI CASE: #####
                elif instr in 'fpi':
                    ### Part 1: Sorting Data
                    dir_data = dir_local + 'fpi/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    result = sortinghat(dir_data, f)
                    if result:
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.fpi ' + dir_data + r)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')
                        print "!!! Success Sorting"

                        ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        # Get correct doy from files
                        for r in result:
                            if r[-4:] in '.img':
                                ldn = FPI.ReadIMG(dir_data +
                                                  r).info['LocalTime']
                                if ldn.hour < 12:
                                    ldn -= dt.timedelta(days=1)
                                doy = ldn.timetuple().tm_yday
                                year = ldn.year
                                break
                        # Run processing script for site
                        try:
                            warning = FPIprocess.process_instr(
                                code[instr] + inum, year, doy)
                            if warning:
                                subject = "!!! Manually inspect (\'" + code[
                                    instr] + inum + '\',' + str(
                                        year) + ',' + str(doy) + ') @ ' + site
                                print subject
                                Emailer.emailerror(emails, subject, warning)
                        except:
                            subject = "!!! Processing error (\'" + code[
                                instr] + inum + '\',' + str(year) + ',' + str(
                                    doy) + ') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject,
                                               traceback.format_exc())
                        # Run CV processing for project
                        # ?????
                        print "!!! End Processing"

                ##### GPS CASE: #####
                elif instr in ['tec', 'scn']:
                    ## Part 1: Sorting Data
                    dir_data = dir_local + 'gps/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    # if SCN - Send data to raw folder
                    if instr == 'scn':
                        dir_data = dir_data + '/raw_data/'
                        try:
                            os.makedirs(dir_data)
                            os.system('chmod 755 ' + dir_data)
                        except OSError:
                            print '!!! Raw Folder Exists... moving on'
                    result = sortinghat(dir_data, f)
                    if result:
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.gps ' + dir_data + r)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')

                        print "!!! Success Sorting"

                        ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        # Run processing script for site
                        try:
                            GPSprocess.process_instr(code[instr] + inum, year,
                                                     doy)
                        except:
                            subject = "!!! Processing error (\'" + code[
                                instr] + inum + '\',' + str(year) + ',' + str(
                                    doy) + ') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject,
                                               traceback.format_exc())
                        print "!!! End Processing"

                ##### CASES CASE: #####
                elif instr in ['cas']:
                    ### Part 1: "Sort Data"
                    # Make sure folder exists
                    dir_data = dir_local + 'gps/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    try:
                        os.makedirs(dir_data)
                        os.system('chmod 755 ' + dir_data)
                    except OSError:
                        print '!!! Raw Folder Exists... moving on'
                    # Move info file to tracking
                    os.system('mv ' + f + ' ./tracking')
                    # Remove files from rx (it was a duplicate)
                    os.system('rm -f ' + name + '*')
                    print "!!! Success Sorting"

                    ### Part 2: Processing Data
                    print "!!! Begin Processing..."
                    # Run processing script for site
                    try:
                        GPSprocess.process_instr(code[instr] + inum, year, doy)
                    except:
                        subject = "!!! Processing error (\'" + code[
                            instr] + inum + '\',' + str(year) + ',' + str(
                                doy) + ') @ ' + site
                        print subject
                        Emailer.emailerror(emails, subject,
                                           traceback.format_exc())

                ##### IMAGER CASE: #####
                elif instr in ['asi', 'nfi', 'pic', 'sky', 'swe']:
                    ### Part 1: Sorting Data
                    dir_data = dir_local + 'imaging/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    result = sortinghat(dir_data, f)
                    if result:
                        if asiinfo.get_site_info(site)['share']:
                            # Check that share folder for copy exists
                            dir_copy = dir_share + site + '/' + str(
                                year) + '/' + str(doy) + '/'
                            try:
                                os.makedirs(dir_copy)
                                os.system('chmod 755 ' + dir_copy)
                                print "!!! Share Folder Created"
                            except OSError:
                                print '!!! Share Folder Exists... moving on'
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.imaging ' + dir_data + r)
                            #os.system('mv ' + dir_data + r + ' ' + dir_data + str(doy) + '/.')
                            # Copy files if needed
                            if asiinfo.get_site_info(site)['share']:
                                os.system('cp -r ' + dir_data + r + ' ' +
                                          dir_copy)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')
                        print "!!! Success Sorting"

                        ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        ## Get correct doy from files
                        #for r in result:
                        #    if r[-4:] in '.tif':
                        #        ldn = Image.open(dir_data+r).info['UniversalTime'] # Local is standard, but ASI is JJM's timechoice
                        #        if ldn.hour < 12:
                        #            ldn -= dt.timedelta(days = 1)
                        #        doy = ldn.timetuple().tm_yday
                        #        year = ldn.year
                        #        break
                        # Run processing script for site
                        # TODO: Mimic FPIprocess warnings
                        msg = ASIprocess.process_instr(code[instr] + inum,
                                                       year, doy)
                        if msg:
                            subject = "!!! Processing Issue (\'" + code[
                                instr] + inum + '\',' + str(year) + ',' + str(
                                    doy) + ') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject, msg)
                        print "!!! End Processing"

                ##### BAD INSTR CATCH #####
                else:
                    emails = activeinstruments()['ADMIN']['email']
                    subject = "!!! Badly named files: " + name
                    print subject
                    Emailer.emailerror(emails, subject,
                                       'Name is not real instrument...')

    except:
        emails = activeinstruments()['ADMIN']['email']
        subject = "!!! Something is wrong..."
        print subject
        Emailer.emailerror(emails, subject + f, traceback.format_exc())

    finally:
        print "\n!!! Unpack Complete!"
        os.unlink(pidfile)
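The docstring documents sorter(san, pgm) but not how it is launched from cron. A hedged sketch of an entry point, inferred from the docstring and the pgm == 'all' branch above; the wrapper itself is not part of the source module:

# Illustrative entry point only -- not taken from the source module.
if __name__ == '__main__':
    import sys
    if len(sys.argv) > 1:
        sorter(sys.argv[1], sys.argv[1])   # e.g. `python Sorter.py uao` sorts a single site
    else:
        sorter('', 'all')                  # sort every site's files in the rx folder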
Example #22
                for f in files[:-1]:
                    os.system('rm -f ' + f)
                if 'x3t' == instr:
                    # Ignore missing x3t warnings so leave loop
                    break
                # Append checksums not yet sorted
                os.chdir(rx)
                files = files + glob(name + '*.txt')
                files.sort()

                # Send email if no files exist
                if files == []:
                    subject = "!!! No data received:" + name
                    print subject
                    Emailer.emailerror(
                        emails, subject,
                        'There are no checkfiles for this site/instrument.\nIs it active? Did you set up the scripts properly?'
                    )

                else:
                    # Check file stats
                    try:
                        info = open(tracking + files[-1], 'r')
                    except:
                        info = open(rx + files[-1], 'r')
                    info.readline()
                    info.readline()
                    sz = float(info.readline())
                    time = dt.datetime.strptime(info.readline()[:19],
                                                '%Y-%m-%d %H:%M:%S')
                    df = float(info.readline())
                    info.close()
Example #23
times = htmlResults[2]
locs = htmlResults[3]
timesold = times

times = Formatter.formatTimes(times)


sdates = {'M':'26','T':'20','W':'21','R':'22','F':'23'}

# The following code correlates each class to the time slots created by formatTimes.
# Essentially, somewhere upstream the two meeting times for a class get split into separate
# "time slots" rather than being kept together, so this block simply maps each class back to
# its respective slots (see the worked illustration after this example).
snames = {}

for i in range(len(timesold)):
    snames[names[i]] = len(timesold[i])

dnames = {}

k = 0
for j in range(7):
    for i in range(snames[names[j]]):
        dnames[k] = names[j]
        k += 1
#Back to normal code

iCalCreation.iCalWrite(times, yearmonth, endyearmonthdaytime, sdates, dnames, filename)
ical = open('OliniCalendar.ics', 'r')
Emailer.iCalCreator(email, ical)
ical.close()

os.remove('OliniCalendar.ics')
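The comment above describes mapping each class back to its time slots. A tiny worked illustration of what snames and dnames end up holding, using invented class names and times (the real data comes from formatTimes):

# Invented data, purely to illustrate the snames/dnames mapping built above.
names = ['ENGR2410', 'AHSE1500']
timesold = [['Mon 10:00', 'Thu 10:00'], ['Tue 13:30']]

snames = {name: len(slots) for name, slots in zip(names, timesold)}
# snames == {'ENGR2410': 2, 'AHSE1500': 1}

dnames, k = {}, 0
for name in names:
    for _ in range(snames[name]):
        dnames[k] = name
        k += 1
# dnames == {0: 'ENGR2410', 1: 'ENGR2410', 2: 'AHSE1500'}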
Example #24
def sorter(san, pgm):
    '''
    Summary
    -------
        sorter(san,core):
        program that runs through all rx data for one site to sort it.

    Inputs
    ------
        san = site abbreviated name (e.g. uao)
        pgm = program running (for pid checking)

    History
    -------
        2/13/14 -- Written by DJF ([email protected])
        6/11/15 -- Modified for site-by-site multicore
    '''

    # Path location of files sent by stations
    dir_rx_local = '/home/tx/rx/'
    # The location of programs (needed since running in crontab)
    dir_local = '/rdata/airglow/'
    #dir_script = '/usr/local/share/airglowrsss/Python/Programs/'
    #python = '/usr/local/python/'
    dir_share = '/rdata/airglow/share/'

    # Close Program if already running (just in case...)
    pid = str(os.getpid())
    pidfile = "/tmp/Sorter_%s.pid" % pgm
    if os.path.isfile("/tmp/Sorterall.pid"):
        print "Sorterall exists, must finish before calling new batch"
        sys.exit()
    elif os.path.isfile(pidfile):
        print "%s already exists, exiting" % pidfile
        sys.exit()
    else:
        file(pidfile, 'w').write(pid)

    # Start Write to Log File
    print "\n!!!!!!!!!!!!!!!!!!!!"
    print '!!! BEGIN TIMESTAMP:', dt.datetime.now()

    # Load instrument dictionary
    code = instrumentcode()
    # Set order so bwc & x3t process first
    ids = ['Cloud', 'TempL']
    ids.extend(code.keys())

    # TRY YOUR HARDEST
    try:
        # Get Data in RX folder
        #os.chdir(dir_local+'rx/')
        os.chdir(dir_rx_local)
        print("Working directory: %s" % (os.getcwd()))
        #os.system('chmod 774 *') No longer have permissions, tx sends as 774.
        # Get info files for non-standard (Zip->Send->Sort) data
        rxfiles = ["fpi04_kaf", "cas01_hka"]
        for x in rxfiles:
            for files in glob(x + '*'):
                makeinfo(files)
        # Go through txt files to Sort data
        for i in ids:
            # do everything if all, else limit to a site.
            if pgm == 'all':
                search = '*.txt'
            else:
                search = '*' + san + '*.txt'
            print('Searching %s files for %s using %s' %
                  (str(i), str(pgm), search))
            for f in glob(i + search):
                #Searching for IIIII_san_YYYYMMDD/YYYYDOY.txt
                # Get information for assembling & sorting this file
                name = os.path.splitext(f)[0]
                instrument, site, date = name.lower().split('_')
                instrument = instrument[:5]
                instr = f[0:3].lower()  # instrument   = III__
                inum = f[3:5]  # instrument # = ___II
                try:
                    dn = dt.datetime.strptime(date, "%Y%m%d")
                except:
                    dn = dt.datetime.strptime(date, "%Y%j")
                year = dn.year
                month = dn.month
                day = dn.day
                doy = dn.timetuple().tm_yday
                dates = date[2:]
                # FOR OLDER FILES THAT DID DOY, ELSE STANDARD DAY
                #if f[17] in ['.']:
                #    date = f[10:17]             # date         = YYYYDDD
                #    dates = f[12:17]            # dates        = YYDDD
                #    year = int(f[10:14])        # year         = YYYY
                #    doy = int(f[14:17])         # doy          = DDD
                #    dn = dt.datetime(year,1,1)+dt.timedelta(days = doy-1)
                #    month = dn.timetuple().tm_mon
                #    day = dn.timetuple().tm_mday
                #else:
                #    date = f[10:18]             # date         = YYYYMMDD
                #    dates = f[12:18]            # dates        = YYMMDD
                #    year = int(f[10:14])        # year         = YYYY
                #    month = int(f[14:16])       # month        = MM
                #    day = int(f[16:18])         # day          = DD
                #    dn = dt.datetime(year,month,day)
                #    doy = dn.timetuple().tm_yday
                print "\n!!! For", name
                # Fix inum for Letters
                if inum[1].isalpha():
                    inum = inum[1]
                if not (inum[0].isalpha()):
                    emails = activeinstruments()[site][instr][inum]['email']

                ##### TEMPLOG CASE: #####
                if instrument in ['cloud', 'templ']:
                    ### Part 1: Sorting Data
                    print "!!! Begin Sorting..."
                    # Create fake checksum for tracker
                    checkname = code[instrument] + '00' + f[5:]
                    os.system('cp ' + f + ' ' + checkname)
                    makeinfo(checkname)
                    os.rename(checkname, 'tracking/' + checkname)
                    # Move data into directory
                    dir_data = dir_local + 'templogs/' + code[
                        code[instrument]] + '/' + site + '/'
                    if os.path.exists(dir_data + f):
                        os.remove(dir_data + f)
                    shutil.copy(f, dir_data + f)
                    os.remove(f)
                    #os.rename(f, dir_data + f) # does not work with rclone as it is different filesystem
                    os.system('chmod 744 ' + dir_data + f)
                    #os.system('chown airglow.airglow ' + dir_data + r)
                    print "!!! Success Sorting"

                elif instr in ['bwc', 'x3t']:
                    ### Send Error if checkfile
                    print "!!! Begin Sorting..."
                    sortinghat([], f)

                ##### FPI CASE: #####
                elif instr in 'fpi':
                    ### Part 1: Sorting Data
                    dir_data = dir_local + 'fpi/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    result = sortinghat(dir_data, f)
                    if result:
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.fpi ' + dir_data + r)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')
                        print "!!! Success Sorting"
                        #continue
                        ####Part 2: Processing Data
                        print "!!! Begin Processing..."
                        # Get correct doy from files
                        for r in result:
                            if r[-4:] in '.img':
                                # Find solar local time and subtract 12 hours. That's the definition of date that we use for FPIs.
                                # This is the only way to ensure that no matter what location and time zone, all files
                                # from a night refer to the same date.
                                ldn = FPI.ReadIMG(dir_data +
                                                  r).info['LocalTime']
                                site_info = fpiinfo.get_site_info(site, ldn)
                                utdn = ldn.replace(tzinfo=pytz.timezone(
                                    site_info['Timezone'])).astimezone(
                                        pytz.utc).replace(tzinfo=None)
                                site_lon = np.mod(
                                    site_info['Location'][1] + 180, 360) - 180
                                sltdn = utdn + dt.timedelta(hours=24 *
                                                            site_lon / 360.)
                                dn0 = sltdn - dt.timedelta(
                                    hours=12
                                )  # No matter what time of night, and what location, this will be during the same date
                                doy = dn0.timetuple().tm_yday
                                year = dn0.year
                                break
                        # Run processing script for site
                        process_kwargs={'reference':'laser',\
                                        'send_to_website':True,\
                                        'enable_share':False,\
                                        'send_to_madrigal':True,\
                                        'sky_line_tag':'X',\
                                        'fpi_dir':'/rdata/airglow/fpi/',\
                                        'bw_dir':'/rdata/airglow/templogs/cloudsensor/',\
                                        'x300_dir':'/rdata/airglow/templogs/x300/',\
                                        'results_stub':'/rdata/airglow/fpi/results/'}
                        try:
                            warning = FPIprocess.process_instr(
                                code[instr] + inum, year, doy,
                                **process_kwargs)
                            if warning:
                                subject = "!!! Manually inspect (\'" + code[
                                    instr] + inum + '\',' + str(
                                        year) + ',' + str(doy) + ') @ ' + site
                                print subject
                                traceback.print_exc()
                                Emailer.emailerror(emails, subject, warning)
                        except:
                            subject = "!!! Processing error (\'" + code[
                                instr] + inum + '\',' + str(year) + ',' + str(
                                    doy) + ') @ ' + site
                            print subject
                            traceback.print_exc()
                            Emailer.emailerror(emails, subject,
                                               traceback.format_exc())

                        print "!!! End Processing"

                        # Run green line if existing
                        if len(
                            [r for r in result if 'XG' in os.path.basename(r)
                             ]) > 0:
                            process_kwargs['sky_line_tag'] = 'XG'
                            try:
                                warning = FPIprocess.process_instr(
                                    code[instr] + inum, year, doy,
                                    **process_kwargs)
                                if warning:
                                    subject = "!!! Manually inspect (\'" + code[
                                        instr] + inum + '\',' + str(
                                            year) + ',' + str(
                                                doy) + ') @ ' + site
                                    print subject
                                    traceback.print_exc()
                                    Emailer.emailerror(emails, subject,
                                                       warning)
                            except:
                                subject = "!!! Processing error (\'" + code[
                                    instr] + inum + '\',' + str(
                                        year) + ',' + str(doy) + ') @ ' + site
                                print subject
                                traceback.print_exc()

                ##### GPS CASE: #####
                elif instr in ['tec', 'scn']:
                    ## Part 1: Sorting Data
                    dir_data = dir_local + 'gps/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    # if SCN - Send data to raw folder
                    if instr == 'scn':
                        dir_data = dir_data + '/raw_data/'
                        try:
                            os.makedirs(dir_data)
                            os.system('chmod 755 ' + dir_data)
                        except OSError:
                            print '!!! Raw Folder Exists... moving on'
                    result = sortinghat(dir_data, f)
                    if result:
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.gps ' + dir_data + r)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')

                        print "!!! Success Sorting"

                        ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        # Run processing script for site
                        try:
                            GPSprocess.process_instr(code[instr] + inum, year,
                                                     doy)
                        except:
                            subject = "!!! Processing error (\'" + code[
                                instr] + inum + '\',' + str(year) + ',' + str(
                                    doy) + ') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject,
                                               traceback.format_exc())
                        print "!!! End Processing"

                ##### CASES CASE: #####
                elif instr in ['cas']:
                    ### Part 1: "Sort Data"
                    # Make sure folder exists
                    dir_data = dir_local + 'gps/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    try:
                        os.makedirs(dir_data)
                        os.system('chmod 755 ' + dir_data)
                    except OSError:
                        print '!!! Raw Folder Exists... moving on'
                    # Move info file to tracking
                    os.system('mv ' + f + ' ./tracking')
                    # Remove files from rx (it was a duplicate)
                    os.system('rm -f ' + name + '*')
                    print "!!! Success Sorting"

                    ### Part 2: Processing Data
                    print "!!! Begin Processing..."
                    # Run processing script for site
                    try:
                        GPSprocess.process_instr(code[instr] + inum, year, doy)
                    except:
                        subject = "!!! Processing error (\'" + code[
                            instr] + inum + '\',' + str(year) + ',' + str(
                                doy) + ') @ ' + site
                        print subject
                        Emailer.emailerror(emails, subject,
                                           traceback.format_exc())

                ##### IMAGER CASE: #####
                elif instr in ['asi', 'nfi', 'pic', 'sky', 'swe']:
                    ### Part 1: Sorting Data
                    dir_data = dir_local + 'imaging/' + code[
                        instr] + inum + '/' + site + '/' + str(year) + '/'
                    result = sortinghat(dir_data, f)
                    if result:
                        if asiinfo.get_site_info(site)['share']:
                            # Check that share folder for copy exists
                            dir_copy = dir_share + site + '/' + str(
                                year) + '/' + str(doy) + '/'
                            try:
                                os.makedirs(dir_copy)
                                os.system('chmod 755 ' + dir_copy)
                                print "!!! Share Folder Created"
                            except OSError:
                                print '!!! Share Folder Exists... moving on'
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.imaging ' + dir_data + r)
                            #os.system('mv ' + dir_data + r + ' ' + dir_data + str(doy) + '/.')
                            # Copy files if needed
                            if asiinfo.get_site_info(site)['share']:
                                os.system('cp -r ' + dir_data + r + ' ' +
                                          dir_copy)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')
                        print "!!! Success Sorting"

                        ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        ## Get correct doy from files
                        #for r in result:
                        #    if r[-4:] in '.tif':
                        #        ldn = Image.open(dir_data+r).info['UniversalTime'] # Local is standard, but ASI is JJM's timechoice
                        #        if ldn.hour < 12:
                        #            ldn -= dt.timedelta(days = 1)
                        #        doy = ldn.timetuple().tm_yday
                        #        year = ldn.year
                        #        break
                        # Run processing script for site
                        # TODO: Mimic FPIprocess warnings
                        msg = ASIprocess.process_instr(code[instr] + inum,
                                                       year, doy)
                        if msg:
                            subject = "!!! Processing Issue (\'" + code[
                                instr] + inum + '\',' + str(year) + ',' + str(
                                    doy) + ') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject, msg)
                        print "!!! End Processing"

                ##### BAD INSTR CATCH #####
                else:
                    emails = activeinstruments()['ADMIN']['email']
                    subject = "!!! Badly named files: " + name
                    print subject
                    Emailer.emailerror(emails, subject,
                                       'Name is not real instrument...')

    except Exception as e:
        emails = activeinstruments()['ADMIN']['email']
        subject = "!!! Something is wrong..."
        print subject
        print "Sending emails to Admin..."
        print traceback.format_exc()
        Emailer.emailerror(emails, subject + f, traceback.format_exc())

    finally:
        print "\n!!! Script Complete!"
        os.unlink(pidfile)
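
Emailer.emailerror(emails, subject, body) is called throughout these examples but never defined in them. A minimal sketch of what such a helper might look like, assuming a plain-text message and a local SMTP relay (the sender address and exact signature are assumptions, not taken from the original source):

import smtplib
from email.mime.text import MIMEText

def emailerror(emails, subject, body, sender='sorter@localhost'):
    # Hedged sketch only: build a plain-text message and hand it to an SMTP
    # relay on localhost; 'emails' is a list of recipient addresses.
    msg = MIMEText(body)
    msg['Subject'] = subject
    msg['From'] = sender
    msg['To'] = ', '.join(emails)
    server = smtplib.SMTP('localhost')
    server.sendmail(sender, emails, msg.as_string())
    server.quit()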
Example #25
0
def sorter(san,pgm):
    '''
    Summary
    -------
        sorter(san,core):
        program that runs through all rx data for one site to sort it.

    Inputs
    ------
        san = site abbreviated name (e.g. uao)
        pgm = program running (for pid checking)

    History
    -------
        2/13/14 -- Written by DJF ([email protected])
        6/11/15 -- Modified for site-by-site multicore
    '''
    
    # The location of programs (needed since running in crontab)
    dir_local = '/rdata/airglow/'
    #dir_script = '/usr/local/share/airglowrsss/Python/Programs/'
    #python = '/usr/local/python/'
    dir_share = '/rdata/airglow/share/'
    
    # Close Program if already running (just in case...)
    pid = str(os.getpid())
    pidfile = "/tmp/Sorter_%s.pid"%pgm
    if os.path.isfile("/tmp/Sorterall.pid"):
        print "Sorterall exists, must finish before calling new batch"
        sys.exit()
    elif os.path.isfile(pidfile):
        print "%s already exists, exiting" % pidfile
        sys.exit()
    else:
        file(pidfile, 'w').write(pid)
        
    # Start Write to Log File
    print "\n!!!!!!!!!!!!!!!!!!!!"
    print '!!! BEGIN TIMESTAMP:',dt.datetime.now()

    # Load instrument dictionary
    code = instrumentcode()
    # Set order so bwc & x3t process first
    ids = ['Cloud','TempL']
    ids.extend(code.keys())
   
    
    # TRY YOUR HARDEST
    try:
        # Get Data in RX folder
        os.chdir(dir_local+'rx/')
        #os.system('chmod 774 *') No longer have permissions, tx sends as 774.
        # Get info files for non-standard (Zip->Send->Sort) data
        rxfiles = ["fpi04_kaf","cas01_hka"]
        for x in rxfiles:
            for files in glob(x + '*'):
                makeinfo(files)
        # Go through txt files to Sort data
        for i in ids:
            # do everything if all, else limit to a site.
            if pgm == 'all':
                search = '*.txt'
            else:
                search = '*'+san+'*.txt'
            for f in glob(i + search):
                # Get information for assembling & sorting this file
                name = f[0:18]              # name         = IIIII_SSS_YYYYMMDD
                instrument = f[0:5].lower() # instrument   = IIIII
                instr = f[0:3].lower()      # instrument   = III__
                inum = f[3:5]               # instrument # = ___II
                site = f[6:9].lower()       # site         = SSS
                # FOR OLDER FILES THAT DID DOY, ELSE STANDARD DAY
                if f[17] in ['.']:
                    date = f[10:17]             # date         = YYYYDDD
                    dates = f[12:17]            # dates        = YYDDD
                    year = int(f[10:14])        # year         = YYYY
                    doy = int(f[14:17])         # doy          = DDD
                    dn = dt.datetime(year,1,1)+dt.timedelta(days = doy-1)
                    month = dn.timetuple().tm_mon
                    day = dn.timetuple().tm_mday
                else:
                    date = f[10:18]             # date         = YYYYMMDD
                    dates = f[12:18]            # dates        = YYMMDD
                    year = int(f[10:14])        # year         = YYYY
                    month = int(f[14:16])       # month        = MM
                    day = int(f[16:18])         # day          = DD
                    dn = dt.datetime(year,month,day)
                    doy = dn.timetuple().tm_yday
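                # Worked example (hypothetical filename): f = 'fpi05_uao_20140213.txt'
                #   name = 'fpi05_uao_20140213', instrument = 'fpi05',
                #   instr = 'fpi', inum = '05', site = 'uao'
                #   f[17] is '3' (not '.'), so the YYYYMMDD branch runs:
                #   year = 2014, month = 2, day = 13, doy = 44
                # An older DOY-style name such as 'fpi05_uao_2014044.txt' has
                # f[17] == '.', so year = 2014 and doy = 44 are read directly.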
                print "\n!!! For", name
                # Fix inum for Letters
                if inum[1].isalpha():
                    inum = inum[1]
                if not(inum[0].isalpha()):
                    emails = activeinstruments()[site][instr][inum]['email']


                ##### TEMPLOG CASE: #####
                if instrument in ['cloud', 'templ']:
                    ### Part 1: Sorting Data
                    print "!!! Begin Sorting..."
                    # Create fake checksum for tracker
                    checkname = code[instrument]+'00'+f[5:]
                    os.system('cp '+f+' '+checkname)
                    makeinfo(checkname)
                    os.rename(checkname,'tracking/'+checkname)
                    # Move data into directory
                    dir_data = dir_local + 'templogs/' + code[code[instrument]] + '/' + site + '/'
                    os.rename(f, dir_data + f)
                    os.system('chmod 744 ' + dir_data + f)
                    #os.system('chown airglow.airglow ' + dir_data + r)
                    print "!!! Success Sorting"
                    
                elif instr in ['bwc', 'x3t']:
                    ### Send Error if checkfile
                    print "!!! Begin Sorting..."
                    sortinghat([],f)


                ##### FPI CASE: #####
                elif instr in 'fpi':
                    ### Part 1: Sorting Data
                    dir_data = dir_local + 'fpi/' + code[instr] + inum + '/' + site + '/' + str(year) + '/'
                    result = sortinghat(dir_data,f)
                    if result:
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.fpi ' + dir_data + r)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')
                        print "!!! Success Sorting"
                        
                    ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        # Get correct doy from files
                        for r in result:
                            if r[-4:] in '.img':
                                # Find solar local time and subtract 12 hours. That's the definition of date that we use for FPIs.
                                # This is the only way to ensure that no matter what location and time zone, all files
                                # from a night refer to the same date.
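                                # For example (hypothetical numbers): an image taken at
                                # 02:00 local time on 14 Feb 2014 at a site near longitude
                                # -88 deg (UTC-6 in winter) gives utdn = 08:00 UT, a solar
                                # local time near 02:08, and dn0 near 14:08 on 13 Feb, so
                                # the night is filed under year = 2014, doy = 44.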
                                ldn = FPI.ReadIMG(dir_data+r).info['LocalTime']
                                site_info = fpiinfo.get_site_info(site, ldn)
                                utdn = ldn.replace(tzinfo=pytz.timezone(site_info['Timezone'])).astimezone(pytz.utc).replace(tzinfo=None)
                                site_lon = np.mod(site_info['Location'][1]+180,360)-180
                                sltdn = utdn + dt.timedelta(hours = 24*site_lon/360.)
                                dn0 = sltdn - dt.timedelta(hours=12) # No matter what time of night, and what location, this will be during the same date
                                doy = dn0.timetuple().tm_yday
                                year = dn0.year
                                break
                        # Run processing script for site
                        try:
                            warning = FPIprocess.process_instr(code[instr] + inum,year,doy)
                            if warning:
                                subject = "!!! Manually inspect (\'" + code[instr]+inum+'\','+str(year)+','+str(doy)+') @ ' + site
                                print subject
                                Emailer.emailerror(emails, subject, warning)
                        except:
                            subject = "!!! Processing error (\'" + code[instr]+inum+'\','+str(year)+','+str(doy)+') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject, traceback.format_exc())
                        # Run CV processing for project
                        # ?????
                        print "!!! End Processing"


                ##### GPS CASE: #####
                elif instr in ['tec','scn']:
                    ## Part 1: Sorting Data
                    dir_data = dir_local + 'gps/' + code[instr] + inum + '/' + site + '/' + str(year) + '/'
                    # if SCN - Send data to raw folder
                    if instr == 'scn':
                        dir_data = dir_data + '/raw_data/'
                        try:
                            os.makedirs(dir_data)
                            os.system('chmod 755 ' + dir_data)
                        except OSError:
                            print '!!! Raw Folder Exists... moving on'
                    result = sortinghat(dir_data,f)
                    if result:
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.gps ' + dir_data + r)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')

                        print "!!! Success Sorting"
                        
                    ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        # Run processing script for site
                        try:
                            GPSprocess.process_instr(code[instr] + inum,year,doy)
                        except:
                            subject = "!!! Processing error (\'" + code[instr]+inum+'\','+str(year)+','+str(doy)+') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject, traceback.format_exc())
                        print "!!! End Processing"


                ##### CASES CASE: #####
                elif instr in ['cas']:
                    ### Part 1: "Sort Data"
                    # Make sure folder exists
                    dir_data = dir_local + 'gps/'+code[instr] + inum + '/' + site + '/' + str(year) + '/'
                    try:
                        os.makedirs(dir_data)
                        os.system('chmod 755 ' + dir_data)
                    except OSError:
                        print '!!! Raw Folder Exists... moving on'
                    # Move info file to tracking
                    os.system('mv ' + f + ' ./tracking')
                    # Remove files from rx (it was a duplicate)
                    os.system('rm -f ' + name + '*')
                    print "!!! Success Sorting"

                    ### Part 2: Processing Data
                    print "!!! Begin Processing..."
                    # Run processing script for site
                    try:
                        GPSprocess.process_instr(code[instr] + inum,year,doy)
                    except:
                        subject = "!!! Processing error (\'" + code[instr]+inum+'\','+str(year)+','+str(doy)+') @ ' + site
                        print subject
                        Emailer.emailerror(emails, subject, traceback.format_exc())                       
                        
                
                ##### IMAGER CASE: #####
                elif instr in ['asi','nfi','pic','sky','swe']:
                    ### Part 1: Sorting Data
                    dir_data = dir_local + 'imaging/' + code[instr] + inum + '/' + site + '/' + str(year) + '/'
                    result = sortinghat(dir_data,f)
                    if result:
                        if asiinfo.get_site_info(site)['share']:
                            # Check that share folder for copy exists
                            dir_copy = dir_share + site + '/' + str(year) + '/' + str(doy) + '/'
                            try:
                                os.makedirs(dir_copy)
                                os.system('chmod 755 ' + dir_copy)
                                print "!!! Share Folder Created"
                            except OSError:
                                print '!!! Share Folder Exists... moving on'
                        # CHMOD all added files
                        for r in result:
                            os.system('chmod u+rwx,go+rX,go-w ' + dir_data + r)
                            os.system('chown airglow.imaging ' + dir_data + r)
                            #os.system('mv ' + dir_data + r + ' ' + dir_data + str(doy) + '/.')
                            # Copy files if needed
                            if asiinfo.get_site_info(site)['share']:
                                os.system('cp -r ' + dir_data + r + ' ' + dir_copy)
                        # Remove files from rx
                        os.system('rm -f ' + name + '*')
                        print "!!! Success Sorting"
                        
                    ### Part 2: Processing Data
                        print "!!! Begin Processing..."
                        ## Get correct doy from files
                        #for r in result:
                        #    if r[-4:] in '.tif':
                        #        ldn = Image.open(dir_data+r).info['UniversalTime'] # Local is standard, but ASI is JJM's timechoice
                        #        if ldn.hour < 12:
                        #            ldn -= dt.timedelta(days = 1)
                        #        doy = ldn.timetuple().tm_yday
                        #        year = ldn.year
                        #        break
                        # Run processing script for site
                        # TODO: Mimic FPIprocess warnings
                        msg = ASIprocess.process_instr(code[instr] + inum,year,doy)
                        if msg:
                            subject = "!!! Processing Issue (\'" + code[instr]+inum+'\','+str(year)+','+str(doy)+') @ ' + site
                            print subject
                            Emailer.emailerror(emails, subject, msg)
                        print "!!! End Processing"
                        
                        
                ##### BAD INSTR CATCH #####
                else:
                    emails = activeinstruments()['ADMIN']['email'] 
                    subject = "!!! Badly named files: " + name
                    print subject
                    Emailer.emailerror(emails, subject, 'Name is not real instrument...')

    except:
        emails = activeinstruments()['ADMIN']['email'] 
        subject = "!!! Something is wrong..."
        print subject
        Emailer.emailerror(emails, subject+f, traceback.format_exc())
        
    finally:
        print "\n!!! Unpack Complete!"
        os.unlink(pidfile)
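
A hedged sketch of how sorter might be invoked (the command-line handling below is an assumption, not part of the original source):

if __name__ == '__main__':
    import sys
    # Hypothetical driver: pass a site code to sort one site, or 'all' for everything.
    pgm = sys.argv[1] if len(sys.argv) > 1 else 'all'
    if pgm == 'all':
        sorter('', 'all')      # 'san' is ignored when pgm == 'all'
    else:
        sorter(pgm, pgm)       # e.g. sorter('uao', 'uao') sorts only *uao* files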
Example #26
0
def sortinghat(dir_data,f):
    '''
    Summary
    -------
        result = sortinghat(dir_data,f,code):
        Concatinates/Unzips and moves data to folder. Returns errors and filenames.

    Inputs
    ------
        dir_data = directory that data will be placed in
        f = glob'd info file that contains name, parts, and size

    Outputs
    -------
        result = names - successful sort, else - error

    History
    -------
        7/18/13 -- Written by DJF ([email protected])
    '''
    
    result = 0
    code = instrumentcode()
    # Read info file
    info = open(f, 'r')
    zelda = info.readline().rstrip().split('.tar.gz',1)[0]+'.tar.gz'
    parts = int(info.readline())
    tsize = int(info.readline())
    time = dt.datetime.strptime(info.readline()[:19],'%Y-%m-%d %H:%M:%S')
    info.close()

    # Parse Name
    site = f[6:9]
    instr= f[0:3]
    inum = f[3:5]
    year = f[12:14]
    mon  = f[14:16]
    day  = f[16:18]
    emails = activeinstruments()[site][instr][inum]['email']

    # Check all parts present
    print 'Parts:',len(glob(zelda + '*')),'/',parts
    ## Case 1 - no data created last night
    if(tsize ==0 and parts == 0):
        ## Emails Warning that system is down!
        print '!!! No Data Collected'
        if instr in ['asi','nfi','pic','sky','swe']:
            msg = "%s%s down at %s!\nIs it a full moon?\nInternet & Sortinghat are working, is it an instrument/PC issue." %(code[instr],inum,site)
        else:
            msg = "%s%s down at %s!\nInternet & Sortinghat are working, this is an instrument/PC issue." %(code[instr],inum,site)
        subject = "!!! No data collected on %02s-%02s-%02s" %(mon,day,year)
        Emailer.emailerror(emails,subject,msg)
        # Move info file to tracking folder
        os.system('mv ' + f + ' ./tracking')
    ## Case 2 - all parts sent over in rx
    elif len(glob(zelda + '*')) == parts:
        # Check that folder to data exists
        try:
            os.makedirs(dir_data)
            os.system('chmod u+rwx,go+rX,go-w ' +dir_data)
            print "!!! Folder Created - Verify..."
        except OSError:
            print '!!! Folders Exist... moving on'
        # Concatinate the split files
        oscar = glob(zelda+'*')
        oscar.sort()
        print "\n".join(str(x) for x in oscar)
        os.system('cat ' + zelda + '* > temp.tar.gz')
        os.system('chmod 770 temp.tar.gz')
        # Check that size is correct
        statinfo = os.stat("temp.tar.gz")
        print 'Sizes:',statinfo.st_size,'/',tsize
        if statinfo.st_size == tsize:
            # Untar the gunzip files 
            tar = tarfile.open("temp.tar.gz","r:gz")
            try:
                result = tar.getnames()
                tar.extractall(dir_data)
                os.system('mv ' + f + ' ./tracking')
            except:
                age = (dt.datetime.utcnow()-time).total_seconds()/3600.0
                subject = "!!! Extract Error on %02s-%02s-%02s" %(mon,day,year)
                print subject
                msg = "%s%s issue at %s!\nThis file will not untar.\nBad Zip? Try -p %i" %(code[instr],inum,site,age/24)
                Emailer.emailerror(emails,subject,msg)
                result = []
            tar.close()
        else:
            print '!!! Waiting for complete parts...'
    ## Case 3 - all parts not yet sent
    else:
        print '!!! Waiting for parts...'
    os.system('rm -f temp.tar.gz')
    return(result)
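
The info file that sortinghat reads has four lines: the tarball name, the number of parts, the total size in bytes, and a UTC timestamp. A hedged sketch of writing one on the sending side (the helper name and example filename are hypothetical):

import datetime as dt

def write_info(info_name, tar_name, parts, tsize):
    # Hypothetical sender-side helper matching the four readline() calls above.
    info = open(info_name, 'w')
    info.write(tar_name + '\n')       # e.g. 'fpi05_uao_20140213.tar.gz'
    info.write(str(parts) + '\n')     # number of split parts transmitted
    info.write(str(tsize) + '\n')     # size of the full tarball in bytes
    info.write(dt.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + '\n')
    info.close()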
Example #27
0
def sortinghat(dir_data, f):
    '''
    Summary
    -------
        result = sortinghat(dir_data,f,code):
        Concatinates/Unzips and moves data to folder. Returns errors and filenames.

    Inputs
    ------
        dir_data = directory that data will be placed in
        f = glob'd info file that contains name, parts, and size

    Outputs
    -------
        result = names - successful sort, else - error

    History
    -------
        7/18/13 -- Written by DJF ([email protected])
    '''

    result = 0
    code = instrumentcode()
    # Read info file
    info = open(f, 'r')
    zelda = info.readline().rstrip().split('.tar.gz', 1)[0] + '.tar.gz'
    parts = int(info.readline())
    tsize = int(info.readline())
    time = dt.datetime.strptime(info.readline()[:19], '%Y-%m-%d %H:%M:%S')
    info.close()

    # Parse Name
    site = f[6:9]
    instr = f[0:3]
    inum = f[3:5]
    year = f[12:14]
    mon = f[14:16]
    day = f[16:18]
    emails = activeinstruments()[site][instr][inum]['email']

    # Check all parts present
    print 'Parts:', len(glob(zelda + '*')), '/', parts
    ## Case 1 - no data created last night
    if (tsize == 0 and parts == 0):
        ## Emails Warning that system is down!
        print '!!! No Data Collected'
        if instr in ['asi', 'nfi', 'pic', 'sky', 'swe']:
            msg = "%s%s down at %s!\nIs it a full moon?\nInternet & Sortinghat are working, is it an instrument/PC issue." % (
                code[instr], inum, site)
        else:
            msg = "%s%s down at %s!\nInternet & Sortinghat are working, this is an instrument/PC issue." % (
                code[instr], inum, site)
        subject = "!!! No data collected on %02s-%02s-%02s" % (mon, day, year)
        Emailer.emailerror(emails, subject, msg)
        # Move info file to tracking folder
        os.system('mv ' + f + ' ./tracking')
    ## Case 2 - all parts sent over in rx
    elif len(glob(zelda + '*')) == parts:
        # Check that folder to data exists
        try:
            os.makedirs(dir_data)
            os.system('chmod u+rwx,go+rX,go-w ' + dir_data)
            print "!!! Folder Created - Verify..."
        except OSError:
            print '!!! Folders Exist... moving on'
        # Concatinate the split files
        oscar = glob(zelda + '*')
        oscar.sort()
        print "\n".join(str(x) for x in oscar)
        os.system('cat ' + zelda + '* > temp.tar.gz')
        os.system('chmod 770 temp.tar.gz')
        # Check that size is correct
        statinfo = os.stat("temp.tar.gz")
        print 'Sizes:', statinfo.st_size, '/', tsize
        if statinfo.st_size == tsize:
            # Untar the gunzip files
            tar = tarfile.open("temp.tar.gz", "r:gz")
            try:
                result = tar.getnames()
                tar.extractall(dir_data)
                os.system('mv ' + f + ' ./tracking')
            except:
                age = (dt.datetime.utcnow() - time).total_seconds() / 3600.0
                subject = "!!! Extract Error on %02s-%02s-%02s" % (mon, day,
                                                                   year)
                print subject
                msg = "%s%s issue at %s!\nThis file will not untar.\nBad Zip? Try -p %i" % (
                    code[instr], inum, site, age / 24)
                Emailer.emailerror(emails, subject, msg)
                result = []
            tar.close()
        else:
            print '!!! Waiting for complete parts...'
    ## Case 3 - all parts not yet sent
    else:
        print '!!! Waiting for parts...'
    os.system('rm -f temp.tar.gz')
    return (result)
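
For context, the parts that Case 2 reassembles with cat could be produced on the sending side roughly like this (the helper name, chunk size, and use of GNU split are assumptions):

import os
from glob import glob

def split_night(tar_name, part_bytes=50 * 1024 * 1024):
    # Hypothetical sender-side step: split the tarball into numbered parts
    # ('<tar_name>.00', '<tar_name>.01', ...) that 'cat <tar_name>* > temp.tar.gz'
    # can later reassemble in order.
    os.system('split -b %d -d %s %s.' % (part_bytes, tar_name, tar_name))
    parts = len(glob(tar_name + '.*'))
    tsize = os.stat(tar_name).st_size
    return parts, tsize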