def startACEExtract(event, start, stop, comp_test = 'NA'):

    """
    for a given event and its start/stop times, kick off the ACE data extraction
    input:  event     --- event name
            start     --- starting time (format: yyyy:mm:dd:hh:mm)
            stop      --- stopping time (format: yyyy:mm:dd:hh:mm)
            comp_test --- if 'test', the test data directories are used
    """
#
#--- dateFormatCon needs a seconds field; pad both time strings with ':00'
#--- and convert them into year / ydate (float) form
#
    (byear, bmon, bday, bhh, bmm, bss, bydate) = tcnv.dateFormatCon(start + ':00')
    (eyear, emon, eday, ehh, emm, ess, eydate) = tcnv.dateFormatCon(stop  + ':00')
#
#--- extract ACE data: pass the interruption starting/ending time in (year, ydate)
#
    createRadDataTable(event, byear, bydate, eyear, eydate, comp_test)
def startACEPlot(event, start, stop, comp_test = 'NA'):

    """
    for a given event and its start/stop times, initiate the ACE plotting
    input:  event     --- event name
            start     --- starting time (format: yyyy:mm:dd:hh:mm)
            stop      --- stopping time (format: yyyy:mm:dd:hh:mm)
            comp_test --- if 'test', the test data directories are used
    """
#
#--- dateFormatCon needs a seconds field; pad both time strings with ':00'
#--- and convert them into year / ydate (float) form
#
    (byear, bmon, bday, bhh, bmm, bss, bydate) = tcnv.dateFormatCon(start + ':00')
    (eyear, emon, eday, ehh, emm, ess, eydate) = tcnv.dateFormatCon(stop  + ':00')
#
#--- plot ACE data over the interruption period
#
    aceDataPlot(event, byear, bydate, eyear, eydate, comp_test)
def startACEExtract(event, start, stop, comp_test='NA'):

    """
    set up the time range for an event and start the ACE data extraction
    input:  event     --- event name
            start     --- starting time (format: yyyy:mm:dd:hh:mm)
            stop      --- stopping time (format: yyyy:mm:dd:hh:mm)
            comp_test --- if 'test', the test data directories are used
    """
#
#--- append the seconds field required by dateFormatCon
#
    begin = '%s:00' % start
    end   = '%s:00' % stop
#
#--- convert into tuples; index 0 holds the year, index 6 the ydate (float)
#
    start_info = tcnv.dateFormatCon(begin)
    stop_info  = tcnv.dateFormatCon(end)
#
#--- extract ACE data for the interruption period in (year, ydate) form
#
    createRadDataTable(event, start_info[0], start_info[6], stop_info[0], stop_info[6], comp_test)
def startACEPlot(event, start, stop, comp_test='NA'):

    """
    set up the time range for an event and start the ACE plotting
    input:  event     --- event name
            start     --- starting time (format: yyyy:mm:dd:hh:mm)
            stop      --- stopping time (format: yyyy:mm:dd:hh:mm)
            comp_test --- if 'test', the test data directories are used
    """
#
#--- append the seconds field required by dateFormatCon
#
    begin = '%s:00' % start
    end   = '%s:00' % stop
#
#--- convert into tuples; index 0 holds the year, index 6 the ydate (float)
#
    start_info = tcnv.dateFormatCon(begin)
    stop_info  = tcnv.dateFormatCon(end)
#
#--- plot ACE data for the interruption period in (year, ydate) form
#
    aceDataPlot(event, start_info[0], start_info[6], stop_info[0], stop_info[6], comp_test)
def computeGOESStat(event, startTime, comp_test ='NA'):

    """
    read the GOES data file of a given event and compute channel statistics
    input:  event     --- event name
            startTime --- interruption starting time (format: yyyy:mm:dd:hh:mm)
            comp_test --- if 'test', read from / write to the test directories
    output: <stat_dir>/<event>_goes_stat
    """
    begin = startTime + ':00'            #---- modify the format to work with dateFormatCon

    (year, month, day, hours, minutes, seconds, interruptTime) = tcnv.dateFormatCon(begin)
#
#--- choose the input data file (test or live)
#
    if comp_test == 'test':
        file = test_data_dir + event + '_goes.txt'
    else:
        file = data_dir + event + '_goes.txt'

    f    = open(file, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
#
#--- accumulators per channel (p1 / p2 / p5): running sum, running sum of
#--- squares, extrema with their times, and the value seen at the interruption
#
    p1Avg      = 0
    p1Avg2     = 0
    p1Max      = -1e5
    p1Min      = 1e8
    p1MaxTime  = 0
    p1MinTime  = 0
    p1IntValue = 0

    p2Avg      = 0
    p2Avg2     = 0
    p2Max      = -1e5
    p2Min      = 1e8
    p2MaxTime  = 0
    p2MinTime  = 0
    p2IntValue = 0

    p5Avg      = 0
    p5Avg2     = 0
    p5Max      = -1e5
    p5Min      = 1e8
    p5MaxTime  = 0
    p5MinTime  = 0
    p5IntValue = 0

    p1cnt = 0
    p2cnt = 0
    p5cnt = 0

    ind = 0             #---- indicator whether the loop passed the interruption time

    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        btemp = re.split('\.', atemp[0])
#
#--- data lines start with a numeric (float) time value; skip header lines
#
        if ent and btemp[0].isdigit():
            atemp = re.split('\s+|\t+', ent)
            val0 = float(atemp[0])          #--- time
            val1 = float(atemp[1])          #--- p1
            val2 = float(atemp[2])          #--- p2
            val3 = float(atemp[3])          #--- p5

            if val1 > 0:                    #--- 0 could mean that there is no data; so we ignore it
                p1Avg  += val1
                p1Avg2 += val1 * val1
                if val1 > p1Max:
                    p1Max     = val1
                    p1MaxTime = val0
                elif val1 < p1Min:
                    p1Min     = val1
                    p1MinTime = val0
                p1cnt += 1

            if val2 > 0:
                p2Avg  += val2
                p2Avg2 += val2 * val2
                if val2 > p2Max:
                    p2Max     = val2
                    p2MaxTime = val0
                elif val2 < p2Min:
                    p2Min     = val2
                    p2MinTime = val0
                p2cnt += 1

            if val3 > 0:
                p5Avg  += val3
                p5Avg2 += val3 * val3
                if val3 > p5Max:
                    p5Max     = val3
                    p5MaxTime = val0
                elif val3 < p5Min:
                    p5Min     = val3
                    p5MinTime = val0
                p5cnt += 1
#
#--- finding the value at the interruption (first data time at/after it)
#
            if interruptTime <= val0 and ind == 0:
                p1IntValue = val1
                p2IntValue = val2
                p5IntValue = val3
                ind = 1
#
#--- compute averages (only when every channel has at least one sample)
#
    if p1cnt > 0 and p2cnt > 0 and p5cnt > 0:
        p1Avg /= p1cnt
        p2Avg /= p2cnt
        p5Avg /= p5cnt
#
#--- compute standard deviation: sqrt(E[x^2] - E[x]^2)
#--- NOTE(review): these divisions are unguarded -- if any channel has no
#--- samples (cnt == 0) this raises ZeroDivisionError; the later version of
#--- this function wraps them in try/except. Confirm whether empty input occurs.
#
    p1Sig = math.sqrt(p1Avg2 / p1cnt - p1Avg * p1Avg)
    p2Sig = math.sqrt(p2Avg2 / p2cnt - p2Avg * p2Avg)
    p5Sig = math.sqrt(p5Avg2 / p5cnt - p5Avg * p5Avg)
#
#--- choose the output file (test or live) and write the statistics table
#
    if comp_test == 'test':
        file = test_stat_dir + event + '_goes_stat'
    else:
        file = stat_dir + event + '_goes_stat'

    f = open(file, 'w')
    f.write('\t\tAvg\t\t\tMax\t\tTime\t\tMin\t\tTime\t\tValue at Interruption Started\n')
    f.write('--------------------------------------------------------------------------------------------------------------------------\n')

    line = 'p1\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n' % (p1Avg, p1Sig, p1Max, p1MaxTime, p1Min, p1MinTime, p1IntValue)
    f.write(line)
    line = 'p2\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n' % (p2Avg, p2Sig, p2Max, p2MaxTime, p2Min, p2MinTime, p2IntValue)
    f.write(line)
    line = 'p5\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n' % (p5Avg, p5Sig, p5Max, p5MaxTime, p5Min, p5MinTime, p5IntValue)
    f.write(line)
    f.close()
def computeEphinStat(event, startTime, comp_test='NA'):

    """
    read data from ephin data, and compute statistics
    input:  event     --- name of the event
            startTime --- start time (format: yyyy:mm:dd:hh:mm)
            comp_test --- if 'test', read from / write to the test directories
    output: <stat_dir>/<event>_ephin_stat
    fixes:  the 'e150'/'p41' row label was computed but never appended to the
            output line (result of the concatenation was discarded); averages
            and standard deviations are now guarded against empty channels
    """
    begin = startTime + ':00'           #---- modify the format to work with dateFormatCon

    (year, month, day, hours, minutes, seconds, interruptTime) = tcnv.dateFormatCon(begin)
#
#--- choose the input data file (test or live)
#
    if comp_test == 'test':
        file = test_data_dir + event + '_eph.txt'
    else:
        file = data_dir + event + '_eph.txt'

    f    = open(file, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
#
#--- accumulators per channel: running sum, running sum of squares,
#--- extrema with their times, and the value seen at the interruption
#
    hrcAvg        = 0
    hrcAvg2       = 0
    hrcMax        = -1e5
    hrcMin        = 1e8
    hrcMaxTime    = 0
    hrcMinTime    = 0
    hrcIntValue   = 0

    e150Avg       = 0
    e150Avg2      = 0
    e150Max       = -1e5
    e150Min       = 1e8
    e150MaxTime   = 0
    e150MinTime   = 0
    e150IntValue  = 0

    e1300Avg      = 0
    e1300Avg2     = 0
    e1300Max      = -1e5
    e1300Min      = 1e8
    e1300MaxTime  = 0
    e1300MinTime  = 0
    e1300IntValue = 0

    hcnt  = 0
    e1cnt = 0
    e2cnt = 0
    ind   = 0           #---- indicator whether the loop passed the interruption time
    dataset = 0         #---- 1 when the file holds the new (hrc, e150, e1300) columns

    for ent in data:
        m1 = re.search('Interruption', ent)
        m2 = re.search('dofy', ent)
        m3 = re.search('----', ent)
        m4 = re.search('hrc', ent)
#
#--- checking new (hrc, e150, e1300) or old (p4, p41, e1300) dataset
#
        if m4 is not None:
            dataset = 1
#
#--- skip header / separator lines
#
        if ent and m1 == None and m2 == None and m3 == None:
            atemp = re.split('\s+|\t+', ent)
            if len(atemp) < 4:
                continue

            val0 = float(atemp[0])          #--- time (day of year)
            val1 = float(atemp[1])          #--- hrc (new) / p4 (old)
            val2 = float(atemp[2])          #--- e150 (new) / p41 (old)
            val3 = float(atemp[3])          #--- e1300

            if val1 > 0:                    #--- 0 could mean that there is no data; so we ignore it
                hrcAvg  += val1
                hrcAvg2 += val1 * val1
                if val1 > hrcMax:
                    hrcMax     = val1
                    hrcMaxTime = val0
                elif val1 < hrcMin:
                    hrcMin     = val1
                    hrcMinTime = val0
                hcnt += 1

            if val2 > 0:
                e150Avg  += val2
                e150Avg2 += val2 * val2
                if val2 > e150Max:
                    e150Max     = val2
                    e150MaxTime = val0
                elif val2 < e150Min:
                    e150Min     = val2
                    e150MinTime = val0
                e1cnt += 1

            if val3 > 0:
                e1300Avg  += val3
                e1300Avg2 += val3 * val3
                if val3 > e1300Max:
                    e1300Max     = val3
                    e1300MaxTime = val0
                elif val3 < e1300Min:
                    e1300Min     = val3
                    e1300MinTime = val0
                e2cnt += 1
#
#--- finding the value at the interruption (first data time at/after it)
#
            if interruptTime <= val0 and ind == 0:
                hrcIntValue   = val1
                e150IntValue  = val2
                e1300IntValue = val3
                ind = 1
#
#--- compute averages; guarded so an input without valid samples in a
#--- channel does not raise ZeroDivisionError
#
    if hcnt > 0:
        hrcAvg /= hcnt
    if e1cnt > 0:
        e150Avg /= e1cnt
    if e2cnt > 0:
        e1300Avg /= e2cnt
#
#--- compute standard deviations: sqrt(E[x^2] - E[x]^2)
#--- -999 indicates the value could not be computed (no samples, or a
#--- slightly negative variance from floating point rounding)
#
    try:
        hrcSig = math.sqrt(hrcAvg2 / hcnt - hrcAvg * hrcAvg)
    except (ZeroDivisionError, ValueError):
        hrcSig = -999
    try:
        e150Sig = math.sqrt(e150Avg2 / e1cnt - e150Avg * e150Avg)
    except (ZeroDivisionError, ValueError):
        e150Sig = -999
    try:
        e1300Sig = math.sqrt(e1300Avg2 / e2cnt - e1300Avg * e1300Avg)
    except (ZeroDivisionError, ValueError):
        e1300Sig = -999
#
#--- build the output table; the first column label depends on the dataset
#
    line = '\t\tAvg\t\t\tMax\t\tTime\t\tMin\t\tTime\t\tValue at Interruption Started\n'
    line = line + '-' * 95 + '\n'

    if dataset == 1:
        line = line + 'hrc\t'
    else:
        line = line + 'p4\t'

    if hrcIntValue > 0:
        line = line + '%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n'\
                    % (hrcAvg, hrcSig, hrcMax, hrcMaxTime, hrcMin, hrcMinTime, hrcIntValue)
    else:
        line = line + '%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\tNA\n'\
                    % (hrcAvg, hrcSig, hrcMax, hrcMaxTime, hrcMin, hrcMinTime)
#
#--- the e150 (new) / p41 (old) row is reported only for events before 2014
#--- (bug fix: the row label concatenation was discarded: 'line + ...' -> 'line = line + ...')
#
    if year < 2014:
        if dataset == 1:
            line = line + 'e150\t'
        else:
            line = line + 'p41\t'
        line = line + '%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n'\
                    % (e150Avg, e150Sig, e150Max, e150MaxTime, e150Min, e150MinTime, e150IntValue)

    line = line + 'e1300\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n'\
                % (e1300Avg, e1300Sig, e1300Max, e1300MaxTime, e1300Min, e1300MinTime, e1300IntValue)
#
#--- choose the output file (test or live) and write the result
#
    if comp_test == 'test':
        file = test_stat_dir + event + '_ephin_stat'
    else:
        file = stat_dir + event + '_ephin_stat'

    f = open(file, 'w')
    f.write(line)
    f.close()
def ephinDataExtract(event, start, stop, comp_test='NA'):

    """
    extract EPHIN related quantities and creates a data table
    input:  event --- event name (e.g. 20170911)
            start --- starting time (format: 2012:03:13:22:41)
            stop  --- stopping time (format: 2012:03:13:22:41)
    output: <data_dir>/<event>_eph.txt
    """
    begin = start + ':00'               #---- to use dateFormatCon correctly, need to add "sec" part
    end   = stop  + ':00'
#
#--- convert time format to year / ydate (float)
#
    (year1, month1, day1, hours1, minutes1, seconds1, ydate1) = tcnv.dateFormatCon(begin)
    (year2, month2, day2, hours2, minutes2, seconds2, ydate2) = tcnv.dateFormatCon(end)
#
#--- change time format and find data collecting period (starts 2 days before the interruption
#--- and ends at least 5 days after the stating)
#
    (pYearStart, periodStart, pYearStop, periodStop, plotYearStart, plotStart,\
     plotYearStop, plotStop, pannelNum) = itrf.findCollectingPeriod(year1, ydate1, year2, ydate2)
#
#--- read ephin data using arc5gl; files are dropped into ./Working_dir/
#
    ephinList = itrf.useArc5gl('retrieve', 'flight', 'ephin', 1, 'ephrates', pYearStart,\
                               periodStart, pYearStop, periodStop, deposit='./Working_dir/')
#
#--- extract needed data from each fits file
#--- pre-2011 files carry (scp4, scp41); later ones (sce150); sce1300 always
#
    xdate = []
    p4    = []
    p41   = []
    e150  = []
    e1300 = []
    ecnt  = 0

    for fits in ephinList:
        mc = re.search('fits.gz', fits)
        if mc is None:
            continue
#
#--- read the fits table and convert its time column into day of year
#
        infile = './Working_dir/' + fits
        [tcols, tbdata] = itrf.read_fits_file(infile)

        etime = list(tbdata.field('time'))
        xdate = xdate + convert_to_ydate(etime, pYearStart)

        if pYearStart < 2011:
            p4  = p4  + list(tbdata.field('scp4'))
            p41 = p41 + list(tbdata.field('scp41'))
        else:
            e150 = e150 + list(tbdata.field('sce150'))

        e1300 = e1300 + list(tbdata.field('sce1300'))
        ecnt  = len(e1300)

    os.system('rm ./Working_dir/*fits*')
#
#--- using DataSeeker, extract HRC shield rate (only if year > 2011)
#
    if pYearStart >= 2011:
        [stime, veto] = itrf.useDataSeeker(pYearStart, periodStart, pYearStop, periodStop, 'shevart')
#
#--- convert time format into day of year
#
        time = []
        for ent in stime:
            ttime = tcnv.convertCtimeToYdate(float(ent))
            temp  = re.split(':', ttime)
            year  = int(temp[0])
            dofy  = float(temp[1]) + float(temp[2]) / 24 + float(temp[3]) / 1440 + float(temp[4]) / 86400
            time.append(dofy)

        hcnt = len(time)
#
#--- matching timing between electron data and hrc data:
#--- hrc[i] will hold the veto rate sampled nearest to the i-th electron time
#
        hrc = len(e150) * [0]
        j   = 0
        k   = 0

        if isLeapYear(pYearStart) == 1:
            base = 366
        else:
            base = 365
#
#--- find the begining: advance j (hrc index) or k (electron index) until
#--- the two time series start overlapping; abort when they never do
#
        if time[0] < xdate[0]:
            while time[j] < xdate[0]:
                j += 1
                if j >= hcnt:
                    print "Time span does not overlap. Abort the process."
                    exit(1)
        elif time[0] > xdate[0]:
            while time[0] > xdate[k]:
                k += 1
                if k >= ecnt:
                    print "Time span does not overlap. Abort the process."
                    exit(1)

        hrc[k] = veto[j]

        tspace = 1.38888888888e-3 / base        #--- setting timing bin size: base is given in hrc loop

        for i in range(k + 1, ecnt):
            tbeg = xdate[i] - tspace
            tend = xdate[i] + tspace
            if j > hcnt - 2:
#
#--- if the hrc data runs out, just repeat the last data point value
#
                hrc[i] = veto[hcnt - 1]
            elif time[j] >= tbeg and time[j] <= tend:
                hrc[i] = veto[j]
            elif time[j] < tbeg:
                while time[j] < tbeg:
                    j += 1
                hrc[i] = veto[j]
            elif time[j] > tend:
                while time[j] > tend:
                    j -= 1
                hrc[i] = veto[j]
#
#--- print out data; column set depends on the data era
#
    line = 'Science Run Interruption: ' + start + '\n\n'

    if pYearStart < 2011:
        line = line + 'dofy\t\tp4\t\t\tp41\t\t\te1300\n'
        line = line + '-------------------------------------------------------------------\n'
        for m in range(0, ecnt):
            line = line + '%4.3f\t\t%4.3e\t%4.3e\t%4.3e\n'\
                        % (float(xdate[m]), float(p4[m]), float(p41[m]), float(e1300[m]))
    else:
        line = line + 'dofy\t\thrc\t\te150\t\te1300\n'
        line = line + '-------------------------------------------------------------------\n'
        for m in range(0, ecnt):
            line = line + '%4.3f\t\t%4.3e\t%4.3e\t%4.3e\n'\
                        % (float(xdate[m]), float(hrc[m]), float(e150[m]), float(e1300[m]))

    if comp_test == 'test':
        file = test_data_dir + event + '_eph.txt'
    else:
        file = data_dir + event + '_eph.txt'

    f = open(file, 'w')
    f.write(line)
    f.close()
def useArc4gl(operation, dataset, detector, level, filetype, startYear=0, startYdate=0, stopYear=0, stopYdate=0, deposit='./', filename='NA'):

    """
    extract data using arc4gl.
    input:  operation --- e.g. retrieve or browse
            dataset   --- e.g. flight
            detector  --- e.g. hrc
            level     --- e.g. 0, 1, 2
            filetype  --- e.g. evt1
            startYear / startYdate --- start time (year, day of year)
            stopYear  / stopYdate  --- stop time  (year, day of year)
            deposit   --- directory into which the extracted files are placed
            filename  --- a specific file name; when given, overrides the time range
    output: extracted (and gunzipped) files under <deposit>
            return the list of the file name.
    """
#
#--- read a couple of information needed for arc4gl (credential files)
#
    line = bindata_dir + '.dare'
    f    = open(line, 'r')
    dare = f.readline().strip()
    f.close()

    line   = bindata_dir + '.hakama'
    f      = open(line, 'r')
    hakama = f.readline().strip()
    f.close()
#
#--- use arc4gl to extract data; build zero-padded date strings only when a
#--- real start time was given (startYear defaults to 0 when filename is used)
#
    if startYear > 1000:
        (year1, month1, day1, hours1, minute1, second1, ydate1) = tcnv.dateFormatCon(startYear, startYdate)
        (year2, month2, day2, hours2, minute2, second2, ydate2) = tcnv.dateFormatCon(stopYear, stopYdate)

        stringYear1 = str(year1)
        stringYear2 = str(year2)

        stringMonth1 = str(month1)
        if month1 < 10:
            stringMonth1 = '0' + stringMonth1
        stringMonth2 = str(month2)
        if month2 < 10:
            stringMonth2 = '0' + stringMonth2

        stringDay1 = str(day1)
        if day1 < 10:
            stringDay1 = '0' + stringDay1
        stringDay2 = str(day2)
        if day2 < 10:
            stringDay2 = '0' + stringDay2

        stringHour1 = str(hours1)
        if hours1 < 10:
            stringHour1 = '0' + stringHour1
        stringHour2 = str(hours2)
        if hours2 < 10:
            stringHour2 = '0' + stringHour2

        stringMinute1 = str(minute1)
        if minute1 < 10:
            stringMinute1 = '0' + stringMinute1
        stringMinute2 = str(minute2)
        if minute2 < 10:
            stringMinute2 = '0' + stringMinute2
#
#--- arc4gl time format: mm/dd/yy,hh:mm:ss (two-digit year)
#
        stringYear = stringYear1[2] + stringYear1[3]
        arc_start  = stringMonth1 + '/' + stringDay1 + '/' + stringYear + ',' + stringHour1 + ':' + stringMinute1 + ':00'
        stringYear = stringYear2[2] + stringYear2[3]
        arc_stop   = stringMonth2 + '/' + stringDay2 + '/' + stringYear + ',' + stringHour2 + ':' + stringMinute2 + ':00'
#
#--- write the arc4gl command file
#
    f = open('./arc_file', 'w')
    line = 'operation=' + operation + '\n'
    f.write(line)
    line = 'dataset=' + dataset + '\n'
    f.write(line)
    line = 'detector=' + detector + '\n'
    f.write(line)
    line = 'level=' + str(level) + '\n'
    f.write(line)
    line = 'filetype=' + filetype + '\n'
    f.write(line)
    if filename != 'NA':
        line = 'filename=' + filename + '\n'
        f.write(line)
    else:
        f.write('tstart=')
        f.write(arc_start)
        f.write('\n')
        f.write('tstop=')
        f.write(arc_stop)
        f.write('\n')
    f.write('go\n')
    f.close()
#
#--- for the command is to retrieve: extract data and return the list of the files extracted
#
    if operation == 'retrieve':
        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file'
        os.system(cmd)
        cmd = 'rm ./arc_file'
        os.system(cmd)
#
#--- move the extracted file, if depository is specified
#
        if deposit != './':
            cmd = 'mv *.gz ' + deposit + '.'
            os.system(cmd)

        xxx = os.listdir(deposit)           # NOTE(review): result unused -- debugging leftover?

        cleanedData = []
#
#--- gunzip each extracted file individually
#
        for fout in os.listdir(deposit):
            if fnmatch.fnmatch(fout, '*gz'):
#                cmd = 'gzip -d ' + deposit + '/*gz'
                cmd = 'gzip -d ' + deposit + fout
                os.system(cmd)
#
#--- run arc4gl one more time (browse mode) to read the file names
#
        f = open('./arc_file', 'w')
        line = 'operation=browse\n'
        f.write(line)
        line = 'dataset=' + dataset + '\n'
        f.write(line)
        line = 'detector=' + detector + '\n'
        f.write(line)
        line = 'level=' + str(level) + '\n'
        f.write(line)
        line = 'filetype=' + filetype + '\n'
        f.write(line)
        if filename != 'NA':
            line = 'filename=' + filename + '\n'
            f.write(line)
        else:
            f.write('tstart=')
            f.write(arc_start)
            f.write('\n')
            f.write('tstop=')
            f.write(arc_stop)
            f.write('\n')
        f.write('go\n')
        f.close()

        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file > file_list'
        os.system(cmd)

        f = open('./file_list', 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./arc_file ./file_list')
#
#--- extract fits file names and drop everything else
#
        for ent in data:
            m = re.search('fits', ent)
            if m is not None:
                atemp = re.split('\s+|\t+', ent)
                cleanedData.append(atemp[0])
        return cleanedData
#
#--- for the command is to browse: return the list of fits file names
#
    else:
        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file > file_list'
        os.system(cmd)

        f = open('./file_list', 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./arc_file ./file_list')
#
#--- extract fits file names and drop everything else
#
        cleanedData = []
        for ent in data:
            m = re.search('fits', ent)
            if m is not None:
                atemp = re.split('\s+|\t+', ent)
                cleanedData.append(atemp[0])
        return cleanedData
def useArc4gl(operation, dataset, detector, level, filetype, startYear=0, startYdate=0, stopYear=0, stopYdate=0, deposit='./', filename='NA'):

    """
    extract data using arc4gl.
    input:  operation --- e.g. retrieve or browse
            dataset   --- e.g. flight
            detector  --- e.g. ephin
            level     --- e.g. 0, 1, 2
            filetype  --- e.g. ephrates
            startYear / startYdate --- start time (year, day of year)
            stopYear  / stopYdate  --- stop time  (year, day of year)
            deposit   --- directory into which extracted files are moved
            filename  --- a specific file name; when given, overrides the time range
    output: extracted files under <deposit>; return the list of the file name.
    fixes:  the filename entry lacked a trailing newline so the following 'go'
            command fused onto the same line; './arc_file' cleanup used the
            path '/arc_file' (filesystem root) and left the real file behind
    """
#
#--- read a couple of information needed for arc4gl (credential files)
#
    line = bindata_dir + '.dare'
    f    = open(line, 'r')
    dare = f.readline().strip()
    f.close()

    line   = bindata_dir + '.hakama'
    f      = open(line, 'r')
    hakama = f.readline().strip()
    f.close()
#
#--- use arc4gl to extract ephin data
#--- arc4gl time format: mm/dd/yy,hh:mm:00 (two-digit year)
#
    (year1, month1, day1, hours1, minute1, second1, ydate1) = tcnv.dateFormatCon(startYear, startYdate)
    (year2, month2, day2, hours2, minute2, second2, ydate2) = tcnv.dateFormatCon(stopYear, stopYdate)

    stringYear1 = str(year1)
    stringYear2 = str(year2)

    arc_start = str(month1) + '/' + str(day1) + '/' + stringYear1[2] + stringYear1[3] + ',' + str(hours1) + ':' + str(minute1) + ':00'
    arc_stop  = str(month2) + '/' + str(day2) + '/' + stringYear2[2] + stringYear2[3] + ',' + str(hours2) + ':' + str(minute2) + ':00'
#
#--- write the arc4gl command file
#
    f = open('./arc_file', 'w')
    line = 'operation=' + operation + '\n'
    f.write(line)
    line = 'dataset=' + dataset + '\n'
    f.write(line)
    line = 'detector=' + detector + '\n'
    f.write(line)
    line = 'level=' + str(level) + '\n'
    f.write(line)
    line = 'filetype=' + filetype + '\n'
    f.write(line)
    if filename != 'NA':
#
#--- bug fix: the entry was written without '\n', so the 'go' line below was
#--- appended to it and arc4gl could not parse the command file
#
        line = 'filename=' + filename + '\n'
        f.write(line)
    else:
        f.write('tstart=')
        f.write(arc_start)
        f.write('\n')
        f.write('tstop=')
        f.write(arc_stop)
        f.write('\n')
    f.write('go\n')
    f.close()
#
#--- for the command is to retrieve: extract data and return the list of the files extracted
#
    if operation == 'retrieve':
        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file'
        os.system(cmd)

        cmd = 'mv *ephinf*.gz ' + deposit
        os.system(cmd)
        cmd = 'gzip -d ' + deposit + '/*gz'
        os.system(cmd)

        os.system('ls ./Working_dir/ephinf*.fits > ./zlist')
        f = open('./zlist', 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./arc_file ./zlist')

        return data             #--- list of the file names
#
#--- for the command is to browse: return the list of fits file names
#
    else:
        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file > file_list'
        os.system(cmd)

        f = open('./file_list', 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
#
#--- bug fix: was 'rm /arc_file ./file_list' (missing '.'), which targeted the
#--- filesystem root and never removed ./arc_file
#
        os.system('rm ./arc_file ./file_list')

        return data
def computeGOESStat(event, startTime, comp_test='NA'):

    """
    read data from goes data, and compute statistics
    input:  event     --- event name
            startTime --- starting time (format: yyyy:mm:dd:hh:mm)
            comp_test --- option, if yes, use the test data
    output: <stat_dir>/<event>_goes_stat
    fixes:  narrowed the bare 'except:' clauses (they also swallowed
            SystemExit/KeyboardInterrupt) to the two failures that can
            actually occur here; dropped a redundant duplicate re.split
    """
    begin = startTime + ':00'           #---- modify the format to work with dateFormatCon

    (year, month, day, hours, minutes, seconds, interruptTime) = tcnv.dateFormatCon(begin)
#
#--- choose the input data file (test or live)
#
    if comp_test == 'test':
        file = test_data_dir + event + '_goes.txt'
    else:
        file = data_dir + event + '_goes.txt'

    f    = open(file, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
#
#--- accumulators per channel (p1 / p2 / p5): running sum, running sum of
#--- squares, extrema with their times, and the value seen at the interruption
#
    p1Avg      = 0
    p1Avg2     = 0
    p1Max      = -1e5
    p1Min      = 1e8
    p1MaxTime  = 0
    p1MinTime  = 0
    p1IntValue = 0

    p2Avg      = 0
    p2Avg2     = 0
    p2Max      = -1e5
    p2Min      = 1e8
    p2MaxTime  = 0
    p2MinTime  = 0
    p2IntValue = 0

    p5Avg      = 0
    p5Avg2     = 0
    p5Max      = -1e5
    p5Min      = 1e8
    p5MaxTime  = 0
    p5MinTime  = 0
    p5IntValue = 0

    p1cnt = 0
    p2cnt = 0
    p5cnt = 0

    ind = 0             #---- indicator whether the loop passed the interruption time

    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        btemp = re.split('\.', atemp[0])
#
#--- data lines start with a numeric (float) time value; skip header lines
#--- and short / malformed lines
#
        if ent and btemp[0].isdigit():
            if len(atemp) < 4:
                continue

            val0 = float(atemp[0])      #--- time
            val1 = float(atemp[1])      #--- p1
            val2 = float(atemp[2])      #--- p2
            val3 = float(atemp[3])      #--- p5

            if val1 > 0:                #--- 0 could mean that there is no data; so we ignore it
                p1Avg  += val1
                p1Avg2 += val1 * val1
                if val1 > p1Max:
                    p1Max     = val1
                    p1MaxTime = val0
                elif val1 < p1Min:
                    p1Min     = val1
                    p1MinTime = val0
                p1cnt += 1

            if val2 > 0:
                p2Avg  += val2
                p2Avg2 += val2 * val2
                if val2 > p2Max:
                    p2Max     = val2
                    p2MaxTime = val0
                elif val2 < p2Min:
                    p2Min     = val2
                    p2MinTime = val0
                p2cnt += 1

            if val3 > 0:
                p5Avg  += val3
                p5Avg2 += val3 * val3
                if val3 > p5Max:
                    p5Max     = val3
                    p5MaxTime = val0
                elif val3 < p5Min:
                    p5Min     = val3
                    p5MinTime = val0
                p5cnt += 1
#
#--- finding the value at the interruption (first data time at/after it)
#
            if interruptTime <= val0 and ind == 0:
                p1IntValue = val1
                p2IntValue = val2
                p5IntValue = val3
                ind = 1
#
#--- compute averages (only when every channel has at least one sample)
#
    if p1cnt > 0 and p2cnt > 0 and p5cnt > 0:
        p1Avg /= p1cnt
        p2Avg /= p2cnt
        p5Avg /= p5cnt
#
#--- compute standard deviation: sqrt(E[x^2] - E[x]^2)
#--- -999 indicates the value could not be computed (no samples, or a
#--- slightly negative variance from floating point rounding)
#
    try:
        p1Sig = math.sqrt(p1Avg2 / p1cnt - p1Avg * p1Avg)
    except (ZeroDivisionError, ValueError):
        p1Sig = -999
    try:
        p2Sig = math.sqrt(p2Avg2 / p2cnt - p2Avg * p2Avg)
    except (ZeroDivisionError, ValueError):
        p2Sig = -999
    try:
        p5Sig = math.sqrt(p5Avg2 / p5cnt - p5Avg * p5Avg)
    except (ZeroDivisionError, ValueError):
        p5Sig = -999
#
#--- choose the output file (test or live), build the table, and write it out
#
    if comp_test == 'test':
        file = test_stat_dir + event + '_goes_stat'
    else:
        file = stat_dir + event + '_goes_stat'

    line = '\t\tAvg\t\t\tMax\t\tTime\t\tMin\t\tTime\t\tValue at Interruption Started\n'
    line = line + '-' * 95 + '\n'
    line = line + 'p1\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n'\
                % (p1Avg, p1Sig, p1Max, p1MaxTime, p1Min, p1MinTime, p1IntValue)
    line = line + 'p2\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n'\
                % (p2Avg, p2Sig, p2Max, p2MaxTime, p2Min, p2MinTime, p2IntValue)
    line = line + 'p5\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n'\
                % (p5Avg, p5Sig, p5Max, p5MaxTime, p5Min, p5MinTime, p5IntValue)

    f = open(file, 'w')
    f.write(line)
    f.close()
def extractGOESData(event, start, stop, comp_test='NA'):

    """
    Extract GOES data from NOAA site, and create a local data base.
    input:  event     --- event name
            start     --- starting time
            stop      --- stopping time
                          (e.g., 20120313 2012:03:13:22:41 2012:03:14:13:57)
            comp_test --- option; if given, testing (if comp_test == test, the test data will be read)
    output: <data_dir>/<event>_goes.txt
            <stat_dir>/<event>_goes_stat
    """
#
#--- modify date formats
#
    begin = start + ':00'
    (year1, month1, date1, hours1, minutes1, seconds1, ydate1, dom1, sectime1) = tcnv.dateFormatConAll(begin)
    end   = stop + ':00'
    (year2, month2, date2, hours2, minutes2, seconds2, ydate2, dom2, sectime2) = tcnv.dateFormatConAll(end)
#
#--- find plotting range
#
    (pYearStart, periodStart, pYearStop, periodStop, plotYearStart, plotStart, plotYearStop, plotStop, pannelNum) \
                = itrf.findCollectingPeriod(year1, ydate1, year2, ydate2)
#
#--- reformat plotting start / stop time
#
    (year3, month3, date3, hours3, minutes3, seconds3, ydate3, dom3, sectime3) = tcnv.dateFormatConAll(pYearStart, periodStart)
    (year4, month4, date4, hours4, minutes4, seconds4, ydate4, dom4, sectime4) = tcnv.dateFormatConAll(pYearStop, periodStop)
#
#--- set input data type: old if pre 2011, otherwise new
#--- the first two is from http://goes.ngdc.noaa.gov/data/avg/ and the 'new' one is from http://www.swpc.noaa.gov/ftpdir/lists/pchan/
#--- although they all use p1, p2, and p5, the value are not compatible.
#
    if year3 <= 2009:
        dtype = 'G105'
    elif year3 < 2011:
        dtype = 'G115'
    else:
        dtype = 'new'
#
#--- create a list of html address from which we extract GOES data
#
    htmlList = []
#
#--- for the starting year and the ending year is same
#
    if year3 == year4:
#
#--- for the case the starting month and ending month is same
#
        if month3 == month4:
            tmon = str(month3)
            if month3 < 10:
                tmon = '0' + tmon

            for tday in range(date3, date4 + 1):
                if tday < 10:
                    tday = '0' + str(tday)
                else:
                    tday = str(tday)

                timeStamp = str(year3) + tmon + tday
#
#--- after 2012, the data are obtained from different site. data are largely missing
#--- 2011, and previous to that we have all record at ngdc site
#
                if dtype == 'new':
#                    html = 'http://www.swpc.noaa.gov/ftpdir/lists/pchan/' + timeStamp + '_Gp_pchan_5m.txt'
                    html = '/data/mta4/www/DAILY/mta_rad/GOES/' + timeStamp + '_Gp_pchan_5m.txt'
                else:
                    syear = str(year3)
                    html = 'http://goes.ngdc.noaa.gov/data/avg/' + str(year3) + '/' + dtype + syear[2] + syear[3] + tmon + '.TXT'
                htmlList.append(html)
        else:
#
#---- for the case, the period goes over two months
#
            if month3 == 2:
#
#--- crude leap-year check: year divisible by 4
#
                chk = 4.0 * int(0.25 * year3)
                if chk == year3:
                    endDate = 29
                else:
                    endDate = 28
            elif month3 == 1 or month3 == 3 or month3 == 5 or month3 == 7 or month3 == 8 or month3 == 10:
                endDate = 31
            else:
                endDate = 30

            tmon = str(month3)
            if month3 < 10:
                tmon = '0' + tmon
#
#--- NOTE(review): unlike the other day loops, tday is NOT zero-padded here,
#--- so days 1-9 yield e.g. '2012035' rather than '20120305' -- confirm
#
            for tday in range(date3, endDate + 1):
                timeStamp = str(year3) + tmon + str(tday)
                if dtype == 'new':
                    html = '/data/mta4/www/DAILY/mta_rad/GOES/' + timeStamp + '_Gp_pchan_5m.txt'
                else:
                    syear = str(year3)
                    html = 'http://goes.ngdc.noaa.gov/data/avg/' + str(year3)
                    html = html + '/' + dtype + syear[2] + syear[3] + tmon + '.TXT'
                htmlList.append(html)

            tmon = str(month4)
            if month4 < 10:
                tmon = '0' + tmon

            for tday in range(1, date4 + 1):
                if tday < 10:
                    tday = '0' + str(tday)
                else:
                    tday = str(tday)

                timeStamp = str(year3) + tmon + tday
                if dtype == 'new':
                    html = '/data/mta4/www/DAILY/mta_rad/GOES/' + timeStamp + '_Gp_pchan_5m.txt'
                else:
                    syear = str(year3)
                    html = 'http://goes.ngdc.noaa.gov/data/avg/' + str(year3) + '/'
                    html = html + dtype + syear[2] + syear[3] + tmon + '.TXT'
                htmlList.append(html)
    else:
#
#--- for the case the period goes over two years
#--- NOTE(review): tmon is never assigned on this path (it is only set in the
#--- same-year branch above), so this loop would raise NameError; also the
#--- second loop stamps year4 with the stale tmon -- confirm whether this
#--- branch is ever reached before relying on it
#
        for tday in range(date3, 32):
            timeStamp = str(year3) + tmon + str(tday)
            if dtype == 'new':
                html = '/data/mta4/www/DAILY/mta_rad/GOES/' + timeStamp + '_Gp_pchan_5m.txt'
            else:
                syear = str(year3)
                html = 'http://goes.ngdc.noaa.gov/data/avg/' + str(year3) + '/'
                html = html + dtype + syear[2] + syear[3] + tmon + '.TXT'
            htmlList.append(html)

        for tday in range(1, date4 + 1):
            if tday < 10:
                tday = '0' + str(tday)
            else:
                tday = str(tday)

            timeStamp = str(year4) + tmon + tday
            if dtype == 'new':
                html = '/data/mta4/www/DAILY/mta_rad/GOES/' + timeStamp + '_Gp_pchan_5m.txt'
            else:
                syear = str(year3)
                html = 'http://goes.ngdc.noaa.gov/data/avg/' + str(year4) + '/'
                html = html + dtype + syear[2] + syear[3] + tmon + '.TXT'
            htmlList.append(html)
#
#--- prepare to print out data
#
    if comp_test == 'test':
        ofile = test_data_dir + event + '_goes.txt'
    else:
        ofile = data_dir + event + '_goes.txt'

    out = open(ofile, 'w')
    line = 'Science Run Interruption: ' + str(start) + '\n\n'
    out.write(line)
    out.write('dofy\t\tp1\t\t\tp2\t\t\tp5\n')
    out.write("-------------------------------------------------------------------\n")
#
#--- now extract data from NOAA web site
#--- NOTE(review): entries are read with open(); for pre-2011 events the list
#--- holds 'http://...' strings, which open() cannot read -- confirm these are
#--- locally mirrored or that pre-2011 events are no longer processed
#
    for html in htmlList:
        f = open(html, 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
#
#--- extract needed data and print them out in a data file
#
#
#--- if data are collected after 2011, start here
#
        if dtype == 'new':
            for ent in data:
                atemp = re.split('\s+|\t+', ent)
                if ent and atemp[0].isdigit():
#
#--- rebuild a yyyy:mm:dd:hh:mm:ss stamp; atemp[3] holds 'hhmm'
#
                    timestamp = atemp[0] + ':' + atemp[1] + ':' + atemp[2] + ':' + atemp[3][0]
                    timestamp = timestamp + atemp[3][1] + ':' + atemp[3][2] + atemp[3][3] + ':00'
                    (dyear, dmonth, dday, dhours, dminutes, dseconds, dydate) = tcnv.dateFormatCon(timestamp)

                    if dyear == pYearStart:
                        if dydate >= plotStart and dydate <= plotStop:
                            line = '%4.3f\t\t%3.2e\t%3.2e\t%3.2e\n' %\
                                   (dydate, float(atemp[6]), float(atemp[7]), float(atemp[10]))
                            out.write(line)
                    else:
#
#--- for the case, the period goes over two years: shift ydate by the length
#--- of the starting year
#
                        chk = 4.0 * int(0.25 * pYearStart)
                        if chk == pYearStart:
                            base = 366
                        else:
                            base = 365
                        dydate += base
                        if dydate >= plotStart and dydate <= plotStop:
                            line = '%4.3f\t\t%3.2e\t%3.2e\t%3.2e\n' %\
                                   (dydate, float(atemp[6]), float(atemp[7]), float(atemp[10]))
                            out.write(line)
#
#--- if the data is collected before 2011, start here
#
        else:
            for ent in data:
                atemp = re.split('\s+|\t+', ent)
                if ent and atemp[0].isdigit():
#
#--- atemp[0] is 'yymmdd'; recover the 4-digit year from the 2-digit one
#
                    dyear = atemp[0][0] + atemp[0][1]
                    dyear = int(dyear)
                    if dyear > 90:
                        dyear += 1900
                    else:
                        dyear += 2000

                    dydate   = float(atemp[2])
                    dhours   = float(atemp[1][0] + atemp[1][1])
                    dminutes = float(atemp[1][2] + atemp[1][3])
                    dydate  += (dhours / 24.0 + dminutes / 1440.0)

                    if dyear == pYearStart:
                        if dydate >= plotStart and dydate <= plotStop:
                            line = '%4.3f\t\t%3.2e\t%3.2e\t%3.2e\n' %\
                                   (dydate, float(atemp[10]), float(atemp[11]), float(atemp[14]))
                            out.write(line)
                    else:
#
#--- for the case, the period goes over two years
#
                        chk = 4.0 * int(0.25 * pYearStart)
                        if chk == pYearStart:
                            base = 366
                        else:
                            base = 365
                        dydate += base
#
#--- NOTE(review): base is added a second time here, unlike the 'new'-data
#--- branch which adds it only once -- looks like a duplicated line; confirm
#--- before relying on pre-2011 two-year output
#
                        dydate += base
                        if dydate >= plotStart and dydate <= plotStop:
                            line = '%4.3f\t\t%3.2e\t%3.2e\t%3.2e\n' %\
                                   (dydate, float(atemp[10]), float(atemp[11]), float(atemp[14]))
                            out.write(line)
#
#--- close the output file
#
    if len(htmlList) > 0:
        out.close()
def extractGOESData(event, start, stop, comp_test='NA'):
    """
    Extract GOES data from the NOAA site and create a local data table.

    input:  event     --- event name (e.g. 20120313)
            start     --- interruption start time (e.g. 2012:03:13:22:41)
            stop      --- interruption stop  time (e.g. 2012:03:14:13:57)
            comp_test --- if 'test', locally kept test data are read instead
                          of downloading from NOAA

    output: <data_dir>/<event>_goes.txt (or the test_data_dir version)

    Fixes over the previous revision:
      * tmon was undefined in the year-boundary branch
      * tday was not zero padded in two of the day loops (broken URLs)
      * the year-wrap offset (base) was added twice in the pre-2011 branch
      * the output file was never closed when htmlList was empty
      * the 'new' data URL host is now services.swpc.noaa.gov everywhere
      * duplicate monthly URLs are no longer fetched/parsed twice
    """
#
#--- modify date formats
#
    begin = start + ':00'
    (year1, month1, date1, hours1, minutes1, seconds1, ydate1, dom1, sectime1) \
        = tcnv.dateFormatConAll(begin)
    end = stop + ':00'
    (year2, month2, date2, hours2, minutes2, seconds2, ydate2, dom2, sectime2) \
        = tcnv.dateFormatConAll(end)
#
#--- find plotting range
#
    (pYearStart, periodStart, pYearStop, periodStop, plotYearStart, plotStart,
     plotYearStop, plotStop, pannelNum) \
        = itrf.findCollectingPeriod(year1, ydate1, year2, ydate2)
#
#--- reformat plotting start / stop time
#
    (year3, month3, date3, hours3, minutes3, seconds3, ydate3, dom3, sectime3) \
        = tcnv.dateFormatConAll(pYearStart, periodStart)
    (year4, month4, date4, hours4, minutes4, seconds4, ydate4, dom4, sectime4) \
        = tcnv.dateFormatConAll(pYearStop, periodStop)
#
#--- set input data type: old if pre 2011, otherwise new
#--- the first two are from http://goes.ngdc.noaa.gov/data/avg/ and the 'new' one
#--- is from the SWPC pchan lists. Although they all use p1, p2, and p5,
#--- the values are not compatible.
#
    if year3 <= 2009:
        dtype = 'G105'
    elif year3 < 2011:
        dtype = 'G115'
    else:
        dtype = 'new'

    def pad2(val):
#
#--- zero pad a month / day number into a two character string
#
        if val < 10:
            return '0' + str(val)
        return str(val)

    def makeLink(tyear, tmon, tday):
#
#--- build the address of one data source; tmon / tday are zero padded strings.
#--- for 2011 and later one file per day; before that one monthly file.
#
        if dtype == 'new':
            timeStamp = str(tyear) + tmon + tday
            html = 'http://services.swpc.noaa.gov/ftpdir/lists/pchan/' + timeStamp + '_Gp_pchan_5m.txt'
        else:
            syear = str(tyear)
            html  = 'http://goes.ngdc.noaa.gov/data/avg/' + str(tyear) + '/' \
                    + dtype + syear[2] + syear[3] + tmon + '.TXT'
        return html
#
#--- create a list of html addresses from which we extract GOES data
#
    htmlList = []
    if year3 == year4:
        if month3 == month4:
#
#--- the starting month and the ending month are the same
#
            tmon = pad2(month3)
            for tday in range(date3, date4 + 1):
                htmlList.append(makeLink(year3, tmon, pad2(tday)))
        else:
#
#---- the period goes over two months: finish the first month, then the second
#
            if month3 == 2:
                chk = 4.0 * int(0.25 * year3)
                if chk == year3:
                    endDate = 29
                else:
                    endDate = 28
            elif month3 in (1, 3, 5, 7, 8, 10, 12):
                endDate = 31
            else:
                endDate = 30

            tmon = pad2(month3)
            for tday in range(date3, endDate + 1):
                htmlList.append(makeLink(year3, tmon, pad2(tday)))

            tmon = pad2(month4)
            for tday in range(1, date4 + 1):
                htmlList.append(makeLink(year3, tmon, pad2(tday)))
    else:
#
#--- the period goes over the year boundary: rest of December of year3,
#--- then the first days of year4
#
        tmon = '12'
        for tday in range(date3, 32):
            htmlList.append(makeLink(year3, tmon, pad2(tday)))

        tmon = pad2(month4)
        for tday in range(1, date4 + 1):
            htmlList.append(makeLink(year4, tmon, pad2(tday)))
#
#--- the monthly (pre-2011) files repeat for every day of the month;
#--- drop duplicates while keeping the order so each file is read only once
#
    cleaned = []
    for html in htmlList:
        if html not in cleaned:
            cleaned.append(html)
    htmlList = cleaned
#
#--- prepare to print out data
#
    if comp_test == 'test':
        ofile = test_data_dir + event + '_goes.txt'
    else:
        ofile = data_dir + event + '_goes.txt'

    out  = open(ofile, 'w')
    line = 'Science Run Interruption: ' + str(start) + '\n\n'
    out.write(line)
    out.write('dofy\t\tp1\t\t\tp2\t\t\tp5\n')
    out.write("-------------------------------------------------------------------\n")
#
#--- now extract data from the NOAA web site
#
    for html in htmlList:
        if comp_test == 'test':
            atemp = re.split('pchan\/', html)
            afile = house_keeping + 'NOAO_data/' + atemp[1]
            cmd   = 'cp ' + afile + ' ./Working_dir/temp_data'
        else:
            cmd   = 'lynx -source ' + html + '>./Working_dir/temp_data'
        os.system(cmd)

        f    = open('./Working_dir/temp_data', 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./Working_dir/temp_data')
#
#--- extract needed data and print them out in a data file
#--- if data are collected after 2011, start here
#
        if dtype == 'new':
            for ent in data:
                atemp = re.split('\s+|\t+', ent)
                if ent and atemp[0].isdigit():
#
#--- columns 0-3 are YYYY MM DD HHMM; rebuild the time stamp as
#--- year:month:day:hh:mm:00 for dateFormatCon
#
                    timestamp = atemp[0] + ':' + atemp[1] + ':' + atemp[2] + ':' \
                                + atemp[3][0] + atemp[3][1] + ':' + atemp[3][2] + atemp[3][3] + ':00'
                    (dyear, dmonth, dday, dhours, dminutes, dseconds, dydate) \
                        = tcnv.dateFormatCon(timestamp)
#
#--- when the period goes over two years, shift the day of year
#--- into the scale of the starting year
#
                    if dyear != pYearStart:
                        chk = 4.0 * int(0.25 * pYearStart)
                        if chk == pYearStart:
                            base = 366
                        else:
                            base = 365
                        dydate += base

                    if dydate >= plotStart and dydate <= plotStop:
                        line = '%4.3f\t\t%3.2e\t%3.2e\t%3.2e\n' \
                               % (dydate, float(atemp[6]), float(atemp[7]), float(atemp[10]))
                        out.write(line)
#
#--- if the data were collected before 2011, start here
#
        else:
            for ent in data:
                atemp = re.split('\s+|\t+', ent)
                if ent and atemp[0].isdigit():
#
#--- column 0 is YYMMDD; recover the four digit year from the first two digits
#
                    dyear = int(atemp[0][0] + atemp[0][1])
                    if dyear > 90:
                        dyear += 1900
                    else:
                        dyear += 2000

                    dydate   = float(atemp[2])
                    dhours   = float(atemp[1][0] + atemp[1][1])
                    dminutes = float(atemp[1][2] + atemp[1][3])
                    dydate  += (dhours / 24.0 + dminutes / 1440.0)
#
#--- year boundary: add the length of the starting year exactly once
#--- (the previous revision added it twice)
#
                    if dyear != pYearStart:
                        chk = 4.0 * int(0.25 * pYearStart)
                        if chk == pYearStart:
                            base = 366
                        else:
                            base = 365
                        dydate += base

                    if dydate >= plotStart and dydate <= plotStop:
                        line = '%4.3f\t\t%3.2e\t%3.2e\t%3.2e\n' \
                               % (dydate, float(atemp[10]), float(atemp[11]), float(atemp[14]))
                        out.write(line)
#
#--- close unconditionally so the header-only file is not left open
#
    out.close()
def computeEphinStat(event, startTime, comp_test='NA'):
    """
    For a given event name and interruption starting time, read the ephin
    data table and compute statistics (avg/sigma, max/min with times, and
    the value at the moment of interruption) for each channel.

    input:  event     --- event name (e.g. 20120313)
            startTime --- interruption start time (e.g. 2012:03:13:22:41)
            comp_test --- if 'test', test data / test output dirs are used

    output: <stat_dir>/<event>_ephin_stat (or the test_stat_dir version)

    Fixes over the previous revision:
      * the min trackers used "elif", so a value that set a new max could
        never set the min (monotonic data left min at its 1e8 sentinel)
      * averages are now guarded against empty channels (zero counts)
    """
    begin = startTime + ':00'          #---- modify the format to work with dateFormatCon
    (year, month, day, hours, minutes, seconds, interruptTime) = tcnv.dateFormatCon(begin)

    if comp_test == 'test':
        file = test_data_dir + event + '_eph.txt'
    else:
        file = data_dir + event + '_eph.txt'
    f    = open(file, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
#
#--- running sums / extrema; "hrc" slot holds p4 for the pre-2011 data set
#
    hrcAvg        = 0
    hrcAvg2       = 0
    hrcMax        = -1e5
    hrcMin        = 1e8
    hrcMaxTime    = 0
    hrcMinTime    = 0
    hrcIntValue   = 0

    e150Avg       = 0
    e150Avg2      = 0
    e150Max       = -1e5
    e150Min       = 1e8
    e150MaxTime   = 0
    e150MinTime   = 0
    e150IntValue  = 0

    e1300Avg      = 0
    e1300Avg2     = 0
    e1300Max      = -1e5
    e1300Min      = 1e8
    e1300MaxTime  = 0
    e1300MinTime  = 0
    e1300IntValue = 0

    hcnt    = 0
    e1cnt   = 0
    e2cnt   = 0
    ind     = 0           #---- indicator whether the loop passed the interruption time
    dataset = 0           #---- 1: new (hrc, e150, e1300) / 0: old (p4, p41, e1300)

    for ent in data:
        m1 = re.search('Interruption', ent)
        m2 = re.search('dofy', ent)
        m3 = re.search('----', ent)
        m4 = re.search('hrc', ent)
#
#--- checking new (hrc, e150, e1300) or old (p4, p41, e1300) dataset
#
        if m4 is not None:
            dataset = 1

        if ent and m1 == None and m2 == None and m3 == None:
            atemp = re.split('\s+|\t+', ent)
            val0  = float(atemp[0])
            val1  = float(atemp[1])
            val2  = float(atemp[2])
            val3  = float(atemp[3])
#
#--- 0 could mean that there is no data; so we ignore it.
#--- max and min are tested independently: with the sentinel start
#--- values a single sample can legitimately update both.
#
            if val1 > 0:
                hrcAvg  += val1
                hrcAvg2 += val1 * val1
                if val1 > hrcMax:
                    hrcMax     = val1
                    hrcMaxTime = val0
                if val1 < hrcMin:
                    hrcMin     = val1
                    hrcMinTime = val0
                hcnt += 1

            if val2 > 0:
                e150Avg  += val2
                e150Avg2 += val2 * val2
                if val2 > e150Max:
                    e150Max     = val2
                    e150MaxTime = val0
                if val2 < e150Min:
                    e150Min     = val2
                    e150MinTime = val0
                e1cnt += 1

            if val3 > 0:
                e1300Avg  += val3
                e1300Avg2 += val3 * val3
                if val3 > e1300Max:
                    e1300Max     = val3
                    e1300MaxTime = val0
                if val3 < e1300Min:
                    e1300Min     = val3
                    e1300MinTime = val0
                e2cnt += 1
#
#--- finding the value at the interruption (first sample at/after it)
#
            if interruptTime <= val0 and ind == 0:
                hrcIntValue   = val1
                e150IntValue  = val2
                e1300IntValue = val3
                ind = 1
#
#--- compute averages and standard deviations; empty channels give 0 / 0
#
    if hcnt > 0:
        hrcAvg /= hcnt
        hrcSig  = math.sqrt(hrcAvg2 / hcnt - hrcAvg * hrcAvg)
    else:
        hrcAvg = 0
        hrcSig = 0

    if e1cnt > 0:
        e150Avg /= e1cnt
        e150Sig  = math.sqrt(e150Avg2 / e1cnt - e150Avg * e150Avg)
    else:
        e150Avg = 0
        e150Sig = 0

    if e2cnt > 0:
        e1300Avg /= e2cnt
        e1300Sig  = math.sqrt(e1300Avg2 / e2cnt - e1300Avg * e1300Avg)
    else:
        e1300Avg = 0
        e1300Sig = 0

    if comp_test == 'test':
        file = test_stat_dir + event + '_ephin_stat'
    else:
        file = stat_dir + event + '_ephin_stat'

    f = open(file, 'w')
    f.write('\t\tAvg\t\t\tMax\t\tTime\t\tMin\t\tTime\t\tValue at Interruption Started\n')
    f.write('--------------------------------------------------------------------------------------------------------------------------\n')

    if dataset == 1:
        f.write('hrc\t')
    else:
        f.write('p4\t')

    if hrcIntValue > 0:
        line = '%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n' \
               % (hrcAvg, hrcSig, hrcMax, hrcMaxTime, hrcMin, hrcMinTime, hrcIntValue)
    else:
        line = '%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\tNA\n' \
               % (hrcAvg, hrcSig, hrcMax, hrcMaxTime, hrcMin, hrcMinTime)
    f.write(line)
#
#--- the e150/p41 channel is reported only for pre-2014 interruptions
#
    if year < 2014:
        if dataset == 1:
            f.write('e150\t')
        else:
            f.write('p41\t')
        line = '%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n' \
               % (e150Avg, e150Sig, e150Max, e150MaxTime, e150Min, e150MinTime, e150IntValue)
        f.write(line)

    line = 'e1300\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n' \
           % (e1300Avg, e1300Sig, e1300Max, e1300MaxTime, e1300Min, e1300MinTime, e1300IntValue)
    f.write(line)
    f.close()
def computeACEStat(event, start, stop, comp_test='NA'):
    """
    For a given event, start and stop data, compute ACE statistics.
    format: 20110804 2011:08:04:07:03 2011:08:07:10:25

    input:  event     --- event name (e.g. 20110804)
            start     --- interruption start time (e.g. 2011:08:04:07:03)
            stop      --- interruption stop  time (e.g. 2011:08:07:10:25)
            comp_test --- if 'test', test data / test output dirs are used

    output: <stat_dir>/<event>_ace_stat (or the test_stat_dir version)

    The 13 statistics channels (8 raw + 5 hardness ratios) are handled by a
    single accumulator instead of 13 copies of the same code. Fix over the
    previous revision: the ratio carry-over values are initialized to 0, so a
    first data row with e175 <= 0 or p1060 <= 0 no longer raises NameError.
    """
#
#--- change time format to year and ydate (float)
#
    begin = start + ':00'     #---- need to add "seconds" part for dateFormatCon to work correctly
    end   = stop  + ':00'
    (year1, month1, day1, hours1, minutes1, seconds1, ydate1) = tcnv.dateFormatCon(begin)
    (year2, month2, day2, hours2, minutes2, seconds2, ydate2) = tcnv.dateFormatCon(end)
#
#--- find plotting range
#
    (pYearStart, periodStart, pYearStop, periodStop, plotYearStart, plotStart,
     plotYearStop, plotStop, pannelNum) \
        = itrf.findCollectingPeriod(year1, ydate1, year2, ydate2)
#
#--- read ACE data
#
    if comp_test == 'test':
        line = test_data_dir + event + '_dat.txt'
    else:
        line = data_dir + event + '_dat.txt'
    f    = open(line, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
#
#--- the 8 raw channels (data columns 1..8, in order) and the 5 hardness ratios
#
    channels = ['e38', 'e175', 'p47', 'p112', 'p310', 'p761', 'p1060', 'aniso']
    ratios   = ['r38_175', 'r47_1060', 'r112_1060', 'r310_1060', 'r761_1060']
    names    = channels + ratios
#
#--- per-channel accumulators: count, sum, sum of squares, extrema and their
#--- times, value at interruption; min starts at a large sentinel
#
    cnt   = {}
    vsum  = {}
    vsum2 = {}
    vmax  = {}
    vmin  = {}
    tmax  = {}
    tmin  = {}
    vint  = {}
    for nm in names:
        cnt[nm]   = 0
        vsum[nm]  = 0.0
        vsum2[nm] = 0.0
        vmax[nm]  = 0.0
        vmin[nm]  = 1.0e10
        tmax[nm]  = 0.0
        tmin[nm]  = 0.0
        vint[nm]  = 0.0

    def accumulate(nm, val, dtime):
#
#--- fold one sample into the running statistics of channel nm;
#--- non-positive values mean "no data" and are ignored
#
        if val > 0:
            cnt[nm]   += 1
            if val > vmax[nm]:
                vmax[nm] = val
                tmax[nm] = dtime
            if val < vmin[nm]:
                vmin[nm] = val
                tmin[nm] = dtime
            vsum[nm]  += val
            vsum2[nm] += val * val
#
#--- "current" carries the most recent value of each channel; ratios keep
#--- their previous value when the denominator is non-positive (and start
#--- at 0 so the interruption snapshot below is always defined)
#
    current = {}
    for nm in names:
        current[nm] = 0.0
#
#--- start accumulating the values
#
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        btemp = re.split('\.', atemp[0])
        if atemp[0] and btemp[0].isdigit():
            dtime = float(atemp[0])

            for pos in range(0, 8):
                nm = channels[pos]
                current[nm] = float(atemp[pos + 1])
                accumulate(nm, current[nm], dtime)
#
#--- hardness ratios, computed only when the denominator is positive
#
            if current['e175'] > 0:
                current['r38_175'] = current['e38'] / current['e175']
                accumulate('r38_175', current['r38_175'], dtime)

            if current['p1060'] > 0:
                for (num, nm) in [('p47',  'r47_1060'),  ('p112', 'r112_1060'),
                                  ('p310', 'r310_1060'), ('p761', 'r761_1060')]:
                    current[nm] = current[num] / current['p1060']
                    accumulate(nm, current[nm], dtime)
#
#--- snapshot after every valid row; the last one is the value at (just
#--- before) the interruption table cut-off
#
            for nm in names:
                vint[nm] = current[nm]
#
#--- the big loop ends here; now compute avg and std (empty channels give 0)
#
    avg = {}
    sig = {}
    for nm in names:
        if cnt[nm] == 0:
            avg[nm] = 0
            sig[nm] = 0
        else:
            avg[nm] = vsum[nm] / cnt[nm]
            sig[nm] = math.sqrt(vsum2[nm] / cnt[nm] - avg[nm] * avg[nm])
#
#--- create stat table
#
    line = 'Data Period  (dom): %6.4f - %6.4f\n' % (plotStart, plotStop)
    line = line + 'Interruption (dom): %6.4f - %6.4f\n\n' % (ydate1, ydate2)
    line = line + '\t\t\tAvg\t\t Max\t\tTime\tMin\t\tTime\t\tValue at Interruption Started\n'
    line = line + '-' * 95 + '\n'

    row_fmt = '%4.3e+/-%4.3e\t%4.3e\t%4.3f \t%4.3e\t%4.3f\t\t%4.3e\n'

    for (label, nm) in [('e38 \t\t', 'e38'),  ('e175\t\t', 'e175'), ('p47 \t\t', 'p47'),
                        ('p112\t\t', 'p112'), ('p310\t\t', 'p310'), ('p761\t\t', 'p761'),
                        ('p1060\t\t', 'p1060')]:
        line = line + label + row_fmt \
               % (avg[nm], sig[nm], vmax[nm], tmax[nm], vmin[nm], tmin[nm], vint[nm])
#
#--- the anisotropy index is reported only for pre-2014 interruptions
#
    if year1 < 2014:
        nm   = 'aniso'
        line = line + 'anisotropy\t' + row_fmt \
               % (avg[nm], sig[nm], vmax[nm], tmax[nm], vmin[nm], tmin[nm], vint[nm])

    line = line + '\nHardness:\n'
    for (label, nm) in [('e38/e175\t', 'r38_175'),     ('p47/p1060\t', 'r47_1060'),
                        ('p112/p1060\t', 'r112_1060'), ('p310/p1060\t', 'r310_1060'),
                        ('p761/p1060\t', 'r761_1060')]:
        line = line + label + row_fmt \
               % (avg[nm], sig[nm], vmax[nm], tmax[nm], vmin[nm], tmin[nm], vint[nm])
#
#---- find gradient and choose the steepest rising point
#
    time  = []
    slist = [[], [], [], [], [], [], []]        #--- e38, e175, p47, p112, p310, p761, p1060

    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        btemp = re.split('\.', atemp[0])
        if atemp[0] and btemp[0].isdigit():
            time.append(float(atemp[0]))
#
#--- clamp non-positive values so that a log-slope can be taken
#
            for j in range(1, 9):
                if float(atemp[j]) <= 0:
                    atemp[j] = 1.0e-5
            for j in range(0, 7):
                slist[j].append(float(atemp[j + 1]))

    line = line + '\nSteepest Rise\n'
    line = line + '------------\n'
    line = line + '\tTime\t\tSlope(in log per hr)\n'
    line = line + '----------------------------------------\n'

    labels = ['e1 \t', 'e175\t', 'p47 \t', 'p112\t', 'p310\t', 'p761\t', 'p1060\t']
    for j in range(0, 7):
        (max_pos, max_slope) = find_jump(slist[j], time)
        line = line + labels[j]
        if max_pos == -999:
            line = line + 'na\t\tna\n'
        else:
            line = line + '%5.4f\t\t%3.4f\n' % (time[max_pos], max_slope)

    if comp_test == 'test':
        out = test_stat_dir + event + '_ace_stat'
    else:
        out = stat_dir + event + '_ace_stat'

    f = open(out, 'w')
    f.write(line)
    f.close()
def useArc5gl(operation, dataset, detector, level, filetype, startYear = 0, startYdate = 0, stopYear = 0 , stopYdate = 0, deposit = './Working_dir/'): """ extract data using arc5gl. input: start --- stop (year and ydate) operation --- (e.g., retrive) dataset ---(e.g. flight) detector --- (e.g. hrc) level --- (eg 0, 1, 2) filetype ---(e.g, evt1) startYear startYdate stopYear stopYdate output: data --- a list of fits file extracted """ # #--- use arc5gl to extract ephin data # (year1, month1, day1, hours1, minute1, second1, ydate1) = tcnv.dateFormatCon(startYear, startYdate) (year2, month2, day2, hours2, minute2, second2, ydate2) = tcnv.dateFormatCon(stopYear, stopYdate) stringYear1 = str(year1) stringYear2 = str(year2) arc_start = str(month1) + '/' + str(day1) + '/' + stringYear1[2] + stringYear1[3] arc_start = arc_start + ',' + str(hours1) + ':'+ str(minute1) + ':00' arc_stop = str(month2) + '/' + str(day2) + '/' + stringYear2[2] + stringYear2[3] arc_stop = arc_stop + ',' + str(hours2) + ':'+ str(minute2) + ':00' intime = str(startYear) + ':' + adjust_ydate_format(startYdate) + ':00:00:00' arc_start = tcnv.axTimeMTA(intime) intime = str(stopYear) + ':' + adjust_ydate_format(stopYdate) + ':00:00:00' arc_stop = tcnv.axTimeMTA(intime) #print "I AM HERE: " + str(arc_start) + "<--->" + str(arc_stop) line = 'operation=' + operation + '\n' line = line + 'dataset=' + dataset + '\n' line = line + 'detector=' + detector + '\n' line = line + 'level=' + str(level) + '\n' line = line + 'filetype=' + filetype + '\n' line = line + 'tstart=' + str(arc_start) + '\n' line = line + 'tstop=' + str(arc_stop) + '\n' line = line + 'go\n' f = open(zspace, 'w') f.write(line) f.close() cmd = 'cd ' + deposit + '; arc5gl -user isobe -script ' + zspace + ' >./zlist' os.system(cmd) cmd = 'rm ' + zspace os.system(cmd) infile = deposit + '/zlist' f = open(infile, 'r') data = [line.strip() for line in f.readlines()] f.close() cmd = 'rm ' + infile os.system(cmd) return data
def ephinDataExtract(event, start, stop, comp_test='NA'):
    """
    Extract EPHIN related quantities and create a data table for a given event.

    input:  event     --- event name (e.g. 20120313)
            start     --- interruption start time (format: 2012:03:13:22:41)
            stop      --- interruption stop  time (same format)
            comp_test --- if 'test', the output goes to the test data directory

    output: <data_dir>/<event>_eph.txt with columns
            dofy / p4 / p41 / e1300   (interruptions before 2011), or
            dofy / hrc / e150 / e1300 (2011 and later; hrc is the shield rate)
    """
    begin = start + ':00'       #---- to use dateFormatCon correctly, need to add "sec" part
    end   = stop  + ':00'
#
#--- convert time format
#
    (year1, month1, day1, hours1, minutes1, seconds1, ydate1) = tcnv.dateFormatCon(begin)
    (year2, month2, day2, hours2, minutes2, seconds2, ydate2) = tcnv.dateFormatCon(end)
#
#--- change time format and find data collecting period (starts 2 days before
#--- the interruption and ends at least 5 days after the starting)
#
    (pYearStart, periodStart, pYearStop, periodStop, plotYearStart, plotStart,
     plotYearStop, plotStop, pannelNum) \
        = itrf.findCollectingPeriod(year1, ydate1, year2, ydate2)
#
#--- read ephin data using arc4gl; the fits files land in ./Working_dir
#
    ephinList = itrf.useArc4gl(
        'retrieve',
        'flight',
        'ephin',
        1,
        'ephrates',
        pYearStart,
        periodStart,
        pYearStop,
        periodStop,
        deposit='./Working_dir',
    )
#
#--- extract needed data; column sets differ before / after 2011
#
    xdate = []        # day of year (shifted past 365/366 when the year rolls over)
    p4    = []
    p41   = []
    e150  = []
    e1300 = []
    ecnt  = 0         # number of accepted ephin rows

    for fits in ephinList:
#
#--- use dmlist to dump the needed columns as text
#
        if pYearStart < 2011:
            line = fits + '[cols TIME, SCP4, SCP41, SCE1300]'
        else:
            line = fits + '[cols TIME, SCE150, SCE1300]'

        cmd = 'dmlist "' + line + '" opt="data" > ./zout'
        os.system(cmd)

        f = open('./zout', 'r')
        input = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./zout')

        for ent in input:
            ent.lstrip()                       # NOTE(review): no-op; result is discarded
            atemp = re.split('\s+|\t+', ent)
            if atemp[0].isdigit():             # data rows start with a row number
#
#--- atemp[1] is TIME in seconds; convert to year:ydate:... then split up
#
                line = tcnv.convertCtimeToYdate(float(atemp[1]))
                (year, month, day, hours, minute, second, ydate, dom, sectime) \
                    = tcnv.dateFormatConAll(line)
#
#--- checking the case the year changes: the first row fixes startYear and
#--- base (days in startYear); later rows in the next year get base added
#
                if ecnt > 0 and year != startYear:
                    ydate += base
                elif ecnt == 0:
                    startYear = year
                    chk = 4.0 * int(0.25 * year)
                    if chk == year:
                        base = 366
                    else:
                        base = 365

                xdate.append(ydate)

                if pYearStart < 2011:
#
#--- 0 means no data; replace with a small positive value (for log plots)
#
                    if float(atemp[3]) == 0:
                        atemp[3] = 1.0e-4
                    if float(atemp[4]) == 0:
                        atemp[4] = 1.0e-4
                    p4.append(atemp[2])
                    p41.append(atemp[3])
                    e1300.append(atemp[4])
                else:
                    if float(atemp[2]) == 0:
                        atemp[2] = 1.0e-4
                    if float(atemp[3]) == 0:
                        atemp[3] = 1.0e-4
                    e150.append(atemp[2])
                    e1300.append(atemp[3])
                ecnt += 1

    os.system('rm ./Working_dir/*fits')
#
#--- using DataSeeker, extract HRC shield rate (only if year >= 2011)
#
    if pYearStart >= 2011:
        hrcData = itrf.useDataSeeker(pYearStart, periodStart, pYearStop, periodStop,
                                     'mtahrc..hrcveto_avg', 'time,shevart_avg')
        time = []     # day of year of each hrc sample (year-wrap shifted)
        veto = []     # shield rate values (kept as strings)
        hcnt = 0
#
#--- there are slightly different input line formats; so we need two ways
#--- to read the line (4-column rows vs shorter rows)
#
        for ent in hrcData:
            m1 = re.search('----', ent)
            m2 = re.search('ROW', ent)
            m3 = re.search('DATASEEK', ent)
            if ent and m1 == None and m2 == None and m3 == None:
                atemp = re.split('\s+|\t+', ent)
                if len(atemp) == 4:
                    ttime = tcnv.convertCtimeToYdate(float(atemp[2]))
                    temp  = re.split(':', ttime)
                    year  = int(temp[0])
                    dofy  = float(temp[1]) + float(temp[2]) / 24 + float(temp[3]) / 1440 \
                            + float(temp[4]) / 86400
#
#--- checking the case the year changes (same scheme as the ephin loop)
#
                    if hcnt > 0 and year != startYear:
                        dofy += base
                    elif hcnt == 0:
                        startYear = year
                        chk = 4.0 * int(0.25 * year)
                        if chk == year:
                            base = 366
                        else:
                            base = 365
                    time.append(dofy)
                    veto.append(atemp[3])
                    hcnt += 1
                else:
#
#--- shorter rows: time and value shift one column to the left
#
                    ttime = tcnv.convertCtimeToYdate(float(atemp[1]))
                    temp  = re.split(':', ttime)
                    year  = int(temp[0])
                    dofy  = float(temp[1]) + float(temp[2]) / 24 + float(temp[3]) / 1440 \
                            + float(temp[4]) / 86400
                    if hcnt > 0 and year != startYear:
                        dofy += base
                    elif hcnt == 0:
                        startYear = year
                        chk = 4.0 * int(0.25 * year)
                        if chk == year:
                            base = 366
                        else:
                            base = 365
                    time.append(dofy)
                    veto.append(atemp[2])
                    hcnt += 1
#
#--- matching timing between electron data and hrc data: walk index j through
#--- the hrc samples and k through the ephin samples until they overlap
#
        hrc = len(e150) * [0]
        j = 0
        k = 0
#
#--- find the beginning
#
        if time[0] < xdate[0]:
            while time[j] < xdate[0]:
                j += 1
                if j >= hcnt:
                    print "Time span does not overlap. Abort the process."
                    exit(1)
        elif time[0] > xdate[0]:
            while time[0] > xdate[k]:
                k += 1
                if k >= ecnt:
                    print "Time span does not overlap. Abort the process."
                    exit(1)

        hrc[k] = veto[j]
#
#--- setting timing bin size: base is given in the hrc loop above
#--- NOTE(review): 1.3889e-3 day = 2 minutes, but it is divided by base
#--- (365/366), which makes the matching window ~2 min / 365 — confirm the
#--- intended units before changing anything here
#
        tspace = 1.38888888888e-3 / base

        for i in range(k + 1, ecnt):
            tbeg = xdate[i] - tspace
            tend = xdate[i] + tspace
            if j > hcnt - 2:
#
#---- if the hrc data runs out, just repeat the last data point value
#
                hrc[i] = veto[hcnt - 1]
            elif time[j] >= tbeg and time[j] <= tend:
                hrc[i] = veto[j]
            elif time[j] < tbeg:
                while time[j] < tbeg:
                    j += 1
                hrc[i] = veto[j]
            elif time[j] > tend:
                while time[j] > tend:
                    j -= 1
                hrc[i] = veto[j]
#
#--- print out data
#
    if comp_test == 'test':
        file = test_data_dir + event + '_eph.txt'
    else:
        file = data_dir + event + '_eph.txt'

    f = open(file, 'w')
    line = 'Science Run Interruption: ' + start + '\n\n'
    f.write(line)

    if pYearStart < 2011:
        f.write('dofy\t\tp4\t\t\tp41\t\t\te1300\n')
        f.write('-------------------------------------------------------------------\n')
        for m in range(0, ecnt):
            line = '%4.3f\t\t%4.3e\t%4.3e\t%4.3e\n' % (float(xdate[m]), float(p4[m]),
                                                       float(p41[m]), float(e1300[m]))
            f.write(line)
    else:
        f.write('dofy\t\thrc\t\te150\t\te1300\n')
        f.write('-------------------------------------------------------------------\n')
        for m in range(0, ecnt):
            line = '%4.3f\t\t%4.3e\t%4.3e\t%4.3e\n' % (float(xdate[m]), float(hrc[m]),
                                                       float(e150[m]), float(e1300[m]))
            f.write(line)
    f.close()
class _ChannelStat:
    """Accumulate simple statistics for one ACE channel / hardness ratio:
    count, mean, std, max/min (with their times), and the last value seen."""

    def __init__(self):
        self.cnt    = 0
        self.vsum   = 0.0
        self.vsum2  = 0.0
        self.vmax   = 0
        self.vmax_t = 0
        self.vmin   = 1.0e10
        self.vmin_t = 0
        self.last   = 0       #--- value at the last data row seen ("Value at Interruption Started" column)

    def add(self, val, time):
        "fold one measurement into the statistics; non-positive values are treated as missing"
        if val > 0:
            self.cnt += 1
            if val > self.vmax:
                self.vmax   = val
                self.vmax_t = time
            if val < self.vmin:
                self.vmin   = val
                self.vmin_t = time
            self.vsum  += val
            self.vsum2 += val * val

    def row(self):
        "return (avg, std, max, max_time, min, min_time, last) for one stat-table row"
        if self.cnt == 0:
            return (0, 0, self.vmax, self.vmax_t, self.vmin, self.vmin_t, self.last)
        avg = self.vsum / self.cnt
        std = math.sqrt(self.vsum2 / self.cnt - avg * avg)
        return (avg, std, self.vmax, self.vmax_t, self.vmin, self.vmin_t, self.last)


def computeACEStat(event, start, stop, comp_test = 'NA'):
    """
    for a given event, compute ACE statistics and write the <event>_ace_stat table.

    input:  event     --- event name (e.g. 20110804)
            start     --- interruption start time (format: 2011:08:04:07:03)
            stop      --- interruption stop  time (format: 2011:08:07:10:25)
            comp_test --- if 'test', read from / write to the test directories
    output: <stat_dir>/<event>_ace_stat (or <test_stat_dir>/... when testing)

    fix: the original assigned r38_175_int etc. from ratio variables that were
    only set inside the denominator-positive branches; a first data row with a
    non-positive denominator raised NameError. The ratios are now initialized
    to 0 and carry their last computed value across rows.
    """
#
#--- change time format to year and ydate (float)
#
    begin = start + ':00'           #---- need to add "seconds" part for dateFormatCon to work correctly
    end   = stop  + ':00'
    (year1, month1, day1, hours1, minutes1, seconds1, ydate1) = tcnv.dateFormatCon(begin)
    (year2, month2, day2, hours2, minutes2, seconds2, ydate2) = tcnv.dateFormatCon(end)
#
#--- find plotting range
#
    (pYearStart, periodStart, pYearStop, periodStop, plotYearStart, plotStart, plotYearStop, plotStop, pannelNum) \
                = itrf.findCollectingPeriod(year1, ydate1, year2, ydate2)
#
#--- read ACE data
#
    if comp_test == 'test':
        line = test_data_dir + event + '_dat.txt'
    else:
        line = data_dir + event + '_dat.txt'
    f    = open(line, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
#
#--- one accumulator per channel (columns 1-8) and per hardness ratio
#
    names = ['e38', 'e175', 'p47', 'p112', 'p310', 'p761', 'p1060', 'aniso',
             'r38_175', 'r47_1060', 'r112_1060', 'r310_1060', 'r761_1060']
    stat  = {}
    for name in names:
        stat[name] = _ChannelStat()
#
#--- ratio values carry across rows (0 until first computed) --- see docstring
#
    r38_175 = r47_1060 = r112_1060 = r310_1060 = r761_1060 = 0
#
#--- accumulate the values; a row is valid when its first field is numeric
#
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        btemp = re.split('\.', atemp[0])
        if atemp[0] and btemp[0].isdigit():
            time = float(atemp[0])
            vals = [float(atemp[j]) for j in range(1, 9)]
            for (name, val) in zip(names[:8], vals):
                stat[name].add(val, time)
                stat[name].last = val
            (e38, e175, p47, p112, p310, p761, p1060) = vals[:7]
#
#--- hardness ratios: computable only when the denominator is positive
#
            if e175 > 0:
                r38_175 = e38 / e175
                stat['r38_175'].add(r38_175, time)
            if p1060 > 0:
                r47_1060  = p47  / p1060
                r112_1060 = p112 / p1060
                r310_1060 = p310 / p1060
                r761_1060 = p761 / p1060
                stat['r47_1060'].add(r47_1060,   time)
                stat['r112_1060'].add(r112_1060, time)
                stat['r310_1060'].add(r310_1060, time)
                stat['r761_1060'].add(r761_1060, time)
            for (name, val) in zip(names[8:], (r38_175, r47_1060, r112_1060, r310_1060, r761_1060)):
                stat[name].last = val
#
#--- create stat table
#
    if comp_test == 'test':
        out = test_stat_dir + event + '_ace_stat'
    else:
        out = stat_dir + event + '_ace_stat'
    f = open(out, 'w')
    line = 'Data Period (dom): %6.4f - %6.4f\n' % (plotStart, plotStop)
    f.write(line)
    line = 'Interruption (dom): %6.4f - %6.4f\n' % (ydate1, ydate2)
    f.write(line)
    f.write('\t\t\tAvg\t\t Max\t\tTime\tMin\t\tTime\t\tValue at Interruption Started\n')
    f.write('--------------------------------------------------------------------------------------------------------------------------\n')

    fmt = '%4.3e+/-%4.3e\t%4.3e\t%4.3f \t%4.3e\t%4.3f\t\t%4.3e\n'
    for (label, name) in [('e38 \t\t', 'e38'),  ('e175\t\t', 'e175'), ('p47 \t\t', 'p47'),
                          ('p112\t\t', 'p112'), ('p310\t\t', 'p310'), ('p761\t\t', 'p761'),
                          ('p1060\t\t', 'p1060')]:
        f.write(label + fmt % stat[name].row())
#
#--- the anisotropy index is reported only for events before 2014
#
    if year1 < 2014:
        f.write('anisotropy\t' + fmt % stat['aniso'].row())

    f.write('\nHardness:\n')
    for (label, name) in [('e38/e175\t', 'r38_175'),   ('p47/p1060\t', 'r47_1060'),
                          ('p112/p1060\t', 'r112_1060'), ('p310/p1060\t', 'r310_1060'),
                          ('p761/p1060\t', 'r761_1060')]:
        f.write(label + fmt % stat[name].row())
    f.write('\n')
#
#---- find gradient and choose the steepest rising point
#
    time  = []
    chans = [[], [], [], [], [], [], []]        #--- e38, e175, p47, p112, p310, p761, p1060
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        btemp = re.split('\.', atemp[0])
        if atemp[0] and btemp[0].isdigit():
            time.append(float(atemp[0]))
#
#--- clip non-positive values so that a log-slope is computable
#
            for j in range(1, 9):
                if float(atemp[j]) <= 0:
                    atemp[j] = 1.0e-5
            for j in range(0, 7):
                chans[j].append(float(atemp[j + 1]))

    f.write('Steepest Rise\n')
    f.write('------------\n')
    f.write('\tTime\t\tSlope(in log per hr)\n')
    f.write('----------------------------------------\n')

    for (label, vals) in zip(['e1 \t', 'e175\t', 'p47 \t', 'p112\t', 'p310\t', 'p761\t', 'p1060\t'], chans):
        (max_pos, max_slope) = find_jump(vals, time)
        f.write(label)
        if max_pos == -999:                     #--- find_jump's "no jump found" sentinel
            f.write('na\t\tna\n')
        else:
            f.write('%5.4f\t\t%3.4f\n' % (time[max_pos], max_slope))
    f.close()
def ephinDataExtract(event, start, stop, comp_test ='NA'):
    """
    extract EPHIN related quantities and create a data table for a given event.

    input:  event     --- event name
            start     --- interruption start time (format: 2012:03:13:22:41)
            stop      --- interruption stop  time (same format)
            comp_test --- if 'test', write to the test data directory
    output: <data_dir>/<event>_eph.txt (or <test_data_dir>/... when testing)

    NOTE(review): relies on external tools (arc4gl via itrf, dmlist, DataSeeker)
    and module globals (data_dir etc.); for events from 2011 on, the HRC shield
    rate replaces the discontinued p4/p41 EPHIN channels.
    """
    begin = start + ':00'   #---- to use dateFormatCon correctly, need to add "sec" part
    end = stop + ':00'
#
#--- convert time format
#
    (year1, month1, day1, hours1, minutes1, seconds1, ydate1) = tcnv.dateFormatCon(begin)
    (year2, month2, day2, hours2, minutes2, seconds2, ydate2) = tcnv.dateFormatCon(end)
#
#--- change time format and find data collecting period (starts 2 days before the interruption and ends at least 5 days after the stating)
#
    (pYearStart, periodStart, pYearStop, periodStop, plotYearStart, plotStart, plotYearStop, plotStop, pannelNum) \
        = itrf.findCollectingPeriod(year1, ydate1, year2, ydate2)
#
#--- read ephin data using arc4gl; fits files land in ./Working_dir
#
    ephinList = itrf.useArc4gl('retrieve', 'flight', 'ephin', 1, 'ephrates', pYearStart, periodStart, pYearStop, periodStop, deposit='./Working_dir',)
#
#--- extract needed data
#
    xdate = []    # day-of-year timestamps (continued past Dec 31 on year change)
    p4    = []    # EPHIN P4  channel (pre-2011 only)
    p41   = []    # EPHIN P41 channel (pre-2011 only)
    e150  = []    # EPHIN E150 channel (2011 and later)
    e1300 = []    # EPHIN E1300 channel (both eras)
    ecnt  = 0     # number of accepted EPHIN data rows
    for fits in ephinList:
#
#--- use dmlist; column set depends on the era (channel availability changed in 2011)
#
        if pYearStart < 2011:
            line = fits + '[cols TIME, SCP4, SCP41, SCE1300]'
        else:
            line = fits + '[cols TIME, SCE150, SCE1300]'
        cmd = 'dmlist "' + line + '" opt="data" > ./zout'
        os.system(cmd)
        f = open('./zout', 'r')
        input = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./zout')
        for ent in input:
            # NOTE(review): lstrip() result is discarded here (strings are immutable);
            # the row filter below works on the split fields regardless
            ent.lstrip()
            atemp = re.split('\s+|\t+', ent)
            # data rows start with a numeric row index; header/separator rows do not
            if atemp[0].isdigit():
                line = tcnv.convertCtimeToYdate(float(atemp[1]))
                (year, month, day, hours, minute, second, ydate, dom, sectime) = tcnv.dateFormatConAll(line)
#
#--- checking the case the year change: after a rollover, keep ydate monotonic
#--- by adding the length (365/366) of the starting year
#
                if ecnt > 0 and year != startYear:
                    ydate += base
                elif ecnt == 0:
                    startYear = year
                    # leap-year test: year divisible by 4 (century rule not applied)
                    chk = 4.0 * int(0.25 * year)
                    if chk == year:
                        base = 366
                    else:
                        base = 365
                xdate.append(ydate)
                # zero readings are clipped to 1.0e-4 so later log plotting works
                if pYearStart < 2011:
                    if float(atemp[3]) == 0:
                        atemp[3] = 1.0e-4
                    if float(atemp[4]) == 0:
                        atemp[4] = 1.0e-4
                    p4.append(atemp[2])
                    p41.append(atemp[3])
                    e1300.append(atemp[4])
                else:
                    if float(atemp[2]) == 0:
                        atemp[2] = 1.0e-4
                    if float(atemp[3]) == 0:
                        atemp[3] = 1.0e-4
                    e150.append(atemp[2])
                    e1300.append(atemp[3])
                ecnt += 1
    os.system('rm ./Working_dir/*fits')
#
#--- using DataSeeker, extread HRC sheild rate (only if year > 2011)
#
    if pYearStart >= 2011:
        hrcData = itrf.useDataSeeker(pYearStart, periodStart, pYearStop, periodStop, 'mtahrc..hrcveto_avg', 'time,shevart_avg')
        time = []   # HRC sample times (day of year, continued over year rollover)
        veto = []   # HRC shield (veto) rate samples
        hcnt = 0    # number of accepted HRC rows
#
#--- there are slightly different input line format; so we need to different ways to read the line
#--- (skip separator, header and DATASEEK banner lines)
#
        for ent in hrcData:
            m1 = re.search('----', ent)
            m2 = re.search('ROW', ent)
            m3 = re.search('DATASEEK', ent)
            if ent and m1 == None and m2 == None and m3 == None:
                atemp = re.split('\s+|\t+', ent)
                # 4-field rows carry (index, row#, time, rate); shorter rows (time, rate)
                if len(atemp) == 4:
                    ttime = tcnv.convertCtimeToYdate(float(atemp[2]))
                    temp = re.split(':', ttime)
                    year = int(temp[0])
                    # day of year from ydate string: day + hr/24 + min/1440 + sec/86400
                    dofy = float(temp[1]) + float(temp[2]) / 24 + float(temp[3]) / 1440 + float(temp[4]) / 86400
#
#--- checking the case the year changes
#
                    if hcnt > 0 and year != startYear:
                        dofy += base
                    elif hcnt == 0:
                        startYear = year
                        chk = 4.0 * int(0.25 * year)
                        if chk == year:
                            base = 366
                        else:
                            base = 365
                    time.append(dofy)
                    veto.append(atemp[3])
                    hcnt += 1
                else:
                    ttime = tcnv.convertCtimeToYdate(float(atemp[1]))
                    temp = re.split(':', ttime)
                    year = int(temp[0])
                    dofy = float(temp[1]) + float(temp[2]) / 24 + float(temp[3]) / 1440 + float(temp[4]) / 86400
                    if hcnt > 0 and year != startYear:
                        dofy += base
                    elif hcnt == 0:
                        startYear = year
                        chk = 4.0 * int(0.25 * year)
                        if chk == year:
                            base = 366
                        else:
                            base = 365
                    time.append(dofy)
                    veto.append(atemp[2])
                    hcnt += 1
#
#--- matching timing between electron data and hrc data
#--- (two-pointer walk: j indexes hrc samples, k/i index electron samples)
#--- NOTE(review): assumes both streams are non-empty; an empty stream would raise IndexError
#
        hrc = len(e150) * [0]
        j = 0
        k = 0
#
#--- find the begining
#
        if time[0] < xdate[0]:
            while time[j] < xdate[0]:
                j += 1
                if j >= hcnt:
                    print "Time span does not overlap. Abort the process."
                    exit(1)
        elif time[0] > xdate[0]:
            while time[0] > xdate[k]:
                k += 1
                if k >= ecnt:
                    print "Time span does not overlap. Abort the process."
                    exit(1)
        hrc[k] = veto[j]
        tspace = 1.38888888888e-3 / base    #--- setting timing bin size: base is given in hrc loop
        for i in range(k + 1, ecnt):
            tbeg = xdate[i] - tspace
            tend = xdate[i] + tspace
            if j > hcnt - 2:
                hrc[i] = veto[hcnt - 1]     #---- if the hrc data runs out, just repeat the last data point value
            elif time[j] >= tbeg and time[j] <= tend:
                hrc[i] = veto[j]
            elif time[j] < tbeg:
                # hrc pointer lags: advance until it reaches the electron bin
                while time[j] < tbeg:
                    j += 1
                hrc[i] = veto[j]
            elif time[j] > tend:
                # hrc pointer overshot: back up until it falls inside the bin
                while time[j] > tend:
                    j -= 1
                hrc[i] = veto[j]
#
#--- print out data
#
    if comp_test == 'test':
        file = test_data_dir + event + '_eph.txt'
    else:
        file = data_dir + event + '_eph.txt'
    f = open(file, 'w')
    line = 'Science Run Interruption: ' + start + '\n\n'
    f.write(line)
    if pYearStart < 2011:
        f.write('dofy\t\tp4\t\t\tp41\t\t\te1300\n')
        f.write('-------------------------------------------------------------------\n')
        for m in range(0, ecnt):
            line = '%4.3f\t\t%4.3e\t%4.3e\t%4.3e\n' % (float(xdate[m]), float(p4[m]), float(p41[m]), float(e1300[m]))
            f.write(line)
    else:
        f.write('dofy\t\thrc\t\te150\t\te1300\n')
        f.write('-------------------------------------------------------------------\n')
        for m in range(0, ecnt):
            line = '%4.3f\t\t%4.3e\t%4.3e\t%4.3e\n' % (float(xdate[m]), float(hrc[m]), float(e150[m]), float(e1300[m]))
            f.write(line)
    f.close()
def _writeArc4glCommandFile(operation, dataset, detector, level, filetype, filename, arc_start, arc_stop):
    "write ./arc_file with an arc4gl command set; the time range is used only when no filename is given"
    f = open('./arc_file', 'w')
    f.write('operation=' + operation   + '\n')
    f.write('dataset='   + dataset     + '\n')
    f.write('detector='  + detector    + '\n')
    f.write('level='     + str(level)  + '\n')
    f.write('filetype='  + filetype    + '\n')
    if filename != 'NA':
        f.write('filename=' + filename + '\n')
    else:
        f.write('tstart=' + arc_start + '\n')
        f.write('tstop='  + arc_stop  + '\n')
    f.write('go\n')
    f.close()


def _collectFitsNamesFromArc4gl(hakama, dare):
    "run arc4gl with ./arc_file, then return the fits file names found in its output"
    cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file > file_list'
    os.system(cmd)
    f = open('./file_list', 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    os.system('rm ./arc_file ./file_list')
#
#--- extract fits file names and drop everything else
#
    cleanedData = []
    for ent in data:
        m = re.search('fits', ent)
        if m is not None:
            atemp = re.split('\s+|\t+', ent)
            cleanedData.append(atemp[0])
    return cleanedData


def useArc4gl(operation, dataset, detector, level, filetype, startYear = 0, startYdate = 0, stopYear = 0 , stopYdate = 0, deposit='./', filename='NA'):
    """
    extract data using arc4gl.

    input:  operation --- e.g. retrieve / browse
            dataset   --- e.g. flight
            detector  --- e.g. hrc
            level     --- e.g. 0, 1, 2
            filetype  --- e.g. evt1
            startYear/startYdate, stopYear/stopYdate --- data period (used when startYear > 1000)
            deposit   --- directory where extracted files are placed
            filename  --- a specific file name; when given, the time range is not used
    output: extracted files under <deposit>; returns the list of fits file names.

    fixes: 'mv *.gz <deposit>.' and 'gzip -d <deposit><file>' dropped the path
    separator, so files were never placed/uncompressed in <deposit>; an unused
    os.listdir call was removed; the duplicated arc_file writing and output
    parsing were factored into helpers.
    NOTE(review): callers must supply either a time period or a filename;
    otherwise empty tstart/tstop lines are written.
    """
#
#--- read a couple of pieces of account information needed for arc4gl
#
    f = open(bindata_dir + '.dare', 'r')
    dare = f.readline().strip()
    f.close()
    f = open(bindata_dir + '.hakama', 'r')
    hakama = f.readline().strip()
    f.close()
#
#--- convert the period into arc4gl's mm/dd/yy,hh:mm:ss form (zero padded)
#
    arc_start = ''
    arc_stop  = ''
    if startYear > 1000:
        (year1, month1, day1, hours1, minute1, second1, ydate1) = tcnv.dateFormatCon(startYear, startYdate)
        (year2, month2, day2, hours2, minute2, second2, ydate2) = tcnv.dateFormatCon(stopYear, stopYdate)
        arc_start = '%02d/%02d/%s,%02d:%02d:00' % (month1, day1, str(year1)[2:4], hours1, minute1)
        arc_stop  = '%02d/%02d/%s,%02d:%02d:00' % (month2, day2, str(year2)[2:4], hours2, minute2)

    _writeArc4glCommandFile(operation, dataset, detector, level, filetype, filename, arc_start, arc_stop)
#
#--- when the command is retrieve: extract data and return the list of the files extracted
#
    if operation == 'retrieve':
        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file'
        os.system(cmd)
        os.system('rm ./arc_file')
#
#--- move the extracted files if a depository is specified
#--- (fix: path separator was missing in both shell commands below)
#
        if deposit != './':
            cmd = 'mv *.gz ' + deposit + '/.'
            os.system(cmd)
        for fout in os.listdir(deposit):
            if fnmatch.fnmatch(fout, '*gz'):
                cmd = 'gzip -d ' + os.path.join(deposit, fout)
                os.system(cmd)
#
#--- run arc4gl one more time (browse) to read the file names
#
        _writeArc4glCommandFile('browse', dataset, detector, level, filetype, filename, arc_start, arc_stop)
        return _collectFitsNamesFromArc4gl(hakama, dare)
#
#--- when the command is browse: just return the list of fits file names
#
    else:
        return _collectFitsNamesFromArc4gl(hakama, dare)
def useArc4gl(operation, dataset, detector, level, filetype, startYear = 0, startYdate = 0, stopYear = 0 , stopYdate = 0, deposit='./', filename='NA'):
    """
    extract data using arc4gl.

    input:  operation --- e.g. retrieve / browse
            dataset   --- e.g. flight
            detector  --- e.g. ephin
            level     --- e.g. 0, 1, 2
            filetype  --- e.g. ephrates
            startYear/startYdate, stopYear/stopYdate --- data period (year and ydate)
            deposit   --- directory where extracted files are placed
            filename  --- a specific file name; when given, the time range is not used
    output: extracted files under <deposit>; returns the list of the file names.

    NOTE(review): this definition shadows the earlier useArc4gl in this file,
    and its 'retrieve' branch is ephin-specific (it only moves and uncompresses
    *ephinf*.gz files) --- confirm before reusing for other detectors.
    fixes: 'rm /arc_file' typo left ./arc_file behind; the filename line was
    written without a trailing newline, corrupting the command file; the time
    fields are now zero padded, consistent with the sibling definition; the
    'ls' after retrieval now honors the deposit parameter.
    """
#
#--- read a couple of pieces of account information needed for arc4gl
#
    f = open(bindata_dir + '.dare', 'r')
    dare = f.readline().strip()
    f.close()
    f = open(bindata_dir + '.hakama', 'r')
    hakama = f.readline().strip()
    f.close()
#
#--- convert the period into arc4gl's mm/dd/yy,hh:mm:ss form (zero padded)
#
    (year1, month1, day1, hours1, minute1, second1, ydate1) = tcnv.dateFormatCon(startYear, startYdate)
    (year2, month2, day2, hours2, minute2, second2, ydate2) = tcnv.dateFormatCon(stopYear, stopYdate)
    arc_start = '%02d/%02d/%s,%02d:%02d:00' % (month1, day1, str(year1)[2:4], hours1, minute1)
    arc_stop  = '%02d/%02d/%s,%02d:%02d:00' % (month2, day2, str(year2)[2:4], hours2, minute2)
#
#--- write the arc4gl command file
#
    f = open('./arc_file', 'w')
    f.write('operation=' + operation + '\n')
    f.write('dataset='   + dataset   + '\n')
    f.write('detector='  + detector  + '\n')
    f.write('level='     + str(level)+ '\n')
    f.write('filetype='  + filetype  + '\n')
    if filename != 'NA':
        f.write('filename=' + filename + '\n')   #--- fix: trailing newline was missing
    else:
        f.write('tstart=' + arc_start + '\n')
        f.write('tstop='  + arc_stop  + '\n')
    f.write('go\n')
    f.close()
#
#--- when the command is retrieve: extract (ephin) data and return the list of the files extracted
#
    if operation == 'retrieve':
        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file'
        os.system(cmd)
        cmd = 'mv *ephinf*.gz ' + deposit
        os.system(cmd)
        cmd = 'gzip -d ' + deposit + '/*gz'
        os.system(cmd)
        os.system('ls ' + deposit + '/ephinf*.fits > ./zlist')
        f = open('./zlist', 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./arc_file ./zlist')
        return data                              #--- list of the file names
#
#--- when the command is browse: return the list of fits file names
#
    else:
        cmd = 'echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i arc_file > file_list'
        os.system(cmd)
        f = open('./file_list', 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
        os.system('rm ./arc_file ./file_list')   #--- fix: was 'rm /arc_file'
        return data