forked from WIYN-ODI/QuickReduce
-
Notifications
You must be signed in to change notification settings - Fork 0
/
podi_persistency.py
executable file
·1388 lines (988 loc) · 43.5 KB
/
podi_persistency.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#! /usr/bin/env python3
#
# Copyright 2012-2013 Ralf Kotulla
# kotulla@uwm.edu
#
# This file is part of the ODI QuickReduce pipeline package.
#
# If you find this program or parts thereof please make sure to
# cite it appropriately (please contact the author for the most
# up-to-date reference to use). Also if you find any problems
# or have suggestions on how to improve the code or its
# functionality please let me know. Comments and questions are
# always welcome.
#
# The code is made publicly available. Feel free to share the link
# with whoever might be interested. However, I do ask you to not
# publish additional copies on your own website or other sources.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
"""
This module handles all functionality related to saturation and persistency
effects. Most functions are called during reduction from within collectcells.
Standalone functions and command-line flags
-------------------------------------------
* **-makecat**
``podi_persistency -makecat (-persistency=dir/) file1.fits file2.fits``
Create saturation catalogs for a number of files and write results to the
directory given with the -persistency flag.
* **-masksattrails**
``podi_persistency -masksattrails input.fits catalog.fits output.fits``
Apply the persistency masking to the specified input file, using the
saturation table from file catalog.fits and write the resulting file into
output.fits. This assumes that the input.fits file is a valid file created
with collectcells.
* **-findclosemjds**
``podi_persistency -findclosemjds (-persistency=/dir) input.fits``
Test-routine to find saturation catalogs within a fixed range of
[-1,600] seconds around the MJD of the specified input frame.
* **-fixpersistency**
``podi_persistency -fixpersistency (-persistency=/dir) input.fits output.fits``
Similar to the -masksattrails functionality, but using all files within a
fixed MJD range ([-1,1800] seconds) around the MJD of the input frame. Results
are written to output.fits. As above it is assumed that input.fits is a valid
frame created with collectcells.
Modules
-------
"""
import sys
import os
import astropy.io.fits as pyfits
import numpy
import scipy
from astLib import astWCS
import jdcal
import itertools
import time
import multiprocessing
import queue
from podi_definitions import *
from podi_commandline import *
import podi_sitesetup as sitesetup
import podi_logging
import logging
try:
import cPickle as pickle
except:
import pickle
# if (sitesetup.number_cpus == "auto"):
# try:
# number_cpus = multiprocessing.cpu_count()
# print "Yippie, found %d CPUs to use in parallel!" % (number_cpus)
# if (number_cpus > sitesetup.max_cpu_count and sitesetup.max_cpu_count > 1):
# number_cpus = sitesetup.max_cpu_count
# print "... but using only %d of them!" % (number_cpus)
# except:
# pass
# else:
# number_cpus = sitesetup.number_cpus
def mp_create_saturation_catalog(queue_in, queue_ret, verbose=False):
    """
    Worker-process loop for parallel saturation-catalog creation.

    Pulls filenames off the input queue, extracts the saturated-pixel
    coordinate arrays for each file, and posts the results to the return
    queue. Assembling the actual FITS tables is left to the main process.

    Parameters
    ----------
    queue_in : Queue
        Holds all the input files
    queue_ret : Queue
        Queue to report results back to main process
    verbose : bool
        Passed through to create_saturation_catalog_ota
    """
    logger = logging.getLogger("MakeSetCat")
    logger.debug("Starting worker process")

    while True:
        job = queue_in.get()

        # A None entry is the shutdown signal from the main process
        if job is None:
            queue_in.task_done()
            logger.debug("Received shutdown command, terminating")
            return

        fits_filename, sat_limit = job
        result = create_saturation_catalog_ota(
            fits_filename, None,
            verbose=verbose,
            return_numpy_catalog=True,
            saturation_limit=sat_limit)

        queue_ret.put(result)
        queue_in.task_done()
def create_saturation_catalog(filename, output_dir, verbose=True, mp=False, redo=False,
                              saturation_limit=65535):
    """
    Create catalogs listing all saturated pixels to enable handling saturation
    and persistency effects later-on.

    The main purpose of this function is to call create_saturation_catalog_ota
    for every OTA of the exposure, wrapped in mp_create_saturation_catalog
    worker processes for parallel processing, and to merge the per-OTA results
    into a single multi-extension FITS catalog.

    Parameters
    ----------
    filename : string
        One file of the exposure (or the exposure directory). This file is
        mainly used to obtain the necessary information to create all the
        other filenames for this exposure.
    output_dir : string
        Directory to hold all the saturation catalogs. This is the directory
        that will be fed into collectcells via the -persistency command line
        flag.
    verbose : bool
        Print additional progress messages to stdout.
    mp : bool - not used
    redo : bool
        Recreate the saturation catalog if it already exists
    saturation_limit : int
        Pixel value at or above which a pixel counts as saturated.

    Returns
    -------
    None
    """
    logger = logging.getLogger("CreateSaturationCatalog")
    logger.info("Creating saturation mask for %s ..." % (filename))

    if (os.path.isfile(filename)):
        # This is one of the OTA fits files
        # extract the necessary information to generate the
        # names of all the other filenames
        try:
            hdulist = pyfits.open(filename)
        except IOError:
            logger.warning("\rProblem opening file %s...\n" % (filename))
            return
        except:
            podi_logging.log_exception()
            # Without a valid HDU list we cannot proceed; bail out here
            # instead of falling through to an unbound `hdulist` below.
            return

        hdr_filename = hdulist[0].header['FILENAME']
        hdr_items = hdr_filename.split('.')
        basename = "%s.%s" % (hdr_items[0], hdr_items[1])
        hdulist.close()

        # Split the input filename to extract the directory part
        directory, dummy = os.path.split(filename)

    elif (os.path.isdir(filename)):
        # As a safety precaution, if the first parameter is the directory containing
        # the files, extract just the ID string to be used for this script
        if (filename[-1] == "/"):
            filename = filename[:-1]
        basedir, basename = os.path.split(filename)
        directory = filename

    else:
        logger.error("Input %s is neither a file nor a directory!" % (filename))
        logger.error("Aborting operation due to illegal input.")
        return

    output_filename = "%s/%s.saturated.fits" % (output_dir, basename)
    logger.debug("Output saturation catalog: %s" % (output_filename))

    if (os.path.isfile(output_filename) and not redo):
        logger.debug("File (%s) exists, skipping!" % (output_filename))
        return

    # Setup parallel processing
    queue = multiprocessing.JoinableQueue()
    return_queue = multiprocessing.JoinableQueue()

    number_jobs_queued = 0
    first_fits_file = None
    ota_list = []

    for (ota_x, ota_y) in itertools.product(range(8), repeat=2):
        ota = ota_x * 10 + ota_y

        filename = "%s/%s.%02d.fits" % (directory, basename, ota)
        if (not os.path.isfile(filename)):
            # Also try the fpack-compressed variant before skipping this OTA
            filename = "%s/%s.%02d.fits.fz" % (directory, basename, ota)
            if (not os.path.isfile(filename)):
                continue

        queue.put((filename, saturation_limit))
        number_jobs_queued += 1

        # Remember the very first FITS file we find. This will serve as the primary HDU
        if (first_fits_file is None):
            # Create a primary HDU from the first found fits-file
            try:
                firsthdu = pyfits.open(filename)
            except IOError:
                logger.warning("Problem opening FITS file %s" % (filename))
                continue
            logger.debug("Copying general information from file %s" % (filename))
            ota_list.append(pyfits.PrimaryHDU(header=firsthdu[0].header))
            firsthdu.close()
            firsthdu = None
            first_fits_file = filename

    if (first_fits_file is None):
        logger.warning("Couldn't find a valid FITS file, thus nothing to do")
        return

    # Now start all the workers
    logger.debug("Starting worker processes")
    processes = []
    for i in range(sitesetup.number_cpus):
        p = multiprocessing.Process(target=mp_create_saturation_catalog,
                                    args=(queue, return_queue, False))
        p.start()
        processes.append(p)
        time.sleep(0.01)

    # Tell all workers to shut down when no more data is left to work on
    logger.debug("Sending shutdown command to worker processes")
    for i in range(len(processes)):
        if (verbose): stdout_write("Sending quit command!\n")
        queue.put(None)

    logger.debug("Collecting catalogs for each OTA")
    for i in range(number_jobs_queued):
        if (verbose): print("reading return ", i)
        cat_name = return_queue.get()
        if (cat_name is not None):
            final_cat, extension_name = cat_name

            columns = [
                pyfits.Column(name='CELL_X', format='I', array=final_cat[:, 0]),
                pyfits.Column(name='CELL_Y', format='I', array=final_cat[:, 1]),
                pyfits.Column(name='X', format='I', array=final_cat[:, 2]),
                pyfits.Column(name='Y', format='I', array=final_cat[:, 3])
            ]
            # Create the table extension
            coldefs = pyfits.ColDefs(columns)
            tbhdu = pyfits.BinTableHDU.from_columns(coldefs)
            tbhdu.name = extension_name
            ota_list.append(tbhdu)

        return_queue.task_done()

    # Join each process to make sure they terminate(d) correctly
    logger.debug("Joining process to ensure proper termination")
    for p in processes:
        p.join()

    hdulist = pyfits.HDUList(ota_list)
    output_filename = "%s/%s.saturated.fits" % (output_dir, basename)
    clobberfile(output_filename)
    logger.debug("Writing output file %s" % (output_filename))
    hdulist.writeto(output_filename, overwrite=True)
    logger.debug("all done!")
    return
def create_saturation_catalog_ota(filename, output_dir, verbose=True,
                                  return_numpy_catalog=False, saturation_limit=65535):
    """
    Create a saturation table for a given OTA exposure.

    Parameters
    ----------
    filename : string
        Filename of the OTA FITS file.
    output_dir : string
        If return_numpy_catalog is not set, write the saturation catalog into
        this directory.
    verbose : bool
        If set, print a sample of the saturated-pixel list to stdout.
    return_numpy_catalog : bool
        If set, return the results as numpy array instead of writing individual
        files to disk.
    saturation_limit : int
        Pixel value at or above which a pixel counts as saturated.

    Returns
    -------
    None - if no saturated pixels are found in this frame
    ndarray, extname - if return_numpy_catalog is set
    ndarray - otherwise (the catalog is also written to disk)
    """
    logger = logging.getLogger("OTASatCat")

    # Open filename
    logger.debug("Input filename: %s" % (filename))
    try:
        hdulist = pyfits.open(filename)
    except IOError:
        logger.debug("Can't open file %s" % (filename))
        return None
    except:
        podi_logging.log_exception()
        return None

    # Pull the identifying metadata from the primary header
    mjd = hdulist[0].header['MJD-OBS']
    obsid = hdulist[0].header['OBSID']
    ota = int(hdulist[0].header['FPPOS'][2:4])
    datatype = hdulist[0].header['FILENAME'][0]

    logger = logging.getLogger("CreateSatCat: %s, OTA %02d" % (obsid, ota))
    logger.debug("Starting work")

    # Accumulate one (cell_x, cell_y, x, y) row per saturated pixel
    full_coords = numpy.zeros(shape=(0,4)) #, dtype=numpy.int16)
    saturated_pixels_total = 0

    for ext in range(1, len(hdulist)):
        if (not is_image_extension(hdulist[ext])):
            continue

        # Find all saturated pixels (values >= 65K)
        data = hdulist[ext].data
        saturated = (data >= saturation_limit)
        # print hdulist[ext].header['EXTNAME'], data.shape, numpy.max(data)

        # Skip this cell if no pixels are saturated
        number_saturated_pixels = numpy.sum(saturated)
        if (number_saturated_pixels <= 0):
            continue

        saturated_pixels_total += number_saturated_pixels

        # Cell position within the OTA, needed to reconstruct full-OTA coordinates
        wn_cellx = hdulist[ext].header['WN_CELLX']
        wn_celly = hdulist[ext].header['WN_CELLY']
        # logger.debug("number of saturated pixels in cell %d,%d: %d" % (wn_cellx, wn_celly, number_saturated_pixels))

        # Do some book-keeping preparing for the masking
        rows, cols = numpy.indices(data.shape)
        saturated_rows = rows[saturated]
        saturated_cols = cols[saturated]
        #print saturated_rows.shape, saturated_cols.shape

        coordinates = numpy.zeros(shape=(number_saturated_pixels,4))
        coordinates[:,0] = wn_cellx
        coordinates[:,1] = wn_celly
        coordinates[:,2] = saturated_cols[:]
        coordinates[:,3] = saturated_rows[:]

        full_coords = numpy.append(full_coords, coordinates, axis=0) #coordinates if full_coords == None else

    # Cell ids and in-cell pixel coordinates are small, so int16 suffices
    final_cat = numpy.array(full_coords, dtype=numpy.dtype('int16'))

    if (saturated_pixels_total <= 0):
        logger.debug("No saturated pixels found, well done!")
        return None

    logger.debug("Found %d saturated pixels, preparing catalog" % (saturated_pixels_total))

    # Now define the columns for the table
    columns = [\
        pyfits.Column(name='CELL_X', format='I', array=final_cat[:, 0]),
        pyfits.Column(name='CELL_Y', format='I', array=final_cat[:, 1]),
        pyfits.Column(name='X', format='I', array=final_cat[:, 2]),
        pyfits.Column(name='Y', format='I', array=final_cat[:, 3])
        ]

    # Create the table extension
    coldefs = pyfits.ColDefs(columns)
    tbhdu = pyfits.BinTableHDU.from_columns(coldefs)
    extension_name = "OTA%02d.SATPIX" % (ota)
    tbhdu.name = extension_name

    if (return_numpy_catalog):
        logger.debug("Returning results as numpy catalog")
        return final_cat, extension_name

    # Also copy the primary header into the new catalog
    primhdu = pyfits.PrimaryHDU(header=hdulist[0].header)

    # Create a HDUList for output
    out_hdulist = pyfits.HDUList([primhdu, tbhdu])

    # And create the output file
    output_filename = "%s/%s%s.%02d.saturated.fits" % (output_dir, datatype, obsid, ota)
    stdout_write("Writing output: %s\n" % (output_filename))
    clobberfile(output_filename)
    out_hdulist.writeto(output_filename, overwrite=True)

    if (verbose):
        print("some of the saturated pixels:\n",final_cat[0:10,:])

    #numpy.savetxt("test", final_cat)
    #print full_coords.shape
    logger.debug("Retuning final FITS table catalog")
    return final_cat
def mask_saturation_defects(catfilename, ota, data):
    """
    Mask out, for the specified OTA, all pixels affected by saturation
    trailing. These pixels are set to NaN to hopefully be removed during
    stacking.

    Parameters
    ----------
    catfilename : string
        name of the saturation catalog file
    ota : int
        OTA ID of this OTA data block
    data : ndarray
        Input ndarray holding the data for this OTA

    Returns
    -------
    ndarray with all pixels affected by saturation masked out
    (data is also modified in place).

    Warning
    -------
    This function does not yet handle binned data!
    """
    logger = logging.getLogger("MaskSatDefects")

    # Open the catalog file
    logger.debug("Trying to open file %s" % (catfilename))
    if (not os.path.isfile(catfilename)):
        logger.warning("Could not find saturation catalog (%s)" % (catfilename))
        return data

    try:
        catlist = pyfits.open(catfilename)
    except:
        logger.warning("Unable to open saturation catalog (%s)" % (catfilename))
        return data

    extension_name = "OTA%02d.SATPIX" % (ota)
    try:
        ota_cat = catlist[extension_name].data
    except:
        # A missing table extension means this OTA had no saturated pixels
        logger.debug("This OTA (%02d) does not have any saturated pixels" % (ota))
        return data

    logger.debug("Found saturated pixels in this OTA (%02d)" % (ota))

    # Now we have a valid catalog extension
    # First of all, create a frame for the mask
    mask = numpy.zeros(shape=data.shape)

    cell_x = ota_cat.field('CELL_X')
    cell_y = ota_cat.field('CELL_Y')
    pixel_x = ota_cat.field('X')
    pixel_y = ota_cat.field('Y')

    # Combine the cell x/y coordinates into a single id (x*10 + y)
    cell_xy = cell_x * 10 + cell_y
    unique_cells = set(cell_xy)

    for cell in unique_cells:
        # Select all saturated pixels belonging to this particular cell
        in_this_cell = (cell_xy == cell)
        saturated_cols = pixel_x[in_this_cell]
        saturated_rows = pixel_y[in_this_cell]

        unique_cols = set(saturated_cols)

        # extract the mask block for the current cell
        cx, cy = int(math.floor(cell/10)), cell % 10
        bx, tx, by, ty = cell2ota__get_target_region(cx, cy)
        cell_mask = mask[by:ty, bx:tx]

        for col in unique_cols:
            # Skip columns that fall outside the (possibly trimmed) cell block
            if (col >= cell_mask.shape[1]):
                continue
            this_col_saturated = saturated_rows[saturated_cols == col]
            # Flag the column from the lowest saturated row to the top of the
            # cell -- that is the region affected by trailing
            min_y = numpy.min(this_col_saturated)
            cell_mask[min_y:, col] = 1

        # Re-insert the cell mask into the larger mask
        mask[by:ty, bx:tx] = cell_mask

    logger.debug("Masking out %d pixels in OTA %02d" % (numpy.sum(mask), ota))

    # Now we have the full mask, mark all pixels as invalid
    # (use numpy.nan -- the numpy.NaN alias was removed in numpy 2.0)
    data[mask == 1] = numpy.nan
    return data
def load_saturation_table_list(indexfile, mjd_catalog_list):
    """
    Read the index file with the list of available saturation tables and
    their MJDs. This speeds up processing.

    A pickled copy of the index (indexfile+".pickle") is tried first; if it
    is missing or unreadable, the plain-text index file is parsed instead.

    Parameters
    ----------
    indexfile : string
        Path of the plain-text index file written by save_saturation_table_list.
    mjd_catalog_list : dict
        Existing catalog (filename -> (mjd, exptime)) to add entries to.

    Returns
    -------
    dict mapping absolute filename -> (mjd, exptime)
    """

    # Make sure the file exists
    if (not os.path.isfile(indexfile)):
        return mjd_catalog_list

    # Prefer the pickled copy - much faster than parsing the text file.
    # (The original code assigned the pickle result to a misspelled variable
    # and leaked the file handle via a call to a non-existent close();
    # both fixed here.)
    pickled_file = indexfile + ".pickle"
    if (os.path.isfile(pickled_file)):
        try:
            with open(pickled_file, "rb") as pickle_fh:
                return pickle.load(pickle_fh)
        except:
            # Corrupt or unreadable pickle - fall back to the text index
            pass

    # This means we couldn't find or read the pickled catalog;
    # in that case, read the regular ascii index file
    with open(indexfile, "r") as fh:
        lines = fh.readlines()

    for line in lines:
        items = line.strip().split("-->")
        try:
            abs_filename = items[0].strip()
            # The right-hand side holds "mjd exptime", separated by
            # whitespace (see save_saturation_table_list's output format)
            values = items[1].split()
            mjd = float(values[0])
            exptime = float(values[1])
            # only add the file to the catalog if it exists
            if (os.path.isfile(abs_filename)):
                mjd_catalog_list[abs_filename] = (mjd, exptime)
        except:
            print("@@@@@ ERROR in podi_persistency:")
            print("@@@@@ Problem reading line:")
            print("@@@@@", line)

    return mjd_catalog_list
def save_saturation_table_list(filename, mjd_catalog_list):
    """
    Write the catalog back to an index file (plus a pickled copy) so we can
    access it again in the future without having to re-read the MJDs from
    each file.

    Parameters
    ----------
    filename : string
        Name of the index file, or a directory (in which case the index is
        written to "index.cat" inside that directory).
    mjd_catalog_list : dict
        Mapping of catalog filename -> (mjd, exptime).
    """

    # Create the index filename if the input is only a directory
    if (os.path.isdir(filename)):
        filename = "%s/index.cat" % (filename)

    # Python 3: dict.iteritems() no longer exists, use items() instead
    file_listing = ['%s --> %.12f %.3f' % (catfile, mjd, exptime)
                    for catfile, (mjd, exptime) in mjd_catalog_list.items()]

    with open(filename, "w") as fh:
        fh.write("\n".join(file_listing) + "\n")

    # Also dump a pickled copy, which is much faster to load back
    pickled_cat = filename + ".pickle"
    with open(pickled_cat, "wb") as pf:
        pickle.dump(mjd_catalog_list, pf)

    return
def get_list_of_saturation_tables(directory, mjd_catalog_list=None):
    """
    Build an inventory of the saturation maps available in the specified
    directory. For each file we store the filename and the MJD-OBS header
    value that we will later use to specify the amount of correction required.
    """
    logger = logging.getLogger("GetSatCats")

    if (mjd_catalog_list is None):
        mjd_catalog_list = {}

    # Get a list of all files in the specified directory
    if (not os.path.isdir(directory)):
        logger.warning("Unable to find specified persistency-directory: %s" % (directory))
        return mjd_catalog_list
    try:
        filelist = os.listdir(directory)
    except OSError:
        # something's weird here, directory exists but can't be accessed
        return mjd_catalog_list

    # Seed the inventory with whatever the cached index already knows
    indexfile = "%s/index.cat" % (directory)
    mjd_catalog_list = load_saturation_table_list(indexfile, mjd_catalog_list)

    for entry in filelist:
        # The file should contain the name "saturated.fits"
        if (entry.find("saturated.fits") < 0):
            # this does not look like a valid file
            continue

        full_filename = "%s/%s" % (directory, entry)
        abs_filename = os.path.abspath(full_filename)
        if (abs_filename in mjd_catalog_list):
            # already indexed, nothing to add
            continue

        try:
            hdulist = pyfits.open(full_filename)
            mjd = hdulist[0].header['MJD-OBS']
            exptime = hdulist[0].header['EXPTIME']
            mjd_catalog_list[abs_filename] = (mjd, exptime)
            hdulist.close()
        except:
            # unreadable file - skip it silently (best-effort inventory)
            pass

    # At the end of the run, dump the list of files into the directory
    save_saturation_table_list(indexfile, mjd_catalog_list)

    return mjd_catalog_list
def select_from_saturation_tables(mjd_catalog_list, search_mjd, delta_mjd_range=[0,600]):
    """
    This routine filters the list of saturation maps to select only files
    within the specified delta_mjd window. Intervals are given in seconds,
    and both the upper and lower limit are considered to be within the window.

    Parameters
    ----------
    mjd_catalog_list : dictionary
        List of all known saturation tables and their respective MJD (modified
        julian date) times.
    search_mjd : float
        MJD of the frame currently being processed
    delta_mjd_range : float[2]
        Search range of MJDs, in seconds. If a saturation catalog is within
        the search range, its filename is returned for further processing.
        If delta_mjd_range is ``None``, only the saturation catalog with an
        MJD identical (within +/- 1 second) to search_mjd is returned.

    Returns
    -------
    If delta_mjd_range is None: the filename of the matching catalog, or
    None when there is no match.
    Otherwise: dictionary containing the filenames of all saturation
    catalogs with MJD in the search range and their respective (mjd,
    exptime) values.
    """

    close_mjd_files = {}

    # Python 3: dict.iteritems() no longer exists, use items() instead
    for full_filename, (mjd, exptime) in mjd_catalog_list.items():
        # Offset between this exposure and the catalog's exposure, in seconds
        delta_mjd = (search_mjd - mjd) * 86400.

        if (delta_mjd_range is None):
            # Exact-match mode: accept only an offset below one second
            if (delta_mjd > -1 and delta_mjd < 1):
                return full_filename
        else:
            # Accept the catalog if any part of its exposure (length exptime)
            # overlaps the search window
            if ((delta_mjd) >= delta_mjd_range[0] and
                (delta_mjd - exptime) <= delta_mjd_range[1]):
                close_mjd_files[full_filename] = (mjd, exptime)

    if (delta_mjd_range is None):
        # exact-match mode found nothing
        return None

    return close_mjd_files
def correct_persistency_effects(ota, data, mjd, filelist):
    """
    Create a map, for the specified OTA, where all pixels affected by
    persistency are flagged with the MJD of their last saturation. From this
    we can then derive the required correction.

    The detailed prescription for the amplitude of the correction is still
    unknown, so for the time being all persistent pixels are simply masked
    out (set to NaN).

    At the present, this function is more complicated than it would need to
    be, but it is prepared for future improvements that correct and not just
    mask out the persistency effect.

    Parameters
    ----------
    ota : int
        Which OTA does the data belong to
    data : ndarray
        ndarray with the data for this OTA
    mjd : float
        MJD of this exposure, so we can correct the effect based on the time-
        difference between this exposure and the time of last saturation.
    filelist : dictionary
        dictionary of all saturation catalogs and their respective MJDs.

    Returns
    -------
    corrected data : ndarray
        Returns the data with affected pixels being masked out (set to NaNs)

    Warning
    -------
    This routine likely does not handle binning correctly.
    """
    logger = logging.getLogger("CorrectPersistency(%02d)" % (ota))

    # First of all, create a frame for the mask
    mask = numpy.zeros(shape=data.shape)

    # extract all mjds
    # Python 3: dict.iteritems() no longer exists, use items() instead
    mjds = []
    catalog = []
    for catfilename, (cat_mjd, exptime) in filelist.items():
        # Sort key must be the *catalog's* MJD, not the (constant) MJD of
        # this exposure -- appending `mjd` here made the sort a no-op.
        mjds.append(cat_mjd)
        catalog.append((cat_mjd, catfilename, exptime))

    # Now sort the list of MJD's from smallest (earliest) to largest (latest)
    mjd_sorting = numpy.argsort(numpy.array(mjds))

    # And create a new filelist with MJDs sorted, latest first
    mjd_sorted_filelist = []
    for i in range(len(mjds)-1, -1, -1):
        mjd_sorted_filelist.append(catalog[mjd_sorting[i]])

    for cat_mjd, catfilename, exptime in mjd_sorted_filelist:
        if (not os.path.isfile(catfilename)):
            logger.warning("Could not find saturation catalog (%s)" % (catfilename))
            continue
        try:
            # Open the catalog file
            catlist = pyfits.open(catfilename)
            extension_name = "OTA%02d.SATPIX" % (ota)
            d_mjd = mjd - cat_mjd + exptime/86400.
        except:
            logger.warning("Unable to open saturation catalog (%s)" % (catfilename))
            continue

        try:
            ota_cat = catlist[extension_name].data
        except:
            # no saturated pixels in this OTA for this catalog
            continue

        # Now we have a valid catalog extension
        cell_x = ota_cat.field('CELL_X')
        cell_y = ota_cat.field('CELL_Y')
        pixel_x = ota_cat.field('X')
        pixel_y = ota_cat.field('Y')

        # Combine the cell x/y coordinates into a single id (x*10 + y)
        cell_xy = cell_x * 10 + cell_y
        unique_cells = set(cell_xy)

        for cell in unique_cells:
            # Select all saturated pixels in this particular cell
            in_this_cell = (cell_xy == cell)
            saturated_cols = pixel_x[in_this_cell]
            saturated_rows = pixel_y[in_this_cell]

            unique_cols = set(saturated_cols)

            # extract the mask block for the current cell
            cx, cy = int(math.floor(cell/10)), cell % 10
            bx, tx, by, ty = cell2ota__get_target_region(cx, cy)
            cell_mask = mask[by:ty, bx:tx]

            for col in unique_cols:
                # Skip columns outside the (possibly trimmed) cell block
                if (col >= cell_mask.shape[1]):
                    continue
                this_col_saturated = saturated_rows[saturated_cols == col]
                # Flag the column from the bottom of the cell up to the
                # highest previously-saturated pixel with the catalog's MJD
                max_y = numpy.max(this_col_saturated)
                cell_mask[:max_y, col] = cat_mjd

            # Re-insert the cell mask into the larger mask
            mask[by:ty, bx:tx] = cell_mask

    # Now we have the full mask, mark all pixels as invalid
    # (use numpy.nan -- the numpy.NaN alias was removed in numpy 2.0)
    correction = mask > 0
    data[correction] = numpy.nan
    return data
def map_persistency_effects(hdulist, verbose=False):
"""
outdated - do not use
"""
mask_thisframe_list = {}
mask_timeseries_list = {}
if (verbose): stdout_write("Creating persistency masks ...")
saturated_pixels_total = 0
extensions_with_saturated_pixels = 0
pixels_masked_out_thisframe = 0
pixels_masked_out_timeseries = 0
#
# Check all cells in this file (for on OTA)
#
for ext in range(len(hdulist)):
# Skip extensions that are no Image HDUs
if (not is_image_extension(hdulist[ext])):
continue
extname = hdulist[ext].header['EXTNAME']
if (verbose): stdout_write("Working on extension %s (%d)\n" % (extname, ext))
# Find all saturated pixels (values >= 65K)
data = hdulist[ext].data
saturated = (data >= 65535)
# Skip this cell if no pixels are saturated
number_saturated_pixels = numpy.sum(saturated)
if (number_saturated_pixels <= 0):
continue
if (verbose): print("number of saturated pixels:", number_saturated_pixels)
saturated_pixels_total += number_saturated_pixels
extensions_with_saturated_pixels += 1
# Do some book-keeping preparing for the masking
rows, cols = numpy.indices(data.shape)
mask_thisframe = numpy.zeros(shape=data.shape)
mask_thisframe = mask_thisframe > 1
mask_time = numpy.zeros(shape=data.shape)
mask_time = mask_time > 1
#mask_time.fill(False)
saturated_rows = rows[saturated]
saturated_cols = cols[saturated]
unique_cols = set(saturated_cols)
#
# Now convert the list of saturated pixels into a map
#
# Old, slow method
if (False):
for i in range(saturated_rows.shape[0]):
mask_up = (cols == saturated_cols[i]) & (rows >= saturated_rows[i])
mask_down = (cols == saturated_cols[i]) & (rows <= saturated_rows[i])