def _watch_posts(self):
    """Run a local XML-RPC endpoint that prints posts pushed by the
    board server; blocks until the user presses Ctrl-C.

    Registers this client as a listener on the server, serves the
    ``notify_new_post`` callback, and always unregisters/closes on exit.
    """
    host = ask_user('My IP:')
    port = int(ask_user('My port:'))

    def notify_new_post(post):
        # Invoked remotely by the board server for each new post.
        print(format_posts([post]))
        print()

    # BUGFIX: initialize before the try block; otherwise, if
    # SimpleXMLRPCServer() raises (e.g. port already in use), the
    # finally clause hit a NameError on 'rpc_server' and masked the
    # original exception.
    rpc_server = None
    try:
        rpc_server = SimpleXMLRPCServer((host, port), logRequests=False,
                                        allow_none=True)
        rpc_server.register_function(notify_new_post)
        self._server.register_listener(self._user_id, host, port)
        print('Press Ctrl-C to stop')
        print('Listening...')
        print()
        rpc_server.serve_forever()
    except KeyboardInterrupt:
        pass  # Ctrl-C is the normal way to stop watching
    finally:
        if rpc_server:
            rpc_server.server_close()
            self._server.unregister_listener(self._user_id)
def ask_user(user_conf):
    """Hostname stage: optionally record an FQDN in user_conf['hostname']."""
    LOG.info('Stage: hostname configuration\n')
    utils.fmt_print('==== HOSTNAME CONFIGURE ====')
    prompt = 'Do you want to set the hostname(yes, no) [yes]: '
    answer = utils.ask_user(prompt, ('yes, no'), 'yes')
    # Guard clause: nothing to record unless the user opted in.
    if answer.lower() != 'yes':
        return
    prompt = 'Input the FQDN hostname you want to use for this host: '
    user_conf['hostname'] = utils.ask_user(prompt, check=utils.check_hostname)
def _add_post(self):
    """Prompt for the post fields and submit the post to the server."""
    # Prompt in the same order as the server API expects the arguments.
    fields = [ask_user(label) for label in ('Subject:', 'Title:', 'Body:')]
    try:
        self._server.add_post(self._user_id, *fields)
    except Fault as ex:
        print(get_message(ex))
def _ask_ntp(user_conf):
    """NTP stage: record a comma-separated server list, or '' when none."""
    LOG.info('Stage: ntp server configuration\n')
    utils.fmt_print('==== NTP SERVER CONFIGURE ====')
    want_ntp = utils.ask_user(
        'Do you have some local ntp servers to use(yes, no) [yes]: ',
        ('yes, no'), 'yes')
    if want_ntp.lower() != 'yes':
        # No local servers: store an explicit empty value.
        user_conf['ntp_server'] = ''
        return
    user_conf['ntp_server'] = utils.ask_user(
        'Input the ntp server ip(seperated by ",", eg 10.10.1.1,10.10.1,2): ',
        check=utils.check_ip_list)
def config_cinder(user_conf):
    """Decide whether the installer should create cinder volumes and,
    if the user wants it, which device to build the volume group on.

    Writes user_conf['os_rdo_cinder'] (bool) and, optionally,
    user_conf['os_cinder_dev'] (existing device path).
    """
    # whether we need to set CONFIG_CINDER_VOLUMES_CREATE yes or no
    user_conf['os_rdo_cinder'] = not cinder_volume_exist()
    # NOTE(review): this guard fires when the volume group EXISTS
    # (os_rdo_cinder False -> 'not' True), yet the warning says the
    # group was NOT found.  Either this condition or the assignment
    # above looks inverted -- confirm intent against cinder_volume_exist().
    if not user_conf['os_rdo_cinder']:
        LOG.warn('No cinder volume group(%s) found' % CINDER_VOLUME_NAME)
        txt = 'Do you want to create cinder volume group now(yes, no) [yes]: '
        cfg_cinder = utils.ask_user(txt, ('yes, no'), 'yes')
        if cfg_cinder.lower() == 'yes':
            txt = 'Please input the name of the device you want to use for cinder: '
            # The check only verifies the path exists; it does not
            # confirm the device is a block device or unused.
            cinder_dev = utils.ask_user(txt, check=lambda x: os.path.exists(x))
            user_conf['os_cinder_dev'] = cinder_dev
            # The setup will build the VG itself, so RDO must not.
            user_conf['os_rdo_cinder'] = False
def _add_subject(self):
    """Prompt for a subject name and post limit, then create the subject."""
    name = ask_user('Subject name:')
    raw_limit = ask_user('Posts limit:')
    try:
        limit = int(raw_limit)
    except ValueError:
        # Bail out early: the server is never contacted with bad input.
        print('Not a valid number')
        return
    try:
        self._server.add_subject(self._user_id, name, limit)
    except Fault as ex:
        print(get_message(ex))
def _print_filtered_posts(self):
    """Ask for a subject and a date window, then list the matching posts."""
    topic = ask_user('Subject:')
    start = ask_user('Minimum date (yyyy-MM-dd hh:mm):')
    end = ask_user('Maximum date (yyyy-MM-dd hh:mm):')
    try:
        matching = self._server.get_posts(topic, start, end)
    except Fault as ex:
        print(get_message(ex))
    else:
        # Only print on success; the try/else keeps the happy path clear.
        print()
        print(format_posts(matching))
# NOTE(review): this block arrived with its source redacted -- the
# '******' runs below replaced the original password-prompt strings
# (apparently a secrets scrubber), so the function no longer parses as
# valid Python and cannot be safely reconstructed here.  Apparent
# intent, from what remains: on controller nodes only, loop until the
# keystone admin password and its confirmation match, store it in
# user_conf['os_pwd'], then ask for the compute-host IP list and call
# config_cinder().  Restore the original from version control.
def ask_user(user_conf): if user_conf['role'] != 'controller': # nothing shoule be done for other kinds of node for now return LOG.info('Stage: openstack configuration\n') utils.fmt_print('==== OPENSTACK CONFIGURE ====') while True: # fmt_print('Confirm admin password:'******'The password to use for keystone admin user: '******'Confirm admin password: '******'os_pwd'] = pwd break else: utils.fmt_print('Sorry, passwords do not match') compute_hosts_txt = "IP adresses of compute hosts(seperated by ',', eg '10.10.1.2,10.10.1.3'): " user_conf['compute_hosts'] = utils.ask_user(compute_hosts_txt, check=utils.check_ip_list) # cinder config config_cinder(user_conf)
def _ask_tun_nic(user_conf):
    """Pick the tunnel NIC and optionally configure its IP and netmask."""
    prompt = ("which nic do you want to use as tunnel "
              "interface: %s [%s]: " % (nics, nics[1]))
    user_conf['tun_nic'] = utils.ask_user(prompt, nics, nics[1])
    prompt = ("Do you want this setup to configure the tunnel "
              "network? (Yes, No) [Yes]: ")
    wants_config = utils.ask_user(prompt, ('yes, no'), 'yes')
    if wants_config.lower() == 'yes':
        user_conf['cfg_tun'] = True
        utils.fmt_print("==== NETWORK CONFIGURATION FOR TUNNEL "
                        "INTERFACE ====")
        user_conf['tun_nic_ip'] = utils.ask_user(
            'ip address: ', check=utils.check_ip)
        # Netmask validity depends on the IP entered just above.
        user_conf['tun_nic_netmask'] = utils.ask_user(
            'netmask [255.255.255.0]: ',
            default_val='255.255.255.0',
            check=_check_netmask(user_conf['tun_nic_ip']))
def main():
    """Entry point: load config modules from entry points, gather user
    answers (or reuse the saved ones), preview, confirm, and run setup.
    """
    import ast  # local import: only needed to parse the saved config

    set_logger()
    cfgs = dict()
    entry_points = [
        (e.name, e.load())
        for e in pkg_resources.iter_entry_points('es_setup.cfg')
    ]
    for (_, fn) in entry_points:
        fn(cfgs)
    # Sort modules by name so the stages run in a deterministic order.
    # (.items() instead of .iteritems(); works on Python 2 and 3.)
    cfgs = dict(sorted(cfgs.items(), key=lambda item: item[0]))
    # OK, we enter our core logic
    LOG.info('Stage: Initializing\n')
    # first, ask user some question
    rebuild = 'no'
    if os.path.exists(user_conf_file):
        txt = "You have built eayunstack, do you want to reuse the same " \
              "configuration (yes, no) [no]: "
        rebuild = utils.ask_user(txt, ('yes, no'), 'no')
        if rebuild.lower() == 'yes':
            with open(user_conf_file, 'r') as f:
                s = f.read().strip('\n')
            # SECURITY: literal_eval parses only Python literals; the
            # previous eval() would execute arbitrary code planted in
            # the saved config file.
            user_conf.update(ast.literal_eval(s))
    if rebuild.lower() == 'no':
        for c in cfgs:
            cfgs[c].ask_user(user_conf)
        # save for next using
        with open(user_conf_file, 'w') as f:
            f.write(str(user_conf))
    # then, we output the result user set just
    LOG.info('Stage: Setup validation\n')
    utils.fmt_print('--== CONFIGURATION PREVIEW ==--')
    for c in cfgs:
        cfgs[c].validation(user_conf)
    txt = 'Please confirm installation settings (OK, Cancel) [OK]: '
    confirm = utils.ask_user(txt, ('ok, cancel'), 'ok')
    if confirm.lower() == 'cancel':
        sys.exit()
    # last, run every configuration module to setup
    LOG.info('Stage: Transaction setup')
    for c in cfgs:
        cfgs[c].run(user_conf)
def _ask_ext_nic(user_conf):
    """Choose the external-network NIC (third NIC by default, first when
    the host has only two)."""
    # TODO: if there is only two nics in this host, the management
    # nic should be external nic, am i right?
    default_nic = nics[0] if len(nics) <= 2 else nics[2]
    prompt = ("which nic do you want to use as external "
              "interface: %s [%s]: " % (nics, default_nic))
    user_conf['ext_nic'] = utils.ask_user(prompt, nics, default_nic)
def _create_user(self):
    """Register a new account on the server and log in as it."""
    new_id = ask_user('User identification:')
    try:
        self._server.register_user(new_id)
    except Fault as ex:
        print(get_message(ex))
    else:
        # Only adopt the id once registration succeeded.
        self._user_id = new_id
def _print_last_post(self):
    """Show the newest post for a subject, or a notice when there is none."""
    topic = ask_user('Subject:')
    latest = self._server.get_last_post(topic)
    print()
    if not latest:
        print('No posts on this subject')
        return
    print(format_posts([latest]))
def _subscribe(self):
    """Subscribe the logged-in user to a subject.

    Prints the server's error message on failure; on success just
    prints a blank separator line.
    """
    subject = ask_user('Subject:')
    try:
        # The return value was bound to an unused local ('posts') before;
        # only success/failure of the call matters here.
        self._server.subscribe(self._user_id, subject)
    except Fault as ex:
        print(get_message(ex))
        return
    print()
def _menu_before_login(self):
    """Pre-login menu: log in to an existing account or create one."""
    print('1 - Log in into existing account')
    print('2 - Create new account and log into it')
    choice = ask_user('Choose an option:')
    # Dispatch table instead of an if/elif chain.
    handlers = {
        '1': self._perform_login,
        '2': self._create_user,
    }
    handler = handlers.get(choice)
    if handler is None:
        print('Unkown option')
    else:
        handler()
def _ask_mgt_nic(user_conf):
    """Pick the management NIC and optionally configure IP, netmask and
    gateway for it."""
    prompt = ("which nic do you want to use as management "
              "interface: %s [%s]: " % (nics, nics[0]))
    user_conf['mgt_nic'] = utils.ask_user(prompt, nics, nics[0])
    prompt = ("Do you want this setup to configure the management "
              "network? (Yes, No) [Yes]: ")
    # Guard clause: skip all network questions unless the user opted in.
    if utils.ask_user(prompt, ('yes, no'), 'yes').lower() != 'yes':
        return
    user_conf['cfg_mgt'] = True
    utils.fmt_print("==== NETWORK CONFIGURATION FOR MANAGEMENT "
                    "INTERFACE ====")
    user_conf['mgt_nic_ip'] = utils.ask_user(
        'ip address: ', check=utils.check_ip)
    user_conf['mgt_nic_netmask'] = utils.ask_user(
        'netmask [255.255.255.0]: ', default_val='255.255.255.0',
        check=_check_netmask(user_conf['mgt_nic_ip']))
    # The suggested gateway is derived from the answers just given.
    default_gw = utils.first_host_in_subnet(
        user_conf['mgt_nic_ip'], user_conf['mgt_nic_netmask'])
    user_conf['mgt_nic_gw'] = utils.ask_user(
        'gateway [%s]: ' % default_gw, default_val=default_gw,
        check=_check_gw(user_conf['mgt_nic_ip'],
                        user_conf['mgt_nic_netmask']))
def _perform_login(self):
    """Log in as an existing user id; clear the session on unknown ids."""
    candidate = ask_user('User identification:')
    try:
        known = self._server.is_user_registered(candidate)
    except Fault as ex:
        print(get_message(ex))
        return
    if not known:
        print('Unknown user')
        self._user_id = None
        return
    self._user_id = candidate
def _menu_before_login(self):
    """Pre-login menu: account actions plus read-only browsing."""
    print('1 - Log in into existing account')
    print('2 - Create new account and log into it')
    print('3 - Show last post')
    print('4 - List posts')
    option = ask_user('Choose an option:')
    print()
    if option == '1':
        self._perform_login()
    # FIX: was a stray second `if`, breaking the elif chain.
    elif option == '2':
        self._create_user()
    elif option == '3':
        self._print_last_post()
    elif option == '4':
        self._print_filtered_posts()
    else:
        # Consistent with the other menus, which report bad choices.
        print('Unkown option')
def _menu_after_login(self):
    """Post-login menu for the reader client."""
    entries = (
        ('1', 'Show last post', self._print_last_post),
        ('2', 'List posts', self._print_filtered_posts),
        ('3', 'Subscribe to subject', self._subscribe),
        ('4', 'Watch new posts online', self._watch_posts),
    )
    for key, label, _ in entries:
        print('%s - %s' % (key, label))
    choice = ask_user('Choose an option:')
    print()
    # Unknown choices fall through silently, as before.
    for key, _, handler in entries:
        if choice == key:
            handler()
            break
def _menu_after_login(self):
    """Post-login menu for subject/post administration."""
    print('1 - Add new subject')
    print('2 - List all subjects')
    print('3 - Add new post')
    print('4 - List all posts')
    choice = ask_user('Choose an option:')
    print()
    handlers = {
        '1': self._add_subject,
        '2': self._print_subjects,
        '3': self._add_post,
        '4': self._print_posts,
    }
    if choice in handlers:
        handlers[choice]()
    else:
        print('Unkown option')
def gmos_img_proc2(dbFile="./raw/obsLog.sqlite3",
                   # NOTE(review): these dict/list defaults are mutable and
                   # qd IS mutated below (qd.update, item assignment), so
                   # changes persist across calls that use the defaults --
                   # confirm whether that is intended.
                   qd={'use_me': 1,'Instrument': 'GMOS-S', 'CcdBin': '2 2',
                       'RoI': 'Full', 'Object': 'M8-%',
                       'DateObs': '2006-09-01:2006-10-30'},
                   bias_dateobs="2006-09-01:2006-10-30",
                   biasFlags={'logfile': 'biasLog.txt', 'rawpath': './raw/',
                              'fl_vardq': 'yes', 'verbose': 'yes'},
                   flat_dateobs='2006-09-10:2006-10-10',
                   flatFlags = {'fl_scale': 'yes', 'sctype': 'mean',
                                'fl_vardq': 'yes','rawpath': './raw/',
                                'logfile': 'giflatLog.txt', 'verbose': 'yes'},
                   filters = ['Ha', 'HaC', 'SII', 'r', 'i'],
                   sciFlags={'fl_over': 'yes', 'fl_trim': 'yes',
                             'fl_bias':'yes', 'fl_dark': 'no',
                             'fl_flat': 'yes', 'logfile':'gireduceLog.txt',
                             'rawpath': './raw/','fl_vardq': 'yes',
                             'bpm':bpm_gmos, 'verbose': 'yes'},
                   mosaicFlags = {'fl_paste': 'no', 'fl_fixpix': 'no',
                                  'fl_clean': 'yes', 'geointer': 'nearest',
                                  'logfile': 'gmosaicLog.txt',
                                  'fl_vardq': 'yes', 'fl_fulldq': 'yes',
                                  'verbose': 'yes'},
                   coaddFlags = {'fwhm': 3, 'datamax': 6.e4,
                                 'geointer': 'nearest',
                                 'logfile': 'imcoaddLog.txt'},
                   targets = ['M8-1', 'M8-2', 'M8-3'],
                   clean_files = False
                   ):
    """
    Reduce GMOS imaging based on tutorial example: build bias and
    twilight-flat MasterCals, reduce and mosaic the science frames,
    then co-add per target and filter.  Pauses between stages via
    ask_user().  (Python 2 / PyRAF.)

    Parameters
    ----------
    dbFile : str
        Filename containing the SQL sqlite3 database created by obslog.py
        It must be placed in the ./raw/ directory
        Default is `./raw/obsLog.sqlite3`
    qd : dictionary
        Query Dictionary of essential parameter=value pairs.
        Select bias exposures within ~2 months of the target observations
        e.g. qd= {'use_me': 1,
          'Instrument': 'GMOS-S',
          'CcdBin': '2 2',
          'RoI': 'Full',
          'Object': 'M8-%',
          'DateObs': '2006-09-01:2006-10-30'
          }
    bias_dateobs : str
        String representing the bias search Obsdate
        e.g. bias_dateobs = `2006-09-01:2006-10-30`
    biasFlags : dict
        Dictionary for the keyword flags of gmos.gbias() function
    flat_dateobs : str
        String representing the flat search Obsdate
        e.g. flat_dateobs = `2006-09-10:2006-10-10`
    flatFlags : dict
        Dictionary for the keyword flags of gmos.giflat() function
        e.g. flatFlags = {'fl_scale': 'yes', 'sctype': 'mean',
        'fl_vardq': 'yes','rawpath': './raw/', 'logfile': 'giflatLog.txt',
        'verbose': 'yes'}
    filters : list
        List of filter names to perform reduction
        e.g. filters=['Ha', 'HaC', 'SII', 'r', 'i']
    sciFlags : dict
        Dictionary for the keyword flags of gmos.gireduce() function
    mosaicFlags : dict
        Dictionary for the keyword flags of gmos.gimosaic() function
    coaddFlags : dict
        Dictionary for the keyword flags of gemtools.imcoadd() function
    targets : list
        List of names of target observations for the co-addition
        e.g. targets = ['M8-1', 'M8-2', 'M8-3']
    clean_files : bool
        Whether to clean intermediate files from reduction process

    Returns
    -------
    Reduce GMOS imaging based on tutorial example.
    """
    print ("### Begin Processing GMOS/MOS Images ###")
    print ("###")
    print ("=== Creating MasterCals ===")
    # From the work_directory:
    # Create the query dictionary of essential parameter=value pairs.
    # Select bias exposures within ~2 months of the target observations:
    print (" --Creating Bias MasterCal--")
    qd.update({'DateObs': bias_dateobs})
    # Set the task parameters.
    gmos.gbias.unlearn()
    # The following SQL generates the list of files to process.
    SQL = fs.createQuery('bias', qd)
    biasFiles = fs.fileListQuery(dbFile, SQL, qd)
    # The str.join() function is needed to transform a python list into a string
    # filelist that IRAF can understand.
    if len(biasFiles) > 1:
        files_all = ','.join(str(x) for x in biasFiles)
        # import pdb; pdb.set_trace()
        gmos.gbias(files_all, 'MCbias.fits', **biasFlags)
    # Clean up
    year_obs = qd['DateObs'].split('-')[0]
    if clean_files:
        iraf.imdel('gS{}*.fits'.format(year_obs))
    # ask_user aborts (or pauses) unless the answer is in the given list.
    ask_user("MC Bias done. Would you like to continue to proceed with Master Flats? (y/n): ", ['y','yes'])
    print (" --Creating Twilight Imaging Flat-Field MasterCal--")
    # Select flats obtained contemporaneously with the observations.
    qd.update({'DateObs': flat_dateobs})
    # Set the task parameters.
    gmos.giflat.unlearn()
    #filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    for f in filters:
        print " Building twilight flat MasterCal for filter: %s" % (f)
        # Select filter name using a substring of the official designation.
        qd['Filter2'] = f + '_G%'
        mcName = 'MCflat_%s.fits' % (f)
        flatFiles = fs.fileListQuery(dbFile, fs.createQuery('twiFlat', qd), qd)
        if len(flatFiles) > 0:
            files_all = ','.join(str(x) for x in flatFiles)
            # import pdb; pdb.set_trace()
            gmos.giflat(files_all, mcName, bias='MCbias', **flatFlags)
    if clean_files:
        iraf.imdel('gS{}*.fits,rgS{}*.fits'.format(year_obs, year_obs))
    ask_user("MC Flats done. Would you like to continue to proceed with processing Science Images? (y/n): ", ['yes','y'])
    print ("=== Processing Science Images ===")
    # Remove restriction on date range
    qd['DateObs'] = '*'
    prefix = 'rg'
    gmos.gireduce.unlearn()
    gemtools.gemextn.unlearn()    # disarms a bug in gmosaic
    gmos.gmosaic.unlearn()
    # Reduce the science images, then mosaic the extensions in a loop
    for f in filters:
        print " Processing science images for filter: %s" % (f)
        qd['Filter2'] = f + '_G%'
        flatFile = 'MCflat_' + f + '.fits'
        SQL = fs.createQuery('sciImg', qd)
        sciFiles = fs.fileListQuery(dbFile, SQL, qd)
        if len(sciFiles) > 0:
            # Make sure BPM table is in sciFlags for employing the imaging
            # Static BPM for this set of detectors.
            # import pdb; pdb.set_trace()
            all_files = ','.join(str(x) for x in sciFiles)
            gmos.gireduce(all_files, bias='MCbias', flat1=flatFile, **sciFlags)
            for file in sciFiles:
                gmos.gmosaic(prefix + file, **mosaicFlags)
        else:
            print("No Science images found for filter {}. Check database.".format(f))
            # NOTE(review): drops into the debugger in normal flow when a
            # filter has no data -- intended as an interactive checkpoint?
            import pdb; pdb.set_trace()
    if clean_files:
        iraf.imdelete('gS{}*.fits,rgS{}*.fits'.format(year_obs,year_obs))
    ask_user("Science Images done. Would you like to continue to proceed with image co-addition? (y/n): ", ['y','yes'])
    ## Co-add the images, per position and filter.
    print (" -- Begin image co-addition --")
    # Use primarily the default task parameters.
    gemtools.imcoadd.unlearn()
    prefix = 'mrg'
    for f in filters:
        print " - Co-addding science images in filter: %s" % (f)
        qd['Filter2'] = f + '_G%'
        for t in targets:
            qd['Object'] = t + '%'
            print " - Co-addding science images for position: %s" % (t)
            outImage = t + '_' + f + '.fits'
            coAddFiles = fs.fileListQuery(dbFile, fs.createQuery('sciImg', qd), qd)
            all_files = ','.join(prefix + str(x) for x in coAddFiles)
            if all_files == '':
                print('No files available for co-addition. Check that the target names are written correctly.')
                import pdb; pdb.set_trace()
            gemtools.imcoadd(all_files, outimage=outImage, **coaddFlags)
    ask_user("Co-addition done. Would you like to clean the latest intermediate reduction files? (y/n): ", ['y','yes'])
    if clean_files:
        iraf.delete("*_trn*,*_pos,*_cen")
        iraf.imdelete("*badpix.pl,*_med.fits,*_mag.fits")
        # iraf.imdelete ("mrgS*.fits")
    print ("=== Finished Calibration Processing ===")
def gmos_img_proc2(
        dbFile="./raw/obsLog.sqlite3",
        qd={
            'use_me': 1,
            'Instrument': 'GMOS-S',
            'CcdBin': '2 2',
            'RoI': 'Full',
            'Object': 'M8-%',
            'DateObs': '2006-09-01:2006-10-30'
        },
        bias_dateobs="2006-09-01:2006-10-30",
        biasFlags={
            'logfile': 'biasLog.txt',
            'rawpath': './raw/',
            'fl_vardq': 'yes',
            'verbose': 'yes'
        },
        flat_dateobs='2006-09-10:2006-10-10',
        flatFlags={
            'fl_scale': 'yes',
            'sctype': 'mean',
            'fl_vardq': 'yes',
            'rawpath': './raw/',
            'logfile': 'giflatLog.txt',
            'verbose': 'yes'
        },
        filters=['Ha', 'HaC', 'SII', 'r', 'i'],
        sciFlags={
            'fl_over': 'yes',
            'fl_trim': 'yes',
            'fl_bias': 'yes',
            'fl_dark': 'no',
            'fl_flat': 'yes',
            'logfile': 'gireduceLog.txt',
            'rawpath': './raw/',
            'fl_vardq': 'yes',
            'bpm': bpm_gmos,
            'verbose': 'yes'
        },
        mosaicFlags={
            'fl_paste': 'no',
            'fl_fixpix': 'no',
            'fl_clean': 'yes',
            'geointer': 'nearest',
            'logfile': 'gmosaicLog.txt',
            'fl_vardq': 'yes',
            'fl_fulldq': 'yes',
            'verbose': 'yes'
        },
        coaddFlags={
            'fwhm': 3,
            'datamax': 6.e4,
            'geointer': 'nearest',
            'logfile': 'imcoaddLog.txt'
        },
        targets=['M8-1', 'M8-2', 'M8-3'],
        clean_files=False):
    """
    Reduce GMOS imaging based on tutorial example: build bias and
    twilight-flat MasterCals, reduce and mosaic the science frames,
    then co-add per target and filter.  Pauses between stages via
    ask_user().  (Python 2 / PyRAF.)

    Parameters
    ----------
    dbFile : str
        Filename containing the SQL sqlite3 database created by obslog.py
        It must be placed in the ./raw/ directory
        Default is `./raw/obsLog.sqlite3`
    qd : dictionary
        Query Dictionary of essential parameter=value pairs.
        Select bias exposures within ~2 months of the target observations
        e.g. qd= {'use_me': 1,
          'Instrument': 'GMOS-S',
          'CcdBin': '2 2',
          'RoI': 'Full',
          'Object': 'M8-%',
          'DateObs': '2006-09-01:2006-10-30'
          }
    bias_dateobs : str
        String representing the bias search Obsdate
        e.g. bias_dateobs = `2006-09-01:2006-10-30`
    biasFlags : dict
        Dictionary for the keyword flags of gmos.gbias() function
    flat_dateobs : str
        String representing the flat search Obsdate
        e.g. flat_dateobs = `2006-09-10:2006-10-10`
    flatFlags : dict
        Dictionary for the keyword flags of gmos.giflat() function
        e.g. flatFlags = {'fl_scale': 'yes', 'sctype': 'mean',
        'fl_vardq': 'yes','rawpath': './raw/', 'logfile': 'giflatLog.txt',
        'verbose': 'yes'}
    filters : list
        List of filter names to perform reduction
        e.g. filters=['Ha', 'HaC', 'SII', 'r', 'i']
    sciFlags : dict
        Dictionary for the keyword flags of gmos.gireduce() function
    mosaicFlags : dict
        Dictionary for the keyword flags of gmos.gimosaic() function
    coaddFlags : dict
        Dictionary for the keyword flags of gemtools.imcoadd() function
    targets : list
        List of names of target observations for the co-addition
        e.g. targets = ['M8-1', 'M8-2', 'M8-3']
    clean_files : bool
        Whether to clean intermediate files from reduction process

    Returns
    -------
    Reduce GMOS imaging based on tutorial example.
    """
    # BUGFIX: qd is a mutable default argument and was mutated below
    # (qd.update / item assignment), so state leaked between calls that
    # relied on the default.  Work on a shallow copy instead.
    qd = dict(qd)
    print("### Begin Processing GMOS/MOS Images ###")
    print("###")
    print("=== Creating MasterCals ===")
    # From the work_directory:
    # Create the query dictionary of essential parameter=value pairs.
    # Select bias exposures within ~2 months of the target observations:
    print(" --Creating Bias MasterCal--")
    qd.update({'DateObs': bias_dateobs})
    # Set the task parameters.
    gmos.gbias.unlearn()
    # The following SQL generates the list of files to process.
    SQL = fs.createQuery('bias', qd)
    biasFiles = fs.fileListQuery(dbFile, SQL, qd)
    # The str.join() function is needed to transform a python list into a
    # string filelist that IRAF can understand.
    if len(biasFiles) > 1:
        files_all = ','.join(str(x) for x in biasFiles)
        gmos.gbias(files_all, 'MCbias.fits', **biasFlags)
    # Clean up
    year_obs = qd['DateObs'].split('-')[0]
    if clean_files:
        iraf.imdel('gS{}*.fits'.format(year_obs))
    ask_user(
        "MC Bias done. Would you like to continue to proceed with Master Flats? (y/n): ",
        ['y', 'yes'])
    print(" --Creating Twilight Imaging Flat-Field MasterCal--")
    # Select flats obtained contemporaneously with the observations.
    qd.update({'DateObs': flat_dateobs})
    # Set the task parameters.
    gmos.giflat.unlearn()
    for f in filters:
        # print statements normalized to the parenthesized form (same
        # output on Python 2 for a single argument).
        print(" Building twilight flat MasterCal for filter: %s" % (f))
        # Select filter name using a substring of the official designation.
        qd['Filter2'] = f + '_G%'
        mcName = 'MCflat_%s.fits' % (f)
        flatFiles = fs.fileListQuery(dbFile, fs.createQuery('twiFlat', qd), qd)
        if len(flatFiles) > 0:
            files_all = ','.join(str(x) for x in flatFiles)
            gmos.giflat(files_all, mcName, bias='MCbias', **flatFlags)
    if clean_files:
        iraf.imdel('gS{}*.fits,rgS{}*.fits'.format(year_obs, year_obs))
    ask_user(
        "MC Flats done. Would you like to continue to proceed with processing Science Images? (y/n): ",
        ['yes', 'y'])
    print("=== Processing Science Images ===")
    # Remove restriction on date range
    qd['DateObs'] = '*'
    prefix = 'rg'
    gmos.gireduce.unlearn()
    gemtools.gemextn.unlearn()  # disarms a bug in gmosaic
    gmos.gmosaic.unlearn()
    # Reduce the science images, then mosaic the extensions in a loop
    for f in filters:
        print(" Processing science images for filter: %s" % (f))
        qd['Filter2'] = f + '_G%'
        flatFile = 'MCflat_' + f + '.fits'
        SQL = fs.createQuery('sciImg', qd)
        sciFiles = fs.fileListQuery(dbFile, SQL, qd)
        if len(sciFiles) > 0:
            # Make sure BPM table is in sciFlags for employing the imaging
            # Static BPM for this set of detectors.
            all_files = ','.join(str(x) for x in sciFiles)
            gmos.gireduce(all_files, bias='MCbias', flat1=flatFile,
                          **sciFlags)
            for file in sciFiles:
                gmos.gmosaic(prefix + file, **mosaicFlags)
        else:
            print("No Science images found for filter {}. Check database.".
                  format(f))
            # NOTE(review): intentional interactive checkpoint when a
            # filter has no data; kept as-is.
            import pdb
            pdb.set_trace()
    if clean_files:
        iraf.imdelete('gS{}*.fits,rgS{}*.fits'.format(year_obs, year_obs))
    ask_user(
        "Science Images done. Would you like to continue to proceed with image co-addition? (y/n): ",
        ['y', 'yes'])
    ## Co-add the images, per position and filter.
    print(" -- Begin image co-addition --")
    # Use primarily the default task parameters.
    gemtools.imcoadd.unlearn()
    prefix = 'mrg'
    for f in filters:
        print(" - Co-addding science images in filter: %s" % (f))
        qd['Filter2'] = f + '_G%'
        for t in targets:
            qd['Object'] = t + '%'
            print(" - Co-addding science images for position: %s" % (t))
            outImage = t + '_' + f + '.fits'
            coAddFiles = fs.fileListQuery(dbFile,
                                          fs.createQuery('sciImg', qd), qd)
            all_files = ','.join(prefix + str(x) for x in coAddFiles)
            if all_files == '':
                print(
                    'No files available for co-addition. Check that the target names are written correctly.'
                )
                import pdb
                pdb.set_trace()
            gemtools.imcoadd(all_files, outimage=outImage, **coaddFlags)
    ask_user(
        "Co-addition done. Would you like to clean the latest intermediate reduction files? (y/n): ",
        ['y', 'yes'])
    if clean_files:
        iraf.delete("*_trn*,*_pos,*_cen")
        iraf.imdelete("*badpix.pl,*_med.fits,*_mag.fits")
        # iraf.imdelete ("mrgS*.fits")
    print("=== Finished Calibration Processing ===")
def ask_user(user_conf):
    """Role stage: record which role this host will play."""
    LOG.info('Stage: role configuration\n')
    utils.fmt_print('==== ROLE CONFIGURE ====')
    prompt = ('Which role do you want to configure this host as? '
              '(controller, network, computer) [controller]: ')
    roles = ('controller', 'network', 'computer')
    user_conf['role'] = utils.ask_user(prompt, roles, 'controller')
# Validation data is only rescaled; training augmentation is configured
# on train_datagen (defined earlier in the script).
validation_datagen = ImageDataGenerator(rescale=1. / 255)
train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=params['image_size'],
    batch_size=params['batch_size'],
    class_mode='categorical')
validation_generator = validation_datagen.flow_from_directory(
    val_dir,
    target_size=params['image_size'],
    batch_size=params['batch_size'],
    class_mode='categorical')
# Interactive checkpoint: abort the script unless the user confirms.
if ask_user('Data generators created. Continue to Network Initialisation?'
            ) == False:
    sys.exit(0)
########################################################################
# NETWORK INITIALISATION
########################################################################
# ImageNet-pretrained InceptionV3 backbone without its classifier head.
net = InceptionV3(include_top=False,
                  weights='imagenet',
                  input_shape=params['receptive_field'])
# Freeze ALL backbone layers.  (The partial unfreeze of the last
# layers is currently disabled -- see the commented block below.)
for layer in net.layers:
    layer.trainable = False
#for layer in net.layers[-17:]:
#    layer.trainable = True
def gmos_ls_proc2( sciTargets, stdTarget, dbFile='./raw/obsLog.sqlite3', qd_full={ 'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 4', 'RoI': 'Full', 'Disperser': 'B600+_%', 'CentWave': 485.0, 'AperMask': '1.0arcsec', 'Object': 'AM2306-72%', 'DateObs': '2007-06-05:2007-07-07' }, qd_censp={ 'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 4', 'RoI': 'CenSp', 'Disperser': 'B600+_%', 'CentWave': 485.0, 'AperMask': '1.0arcsec', 'Object': 'LTT9239', 'DateObs': '2007-06-05:2007-07-07' }, biasFlags={ 'logfile': 'biasLog.txt', 'rawpath': './raw/', 'fl_vardq': 'yes', 'verbose': 'no' }, flatFlags={ 'fl_over': 'yes', 'fl_trim': 'yes', 'fl_bias': 'yes', 'fl_dark': 'no', 'fl_fixpix': 'no', 'fl_oversize': 'no', 'fl_vardq': 'yes', 'fl_fulldq': 'yes', 'rawpath': './raw', 'fl_inter': 'no', 'fl_detec': 'yes', 'function': 'spline3', 'order': '13,11,28', 'logfile': 'gsflatLog.txt', 'verbose': 'no' }, sciFlags={ 'fl_over': 'yes', 'fl_trim': 'yes', 'fl_bias': 'yes', 'fl_gscrrej': 'no', 'fl_dark': 'no', 'fl_flat': 'yes', 'fl_gmosaic': 'yes', 'fl_fixpix': 'no', 'fl_gsappwave': 'yes', 'fl_oversize': 'no', 'fl_vardq': 'yes', 'fl_fulldq': 'yes', 'rawpath': './raw', 'fl_inter': 'no', 'logfile': 'gsreduceLog.txt', 'verbose': 'no' }, waveFlags={ 'coordlist': 'gmos$data/CuAr_GMOS.dat', 'fwidth': 6, 'nsum': 50, 'function': 'chebyshev', 'order': 5, 'fl_inter': 'no', 'logfile': 'gswaveLog.txt', 'verbose': 'no' }, sciCombFlags={ 'combine': 'average', 'reject': 'ccdclip', 'fl_vardq': 'yes', 'fl_dqprop': 'yes', 'logfile': 'gemcombineLog.txt', 'verbose': 'no' }, transFlags={ 'fl_vardq': 'yes', 'interptype': 'linear', 'fl_flux': 'yes', 'logfile': 'gstransLog.txt' }, skyFlags={ 'fl_oversize': 'no', 'fl_vardq': 'yes', 'logfile': 'gsskysubLog.txt' }, extrFlags={ 'apwidth': 3., 'fl_inter': 'yes', 'find': 'yes', 'trace': 'yes', 'tfunction': 'chebyshev', 'torder': '6', 'tnsum': 20, 'background': 'fit', 'bfunction': 'chebyshev', 'border': 2, 'fl_vardq': 'no', 'logfile': 'gsextrLog.txt' }, calibFlags={ 
'extinction': 'onedstds$ctioextinct.dat', 'fl_ext': 'yes', 'fl_scale': 'no', 'sfunction': 'sens', 'fl_vardq': 'yes', 'logfile': 'gscalibrateLog.txt' }, skip_wavecal=True, clean_files=False): """ Parameters ---------- dbFile : str Filename containing the SQL sqlite3 database created by obslog.py It must be placed in the ./raw/ directory Default is `./raw/obsLog.sqlite3` sciTargets : dict Dictionary with the associations of science targets and its associated ARC for wavelength calibration as well as the regions defining the sky along the slit. e.g. sciTargetd = {'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'}, 'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'}} Note that there could be more than one target defined this way. stdTarget : dict Dictionary with the associations of standard star targets and its associated ARC for wavelength calibration as well as the regions defining the sky along the slit. e.g. stdTarget = {'LTT1788': {'arc': 'S20180711S0281', 'sky': '170:380,920:1080'}} qd_full : dictionary Query Dictionary of essential parameter=value pairs for Full RoI. Meant for science object. qd_censp : dictionary Query Dictionary of essential parameter=value pairs for CenSp RoI. Meant for standard star. biasFlags : dict Dictionary for the keyword flags of gmos.gbias() function flatFlags : dict Dictionary for the keyword flags of gmos.gsflat() function sciFlags : dict Dictionary for the keyword flags of gmos.gsreduce() function Based on these flags a set of arcFlags and stdFlags dictionaries will be created for basic processing. waveFlags : dict Dictionary for the keyword flags of gmos.gswavelength() function sciCombFlags : dict Dictionary for the keyword flags of gemtools.gemcombine() function Based on these flags a set of stdCombFlags dictionary will be created for the standard advanced processing. transFlags : dict Dictionary for the keyword flags of gmos.gstransform() function. 
xxx skyFlags : dict Dictionary for the keyword flags of gmos.gsskysub() function extrFlags : dict Dictionary for the keywords flags of gmos.gsextract() function calibFlags : dict XXX skip_wavecal : bool Whether to skip interactive wavelength calibration. Useful when this is already done. Returns ------- """ print("### Begin Processing GMOS/Longslit Images ###") print("###") print("=== Creating MasterCals ===") # From the work_directory: # Create the query dictionary of essential parameter=value pairs for Full and CenSp RoIs qd = {'Full': qd_full, 'CenSp': qd_censp} print(" --Creating Bias MasterCal--") # Set the task parameters. gemtools.gemextn.unlearn() # Disarm a bug in gbias gmos.gbias.unlearn() regions = ['Full', 'CenSp'] for r in regions: # The following SQL generates the list of full-frame files to process. SQL = fs.createQuery('bias', qd[r]) biasFiles = fs.fileListQuery(dbFile, SQL, qd[r]) # The str.join() funciton is needed to transform a python list into a # comma-separated string of file names that IRAF can understand. if len(biasFiles) > 1: # NT comment: sometimes if there are too many files, gmos.gbias() raises an error. # import pdb; pdb.set_trace() gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias' + r, **biasFlags) # Clean up year_obs = qd_full['DateObs'].split('-')[0] if clean_files: iraf.imdel("gS{}*.fits".format(year_obs)) ask_user( "MC Bias done. Would you like to continue to proceed with GCAL Spectral Master Flats? (y/n): ", ['y', 'yes']) print(" -- Creating GCAL Spectral Flat-Field MasterCals --") # Set the task parameters. qd['Full'].update({'DateObs': '*'}) qd['CenSp'].update({'DateObs': '*'}) gmos.gireduce.unlearn() gmos.gsflat.unlearn() # Normalize the spectral flats per CCD. # The response fitting should be done interactively. if flatFlags['fl_inter'] != 'yes': print( "The response fitting should be done interactively. Please set flatFlags['fl_inter'] = 'yes'." 
) ask_user( "Do you still want to proceed despite this important warning? (y/n): ", ['yes', 'y']) for r in regions: qr = qd[r] flatFiles = fs.fileListQuery(dbFile, fs.createQuery('gcalFlat', qr), qr) if len(flatFiles) > 0: gmos.gsflat(','.join(str(x) for x in flatFiles), 'MCflat' + r, bias='MCbias' + r, **flatFlags) if clean_files: iraf.imdel('gS{}*.fits,gsS{}*.fits'.format(year_obs, year_obs)) ask_user( "GCAL Spectral Flat-Field MasterCals done. Would you like to continue to proceed with Basic Processing? (y/n): ", ['y', 'yes']) print("=== Processing Science Files ===") print(" -- Performing Basic Processing --") # Set task parameters. gmos.gsreduce.unlearn() sciFlags = sciFlags # redundant but put here because NT likes it arcFlags = copy.deepcopy(sciFlags) arcFlags.update({'fl_flat': 'no', 'fl_vardq': 'no', 'fl_fulldq': 'no'}) stdFlags = copy.deepcopy(sciFlags) stdFlags.update({'fl_fixpix': 'yes', 'fl_vardq': 'no', 'fl_fulldq': 'no'}) # Perform basic reductions on all exposures for science targets. print(" - Arc exposures -") for r in regions: qr = qd[r] arcFiles = fs.fileListQuery(dbFile, fs.createQuery('arc', qr), qr) if len(arcFiles) > 0: gmos.gsreduce(','.join(str(x) for x in arcFiles), bias='MCbias' + r, **arcFlags) print(" - Std star exposures -") r = 'CenSp' stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qd[r]), qd[r]) if len(stdFiles) > 0: gmos.gsreduce(','.join(str(x) for x in stdFiles), bias='MCbias' + r, flatim='MCflat' + r, **stdFlags) print(" - Science exposures -") r = 'Full' sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qd[r]), qd[r]) if len(sciFiles) > 0: gmos.gsreduce(','.join(str(x) for x in sciFiles), bias='MCbias' + r, flatim='MCflat' + r, **sciFlags) # Clean up if clean_files: iraf.imdel('gS{}*.fits'.format(year_obs)) ask_user( "Basic processing done. Would you like to continue to determine wavelength calibration? 
(y/n): ", ['y', 'yes']) print(" -- Determine wavelength calibration --") # Set task parameters gmos.gswavelength.unlearn() # The fit to the dispersion relation should be performed interactively. # Here we will use a previously determined result. if waveFlags['fl_inter'] != 'yes': print( "The fit to the dispersion relation should be performed interactively. Please set waveFlags['fl_inter'] = 'yes'." ) ask_user( "Do you still want to proceed despite this important warning? (y/n): ", ['yes', 'y']) # Need to select specific wavecals to match science exposures. # NT: we do this now from the sciTargets + stdTarget input dictionaries # e.g. ''' sciTargets = { 'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'}, 'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'}, 'AM2306-721_c': {'arc': 'gsS20070623S0091', 'sky': '170:380,920:1080'} } ''' #prefix = 'gsS20070623S0' #for arc in ['071', '081', '091', '109']: # gmos.gswavelength(prefix + arc, **waveFlags) prefix = 'gs' arc_files = [] for key in sciTargets.keys(): arc_files += [sciTargets[key]['arc']] for key in stdTarget.keys(): arc_files += [stdTarget[key]['arc']] # import pdb; pdb.set_trace() if skip_wavecal is not True: for arc in arc_files: gmos.gswavelength(prefix + arc, **waveFlags) ### End of basic processing. Continue with advanced processing. ask_user( "Wavelength solution done. Would you like to continue with advanced processing? (y/n): ", ['y', 'yes']) print(" -- Performing Advanced Processing --") print(" -- Combine exposures, apply dispersion, subtract sky --") # Set task parameters. 
gemtools.gemcombine.unlearn() sciCombFlags = sciCombFlags stdCombFlags = copy.deepcopy(sciCombFlags) stdCombFlags.update({'fl_vardq': 'no', 'fl_dqprop': 'no'}) gmos.gstransform.unlearn() # apply gtransform to standard # Process the Standard Star prefix = "gs" qs = qd['CenSp'] stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qs), qs) std_name = stdTarget.keys()[0] if len(stdFiles) == 0: ValueError( "No standard star associated. Please check parameters of search (e.g. RoI=CentSp)" ) # import pdb; pdb.set_trace() if len(stdFiles) > 1: # import pdb; pdb.set_trace() gemtools.gemcombine(','.join(prefix + str(x) for x in stdFiles), std_name, **stdCombFlags) else: os.system("cp {}.fits {}.fits".format(prefix + stdFiles[0], std_name)) gmos.gstransform(std_name, wavtraname=prefix + stdTarget[std_name]['arc'], **transFlags) # The sky regions should be selected with care, using e.g. prows/pcols: # pcols ("tAM2306b.fits[SCI]", 1100, 2040, wy1=40, wy2=320) print( "The sky regions should be selected with care, using e.g. with prows/pcols (see tutorial)." ) ''' answer = raw_input("Please provide the long_sample string to apply to gmos.gsskysub() for the standard star." "e.g. '20:70,190:230' (say 'no' for using the example as the default values): ") if answer in ['n', 'no']: print("Using default long_sample set by stdTarget values {}.".format(stdTarget[std_name]['sky'])) long_sample_std = stdTarget[std_name]['sky'] else: long_sample_std = answer ''' long_sample_std = stdTarget[std_name]['sky'] ask_user( "Before proceeding it is important that you have set a good sky region for the standard.\n" "Thus far you have selected: {}\n Would you like to proceed with the current one? (y/n): " .format(long_sample_std), ['yes', 'y']) # apply sky substraction skyFlags = skyFlags gmos.gsskysub.unlearn() gmos.gsskysub('t{}'.format(std_name), long_sample=long_sample_std) # NT: make sure the process works ok until here before proceeding further. i.e. 
setting the sky region manually and correctly. # NT: seems to be working. print(" -- Extract Std spectrum --") # Extract the std spectrum using a large aperture. # It's important to trace the spectra interactively. gmos.gsextract.unlearn() gmos.gsextract("st" + std_name, **extrFlags) print(" -- Derive the Flux calibration --") gmos.gsstandard.unlearn() sensFlags = { 'fl_inter': 'no', 'starname': 'XXX', 'caldir': 'onedstds$ctionewcal/', 'observatory': 'Gemini-South', 'extinction': 'onedstds$ctioextinct.dat', 'function': 'chebyshev', 'order': 9, 'verbose': 'no', 'logfile': 'gsstdLog.txt' } sensFlags['starname'] = stdTarget[std_name][ 'iraf_name'] # replace corresponding starname gmos.gsstandard('est' + std_name, sfile='std.txt', sfunction='sens', **sensFlags) ask_user( "Sensitivity function from standard star done. Would you like to continue with reduction of science" " exposures? (y/n): ", ['yes', 'y']) # Process the science targets. # Use a dictionary to associate science targets with Arcs and sky regions. prefix = 'gs' extract_individuals = True for targ, p in sciTargets.iteritems(): qs = qd['Full'] qs['Object'] = p['name'] # Fix up the target name for the output file sciOut = p['name_out'] sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qs), qs) all_files = ','.join(prefix + str(x) for x in sciFiles) gemtools.gemcombine(all_files, sciOut, **sciCombFlags) gmos.gstransform(sciOut, wavtraname=prefix + p['arc'], **transFlags) ask_user( "It is important to select a good sky region for substraction. Thus far you have selected {}" " based on the sciTargets input dictionary. Would you like to continue? 
(y/n): " .format(p['sky']), ['y', 'yes']) gmos.gsskysub('t' + sciOut, long_sample=p['sky'], **skyFlags) if extract_individuals: import pdb pdb.set_trace() for fname in sciFiles: gmos.gstransform(prefix + fname, wavtraname=prefix + p['arc'], **transFlags) gmos.gsskysub('t' + prefix + fname, long_sample=p['sky'], **skyFlags) gmos.gscalibrate.unlearn() gmos.gscalibrate('st' + prefix + fname, **calibFlags) # Clean up if clean_files: iraf.imdel("gsS{}*.fits".format(year_obs)) ask_user( "Sky substraction done. Would you like to continue to apply sensitivity function? (y/n): ", ['y']) ## Apply the sensitivity function. gmos.gscalibrate.unlearn() gmos.gscalibrate('st' + sciOut + '*', **calibFlags) calibFlags.update({'fl_vardq': 'no'}) gmos.gscalibrate('est' + std_name, **calibFlags) print(" -- Extract Target Spectra --") method = 'gsextract' if method == 'gsextract': gmos.gsextract.unlearn() # import pdb;pdb.set_trace() gmos.gsextract("cst" + sciOut, **extrFlags) elif method == 'sarith': # not implemented yet onedspec.nsum = 4 onedspec.sarith('cst{}.fits[SCI]'.format(sciOut), 'copy', '', 'ecst{}.ms'.format(sciOut), apertures='222-346x4') print("=== Finished Calibration Processing ===")
def gmos_ls_proc2(
        sciTargets,
        stdTarget,
        dbFile='./raw/obsLog.sqlite3',
        qd_full={'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 4',
                 'RoI': 'Full', 'Disperser': 'B600+_%', 'CentWave': 485.0,
                 'AperMask': '1.0arcsec', 'Object': 'AM2306-72%',
                 'DateObs': '2007-06-05:2007-07-07'},
        qd_censp={'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 4',
                  'RoI': 'CenSp', 'Disperser': 'B600+_%', 'CentWave': 485.0,
                  'AperMask': '1.0arcsec', 'Object': 'LTT9239',
                  'DateObs': '2007-06-05:2007-07-07'},
        biasFlags={'logfile': 'biasLog.txt', 'rawpath': './raw/',
                   'fl_vardq': 'yes', 'verbose': 'no'},
        flatFlags={'fl_over': 'yes', 'fl_trim': 'yes', 'fl_bias': 'yes',
                   'fl_dark': 'no', 'fl_fixpix': 'no', 'fl_oversize': 'no',
                   'fl_vardq': 'yes', 'fl_fulldq': 'yes', 'rawpath': './raw',
                   'fl_inter': 'no', 'fl_detec': 'yes',
                   'function': 'spline3', 'order': '13,11,28',
                   'logfile': 'gsflatLog.txt', 'verbose': 'no'},
        sciFlags={'fl_over': 'yes', 'fl_trim': 'yes', 'fl_bias': 'yes',
                  'fl_gscrrej': 'no', 'fl_dark': 'no', 'fl_flat': 'yes',
                  'fl_gmosaic': 'yes', 'fl_fixpix': 'no',
                  'fl_gsappwave': 'yes', 'fl_oversize': 'no',
                  'fl_vardq': 'yes', 'fl_fulldq': 'yes', 'rawpath': './raw',
                  'fl_inter': 'no', 'logfile': 'gsreduceLog.txt',
                  'verbose': 'no'},
        waveFlags={'coordlist': 'gmos$data/CuAr_GMOS.dat', 'fwidth': 6,
                   'nsum': 50, 'function': 'chebyshev', 'order': 5,
                   'fl_inter': 'no', 'logfile': 'gswaveLog.txt',
                   'verbose': 'no'},
        sciCombFlags={'combine': 'average', 'reject': 'ccdclip',
                      'fl_vardq': 'yes', 'fl_dqprop': 'yes',
                      'logfile': 'gemcombineLog.txt', 'verbose': 'no'},
        transFlags={'fl_vardq': 'yes', 'interptype': 'linear',
                    'fl_flux': 'yes', 'logfile': 'gstransLog.txt'},
        skyFlags={'fl_oversize': 'no', 'fl_vardq': 'yes',
                  'logfile': 'gsskysubLog.txt'},
        extrFlags={'apwidth': 3., 'fl_inter': 'yes', 'find': 'yes',
                   'trace': 'yes', 'tfunction': 'chebyshev', 'torder': '6',
                   'tnsum': 20, 'background': 'fit',
                   'bfunction': 'chebyshev', 'border': 2, 'fl_vardq': 'no',
                   'logfile': 'gsextrLog.txt'},
        calibFlags={'extinction': 'onedstds$ctioextinct.dat',
                    'fl_ext': 'yes', 'fl_scale': 'no', 'sfunction': 'sens',
                    'fl_vardq': 'yes', 'logfile': 'gscalibrateLog.txt'},
        skip_wavecal=True,
        clean_files=False):
    """
    Reduce GMOS longslit spectra end-to-end: master bias and flat creation,
    basic processing of arc/standard/science exposures, wavelength
    calibration, sky subtraction, flux calibration and spectrum extraction.

    Parameters
    ----------
    dbFile : str
        Filename containing the SQL sqlite3 database created by obslog.py
        It must be placed in the ./raw/ directory
        Default is `./raw/obsLog.sqlite3`
    sciTargets : dict
        Dictionary with the associations of science targets and its associated ARC for wavelength
        calibration as well as the regions defining the sky along the slit.
        e.g. sciTargetd = {'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'},
                           'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'}}
        Note that there could be more than one target defined this way.
        Each entry is also expected to provide 'name', 'name_out' and
        (for the standard) 'iraf_name' keys, as used below.
    stdTarget : dict
        Dictionary with the associations of standard star targets and its associated ARC for wavelength
        calibration as well as the regions defining the sky along the slit.
        e.g. stdTarget = {'LTT1788': {'arc': 'S20180711S0281', 'sky': '170:380,920:1080'}}
    qd_full : dictionary
        Query Dictionary of essential parameter=value pairs for Full RoI.
        Meant for science object.
    qd_censp : dictionary
        Query Dictionary of essential parameter=value pairs for CenSp RoI.
        Meant for standard star.
    biasFlags : dict
        Dictionary for the keyword flags of gmos.gbias() function
    flatFlags : dict
        Dictionary for the keyword flags of gmos.gsflat() function
    sciFlags : dict
        Dictionary for the keyword flags of gmos.gsreduce() function
        Based on these flags a set of arcFlags and stdFlags dictionaries will be created
        for basic processing.
    waveFlags : dict
        Dictionary for the keyword flags of gmos.gswavelength() function
    sciCombFlags : dict
        Dictionary for the keyword flags of gemtools.gemcombine() function
        Based on these flags a set of stdCombFlags dictionary will be created for the
        standard advanced processing.
    transFlags : dict
        Dictionary for the keyword flags of gmos.gstransform() function.
    skyFlags : dict
        Dictionary for the keyword flags of gmos.gsskysub() function
    extrFlags : dict
        Dictionary for the keywords flags of gmos.gsextract() function
    calibFlags : dict
        Dictionary for the keyword flags of gmos.gscalibrate() function.
    skip_wavecal : bool
        Whether to skip interactive wavelength calibration.
        Useful when this is already done.
    clean_files : bool
        If True, delete intermediate IRAF products after each stage.

    Returns
    -------
    None.  All products are written to disk by the IRAF tasks.
    """
    print("### Begin Processing GMOS/Longslit Images ###")
    print("###")
    print("=== Creating MasterCals ===")

    # From the work_directory:
    # Create the query dictionary of essential parameter=value pairs for
    # Full and CenSp RoIs.  Deep-copy so the (mutable) default dictionaries
    # are never modified in place: this function later updates 'DateObs'
    # and 'Object' on these entries.
    qd = {'Full': copy.deepcopy(qd_full), 'CenSp': copy.deepcopy(qd_censp)}

    print(" --Creating Bias MasterCal--")
    # Set the task parameters.
    gemtools.gemextn.unlearn()  # Disarm a bug in gbias
    gmos.gbias.unlearn()
    regions = ['Full', 'CenSp']
    for r in regions:
        # The following SQL generates the list of full-frame files to process.
        SQL = fs.createQuery('bias', qd[r])
        biasFiles = fs.fileListQuery(dbFile, SQL, qd[r])
        # The str.join() function is needed to transform a python list into a
        # comma-separated string of file names that IRAF can understand.
        if len(biasFiles) > 1:
            # NT comment: sometimes if there are too many files,
            # gmos.gbias() raises an error.
            gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias' + r,
                       **biasFlags)

    # Clean up
    year_obs = qd_full['DateObs'].split('-')[0]
    if clean_files:
        iraf.imdel("gS{}*.fits".format(year_obs))

    ask_user("MC Bias done. Would you like to continue to proceed with "
             "GCAL Spectral Master Flats? (y/n): ", ['y', 'yes'])

    print(" -- Creating GCAL Spectral Flat-Field MasterCals --")
    # Set the task parameters.
    qd['Full'].update({'DateObs': '*'})
    qd['CenSp'].update({'DateObs': '*'})
    gmos.gireduce.unlearn()
    gmos.gsflat.unlearn()
    # Normalize the spectral flats per CCD.
    # The response fitting should be done interactively.
    if flatFlags['fl_inter'] != 'yes':
        print("The response fitting should be done interactively. "
              "Please set flatFlags['fl_inter'] = 'yes'.")
        ask_user("Do you still want to proceed despite this important "
                 "warning? (y/n): ", ['yes', 'y'])

    for r in regions:
        qr = qd[r]
        flatFiles = fs.fileListQuery(dbFile, fs.createQuery('gcalFlat', qr),
                                     qr)
        if len(flatFiles) > 0:
            gmos.gsflat(','.join(str(x) for x in flatFiles), 'MCflat' + r,
                        bias='MCbias' + r, **flatFlags)

    if clean_files:
        iraf.imdel('gS{}*.fits,gsS{}*.fits'.format(year_obs, year_obs))

    ask_user("GCAL Spectral Flat-Field MasterCals done. Would you like to "
             "continue to proceed with Basic Processing? (y/n): ",
             ['y', 'yes'])

    print("=== Processing Science Files ===")
    print(" -- Performing Basic Processing --")
    # Set task parameters; arc and standard-star reductions reuse the
    # science flags with a few overrides.
    gmos.gsreduce.unlearn()
    arcFlags = copy.deepcopy(sciFlags)
    arcFlags.update({'fl_flat': 'no', 'fl_vardq': 'no', 'fl_fulldq': 'no'})
    stdFlags = copy.deepcopy(sciFlags)
    stdFlags.update({'fl_fixpix': 'yes', 'fl_vardq': 'no', 'fl_fulldq': 'no'})

    # Perform basic reductions on all exposures for science targets.
    print(" - Arc exposures -")
    for r in regions:
        qr = qd[r]
        arcFiles = fs.fileListQuery(dbFile, fs.createQuery('arc', qr), qr)
        if len(arcFiles) > 0:
            gmos.gsreduce(','.join(str(x) for x in arcFiles),
                          bias='MCbias' + r, **arcFlags)

    print(" - Std star exposures -")
    r = 'CenSp'
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qd[r]), qd[r])
    if len(stdFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in stdFiles),
                      bias='MCbias' + r, flatim='MCflat' + r, **stdFlags)

    print(" - Science exposures -")
    r = 'Full'
    sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qd[r]),
                                qd[r])
    if len(sciFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in sciFiles),
                      bias='MCbias' + r, flatim='MCflat' + r, **sciFlags)

    # Clean up
    if clean_files:
        iraf.imdel('gS{}*.fits'.format(year_obs))

    ask_user("Basic processing done. Would you like to continue to "
             "determine wavelength calibration? (y/n): ", ['y', 'yes'])

    print(" -- Determine wavelength calibration --")
    # Set task parameters
    gmos.gswavelength.unlearn()
    # The fit to the dispersion relation should be performed interactively.
    # Here we will use a previously determined result.
    if waveFlags['fl_inter'] != 'yes':
        print("The fit to the dispersion relation should be performed "
              "interactively. Please set waveFlags['fl_inter'] = 'yes'.")
        ask_user("Do you still want to proceed despite this important "
                 "warning? (y/n): ", ['yes', 'y'])

    # Need to select specific wavecals to match science exposures.
    # NT: we do this now from the sciTargets + stdTarget input dictionaries,
    # e.g. sciTargets = {
    #     'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'},
    #     'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'},
    # }
    prefix = 'gs'
    arc_files = [t['arc'] for t in sciTargets.values()]
    arc_files += [t['arc'] for t in stdTarget.values()]
    if skip_wavecal is not True:
        for arc in arc_files:
            gmos.gswavelength(prefix + arc, **waveFlags)

    ### End of basic processing. Continue with advanced processing.
    ask_user("Wavelength solution done. Would you like to continue with "
             "advanced processing? (y/n): ", ['y', 'yes'])

    print(" -- Performing Advanced Processing --")
    print(" -- Combine exposures, apply dispersion, subtract sky --")
    # Set task parameters.
    gemtools.gemcombine.unlearn()
    stdCombFlags = copy.deepcopy(sciCombFlags)
    stdCombFlags.update({'fl_vardq': 'no', 'fl_dqprop': 'no'})
    gmos.gstransform.unlearn()

    # Process the Standard Star.
    prefix = "gs"
    qs = qd['CenSp']
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qs), qs)
    # list() wrapper: dict views are not indexable in Python 3.
    std_name = list(stdTarget.keys())[0]
    if len(stdFiles) == 0:
        # BUGFIX: the exception was previously constructed but never raised.
        raise ValueError("No standard star associated. Please check "
                         "parameters of search (e.g. RoI=CentSp)")
    if len(stdFiles) > 1:
        gemtools.gemcombine(','.join(prefix + str(x) for x in stdFiles),
                            std_name, **stdCombFlags)
    else:
        # Single exposure: nothing to combine, just copy it to the
        # combined-product name.
        os.system("cp {}.fits {}.fits".format(prefix + stdFiles[0],
                                              std_name))

    gmos.gstransform(std_name,
                     wavtraname=prefix + stdTarget[std_name]['arc'],
                     **transFlags)

    # The sky regions should be selected with care, using e.g. prows/pcols:
    #   pcols ("tAM2306b.fits[SCI]", 1100, 2040, wy1=40, wy2=320)
    print("The sky regions should be selected with care, using e.g. with prows/pcols (see tutorial).")
    long_sample_std = stdTarget[std_name]['sky']
    ask_user("Before proceeding it is important that you have set a good sky region for the standard.\n"
             "Thus far you have selected: {}\n Would you like to proceed with the current one? (y/n): "
             .format(long_sample_std), ['yes', 'y'])

    # Apply sky subtraction to the standard star.
    gmos.gsskysub.unlearn()
    gmos.gsskysub('t{}'.format(std_name), long_sample=long_sample_std)

    # NT: make sure the process works ok until here before proceeding
    # further, i.e. setting the sky region manually and correctly.
    # NT: seems to be working.
    print(" -- Extract Std spectrum --")
    # Extract the std spectrum using a large aperture.
    # It's important to trace the spectra interactively.
    gmos.gsextract.unlearn()
    gmos.gsextract("st" + std_name, **extrFlags)

    print(" -- Derive the Flux calibration --")
    gmos.gsstandard.unlearn()
    sensFlags = {
        'fl_inter': 'no',
        'starname': 'XXX',
        'caldir': 'onedstds$ctionewcal/',
        'observatory': 'Gemini-South',
        'extinction': 'onedstds$ctioextinct.dat',
        'function': 'chebyshev',
        'order': 9,
        'verbose': 'no',
        'logfile': 'gsstdLog.txt'
    }
    # Replace the placeholder with the IRAF name of this standard star.
    sensFlags['starname'] = stdTarget[std_name]['iraf_name']
    gmos.gsstandard('est' + std_name, sfile='std.txt', sfunction='sens',
                    **sensFlags)

    ask_user("Sensitivity function from standard star done. Would you like "
             "to continue with reduction of science exposures? (y/n): ",
             ['yes', 'y'])

    # Process the science targets.
    # Use a dictionary to associate science targets with Arcs and sky regions.
    prefix = 'gs'
    extract_individuals = True
    for targ, p in sciTargets.items():  # .items(): Python 2/3 compatible
        qs = qd['Full']
        qs['Object'] = p['name']
        # Fix up the target name for the output file
        sciOut = p['name_out']
        sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qs),
                                    qs)
        all_files = ','.join(prefix + str(x) for x in sciFiles)
        gemtools.gemcombine(all_files, sciOut, **sciCombFlags)
        gmos.gstransform(sciOut, wavtraname=prefix + p['arc'], **transFlags)
        ask_user("It is important to select a good sky region for substraction. Thus far you have selected {}"
                 " based on the sciTargets input dictionary. Would you like to continue? (y/n): "
                 .format(p['sky']), ['y', 'yes'])
        gmos.gsskysub('t' + sciOut, long_sample=p['sky'], **skyFlags)
        if extract_individuals:
            # Also transform/sky-subtract/calibrate every individual
            # exposure (not only the combined frame).
            # NOTE: a leftover `pdb.set_trace()` breakpoint was removed
            # here; it halted every unattended run.
            for fname in sciFiles:
                gmos.gstransform(prefix + fname,
                                 wavtraname=prefix + p['arc'], **transFlags)
                gmos.gsskysub('t' + prefix + fname, long_sample=p['sky'],
                              **skyFlags)
                gmos.gscalibrate.unlearn()
                gmos.gscalibrate('st' + prefix + fname, **calibFlags)

        # Clean up
        if clean_files:
            iraf.imdel("gsS{}*.fits".format(year_obs))

        ask_user("Sky substraction done. Would you like to continue to "
                 "apply sensitivity function? (y/n): ", ['y'])

        ## Apply the sensitivity function.
        gmos.gscalibrate.unlearn()
        gmos.gscalibrate('st' + sciOut + '*', **calibFlags)
        # The standard star is calibrated without VAR/DQ propagation.
        # Use a copy so the caller's calibFlags (and the science
        # calibration on later loop iterations) keep fl_vardq unchanged.
        stdCalibFlags = copy.deepcopy(calibFlags)
        stdCalibFlags.update({'fl_vardq': 'no'})
        gmos.gscalibrate('est' + std_name, **stdCalibFlags)

        print(" -- Extract Target Spectra --")
        method = 'gsextract'
        if method == 'gsextract':
            gmos.gsextract.unlearn()
            gmos.gsextract("cst" + sciOut, **extrFlags)
        elif method == 'sarith':
            # not implemented yet
            onedspec.nsum = 4
            onedspec.sarith('cst{}.fits[SCI]'.format(sciOut), 'copy', '',
                            'ecst{}.ms'.format(sciOut),
                            apertures='222-346x4')

    print("=== Finished Calibration Processing ===")