Exemplo n.º 1
0
    def save(self):
        """Write the current configuration out as a ConfigObj file.

        Only values differing from DEFAULTS are written for the
        global_config and keybindings sections and for each profile;
        layouts and plugins are written verbatim. Errors are reported
        via err() rather than raised.
        """
        dbg('ConfigBase::save: saving config')
        parser = ConfigObj()
        parser.indent_type = '  '

        for section_name in ['global_config', 'keybindings']:
            dbg('ConfigBase::save: Processing section: %s' % section_name)
            section = getattr(self, section_name)
            # Persist only the delta from defaults to keep the file small.
            parser[section_name] = dict_diff(DEFAULTS[section_name], section)

        parser['profiles'] = {}
        for profile in self.profiles:
            dbg('ConfigBase::save: Processing profile: %s' % profile)
            parser['profiles'][profile] = dict_diff(
                    DEFAULTS['profiles']['default'], self.profiles[profile])

        parser['layouts'] = {}
        for layout in self.layouts:
            dbg('ConfigBase::save: Processing layout: %s' % layout)
            parser['layouts'][layout] = self.layouts[layout]

        parser['plugins'] = {}
        for plugin in self.plugins:
            dbg('ConfigBase::save: Processing plugin: %s' % plugin)
            parser['plugins'][plugin] = self.plugins[plugin]

        config_dir = get_config_dir()
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir)
        try:
            # Context manager closes the handle even if write() raises;
            # the original open() call leaked the file object.
            with open(self.command_line_options.config, 'w') as configfile:
                parser.write(configfile)
        except Exception as ex:
            err('ConfigBase::save: Unable to save config: %s' % ex)
Exemplo n.º 2
0
    def save(self):
        """Write the current configuration out as a ConfigObj file.

        Only values differing from DEFAULTS are written for the
        global_config and keybindings sections and for each profile;
        layouts and plugins are written verbatim. Errors are reported
        via err() rather than raised.
        """
        dbg("ConfigBase::save: saving config")
        parser = ConfigObj()
        parser.indent_type = "  "

        for section_name in ["global_config", "keybindings"]:
            dbg("ConfigBase::save: Processing section: %s" % section_name)
            section = getattr(self, section_name)
            # Persist only the delta from defaults to keep the file small.
            parser[section_name] = dict_diff(DEFAULTS[section_name], section)

        parser["profiles"] = {}
        for profile in self.profiles:
            dbg("ConfigBase::save: Processing profile: %s" % profile)
            parser["profiles"][profile] = dict_diff(DEFAULTS["profiles"]["default"], self.profiles[profile])

        parser["layouts"] = {}
        for layout in self.layouts:
            dbg("ConfigBase::save: Processing layout: %s" % layout)
            parser["layouts"][layout] = self.layouts[layout]

        parser["plugins"] = {}
        for plugin in self.plugins:
            dbg("ConfigBase::save: Processing plugin: %s" % plugin)
            parser["plugins"][plugin] = self.plugins[plugin]

        config_dir = get_config_dir()
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir)
        try:
            # Context manager closes the handle even if write() raises;
            # the original open() call leaked the file object.
            with open(self.command_line_options.config, "w") as configfile:
                parser.write(configfile)
        except Exception as ex:
            err("ConfigBase::save: Unable to save config: %s" % ex)
Exemplo n.º 3
0
    def __init__(self, topofile, testrig):
        """Set up the test system topology.

        @param topofile: topology .ini filename, resolved relative to TOPO_DIR
        @param testrig: test rig object; its devices dict is consulted for
            MRV and MATRIX entries and its testrigfile selects the MRV
            connection map
        """
        # get the basic info sorted
        self.filename = TOPO_DIR + topofile
        self.topo = self.gettopologysettings()
        self.testrig = testrig
        self.rfmatrix = None

        # add MRV object and set all ethernet paths; rig 'd' has its own
        # connection map file
        if testrig.testrigfile == "testrig-d.ini":
            self.mrvports = ConfigObj(TOPO_DIR + "mrvconnections-d.ini")
        else:
            self.mrvports = ConfigObj(TOPO_DIR + "mrvconnections.ini")

        # 'key in dict' is equivalent to (and cheaper than) 'key in dict.keys()'
        if 'MRV' in self.testrig.devices:
            self.mrvs = self.testrig.devices['MRV']
            self.setethroutes()

        # add Matrix object
        if 'MATRIX' in self.testrig.devices:
            self.rfmatrix = testrig.devices['MATRIX']

        # set RF paths
        self.setrfpaths()
Exemplo n.º 4
0
    def save(self):
        """Write the current configuration to <config_dir>/config.

        Only values differing from DEFAULTS are written for the
        global_config and keybindings sections and for each profile;
        layouts and plugins are written verbatim. Errors are reported
        via err() rather than raised.
        """
        dbg('ConfigBase::save: saving config')
        parser = ConfigObj()
        parser.indent_type = '  '

        for section_name in ['global_config', 'keybindings']:
            dbg('ConfigBase::save: Processing section: %s' % section_name)
            section = getattr(self, section_name)
            # Persist only the delta from defaults to keep the file small.
            parser[section_name] = dict_diff(DEFAULTS[section_name], section)

        parser['profiles'] = {}
        for profile in self.profiles:
            dbg('ConfigBase::save: Processing profile: %s' % profile)
            parser['profiles'][profile] = dict_diff(
                DEFAULTS['profiles']['default'], self.profiles[profile])

        parser['layouts'] = {}
        for layout in self.layouts:
            dbg('ConfigBase::save: Processing layout: %s' % layout)
            parser['layouts'][layout] = self.layouts[layout]

        parser['plugins'] = {}
        for plugin in self.plugins:
            dbg('ConfigBase::save: Processing plugin: %s' % plugin)
            parser['plugins'][plugin] = self.plugins[plugin]

        config_dir = get_config_dir()
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir)
        try:
            # Context manager closes the handle even if write() raises;
            # the original open() call leaked the file object.
            with open(os.path.join(config_dir, 'config'), 'w') as configfile:
                parser.write(configfile)
        except Exception as ex:
            err('ConfigBase::save: Unable to save config: %s' % ex)
Exemplo n.º 5
0
def printDirectoryLists():
    """Print paths to science, telluric and calibration observations.

    Useful for:
        - Making sure sorting worked properly.
        - Making sure pipeline is loading runtimeData/scienceDirectoryList.txt,
          runtimeData/telluricDirectoryList.txt and runtimeData/calibrationDirectoryList.txt
          correctly.

    Reads ./config.cfg (relative to the current working directory) with
    ConfigObj and logs each entry of the three directory lists.
    """
    # Print the current directory of data being reduced.
    logging.info("\n#################################################################################")
    logging.info("                                   ")
    logging.info("  COMPLETE - sorting. I've updated scienceDirectoryList,")
    logging.info("             telluricDirectoryList and calibrationDirectoryList in")
    logging.info("             runtimeData/config.cfg with the following values:")
    logging.info("")
    logging.info("#################################################################################\n")

    with open('./config.cfg') as config_file:
        options = ConfigObj(config_file, unrepr=True)

    # Iterate the lists directly instead of indexing via range(len(...)).
    logging.info("\nScience Directory List: ")
    for directory in options['scienceDirectoryList']:
        logging.info(directory)
    logging.info("\nTelluric Directory List: ")
    for directory in options['telluricDirectoryList']:
        logging.info(directory)
    logging.info("\nCalibration Directory List: ")
    for directory in options['calibrationDirectoryList']:
        logging.info(directory)
Exemplo n.º 6
0
    def save(self, force=False):
        """Write the current configuration out as a ConfigObj file.

        The write is skipped when saving is suppressed (_nosave) or when
        the config is clean, unless force is set.

        @param force: write even when the config is not dirty
        @returns: True when the write was skipped; None otherwise
        """
        if self._nosave:
            dbg('~ConfigBase::save: WRITE SUPRESSED')
            return True
        elif not force and not self._dirty:
            dbg('~ConfigBase::save: CONFIG CLEAN')
            return True
        elif force:
            dbg('~ConfigBase::save: WRITE FORCED')

        dbg('~ConfigBase::save: WRITE CONFIG')
        self._dirty = False

        # FIXME this craziness must be purged asap.
        parser = ConfigObj()
        parser.indent_type = '  '

        for section_name in ['global_config', 'keybindings']:
            dbg('ConfigBase::save: Processing section: %s' % section_name)
            section = getattr(self, section_name)
            # Persist only the delta from defaults to keep the file small.
            parser[section_name] = dict_diff(DEFAULTS[section_name], section)

        parser['profiles'] = {}
        for profile in self.profiles:
            dbg('ConfigBase::save: Processing profile: %s' % profile)
            parser['profiles'][profile] = dict_diff(
                DEFAULTS['profiles']['default'], self.profiles[profile])

        parser['layouts'] = {}
        for layout in self.layouts:
            dbg('ConfigBase::save: Processing layout: %s' % layout)
            # Layouts are scrubbed before writing; everything else is verbatim.
            parser['layouts'][layout] = self.cleancfg(self.layouts[layout])

        parser['plugins'] = {}
        for plugin in self.plugins:
            dbg('ConfigBase::save: Processing plugin: %s' % plugin)
            parser['plugins'][plugin] = self.plugins[plugin]

        config_dir = get_config_dir()
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir)
        try:
            # Context manager closes the handle even if write() raises;
            # the original open() call leaked the file object.
            with open(self.command_line_options.config, 'w') as configfile:
                parser.write(configfile)
        except Exception as ex:
            err('ConfigBase::save: Unable to save config: %s' % ex)
Exemplo n.º 7
0
    def gettopologysettings(self):
        """Return all topology settings from the topology file.

        @returns: topo (ConfigObj) - all content of self.filename parsed
            as a dict-like ConfigObj
        """
        # Lazy %-args let logging skip formatting when INFO is disabled.
        logging.info("Gathering Topology information from %s", self.filename)
        topo = ConfigObj(self.filename)
        return topo
Exemplo n.º 8
0
def main():
    """Convert a terminator config file (-i) into a JSON file (-o).

    Does nothing if the output file already exists. Keybindings whose
    value is the literal string 'None' are blanked before dumping.
    """
    argp = ArgumentParser()
    for flag, dest, helptext in (('-i', 'src', 'input file'),
                                 ('-o', 'dst', 'output file')):
        argp.add_argument(flag,
                          dest=dest,
                          default=None,
                          action='store',
                          type=str,
                          required=True,
                          help=helptext)
    args = argp.parse_args()

    if exists(args.dst):
        return print('{} already exists, do nothing.'.format(args.dst))

    with open(args.src, mode='rt') as infile:
        configdata = ConfigObj(infile, configspec=defaults_to_configspec())

    # Validation fills in defaults; errors are preserved, not raised.
    configdata.validate(Validator(), preserve_errors=True)
    for key, value in configdata['keybindings'].items():
        if value == 'None':
            configdata['keybindings'][key] = ''

    with open(args.dst, mode='wt') as outfile:
        json.dump(configdata,
                  outfile,
                  indent=4,
                  separators=(',', ':'),
                  sort_keys=True)
    return print('convert {} to {}'.format(args.src, args.dst))
Exemplo n.º 9
0
class ConfigBase(Borg):
    """Class to provide access to our user configuration"""
    # Borg-shared state: every attribute defaults to None so that
    # prepare_attributes() can tell an uninitialised shared state from a
    # populated one.
    loaded = None
    whined = None
    sections = None
    global_config = None
    profiles = None
    keybindings = None
    plugins = None
    layouts = None
    command_line_options = None
    _curlayoutname = 'default'
    _dirty = None
    _nosave = None
    _building = None

    def __init__(self):
        """Class initialiser"""

        Borg.__init__(self, self.__class__.__name__)

        self.prepare_attributes()
        # Local import — presumably avoids a circular import at module
        # load time; TODO confirm.
        import optionparse
        self.command_line_options = optionparse.options
        self.load()
        self._dirty = False
        self._nosave = False

    def prepare_attributes(self):
        """Set up our borg environment"""
        # Only fill in attributes that are still None, so repeated
        # construction of this Borg does not clobber existing state.
        if self.loaded is None:
            self.loaded = False
        if self.whined is None:
            self.whined = False
        if self.sections is None:
            self.sections = [
                'global_config', 'keybindings', 'profiles', 'layouts',
                'plugins'
            ]
        if self.global_config is None:
            self.global_config = copy(DEFAULTS['global_config'])
        if self.profiles is None:
            self.profiles = {}
            self.profiles['default'] = copy(DEFAULTS['profiles']['default'])
        if self.keybindings is None:
            self.keybindings = copy(DEFAULTS['keybindings'])
        if self.plugins is None:
            self.plugins = {}
        if self.layouts is None:
            self.layouts = {}
            for layout in DEFAULTS['layouts']:
                self.layouts[layout] = copy(DEFAULTS['layouts'][layout])

    # XXX prefseditor Cancel feature preparation
    def get_undo_tree(self):
        """Return a dict mapping each section name to its live object.

        NOTE: the values are the section objects themselves, not copies.
        """
        r = {}
        for k in self.sections:
            r[k] = getattr(self, k)
        return r

    # FIXME this all configspec thing needs to be purged off. No mere user
    # run terminal under terminal to get a chance to read what this 'validator'
    # whines about. Here already is DEFAULTS dict to get sane value from if the
    # conf file lacks a key. An user who can do vim ~/.config/terminator/config
    # will know what to do if she'd do a typo there. Mortals have prefseditor.
    # This mess will stay for a while due to plugins and time. (ohir)
    def defaults_to_configspec(self):
        """Convert our tree of default values into a ConfigObj validation
        specification"""
        configspecdata = {}

        # Map Python type names onto configobj validator check names.
        keymap = {
            'int': 'integer',
            'str': 'string',
            'bool': 'boolean',
        }

        # global_config: one 'type(default=value)' entry per default.
        section = {}
        for key in DEFAULTS['global_config']:
            keytype = DEFAULTS['global_config'][key].__class__.__name__
            value = DEFAULTS['global_config'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)

            keytype = '%s(default=%s)' % (keytype, value)

            # Special-cased to an empty string default regardless of type.
            if key == 'custom_url_handler':
                keytype = 'string(default="")'

            section[key] = keytype
        configspecdata['global_config'] = section

        # keybindings: unset bindings are omitted from the spec entirely.
        section = {}
        for key in DEFAULTS['keybindings']:
            value = DEFAULTS['keybindings'][key]
            if value is None or value == '':
                continue
            section[key] = 'string(default=%s)' % value
        configspecdata['keybindings'] = section

        # profiles: one shared '__many__' spec applied to every profile.
        section = {}
        for key in DEFAULTS['profiles']['default']:
            keytype = DEFAULTS['profiles']['default'][key].__class__.__name__
            value = DEFAULTS['profiles']['default'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)
            if keytype == 'string':
                value = '"%s"' % value

            keytype = '%s(default=%s)' % (keytype, value)

            section[key] = keytype
        configspecdata['profiles'] = {}
        configspecdata['profiles']['__many__'] = section

        # layouts: two nested '__many__' levels (layout name, then item).
        section = {}
        section['type'] = 'string'
        section['parent'] = 'string'
        section['profile'] = 'string(default=default)'
        section['position'] = 'string(default="")'
        #section['size'] = 'list(default=list(-1,-1))'
        #section['size'] = 'list'
        configspecdata['layouts'] = {}
        configspecdata['layouts']['__many__'] = {}
        configspecdata['layouts']['__many__']['__many__'] = section

        configspecdata['plugins'] = {}

        configspec = ConfigObj(configspecdata)
        if DEBUG == True:
            configspec.write(open('/tmp/terminator_configspec_debug.txt', 'w'))
        return (configspec)

    def load(self):
        """Load configuration data from our various sources"""
        if self.loaded is True:
            dbg('ConfigBase::load: config already loaded')
            return

        # Prefer the config path from the command line; default both it
        # and the fallback to 'config92' in the config dir.
        if self.command_line_options:
            if not self.command_line_options.config:
                self.command_line_options.config = os.path.join(
                    get_config_dir(), 'config92')
            filename = self.command_line_options.config
        else:
            #filename = os.path.join(get_config_dir(), 'config')
            filename = os.path.join(get_config_dir(), 'config92')

        dbg('looking for config file: %s' % filename)
        try:
            configfile = open(filename, 'r')
        except Exception, ex:
            # Only complain once per missing file, to avoid log spam.
            if not self.whined:
                err('ConfigBase::load: Unable to open %s (%s)' %
                    (filename, ex))
                self.whined = True
            return
        # If we have successfully loaded a config, allow future whining
        self.whined = False

        try:
            configspec = self.defaults_to_configspec()
            parser = ConfigObj(configfile, configspec=configspec)
            validator = Validator()
            result = parser.validate(validator, preserve_errors=True)
        except Exception, ex:
            err('Unable to load configuration: %s' % ex)
            return
        # NOTE(review): snippet appears truncated here — 'parser' and
        # 'result' are unused in the visible code; the original presumably
        # merges the parsed sections into self below. TODO confirm.
Exemplo n.º 10
0
    def defaults_to_configspec(self):
        """Convert our tree of default values into a ConfigObj validation
        specification.

        Each DEFAULTS entry becomes a validator string such as
        'boolean(default=True)'; the assembled tree is returned as a
        ConfigObj suitable for passing as a configspec.
        """
        configspecdata = {}

        # Map Python type names onto configobj validator check names.
        keymap = {
            'int': 'integer',
            'str': 'string',
            'bool': 'boolean',
        }

        # global_config: one 'type(default=value)' entry per default.
        section = {}
        for key in DEFAULTS['global_config']:
            keytype = DEFAULTS['global_config'][key].__class__.__name__
            value = DEFAULTS['global_config'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)

            keytype = '%s(default=%s)' % (keytype, value)

            section[key] = keytype
        configspecdata['global_config'] = section

        # keybindings: unset bindings are omitted from the spec entirely.
        section = {}
        for key in DEFAULTS['keybindings']:
            value = DEFAULTS['keybindings'][key]
            if value is None or value == '':
                continue
            section[key] = 'string(default=%s)' % value
        configspecdata['keybindings'] = section

        # profiles: one shared '__many__' spec applied to every profile.
        section = {}
        for key in DEFAULTS['profiles']['default']:
            keytype = DEFAULTS['profiles']['default'][key].__class__.__name__
            value = DEFAULTS['profiles']['default'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)
            # String defaults must be quoted inside the validator spec.
            if keytype == 'string':
                value = '"%s"' % value

            keytype = '%s(default=%s)' % (keytype, value)

            section[key] = keytype
        configspecdata['profiles'] = {}
        configspecdata['profiles']['__many__'] = section

        # layouts: two nested '__many__' levels (layout name, then item).
        section = {}
        section['type'] = 'string'
        section['parent'] = 'string'
        section['profile'] = 'string(default=default)'
        section['command'] = 'string(default="")'
        section['position'] = 'string(default="")'
        section['size'] = 'list(default=list(-1,-1))'
        configspecdata['layouts'] = {}
        configspecdata['layouts']['__many__'] = {}
        configspecdata['layouts']['__many__']['__many__'] = section

        configspecdata['plugins'] = {}

        configspec = ConfigObj(configspecdata)
        if DEBUG:
            # Close the debug dump handle instead of leaking it.
            with open('/tmp/terminator_configspec_debug.txt', 'w') as debugfile:
                configspec.write(debugfile)
        return configspec
Exemplo n.º 11
0
class ConfigBase(Borg):
    """Class to provide access to our user configuration"""
    # Borg-shared state: every attribute defaults to None so that
    # prepare_attributes() can tell an uninitialised shared state from a
    # populated one.
    loaded = None
    whined = None
    sections = None
    global_config = None
    profiles = None
    keybindings = None
    plugins = None
    layouts = None
    command_line_options = None

    def __init__(self):
        """Class initialiser"""

        Borg.__init__(self, self.__class__.__name__)

        self.prepare_attributes()
        self.load()

    def prepare_attributes(self):
        """Set up our borg environment"""
        # Only fill in attributes that are still None, so repeated
        # construction of this Borg does not clobber existing state.
        if self.loaded is None:
            self.loaded = False
        if self.whined is None:
            self.whined = False
        if self.sections is None:
            self.sections = [
                'global_config', 'keybindings', 'profiles', 'layouts',
                'plugins'
            ]
        if self.global_config is None:
            self.global_config = copy(DEFAULTS['global_config'])
        if self.profiles is None:
            self.profiles = {}
            self.profiles['default'] = copy(DEFAULTS['profiles']['default'])
        if self.keybindings is None:
            self.keybindings = copy(DEFAULTS['keybindings'])
        if self.plugins is None:
            self.plugins = {}
        if self.layouts is None:
            self.layouts = {}
            for layout in DEFAULTS['layouts']:
                self.layouts[layout] = copy(DEFAULTS['layouts'][layout])

    def defaults_to_configspec(self):
        """Convert our tree of default values into a ConfigObj validation
        specification"""
        configspecdata = {}

        # Map Python type names onto configobj validator check names.
        keymap = {
            'int': 'integer',
            'str': 'string',
            'bool': 'boolean',
        }

        # global_config: one 'type(default=value)' entry per default.
        section = {}
        for key in DEFAULTS['global_config']:
            keytype = DEFAULTS['global_config'][key].__class__.__name__
            value = DEFAULTS['global_config'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)

            keytype = '%s(default=%s)' % (keytype, value)

            section[key] = keytype
        configspecdata['global_config'] = section

        # keybindings: unset bindings are omitted from the spec entirely.
        section = {}
        for key in DEFAULTS['keybindings']:
            value = DEFAULTS['keybindings'][key]
            if value is None or value == '':
                continue
            section[key] = 'string(default=%s)' % value
        configspecdata['keybindings'] = section

        # profiles: one shared '__many__' spec applied to every profile.
        section = {}
        for key in DEFAULTS['profiles']['default']:
            keytype = DEFAULTS['profiles']['default'][key].__class__.__name__
            value = DEFAULTS['profiles']['default'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)
            # String defaults must be quoted inside the validator spec.
            if keytype == 'string':
                value = '"%s"' % value

            keytype = '%s(default=%s)' % (keytype, value)

            section[key] = keytype
        configspecdata['profiles'] = {}
        configspecdata['profiles']['__many__'] = section

        # layouts: two nested '__many__' levels (layout name, then item).
        section = {}
        section['type'] = 'string'
        section['parent'] = 'string'
        section['profile'] = 'string(default=default)'
        section['command'] = 'string(default="")'
        section['position'] = 'string(default="")'
        section['size'] = 'list(default=list(-1,-1))'
        configspecdata['layouts'] = {}
        configspecdata['layouts']['__many__'] = {}
        configspecdata['layouts']['__many__']['__many__'] = section

        configspecdata['plugins'] = {}

        configspec = ConfigObj(configspecdata)
        if DEBUG == True:
            configspec.write(open('/tmp/terminator_configspec_debug.txt', 'w'))
        return (configspec)

    def load(self):
        """Load configuration data from our various sources"""
        if self.loaded is True:
            dbg('ConfigBase::load: config already loaded')
            return

        filename = os.path.join(get_config_dir(), 'config')
        dbg('looking for config file: %s' % filename)
        try:
            configfile = open(filename, 'r')
        except Exception, ex:
            # Only complain once per missing file, to avoid log spam.
            if not self.whined:
                err('ConfigBase::load: Unable to open %s (%s)' %
                    (filename, ex))
                self.whined = True
            return
        # If we have successfully loaded a config, allow future whining
        self.whined = False

        try:
            configspec = self.defaults_to_configspec()
            parser = ConfigObj(configfile, configspec=configspec)
            validator = Validator()
            result = parser.validate(validator, preserve_errors=True)
        except Exception, ex:
            err('Unable to load configuration: %s' % ex)
            return
        # NOTE(review): snippet appears truncated here — 'parser' and
        # 'result' are unused in the visible code; the original presumably
        # merges the parsed sections into self below. TODO confirm.
Exemplo n.º 12
0
def defaults_to_configspec():
    """Build a ConfigObj validation spec mirroring the DEFAULTS tree.

    Every default value contributes a validator string such as
    'boolean(default=True)'; the assembled tree is returned as a
    ConfigObj suitable for use as a configspec.
    """
    type_names = {
        'int': 'integer',
        'str': 'string',
        'bool': 'boolean',
    }

    spec = {}

    # global_config: one validator entry per default value.
    entries = {}
    for key, value in DEFAULTS['global_config'].items():
        kind = type(value).__name__
        if kind in type_names:
            kind = type_names[kind]
        elif kind == 'list':
            value = f'list({",".join(value)})'

        entry = f'{kind}(default={value})'

        # Special-cased to an empty string default regardless of type.
        if key == 'custom_url_handler':
            entry = 'string(default="")'

        entries[key] = entry
    spec['global_config'] = entries

    # keybindings: unset bindings are omitted from the spec entirely.
    entries = {}
    for key, value in DEFAULTS['keybindings'].items():
        if value is None or value == '':
            continue
        entries[key] = f'string(default={value})'
    spec['keybindings'] = entries

    # profiles: a single '__many__' spec shared by every profile.
    entries = {}
    for key, value in DEFAULTS['profiles']['default'].items():
        kind = type(value).__name__
        if kind in type_names:
            kind = type_names[kind]
        elif kind == 'list':
            value = f'list({",".join(value)})'
        # String defaults must be quoted inside the validator spec.
        if kind == 'string':
            value = f'"{value}"'

        entries[key] = f'{kind}(default={value})'
    spec['profiles'] = {'__many__': entries}

    # layouts: two nested '__many__' levels (layout name, then item name).
    layout_item = {
        'type': 'string',
        'parent': 'string',
        'profile': 'string(default=default)',
        'command': 'string(default="")',
        'position': 'string(default="")',
        'size': 'list(default=list(-1,-1))'
    }
    spec['layouts'] = {'__many__': {'__many__': layout_item}}

    spec['plugins'] = {}

    return ConfigObj(spec)
Exemplo n.º 13
0
    def defaults_to_configspec(self):
        """Convert our tree of default values into a ConfigObj validation
        specification.

        Each DEFAULTS entry becomes a validator string such as
        'boolean(default=True)'; the assembled tree is returned as a
        ConfigObj suitable for passing as a configspec.
        """
        configspecdata = {}

        # Map Python type names onto configobj validator check names.
        keymap = {
                'int': 'integer',
                'str': 'string',
                'bool': 'boolean',
                }

        # global_config: one 'type(default=value)' entry per default.
        section = {}
        for key in DEFAULTS['global_config']:
            keytype = DEFAULTS['global_config'][key].__class__.__name__
            value = DEFAULTS['global_config'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)

            keytype = '%s(default=%s)' % (keytype, value)

            # Special-cased to an empty string default regardless of type.
            if key == 'custom_url_handler':
                keytype = 'string(default="")'

            section[key] = keytype
        configspecdata['global_config'] = section

        # keybindings: unset bindings are omitted from the spec entirely.
        section = {}
        for key in DEFAULTS['keybindings']:
            value = DEFAULTS['keybindings'][key]
            if value is None or value == '':
                continue
            section[key] = 'string(default=%s)' % value
        configspecdata['keybindings'] = section

        # profiles: one shared '__many__' spec applied to every profile.
        section = {}
        for key in DEFAULTS['profiles']['default']:
            keytype = DEFAULTS['profiles']['default'][key].__class__.__name__
            value = DEFAULTS['profiles']['default'][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == 'list':
                value = 'list(%s)' % ','.join(value)
            # background_image is forced to a plain (quoted) string.
            if key == 'background_image':
                keytype = 'string'
            # String defaults must be quoted inside the validator spec.
            if keytype == 'string':
                value = '"%s"' % value

            keytype = '%s(default=%s)' % (keytype, value)

            section[key] = keytype
        configspecdata['profiles'] = {}
        configspecdata['profiles']['__many__'] = section

        # layouts: two nested '__many__' levels (layout name, then item).
        section = {}
        section['type'] = 'string'
        section['parent'] = 'string'
        section['profile'] = 'string(default=default)'
        section['command'] = 'string(default="")'
        section['position'] = 'string(default="")'
        section['size'] = 'list(default=list(-1,-1))'
        configspecdata['layouts'] = {}
        configspecdata['layouts']['__many__'] = {}
        configspecdata['layouts']['__many__']['__many__'] = section

        configspecdata['plugins'] = {}

        configspec = ConfigObj(configspecdata)
        if DEBUG:
            # Close the debug dump handle instead of leaking it.
            with open('/tmp/terminator_configspec_debug.txt', 'w') as debugfile:
                configspec.write(debugfile)
        return configspec
Exemplo n.º 14
0
def start(args):
    """
    nifsPipeline

    This script is a full-featured NIFS data reduction pipeline. It can call up
    to three "Steps".

    This script does two things. It:
        - gets data reduction parameters; either from an interactive input session or
          an input file, and
        - launches appropriate scripts to do the work. It can call up to 3 scripts directly:
                1) nifsSort.py
                2) nifsBaselineCalibration.py
                3) nifsReduce.py

    Args:
        args: command-line style arguments, forwarded unchanged to GetConfig().

    """
    # Save the starting path; the reduction loop chdir()s around and must return here.
    path = os.getcwd()

    # Get paths to Nifty data bundled with the installed package.
    RECIPES_PATH = pkg_resources.resource_filename('nifty', 'recipes/')
    RUNTIME_DATA_PATH = pkg_resources.resource_filename('nifty', 'runtimeData/')

    # Format logging options.
    FORMAT = '%(asctime)s %(message)s'
    DATEFMT = datefmt()

    # Set up the logging file.
    logging.basicConfig(filename='Nifty.log',format=FORMAT,datefmt=DATEFMT,level=logging.DEBUG)
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # Mirror every log record to stdout as well as the logfile.
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    logging.info("\nWARNING: This pipeline is untested and needs revision.")
    # TODO(nat): revise this pipeline.

    logging.info("\n####################################")
    logging.info("#                                  #")
    logging.info("#             NIFTY                #")
    logging.info("#   NIFS Data Reduction Pipeline   #")
    logging.info("#         Version "+ __version__+ "            #")
    logging.info("#         July 25th, 2017          #")
    logging.info("#     Marie Lemoine-Busserolle     #")
    logging.info("# Gemini Observatory, Hilo, Hawaii #")
    logging.info("#                                  #")
    logging.info("####################################\n")

    # Make sure to change this if you change the default logfile.
    logging.info('The log file is Nifty.log.')

    # Read or write a configuration file, interactively or from some defaults.
    # Second argument is the name of the current script. Used to get script-dependent configuration.
    GetConfig(args, "nifsPipeline")

    # Print the configuration, expanding dict-valued sections one level deep so
    # their keys are listed instead of being printed as opaque dicts.
    logging.info("\nParameters for this data reduction as read from ./config.cfg:\n")
    with open('./config.cfg') as config_file:
        config = ConfigObj(config_file, unrepr=True)
        for i in config:
            if isinstance(config[i], dict):
                for k in config[i]:
                    logging.info(str(k) + " " + str(config[i][k]))
            else:
                logging.info(str(i) + " " + str(config[i]))
    logging.info("")

    # Load configuration from ./config.cfg that is used by this script.
    with open('./config.cfg') as config_file:
        # Load general config.
        config = ConfigObj(config_file, unrepr=True)
        manualMode = config['manualMode']

        # Load pipeline specific config.
        nifsPipelineConfig = config['nifsPipelineConfig']

        sort = nifsPipelineConfig['sort']
        calibrationReduction = nifsPipelineConfig['calibrationReduction']
        telluricReduction = nifsPipelineConfig['telluricReduction']
        scienceReduction = nifsPipelineConfig['scienceReduction']

    ###########################################################################
    ##                         SETUP COMPLETE                                ##
    ##                      BEGIN DATA REDUCTION                             ##
    ##                                                                       ##
    ##        Four Main Steps:                                               ##
    ##          1) Sort the Raw Data - nifsSort.py                           ##
    ##          2) Reduce baseline calibrations - nifsBaselineCalibration.py ##
    ##          3) Reduce telluric observations - nifsReduce.py              ##
    ##          4) Reduce science observations - nifsReduce.py               ##
    ##                                                                       ##
    ###########################################################################

    ###########################################################################
    ##                      STEP 1: Sort the raw data.                       ##
    ###########################################################################

    if sort:
        if manualMode:
            a = raw_input('About to enter nifsSort.')
        nifsSort.start()
    # By now, we should have paths to the three types of raw data. Print them out.
    printDirectoryLists()

    # Re-read the config: nifsSort.start() is expected to have populated the
    # science directory list.
    with open('./config.cfg') as config_file:
        # Load general config.
        config = ConfigObj(config_file, unrepr=True)
        scienceDirectoryList = config['scienceDirectoryList']

    # This mode does a full reduction, science observation by science observation.
    # For each science directory in science directory list:
    # Reduce the associated calibrations
    # Reduce the associated tellurics
    # Reduce the science observation
    for scienceObservation in scienceDirectoryList:
        # Find associated calibrations by looping through calibration directory list.
        # Split to this form: ('/Users/nat/tests/core/M85/20150508/J', 'obs36')
        temp = os.path.split(scienceObservation)
        # Split again to this form: ('/Users/nat/tests/core/M85/20150508', 'J')
        temp2 = os.path.split(temp[0])
        # Now temp2[0] is the calibration base path name, temp2[1] is the grating.
        calibrationDirectory = temp2[0]+"/Calibrations_"+temp2[1]
        # Have to convert it to a one-element list first.
        calibrationDirectory = [calibrationDirectory]
        # We now have our calibration directory for the given science!
        # Now find associated telluric observations.
        # temp[0] looks like: '/Users/nat/tests/core/M85/20150508/J'
        telluricBaseName = temp[0]+"/Tellurics/"
        os.chdir(telluricBaseName)
        telluricDirectoryList = glob.glob("obs*")
        # We must make these incomplete paths into full paths by adding the base path.
        telluricDirectoryList = [telluricBaseName+x for x in telluricDirectoryList]
        # Don't forget to change back to starting point.
        os.chdir(path)

        ###########################################################################
        ##                STEP 2: Reduce baseline calibrations.                  ##
        ###########################################################################

        if calibrationReduction:
            if manualMode:
                a = raw_input('About to enter nifsBaselineCalibration.')
            nifsBaselineCalibration.start(calibrationDirectoryList=calibrationDirectory)

        ###########################################################################
        ##                STEP 3: Reduce telluric observations.                  ##
        ###########################################################################

        if telluricReduction:
            if manualMode:
                a = raw_input('About to enter nifsReduce to reduce Tellurics.')
            nifsReduce.start('Telluric', telluricDirectoryList=telluricDirectoryList)

        ###########################################################################
        ##                 STEP 4: Reduce science observations.                  ##
        ###########################################################################

        if scienceReduction:
            if manualMode:
                a = raw_input('About to enter nifsReduce to reduce science.')
            nifsReduce.start('Science')

    ###########################################################################
    ##                    Data Reduction Complete!                           ##
    ##                  Good luck with your science!                         ##
    ###########################################################################

    logging.info('\n###########################################################')
    logging.info('#                                                         #')
    logging.info('#               DATA REDUCTION COMPLETE                   #')
    logging.info('#             Good luck with your science!                #')
    logging.info('# Check out http://nifty4gemini.readthedocs.io/en/latest/ #')
    logging.info('#           For docs, tutorials and examples.             #')
    logging.info('#                                                         #')
    logging.info('###########################################################\n')

    return
Exemplo n.º 15
0
def getUserInput():
    """
    Interactive input session based on Sphinx's interactive setup.

    Prompts the user for every data reduction option via getParam(), then
    serializes the answers to RUNTIME_DATA_PATH/config.cfg with ConfigObj
    (unless a full default reduction was requested, in which case nothing
    is written here).

    Returns:
        fullReduction: the value returned by the first getParam() prompt;
        compared against False to decide whether to ask the remaining
        questions.
    """

    logging.info("\nGood day! Press enter to accept default reduction options.")

    fullReduction = getParam(
        "Do a full data reduction with default parameters loaded from recipes/default_input.cfg? [no]: ",
        False,
        "Type yes to start Nifty with data reduction input parameters loaded from recipes/default_input.cfg file."
    )
    if fullReduction == False:
        # "Select in". User has to turn individual steps on.
        # TODO(nat): Implement these steps.
        date = ""
        program = ""
        copy = ""

        sort = getParam(
            "Sort data? [no]: ",
            False
        )
        rawPath = getParam(
            "Path to raw files directory? [~/data]: ",
            "~/data"
        )
        tel = getParam(
            "Apply a telluric correction? [no]: ",
            False
        )
        # See if we want to reduce the baseline calibrations. And if so, which substeps
        # to perform.
        calibrationReduction = getParam(
            "Reduce baseline calibrations? [no]: ",
            False
        )
        # By default do all of them.
        rstart = getParam(
            "Starting point of baseline calibration reductions? [1]: ",
            1
        )
        rstop = getParam(
            "Stopping point of baseline calibration reductions? [4]: ",
            4
        )

        # Check for tellurics as well; by default do all reduction steps.
        telluricReduction = getParam(
            "Reduce telluric data? [no]: ",
            False
        )
        telStart = getParam(
            "Starting point of science and telluric reductions? [1]: ",
            1
        )
        telStop = getParam(
            "Stopping point of science and telluric reductions? [6]: ",
            6
        )
        # Set the telluric application correction method. Choices are iraf.telluric and a python variant.
        # Set the h-line removal method with the vega() function in nifsReduce as default.
        hline_method = getParam(
            "H-line removal method? [vega]: ",
            "vega"
        )
        # Set yes or no for interactive the h line removal, telluric correction, and continuum fitting
        hlineinter = getParam(
            "Interactive H-line removal? [no]: ",
            False
        )
        continuuminter = getParam(
            "Interactive telluric continuum fitting? [no]: ",
            False
        )
        telluric_correction_method = getParam(
            "Telluric correction method? [python]: ",
            "python"
        )
        telinter = getParam(
            "Interactive telluric correction? [no]: ",
            False
        )
        # Check for science as well.
        scienceReduction = getParam(
            "Reduce science data? [no]: ",
            False
        )
        sciStart = getParam(
            "Starting point of science and telluric reductions? [1]: ",
            1
        )
        sciStop = getParam(
            "Stopping point of science and telluric reductions? [6]: ",
            6
        )
        efficiencySpectrumCorrection = getParam(
            "Do a flux calibration? [no]: ",
            False
        )
        # Single-quoted prompts so the literal [""] placeholder is actually
        # displayed (adjacent double-quoted literals used to collapse it to []).
        spectemp = getParam(
            'Effective temperature in kelvin of telluric standard star? [""]: ',
            ""
        )
        mag = getParam(
            'Magnitude of standard star? [""]: ',
            ""
        )
        merge = getParam(
            "Produce one final 3D cube? [no]: ",
            False
        )
        use_pq_offsets = getParam(
            "Use pq offsets to merge data cubes? [yes]: ",
            "yes"
        )
        im3dtran = getParam(
            "Transpose cubes for faster merging? [no]: ",
            False
        )
        over = getParam(
            "Overwrite old files? [no]: ",
            False
        )
        debug = getParam(
            "Pause after each data reduction step? [yes]: ",
            "yes"
        )

        # Serialize and save the options as a .cfg file.
        options = ConfigObj(unrepr=True)
        options['date'] = date
        options['program'] = program
        options['rawPath'] = rawPath
        options['over'] = over
        options['copy'] = copy
        options['sort'] = sort
        options['calibrationReduction'] = calibrationReduction
        options['scienceReduction'] = scienceReduction
        options['merge'] = merge
        options['tel'] = tel
        options['telluricReduction'] = telluricReduction
        options['spectemp'] = spectemp
        options['mag'] = mag
        options['efficiencySpectrumCorrection'] = efficiencySpectrumCorrection
        options['rstart'] = rstart
        options['rstop'] = rstop
        options['telStart'] = telStart
        options['telStop'] = telStop
        options['sciStart'] = sciStart
        options['sciStop'] = sciStop
        options['hline_method'] = hline_method
        options['hlineinter'] = hlineinter
        options['continuuminter'] = continuuminter
        options['telluric_correction_method'] = telluric_correction_method
        options['telinter'] = telinter
        options['use_pq_offsets'] = use_pq_offsets
        options['im3dtran'] = im3dtran
        options['debug'] = debug
        # NOTE(review): RUNTIME_DATA_PATH is not defined in this function;
        # it must exist as a module-level global or this raises NameError — confirm.
        with open(RUNTIME_DATA_PATH+'/config.cfg', 'w') as outfile:
            options.write(outfile)

    return fullReduction
Exemplo n.º 16
0
def start(args):
    """

    nifsPipeline

    This script is a full-featured NIFS data reduction pipeline. It can call up
    to three "Steps".

    This script does two things. It:
        - gets data reduction parameters; either from an interactive input session or
          an input file, and
        - launches appropriate scripts to do the work. It can call up to 3 scripts directly:
                1) nifsSort.py
                2) nifsBaselineCalibration.py
                3) nifsReduce.py

    Args:
        args: command-line style arguments, forwarded unchanged to GetConfig().

    """
    # Save starting path for later use and change one directory up.
    path = os.getcwd()

    # Get paths to built-in Nifty data. Special code in setup.py makes sure recipes/ and
    # runtimeData/ will be installed when someone installs Nifty, and accessible in this way.
    # NOTE(review): neither path is used later in this function — kept for parity
    # with the rest of the pipeline; confirm before removing.
    RECIPES_PATH = pkg_resources.resource_filename('nifty', 'recipes/')
    RUNTIME_DATA_PATH = pkg_resources.resource_filename(
        'nifty', 'runtimeData/')

    # Format logging options.
    FORMAT = '%(asctime)s %(message)s'
    DATEFMT = datefmt()

    # Set up the main logging file.
    logging.basicConfig(filename='Nifty.log',
                        format=FORMAT,
                        datefmt=DATEFMT,
                        level=logging.DEBUG)
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # Mirror every log record to stdout as well as the logfile.
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    logging.info("\n####################################")
    logging.info("#                                  #")
    logging.info("#             NIFTY                #")
    logging.info("#   NIFS Data Reduction Pipeline   #")
    logging.info("#         Version " + __version__ + "            #")
    logging.info("#         July 25th, 2017          #")
    logging.info("#     Marie Lemoine-Busserolle     #")
    logging.info("# Gemini Observatory, Hilo, Hawaii #")
    logging.info("#                                  #")
    logging.info("####################################\n")

    # Make sure to change this if you change the default logfile.
    logging.info('The log file is Nifty.log.')

    # Read or write a configuration file, interactively, from defaults or from a provided file.
    # Second argument is the name of the current script. This could be used to get script-dependent configuration.
    GetConfig(args, "nifsPipeline")

    # Print the configuration, expanding dict-valued sections one level deep so
    # their keys are listed instead of being printed as opaque dicts.
    logging.info(
        "\nParameters for this data reduction as read from ./config.cfg:\n")
    with open('./config.cfg') as config_file:
        config = ConfigObj(config_file, unrepr=True)
        for i in config:
            if isinstance(config[i], dict):
                for k in config[i]:
                    logging.info(str(k) + " " + str(config[i][k]))
            else:
                logging.info(str(i) + " " + str(config[i]))
    logging.info("")

    # Load pipeline configuration from ./config.cfg that is used by this script.
    with open('./config.cfg') as config_file:
        # Load general config.
        config = ConfigObj(config_file, unrepr=True)
        manualMode = config['manualMode']

        # Load pipeline specific config.
        nifsPipelineConfig = config['nifsPipelineConfig']

        sort = nifsPipelineConfig['sort']
        calibrationReduction = nifsPipelineConfig['calibrationReduction']
        telluricReduction = nifsPipelineConfig['telluricReduction']
        scienceReduction = nifsPipelineConfig['scienceReduction']
        telluricCorrection = nifsPipelineConfig['telluricCorrection']
        fluxCalibration = nifsPipelineConfig['fluxCalibration']
        merge = nifsPipelineConfig['merge']

    ###########################################################################
    ##                         SETUP COMPLETE                                ##
    ##                      BEGIN DATA REDUCTION                             ##
    ##                                                                       ##
    ##        Four Main Steps:                                               ##
    ##          1) Sort the Raw Data - nifsSort.py                           ##
    ##          2) Reduce baseline calibrations - nifsBaselineCalibration.py ##
    ##          3) Reduce telluric observations - nifsReduce.py              ##
    ##          4) Reduce science observations - nifsReduce.py               ##
    ##                                                                       ##
    ###########################################################################

    ###########################################################################
    ##                      STEP 1: Sort the raw data.                       ##
    ###########################################################################

    if sort:
        if manualMode:
            a = raw_input('About to enter nifsSort.')
        nifsSort.start()
    # By now, we should have paths to the three types of raw data. Print them out.
    printDirectoryLists()

    ###########################################################################
    ##                STEP 2: Reduce baseline calibrations.                  ##
    ###########################################################################

    if calibrationReduction:
        if manualMode:
            a = raw_input('About to enter nifsBaselineCalibration.')
        nifsBaselineCalibration.start()

    ###########################################################################
    ##                STEP 3: Reduce telluric observations.                  ##
    ###########################################################################

    if telluricReduction:
        if manualMode:
            a = raw_input('About to enter nifsReduce to reduce Tellurics.')
        nifsReduce.start('Telluric')

    ###########################################################################
    ##                 STEP 4: Reduce science observations.                  ##
    ###########################################################################

    if scienceReduction:
        if manualMode:
            a = raw_input('About to enter nifsReduce to reduce science.')
        nifsReduce.start('Science')
    if telluricCorrection:
        if manualMode:
            a = raw_input(
                'About to enter nifsTelluric to make and create telluric corrected cubes.'
            )
        nifsTelluric.run()

    if fluxCalibration:
        if manualMode:
            a = raw_input(
                'About to enter nifsFluxCalibrate to make and create flux calibrated and telluric corrected cubes.'
            )
        nifsFluxCalibrate.run()

    if merge:
        if manualMode:
            a = raw_input(
                'About to enter nifsMerge to merge final 3D data cubes to single cubes.'
            )
        nifsMerge.run()

    ###########################################################################
    ##                    Data Reduction Complete!                           ##
    ##                  Good luck with your science!                         ##
    ###########################################################################

    logging.info(
        '\n###########################################################')
    logging.info('#                                                         #')
    logging.info('#               DATA REDUCTION COMPLETE                   #')
    logging.info('#             Good luck with your science!                #')
    logging.info('# Check out http://nifty4gemini.readthedocs.io/en/latest/ #')
    logging.info('#           For docs, tutorials and examples.             #')
    logging.info('#                                                         #')
    logging.info(
        '###########################################################\n')

    return
0
def start():
    """
         nifsBaselineCalibration

         This module contains all the functions needed to reduce
         NIFS GENERAL BASELINE CALIBRATIONS

         INPUT FILES FOR EACH BASELINE CALIBRATION:

         Raw files:
           - Flat frames (lamps on)
           - Flat frames (lamps off)
           - Arc frames
           - Arc dark frames
           - Ronchi mask flat frames

         OUTPUT FILES:
         - Shift file. Eg: sCALFLAT.fits
         - Bad Pixel Mask. Eg: rgnCALFLAT_sflat_bmp.pl
         - Flat field. Eg: rgnCALFLAT_flat.fits
         - Reduced arc frame. Eg: wrgnARC.fits
         - Reduced ronchi mask. Eg: rgnRONCHI.fits
         - Reduced dark frame. Eg: rgnARCDARK.fits

    Args:
        # Loaded from runtimeData/config.cfg
        calDirList:      list of paths to calibrations. ['path/obj/date/Calibrations_grating']
        over (boolean):  overwrite old files. Default: False.
        start (int):     starting step of daycal reduction. Specified at command line with -a. Default: 1.
        stop (int):      stopping step of daycal reduction. Specified at command line with -z. Default: 6.
        debug (boolean): enable optional debugging pauses. Default: False.

    """

    # TODO(nat): stop using first frame from list as name for combined frames. Find better names and implement
    # them in pipeline and docs.
    # TODO(nat): Finish converting the print statements to logging.info() statements.

    # Store current working directory for later use.
    path = os.getcwd()

    # Set up the logging file.
    log = os.getcwd()+'/Nifty.log'

    logging.info('#################################################')
    logging.info('#                                               #')
    logging.info('# Start the NIFS Baseline Calibration Reduction #')
    logging.info('#                                               #')
    logging.info('#################################################')

    # Set up/prepare IRAF.
    iraf.gemini()
    iraf.nifs()
    iraf.gnirs()
    iraf.gemtools()

    # Reset to default parameters the used IRAF tasks.
    iraf.unlearn(iraf.gemini,iraf.gemtools,iraf.gnirs,iraf.nifs)

    # From http://bishop.astro.pomona.edu/Penprase/webdocuments/iraf/beg/beg-image.html:
    # Before doing anything involving image display the environment variable
    # stdimage must be set to the correct frame buffer size for the display
    # servers (as described in the dev$graphcap file under the section "STDIMAGE
    # devices") or to the correct image display device. The task GDEVICES is
    # helpful for determining this information for the display servers.
    iraf.set(stdimage='imt2048')

    # Prepare the IRAF package for NIFS.
    # NSHEADERS lists the header parameters used by the various tasks in the
    # NIFS package (excluding headers values which have values fixed by IRAF or
    # FITS conventions).
    iraf.nsheaders("nifs",logfile=log)

    # Set clobber to 'yes' for the script. This still does not make the gemini
    # tasks overwrite files, so:
    # YOU WILL LIKELY HAVE TO REMOVE FILES IF YOU RE_RUN THE SCRIPT.
    user_clobber=iraf.envget("clobber")
    iraf.reset(clobber='yes')

    # Load reduction parameters from ./config.cfg.
    with open('./config.cfg') as config_file:
        options = ConfigObj(config_file, unrepr=True)
        calDirList = options['calibrationDirectoryList']
        over = options['over']
        start = options['rstart']
        stop = options['rstop']
        debug = options['debug']

    ################################################################################
    # Define Variables, Reduction Lists AND identify/run number of reduction steps #
    ################################################################################

    # Loop over the Calibrations directories and reduce the day calibrations in each one.
    for calpath in calDirList:
        os.chdir(calpath)
        pwdDir = os.getcwd()+"/"
        iraffunctions.chdir(pwdDir)

        # However, don't do the reduction for a Calibration_"grating" directory without associated telluric or science data.
        # Check that a "grating" directory exists at the same level as the Calibrations_"grating" directory.
        # If not, skip the reduction of calibrations in that Calibrations_grating directory.
        # "grating" should be the last letter of calpath.
        grating = calpath[-1]
        if not os.path.exists("../"+grating):

            print "\n##############################################################################"
            print ""
            print "  No grating directory (including science or telluric data) found for  "
            print "  ", calpath
            print "  Skipping reduction of calibrations in that directory."
            print ""
            print "##############################################################################\n"

            continue

        # Create lists of each type of calibration from textfiles in Calibrations directory.
        flatlist = open('flatlist', "r").readlines()
        flatdarklist = open("flatdarklist", "r").readlines()
        arcdarklist = open("arcdarklist", "r").readlines()
        arclist = open("arclist", "r").readlines()
        ronchilist = open("ronchilist", "r").readlines()

        # Store the name of the first image of each calibration-type-list in
        # a variable for later use (Eg: calflat). This is because gemcombine will
        # merge a list of files (Eg: "n"+flatlist) and the output file will have the same
        # name as the first file in the list (Eg: calflat). These first file names are used
        # later in the pipeline.
        calflat = (flatlist[0].strip()).rstrip('.fits')
        flatdark = (flatdarklist[0].strip()).rstrip('.fits')
        arcdark = (arcdarklist[0].strip()).rstrip('.fits')
        arc = (arclist[0].strip()).rstrip('.fits')
        ronchiflat = (ronchilist[0].strip()).rstrip('.fits')

        # Check start and stop values for reduction steps. Ask user for a correction if
        # input is not valid.
        valindex = start
        while valindex > stop  or valindex < 1 or stop > 4:
            print "\n#####################################################################"
            print "#####################################################################"
            print ""
            print "     WARNING in calibrate: invalid start/stop values of calibration "
            print "                           reduction steps."
            print ""
            print "#####################################################################"
            print "#####################################################################\n"

            valindex = int(raw_input("\nPlease enter a valid start value (1 to 4, default 1): "))
            stop = int(raw_input("\nPlease enter a valid stop value (1 to 4, default 4): "))

        # Print the current directory of calibrations being processed.
        print "\n#################################################################################"
        print "                                   "
        print "  Currently working on calibrations "
        print "  in ", calpath
        print "                                   "
        print "#################################################################################\n"


        while valindex <= stop:

            #############################################################################
            ##  STEP 1: Determine the shift to the MDF (mask definition file)          ##
            ##          using nfprepare (nsoffset). Ie: locate the spectra.            ##
            ##  Output: First image in flatlist with "s" prefix.                       ##
            #############################################################################

            if valindex == 1:
                if debug:
                    a = raw_input("About to enter step 1: locate the spectrum.")
                getShift(calflat, over, log)
                print "\n###################################################################"
                print ""
                print "    STEP 1: Locate the Spectrum (Determine the shift to the MDF) - COMPLETED"
                print ""
                print "###################################################################\n"

            #############################################################################
            ##  STEP 2: Create Flat Field frame and BPM (Bad Pixel Mask)               ##
            ##  Output: Flat Field image with spatial and spectral information.        ##
            ##          First image in flatlist with  "rgn" prefix and "_flat" suffix. ##
            #############################################################################

            elif valindex == 2:
                if debug:
                    a = raw_input("About to enter step 2: flat field.")
                makeFlat(flatlist, flatdarklist, calflat, flatdark, over, log)
                print "\n###################################################################"
                print ""
                print "    STEP 2: Flat Field (Create Flat Field image and BPM image) - COMPLETED       "
                print ""
                print "###################################################################\n"

            ############################################################################
            ##  STEP 3: NFPREPARE and Combine arc darks.                              ##
            ##          NFPREPARE, Combine and flat field arcs.                       ##
            ##          Determine the wavelength solution and create the wavelength   ##
            ##          referenced arc.                                               ##
            ############################################################################

            elif valindex == 3:
                if debug:
                    a = raw_input("About to enter step 3: wavelength solution.")
                reduceArc(arclist, arc, arcdarklist, arcdark, log, over)
                wavecal(arc, log, over, path)
                print "\n###################################################################"
                print ""
                print "         STEP 3: Wavelength Solution (NFPREPARE and Combine arc darks.  "
                print "                 NFPREPARE, Combine and flat field arcs."
                print "                 Determine the wavelength solution and create the"
                print "                 wavelength referenced arc) - COMPLETED"
                print ""
                print "###################################################################\n"

            ######################################################################################
            ##  Step 4: Trace the spatial curvature and spectral distortion in the Ronchi flat. ##
            ######################################################################################

            elif valindex == 4:
                if debug:
                    a = raw_input("About to enter step 4: spatial distortion.")
                ronchi(ronchilist, ronchiflat, calflat, over, flatdark, log)
                print "\n###################################################################"
                print ""
                print "     Step 4: Spatial Distortion (Trace the spatial curvature and spectral distortion "
                print "             in the Ronchi flat) - COMPLETED"
                print ""
                print "###################################################################\n"

            else:
                print "\nERROR in nifs_baseline_calibration: step ", valindex, " is not valid.\n"
                raise SystemExit

            valindex += 1

        print "\n##############################################################################"
        print ""
        print "  COMPLETE - Calibration reductions completed for "
        print "  ", calpath
        print ""
        print "##############################################################################\n"


    # Return to directory script was begun from.
    os.chdir(path)
    return
Exemplo n.º 18
0
    def defaults_to_configspec(self):
        """Convert our tree of default values into a ConfigObj validation
        specification.

        Walks DEFAULTS and builds, section by section, a dict of validator
        strings of the form "type(default=value)" understood by ConfigObj's
        validate machinery. Returns the resulting ConfigObj wrapping that
        spec. When DEBUG is set, the spec is also dumped to
        /tmp/terminator_configspec_debug.txt for inspection.
        """
        configspecdata = {}

        # Map Python type names onto the names ConfigObj's validator expects.
        keymap = {"int": "integer", "str": "string", "bool": "boolean"}

        # --- global_config: one validator string per known key ---
        section = {}
        for key in DEFAULTS["global_config"]:
            keytype = DEFAULTS["global_config"][key].__class__.__name__
            value = DEFAULTS["global_config"][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == "list":
                # Lists are expressed inline: list(a,b,c)
                value = "list(%s)" % ",".join(value)

            keytype = "%s(default=%s)" % (keytype, value)

            # custom_url_handler is forced to a plain empty string regardless
            # of its default's Python type.
            if key == "custom_url_handler":
                keytype = 'string(default="")'

            section[key] = keytype
        configspecdata["global_config"] = section

        # --- keybindings: strings only; unset bindings are omitted ---
        section = {}
        for key in DEFAULTS["keybindings"]:
            value = DEFAULTS["keybindings"][key]
            if value is None or value == "":
                continue
            section[key] = "string(default=%s)" % value
        configspecdata["keybindings"] = section

        # --- profiles: same spec applied to every profile via __many__ ---
        section = {}
        for key in DEFAULTS["profiles"]["default"]:
            keytype = DEFAULTS["profiles"]["default"][key].__class__.__name__
            value = DEFAULTS["profiles"]["default"][key]
            if keytype in keymap:
                keytype = keymap[keytype]
            elif keytype == "list":
                value = "list(%s)" % ",".join(value)
            # background_image is always treated as a string.
            if key == "background_image":
                keytype = "string"
            # String defaults need quoting inside the validator expression.
            if keytype == "string":
                value = '"%s"' % value

            keytype = "%s(default=%s)" % (keytype, value)

            section[key] = keytype
        configspecdata["profiles"] = {}
        configspecdata["profiles"]["__many__"] = section

        # --- layouts: fixed schema, nested two levels of __many__ ---
        section = {}
        section["type"] = "string"
        section["parent"] = "string"
        section["profile"] = "string(default=default)"
        section["command"] = 'string(default="")'
        section["position"] = 'string(default="")'
        section["size"] = "list(default=list(-1,-1))"
        configspecdata["layouts"] = {}
        configspecdata["layouts"]["__many__"] = {}
        configspecdata["layouts"]["__many__"]["__many__"] = section

        # --- plugins: no validation constraints ---
        configspecdata["plugins"] = {}

        configspec = ConfigObj(configspecdata)
        if DEBUG:
            # Use a context manager so the debug file handle is closed
            # (the previous open() without close leaked the handle).
            with open("/tmp/terminator_configspec_debug.txt", "w") as debugfile:
                configspec.write(debugfile)
        return configspec
Exemplo n.º 19
0
def start(args):
    """
    NIFTY

    This script launches a nifs data reduction.

    It does two things; it:
        - gets data reduction parameters; either from an interactive input session or
          an input file
        - launches appropriate scripts to do the work. It can call up to 3 scripts directly:
                1) nifsSort.py
                2) nifsBaselineCalibration.py
                3) nifsReduce.py

    args -- list of command-line argument strings, handed to argparse below.
    """

    # Remember the directory we started from (no chdir happens here; the
    # called pipeline scripts may change directory).
    path = os.getcwd()

    # Get paths to Nifty data.
    RECIPES_PATH = pkg_resources.resource_filename('nifty', 'recipes/')
    RUNTIME_DATA_PATH = pkg_resources.resource_filename(
        'nifty', 'runtimeData/')

    # Format logging options.
    FORMAT = '%(asctime)s %(message)s'
    DATEFMT = datefmt()

    # Set up the logging file.
    logging.basicConfig(filename='Nifty.log',
                        format=FORMAT,
                        datefmt=DATEFMT,
                        level=logging.DEBUG)
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # Add a stdout handler so every logging.info() call goes to both the
    # console and the Nifty.log file.
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    logging.info("\n####################################")
    logging.info("#                                  #")
    logging.info("#             NIFTY                #")
    logging.info("#   NIFS Data Reduction Pipeline   #")
    logging.info("#         Version " + __version__ + "            #")
    logging.info("#         July 25th, 2017          #")
    logging.info("#     Marie Lemoine-Busserolle     #")
    logging.info("# Gemini Observatory, Hilo, Hawaii #")
    logging.info("#                                  #")
    logging.info("####################################\n")

    # Make sure to change this if you change the default logfile.
    logging.info('The log file is Nifty.log.\n')

    # Parse command line options.
    parser = argparse.ArgumentParser(
        description='Do a Gemini NIFS data reduction.')
    # Ability to repeat the last data reduction
    parser.add_argument(
        '-r',
        '--repeat',
        dest='repeat',
        default=False,
        action='store_true',
        help=
        'Repeat the last data reduction, loading parameters from runtimeData/config.cfg.'
    )
    # Ability to load a built-in configuration file (recipe)
    parser.add_argument(
        '-l',
        '--recipe',
        dest='recipe',
        action='store',
        help=
        'Load data reduction parameters from the a provided recipe. Default is default_input.cfg.'
    )
    # Ability to load your own configuration file
    parser.add_argument(
        dest='inputfile',
        action='store',
        help='Load data reduction parameters from <inputfile>.cfg.')
    # Ability to do a quick and dirty fully automatic data reduction with no user input
    # TODO(nat): make it so Nifty does this when you type "niftyRun" with no options
    parser.add_argument(
        '-f',
        '--fullReduction',
        dest='fullReduction',
        default=False,
        action='store_true',
        help=
        'Do a full reduction with data reduction parameters loaded from runtimeData/default_input.cfg'
    )

    # Rebind the parameter to the parsed namespace.
    args = parser.parse_args(args)

    repeat = args.repeat
    fullReduction = args.fullReduction
    inputfile = args.inputfile

    # Check if the user specified at command line to repeat the last Reduction, do a full default data reduction from a
    # recipe file or do a full data reduction from a handmade file.
    if not repeat and not fullReduction and not inputfile:
        # If not get user input and check if user specified a full data reduction.
        fullReduction = getUserInput()

    # TODO(nat): Add proper documentation on supplying an input file name (the args option here).
    if fullReduction:
        # TODO(nat): move this code to a function.
        # Seed runtimeData/config.cfg with the packaged default recipe.
        shutil.copy(RECIPES_PATH + 'default_input.cfg',
                    RUNTIME_DATA_PATH + 'config.cfg')
        logging.info(
            "\nData reduction parameters for this reduction were copied from recipes/default_input.cfg to runtimeData/config.cfg."
        )

    if inputfile:
        # Load input from a .cfg file user specified at command line.
        # NOTE(review): assumes <inputfile> is relative to the current
        # working directory — confirm against the CLI docs.
        shutil.copy('./' + inputfile, RUNTIME_DATA_PATH + 'config.cfg')
        logging.info("\nPipeline configuration for this data reduction was read from " + str(inputfile) + \
        " and copied to ./config.cfg.")
    else:
        # Otherwise use whatever is currently in runtimeData/config.cfg
        # (either the recipe just copied above, or the previous run's config).
        shutil.copy(RUNTIME_DATA_PATH + 'config.cfg', './config.cfg')
        logging.info(
            "\nPipeline configuration for this data reduction has been written to ./config.cfg"
        )

    # Print data reduction parameters for a user's peace-of-mind.
    logging.info(
        "\nParameters for this data reduction as read from that file:\n")
    with open('./config.cfg') as config_file:
        options = ConfigObj(config_file, unrepr=True)
        for i in options:
            logging.info(str(i) + " " + str(options[i]))
    logging.info("")

    # Define parameters used by this script: which pipeline steps to run,
    # plus the debug flag that pauses before each step.
    with open('./config.cfg') as config_file:
        options = ConfigObj(config_file, unrepr=True)
        sort = options['sort']
        calibrationReduction = options['calibrationReduction']
        telluricReduction = options['telluricReduction']
        scienceReduction = options['scienceReduction']
        debug = options['debug']

    ###########################################################################
    ##                         SETUP COMPLETE                                ##
    ##                      BEGIN DATA REDUCTION                             ##
    ##                                                                       ##
    ##        Four Main Steps:                                               ##
    ##          1) Sort the Raw Data - nifsSort.py                           ##
    ##          2) Reduce baseline calibrations - nifsBaselineCalibration.py ##
    ##          3) Reduce telluric observations - nifsReduce.py              ##
    ##          4) Reduce science observations - nifsReduce.py               ##
    ##                                                                       ##
    ###########################################################################

    ###########################################################################
    ##                      STEP 1: Sort the raw data.                       ##
    ###########################################################################

    if sort:
        if debug:
            # In debug mode, pause so the user can inspect state before the
            # step runs; the entered text is discarded.
            a = raw_input('About to enter sort.')
        nifsSort.start()
    printDirectoryLists()

    ###########################################################################
    ##                STEP 2: Reduce baseline calibrations.                  ##
    ###########################################################################

    if calibrationReduction:
        if debug:
            a = raw_input('About to enter calibrate.')
        nifsBaselineCalibration.start()

    ###########################################################################
    ##                STEP 3: Reduce telluric observations.                  ##
    ###########################################################################

    if telluricReduction:
        if debug:
            a = raw_input(
                'About to enter reduce to reduce Telluric images, create telluric correction spectrum and blackbody spectrum.'
            )
        nifsReduce.start('Telluric')

    ###########################################################################
    ##                 STEP 4: Reduce science observations.                  ##
    ###########################################################################

    if scienceReduction:
        if debug:
            a = raw_input('About to enter reduce to reduce science images.')
        nifsReduce.start('Science')

    ###########################################################################
    ##                    Data Reduction Complete!                           ##
    ##                  Good luck with your science!                         ##
    ###########################################################################

    logging.info('#########################################')
    logging.info('#                                       #')
    logging.info('#        DATA REDUCTION COMPLETE        #')
    logging.info('#     Good luck with your science!      #')
    logging.info('#        Check out ??                   #')
    logging.info('#   For docs, recipes and examples.     #')
    logging.info('#                                       #')
    logging.info('#########################################')

    return