Example 1
    def execute(self):
        """
        Assemble ShakeMap input data and write a ShakeMapInputContainer named
        shake_data.hdf in the event's 'current' directory.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        eventxml = os.path.join(datadir, 'event.xml')
        self.logger.debug('Looking for event.xml file...')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)

        # Prompt for a comment string if none is provided on the command line
        if self.comment is None:
            if sys.stdout is not None and sys.stdout.isatty():
                self.comment = input(
                    'Please enter a comment for this version.\n'
                    'comment: ')
            else:
                self.comment = ''

        # find any source.txt or moment.xml files
        momentfile = os.path.join(datadir, 'moment.xml')
        sourcefile = os.path.join(datadir, 'source.txt')
        if not os.path.isfile(sourcefile):
            sourcefile = None
        if not os.path.isfile(momentfile):
            momentfile = None

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)
        pdl_path = os.path.join(datadir, 'pdl')
        if os.path.isdir(pdl_path):
            shutil.rmtree(pdl_path, ignore_errors=True)

        # Look for any .transferred file and delete it
        save_file = os.path.join(datadir, SAVE_FILE)
        if os.path.isfile(save_file):
            os.remove(save_file)

        #
        # Look for global configs in install_path/config
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        self.logger.debug('Looking for configuration files...')
        modules = ConfigObj(
            os.path.join(install_path, 'config', 'modules.conf'),
            configspec=spec_file)
        gmpe_sets = ConfigObj(
            os.path.join(install_path, 'config', 'gmpe_sets.conf'),
            configspec=spec_file)
        global_config = ConfigObj(
            os.path.join(install_path, 'config', 'model.conf'),
            configspec=spec_file)

        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_select.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_select.conf')
        if os.path.isfile(event_config_file):
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
        elif os.path.isfile(event_config_zc_file):
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
        else:
            event_config = ConfigObj()

        #
        # start merging event_config
        #
        global_config.merge(event_config)
        global_config.merge(modules)
        global_config.merge(gmpe_sets)

        results = global_config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(global_config, results)

        check_config(global_config, self.logger)

        global_data_path = os.path.join(os.path.expanduser('~'),
                                        'shakemap_data')
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros; this could have the event ID, so we
        # can't just use the file_type handler in the configspec
        #
        if 'file' in global_config['interp']['prediction_location']:
            loc_file = global_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = path_macro_sub(loc_file, ip=install_path,
                                          dp=data_path, gp=global_data_path,
                                          ei=self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError("prediction file '%s' is not "
                                            "a valid file" % loc_file)
                global_config['interp']['prediction_location']['file'] = \
                    loc_file

        config = global_config.dict()

        self.logger.debug('Looking for data files...')
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))
        datafiles += glob.glob(os.path.join(datadir, '*_dat.json'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.json')):
            datafiles.append(os.path.join(datadir, 'stationlist.json'))

        self.logger.debug('Looking for rupture files...')
        # look for geojson versions of rupture files
        rupturefile = os.path.join(datadir, 'rupture.json')
        if not os.path.isfile(rupturefile):
            # failing any of those, look for text file versions
            rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
            rupturefile = None
            if len(rupturefiles):
                rupturefile = rupturefiles[0]

        #
        # Sort out the version history. Get the most recent backup file and
        # extract the existing history. Then add a new line for this run.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']
        backup_dirs = sorted(
            glob.glob(os.path.join(datadir, '..', 'backup*')),
            reverse=True)
        if len(backup_dirs):
            #
            # Backup files exist so find the latest one and extract its
            # history, then add a new line that increments the version
            #
            bu_file = os.path.join(backup_dirs[0], 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            version = int(
                backup_dirs[0].replace(
                    os.path.join(datadir, '..', 'backup'), ''))
            version += 1
            new_line = [timestamp, originator, version, self.comment]
            history['history'].append(new_line)
        elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
            #
            # No backups are available, but there is an existing shake_data
            # file. Extract its history and update the timestamp and
            # source network (but leave the version alone).
            # If there is no history, just start a new one with version 1
            #
            bu_file = os.path.join(datadir, 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            if 'history' in history:
                new_line = [timestamp, originator, history['history'][-1][2],
                            self.comment]
                history['history'][-1] = new_line
            else:
                history = {'history': []}
                new_line = [timestamp, originator, 1, self.comment]
                history['history'].append(new_line)
        else:
            #
            # No backup and no existing file. Make this version 1
            #
            history = {'history': []}
            new_line = [timestamp, originator, 1, self.comment]
            history['history'].append(new_line)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')

        self.logger.debug('Creating input container...')
        shake_data = ShakeMapInputContainer.createFromInput(
            hdf_file,
            config,
            eventxml,
            history,
            rupturefile=rupturefile,
            sourcefile=sourcefile,
            momentfile=momentfile,
            datafiles=datafiles)
        self.logger.debug('Created HDF5 input container in %s' %
                          shake_data.getFileName())
        ah = AmplitudeHandler(install_path, data_path)
        event = ah.getEvent(self._eventid)
        if event is None:
            origin = shake_data.getRuptureObject().getOrigin()
            event = {'id': self._eventid,
                     'netid': origin.netid,
                     'network': origin.network,
                     'time': origin.time.strftime(constants.TIMEFMT),
                     'lat': origin.lat,
                     'lon': origin.lon,
                     'depth': origin.depth,
                     'mag': origin.mag,
                     'locstring': origin.locstring}
            ah.insertEvent(event)
        shake_data.close()
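
The version-history bookkeeping above reduces to a plain dict holding rows of
[timestamp, originator, version, comment]. A minimal sketch of that structure,
and of the version increment performed in the backup branch (values are
illustrative, not taken from a real event):

import datetime

# Illustrative sketch of the history structure built by execute();
# each row mirrors the new_line lists above.
timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
history = {'history': [[timestamp, 'us', 1, 'initial run']]}

# The backup branch appends a row with the version incremented:
last_version = int(history['history'][-1][2])
history['history'].append([timestamp, 'us', last_version + 1, 'rerun'])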
Example 2
def test_input_container():
    f, datafile = tempfile.mkstemp()
    os.close(f)
    event_text = """<?xml version="1.0" encoding="US-ASCII" standalone="yes"?>
<earthquake id="2008ryan" lat="30.9858" lon="103.3639" mag="7.9"
time="2008-05-12T06:28:01Z"
depth="19.0" locstring="EASTERN SICHUAN, CHINA" productcode="us2008ryan"
mech="" netid="us" network="" />"""
    try:
        config = {
            'alliance': 'chaotic neutral',
            'race': 'Elf',
            'armor': 5,
            'class': 'Warrior',
            'intelligence': 10
        }
        rupturefile = os.path.join(homedir, 'container_data',
                                   'Barkaetal02_fault.txt')
        eventfile = io.StringIO(event_text)
        datafiles = [
            os.path.join(homedir, 'container_data/northridge_stations_dat.xml')
        ]

        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = 'us'
        version = 1
        history = {'history': [[timestamp, originator, version]]}

        container = ShakeMapInputContainer.createFromInput(
            datafile,
            config,
            eventfile,
            history,
            datafiles=datafiles,
            rupturefile=rupturefile)
        cfile = container.getFileName()
        assert datafile == cfile
        config = container.getConfig()
        station = container.getStationList()
        rupture = container.getRuptureObject()
        history = container.getVersionHistory()
        container.close()

        container2 = ShakeMapInputContainer.load(datafile)
        config2 = container2.getConfig()
        station2 = container2.getStationList()
        rupture2 = container2.getRuptureObject()  # noqa
        history2 = container2.getVersionHistory()

        assert dict_equal(config, config2)
        df1, _ = station.getStationDictionary(instrumented=False)
        df2, _ = station2.getStationDictionary(instrumented=False)
        assert dict_equal(df1, df2)
        df1, _ = station.getStationDictionary(instrumented=True)
        df2, _ = station2.getStationDictionary(instrumented=True)
        assert dict_equal(df1, df2)
        assert history['history'][-1][0] == history2['history'][-1][0]
        assert history['history'][-1][1] == history2['history'][-1][1]
        assert history['history'][-1][2] == history2['history'][-1][2]

        container2.close()

        eventfile.seek(0)
        container3 = ShakeMapInputContainer.createFromInput(
            datafile, config, eventfile, {})
        # this should fail, because we haven't set any station data yet
        with pytest.raises(AttributeError):
            container3.getStationList()
        rupture = container3.getRuptureObject()
        history = container3.getVersionHistory()
        assert len(history) == 0
        assert isinstance(rupture, PointRupture)

        container3.setStationData(datafiles)

        #
        # Test the getStationDict() and setStationDict() functions with
        # some dummy data
        #
        config = {
            'alliance': 'chaotic neutral',
            'race': 'Elf',
            'armor': 5,
            'class': 'Warrior',
            'intelligence': 10
        }
        with pytest.raises(AttributeError):
            container3.getStationDict()
        with pytest.raises(TypeError):
            container3.setStationDict(None)
        container3.setStationDict(config)
        config2 = container3.getStationDict()
        assert dict_equal(config, config2)
        container3.close()

    finally:
        os.remove(datafile)
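
These tests lean on a dict_equal helper that is not shown here. A rough sketch
of the recursive comparison such a helper might perform; this is an assumption
for illustration, not the project's actual implementation:

def dict_equal(d1, d2):
    # Hypothetical stand-in for the dict_equal helper used above:
    # compares two mappings key by key, recursing into nested dicts.
    if set(d1) != set(d2):
        return False
    for key, val in d1.items():
        other = d2[key]
        if isinstance(val, dict) and isinstance(other, dict):
            if not dict_equal(val, other):
                return False
        elif val != other:
            return False
    return True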
Example 3
    def execute(self):
        """
        Augment a ShakeMap input data file with local configs, data, rupture,
        etc. The version history will only be incremented if the originator
        differs from the originator in the previous line of the history.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')
        if not os.path.isfile(hdf_file):
            raise FileNotFoundError('%s does not exist. Use assemble.' %
                                    hdf_file)
        shake_data = ShakeMapInputContainer.load(hdf_file)

        # Prompt for a comment string if none is provided on the command line
        if self.comment is None:
            if sys.stdout is not None and sys.stdout.isatty():
                self.comment = input(
                    'Please enter a comment for this version.\n'
                    '(Start with "+" if you wish to append to the\n'
                    'existing comment; "+" by itself will preserve\n'
                    'existing comments.)\n'
                    'comment: ')
            else:
                self.comment = ''

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Get the config from the HDF file and merge in the local configs
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        shake_config = shake_data.getConfig()
        shake_config = ConfigObj(shake_config, configspec=spec_file)
        #
        # This is a weird hack to get around a bug/feature of ConfigObj
        # that results in the validation failing if max_workers is already
        # an integer.
        #
        if 'max_workers' in shake_config['system']:
            shake_config['system']['max_workers'] = \
                    str(shake_config['system']['max_workers'])

        modules_file = os.path.join(install_path, 'config', 'modules.conf')
        if os.path.isfile(modules_file):
            self.logger.debug('Found a modules file.')
            modules = ConfigObj(modules_file, configspec=spec_file)
            shake_config.merge(modules)
        gmpe_file = os.path.join(install_path, 'config', 'gmpe_sets.conf')
        if os.path.isfile(gmpe_file):
            self.logger.debug('Found a gmpe file.')
            gmpe_sets = ConfigObj(gmpe_file, configspec=spec_file)
            shake_config.merge(gmpe_sets)
        config_file = os.path.join(install_path, 'config', 'model.conf')
        if os.path.isfile(config_file):
            self.logger.debug('Found a global config file.')
            global_config = ConfigObj(config_file, configspec=spec_file)
            shake_config.merge(global_config)

        # extent conf (may not be present)
        extent_config = os.path.join(install_path, 'config', 'extent.conf')
        if os.path.isfile(extent_config):
            extent_config = ConfigObj(extent_config, configspec=spec_file)
        else:
            extent_config = ConfigObj()
        shake_config.merge(extent_config)
        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            self.logger.debug('Found an event specific model.conf file.')
            event_config = ConfigObj(event_config_file, configspec=spec_file)
            shake_config.merge(event_config)
        elif os.path.isfile(event_config_zc_file):
            self.logger.debug('Found an event specific model_zc file.')
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        #
        # Validate the resulting config
        #
        results = shake_config.validate(validator)
        if not results or isinstance(results, dict):
            config_error(shake_config, results)
        check_config(shake_config, self.logger)
        #
        # The vs30 file may have macros in it
        #
        vs30file = shake_config['data']['vs30file']
        global_data_path = os.path.join(os.path.expanduser('~'),
                                        'shakemap_data')
        if vs30file:
            vs30file = path_macro_sub(vs30file,
                                      ip=install_path,
                                      dp=data_path,
                                      gp=global_data_path,
                                      ei=self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError('vs30 file "%s" is not a '
                                        'valid file' % vs30file)
            shake_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in shake_config['interp']['prediction_location']:
            loc_file = shake_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':  # 'None' is a string here
                loc_file = path_macro_sub(loc_file,
                                          ip=install_path,
                                          dp=data_path,
                                          gp=global_data_path,
                                          ei=self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError('prediction file "%s" is not a '
                                            'valid file' % loc_file)
                shake_config['interp']['prediction_location']['file'] = \
                    loc_file
        #
        # Put the updated config back into shake_data.hdf
        #
        config = shake_config.dict()
        shake_data.setConfig(config)
        #
        # Look for additional data files and update the stationlist if found
        #
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))
        if datafiles:
            self.logger.debug('Found additional data files...')
            shake_data.addStationData(datafiles)
        #
        # Look for a rupture file and replace the existing one if found
        #
        rupturefile = os.path.join(datadir, 'rupture.json')
        eventxml = os.path.join(datadir, 'event.xml')
        if not os.path.isfile(eventxml):
            eventxml = None
        if not os.path.isfile(rupturefile):
            faultfiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
            if len(faultfiles):
                rupturefile = faultfiles[0]
            else:
                rupturefile = None
        if (rupturefile and os.path.isfile(rupturefile)) \
                or eventxml is not None:
            self.logger.debug('Updating rupture/origin information.')
            shake_data.updateRupture(eventxml=eventxml,
                                     rupturefile=rupturefile)

        #
        # Sort out the version history. We're working with an existing
        # HDF file, so: if we are the originator, just update the timestamp,
        # otherwise add a new line.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']

        history = shake_data.getVersionHistory()
        if history['history'][-1][1] == originator:
            history['history'][-1][0] = timestamp
            if self.comment.startswith('+'):
                if self.comment.replace('+', '') != '':
                    history['history'][-1][3] += self.comment.replace('+', ' ')
            else:
                history['history'][-1][3] = self.comment
        else:
            version = int(history['history'][-1][2]) + 1
            if self.comment.startswith('+'):
                new_line = [
                    timestamp, originator, version,
                    self.comment.replace('+', '')
                ]
            else:
                new_line = [timestamp, originator, version, self.comment]
            history['history'].append(new_line)
        shake_data.setVersionHistory(history)

        shake_data.close()
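
The '+' comment convention handled in the originator branch above can be
exercised in isolation; a small sketch with illustrative values showing the
append behavior:

# A leading '+' appends to the previous comment; '+' by itself
# leaves the existing comment untouched.
history = {'history': [['2020-01-01T00:00:00Z', 'us', 2, 'old comment']]}
comment = '+more detail'

if comment.startswith('+'):
    if comment.replace('+', '') != '':
        history['history'][-1][3] += comment.replace('+', ' ')
else:
    history['history'][-1][3] = comment

print(history['history'][-1][3])  # old comment more detail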
Example 4
    def execute(self):
        """
        Augment a ShakeMap input data file with local configs, data, rupture,
        etc. The version history will only be incremented if the originator
        differs from the originator in the previous line of the history.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')
        if not os.path.isfile(hdf_file):
            raise FileNotFoundError('%s does not exist. Use assemble.' %
                                    hdf_file)
        shake_data = ShakeMapInputContainer.load(hdf_file)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Get the config from the HDF file and merge in the local configs
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        shake_config = shake_data.getConfig()
        shake_config = ConfigObj(shake_config, configspec=spec_file)

        modules_file = os.path.join(install_path, 'config', 'modules.conf')
        if os.path.isfile(modules_file):
            self.logger.info('Found a modules file.')
            modules = ConfigObj(modules_file, configspec=spec_file)
            shake_config.merge(modules)
        gmpe_file = os.path.join(install_path, 'config', 'gmpe_sets.conf')
        if os.path.isfile(gmpe_file):
            self.logger.info('Found a gmpe file.')
            gmpe_sets = ConfigObj(gmpe_file, configspec=spec_file)
            shake_config.merge(gmpe_sets)
        config_file = os.path.join(install_path, 'config', 'model.conf')
        if os.path.isfile(config_file):
            self.logger.info('Found a global config file.')
            global_config = ConfigObj(config_file, configspec=spec_file)
            shake_config.merge(global_config)
        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            self.logger.info('Found an event specific model.conf file.')
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        elif os.path.isfile(event_config_zc_file):
            self.logger.info('Found an event specific model_zc file.')
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        #
        # Validate the resulting config
        #
        results = shake_config.validate(validator)
        if not results or isinstance(results, dict):
            config_error(shake_config, results)
        check_config(shake_config, self.logger)
        #
        # The vs30 file may have macros in it
        #
        vs30file = shake_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError('vs30 file "%s" is not a '
                                        'valid file' % vs30file)
            shake_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in shake_config['interp']['prediction_location']:
            loc_file = shake_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError('prediction file "%s" is not a '
                                            'valid file' % loc_file)
                shake_config['interp']['prediction_location']['file'] = \
                    loc_file
        #
        # Put the updated config back into shake_data.hdf
        #
        config = shake_config.dict()
        shake_data.setConfig(config)
        #
        # Look for additional data files and update the stationlist if found
        #
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))
        if datafiles:
            self.logger.info('Found additional data files...')
            shake_data.addStationData(datafiles)
        #
        # Look for a rupture file and replace the existing one if found
        #
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        eventxml = os.path.join(datadir, 'event.xml')
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        if not os.path.isfile(eventxml):
            eventxml = None
        if rupturefile is not None or eventxml is not None:
            self.logger.info('Updating rupture/origin information.')
            shake_data.updateRupture(
                eventxml=eventxml, rupturefile=rupturefile)

        #
        # Sort out the version history. We're working with an existing
        # HDF file, so: if we are the originator, just update the timestamp,
        # otherwise add a new line.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']

        history = shake_data.getVersionHistory()
        if history['history'][-1][1] == originator:
            history['history'][-1][0] = timestamp
        else:
            version = int(history['history'][-1][2]) + 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        shake_data.setVersionHistory(history)

        shake_data.close()
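
Examples 1 and 3 delegate this macro expansion to path_macro_sub, while this
version inlines the str.replace() calls. A sketch of an equivalent helper,
assuming path_macro_sub performs exactly these substitutions; the
<GLOBAL_DATA> macro name is an assumption based on the gp keyword used in
Example 1:

def path_macro_sub(path, ip='', dp='', gp='', ei=''):
    # Hypothetical equivalent of the path_macro_sub helper called in
    # Examples 1 and 3, assuming it expands the same macros that this
    # version spells out inline (the <GLOBAL_DATA> name is a guess).
    return (path.replace('<INSTALL_DIR>', ip)
                .replace('<DATA_DIR>', dp)
                .replace('<GLOBAL_DATA>', gp)
                .replace('<EVENT_ID>', ei))

# e.g. path_macro_sub('<DATA_DIR>/<EVENT_ID>/current', dp='/data',
#                     ei='us2008ryan') -> '/data/us2008ryan/current'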
Example 5
def test_input_container():
    f, datafile = tempfile.mkstemp()
    os.close(f)
    try:
        config = {'alliance': 'chaotic neutral',
                  'race': 'Elf',
                  'armor': 5,
                  'class': 'Warrior',
                  'intelligence': 10}
        rupturefile = os.path.join(homedir, 'container_data',
                                   'Barkaetal02_fault.txt')
        event_text = """<?xml version="1.0" encoding="US-ASCII" standalone="yes"?>
    <earthquake id="2008ryan" lat="30.9858" lon="103.3639" mag="7.9" year="2008"
    month="05" day="12" hour="06" minute="28" second="01" timezone="GMT"
    depth="19.0" locstring="EASTERN SICHUAN, CHINA" created="1211173621" productcode="us2008ryan"
    otime="1210573681" type="" />"""
        eventfile = io.StringIO(event_text)
        datafiles = [os.path.join(
            homedir, 'container_data/northridge_stations_dat.xml')]

        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = 'us'
        version = 1
        history = {'history': [[timestamp, originator, version]]}

        container = ShakeMapInputContainer.createFromInput(datafile,
                                                           config,
                                                           eventfile,
                                                           history,
                                                           datafiles=datafiles,
                                                           rupturefile=rupturefile)
        cfile = container.getFileName()
        assert datafile == cfile
        config = container.getConfig()
        station = container.getStationList()
        rupture = container.getRuptureObject()
        history = container.getVersionHistory()
        container.close()

        container2 = ShakeMapInputContainer.load(datafile)
        config2 = container2.getConfig()
        station2 = container2.getStationList()
        rupture2 = container2.getRuptureObject()  # noqa
        history2 = container2.getVersionHistory()

        assert dict_equal(config, config2)
        df1 = station.getStationDictionary(instrumented=False)
        df2 = station2.getStationDictionary(instrumented=False)
        assert dict_equal(df1, df2)
        df1 = station.getStationDictionary(instrumented=True)
        df2 = station2.getStationDictionary(instrumented=True)
        assert dict_equal(df1, df2)
        assert history['history'][-1][0] == history2['history'][-1][0]
        assert history['history'][-1][1] == history2['history'][-1][1]
        assert history['history'][-1][2] == history2['history'][-1][2]

        container2.close()

        eventfile.seek(0)
        container3 = ShakeMapInputContainer.createFromInput(datafile,
                                                            config,
                                                            eventfile,
                                                            {})
        # this should fail, because we haven't set any station data yet
        with pytest.raises(AttributeError):
            container3.getStationList()
        rupture = container3.getRuptureObject()
        history = container3.getVersionHistory()
        assert len(history) == 0
        assert isinstance(rupture, PointRupture)

        container3.setStationData(datafiles)

        #
        # Test the getStationDict() and setStationDict() functions with
        # some dummy data
        #
        config = {'alliance': 'chaotic neutral',
                  'race': 'Elf',
                  'armor': 5,
                  'class': 'Warrior',
                  'intelligence': 10}
        with pytest.raises(AttributeError):
            junk = container3.getStationDict()
        with pytest.raises(TypeError):
            container3.setStationDict(None)
        container3.setStationDict(config)
        config2 = container3.getStationDict()
        assert dict_equal(config, config2)

    finally:
        os.remove(datafile)
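
The mkstemp/close/remove pattern framing this test is a common way to obtain a
writable path that the container code can then reopen on its own; a compact
sketch of the same lifecycle:

import os
import tempfile

# mkstemp() returns an open OS-level descriptor plus a path; closing
# the descriptor immediately lets other code reopen the path itself.
fd, path = tempfile.mkstemp()
os.close(fd)
try:
    with open(path, 'wb') as f:
        f.write(b'payload')  # stand-in for container creation
finally:
    os.remove(path)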
Example 6
    def execute(self):
        """
        Assemble ShakeMap input data and write a ShakeMapInputContainer named
        shake_data.hdf in the event's 'current' directory.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        eventxml = os.path.join(datadir, 'event.xml')
        self.logger.debug('Looking for event.xml file...')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Look for global configs in install_path/config
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        self.logger.debug('Looking for configuration files...')
        modules = ConfigObj(
            os.path.join(install_path, 'config', 'modules.conf'),
            configspec=spec_file)
        gmpe_sets = ConfigObj(
            os.path.join(install_path, 'config', 'gmpe_sets.conf'),
            configspec=spec_file)
        global_config = ConfigObj(
            os.path.join(install_path, 'config', 'model.conf'),
            configspec=spec_file)

        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
        elif os.path.isfile(event_config_zc_file):
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
        else:
            event_config = ConfigObj()

        #
        # start merging event_config
        #
        global_config.merge(event_config)
        global_config.merge(modules)
        global_config.merge(gmpe_sets)

        results = global_config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(global_config, results)

        check_config(global_config, self.logger)

        #
        # The vs30 file may have macros in it
        #
        vs30file = global_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError("vs30 file '%s' is not a valid file" %
                                        vs30file)
            global_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in global_config['interp']['prediction_location']:
            loc_file = global_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError("prediction file '%s' is not "
                                            "a valid file" % loc_file)
                global_config['interp']['prediction_location']['file'] = \
                    loc_file

        config = global_config.dict()

        self.logger.debug('Looking for data files...')
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))

        self.logger.debug('Looking for rupture files...')
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        #
        # Sort out the version history. Get the most recent backup file and
        # extract the existing history. Then add a new line for this run.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']
        backup_dirs = sorted(
            glob.glob(os.path.join(datadir, '..', '.backup*')),
            reverse=True)
        if len(backup_dirs):
            #
            # Backup files exist so find the latest one and extract its
            # history, then add a new line that increments the version
            #
            bu_file = os.path.join(backup_dirs[0], 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            version = int(
                backup_dirs[0].replace(
                    os.path.join(datadir, '..', '.backup'), ''))
            version += 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
            #
            # No backups are available, but there is an existing shake_data
            # file. Extract its history and update the timestamp and
            # source network (but leave the version alone).
            #
            bu_file = os.path.join(datadir, 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            new_line = [timestamp, originator, history['history'][-1][2]]
            history['history'][-1] = new_line
        else:
            #
            # No backup and no existing file. Make this version 1
            #
            history = {'history': []}
            new_line = [timestamp, originator, 1]
            history['history'].append(new_line)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')

        self.logger.debug('Creating input container...')
        shake_data = ShakeMapInputContainer.createFromInput(
                hdf_file,
                config,
                eventxml,
                history,
                rupturefile=rupturefile,
                datafiles=datafiles)
        self.logger.debug('Created HDF5 input container in %s' %
                          shake_data.getFileName())
        shake_data.close()
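
The version number in the backup branch is recovered by stripping the
directory prefix from the newest backup and parsing what remains. A small
sketch of that parsing with illustrative names; note that the reverse
lexicographic sort only finds the newest run when the suffixes are
zero-padded:

import os

# '.backup0009' sorts before '.backup0010' only because of the padding.
datadir = os.path.join('data', 'us2008ryan', 'current')
backup_dirs = sorted(
    [os.path.join(datadir, '..', '.backup0001'),
     os.path.join(datadir, '..', '.backup0002')],
    reverse=True)
version = int(backup_dirs[0].replace(
    os.path.join(datadir, '..', '.backup'), '')) + 1
print(version)  # 3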
Example 7
    def execute(self):
        """
        Assemble ShakeMap input data and write a ShakeMapInputContainer named
        shake_data.hdf in the event's 'current' directory.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        eventxml = os.path.join(datadir, 'event.xml')
        self.logger.debug('Looking for event.xml file...')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Look for global configs in install_path/config
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        self.logger.info('Looking for configuration files...')
        modules = ConfigObj(os.path.join(install_path, 'config',
                                         'modules.conf'),
                            configspec=spec_file)
        gmpe_sets = ConfigObj(os.path.join(install_path, 'config',
                                           'gmpe_sets.conf'),
                              configspec=spec_file)
        global_config = ConfigObj(os.path.join(install_path, 'config',
                                               'model.conf'),
                                  configspec=spec_file)

        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            event_config = ConfigObj(event_config_file, configspec=spec_file)
        elif os.path.isfile(event_config_zc_file):
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
        else:
            event_config = ConfigObj()

        #
        # start merging event_config
        #
        global_config.merge(event_config)
        global_config.merge(modules)
        global_config.merge(gmpe_sets)

        results = global_config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(global_config, results)

        check_config(global_config, self.logger)

        #
        # The vs30 file may have macros in it
        #
        vs30file = global_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError("vs30 file '%s' is not a valid file" %
                                        vs30file)
            global_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in global_config['interp']['prediction_location']:
            loc_file = global_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':  # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError("prediction file '%s' is not "
                                            "a valid file" % loc_file)
                global_config['interp']['prediction_location'][
                    'file'] = loc_file

        config = global_config.dict()

        self.logger.debug('Looking for data files...')
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))

        self.logger.debug('Looking for rupture files...')
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        #
        # Sort out the version history. Get the most recent backup file and
        # extract the existing history. Then add a new line for this run.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']
        backup_dirs = sorted(glob.glob(os.path.join(datadir, '..',
                                                    '.backup*')),
                             reverse=True)
        if len(backup_dirs):
            #
            # Backup files exist so find the latest one and extract its
            # history, then add a new line that increments the version
            #
            bu_file = os.path.join(backup_dirs[0], 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            version = int(backup_dirs[0].replace(
                os.path.join(datadir, '..', '.backup'), ''))
            version += 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
            #
            # No backups are available, but there is an existing shake_data
            # file. Extract its history and update the timestamp and
            # source network (but leave the version alone).
            #
            bu_file = os.path.join(datadir, 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            new_line = [timestamp, originator, history['history'][-1][2]]
            history['history'][-1] = new_line
        else:
            #
            # No backup and no existing file. Make this version 1
            #
            history = {'history': []}
            new_line = [timestamp, originator, 1]
            history['history'].append(new_line)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')

        self.logger.debug('Creating input container...')
        shake_data = ShakeMapInputContainer.createFromInput(
            hdf_file,
            config,
            eventxml,
            history,
            rupturefile=rupturefile,
            datafiles=datafiles)
        self.logger.info('Created HDF5 input container in %s' %
                         shake_data.getFileName())
        shake_data.close()
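
The merge order used in Examples 1, 6, and 7 means that values merged later
override earlier ones on conflicting keys; a tiny sketch of ConfigObj's
in-place merge semantics with illustrative keys:

from configobj import ConfigObj

# merge() updates the receiving ConfigObj in place, so an event-level
# config merged after the global one wins on conflicting keys.
base = ConfigObj({'system': {'source_network': 'us'}})
event = ConfigObj({'system': {'source_network': 'ci'}})
base.merge(event)
print(base['system']['source_network'])  # ci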