Ejemplo n.º 1
0
    def test_no_occurrence_bin_exists___oasis_exception_is_raised(self):
        """Deleting static/occurrence.bin must make input preparation raise."""
        with TemporaryDirectory() as run_dir:
            self.make_fake_bins(run_dir)
            occurrence_path = os.path.join(run_dir, 'static', 'occurrence.bin')
            os.remove(occurrence_path)

            with self.assertRaises(OasisException):
                prepare_model_run_inputs({}, run_dir)
Ejemplo n.º 2
0
    def test_periods_bin_doesnt_not_exist_event_set_isnt_specified___bin_is_copied_from_static(self):
        """A periods.bin present only under static/ is copied into input/."""
        with TemporaryDirectory() as run_dir:
            self.make_fake_bins(run_dir)

            static_path = os.path.join(run_dir, 'static', 'periods.bin')
            with io.open(static_path, 'w', encoding='utf-8') as static_file:
                static_file.write('periods bin')
                static_file.flush()

                prepare_model_run_inputs({}, run_dir)

            input_path = os.path.join(run_dir, 'input', 'periods.bin')
            with io.open(input_path, 'r', encoding='utf-8') as copied_file:
                self.assertEqual('periods bin', copied_file.read())
Ejemplo n.º 3
0
    def test_periods_bin_already_exists___existing_bin_is_uncahnged(self):
        """A periods.bin already in input/ keeps its content after preparation."""
        with TemporaryDirectory() as run_dir:
            self.make_fake_bins(run_dir)

            input_path = os.path.join(run_dir, 'input', 'periods.bin')
            with io.open(input_path, 'w', encoding='utf-8') as existing_file:
                existing_file.write('periods bin')
                existing_file.flush()

                prepare_model_run_inputs({}, run_dir)

            with io.open(input_path, 'r', encoding='utf-8') as result_file:
                self.assertEqual('periods bin', result_file.read())
Ejemplo n.º 4
0
    def test_occurrence_bin_doesnt_not_exist_event_set_is_specified___event_occurrence_id_specific_bin_is_copied_from_static(self):
        """With event_occurrence_id set, the id-specific static bin lands in input/occurrence.bin."""
        with TemporaryDirectory() as run_dir:
            self.make_fake_bins(run_dir)

            static_path = os.path.join(run_dir, 'static', 'occurrence_occurrence_id.bin')
            with io.open(static_path, 'w', encoding='utf-8') as static_file:
                static_file.write('occurrence occurrence id bin')
                static_file.flush()

                run_settings = {'model_settings': {'event_occurrence_id': 'occurrence id'}}
                prepare_model_run_inputs(run_settings, run_dir)

            input_path = os.path.join(run_dir, 'input', 'occurrence.bin')
            with io.open(input_path, 'r', encoding='utf-8') as copied_file:
                self.assertEqual('occurrence occurrence id bin', copied_file.read())
Ejemplo n.º 5
0
 def test_prepare_input_bin_raises___oasis_exception_is_raised(self):
     """An OSError from _prepare_input_bin surfaces as an OasisException."""
     failing_prepare = Mock(side_effect=OSError('os error'))
     with patch('oasislmf.model_execution.bin._prepare_input_bin', failing_prepare):
         with self.assertRaises(OasisException):
             prepare_model_run_inputs({}, 'some_dir')
Ejemplo n.º 6
0
def start_analysis(analysis_settings, input_location):
    '''
    Run an analysis against a previously uploaded inputs archive.

    Args:
        analysis_settings (dict): The analysis settings; the payload lives
            under the 'analysis_settings' key (source/analysis tags, model
            supplier and version, optional 'ri_output' flag, ...).
        input_location (string): Name of the inputs archive (without the
            archive suffix) inside the worker's INPUTS_DATA_DIRECTORY.
    Returns:
        (string) The location of the outputs.
    Raises:
        MissingInputsException: The inputs archive does not exist.
        InvalidInputsException: The inputs archive is not a tar file.
        MissingModelDataException: The model data directory does not exist.
    '''
    # Check that the input archive exists and is valid
    input_archive = os.path.join(
        settings.get('worker', 'INPUTS_DATA_DIRECTORY'),
        input_location + ARCHIVE_FILE_SUFFIX)

    if not os.path.exists(input_archive):
        raise MissingInputsException(input_archive)
    if not tarfile.is_tarfile(input_archive):
        raise InvalidInputsException(input_archive)

    source_tag = analysis_settings['analysis_settings']['source_tag']
    analysis_tag = analysis_settings['analysis_settings']['analysis_tag']
    # BUG FIX: the format arguments were previously swapped
    # (analysis_tag printed as "Source tag" and vice versa).
    logging.info("Source tag = {}; Analysis tag: {}".format(
        source_tag, analysis_tag))

    module_supplier_id = analysis_settings['analysis_settings'][
        'module_supplier_id']
    model_version_id = analysis_settings['analysis_settings'][
        'model_version_id']
    logging.info("Model supplier - version = {} {}".format(
        module_supplier_id, model_version_id))

    # Fall back to the default runner when no supplier-specific module
    # directory exists for this supplier id.
    use_default_model_runner = not Path(
        settings.get('worker', 'SUPPLIER_MODULE_DIRECTORY'),
        module_supplier_id).exists()

    model_data_path = os.path.join(
        settings.get('worker', 'MODEL_DATA_DIRECTORY'), module_supplier_id,
        model_version_id)

    if not os.path.exists(model_data_path):
        raise MissingModelDataException(model_data_path)

    logging.info("Setting up analysis working directory")

    directory_name = "{}_{}_{}".format(source_tag, analysis_tag,
                                       uuid.uuid4().hex)
    working_directory = os.path.join(
        settings.get('worker', 'WORKING_DIRECTORY'), directory_name)

    # Reinsurance output is optional; default to False when not requested.
    ri = analysis_settings['analysis_settings'].get('ri_output', False)

    prepare_model_run_directory(working_directory,
                                ri=ri,
                                model_data_src_path=model_data_path,
                                inputs_archive=input_archive)
    prepare_model_run_inputs(analysis_settings['analysis_settings'],
                             working_directory,
                             ri=ri)

    with setcwd(working_directory):
        logging.info("Working directory = {}".format(working_directory))

        # Persist the analysis_settings
        with open("analysis_settings.json", "w") as json_file:
            json.dump(analysis_settings, json_file)

        if use_default_model_runner:
            model_runner_module = runner
        else:
            sys.path.append(settings.get('worker',
                                         'SUPPLIER_MODULE_DIRECTORY'))
            model_runner_module = importlib.import_module(
                '{}.supplier_model_runner'.format(module_supplier_id))

        # TODO: add check that RI directories take the form of RI_{ID} and
        # ID is a monotonic index
        num_reinsurance_iterations = len(glob.glob('RI_[0-9]'))

        model_runner_module.run(
            analysis_settings['analysis_settings'],
            settings.getint('worker', 'KTOOLS_BATCH_COUNT'),
            num_reinsurance_iterations=num_reinsurance_iterations,
            ktools_mem_limit=settings.getboolean('worker',
                                                 'KTOOLS_MEMORY_LIMIT'),
            set_alloc_rule=settings.getint('worker', 'KTOOLS_ALLOC_RULE'),
            fifo_tmp_dir=False)

        output_location = uuid.uuid4().hex
        output_filepath = os.path.join(
            settings.get('worker', 'OUTPUTS_DATA_DIRECTORY'),
            output_location + ARCHIVE_FILE_SUFFIX)

        # Bundle the run's output directory into a gzipped tar archive.
        output_directory = os.path.join(working_directory, "output")
        with tarfile.open(output_filepath, "w:gz") as tar:
            tar.add(output_directory, arcname="output")

    if settings.getboolean('worker', 'DO_CLEAR_WORKING'):
        shutil.rmtree(working_directory, ignore_errors=True)

    logging.info("Output location = {}".format(output_location))

    return output_location