Example 1
def start(config_list=None, config_file=None, cont_in=None):
    """
    Run the HazImp tool, based on the config info.

    :param config_list: The configuration info, as a list.
    :param config_file: The configuration info, as a file location.
    :param cont_in: Only used in testing. A context instance.
    :returns: The context instance, after the pipeline has run.
    """
    if config_file:
        config_list = config.read_config_file(config_file)

    if isinstance(config_list, dict):
        msg = "Bad configuration file. \n"
        msg += "Add a dash ( - ) before each variable. e.g. - template: flood"
        raise RuntimeError(msg)

    if config_list is None:
        raise RuntimeError('No configuration information.')

    if cont_in is None:
        cont_in = context.Context()
    calc_jobs = config.instance_builder(config_list)
    the_pipeline = pipeline.PipeLine(calc_jobs)
    the_pipeline.run(cont_in)

    config_dict = {k: v for item in config_list for k, v in list(item.items())}
    agg = config_dict.get('aggregate')
    if agg:
        from . import aggregate
        aggregate.chloropleth(config_dict['save'], agg['boundaries'],
                              agg['save'])

    return cont_in
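
A minimal usage sketch for this first variant; the import path (hazimp.main) and the configuration file name are assumptions, not taken from the code above:

# Minimal sketch, assuming start() is exposed from a module such as
# hazimp.main and that 'flood_config.yaml' is a valid HazImp configuration
# (a YAML list of dashed entries, e.g. "- template: flood").
from hazimp import main

context_out = main.start(config_file='flood_config.yaml')

# The configuration can also be passed directly as a list of single-key
# dicts, mirroring the dashed YAML entries:
# context_out = main.start(config_list=[{'template': 'flood'}, ...])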
Example 2
def start(config_list=None, config_file=None, cont_in=None):
    """
    Run the HazImp tool, based on the config info.

    :param config_list: The configuration info, as a list.
    :param config_file: The configuration info, as a file location.
    :param cont_in: Only used in testing. A context instance.
    :returns: The context instance, after the pipeline has run.
    """
    if config_file:
        config_file = misc.download_file_from_s3_if_needed(config_file)
        config_list = config.read_config_file(config_file)

    if isinstance(config_list, dict):
        msg = "Bad configuration file. \n"
        msg += "Add a dash ( - ) before each variable. e.g. - template: flood"
        raise RuntimeError(msg)

    if config_list is None:
        raise RuntimeError('No configuration information.')

    if cont_in is None:
        cont_in = context.Context()
    # TODO: Make the entity name a tailored variable
    cont_in.set_prov_label("HazImp_analysis")
    calc_jobs = config.instance_builder(config_list)
    the_pipeline = pipeline.PipeLine(calc_jobs)
    the_pipeline.run(cont_in)

    return cont_in
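
The second variant routes the path through misc.download_file_from_s3_if_needed before reading it, so a remote configuration should also work; a sketch under the same assumptions as above (the helper is presumed to accept s3:// URIs, and the bucket and key are hypothetical):

# Same assumptions as the previous sketch; additionally presumes the S3
# helper accepts s3:// URIs and that AWS credentials are configured.
from hazimp import main

context_out = main.start(config_file='s3://my-bucket/flood_config.yaml')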
Example 3
    def test_Builder(self):
        # Run a two-job pipeline (add, then multiply) over a context that
        # starts with two exposure attributes.
        a_test = 5
        b_test = 2
        calc_list = [CALCS['add_test'], CALCS['multiply_test']]
        cont_in = context.Context()
        cont_in.exposure_att = {'a_test': a_test, 'b_test': b_test}
        the_pipeline = pipeline.PipeLine(calc_list)
        the_pipeline.run(cont_in)
        self.assertEqual(cont_in.exposure_att['d_test'], 35)
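
Reading the assertion together with the inputs, one consistent interpretation is that 'add_test' produces c_test = a_test + b_test and 'multiply_test' produces d_test = a_test * c_test; a hypothetical worked check, not HazImp's actual calculation code:

# Hypothetical reading of the two test calculations, inferred only from the
# assertion above: 5 + 2 = 7, then 5 * 7 = 35.
a_test, b_test = 5, 2
c_test = a_test + b_test   # assumed behaviour of CALCS['add_test']
d_test = a_test * c_test   # assumed behaviour of CALCS['multiply_test']
assert d_test == 35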
Example 4
    def test_BuilderII(self):
        # As above, but with a config-aware job appended that injects a
        # 'constant' attribute of 5 into the constant_test calculation.
        a_test = 5
        b_test = 2
        caj = workflow.ConfigAwareJob(CALCS['constant_test'],
                                      atts_to_add={'constant': 5})
        calc_list = [CALCS['add_test'], CALCS['multiply_test'], caj]
        cont_in = context.Context()
        cont_in.exposure_att = {'a_test': a_test, 'b_test': b_test}
        the_pipeline = pipeline.PipeLine(calc_list)
        the_pipeline.run(cont_in)
        self.assertEqual(cont_in.exposure_att['d_test'], 35)
        self.assertEqual(cont_in.exposure_att['g_test'], 10)
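
A simplified, standalone illustration of the wrapper pattern that ConfigAwareJob appears to implement here (storing extra attributes and supplying them when the job runs); this is a toy sketch, not HazImp's actual implementation:

class ToyConfigAwareJob:
    """Wrap a callable job together with extra keyword attributes."""

    def __init__(self, job, atts_to_add=None):
        self.job = job
        self.atts_to_add = atts_to_add or {}

    def __call__(self, context):
        # Run the wrapped job, passing the stored attributes as keywords.
        return self.job(context, **self.atts_to_add)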
Example 5
    def test_PipeLine_actually(self):

        # Write a file to test
        f = tempfile.NamedTemporaryFile(mode='w+t',
                                        suffix='.csv',
                                        prefix='test_Job_title_fix_Co',
                                        delete=False)
        f.write('LAT, LONG, a_test, b_test,BUILDING\n')
        f.write('1., 2., 3., 30.,TAB\n')
        f.write('4., 5., 6., 60.,DSG\n')
        f.close()
        f2 = tempfile.NamedTemporaryFile(suffix='.csv',
                                         prefix='test_Job_title_fix_Co',
                                         delete=False)
        f2.close()
        atts = {
            'file_name': f.name,
            context.EX_LAT: 'LAT',
            context.EX_LONG: 'LONG'
        }
        caj1 = workflow.ConfigAwareJob(JOBS[LOADCSVEXPOSURE], atts_to_add=atts)

        atts = {'var': 'con_test', 'value': 'yeah'}
        caj2 = workflow.ConfigAwareJob(JOBS[CONSTANT], atts_to_add=atts)
        atts = {'var': 'con2_test', 'value': 30}
        caj3 = workflow.ConfigAwareJob(JOBS[CONSTANT], atts_to_add=atts)

        calc_list = [caj1, caj2, caj3, CALCS['add_test']]
        cont_in = context.Context()
        cont_in.set_prov_label('Test label')

        the_pipeline = pipeline.PipeLine(calc_list)
        the_pipeline.run(cont_in)
        cont_dict = cont_in.save_exposure_atts(f2.name)
        os.remove(f2.name)
        if parallel.STATE.rank == 0:
            self.assertTrue(allclose(cont_dict['c_test'], asarray([33., 66.])))
            self.assertEqual(cont_dict['BUILDING'].tolist(), ['TAB', 'DSG'])
            self.assertTrue(
                allclose(cont_dict['con2_test'], asarray([30., 30.])))
            self.assertEqual(cont_dict['con_test'].tolist(), ['yeah', 'yeah'])
        os.remove(f.name)
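
A worked check of the values asserted above, assuming 'add_test' sums a_test and b_test element-wise, which is consistent with the expected c_test of [33., 66.]:

# Standalone check of the expected values; the element-wise addition is an
# assumption inferred from the assertions in the test above.
from numpy import allclose, asarray

a_test = asarray([3., 6.])
b_test = asarray([30., 60.])
assert allclose(a_test + b_test, asarray([33., 66.]))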