Example #1
0
    
    # Lazily load the refinement dataset from the pool and, when the caller
    # did not supply a year range, derive it from the dataset's 'year' values.
    if refinements is None:
        refinements = dataset_pool.get_dataset('refinement')
        years = refinements.get_attribute('year')
        if start_year is None: start_year = years.min()
        if end_year is None: end_year = years.max()

    # Run the refinement model once for every year in [start_year, end_year].
    for year in range(start_year, end_year+1):
        logger.start_block("Doing refinement for %s" % year )
        simulation_state.set_current_time(year)
        
        ## reload refinements, from original refinement_directory or dataset_pool, in case it's been changed by refinement model
        if refinements_storage is not None:
            refinements = DatasetFactory().search_for_dataset('refinement', package_order, arguments={'in_storage':refinements_storage})
        else:
            refinements = dataset_pool.get_dataset('refinement')
            
        # Optionally snapshot this year's cache directory into a 'backup'
        # subdirectory before the model runs against it.
        # NOTE(review): copytree here presumably raises if dst_dir already
        # exists (shutil semantics) — confirm on a second run over the same year.
        if options.backup:
            src_dir = os.path.join(options.cache_directory, str(year))
            dst_dir = os.path.join(options.cache_directory, 'backup', str(year))
            if os.path.exists(src_dir):
                logger.log_status("Backing up %s to %s" % (src_dir, dst_dir))
                copytree(src_dir, dst_dir)
        RefinementModel().run(refinements, current_year=year, dataset_pool=dataset_pool)
        # Evict the refinement dataset before flushing so it is not written
        # to the cache along with the other loaded datasets.
        if dataset_pool.has_dataset('refinement'):
            #avoid caching refinements
            dataset_pool._remove_dataset('refinement')  
        # Flush remaining loaded datasets (presumably writes them to the
        # cache — verify against DatasetPool), then clear the pool so the
        # next iteration starts from a clean state.
        dataset_pool.flush_loaded_datasets()
        dataset_pool.remove_all_datasets()
        logger.end_block()
Example #2
0
    # Run the refinement model once for every year in [start_year, end_year].
    for year in range(start_year, end_year + 1):
        logger.start_block("Doing refinement for %s" % year)
        simulation_state.set_current_time(year)

        ## reload refinements, from original refinement_directory or dataset_pool, in case it's been changed by refinement model
        if refinements_storage is not None:
            refinements = DatasetFactory().search_for_dataset(
                'refinement',
                package_order,
                arguments={'in_storage': refinements_storage})
        else:
            refinements = dataset_pool.get_dataset('refinement')

        # Optionally snapshot this year's cache directory into a 'backup'
        # subdirectory before the model runs against it.
        # NOTE(review): copytree here presumably raises if dst_dir already
        # exists (shutil semantics) — confirm on a second run over the same year.
        if options.backup:
            src_dir = os.path.join(options.cache_directory, str(year))
            dst_dir = os.path.join(options.cache_directory, 'backup',
                                   str(year))
            if os.path.exists(src_dir):
                logger.log_status("Backing up %s to %s" % (src_dir, dst_dir))
                copytree(src_dir, dst_dir)
        RefinementModel().run(refinements,
                              current_year=year,
                              dataset_pool=dataset_pool)
        # Evict the refinement dataset before flushing so it is not written
        # to the cache along with the other loaded datasets.
        if dataset_pool.has_dataset('refinement'):
            #avoid caching refinements
            dataset_pool._remove_dataset('refinement')
        # Flush remaining loaded datasets (presumably writes them to the
        # cache — verify against DatasetPool), then clear the pool so the
        # next iteration starts from a clean state.
        dataset_pool.flush_loaded_datasets()
        dataset_pool.remove_all_datasets()
        logger.end_block()