def setUp(self):
    self.temp_dir = tempfile.mkdtemp(
        prefix='opus_tmp_test_do_convert_numarray_cache_to_numpy_cache')
    package = OpusPackage()
    opus_package_path = package.get_path_for_package('opus_upgrade')
    self.root_path = os.path.join(opus_package_path, 'changes_2007_04_11')
    self.test_data_path = os.path.join(self.root_path, 'test_data')
def test_does_not_find_files_without_test_cases(self):
    path = OpusPackage().get_path_for_package('opus_core')
    path = os.path.join(path, 'tests', 'utils')
    module_name = 'test_scanner_test_file'
    file_name = '%s.py' % module_name
    file_name = os.path.join(path, file_name)
    f = open(file_name, 'w')
    f.write("""class TestClass(object):
    def test_method(self):
        print 'Delete me if you wish, for I am but a unit-test test file!'""")
    f.close()
    self.assert_(os.path.exists(file_name))
    package = 'opus_core'
    test_modules = TestScanner().find_opus_test_cases_for_package(package)
    self.assert_(
        'opus_core.tests.utils.%s' % module_name not in test_modules,
        "TestScanner found a test file created without unit tests "
        "(opus_core.tests.utils.%s)!" % module_name)
    os.remove(file_name)
def _find_opus_test_cases_for_package(self, package, test_case_class):
    root = OpusPackage().get_path_for_package(package)
    modules_with_test_cases = []
    for path, dirs, files in os.walk(root, topdown=True):
        for file in files:
            if not file.endswith('.py'):
                continue
            f = open(os.path.join(path, file), 'r')
            import_pattern = re.compile(r'^\s*(import|from).*unittest')
            skip_pattern = re.compile(r'^.*#.*IGNORE_THIS_FILE')
            found_import = False
            for line in f:
                if skip_pattern.match(line):
                    break
                if import_pattern.match(line):
                    found_import = True
                    break
            if not found_import:
                # No unittest import found in file.
                continue
            module_name = self._get_module_name(package, root, path, file)
            try:
                exec('import %s' % module_name)
            except Exception, val:
                logger.log_error("Could not import %s!" % module_name)
                traceback.print_exc()
                continue
            module = eval(module_name)
            if inspect.ismodule(module):
                members = inspect.getmembers(module)
                member_dict = {}
                for key, value in members:
                    member_dict[key] = value
                for key in member_dict.keys():
                    try:
                        is_subclass = issubclass(member_dict[key], test_case_class)
                    except:
                        pass
                    else:
                        if is_subclass:
                            class_name = member_dict[key].__name__
                            modules_with_test_cases.append(
                                (module_name, class_name))
            else:
                logger.log_warning('WARNING: %s is not a module!' % module)
    return modules_with_test_cases
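# Standalone sketch (not part of the original source) of the scanning idea used
# above: import a module, walk its members with inspect.getmembers, and keep the
# names of classes that subclass unittest.TestCase. Uses only the standard library.
import inspect
import unittest


def find_test_case_classes(module):
    """Return the names of unittest.TestCase subclasses defined in module."""
    class_names = []
    for name, value in inspect.getmembers(module):
        try:
            is_test_case = issubclass(value, unittest.TestCase)
        except TypeError:
            # Non-class members (functions, constants, ...) are skipped.
            continue
        if is_test_case:
            class_names.append(name)
    return class_names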
def test(self):
    opus_core_path = OpusPackage().get_opus_core_path()
    dbf_directory = os.path.join(opus_core_path, 'tests', 'data', 'dbf')
    table_name = 'test_logical'
    cache_directory = self._temp_dir
    year = 1000
    exporter = ExportDbfTableToCacheCommand(
        dbf_directory=dbf_directory,
        table_name=table_name,
        cache_directory=cache_directory,
        year=year,
        )
    exporter.execute()
    attribute_cache = AttributeCache(cache_directory=cache_directory)
    old_time = SimulationState().get_current_time()
    SimulationState().set_current_time(year)
    values = attribute_cache.load_table(table_name)
    self.assertEqual(set(['keyid', 'works']), set(values.keys()))
    self.assert_(ma.allequal(array([1, 2, 3, 4, 5]), values['keyid']))
    self.assert_(ma.allequal(array([1, 1, -1, 0, 0]), values['works']))
    SimulationState().set_current_time(old_time)
def test_tutorial_code(self):
    opus_docs_path = OpusPackage().get_path_for_package('opus_docs')
    error_code = os.system(
        '%s "%s"' % (sys.executable,
                     os.path.join(opus_docs_path, 'manual',
                                  'part-command-line', 'tutorial_code.py')))
    self.assert_(not error_code)
def create_dataset_from_dbf_storage():
    storage = StorageFactory().get_storage(
        'dbf_storage',  # type of storage
        storage_location=os.path.join(OpusPackage().get_opus_core_path(),
                                      "data", "dbf")  # directory
        )
    test_dataset = Dataset(
        in_storage=storage,
        in_table_name='test_medium',  # file name without its ending
        id_name='keyid'  # which attribute is the unique identifier
        )
    return test_dataset
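# Hedged usage sketch (not in the original source): build the dataset defined above
# and inspect it. Assumes the 'test_medium' DBF table shipped under opus_core/data/dbf
# is present; get_id_name, size and get_attribute_names are assumed from the
# opus_core Dataset API.
if __name__ == '__main__':
    dataset = create_dataset_from_dbf_storage()
    print dataset.get_id_name()          # -> ['keyid']
    print dataset.size()                 # number of rows in test_medium
    print dataset.get_attribute_names()  # attributes read from the DBF file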
def test_get_table_names_1981(self):
    opus_core_path = OpusPackage().get_opus_core_path()
    local_test_data_path = os.path.join(opus_core_path, 'data',
                                        'test_cache', '1981')
    storage = file_flt_storage(local_test_data_path)
    expected = ['base_year', 'cities', 'dumb_datasets']
    actual = storage.get_table_names()
    expected.sort()
    actual.sort()
    self.assertEquals(expected, actual)
def create_dataset_from_flt_storage():
    storage = StorageFactory().get_storage(
        'flt_storage',  # type of storage
        storage_location=os.path.join(OpusPackage().get_opus_core_path(),
                                      "data", "test_cache", "1980")  # directory
        )
    # In case of flt storage, each dataset is a directory with one file per attribute.
    zone_dataset = Dataset(
        in_storage=storage,
        in_table_name='zones',  # directory of the dataset
        id_name='zone_id'  # which attribute is the unique identifier
        )
    return zone_dataset
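# Hedged usage sketch (not in the original source): load the zone dataset created
# above and read one attribute. get_attribute is assumed from the opus_core Dataset
# API; the data comes from the 1980 test cache referenced above.
if __name__ == '__main__':
    zones = create_dataset_from_flt_storage()
    zone_ids = zones.get_attribute('zone_id')  # numpy array, one value per zone
    print zone_ids.size, 'zones loaded from the flt test cache'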
def setUp(self):
    if skip_test():
        return
    opus_core_path = OpusPackage().get_opus_core_path()
    local_test_data_path = os.path.join(opus_core_path, 'data',
                                        'test_cache', '1980')
    sftp_location = 'sftp://%s@%s' % (TESTUSERNAME, TESTHOSTNAME)
    self.storage = sftp_flt_storage(sftp_location)
    self.remote_temp_dir = self.storage.ssh_client.get_remote_temp_dir()
    self.storage._base_directory_remote = self.remote_temp_dir
    self.sftp_location = sftp_location + self.remote_temp_dir
    self.table_name = 'testtable'
def __init__(self, **kargs):
    # Estimator.__init__(self, settings=None, run_land_price_model_before_estimation=False, **kargs) # <-- old __init__
    # Estimator.__init__(self, config=None, save_estimation_results=True) # <-- new __init__ doesn't work, but not needed
    parent_dir_path = package().get_package_parent_path()
    package_path = OpusPackage().get_path_for_package("biocomplexity")
    self.storage = StorageFactory().get_storage(
        'tab_storage',
        storage_location=os.path.join(package_path, 'data'))

    ## 1. directory path of the full (4-county spatial extent) dataset
    flt_directory = os.path.join(parent_dir_path, "biocomplexity", "data", "LCCM_4County")

    ## 2. select (uncomment) one of the following directory paths of subsetted sample input data/variables
    # flt_directory_est = os.path.join(parent_dir_path, "biocomplexity", "data", "LCCM_small_test_set_opus")
    flt_directory_est = os.path.join(parent_dir_path, "biocomplexity", "data", "data_for_estimation_all")
    # flt_directory_est = os.path.join(parent_dir_path, "biocomplexity", "data", "data_for_estimation_all_orig")
    # flt_directory_est = os.path.join(parent_dir_path, "biocomplexity", "data", "data_for_suburban_orig")
    # flt_directory_est = os.path.join(parent_dir_path, "biocomplexity", "data", "data_for_urban")
    # flt_directory_est = os.path.join(parent_dir_path, "biocomplexity", "data", "data_for_urban_orig")

    ## note - must rename lct-forusewith91sample.Float32 to lct.lf4 if doing 1991-1995
    ## note - must rename lct-forusewith95sample.Float32 to lct.lf4 if doing 1995-1999

    ## 3. select (uncomment) one of the following land cover data (input data) date pairs (years)
    # years = [1991, 1995]
    years = [1995, 1999]
    # years = [1999, 2002]

    self.lc1 = LandCoverDataset(
        in_storage=StorageFactory().get_storage(
            "flt_storage",
            storage_location=os.path.join(flt_directory_est, str(years[0]))),
        resources=Resources({"lowercase": 1}))
    self.lc2 = LandCoverDataset(
        in_storage=StorageFactory().get_storage(
            "flt_storage",
            storage_location=os.path.join(flt_directory_est, str(years[1]))),
        resources=Resources({"lowercase": 1}))
    self.lc1_all = LandCoverDataset(
        in_storage=StorageFactory().get_storage(
            "flt_storage",
            storage_location=os.path.join(flt_directory, str(years[0]))),
        resources=Resources({"lowercase": 1}))
    self.lc1_all.flush_dataset()
    self.lc2_all = LandCoverDataset(
        in_storage=StorageFactory().get_storage(
            "flt_storage",
            storage_location=os.path.join(flt_directory, str(years[1]))),
        resources=Resources({"lowercase": 1}))
    self.lc2_all.flush_dataset()
def run_all_tests_for_package(self, package):
    # Common tests to be run on each package
    suite = []
    test_loader = opus_unittest.TestLoader()
    TestForSQLPassword.modul = package
    TestPackageSyntax.modul = package
    suite += test_loader.loadTestsFromTestCase(TestForSQLPassword)
    suite += test_loader.loadTestsFromTestCase(TestPackageSyntax)
    test_suite = opus_unittest.TestSuite(suite)
    opus_unittest.TextTestRunner(verbosity=2).run(test_suite)
    #TestForSQLPassword(package_name = package).test_no_sql_password_in_files()
    #TestPackageSyntax(package_name = package).test_no_opus_syntax_violations()

    # Default
    loader = PackageTestLoader().load_tests_from_package
    xml_file_name = 'TEST_all_tests.xml'

    # Unlike 2.3, Python 2.4 complains about MySQL warnings.
    # We don't want to hear it.
    filterwarnings('ignore', "Can't (create|drop) database '[^']*'; ")
    filterwarnings('ignore', "Unknown table '[^']*'")
    filterwarnings('ignore', "Table '[^']*' already exists")

    # Will we do integration tests?
    for opt in sys.argv:
        if opt in ('-i', '-I', '--integration'):
            sys.argv.remove(opt)
            loader = PackageTestLoader().load_integration_tests_from_package
            xml_file_name = 'TEST_all_integration_tests.xml'

    loader(package)

    output_as_xml = False
    for opt in sys.argv:
        if opt in ('-x', '-X', '--xml'):
            sys.argv.remove(opt)
            output_as_xml = True

    if output_as_xml:
        file_path = os.path.join(
            OpusPackage().get_path_for_package(package),
            'tests', xml_file_name)
        opus_unittest.main(testRunner=OpusXMLTestRunner(
            package, stream=open(file_path, 'w')))
    else:
        opus_unittest.main(testRunner=OpusTestRunner(package))
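# Invocation sketch (the script name is hypothetical), derived from the option
# handling above:
#   python all_tests.py          # run the package's unit tests, text output
#   python all_tests.py -i       # load integration tests instead of unit tests
#   python all_tests.py -x       # write XML results to <package>/tests/TEST_all_tests.xml
#   python all_tests.py -i -x    # integration tests, XML written to TEST_all_integration_tests.xml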
def setUp(self):
    if skip_test():
        return
    years = [1980, 1981]
    datasets = ['base_year', 'cities']
    opus_core_path = OpusPackage().get_opus_core_path()
    sftp_location = 'sftp://%s@%s' % (TESTUSERNAME, TESTHOSTNAME)
    self.storage = sftp_flt_storage(sftp_location)
    self.remote_temp_dir = self.storage.ssh_client.get_remote_temp_dir()
    self.storage._base_directory_remote = os.path.join(
        self.remote_temp_dir, 'data', 'test_cache', '1980')
    for year in years:
        local_test_data_path = os.path.join(opus_core_path, 'data',
                                            'test_cache', str(year))
        base_directory_remote = os.path.join(self.remote_temp_dir, 'data',
                                             'test_cache', str(year))
        for dataset in datasets:
            local_dir_name = os.path.join(local_test_data_path, dataset)
            remote_dir_name = base_directory_remote
            self.storage.ssh_client.mput(local_dir_name, remote_dir_name)
class Simulation(object):
    """Import data from the urbansim cache directory and compute the following
    computed variables: devt, de, commden, house_den, comm_add and house_add.
    """
    package_path = OpusPackage().get_path_for_package("biocomplexity")
    lct_attribute = "biocomplexity.land_cover.lct_recoded"
    #lct_attribute = "lct"
    possible_lcts = range(1, 15)

    def run(self, base_directory, urbansim_cache_directory, years):
        """Run the simulation.
        base_directory: directory containing all year folders of lccm.
        urbansim_cache_directory: directory containing all year folders of the urbansim cache.
        years: list of years to run."""
        model = LandCoverChangeModel(self.possible_lcts,
                                     submodel_string=self.lct_attribute,
                                     choice_attribute_name=self.lct_attribute,
                                     debuglevel=4)
        coefficients = Coefficients()
        storage = StorageFactory().get_storage(
            'tab_storage',
            storage_location=os.path.join(self.package_path, 'data'))
        coefficients.load(in_storage=storage,
                          in_table_name="land_cover_change_model_coefficients")
        specification = EquationSpecification(in_storage=storage)
        specification.load(in_table_name="land_cover_change_model_specification")
        specification.set_variable_prefix("biocomplexity.land_cover.")
        constants = Constants()
        simulation_state = SimulationState()
        simulation_state.set_cache_directory(urbansim_cache_directory)
        attribute_cache = AttributeCache()
        index = arange(100000)
        for year in years:
            simulation_state.set_current_time(year)
            #land_cover_path = os.path.join(base_directory, str(year))
            land_cover_path = base_directory
            land_covers = LandCoverDataset(
                in_storage=StorageFactory().get_storage(
                    'flt_storage', storage_location=land_cover_path),
                out_storage=StorageFactory().get_storage(
                    'flt_storage', storage_location=land_cover_path),
                debuglevel=4)
            land_covers.subset_by_index(index)
            #land_covers.load_dataset()
            gridcells = GridcellDataset(in_storage=attribute_cache, debuglevel=4)
            agents_index = None
            model.run(specification, coefficients, land_covers,
                      data_objects={"gridcell": gridcells,
                                    "constants": constants,
                                    "flush_variables": True},
                      chunk_specification={'nchunks': 1})
            land_covers.flush_dataset()
            del gridcells
            del land_covers
def setUp(self):
    opus_core_path = OpusPackage().get_opus_core_path()
    local_test_data_path = os.path.join(opus_core_path, 'data',
                                        'test_cache', '1980')
    self.storage = file_flt_storage(local_test_data_path)
def setUp(self):
    opus_core_path = OpusPackage().get_opus_core_path()
    self.local_test_data_path = os.path.join(opus_core_path, 'tests',
                                             'data', 'dbf')
    self.storage = dbf_storage(self.local_test_data_path)
class Simulation(object):
    """Import data from the urbansim cache directory and compute the following
    computed variables: devt, de, commden, house_den, comm_add and house_add.
    """
    package_path = OpusPackage().get_path_for_package("biocomplexity")
    #lct_attribute = "biocomplexity.land_cover.lct_recoded"
    lct_attribute = "lct"
    possible_lcts = range(1, 15)

    def _clean_up_land_cover_cache(self, path):
        if os.path.exists(path):
            shutil.rmtree(path)

    def _get_previous_year(self, current_year, years):
        current_year_index = -1
        for i in range(len(years)):
            if current_year == years[i]:
                current_year_index = i
                if i <= 0 or i >= len(years):
                    logger.log_error("invalid year " + str(current_year))
        return years[current_year_index - 1]

    def _generate_input_land_cover(self, current_year, base_directory,
                                   urbansim_cache_directory, years,
                                   output_directory, convert_flt, convert_input):
        if current_year == years[0]:
            if not convert_input:
                return base_directory
            else:
                package_dir_path = package().get_package_path()
                command = os.path.join(package_dir_path, "tools", "lc_convert.py")
                status = os.system(command + ' %s -i "%s" -o "%s"'
                                   % ('input data', base_directory, self.temp_land_cover_dir))
                assert status == 0, "generate input failed"
                return self.temp_land_cover_dir
        previous_year = self._get_previous_year(current_year, years)
        if not convert_flt:
            logger.start_block("Copy data from %s to temp land cover folder" % urbansim_cache_directory)
            try:
                self._copy_invariants_to_temp_land_cover_dir(
                    os.path.join(urbansim_cache_directory, str(previous_year)))
            finally:
                logger.end_block()
            return self.temp_land_cover_dir
        # package_dir_path = package().get_package_path()
        # command = os.path.join(package_dir_path, "tools", "lc_convert.py")
        flt_directory_in = os.path.join(output_directory, str(previous_year))
        flt_directory_out = self.temp_land_cover_dir
        LCCMInputConvert()._convert_lccm_input(flt_directory_in, flt_directory_out)
        # status = os.system(command + ' %d -i "%s" -o "%s"' % (previous_year, flt_directory_in, flt_directory_out))
        # assert(status == 0, "generate input failed")
        return self.temp_land_cover_dir

    def _get_max_index(self, land_cover_path):
        land_covers = LandCoverDataset(in_storage=StorageFactory().get_storage(
            "flt_storage", storage_location=land_cover_path))
        return land_covers.size()

    def _copy_invariants_to_temp_land_cover_dir(self, land_cover_path):
        logger.log_status("temp input land cover data in " + self.temp_land_cover_dir)
        land_covers = LandCoverDataset(
            in_storage=StorageFactory().get_storage(
                "flt_storage", storage_location=land_cover_path),
            out_storage=StorageFactory().get_storage(
                "flt_storage", storage_location=self.temp_land_cover_dir),
            out_table_name='land_covers',
            debuglevel=4)
        logger.log_status("Land cover dataset created.... ")  # added dec 4, 2009
        land_covers.flush_dataset()  # added dec 4, 2009
        land_covers.write_dataset(attributes=AttributeType.PRIMARY)

    def _generate_output_flt(self, current_year, urbansim_cache_directory,
                             output_directory, convert_flt):
        if not convert_flt:
            return
        package_dir_path = package().get_package_path()
        command = os.path.join(package_dir_path, "tools", "lc_convert_to_flt.py")
        flt_directory_in = os.path.join(urbansim_cache_directory, str(current_year))
        flt_directory_out = os.path.join(output_directory, str(current_year))
        status = os.system(sys.executable + ' ' + command + ' %d -i "%s" -o "%s"'
                           % (current_year, flt_directory_in, flt_directory_out))
        assert status == 0, "generate output failed"

    def _generate_output_flt2(self, current_year, urbansim_cache_directory,
                              output_directory, convert_flt):
        if not convert_flt:
            return
        flt_directory_in = os.path.join(urbansim_cache_directory, str(current_year))
        flt_directory_out = os.path.join(output_directory, str(current_year))
        ConvertToFloat()._create_flt_file(current_year, flt_directory_in, flt_directory_out)

    def run(self, base_directory, urbansim_cache_directory, years, output_directory,
            temp_folder, coefficients_name, specification_name,
            convert_flt=True, convert_input=False):
        """Run the simulation.
        base_directory: directory containing all year folders of lccm.
        urbansim_cache_directory: directory containing all year folders of the urbansim cache.
        years: list of years to run."""
        model = LandCoverChangeModel(self.possible_lcts,
                                     submodel_string=self.lct_attribute,
                                     choice_attribute_name=self.lct_attribute,
                                     debuglevel=4)
        coefficients = Coefficients()
        storage = StorageFactory().get_storage(
            'tab_storage', storage_location=os.path.join(self.package_path, 'data'))
        coefficients.load(in_storage=storage, in_table_name=coefficients_name)
        specification = EquationSpecification(in_storage=storage)
        specification.load(in_table_name=specification_name)
        specification.set_variable_prefix("biocomplexity.land_cover.")
        constants = Constants()
        simulation_state = SimulationState()
        simulation_state.set_cache_directory(urbansim_cache_directory)
        attribute_cache = AttributeCache()
        SessionConfiguration(new_instance=True,
                             package_order=['biocomplexity', 'urbansim', 'opus_core'],
                             in_storage=AttributeCache())
        ncols = LccmConfiguration.ncols
        if temp_folder is None:
            self.temp_land_cover_dir = tempfile.mkdtemp()
        else:
            self.temp_land_cover_dir = temp_folder
        for year in years:
            land_cover_path = self._generate_input_land_cover(
                year, base_directory, urbansim_cache_directory,
                years, output_directory, convert_flt, convert_input)
            #max_size = 174338406 (orig) - act. int: 19019944 (37632028 incl NoData)
            max_size = self._get_max_index(land_cover_path)  # 1st instance of lc_dataset - but looks like a 'lite' version
            offset = min(LccmConfiguration.offset, max_size)
            s = 0
            t = offset
            while (s < t and t <= max_size):
                logger.log_status("Offset: ", s, t)
                index = arange(s, t)
                land_cover_cache_path = os.path.join(urbansim_cache_directory,
                                                     str(year), 'land_covers')
                self._clean_up_land_cover_cache(land_cover_cache_path)
                simulation_state.set_current_time(year)
                # 2nd instance of lc_dataset
                land_covers = LandCoverDataset(
                    in_storage=StorageFactory().get_storage(
                        'flt_storage', storage_location=land_cover_path),
                    out_storage=StorageFactory().get_storage(
                        'flt_storage', storage_location=land_cover_path),
                    debuglevel=4)
                land_covers.subset_by_index(index)
                # land_covers.load_dataset()
                gridcells = GridcellDataset(in_storage=attribute_cache, debuglevel=4)
                agents_index = None
                model.run(specification, coefficients, land_covers,
                          data_objects={"gridcell": gridcells,
                                        "constants": constants,
                                        "flush_variables": True},
                          chunk_specification={'nchunks': 5})  # chunk size set here
                land_covers.flush_dataset()
                del gridcells
                del land_covers
                # self._generate_output_flt(year, urbansim_cache_directory, output_directory, convert_flt)
                self._generate_output_flt2(year, urbansim_cache_directory,
                                           output_directory, convert_flt)
                if t >= max_size:
                    break
                s = max(t - 10 * ncols, s)
                t = min(t + offset - 10 * ncols, max_size)
        # clean up temp storage after the simulation is done
        shutil.rmtree(self.temp_land_cover_dir)
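# Hedged usage sketch (not in the original source; all directory paths below are
# hypothetical). The coefficient and specification table names are the ones the
# simpler Simulation class earlier in this file set loads from biocomplexity/data.
if __name__ == '__main__':
    Simulation().run(
        base_directory='/projects/lccm/input',
        urbansim_cache_directory='/projects/lccm/urbansim_cache',
        years=[1995, 1999],
        output_directory='/projects/lccm/output',
        temp_folder=None,  # None -> a tempfile.mkdtemp() scratch directory is used
        coefficients_name='land_cover_change_model_coefficients',
        specification_name='land_cover_change_model_specification')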
if __name__ == "__main__":
    estimator = LCCMEstimatorMultiRun(save_estimation_results=True, debuglevel=4)

    ## 6. select (uncomment) one of the following model specifications
    ##    used for a single run of lccm_estimator
    ## import estimation_lccm_specification_all91to95 as spec_py
    ## import estimation_lccm_specification_all95to99 as spec_py
    # import estimation_lccm_specification_all99to02v2 as spec_py
    ## import estimation_lccm_specification_sub91to95 as spec_py
    ## import estimation_lccm_specification_sub95to99 as spec_py
    ## import estimation_lccm_specification_ub91to95 as spec_py
    ## import estimation_lccm_specification_ub95to99 as spec_py
    # estimator.estimate(spec_py)

    ## 2. iterate over spec_py file permutations (i.e. systematic)
    # ::IMPT:: need to read in each spec_py (output from _lccm_multirun_specpy_gen.py),
    # import them as spec_py, and iterate through them using write_dict_to_file;
    # this is repeated until all spec_py files are processed
    rootdir = os.path.join(OpusPackage().get_path_for_package("biocomplexity"),
                           "data", "uncertainty", "model_specs0")
    # rootdir = os.path.join(OpusPackage().get_path_for_package("biocomplexity"), "data", "uncertainty")
    for subdir, dirs, files in os.walk(rootdir):
        for file in files:
            file_short = file[:-3]
            if file_short != "_lccm_multirun_estimator" and file_short != "lccm_estimator_local_multirun":
                module = [file_short]
                exec "import %s as spec_py" % module[0]
                estimator.estimate(spec_py)
def setUp(self):
    opus_core_path = OpusPackage().get_opus_core_path()
    self.local_test_data_path = os.path.join(opus_core_path, 'data', 'test_cache')
    self.storage = AttributeCache(self.local_test_data_path)
    self._SimulationState_time = SimulationState().get_current_time()
def create_dataset_from_tab_storage_shortcut():
    from opus_core.misc import get_dataset_from_tab_storage
    return get_dataset_from_tab_storage(
        'tests',
        directory=os.path.join(OpusPackage().get_opus_core_path(), "data", "tab"),
        dataset_args={'in_table_name': 'tests', 'id_name': 'id'})
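# Hedged usage sketch (not in the original source): the dbf, flt and tab factory
# helpers defined above all return opus_core Dataset objects, so they share the same
# accessors. This assumes the three helpers are importable into one module and that
# size() is available on Dataset.
if __name__ == '__main__':
    for factory in (create_dataset_from_dbf_storage,
                    create_dataset_from_flt_storage,
                    create_dataset_from_tab_storage_shortcut):
        dataset = factory()
        print factory.__name__, '->', dataset.size(), 'rows'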