def test_samefile(self, tmpdir):
    f1 = (tempdir() / '1.txt').touch()
    f1.write_text('foo')
    f2 = (tempdir() / '2.txt').touch()
    f2.write_text('foo')
    f3 = (tempdir() / '3.txt').touch()
    f3.write_text('bar')
    f4 = (tempdir() / '4.txt')
    f1.copyfile(f4)

    assert os.path.samefile(f1, f2) == f1.samefile(f2)
    assert os.path.samefile(f1, f3) == f1.samefile(f3)
    assert os.path.samefile(f1, f4) == f1.samefile(f4)
    assert os.path.samefile(f1, f1) == f1.samefile(f1)
def test_xlsx_sheet_name_limit(self):
    '''
    PyExcelerate will raise the following if any sheet name exceeds
    31 characters:
        Exception: Excel does not permit worksheet names longer than
        31 characters. Set force_name=True to disable this restriction.
    '''
    title, schemas, submissions = build_fixture('long_names')
    fp = FormPack(schemas, title)
    options = {
        'versions': 'long_survey_name__the_quick__brown_fox_jumps'
                    '_over_the_lazy_dog_v1'
    }

    with tempdir() as d:
        xls = d / 'foo.xlsx'
        fp.export(**options).to_xlsx(xls, submissions)
        assert xls.isfile()
        book = xlrd.open_workbook(xls)
        assert book.sheet_names() == [
            u'long survey name: the quick,...',
            u'long_group_name__Victor_jagt...',
            u'long_group_name__Victor_... (1)',
        ]
def test_copy_fields_and_force_index_and_unicode(self):
    title, schemas, submissions = customer_satisfaction
    fp = FormPack(schemas, 'رضا العملاء')
    export = fp.export(copy_fields=('_uuid', '_submission_time'),
                       force_index=True)
    exported = export.to_dict(submissions)
    expected = OrderedDict({
        "رضا العملاء": {
            'fields': [
                "restaurant_name", "customer_enjoyment",
                "_uuid", "_submission_time", "_index",
            ],
            'data': [
                ["Felipes", "yes",
                 "90dd7750f83011e590707c7a9125d07d",
                 "2016-04-01 19:57:45.306805", 1],
                ["Dunkin Donuts", "no",
                 "90dd7750f83011e590707c7a9125d08d",
                 "2016-04-02 19:57:45.306805", 2],
                ["McDonalds", "no",
                 "90dd7750f83011e590707c7a9125d09d",
                 "2016-04-03 19:57:45.306805", 3],
            ]
        }
    })

    self.assertEqual(exported, expected)

    with tempdir() as d:
        xls = d / 'test.xlsx'
        fp.export().to_xlsx(xls, submissions)
        assert xls.isfile()
def test_on_missing_config_file(self):
    with tempdir() as tmp:
        config_path = tmp / 'config.yaml'
        config = ConfigParser(config_path).load()
        self.assertEqual({}, config.contexts)
        self.assertEqual('', config.current_context)
        self.assertTrue(not config_path.exists())
def setup(self, timeout=600, cleanup=True):
    """Deploy the workload.

    :param timeout: Amount of time to wait for deployment to complete.
    :param cleanup: Set to False to leave the generated deployer file
        on disk. Useful for debugging.
    """
    if not self.deployer:
        raise NameError("Path to juju-deployer is not defined.")

    with tempdir(prefix="amulet-juju-deployer-") as tmpdir:
        schema_json = json.dumps(self.schema(), indent=2)
        self.log.debug("Deployer schema\n%s", schema_json)

        schema_file = tmpdir / "deployer-schema.json"
        schema_file.write_text(schema_json)

        cmd = "{deployer} -W -L -c {schema} -e {env} -t {timeout} {env}"
        cmd_args = dict(
            deployer=self.deployer.expanduser(),
            schema=schema_file,
            env=self.juju_env,
            timeout=str(timeout + 100),
        )
        cmd = cmd.format(**cmd_args)
        self.log.debug(cmd)

        with self.deploy_w_timeout_and_dir(timeout, self.deployer_dir):
            subprocess.check_call(shlex.split(cmd))

    self.sentry = Talisman(self.services)

    if cleanup is False:
        # The tempdir context manager removed the directory on exit;
        # recreate it and rewrite the schema so it can be inspected.
        tmpdir.makedirs()
        (tmpdir / "deployer-schema.json").write_text(schema_json)
def setup(self, timeout=600, cleanup=True):
    """Deploy the workload.

    If timeout expires before the deployment completes, raises
    :class:`amulet.helpers.TimeoutError`.

    :param timeout: Amount of time to wait for deployment to complete.
        If environment variable AMULET_SETUP_TIMEOUT is set, it
        overrides this value.
    :param cleanup: Set to False to leave the generated deployer file
        on disk. Useful for debugging.

    Example::

        import amulet

        d = amulet.Deployment()
        d.add('wordpress')
        d.add('mysql')
        d.configure('wordpress', debug=True)
        d.relate('wordpress:db', 'mysql:db')
        try:
            d.setup(timeout=900)
        except amulet.helpers.TimeoutError:
            # Setup didn't complete before timeout
            pass

    """
    timeout = int(os.environ.get('AMULET_SETUP_TIMEOUT') or timeout)
    if not self.deployer:
        raise NameError('Path to juju-deployer is not defined.')

    with tempdir(prefix='amulet-juju-deployer-') as tmpdir:
        schema_json = json.dumps(self.schema(), indent=2)
        self.log.debug("Deployer schema\n%s", schema_json)

        schema_file = tmpdir / 'deployer-schema.json'
        schema_file.write_text(schema_json)

        cmd = "{deployer} -W {debug} -c {schema} -e {env} -t {timeout} {env}"
        cmd_args = dict(
            deployer=self.deployer.expanduser(),
            debug=('-d' if self.log.getEffectiveLevel() == logging.DEBUG
                   else ''),
            schema=schema_file,
            env=self.juju_env,
            timeout=str(timeout + 100),
        )
        cmd = cmd.format(**cmd_args)
        self.log.debug(cmd)

        with self._deploy_w_timeout(timeout):
            subprocess.check_call(shlex.split(cmd))

    self.sentry = Talisman(self.services, timeout=timeout)

    if cleanup is False:
        # The tempdir context manager removed the directory on exit;
        # recreate it and rewrite the schema so it can be inspected.
        tmpdir.makedirs()
        (tmpdir / 'deployer-schema.json').write_text(schema_json)
def test_save_load(heat_model):
    with tempdir() as d:
        heat_model.save(d / "heat_model")
        loaded_heat_model = Model.load(d / "heat_model")

        x, dx = np.linspace(0, 10, 50, retstep=True, endpoint=False)
        T = np.cos(x * 2 * np.pi / 10)
        initial_fields = heat_model.fields_template(x=x, T=T)
        parameters = dict(periodic=True, k=1)

        assert loaded_heat_model._symb_diff_eqs == heat_model._symb_diff_eqs
        assert loaded_heat_model._symb_dep_vars == heat_model._symb_dep_vars
        assert loaded_heat_model._symb_pars == heat_model._symb_pars
        assert loaded_heat_model._symb_help_funcs == heat_model._symb_help_funcs
        assert loaded_heat_model.F_array == heat_model.F_array
        assert (loaded_heat_model.J_array == heat_model.J_array).all()
        assert (loaded_heat_model._J_sparse_array ==
                heat_model._J_sparse_array).all()
        assert list(map(str, loaded_heat_model._args)) == list(
            map(str, heat_model._args))
        assert (loaded_heat_model.F(initial_fields, parameters) ==
                heat_model.F(initial_fields, parameters)).all()
        assert (loaded_heat_model.J(initial_fields, parameters).todense() ==
                heat_model.J(initial_fields, parameters).todense()).all()
def test_save_load(heat_model):
    with tempdir() as d:
        heat_model.save(d / "heat_model")
        loaded_heat_model = Model.load(d / "heat_model")

        x, dx = np.linspace(0, 10, 50, retstep=True, endpoint=False)
        T = np.cos(x * 2 * np.pi / 10)
        initial_fields = heat_model.fields_template(x=x, T=T)
        parameters = dict(periodic=True, k=1)

        assert loaded_heat_model._symb_diff_eqs == heat_model._symb_diff_eqs
        assert loaded_heat_model._symb_dep_vars == heat_model._symb_dep_vars
        assert loaded_heat_model._symb_pars == heat_model._symb_pars
        assert loaded_heat_model._symb_help_funcs == heat_model._symb_help_funcs
        assert loaded_heat_model.F_array == heat_model.F_array
        assert (loaded_heat_model.J_array == heat_model.J_array).all()
        assert (
            loaded_heat_model._J_sparse_array == heat_model._J_sparse_array
        ).all()
        assert list(map(str, loaded_heat_model._args)) == list(
            map(str, heat_model._args)
        )
        assert (
            loaded_heat_model.F(initial_fields, parameters)
            == heat_model.F(initial_fields, parameters)
        ).all()
        assert (
            loaded_heat_model.J(initial_fields, parameters).todense()
            == heat_model.J(initial_fields, parameters).todense()
        ).all()
def test_containers_coerce(simul, fields):
    with tempdir() as container_path:
        simul.parameters["test_bool"] = True
        simul.parameters["test_list"] = []
        simul.parameters["test_object"] = type("TestObject", (object,),
                                               dict(a=[], b={}))
        simul.attach_container(container_path)
        simul.run()
def test_containers_last(simul, fields):
    with pytest.raises(ValueError):
        simul.attach_container(None, save="")

    with tempdir() as container_path:
        simul.attach_container(container_path, save="last")
        simul.run()
        assert simul.container.data.t.size == 1
        assert simul.container.data == simul.fields
def testSameFile(self):
    f1 = (tempdir() / '1.txt').touch()
    f1.write_text('foo')
    f2 = (tempdir() / '2.txt').touch()
    f2.write_text('foo')
    f3 = (tempdir() / '3.txt').touch()
    f3.write_text('bar')
    f4 = (tempdir() / '4.txt')
    f1.copyfile(f4)

    self.assertEqual(os.path.samefile(f1, f2), f1.samefile(f2))
    self.assertEqual(os.path.samefile(f1, f3), f1.samefile(f3))
    self.assertEqual(os.path.samefile(f1, f4), f1.samefile(f4))
    self.assertEqual(os.path.samefile(f1, f1), f1.samefile(f1))
def test_containers_retrieve_list(simul, lazy):
    with tempdir() as container_path:
        simul.attach_container(container_path)
        simul.run()
        container = retrieve_container(container_path / simul.id,
                                       lazy=lazy, isel=[0, 1, 2])
        assert container.data == simul.container.data.isel(t=[0, 1, 2])
        assert container.metadata == simul.container.metadata
def test_display_probes_on_disk(simul, fmt):
    with path.tempdir() as d:
        display = display_probe(simul,
                                function=lambda simul: simul.timer.total,
                                on_disk=d, fmt=fmt)
        simul.run()
        # wait for the writer processes to finish before counting files
        [process.join() for process in display._writers]
        assert len(d.glob("*.%s" % fmt)) == 5
def test_containers_retrieve_dict(simul, lazy):
    with tempdir() as container_path:
        simul.attach_container(container_path)
        simul.run()
        container = retrieve_container(container_path / simul.id,
                                       lazy=lazy, isel=dict(x=0, t=-1))
        assert container.data == simul.container.data.isel(x=0, t=-1)
        assert container.metadata == simul.container.metadata
def testSameFile(self):
    f1 = (tempdir() / "1.txt").touch()
    f1.write_text("foo")
    f2 = (tempdir() / "2.txt").touch()
    f2.write_text("foo")
    f3 = (tempdir() / "3.txt").touch()
    f3.write_text("bar")
    f4 = tempdir() / "4.txt"
    f1.copyfile(f4)

    self.assertEqual(os.path.samefile(f1, f2), f1.samefile(f2))
    self.assertEqual(os.path.samefile(f1, f3), f1.samefile(f3))
    self.assertEqual(os.path.samefile(f1, f4), f1.samefile(f4))
    self.assertEqual(os.path.samefile(f1, f1), f1.samefile(f1))
def test_chunks(self, tmpdir):
    p = (tempdir() / 'test.txt').touch()
    txt = "0123456789"
    size = 5
    p.write_text(txt)

    for i, chunk in enumerate(p.chunks(size)):
        assert chunk == txt[i * size:i * size + size]

    assert i == len(txt) / size - 1
def test_xlsx(self):
    title, schemas, submissions = build_fixture('grouped_repeatable')
    fp = FormPack(schemas, title)
    options = {'versions': 'rgv1'}

    with tempdir() as d:
        xls = d / 'foo.xlsx'
        fp.export(**options).to_xlsx(xls, submissions)
        assert xls.isfile()
def test_context_manager_using_with(self):
    """
    The context manager will allow using the with keyword and
    provide a temporary directory that will be deleted after that.
    """
    with tempdir() as d:
        self.assertTrue(d.isdir())
    self.assertFalse(d.isdir())
def test_next_class(self):
    """
    It should be possible to invoke operations on a tempdir and
    get path classes.
    """
    d = tempdir()
    sub = d / 'subdir'
    assert isinstance(sub, path)
    d.rmdir()
def test_next_class(self):
    """
    It should be possible to invoke operations on a tempdir and
    get Path classes.
    """
    d = tempdir()
    sub = d / 'subdir'
    assert isinstance(sub, path.Path)
    d.rmdir()
def test_context_manager_using_with(self):
    """
    The context manager will allow using the with keyword and
    provide a temporary directory that will be deleted after that.
    """
    with tempdir() as d:
        assert d.isdir()
    assert not d.isdir()
def testChunks(self):
    p = (tempdir() / 'test.txt').touch()
    txt = "0123456789"
    size = 5
    p.write_text(txt)

    for i, chunk in enumerate(p.chunks(size)):
        self.assertEqual(chunk, txt[i * size:i * size + size])

    self.assertEqual(i, len(txt) / size - 1)
def test_context_manager_exception(self):
    """
    The context manager will not clean up if an exception occurs.
    """
    d = tempdir()
    d.__enter__()
    (d / 'somefile.txt').touch()
    assert not isinstance(d / 'somefile.txt', tempdir)
    d.__exit__(TypeError, TypeError('foo'), None)
    assert d.exists()
def test_containers_retrieve_incomplete(simul, lazy):
    with tempdir() as container_path:
        simul.attach_container(container_path)
        next(simul)
        simul.container.flush()
        next(simul)
        simul.container.flush()

        container = retrieve_container(container_path / simul.id)
        assert container.data == simul.container.data
        assert container.metadata == simul.container.metadata
def test_constructor(self):
    """
    One should be able to readily construct a temporary directory
    """
    d = tempdir()
    assert isinstance(d, path)
    assert d.exists()
    assert d.isdir()
    d.rmdir()
    assert not d.exists()
def test_constructor(self):
    """
    One should be able to readily construct a temporary directory
    """
    d = tempdir()
    assert isinstance(d, path.Path)
    assert d.exists()
    assert d.isdir()
    d.rmdir()
    assert not d.exists()
def test_containers_retrieve_all(simul, lazy):
    with tempdir() as container_path:
        simul.attach_container(container_path)
        simul.run()

        container = retrieve_container(container_path / simul.id)
        assert container.data == simul.container.data
        assert container.metadata == simul.container.metadata

        container = retrieve_container(container_path / simul.id,
                                       lazy=lazy, isel='all')
        assert container.data == simul.container.data
        assert container.metadata == simul.container.metadata
def test_context_manager(self):
    """
    One should be able to use a tempdir object as a context, which
    will clean up the contents after.
    """
    d = tempdir()
    res = d.__enter__()
    assert res is d
    (d / 'somefile.txt').touch()
    assert not isinstance(d / 'somefile.txt', tempdir)
    d.__exit__(None, None, None)
    assert not d.exists()
def test_containers_retrieve_backcompat(simul):
    with tempdir() as container_path:
        simul.attach_container(container_path)
        simul.run()
        Container.get_all(container_path / simul.id)
        Container.get_last(container_path / simul.id)

        # rewrite the metadata in the legacy Treant json format and check
        # that the container can still be read
        with open(container_path / simul.id / "metadata.yml", "r") as f:
            pars = yaml.load(f)
        with open(container_path / simul.id / "Treant.16486.json", "w") as f:
            json.dump(dict(categories=pars), f)
        (container_path / simul.id / "metadata.yml").remove()
        Container.get_all(container_path / simul.id)
# Assumes ``from contextlib import contextmanager``; the decorator is needed
# because this generator is used as ``with self._deploy_w_timeout(timeout):``.
@contextmanager
def _deploy_w_timeout(self, timeout):
    """Sets timeout and tmp working directory for wrapped block.

    If successful, sets instance.deployed.

    :param timeout: Amount of time to wait for deployment to complete.
    """
    deploy_dir = tempdir(prefix='amulet_deployment_')
    with deploy_dir, unit_timesout(timeout):
        yield
    self.deployed = True
def test_containers_merge(simul, lazy):
    with tempdir() as container_path:
        simul.attach_container(container_path)
        next(simul)
        simul.container.flush()
        next(simul)
        simul.container.flush()
        sliced_data = simul.container.data.load().copy()

        Container.merge_datafiles(container_path / simul.id)
        with pytest.raises(FileExistsError):
            Container.merge_datafiles(container_path / simul.id)

        fields = xr.open_dataset(container_path / simul.id / "data.nc")
        assert fields == sliced_data
def test_generate_v2(self):
    from deepsense.version import __version__
    version = int(__version__.split('.')[0])
    if version == 2:
        # test only if neptune-cli==2 is installed
        with tempdir() as d:
            temp_path = d / 'neptune.yaml'
            with temp_path.open('w') as fh:
                NeptuneConfigFileV2(**self.CONFIG_ORIG).dump(fh)
            config_read = load_neptune_config(temp_path)
            # trim to keys from config_orig
            config_trimmed = {k: v for k, v in config_read.items()
                              if k in self.CONFIG_ORIG}
            self.assertEqual(self.CONFIG_ORIG, config_trimmed)
def test_populations_during_working_hours():
    with path.tempdir() as log_parent_folder:
        log_folder = os.path.join(log_parent_folder, "logs")

        circus = Circus(name="tested_circus",
                        master_seed=1,
                        start=pd.Timestamp("8 June 2016"),
                        step_duration=pd.Timedelta("1h"))

        field_agents = circus.create_population(
            name="fa", size=100,
            ids_gen=SequencialGenerator(max_length=3, prefix="id_"))

        mobility_time_gen = WorkHoursTimerGenerator(
            clock=circus.clock, seed=next(circus.seeder))

        five_per_day = mobility_time_gen.activity(
            n=5, per=pd.Timedelta("1day"))
        std_per_day = mobility_time_gen.activity(
            n=.5, per=pd.Timedelta("1day"))
        gaussian_activity = NumpyRandomGenerator(
            method="normal", loc=five_per_day, scale=std_per_day, seed=1)
        mobility_activity_gen = gaussian_activity.map(bound_value(lb=1))

        # just a dummy operation to produce some logs
        story = circus.create_story(
            name="test_story",
            initiating_population=field_agents,
            member_id_field="some_id",
            timer_gen=mobility_time_gen,
            activity_gen=mobility_activity_gen)

        story.set_operations(
            circus.clock.ops.timestamp(named_as="TIME"),
            FieldLogger(log_id="the_logs"))

        circus.run(duration=pd.Timedelta("30 days"),
                   log_output_folder=log_folder)

        logging.info("loading produced logs")
        logs = load_all_logs(log_folder)["the_logs"]
        logging.info("number of produced logs: {} logs".format(logs.shape[0]))

        # 30 days of simulation should produce 100 * 5 * 30 == 15k logs
        assert 14e3 <= logs.shape[0] <= 16e3
def test_io_round_trip():
    with path.tempdir() as root_dir:
        population = Population(
            circus=tc, size=5,
            ids_gen=SequencialGenerator(prefix="abc", max_length=1))
        orig = Attribute(population, init_values=[10, 20, 30, 40, 50])

        full_path = os.path.join(root_dir, "attribute.csv")
        orig.save_to(full_path)

        retrieved = Attribute.load_from(full_path)
        assert orig._table.equals(retrieved._table)
def _raw(self):
    with tempdir() as td:
        cmd = "git clone -n --depth=1 {} {}".format(self.fork, self.name)
        with path(td):
            self.call(shlex.split(cmd))
        cmd = "git checkout HEAD metadata.yaml"
        with td / self.name:
            self.call(shlex.split(cmd))
        md = td / self.name / 'metadata.yaml'
        txt = md.text()
        return yaml.safe_load(txt)
def xlsx_export(request, username, id_string):
    export = build_export(request, username, id_string)
    data = [("v1", get_instances_for_user_and_form(username, id_string))]

    with tempdir() as d:
        tempfile = d / str(uuid.uuid4())
        export.to_xlsx(tempfile, data)
        xlsx = tempfile.bytes()

    name = build_export_filename(export, 'xlsx')
    ct = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    response = HttpResponse(xlsx, content_type=ct)
    response['Content-Disposition'] = 'attachment; filename="%s"' % name
    return response
def xlsx_export(request, username, id_string):
    export = build_export_context(request, username, id_string)['export']
    data = [("v1", get_instances_for_user_and_form(username, id_string))]

    with tempdir() as d:
        tempfile = d / str(uuid.uuid4())
        export.to_xlsx(tempfile, data)
        xlsx = tempfile.bytes()

    name = build_export_filename(export, 'xlsx')
    ct = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    response = HttpResponse(xlsx, content_type=ct)
    response['Content-Disposition'] = 'attachment; filename="%s"' % name
    return response
def test_paths_to_copy(self):
    with tempdir() as tmp:
        tmp.chdir()
        (tmp / 'a/1').makedirs()
        (tmp / 'a/2').makedirs()
        (tmp / 'b/1').makedirs()
        (tmp / 'b/2').makedirs()
        (tmp / 'c/1/a').makedirs()
        (tmp / 'file1').write_text('file1')
        (tmp / 'file2').write_text('file2')
        (tmp / 'file3').write_text('file3')
        (tmp / 'a/file_a1').write_text('file_a1')
        (tmp / 'a/1/file_a1_1').write_text('file_a1_1')

        # list all dirs and files
        self.assertEqual(
            {PathToDump(Path(p), Path(p))
             for p in {'a', 'b', 'c', 'file1', 'file2', 'file3'}},
            set(get_paths_to_copy()))

        # exclude 'a' and 'file1'
        self.assertEqual(
            {PathToDump(Path(p), Path(p))
             for p in {'b', 'c', 'file2', 'file3'}},
            set(get_paths_to_copy(exclude=['a', 'file1'])))

        # exclude 'a/1' and 'file1'
        self.assertEqual(
            {PathToDump(Path(p), Path(p))
             for p in {'a/2', 'a/file_a1', 'b', 'c', 'file2', 'file3'}},
            set(get_paths_to_copy(
                exclude=['a/1', 'file1', 'a/10/file_a10_1'])))

        # add external resource
        self.assertEqual(
            {PathToDump(Path(s), Path(d))
             for s, d in {('../external1', 'external1'),
                          ('a', 'a'), ('b', 'b'), ('c', 'c'),
                          ('file1', 'file1'), ('file2', 'file2'),
                          ('file3', 'file3')}},
            set(get_paths_to_copy(paths_to_copy=[tmp / '../external1'])))
def test_copy_fields_and_force_index_and_unicode(self):
    title, schemas, submissions = customer_satisfaction
    fp = FormPack(schemas, 'رضا العملاء')
    export = fp.export(copy_fields=('_uuid', '_submission_time'),
                       force_index=True)
    exported = export.to_dict(submissions)
    expected = OrderedDict({
        "رضا العملاء": {
            'fields': ["restaurant_name", "customer_enjoyment",
                       "_uuid", "_submission_time", "_index"],
            'data': [
                [
                    "Felipes", "yes",
                    "90dd7750f83011e590707c7a9125d07d",
                    "2016-04-01 19:57:45.306805", 1
                ],
                [
                    "Dunkin Donuts", "no",
                    "90dd7750f83011e590707c7a9125d08d",
                    "2016-04-02 19:57:45.306805", 2
                ],
                [
                    "McDonalds", "no",
                    "90dd7750f83011e590707c7a9125d09d",
                    "2016-04-03 19:57:45.306805", 3
                ]
            ]
        }
    })

    self.assertEqual(exported, expected)

    with tempdir() as d:
        xls = d / 'test.xlsx'
        fp.export().to_xlsx(xls, submissions)
        assert xls.isfile()
def __init__(self, juju_env=None, series="precise",
             juju_deployer="juju-deployer", **kw):
    self.services = {}
    self.relations = []
    self.interfaces = []
    self.subordinates = []
    self.series = series
    self.deployed = False
    self.juju_env = juju_env or default_environment()
    self.charm_name = get_charm_name(os.getcwd())
    self.sentry = None
    self.deployer = path(juju_deployer)
    self.deployer_dir = tempdir(prefix="amulet_deployment_")

    if "JUJU_TEST_CHARM" in os.environ:
        self.charm_name = os.environ["JUJU_TEST_CHARM"]

    self.charm_cache = CharmCache(self.charm_name)
def test_containers_attached_ondisk(simul, fields):
    with tempdir() as container_path:
        simul.attach_container(container_path)
        simul.run()

        assert simul.container.data.isel(t=0) == fields
        assert simul.container.data.isel(t=-1) == simul.fields
        assert simul.container.metadata == simul.parameters
        with open(container_path / simul.id / "metadata.yml") as metafile:
            assert yaml.load(metafile) == simul.parameters
        assert (xr.open_dataset(container_path / simul.id / "data.nc") ==
                simul.container.data)

        with pytest.raises(FileExistsError):
            Container(path=container_path / simul.id, force=False, mode="w")
        Container(path=container_path / simul.id, force=True, mode="w")

        (container_path / simul.id).rmtree()
        with pytest.raises(FileNotFoundError):
            Container(path=container_path / simul.id, mode="r")
def test_containers_meta_set(mode):
    cont = Container(None, mode)
    cont.metadata = dict(test="foo")
    assert cont.metadata["test"] == "foo"

    with tempdir() as container_path:
        cont = Container(container_path / "test_meta", mode)
        cont.metadata = dict(test="foo")
        with open(container_path / "test_meta" / "metadata.yml", "r") as f:
            pars = yaml.load(f)
        assert cont.metadata["test"] == "foo"
        assert cont.metadata["test"] == pars["test"]

        cont = Container(container_path / "test_meta", "r")
        cont.metadata = dict(test="foo")
        (container_path / "test_meta" / "metadata.yml").remove()
        assert cont.metadata is None
        assert cont.data is None

    cont = Container(None, "r")
    cont.metadata["test"] = "foo"