def test_retrieval_single_item(self):
    # Store two items in a fresh archive and read each one back by index.
    with TempDir() as temp_directory:
        temp_file_name = os.path.join(temp_directory, 'temp_file.name')
        archive = IndexedItemsArchive(temp_file_name)
        archive.append_and_save(["item0", "item1"])
        for index, expected in enumerate(["item0", "item1"]):
            self.assertEqual(archive.fetch([index])[0], expected)
def test_store():
    # Round-trip three items (empty, "Foo Value", "Bar Value") through a
    # FileStore and check each is retrievable by its content hash.
    with TempDir() as tmp:
        store = FileStore(dir=tmp)

        empty = Item()
        store.put(empty)
        foo = Item(text="Foo Value")
        store.put(foo)
        bar = Item(text="Bar Value")
        store.put(bar)

        fetched = store.item(empty.hash)
        assert fetched.hash == empty.hash

        fetched = store.item(foo.hash)
        assert fetched.hash == foo.hash
        assert fetched.text == "Foo Value"

        fetched = store.item(bar.hash)
        assert fetched.hash == bar.hash
        assert fetched.text == "Bar Value"
def test_sample_recent_memories_after_appending_to_loaded_memories(self):
    # Memories 1-3 are saved to disk by one ReplayMemories instance; 4-7 are
    # appended to a second instance backed by the same file.  Sampling the 5
    # most recent must span both the loaded and the newly appended memories.
    with TempDir() as temp_directory:
        replay_memories_file_name = os.path.join(temp_directory, 'replay_memories.dat')

        memories = [self._create_replay_memory(action_index=i)
                    for i in (11, 22, 32, 42, 52, 62, 72)]

        first_session = ReplayMemories(replay_memories_file_name, max_current_memories_in_ram=100)
        for memory in memories[:3]:
            first_session.append(memory)
        first_session.save()

        second_session = ReplayMemories(replay_memories_file_name, max_current_memories_in_ram=100)
        for memory in memories[3:]:
            second_session.append(memory)

        sampled_replay_memories = second_session.sample(5, recent_memories_span=5)
        for memory in memories[2:]:
            self.assertTrue(memory in sampled_replay_memories)
def test_sample_recent_memories(self):
    """Sampling with recent_memories_span=2 must return exactly the two most
    recently appended memories."""
    with TempDir() as temp_directory:
        replay_memories_file_name = os.path.join(temp_directory, 'replay_memories.dat')
        replay_memories = ReplayMemories(replay_memories_file_name, max_current_memories_in_ram=100)

        memories = [self._create_replay_memory(action_index=i)
                    for i in (11, 22, 32, 42, 52, 62, 72)]
        for memory in memories:
            replay_memories.append(memory)

        # Removed a leftover debug print of the sampled memories; tests
        # should not write to stdout on success.
        sampled_replay_memories = replay_memories.sample(2, recent_memories_span=2)
        # A span of 2 restricts sampling to the last two appends, so both
        # must appear in a sample of size 2.
        self.assertTrue(memories[5] in sampled_replay_memories)
        self.assertTrue(memories[6] in sampled_replay_memories)
def test_idempotent_put():
    # Storing the same item repeatedly must not raise: put() is idempotent.
    with TempDir() as tmp:
        store = FileStore(dir=tmp)
        item = Item(text="Idempotent?")
        for _ in range(3):
            store.put(item)
def testStdRun(self):
    # Run the broad BPI calculation, standardize the result, and compare the
    # standardized raster's statistics against known-good values.
    with TempDir() as d:
        in_raster = os.path.join(d, 'test_bpi.tif')
        std_raster = os.path.join(d, 'test_std_bpi.tif')

        # was encountering this: ERROR 000875: Output raster:
        # c:\Users\shau7031\AppData\Local\Temp\tmp8co8nk\FocalSt_bath1's
        # workspace is an invalid output workspace. Force the workspace to temp:
        arcpy.env.scratchWorkspace = d

        bpi.main(bathy=config.bathy_raster, inner_radius=10,
                 outer_radius=30, out_raster=in_raster, bpi_type='broad')
        self.assertTrue(os.path.exists(in_raster))

        standardize_bpi_grids.main(bpi_raster=in_raster, out_raster=std_raster)
        self.assertTrue(os.path.exists(std_raster))

        self.assertAlmostEqual(
            su.raster_properties(std_raster, "MEAN"), 0.671608391608)
        self.assertAlmostEqual(
            su.raster_properties(std_raster, "STD"), 99.655593923183)
def test_sample_loaded_memories(self):
    # Persist seven memories, reload them in a new session, and check that a
    # seeded sample of five returns the expected deterministic selection.
    with TempDir() as temp_directory:
        replay_memories_file_name = os.path.join(temp_directory, 'replay_memories.dat')

        memories = [self._create_replay_memory(action_index=i)
                    for i in (11, 22, 32, 42, 52, 62, 72)]

        writer = ReplayMemories(replay_memories_file_name, max_current_memories_in_ram=100)
        for memory in memories:
            writer.append(memory)
        writer.save()

        reader = ReplayMemories(replay_memories_file_name, max_current_memories_in_ram=100)
        sampled_replay_memories = reader.sample(5, seed=3)

        # With seed=3 the sampler deterministically picks memories 2, 4, 7, 3, 5.
        expected_replay_memories = [memories[1], memories[3], memories[6],
                                    memories[2], memories[4]]
        self.assertItemsEqual(sampled_replay_memories, expected_replay_memories)
def test_get_filename_nonexistant(self, store, key):
    # Requesting a missing key into a file must raise KeyError.
    # NOTE: boto misbehaves here and tries to erase the target file
    # the parent tests use /dev/null, which you really should not try
    # to os.remove!
    with TempDir() as tmpdir:
        target = os.path.join(tmpdir, 'a')
        with pytest.raises(KeyError):
            store.get_file(key, target)
def build_pdf(self, source, texinputs=None):
    """Compile *source* with xelatex and return the resulting PDF.

    Args:
        source: Object exposing ``temp_saved(suffix=..., dir=...)`` that
            writes the LaTeX source to a temporary file.
        texinputs: Optional list of extra TEXINPUTS search paths.  The
            caller's list is never mutated.

    Returns:
        The PDF bytes wrapped in ``I`` (``encoding=None``).

    Raises:
        LatexBuildError: when the xelatex invocation fails; chained from
            the underlying ``CalledProcessError``.
    """
    # Copy the list: the original signature used a mutable default ([])
    # and appended to it, leaking paths across calls and into the
    # caller's list.
    texinputs = list(texinputs) if texinputs else []
    # Add the directory reported by `which xelatex` to the search path.
    texinputs.append(
        bytes.decode(subprocess.check_output(["which", "xelatex"])).strip())

    with TempDir() as tmpdir, \
            source.temp_saved(suffix=".latex", dir=tmpdir) as tmp:
        # close temp file, so other processes can access it also on Windows
        tmp.close()

        base_fn = os.path.splitext(tmp.name)[0]
        output_fn = base_fn + ".pdf"

        args = [self.xelatex, tmp.name]

        # create environment
        newenv = os.environ.copy()
        newenv["TEXINPUTS"] = os.pathsep.join(texinputs) + os.pathsep

        # Open the null device via context managers so the three handles
        # are closed even on failure (the original leaked them).
        try:
            with open(os.devnull, "r") as null_in, \
                    open(os.devnull, "w") as null_out, \
                    open(os.devnull, "w") as null_err:
                subprocess.check_call(
                    args,
                    cwd=tmpdir,
                    env=newenv,
                    stdin=null_in,
                    stdout=null_out,
                    stderr=null_err,
                )
        except CalledProcessError as e:
            raise_from(LatexBuildError(base_fn + ".log"), e)

        # Close the output handle deterministically instead of leaking it.
        with open(output_fn, "rb") as pdf_file:
            return I(pdf_file.read(), encoding=None)
def test_persist_and_retrieve(self):
    # Two bundles appended and persisted by one Metrics instance must be
    # readable, in order, by a second instance pointed at the same file.
    bundle1 = self._create_metrics_bundle(
        episode_number=31, average_delta_score=33.0, average_speed=20.0,
        average_action_value=49.0, average_loss=63.0, final_score=888.0,
        execution_time=114.0)
    bundle2 = self._create_metrics_bundle(
        episode_number=32, average_delta_score=123.0, average_speed=3.55,
        average_action_value=312.1, average_loss=11.0, final_score=1002.0,
        execution_time=114.0)
    with TempDir() as temp_directory:
        metrics_file_name = os.path.join(temp_directory, 'metrics.dat')

        writer = Metrics(metrics_path=metrics_file_name, bundler=MetricsInTrainBundle)
        writer.append(bundle1)
        writer.append(bundle2)
        writer.persist_and_flush_memory()

        reader = Metrics(metrics_path=metrics_file_name, bundler=MetricsInTrainBundle)
        self.assertSequenceEqual(reader.all_metric_bundles(), [bundle1, bundle2])
def test_use_build_script_instead_of_docker_file_if_available(
        self, docker_mock, res_mock, tempDir_mock):
    """If an init-pixelated-docker-context.sh resource exists, initialize()
    must run it inside the temp build dir and then build the docker image
    from that directory (instead of using a Dockerfile)."""
    # given
    provider = DockerProvider(self._adapter, 'leap_provider',
                              self._leap_provider_x509)
    tempBuildDir = TempDir()
    try:
        tempDir_mock.return_value = tempBuildDir
        tempBuildDir_name = tempBuildDir.name
        # Renamed locals: the originals shadowed the builtins `file` and
        # `input`.  The fake script writes "<marker path> <cwd>" into the
        # marker file so we can verify both that it ran and where it ran.
        with NamedTemporaryFile() as marker_file:
            res_mock.resource_exists.return_value = True
            res_mock.resource_string.return_value = '#!/bin/bash\necho %s $PWD > %s' % (
                marker_file.name, marker_file.name)

            # when
            provider.initialize()

            # then
            res_mock.resource_exists.assert_called_with(
                'pixelated.resources', 'init-pixelated-docker-context.sh')
            res_mock.resource_string.assert_called_with(
                'pixelated.resources', 'init-pixelated-docker-context.sh')
            with open(marker_file.name, "r") as script_output:
                data = script_output.read().replace('\n', '')
                self.assertEqual(
                    '%s %s' % (marker_file.name,
                               os.path.realpath(tempBuildDir_name)),
                    data)
            docker_mock.return_value.build.assert_called_once_with(
                path=tempBuildDir_name, tag='pixelated:latest', fileobj=None)
    finally:
        tempBuildDir.dissolve()
def unpack_msg(input_file, output_dir, cfg):
    """Extract interesting attachments from an RFC822 message file.

    Walks every MIME part of *input_file*; parts whose extension is in
    ``cfg.targets`` are written to *output_dir*, parts that look like
    archives (``arch_exts``) are unpacked into a temp dir and any matching
    files inside them are copied out.  Filenames are prefixed with the
    message's own filename to avoid collisions.  Optionally removes the
    input file afterwards (``cfg.remove``).

    NOTE(review): Python 2 code — mixes byte and unicode strings; assumes
    the message filename is UTF-8 encodable into ``fs_enc``.
    """
    # NOTE(review): `target_exts` is assigned but never used — the loop
    # below reads cfg.targets directly.  Candidate for removal.
    target_exts = cfg.targets
    with open(input_file, "rb") as fp:
        msg = email.message_from_file(fp)
    only_input_filename = os.path.split(input_file)[1]
    counter = 0
    for part in msg.walk():
        # multipart/* are just containers
        try:
            if part.get_content_maintype() == 'multipart':
                continue
            # Applications should really sanitize the given filename so that an
            # email message can't be used to overwrite important files
            m_filename = part.get_filename()
            filename = m_filename
            if filename:
                # "=?" prefix means an RFC2047-encoded header word; decode
                # it with its declared charset, otherwise just escape.
                if filename.startswith("=?"):
                    decoded = decode_header(filename)
                    filename = decoded[0][0].decode(decoded[0][1].upper())
                else:
                    filename = escape_chars(filename)
                # Prefix with the source message name to keep outputs unique.
                filename = only_input_filename.decode("utf-8") + "_" + filename
                ext = os.path.splitext(filename)[1].lower()
            else:
                # No declared filename: synthesize one from the MIME type.
                ext = mimetypes.guess_extension(part.get_content_type())
                if not ext:
                    # Use a generic bag-of-bits extension
                    ext = '.bin'
                filename = u'%s_part-%03d%s' % (only_input_filename, counter, ext)
            filename = filename.encode(fs_enc)
            if ext in cfg.targets:
                # Directly interesting part: dump the decoded payload.
                with open(os.path.join(output_dir, filename), 'wb') as of:
                    of.write(part.get_payload(decode=True))
            elif ext in arch_exts and not fnmatch(filename, cfg.exclude):
                # Archive attachment: extract it to a temp dir and copy out
                # any target files it contains.
                with TempDir(dir=cfg.tempdir) as temp:
                    archpath = os.path.join(temp, filename)
                    with open(archpath, 'wb') as of:
                        of.write(part.get_payload(decode=True))
                    for f in unpack_arch(archpath, temp, cfg):
                        ext = os.path.splitext(f)[1].lower()
                        if ext in cfg.targets and not fnmatch(f, cfg.exclude):
                            path_from = os.path.join(temp, f)
                            path_to = os.path.join(output_dir, filename + '_' + f)
                            shutil.copy(path_from, path_to)
            counter += 1
        except UnicodeDecodeError as e:
            # Report which message triggered the decode failure, then
            # propagate it.
            print "oops:"
            print input_file
            raise
            # NOTE(review): everything below the `raise` is unreachable —
            # looks like debugging leftovers; `m_filename` may also be
            # unbound here if the exception fired before its assignment.
            # Consider deleting.
            print "encoded: ", type(m_filename), m_filename.encode(
                "string_escape")
            if m_filename and m_filename.startswith("=?"):
                decoded = decode_header(m_filename)
                print "tuple: ", type(decoded), decoded
    # NOTE(review): `== True` — a plain truthiness test would be idiomatic.
    if cfg.remove == True:
        os.remove(input_file)
def test_retrieval_multiple_items_on_new_session_with_non_sequential_order(self):
    # Persist four items, reopen the archive in a new session, and fetch with
    # a shuffled index list.
    # NOTE(review): the expected list is in *index* order ("item0".."item3"),
    # not in the requested order [2, 0, 1, 3].  This only passes if fetch()
    # returns items sorted by index regardless of request order — confirm
    # against IndexedItemsArchive.fetch's contract.
    with TempDir() as temp_directory:
        temp_file_name = os.path.join(temp_directory, 'temp_file.name')
        items1 = IndexedItemsArchive(temp_file_name)
        items1.append_and_save(["item0", "item1", "item2", "item3"])
        items2 = IndexedItemsArchive(temp_file_name)
        self.assertEqual(items2.fetch([2,0,1,3]), ["item0", "item1", "item2", "item3"])
def test_purge_older_items_min_recent_items_to_keep_greater_then_to_len(self):
    # When purge_min_recent_items_to_keep exceeds the number of stored
    # items, nothing may be purged.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        archive = IndexedItemsArchive(archive_path, max_items_per_file=2)
        archive.append_and_save(["item0", "item1"],
                                purge_min_recent_items_to_keep=3)
        self.assertEqual(list(archive), ["item0", "item1"])
def test_len_complex_items_on_new_session(self):
    # len() must reflect numpy-array items persisted by a previous session.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        arrays = [np.array([0, 1, 2]), np.array([3, 4, 5]), np.array([6, 7, 8])]
        writer = IndexedItemsArchive(archive_path)
        writer.append_and_save(arrays)
        reader = IndexedItemsArchive(archive_path)
        self.assertEqual(len(reader), 3)
def test_retrieval_complex_items_on_new_session(self):
    # numpy arrays persisted in one session must come back intact when
    # fetched from a second session backed by the same file.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        writer = IndexedItemsArchive(archive_path)
        writer.append_and_save(
            [np.array([0, 1, 2]), np.array([3, 4, 5]), np.array([6, 7, 8])])
        reader = IndexedItemsArchive(archive_path)
        np.testing.assert_array_equal(
            reader.fetch([0, 2]),
            [np.array([0, 1, 2]), np.array([6, 7, 8])])
def test_len_on_new_session(self):
    # len() of a reloaded archive equals the number of items saved earlier.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        writer = IndexedItemsArchive(archive_path)
        writer.append_and_save(["item0", "item1", "item2", "item3"])
        reader = IndexedItemsArchive(archive_path)
        self.assertEqual(len(reader), 4)
def test_get_into_file(self, store, key, value):
    """get_file() must write the stored value verbatim to the target path."""
    with TempDir() as tmpdir:
        store.put(key, value)
        out_filename = os.path.join(tmpdir, 'output')
        store.get_file(key, out_filename)
        # Close the handle deterministically instead of leaking it (the
        # original used a bare open(...).read()).
        with open(out_filename, 'rb') as out_file:
            assert out_file.read() == value
def test_purge_older_items_does_not_purge_when_items_stored_single_file(self):
    # With the default max_items_per_file all eight items live in a single
    # file, so a purge request must not drop any of them.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        all_items = ["item%d" % i for i in range(8)]
        archive = IndexedItemsArchive(archive_path)
        archive.append_and_save(all_items, purge_min_recent_items_to_keep=2)
        self.assertEqual(list(archive), all_items)
def setUp(self):
    # Fixture wiring: mocked user registry, temp root path, and a docker
    # adapter wrapped in a spy so its calls can be asserted.
    self._provider_hostname = 'example.org'
    self.users = MagicMock(spec=Users)
    self._tmpdir = TempDir()
    self.root_path = self._tmpdir.name
    real_adapter = PixelatedDockerAdapter(self._provider_hostname)
    self._adapter = MagicMock(wraps=real_adapter)
    self._adapter.docker_image_name.return_value = 'pixelated'
    self._leap_provider_x509 = LeapProviderX509Info()
def testSlopeRun(self):
    # Compute a slope raster from the configured bathymetry and compare its
    # mean against a known-good value.
    with TempDir() as d:
        slope_raster = os.path.join(d, 'test_slope.tif')
        # Force the scratch workspace into the temp dir so output rasters
        # land in a valid workspace.
        arcpy.env.scratchWorkspace = d
        slope.main(bathy=config.bathy_raster, out_raster=slope_raster)
        self.assertTrue(os.path.exists(slope_raster))
        self.assertAlmostEqual(
            su.raster_properties(slope_raster, "MEAN"), 3.802105241105673)
def create_recording(qemu_path, qcow, snapshot, command, copy_directory,
                     recording_path, expect_prompt, cdrom, isoname=None,
                     rr=False, savevm=False, perf=False, env=None,
                     extra_args=None, stdin=False):
    """Boot a guest, copy files in via CD-ROM, and make a PANDA recording of
    *command* running inside it.

    Args:
        qemu_path, qcow, snapshot: passed through to Qemu().
        command: argv list to run in the guest.  Never mutated (the original
            inserted "<" into the caller's list in stdin mode).
        copy_directory: host dir packed into an ISO and mounted in the guest.
        recording_path: where the PANDA recording is written.
        expect_prompt, cdrom, isoname, rr, savevm, perf, extra_args: see Qemu.
        env: optional dict of environment variables for the guest command.
            Defaults to empty; a fresh default is used per call (the original
            used a shared mutable ``{}`` default).
        stdin: if True, *command* must be "[binary] [STDIN_file]" and is
            rewritten to redirect stdin from that file.
    """
    assert not (rr and perf)
    # Avoid the mutable-default-argument trap: bind a fresh dict per call.
    if env is None:
        env = {}
    recording_path = realpath(recording_path)
    if not isoname:
        isoname = copy_directory + '.iso'

    with TempDir() as tempdir, \
            Qemu(qemu_path, qcow, snapshot, tempdir, rr=rr, perf=perf,
                 expect_prompt=expect_prompt, extra_args=extra_args) as qemu:
        if os.listdir(copy_directory):
            progress("Creating ISO {}...".format(isoname))
            make_iso(copy_directory, isoname)

            progress("Inserting CD...")
            qemu.run_monitor("change {} \"{}\"".format(cdrom, isoname))
            qemu.run_console("mkdir -p {}".format(pipes.quote(copy_directory)))
            # Make sure cdrom didn't automount
            # Make sure guest path mirrors host path
            qemu.run_console("while ! mount /dev/cdrom {}; ".format(
                pipes.quote(copy_directory)) +
                "do sleep 0.3; umount /dev/cdrom; done")
            # if there is a setup.sh script in the replay/proc_name/cdrom/ folder
            # then run that setup.sh script first (good for scriptst that need to
            # prep guest environment before script runs
            qemu.run_console("{}/setup.sh &> /dev/null || true".format(
                pipes.quote(copy_directory)))

        # Important that we type command into console before recording starts and only
        # hit enter once we've started the recording.
        progress("Running command inside guest.")
        if stdin:
            # only support for "[binary] [STDIN_file]"
            assert (len(command) == 2)
            # Work on a copy so the caller's argv list is not mutated.
            command = [command[0], "<", command[1]]
        qemu.type_console(sp.list2cmdline(env_to_list(env) + command))

        # start PANDA recording
        qemu.run_monitor("begin_record \"{}\"".format(recording_path))
        qemu.run_console(timeout=1200)

        # end PANDA recording
        progress("Ending recording...")
        qemu.run_monitor("end_record")
def test_len_loaded_memories(self):
    """Memories saved by one ReplayMemories instance must be counted by a
    fresh instance loading the same file."""
    with TempDir() as temp_directory:
        replay_memories1_file_name = os.path.join(temp_directory, 'replay_memories.dat')
        replay_memories1 = ReplayMemories(replay_memories1_file_name, max_current_memories_in_ram=100)
        replay_memories1.append(self._create_replay_memory(consequent_reward=34.0))
        replay_memories1.append(self._create_replay_memory(consequent_reward=35.0))
        replay_memories1.save()

        replay_memories2 = ReplayMemories(replay_memories1_file_name, max_current_memories_in_ram=100)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(len(replay_memories2), 2)
def test_simple_store():
    # A stored item is retrievable by hash with its text intact.
    with TempDir() as tmp:
        store = FileStore(dir=tmp)
        original = Item(text="This is a test")
        store.put(original)
        retrieved = store.item(original.hash)
        assert original.hash == retrieved.hash
        assert original.text == retrieved.text
def test_items_in_multiple_files_on_new_session_with_filled_positions_on_first_file(self):
    # Session one fills its files exactly (4 items, 2 per file); session two
    # appends two more.  Iteration must yield all six items in order.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        first_session = IndexedItemsArchive(archive_path, max_items_per_file=2)
        first_session.append_and_save(["item0", "item1", "item2", "item3"])
        second_session = IndexedItemsArchive(archive_path, max_items_per_file=2)
        second_session.append_and_save(["item4", "item5"])
        self.assertEqual(
            list(second_session),
            ["item0", "item1", "item2", "item3", "item4", "item5"])
def test_items_in_multiple_files_tolerate_different_max_items_in_between_sessions(self):
    # The second session opens the archive with a different max_items_per_file
    # (3 instead of 2); appending and iterating must still work.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        first_session = IndexedItemsArchive(archive_path, max_items_per_file=2)
        first_session.append_and_save(["item0", "item1", "item2", "item3"])
        second_session = IndexedItemsArchive(archive_path, max_items_per_file=3)
        second_session.append_and_save(["item4", "item5"])
        self.assertEqual(
            list(second_session),
            ["item0", "item1", "item2", "item3", "item4", "item5"])
def test_purge_older_items_and_reload(self):
    # With max_items_per_file=3 and purge_min_recent_items_to_keep=4, the
    # expected survivors are item3..item7 — i.e. only the oldest full file
    # (item0-item2) is dropped.  A reload must see exactly those items.
    with TempDir() as temp_directory:
        archive_path = os.path.join(temp_directory, 'temp_file.name')
        writer = IndexedItemsArchive(archive_path, max_items_per_file=3)
        writer.append_and_save(
            ["item0", "item1", "item2", "item3",
             "item4", "item5", "item6", "item7"],
            purge_min_recent_items_to_keep=4)
        reloaded = IndexedItemsArchive(archive_path)
        self.assertEqual(list(reloaded),
                         ["item3", "item4", "item5", "item6", "item7"])
def setUp(self):
    # Temp workspace plus a throwaway binary path for the adapter under test;
    # the GPG initializer is mocked so no key material is touched.
    self._tmpdir = TempDir()
    self._tmpbin = NamedTemporaryFile()
    self.mailpile_bin = self._tmpbin.name
    self.root_path = self._tmpdir.name
    self.gpg_initializer = MagicMock()
    self._adapter = MailpileAdapter(
        self.mailpile_bin, None, gpg_initializer=self.gpg_initializer)
    self.runner = ForkRunner(self.root_path, self._adapter)
def __init__(self):
    # Lay out a scratch geodatabase for the fixture: a temp dir holding
    # "test_genegis.gdb" with a "test_spatial" feature class, mirrored into
    # an in_memory copy.
    self.d = TempDir()
    self.dir_path = self.d.name
    self.name = 'test_genegis'
    self.path = os.path.join(self.dir_path, self.name + ".gdb")
    self.input_fc = os.path.join(self.path, "test_spatial")
    self.input_fc_mem = 'in_memory/test_spatial'
    # populate the feature with valid data
    self.create_feature()
    self.feature_to_mem()
def build_pdf(self, source, texinputs=None):
    """Compile *source* to PDF via latexmk (pdflatex or xelatex variant).

    Args:
        source: Object exposing ``temp_saved(suffix=..., dir=...)`` that
            writes the LaTeX source to a temporary file.
        texinputs: Optional list of extra TEXINPUTS search paths.

    Returns:
        The PDF bytes wrapped in ``I`` (``encoding=None``).

    Raises:
        ValueError: if ``self.variant`` is neither 'pdflatex' nor 'xelatex'.
        LatexBuildError: when latexmk fails; chained from the underlying
            ``CalledProcessError``.
    """
    # None-default instead of a mutable [] default argument.
    texinputs = texinputs or []

    with TempDir() as tmpdir,\
            source.temp_saved(suffix='.latex', dir=tmpdir) as tmp:
        # close temp file, so other processes can access it also on Windows
        tmp.close()

        base_fn = os.path.splitext(tmp.name)[0]
        output_fn = base_fn + '.pdf'

        latex_cmd = [
            shlex_quote(self.pdflatex),
            '-interaction=batchmode',
            '-halt-on-error',
            '-no-shell-escape',
            '-file-line-error',
            '%O',
            '%S',
        ]

        if self.variant == 'pdflatex':
            args = [
                self.latexmk,
                '-pdf',
                '-pdflatex={}'.format(' '.join(latex_cmd)),
                tmp.name,
            ]
        elif self.variant == 'xelatex':
            args = [
                self.latexmk,
                '-xelatex',
                tmp.name,
            ]
        else:
            raise ValueError('Invalid LaTeX variant: {}'.format(
                self.variant))

        # create environment
        newenv = os.environ.copy()
        newenv['TEXINPUTS'] = os.pathsep.join(texinputs) + os.pathsep

        # Open the null device via context managers so the three handles
        # are closed even on failure (the original leaked them).
        try:
            with open(os.devnull, 'r') as null_in, \
                    open(os.devnull, 'w') as null_out, \
                    open(os.devnull, 'w') as null_err:
                subprocess.check_call(
                    args,
                    cwd=tmpdir,
                    env=newenv,
                    stdin=null_in,
                    stdout=null_out,
                    stderr=null_err,
                )
        except CalledProcessError as e:
            raise_from(LatexBuildError(base_fn + '.log'), e)

        # Close the output handle deterministically instead of leaking it.
        with open(output_fn, 'rb') as pdf_file:
            return I(pdf_file.read(), encoding=None)