def generate_file(tmp_dir, start, stop):
    tmp = Tempfile(folder=tmp_dir)
    tmp.write(bytearray(map(lambda v: v % 256, range(start, stop))))
    tmp.close()
    return tmp
def _generate_files(tmp_files, number, size):
    content = b'1' * size
    files = []
    for i in range(number):
        t = Tempfile()
        tmp_files.append(t)
        t.write(content)
        files.append(t)
    return files
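# Illustrative usage of the two helpers above (a sketch only; Tempdir and
# the Tempfile keywords are taken on trust from the other tests in this
# section):
#
#   tmp_files = []
#   with Tempdir() as tmp_dir:
#       src = generate_file(tmp_dir, 0, 1024)       # 1 KiB, cycling bytes
#       batch = _generate_files(tmp_files, 5, 100)  # five 100-byte files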
def test_get_env(self):
    if os.name == 'nt':
        suffix = ".bat"
    else:
        suffix = ".sh"

    def set_env_var_str(name, value):
        if os.name == 'nt':
            return "set %s=%s\n" % (name, value)
        else:
            return "{name}={value};export {name}\n".format(name=name,
                                                           value=value)

    with Tempfile(suffix=suffix, mode="w+") as script:
        if os.name != 'nt':
            script.write("#!/bin/sh\n")

        script.write(set_env_var_str('TEST_ENV_A', 1))
        script.write(set_env_var_str('TEST_ENV_B', 2))
        script.close()

        os.chmod(script,
                 stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR |
                 stat.S_IRGRP | stat.S_IXGRP |
                 stat.S_IROTH | stat.S_IXOTH)

        env = get_shell_script_env(script, "x86")

        self.assertGreaterEqual(len(env), 2)
        self.assertEqual(env['TEST_ENV_A'], '1')
        self.assertEqual(env['TEST_ENV_B'], '2')
def _test_values_file_speed(self, use_sqlite):
    values = []
    for i in range(20000):
        value = SimpleEntity("http://aql.org/download",
                             name="target_url%s" % i)
        values.append(value)

    with Tempfile() as tmp:
        print("Opening a database '%s' ..." % tmp)

        timer = Chrono()

        with EntitiesFile(tmp, use_sqlite=use_sqlite) as vf:
            with timer:
                keys = vf.add_entities(values)
        print("add values time: %s" % (timer, ))

        with EntitiesFile(tmp, use_sqlite=use_sqlite) as vf:
            with timer:
                keys = vf.add_entities(values)
        print("re-add values time: %s" % (timer, ))

        with EntitiesFile(tmp, use_sqlite=use_sqlite) as vf:
            with timer:
                vf.find_entities_by_key(keys)
        print("get values time: %s" % timer)

        with timer:
            with EntitiesFile(tmp, use_sqlite=use_sqlite) as vf:
                pass
        print("reopen values file time: %s" % timer)
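# Chrono is used above as a reusable timing context manager. A minimal
# sketch of the interface these tests appear to assume (the real class
# may track time differently, e.g. with a monotonic clock):
import time


class Chrono(object):
    def __enter__(self):
        self._start = time.time()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._elapsed = time.time() - self._start
        return False

    def __str__(self):
        return "%.3f sec" % self._elapsed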
def test_values_file(self):
    with Tempfile() as tmp:
        vfile = EntitiesFile(tmp)
        try:
            vfile.self_test()

            value1 = SimpleEntity("http://aql.org/download")
            value2 = SimpleEntity("http://aql.org/download2")
            value3 = SimpleEntity("http://aql.org/download3")
            values = [value1, value2, value3]

            value_keys = vfile.add_entities(values)
            vfile.self_test()

            other_value_keys = vfile.add_entities(values)
            vfile.self_test()

            self.assertItemsEqual(value_keys, other_value_keys)

            values = sorted(values, key=operator.attrgetter('id'))

            s_values = vfile.find_entities(values)
            s_values = sorted(s_values, key=operator.attrgetter('id'))

            self.assertItemsEqual(values, s_values)

            vfile.clear()
            vfile.self_test()

            # -----------------------------------------------------------

            value_keys = vfile.add_entities(values)
            vfile.self_test()

            s_values = vfile.find_entities(values)
            vfile.self_test()

            dep_values = vfile.find_entities_by_key(value_keys)
            vfile.self_test()

            self.assertItemsEqual(s_values, dep_values)

            # -----------------------------------------------------------

            value1_key = vfile.add_entities([value1])[0]
            vfile.self_test()

            s_dep_value = vfile.find_entities_by_key([value1_key])[0]
            self.assertEqual(value1, s_dep_value)

            value1 = SimpleEntity("abc", name=value1.name)

            vfile.add_entities([value1])
            vfile.self_test()

            s_dep_value = vfile.find_entities_by_key(value_keys)
            vfile.self_test()
            self.assertIsNone(s_dep_value)
        finally:
            vfile.close()
def test_value_pickler_speed(self):
    with Tempfile() as tmp:
        vpick = EntityPickler()
        value = FileChecksumEntity(tmp)

        t = lambda vpick=vpick, value=value: \
            vpick.loads(vpick.dumps(value))
        t = timeit.timeit(t, number=10000)
        print("value pickler: %s" % t)

        # compare against the standard pickle module
        t = lambda value=value: \
            pickle.loads(pickle.dumps(value,
                                      protocol=pickle.HIGHEST_PROTOCOL))
        t = timeit.timeit(t, number=10000)
        print("pickle: %s" % t)

        vl = vpick.dumps(value)
        print("vl: %s" % len(vl))

        pl = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
        print("pl: %s" % len(pl))
def test_values_file_same_name(self):
    with Tempfile() as tmp:
        vfile = EntitiesFile(tmp)
        try:
            vfile.self_test()

            value1 = SimpleEntity("test", name="test1")
            value2 = SignatureEntity(b"1234354545", name=value1.name)

            vfile.add_entities([value1, value2])
            vfile.self_test()

            values = [SimpleEntity(name=value1.name),
                      SignatureEntity(name=value2.name)]

            values = vfile.find_entities(values)
            self.assertItemsEqual(values, [value1, value2])

            vfile.close()
            vfile.open(tmp)
            vfile.self_test()
        finally:
            vfile.close()
def test_file_rwlock(self):
    if FileLock is GeneralFileLock:
        skipped("System specific FileLock is not available")

    with Tempfile() as temp_file:
        flock = FileLock(temp_file)

        event = mp.Event()

        p = mp.Process(target=read_process,
                       args=(str(temp_file), event, FileLock))
        p.start()

        event.wait()

        start_time = time.time()
        with flock.write_lock():
            # the child process holds a read lock, so acquiring the
            # write lock must have blocked for a while
            self.assertGreaterEqual(time.time() - start_time, 1)

            with open(temp_file, 'w+b') as file:
                file.write(b'345')
                file.flush()

        p.join()
def test_file_lock_force(self):
    if FileLock is GeneralFileLock:
        skipped("System specific FileLock is not available")

    with Tempfile() as temp_file:
        flock = FileLock(temp_file)

        event = mp.Event()

        p = mp.Process(target=write_process,
                       args=(str(temp_file), event, FileLock))
        p.start()

        event.wait()

        self.assertRaises(ErrorFileLocked, flock.read_lock,
                          wait=False, force=True)
        self.assertRaises(ErrorFileLocked, flock.write_lock,
                          wait=False, force=True)

        p.join()
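# read_process, write_process and skipped() are referenced by the lock
# tests in this section but defined elsewhere. A minimal sketch of the
# assumed behavior: the child acquires the lock, signals the parent via
# the event, and holds the lock long enough for the parent to observe
# the contention (the tests assert a wait of at least 1 second). The
# real helpers may differ.
import time
import unittest


def write_process(filename, event, lock_type):
    flock = lock_type(filename)
    with flock.write_lock():
        event.set()    # tell the parent the lock is now held
        time.sleep(2)  # keep holding it


def read_process(filename, event, lock_type):
    flock = lock_type(filename)
    with flock.read_lock():
        event.set()
        time.sleep(2)


def skipped(reason):
    # assumed to abort the current test via unittest's skip machinery
    raise unittest.SkipTest(reason)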
def test_temp_file(self):
    with Tempfile() as temp_file:
        temp_file.write('1234567890\n1234567890'.encode())
        temp_file.flush()

    # the file is removed once the context manager exits
    self.assertFalse(os.path.isfile(temp_file))
def test_file_value(self):
    with Tempfile() as temp_file:
        test_string = '1234567890'
        temp_file.write(test_string.encode())
        temp_file.flush()

        temp_file_value1 = FileChecksumEntity(temp_file)
        temp_file_value2 = FileChecksumEntity(temp_file)

        self.assertEqual(temp_file_value1, temp_file_value2)
        self.assertTrue(temp_file_value1.is_actual())

        # note: str(reversed(s)) would yield the iterator's repr,
        # not the reversed string
        reversed_test_string = test_string[::-1]
        temp_file.seek(0)
        temp_file.write(reversed_test_string.encode())
        temp_file.flush()

        self.assertFalse(temp_file_value1.is_actual())

        temp_file_value2 = FileChecksumEntity(temp_file_value1)
        self.assertEqual(temp_file_value1.name, temp_file_value2.name)
        self.assertNotEqual(temp_file_value1, temp_file_value2)

        actual_value = temp_file_value1.get_actual()
        self.assertIsNot(actual_value, temp_file_value1)
        self.assertEqual(actual_value.name, temp_file_value1.name)
        self.assertEqual(actual_value.id, temp_file_value1.id)
        self.assertNotEqual(actual_value.signature,
                            temp_file_value1.signature)
def test_value_pickler(self):
    with Tempfile() as tmp:
        tmp_name = str(tmp)

        vpick = EntityPickler()

        value = FileChecksumEntity(tmp)
        vl = vpick.dumps(value)
        v = vpick.loads(vl)
        self.assertEqual(value, v)

        value = FileTimestampEntity(tmp)
        v = vpick.loads(vpick.dumps(value))
        self.assertEqual(value, v)

        value = SimpleEntity('123-345', name=tmp_name)
        v = vpick.loads(vpick.dumps(value))
        self.assertEqual(value, v)

        value = SimpleEntity(name=tmp_name)
        v = vpick.loads(vpick.dumps(value))
        self.assertEqual(value.name, v.name)
        self.assertIsNone(v.signature)
def _test_data_file_update(self, data_file_type):
    with Tempfile() as tmp:
        tmp.remove()

        data_map = generate_data_map(100, 16, 128)
        data_keys = {}

        df = data_file_type(tmp)
        try:
            df.self_test()
            df.clear()
            df.self_test()

            for data_id, data in data_map.items():
                df.write(data_id, data)
                df.self_test()

                stored_data = df.read(data_id)
                self.assertEqual(stored_data, data)

            extend_data_map(data_map)

            for data_id, data in data_map.items():
                df.write(data_id, data)
                df.self_test()

                stored_data = df.read(data_id)
                self.assertEqual(stored_data, data)

            df.close()
            df.self_test()
            df.open(tmp)
            df.self_test()

            for data_id, data in data_map.items():
                stored_data = df.read(data_id)
                self.assertEqual(stored_data, data)

            for data_id, data in data_map.items():
                key = df.write_with_key(data_id, data)
                df.self_test()

                data_keys[data_id] = key

                tmp_data_id = df.get_ids([key])[0]
                self.assertEqual(tmp_data_id, data_id)

                new_key = df.write_with_key(data_id, data)
                df.self_test()

                self.assertGreater(new_key, key)
                self.assertIsNone(df.get_ids([key]))
                self.assertSequenceEqual(df.get_ids([new_key]), [data_id])

                stored_data = df.read(data_id)
                self.assertEqual(stored_data, data)

            for data_id in data_map:
                df.remove((data_id, ))
        finally:
            df.close()
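# generate_data_map and extend_data_map are not defined in this section.
# A plausible sketch, assuming generate_data_map(count, min_size,
# max_size) builds a {data_id: bytes} map with randomly sized payloads
# and extend_data_map mutates every entry so re-writes store new data;
# the real helpers may differ:
import random
import uuid


def generate_data_map(count, min_size, max_size):
    data_map = {}
    for _ in range(count):
        size = random.randint(min_size, max_size)
        data = bytearray(random.randint(0, 255) for _ in range(size))
        data_map[uuid.uuid4().bytes] = data
    return data_map


def extend_data_map(data_map):
    for data_id in data_map:
        data_map[data_id] += b'#'  # grow each entry so it differs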
def test_temp_dir(self):
    with Tempdir() as tmp_dir:
        tmp_dir = Tempdir(root_dir=tmp_dir)

        for i in range(10):
            Tempfile(root_dir=tmp_dir, suffix='.tmp').close()

    # the nested dir lived inside the outer one, which is now removed
    self.assertFalse(os.path.exists(tmp_dir))
def test_temp_file_in_use(self):
    with Tempfile() as temp_file:
        temp_file.remove()

        with open_file(temp_file, write=True, binary=True) as f:
            f.write(b'1234567890')

    # the re-created file must still be cleaned up on context exit
    self.assertFalse(os.path.isfile(temp_file))
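# open_file is used here and in test_temp_mmap below but not defined in
# this section. A minimal sketch covering only the keyword arguments
# these tests exercise (write=True creates/truncates, binary=True opens
# in binary mode); the real helper likely supports more options:
def open_file(filename, write=False, binary=False):
    mode = 'w+' if write else 'r'
    if binary:
        mode += 'b'
    return open(filename, mode)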
def test_node_batch(self):
    with Tempdir() as tmp_dir:
        vfile_name = Tempfile(root_dir=tmp_dir)
        vfile_name.close()

        with EntitiesFile(vfile_name) as vfile:
            src_files = self.generate_source_files(tmp_dir, 5, 100)

            self._rebuild_batch_node(vfile, src_files, len(src_files))
            self._rebuild_batch_node(vfile, src_files, 0)
            self._rebuild_batch_node(vfile, src_files[:-2], 0)
            self._rebuild_batch_node(vfile, src_files[0:1], 0)

            # -----------------------------------------------------------

            write_bin_file(src_files[1], b"src_file1")
            write_bin_file(src_files[2], b"src_file1")

            self._rebuild_batch_node(vfile, src_files, 2)
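# write_bin_file is not defined in this section; a one-line sketch of
# the assumed behavior (overwrite a file with the given bytes):
def write_bin_file(filename, data):
    with open(filename, 'wb') as f:
        f.write(data)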
def test_temp_file_rw(self):
    with Tempfile() as temp_file:
        test_string = '1234567890'
        temp_file.write(test_string.encode())
        temp_file.flush()

        with open(temp_file, "r") as temp_file_rh:
            test_string_read = temp_file_rh.read()
            self.assertEqual(test_string, test_string_read)
def test_general_file_lock_timeout(self):
    with Tempfile() as temp_file:
        flock1 = GeneralFileLock(temp_file)
        flock2 = GeneralFileLock(temp_file, interval=1, timeout=3)

        with flock1.write_lock():
            start_time = time.time()
            self.assertRaises(ErrorFileLocked, flock2.write_lock)
            # flock2 kept retrying until its 3-second timeout expired
            self.assertGreater(time.time() - start_time, 2)
def test_node_batch(self):
    with Tempdir() as tmp_dir:
        vfile_name = Tempfile(dir=tmp_dir)
        vfile_name.close()

        with ValuesFile(vfile_name) as vfile:
            src_files = self.generateSourceFiles(tmp_dir, 5, 100)

            self._rebuildBatchNode(vfile, src_files, len(src_files))
            self._rebuildBatchNode(vfile, src_files, 0)
            self._rebuildBatchNode(vfile, src_files[:-2], 0)
            self._rebuildBatchNode(vfile, src_files[0:1], 0)

            # -----------------------------------------------------------

            writeBinFile(src_files[1], b"src_file1")
            writeBinFile(src_files[2], b"src_file1")

            FileChecksumValue(src_files[1])  # clear cached value
            FileChecksumValue(src_files[2])  # clear cached value

            self._rebuildBatchNode(vfile, src_files, 2)
def test_gcc_compiler_batch_error(self):
    with Tempdir() as tmp_dir:
        build_dir = os.path.join(tmp_dir, 'output')
        src_dir = os.path.join(tmp_dir, 'src')

        os.makedirs(src_dir)

        num_src_files = 5

        src_files, hdr_files = self.generate_cpp_files(
            src_dir, 'foo', num_src_files)

        src_file_orig = Tempfile(folder=tmp_dir)
        src_file_orig.close()

        self.copy_file(src_files[0], src_file_orig)
        self.add_error_to_cpp_file(src_files[0])

        cfg = ProjectConfig(args=["build_dir=%s" % build_dir])
        prj = Project(cfg)

        tools_path = os.path.join(os.path.dirname(__file__), '../../tools')
        cpp = prj.tools.try_tool('g++', tools_path=tools_path)
        if cpp is None:
            print("WARNING: g++ tool has not been found. Skip the test.")
            return

        cpp.Compile(src_files, batch_build=True, batch_groups=1)
        self.build_prj(prj, 0, num_failed_nodes=1)

        self.copy_file(src_file_orig, src_files[0])

        cpp.Compile(src_files)
        self.build_prj(prj, 1)
def generate_file(tmp_dir, size, suffix='.tmp'):
    tmp = Tempfile(root_dir=tmp_dir, suffix=suffix)
    tmp.write(bytearray(random.randint(0, 255) for i in range(size)))
    tmp.close()
    return tmp
def _test_data_file_speed(self, data_file_type):
    with Tempfile() as tmp:
        timer = Chrono()
        with timer:
            data_map = generate_data_map(20000, 123, 123)
        print("generate data time: %s" % timer)

        df = data_file_type(tmp)
        try:
            with timer:
                for data_id, data in data_map.items():
                    df.write_with_key(data_id, data)
            print("add time: %s" % timer)

            df.close()

            with timer:
                df = data_file_type(tmp)
            print("load time: %s" % timer)

            with timer:
                for data_id, data in data_map.items():
                    df.write_with_key(data_id, data)
            print("update time: %s" % timer)

            with timer:
                for data_id in data_map:
                    df.read(data_id)
            print("read time: %s" % timer)

            data_ids = list(data_map)
            remove_data_ids1 = [data_ids[i * 2 + 0]
                                for i in range(len(data_ids) // 2)]
            remove_data_ids2 = [data_ids[i * 2 + 1]
                                for i in range(len(data_ids) // 2)]

            with timer:
                df.remove(remove_data_ids1)
                df.remove(remove_data_ids2)
            print("remove time: %s" % timer)
        finally:
            df.close()
def test_msvc_compiler_batch_error(self):
    with Tempdir() as tmp_dir:
        build_dir = os.path.join(tmp_dir, 'output')
        src_dir = os.path.join(tmp_dir, 'src')

        os.makedirs(src_dir)

        num_src_files = 5

        src_files, hdr_files = self.generateCppFiles(
            src_dir, 'foo', num_src_files)

        src_file_orig = Tempfile(dir=tmp_dir)
        src_file_orig.close()

        self.copyFile(src_files[0], src_file_orig)
        self.addErrorToCppFile(src_files[0])

        cfg = ProjectConfig(args=["build_dir=%s" % build_dir])
        prj = Project(cfg.options, cfg.targets)

        try:
            cpp = prj.tools['msvc++']
        except ErrorToolNotFound:
            print("WARNING: MSVC tool has not been found. Skip the test.")
            return

        cpp.Compile(src_files, batch_build=True, batch_groups=1)
        self.buildPrj(prj, 0, num_failed_nodes=1)

        self.copyFile(src_file_orig, src_files[0])

        cpp.Compile(src_files)
        self.buildPrj(prj, 1)
def test_file_value_save_load(self):
    with Tempfile() as temp_file:
        test_string = '1234567890'
        temp_file.write(test_string.encode())
        temp_file.flush()

        temp_file_value = FileChecksumEntity(temp_file)

        self._test_save_load(temp_file_value)

        file_value = FileChecksumEntity(temp_file)
        self.assertEqual(temp_file_value.name, file_value.name)
        self.assertNotEqual(temp_file_value, file_value)
        self.assertFalse(file_value.is_actual())
def test_read_file_lock(self):
    if FileLock is GeneralFileLock:
        skipped("System specific FileLock is not available")

    with Tempfile() as temp_file:
        flock1 = FileLock(temp_file)
        flock2 = FileLock(temp_file)

        with flock1.read_lock():
            start_time = time.time()

            # a second read lock must not block
            with flock2.read_lock():
                self.assertLess(time.time() - start_time, 1)

                with open(temp_file, 'r+b') as file:
                    file.read()
def test_prj_config(self):
    with Tempfile() as f:
        cfg = b"""
abc = 123
size = 100
options.build_variant = "final"
"""
        f.write(cfg)
        f.flush()

        args = ["-v", "-j", "5", "-c", f]
        cfg = ProjectConfig(args)

        self.assertEqual(cfg.options.bv, 'final')
        self.assertEqual(cfg.jobs, 5)
        self.assertTrue(cfg.verbose)
def test_cli_config_file(self):
    cli_options = (
        CLIOption("-j", "--jobs", "jobs", int, 1, "", 'NUMBER'),
        CLIOption("-s", "--size", "size", int, 256, "", 'NUMBER'),
        CLIOption("-q", "--quite", "quite", bool, False, ""),
        CLIOption("-v", "--verbose", "verbose", bool, False, ""),
    )

    with Tempfile() as f:
        cfg = b"""
abc = 123
size = 100
jobs = 4
options['BUILD'] = "DEBUG"
targets="test1 test2 test3"
"""
        f.write(cfg)
        f.flush()

        config = CLIConfig(cli_options,
                           ["-j", "0", "-v", "foo", "bar",
                            "bv=release", "jobs=10"])
        options = {}
        config.read_file(f, {'options': options})

        self.assertRaises(AttributeError, getattr, config, 'options')
        self.assertEqual(config.abc, 123)
        self.assertEqual(options['BUILD'], 'DEBUG')
        self.assertEqual(config.jobs, 10)
        self.assertEqual(config.size, 100)
        self.assertEqual(config.targets, "foo, bar")

        config = CLIConfig(cli_options,
                           ["-j", "0", "-v", "bv=release", "jobs=10"])
        options = {}
        config.read_file(f, {'options': options})

        self.assertEqual(config.targets, ["test1", "test2", "test3"])

        cli_values = {'abc': 123, 'jobs': 10, 'verbose': True,
                      'quite': False, 'bv': 'release', 'size': 100}
        self.assertEqual(dict(config.items()), cli_values)
def _test_file_lock_no_wait(self, lock_type):
    with Tempfile() as temp_file:
        flock = lock_type(temp_file)

        event = mp.Event()

        p = mp.Process(target=write_process,
                       args=(str(temp_file), event, lock_type))
        p.start()

        event.wait()

        self.assertRaises(ErrorFileLocked, flock.read_lock, wait=False)
        self.assertRaises(ErrorFileLocked, flock.write_lock, wait=False)

        p.join()
def test_file_part_value(self):
    with Tempfile() as temp_file:
        test_string = '1234567890'
        temp_file.write(test_string.encode())
        temp_file.flush()

        temp_file_value1 = FilePartChecksumEntity(temp_file, offset=4)
        temp_file_value2 = FilePartChecksumEntity(temp_file, offset=4)

        self.assertEqual(temp_file_value1, temp_file_value2)
        self.assertTrue(temp_file_value1.is_actual())
        self.assertIs(temp_file_value1.get_actual(), temp_file_value1)

        # a change before the offset must not affect the entity
        temp_file.seek(0)
        temp_file.write("4321".encode())
        temp_file.flush()

        temp_file_value2 = FilePartChecksumEntity(temp_file, offset=4)
        self.assertEqual(temp_file_value1, temp_file_value2)
        self.assertTrue(temp_file_value1.is_actual())
        self.assertIs(temp_file_value1.get_actual(), temp_file_value1)

        # a change at or after the offset invalidates it
        temp_file.seek(4)
        temp_file.write("098765".encode())
        temp_file.flush()

        temp_file_value2 = FilePartChecksumEntity(temp_file_value1,
                                                  offset=4)
        self.assertEqual(temp_file_value1.name, temp_file_value2.name)
        self.assertNotEqual(temp_file_value1, temp_file_value2)
        self.assertFalse(temp_file_value1.is_actual())

        actual_value = temp_file_value1.get_actual()
        self.assertIsNot(actual_value, temp_file_value1)
        self.assertEqual(actual_value.name, temp_file_value1.name)
        self.assertNotEqual(actual_value.signature,
                            temp_file_value1.signature)
def test_general_file_lock_force(self):
    with Tempfile() as temp_file:
        flock = GeneralFileLock(temp_file)

        event = mp.Event()

        p = mp.Process(target=write_process,
                       args=(str(temp_file), event, GeneralFileLock))
        p.start()

        event.wait()

        with flock.read_lock(wait=False, force=True):
            with flock.write_lock(wait=False, force=True):
                pass

        p.join()
def test_temp_mmap(self):
    import mmap

    with Tempfile() as temp_file:
        temp_file.remove()

        with open_file(temp_file, write=True, binary=True) as f:
            f.seek(0)
            f.write(b'\0')
            f.flush()

            mm = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_WRITE)
            mm.close()

            f.seek(0)
            f.write(b"header")
            f.flush()

            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_WRITE) as mem:
                d = range(ord('0'), ord('9'))
                data = bytearray(d)

                end_offset = len(data)
                if end_offset > mem.size():
                    page_size = mmap.ALLOCATIONGRANULARITY
                    new_size = ((end_offset + (page_size - 1)) //
                                page_size) * page_size
                    mem.resize(new_size)

                mem[0:end_offset] = data

                buf = mem[0:end_offset]
                print("buf: %s" % (buf, ))

                buf = mem[0:10]
                print("buf: %s" % (buf, ))

                mem.move(3, 1, 5)

                buf = mem[0:10]
                print("buf: %s" % (buf, ))
def _test_data_file_add(self, data_file_type):
    with Tempfile() as tmp:
        tmp.remove()

        data_map = generate_data_map(2100, 16, 128)

        df = data_file_type(tmp)
        try:
            df.self_test()
            df.clear()
            df.self_test()

            for data_id, data in data_map.items():
                df.write_with_key(data_id, data)
                df.self_test()

                stored_data = df.read(data_id)
                self.assertEqual(stored_data, data)
        finally:
            df.close()
def __test_file_lock_type(self, lock_type):
    with Tempfile() as temp_file:
        flock = lock_type(temp_file)

        event = mp.Event()

        p = mp.Process(target=write_process,
                       args=(str(temp_file), event, lock_type))
        p.start()

        event.wait()

        start_time = time.time()
        with flock.write_lock():
            self.assertGreaterEqual(time.time() - start_time, 1)

            with open(temp_file, 'w+b') as file:
                file.write(b'345')
                file.flush()

        p.join()
def test_file_value_time(self):
    with Tempfile() as temp_file:
        test_string = '1234567890'
        temp_file.write(test_string.encode())
        temp_file.flush()

        temp_file_value1 = FileTimestampEntity(temp_file)
        temp_file_value2 = FileTimestampEntity(temp_file)

        self.assertEqual(temp_file_value1, temp_file_value2)

        time.sleep(2)

        temp_file.seek(0)
        temp_file.write(b"0987654321")
        temp_file.close()

        FileTimestampEntity(temp_file_value1.name)

        self.assertFalse(temp_file_value1.is_actual())

        temp_file_value2 = FileTimestampEntity(temp_file_value1)
        self.assertEqual(temp_file_value1.name, temp_file_value2.name)
        self.assertNotEqual(temp_file_value1, temp_file_value2)