def test_main_dir(files):
    """Round-trip a directory: compress it, extract it, and compare contents."""
    compress.run("dir2", "zipped")
    extract._main("zipped", "dirnew")
    # Both the archive and the extracted directory are new root-level entries.
    files["f_root"] += ["zipped", "dirnew"]
    assert os.listdir("dirnew") == os.listdir("dir2")
    assert_files_changes(files)
def test_main_dir_different_path(files):
    """Extract an archive into a nested target directory and verify contents."""
    src = "dir2"
    archive = "dir2.tgz"
    compress.run(src)
    extract._main(archive, "dir1/dir2")
    files["f_dir1"].append("dir2")
    files["f_root"].append(archive)
    assert os.listdir("dir1/dir2") == os.listdir(src)
    assert_files_changes(files)
def test_parse_output_not_specified(files):
    """Without an explicit output, the archive stem becomes the output name."""
    for archive in ("new.zip", "new.tar.gz"):
        compress.run("file1", archive)
        _, output_file = extract._parse_arguments(archive)
        assert output_file == "new"
def test_main(files):
    """Compress a single file, extract it, and verify byte-identical contents."""
    compress.run("file1")
    extract._main("file1.tgz", "file1extracted")
    with open("file1") as src, open("file1extracted") as dst:
        assert src.read() == dst.read()
    files["f_root"] += ["file1.tgz", "file1extracted"]
    assert_files_changes(files)
def test_run(files):
    """run() must parse arguments before invoking the main compression step."""
    name = "file1"
    manager = Mock()
    with patch(
        "compress._parse_arguments", return_value=(name, name + ".tgz")
    ) as parse_mock, patch("compress._main") as main_mock:
        manager.attach_mock(parse_mock, "mock_parse")
        manager.attach_mock(main_mock, "mock_main")
        compress.run(name)
        # Attaching both mocks to one parent records a single call sequence,
        # so this assertion proves parsing happened before the main step.
        manager.assert_has_calls(
            [call.mock_parse(name, ""), call.mock_main(name, name + ".tgz")]
        )
def run(data_dir, scratch_dir, results_dir):
    """Full pipeline: stage data on scratch, index, build deltas, compress,
    and copy the results home.

    Writes ``time_taken.txt`` (elapsed seconds) and a ``DONE`` marker into
    *results_dir*, then removes the scratch directory.
    """
    start = time.time()
    # The scratch directory's basename doubles as the job identifier.
    job = os.path.basename(scratch_dir)

    print('Copying to scratch')
    db_dir = copy_results(results_dir, scratch_dir)
    warcs_dir = copy_data(data_dir, scratch_dir)

    print('Indexing')
    indexer.index(warcs_dir, os.path.join(db_dir, 'index.db'))

    print('Creating delta versions')
    delta_dir = os.path.join(scratch_dir, 'warcs')
    deltas = delta.run(warcs_dir, delta_dir, db_dir, job)
    # Always include the uncompressed baseline variant.
    deltas.append('no_delta')

    print('Compressing')
    compress.run(deltas, delta_dir, db_dir)

    print('Copying results back to home')
    save_results(db_dir, results_dir)
    shutil.rmtree(scratch_dir)

    elapsed = int(time.time() - start)
    with open(os.path.join(results_dir, 'time_taken.txt'), 'w') as fd:
        fd.write('%d' % elapsed)
    # Empty-ish sentinel file signals successful completion to watchers.
    with open(os.path.join(results_dir, 'DONE'), 'w') as fd:
        fd.write('done\n')
def test_main_different_path(files):
    """Extract a compressed file into a different target directory."""
    compress.run("file1")
    extract._main("file1.tgz", "dir2/file1")
    files["f_dir2"].append("file1")
    files["f_root"].append("file1.tgz")
    assert_files_changes(files)
# NOTE(review): this fragment relies on names bound earlier in the enclosing
# scope (datasets, instrument, telescope, newdata, nbins, repack, export,
# h5py, compress) — presumably the tail of a larger build routine; verify
# against the surrounding code.
del datasets  # release the per-file inputs now that newdata holds the result
attrs = {'INSTRUMENT': instrument, 'TELESCOPE': telescope}
print('combined shape:', newdata.shape)
outfile = '%s.hdf5' % telescope
with h5py.File(outfile, 'w') as f:
    # Store the combined spectra with maximum gzip compression; the shuffle
    # filter typically improves compression of numeric arrays.
    d = f.create_dataset("spectra", data=newdata, compression="gzip",
                         compression_opts=9, shuffle=True)
    for k, v in list(attrs.items()):
        print((' storing attribute %s = %s' % (k, v)))
        d.attrs[k] = v
# repack() appears to rewrite the HDF5 file and return (data, path) —
# TODO confirm its contract.
newdata, repackedfile = repack(outfile)
print('applying PCA ...')
del newdata  # free the large array before the PCA step
componentfile = compress.run(cmd='create', filename=repackedfile)
try:
    print('plotting PCA ...')
    compress.run(cmd='components', filename=repackedfile)
except Exception:
    # Plotting is best-effort; a failure here must not abort the export.
    pass
print('exporting to "%s.json" ...' % telescope.lower())
export(telescope.lower() + '_%d.json' % nbins, componentfile)
print('exporting to "%s.json" ... done' % telescope.lower())