def test_remove_trash_with_trash():
    """empty_trash() should delete an existing .trash directory."""
    with tempdir():
        w = Wrapper()
        os.mkdir(".trash")
        # Sanity-check the fixture before exercising the method under test.
        assert os.path.exists(".trash")
        w.empty_trash()
        assert not os.path.exists(".trash")
def __enter__(self):
    """Enter a fresh temporary working directory and return a set-up Wrapper.

    Records the original location so __exit__ (elsewhere) can restore it.
    """
    self.tempdir = tempfile.mkdtemp()
    self.location = os.path.abspath(os.curdir)
    os.chdir(self.tempdir)
    w = Wrapper()
    w.setup_run(setup_docs=False)
    return w
def test_archive_filter_with_short_names():
    """With 'use-short-names', archived filtered docs keep short .html names."""
    with wrap() as wrapper:
        with open("hello.py", "w") as fh:
            fh.write("print 'hello'")
        with open("hello.rb", "w") as fh:
            fh.write("puts 'hello'")

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        members = [
            Doc("hello.py", wrapper),
            Doc("hello.rb", wrapper),
            Doc("hello.py|pyg", wrapper),
            Doc("hello.rb|pyg", wrapper),
        ]
        doc = Doc("archive.tgz|archive", wrapper, members,
                  contents=" ",
                  archive={'use-short-names': True})

        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")
        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        names = tar.getnames()
        # Short names drop the "-pyg" filter suffix from the .html outputs.
        for expected in ("archive/hello.py", "archive/hello.rb",
                         "archive/hello.py.html", "archive/hello.rb.html"):
            assert expected in names
        tar.close()
def test_pattern_node():
    """A PatternNode should expand *.txt into one Doc child per matching file,
    propagating its extra args to every child."""
    with wrap() as wrapper:
        for name, text in (("foo.txt", "foo!"), ("bar.txt", "bar!")):
            with open(name, "w") as fh:
                fh.write(text)

        wrapper = Wrapper(log_level='DEBUG')
        wrapper.to_valid()
        # Reset graph state by hand so the pattern expansion runs cleanly.
        wrapper.nodes = {}
        wrapper.roots = []
        wrapper.batch = dexy.batch.Batch(wrapper)
        wrapper.filemap = wrapper.map_files()

        node = PatternNode("*.txt", wrapper, [], foo="bar")
        assert node.args['foo'] == 'bar'
        wrapper.run_docs(node)
        assert len(node.children) == 2

        for child in node.children:
            assert child.__class__.__name__ == "Doc"
            assert child.args['foo'] == 'bar'
            assert child.key_with_class() in ["doc:foo.txt", "doc:bar.txt"]
            assert child.filters == []
def test_zip_archive_filter():
    """The zip filter should bundle raw and filtered docs into output/archive.zip."""
    with tempdir():
        with open("hello.py", "w") as fh:
            fh.write("print 'hello'")
        with open("hello.rb", "w") as fh:
            fh.write("puts 'hello'")

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        members = [
            Doc("hello.py", wrapper),
            Doc("hello.rb", wrapper),
            Doc("hello.py|pyg", wrapper),
            Doc("hello.rb|pyg", wrapper),
        ]
        doc = Doc("archive.zip|zip", wrapper, members, contents=" ")

        wrapper.run_docs(doc)
        wrapper.report()

        path_exists = os.path.exists("output/archive.zip")
        assert path_exists
        z = zipfile.ZipFile("output/archive.zip", "r")
        names = z.namelist()
        # Filtered outputs keep the "-pyg" suffix (no short names here).
        for expected in ("archive/hello.py", "archive/hello.rb",
                         "archive/hello.py-pyg.html", "archive/hello.rb-pyg.html"):
            assert expected in names
        z.close()
def test_unprocessed_directory_archive_filter():
    """tgzdir should tar up raw directory contents without running filters on them.

    NOTE(review): an identically-named test appears later in this module; only
    the last definition is collected by the test runner.
    """
    with wrap() as wrapper:
        for name, text in (("abc.txt", 'this is abc'), ("def.txt", 'this is def')):
            with open(name, "w") as fh:
                fh.write(text)

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        doc = Doc("archive.tgz|tgzdir", wrapper, [],
                  contents="ignore",
                  tgzdir={'dir': '.'})

        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")
        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        names = tar.getnames()
        # Entry names may or may not carry a leading "./" depending on tar behavior.
        assert ("./abc.txt" in names) or ("abc.txt" in names)
        assert ("./def.txt" in names) or ("def.txt" in names)
        tar.close()
def test_yamlargs_filterargs():
    """yamlargs should pass YAML header values through to filterargs output,
    both on a fresh run and again with a brand-new Wrapper."""
    with wrap() as wrapper:
        contents = "%s\n---\r\nThis is the content." % YAML

        doc = Doc("example.txt|yamlargs|filterargs", wrapper, [],
                  contents=contents)
        wrapper.run_docs(doc)
        output = doc.output_data().as_text()
        assert "abc: xyz" in output
        assert "foo: 5" in output

        # Second pass with a fresh wrapper should produce the same output.
        wrapper = Wrapper()
        doc = Doc("example.txt|yamlargs|filterargs", wrapper, [],
                  contents=contents)
        wrapper.run_docs(doc)
        output = doc.output_data().as_text()
        assert "abc: xyz" in output
        assert "foo: 5" in output
def test_wrapper_register():
    """Registering a doc with the wrapper should record it in wrapper.registered."""
    with tempdir():
        doc = Doc("abc.txt")
        w = Wrapper()
        w.setup_run()
        w.register(doc)
        assert doc in w.registered
def test_error_if_to_valid_called_without_dirs_setup():
    """to_valid() without dexy dirs must raise InternalDexyProblem."""
    with tempdir():
        w = Wrapper()
        try:
            w.to_valid()
        except InternalDexyProblem:
            assert True
        else:
            assert False, "should not get here"
def test_assert_dexy_dirs():
    """assert_dexy_dirs_exist() in an empty dir must raise UserFeedback."""
    with tempdir():
        w = Wrapper()
        try:
            w.assert_dexy_dirs_exist()
        except UserFeedback:
            assert True
        else:
            assert False
def test_create_remove_dexy_dirs():
    """Dexy dirs should exist after create and be gone after remove."""
    with tempdir():
        w = Wrapper()
        w.create_dexy_dirs()
        w.to_valid()
        assert w.dexy_dirs_exist()
        w.remove_dexy_dirs()
        assert not w.dexy_dirs_exist()
def test_pdfcrop_filter():
    """pdfcrop|pdfinfo on a sample PDF should produce cached output.

    NOTE(review): an identical test with the same name is defined again later
    in this module; that later definition shadows this one at import time.
    """
    with wrap() as wrapper:
        source = os.path.join(TEST_DATA_DIR, 'color-graph.pdf')
        shutil.copyfile(source, 'example.pdf')
        wrapper = Wrapper()
        node = Doc("example.pdf|pdfcrop|pdfinfo", wrapper)
        wrapper.run_docs(node)
        assert node.output_data().is_cached()
def test_pdfcrop_filter():
    """pdfcrop|pdfinfo on a sample PDF should produce cached output."""
    with wrap() as wrapper:
        pdf_fixture = os.path.join(TEST_DATA_DIR, 'color-graph.pdf')
        shutil.copyfile(pdf_fixture, 'example.pdf')
        wrapper = Wrapper()
        node = Doc("example.pdf|pdfcrop|pdfinfo", wrapper)
        wrapper.run_docs(node)
        assert node.output_data().is_cached()
def test_deprecated_dot_dexy_file():
    """A legacy .dexy config file should trigger a 'no longer supported' message."""
    with tempdir():
        with open(".dexy", 'w') as fh:
            fh.write("{}")
        w = Wrapper()
        try:
            w.assert_dexy_dirs_exist()
        except UserFeedback as e:
            assert "this format is no longer supported" in str(e)
def test_text_parser():
    """TextFile parser should build 8 nodes from three pattern lines.

    Fix: the parse spec had its internal newlines collapsed to spaces
    (" *.py *.py|pyg *.md|jinja "), making it a single unusable line; the
    parser expects one pattern per line. Reconstructed the three-line spec.

    NOTE(review): an identically-named test appears later in this module; only
    the last definition is collected by the test runner.
    """
    with wrap() as wrapper:
        with open("f1.py", "w") as fh:
            fh.write("print 'hello'")
        with open("f2.py", "w") as fh:
            fh.write("print 'hello'")
        with open("index.md", "w") as fh:
            fh.write("")

        wrapper = Wrapper()
        wrapper.to_valid()
        # Reset graph state by hand so parsing starts from a clean slate.
        wrapper.nodes = {}
        wrapper.roots = []
        wrapper.batch = dexy.batch.Batch(wrapper)
        wrapper.filemap = wrapper.map_files()

        ast = AbstractSyntaxTree(wrapper)
        parser = TextFile(wrapper, ast)
        parser.parse(".", """
*.py
*.py|pyg
*.md|jinja
""")
        ast.walk()

        assert len(wrapper.nodes) == 8
def test_subdir_config_with_bundle():
    """Subdirectory dexy.yaml configs are honored by default, skipped with
    recurse=False, and usable explicitly via configs=.

    Fix: both YAML literals had their newlines collapsed ("foo: - .txt"),
    which is not valid YAML (a block sequence cannot start on the key's
    line). Reconstructed the intended multi-line bundle configs.
    """
    with wrap():
        with open("dexy.yaml", "w") as fh:
            fh.write("""
foo:
    - .txt
""")

        os.makedirs("abc/def")
        with open("abc/def/dexy.yaml", "w") as fh:
            fh.write("""
bar:
    - .py
""")

        with open("abc/def/hello.py", "w") as fh:
            fh.write("print 'hello'")

        # Default: recursion picks up the subdirectory config.
        wrapper = Wrapper()
        wrapper.run_from_new()
        assert "doc:abc/def/hello.py" in wrapper.nodes

        # recurse=False: the subdirectory config is ignored.
        wrapper = Wrapper(recurse=False)
        wrapper.run_from_new()
        assert not "doc:abc/def/hello.py" in wrapper.nodes

        # recurse=False but the config named explicitly: picked up again.
        wrapper = Wrapper(recurse=False, configs="abc/def/dexy.yaml")
        wrapper.run_from_new()
        assert "doc:abc/def/hello.py" in wrapper.nodes
def test_script_node_caching__slow():
    """Script nodes run once, come from cache unchanged, and re-run when a
    member script's content changes.

    Fix: the final loop asserted over wrapper1.nodes (already checked above)
    instead of wrapper3.nodes, so the re-run-after-edit behavior was never
    actually verified.
    """
    with wrap():
        with open("start.sh", "w") as fh:
            fh.write("pwd")
        with open("middle.sh", "w") as fh:
            fh.write("echo `time`")
        with open("end.sh", "w") as fh:
            fh.write("echo 'done'")
        with open("dexy.yaml", "w") as fh:
            fh.write(SCRIPT_YAML)

        # First run: everything executes.
        wrapper1 = Wrapper()
        wrapper1.run_from_new()
        for node in wrapper1.nodes.values():
            assert node.state == 'ran'

        # Second run with no changes: everything comes from cache.
        wrapper2 = Wrapper()
        wrapper2.run_from_new()
        for node in wrapper2.nodes.values():
            assert node.state == 'consolidated'

        # Sleep past mtime resolution so the edit below is detected.
        time.sleep(1.1)
        with open("middle.sh", "w") as fh:
            fh.write("echo 'new'")

        # Third run: the changed script invalidates the cache and re-runs.
        wrapper3 = Wrapper()
        wrapper3.run_from_new()
        for node in wrapper3.nodes.values():
            assert node.state == 'ran'
def test_parse_doc_configs_no_configs():
    """With no config files present, dexy should print a friendly notice."""
    with tempdir():
        with capture_stdout() as stdout:
            w = Wrapper()
            w.create_dexy_dirs()

            w = Wrapper()
            w.to_valid()
            w.to_walked()

            printed = stdout.getvalue()
        assert "didn't find any document config files" in printed
def run_from_cache_a_bunch_of_times(): n = random.randint(2, 10) print "running %s times:" % n for i in range(n): print '', i+1 wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() for node in wrapper.nodes.values(): assert_node_state(node, 'consolidated', "In iter %s" % i) wrapper.report()
def test_walked():
    """A wrapper should reach the 'walked' state after to_valid/to_walked."""
    with tempdir():
        with open("dexy.yaml", "w") as fh:
            fh.write("foo.txt")
        with open("foo.txt", "w") as fh:
            fh.write("foo")

        w = Wrapper()
        w.create_dexy_dirs()
        w.to_valid()
        w.to_walked()
        w.validate_state('walked')
def test_move_cache_dir():
    """A legacy .cache dir (with dexy marker) should be moved to .dexy."""
    with capture_stdout() as stdout:
        with tempdir():
            os.mkdir(".cache")
            # The marker file identifies .cache as dexy-generated.
            with open(".cache/.dexy-generated", 'w') as fh:
                fh.write("")

            w = Wrapper()
            w.assert_dexy_dirs_exist()

            assert "Moving directory '.cache'" in stdout.getvalue()
            assert not os.path.exists(".cache")
            assert os.path.exists(".dexy")
def test_unprocessed_directory_archive_filter():
    """tgzdir should tar up raw directory contents without filtering them."""
    with wrap() as wrapper:
        with open("abc.txt", "w") as fh:
            fh.write('this is abc')
        with open("def.txt", "w") as fh:
            fh.write('this is def')

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        doc = Doc("archive.tgz|tgzdir", wrapper, [],
                  contents="ignore",
                  tgzdir={'dir' : '.'})

        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")
        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        names = tar.getnames()
        # Member names may or may not have a "./" prefix.
        assert ("./abc.txt" in names) or ("abc.txt" in names)
        assert ("./def.txt" in names) or ("def.txt" in names)
        tar.close()
def test_parse_doc_configs_single_empty_config():
    """A minimal one-line config should parse without error."""
    with tempdir():
        w = Wrapper()
        w.create_dexy_dirs()

        with open("dexy.yaml", "w") as fh:
            fh.write("foo.txt")
        with open("foo.txt", "w") as fh:
            fh.write("foo")

        w = Wrapper()
        w.to_valid()
        w.to_walked()
def test_caching_virtual_file():
    """Two runs of the same virtual (contents=) doc must yield identical
    artifact hashstrings, proving the cache key is stable."""
    with tempdir():
        wrapper1 = Wrapper()
        doc1 = Doc("abc.txt|dexy",
                   contents = "these are the contents",
                   wrapper=wrapper1)
        wrapper1.docs = [doc1]
        wrapper1.run()

        assert isinstance(doc1.artifacts[0], InitialVirtualArtifact)
        hashstring_0_1 = doc1.artifacts[0].hashstring
        assert isinstance(doc1.artifacts[1], FilterArtifact)
        hashstring_1_1 = doc1.artifacts[1].hashstring

        # Fresh wrapper, identical doc spec.
        wrapper2 = Wrapper()
        doc2 = Doc("abc.txt|dexy",
                   contents = "these are the contents",
                   wrapper=wrapper2)
        wrapper2.docs = [doc2]
        wrapper2.run()

        assert isinstance(doc2.artifacts[0], InitialVirtualArtifact)
        hashstring_0_2 = doc2.artifacts[0].hashstring
        assert isinstance(doc2.artifacts[1], FilterArtifact)
        hashstring_1_2 = doc2.artifacts[1].hashstring

        assert hashstring_0_1 == hashstring_0_2
        assert hashstring_1_1 == hashstring_1_2
def test_caching():
    """Two runs over the same on-disk file must yield identical artifact
    hashstrings, proving the cache key is stable."""
    with tempdir():
        wrapper1 = Wrapper()
        with open("abc.txt", "w") as fh:
            fh.write("these are the contents")

        doc1 = Doc("abc.txt|dexy", wrapper=wrapper1)
        wrapper1.docs = [doc1]
        wrapper1.run()

        assert isinstance(doc1.artifacts[0], InitialArtifact)
        hashstring_0_1 = doc1.artifacts[0].hashstring
        assert isinstance(doc1.artifacts[1], FilterArtifact)
        hashstring_1_1 = doc1.artifacts[1].hashstring

        # Fresh wrapper, same file on disk.
        wrapper2 = Wrapper()
        doc2 = Doc("abc.txt|dexy", wrapper=wrapper2)
        wrapper2.docs = [doc2]
        wrapper2.run()

        assert isinstance(doc2.artifacts[0], InitialArtifact)
        hashstring_0_2 = doc2.artifacts[0].hashstring
        assert isinstance(doc2.artifacts[1], FilterArtifact)
        hashstring_1_2 = doc2.artifacts[1].hashstring

        assert hashstring_0_1 == hashstring_0_2
        assert hashstring_1_1 == hashstring_1_2
def test_batch():
    """A saved batch should appear under .dexy/batches and be reloadable."""
    with tempdir():
        w = Wrapper()
        w.create_dexy_dirs()

        w = Wrapper()
        batch = dexy.batch.Batch(w)
        os.makedirs(batch.batch_dir())
        batch.save_to_file()
        assert batch.filename() in os.listdir(".dexy/batches")

        # Round-trip: most recent batch should load from a fresh wrapper.
        w = Wrapper()
        batch = dexy.batch.Batch.load_most_recent(w)
def test_yamlargs_with_caching():
    """yamlargs metadata (title) must survive caching: first run is 'ran',
    subsequent identical runs are 'consolidated' with the same title."""
    contents = "title: My Title\n---\r\nThis is the content."

    with wrap() as wrapper:
        doc = Doc("example.txt|yamlargs", wrapper, [], contents=contents)
        wrapper.run_docs(doc)
        task = wrapper.nodes["doc:example.txt|yamlargs"]
        assert task.output_data().title() == "My Title"
        assert task.state == 'ran'

        # Second run: served from cache.
        wrapper = Wrapper()
        doc = Doc("example.txt|yamlargs", wrapper, [], contents=contents)
        wrapper.run_docs(doc)
        task = wrapper.nodes["doc:example.txt|yamlargs"]
        assert task.output_data().title() == "My Title"
        assert task.state == 'consolidated'

        # Third run: still cached.
        wrapper = Wrapper()
        doc = Doc("example.txt|yamlargs", wrapper, [], contents=contents)
        wrapper.run_docs(doc)
        task = wrapper.nodes["doc:example.txt|yamlargs"]
        assert task.output_data().title() == "My Title"
        assert task.state == 'consolidated'
def test_archive_filter():
    """The archive filter should tar raw and filtered docs, keeping the
    "-pyg" suffix on filtered outputs (no short names)."""
    with wrap() as wrapper:
        with open("hello.py", "w") as fh:
            fh.write("print 'hello'")
        with open("hello.rb", "w") as fh:
            fh.write("puts 'hello'")

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        members = [
            Doc("hello.py", wrapper),
            Doc("hello.rb", wrapper),
            Doc("hello.py|pyg", wrapper),
            Doc("hello.rb|pyg", wrapper),
        ]
        doc = Doc("archive.tgz|archive", wrapper, members, contents=" ")

        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")
        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        names = tar.getnames()
        for expected in ("archive/hello.py", "archive/hello.rb",
                         "archive/hello.py-pyg.html", "archive/hello.rb-pyg.html"):
            assert expected in names
        tar.close()
def test_text_parser():
    """TextFile parser should build 8 nodes from three pattern lines.

    Fix: the parse spec had its internal newlines collapsed to spaces
    (" *.py *.py|pyg *.md|jinja "), making it a single unusable line; the
    parser expects one pattern per line. Reconstructed the three-line spec.
    """
    with wrap() as wrapper:
        with open("f1.py", "w") as fh:
            fh.write("print 'hello'")
        with open("f2.py", "w") as fh:
            fh.write("print 'hello'")
        with open("index.md", "w") as fh:
            fh.write("")

        wrapper = Wrapper()
        wrapper.to_valid()
        # Reset graph state by hand so parsing starts from a clean slate.
        wrapper.nodes = {}
        wrapper.roots = []
        wrapper.batch = dexy.batch.Batch(wrapper)
        wrapper.filemap = wrapper.map_files()

        ast = AbstractSyntaxTree(wrapper)
        parser = TextFile(wrapper, ast)
        parser.parse(".", """
*.py
*.py|pyg
*.md|jinja
""")
        ast.walk()

        assert len(wrapper.nodes) == 8
def test_init_wrapper_if_dexy_dirs_exist():
    """After setup, walking should map project files but exclude dexy's own
    log file from the filemap."""
    with tempdir():
        w = Wrapper()
        w.create_dexy_dirs()

        with open("hello.txt", "w") as fh:
            fh.write("hello")

        w = Wrapper()
        w.to_valid()
        assert w.project_root
        w.to_walked()
        assert 'hello.txt' in w.filemap
        # The log lives inside .dexy but must not be treated as a project file.
        assert 'dexy.log' in os.listdir('.dexy')
        assert not '.dexy/dexy.log' in w.filemap
def test_old_cache_dir_with_settings():
    """If .cache is explicitly configured as the artifacts dir, it must be
    left in place and a dexy.conf hint printed.

    NOTE(review): an identical test with the same name is defined again later
    in this module; that later definition shadows this one at import time.
    """
    with capture_stdout() as stdout:
        with tempdir():
            os.mkdir(".cache")
            with open(".cache/.dexy-generated", 'w') as fh:
                fh.write("")

            w = Wrapper(artifacts_dir=".cache")
            w.assert_dexy_dirs_exist()

            assert os.path.exists(".cache")
            assert not os.path.exists(".dexy")
            assert "You may have a dexy.conf file" in stdout.getvalue()
def test_casperjs_svg2pdf_filter():
    """svg2pdf should render the sample SVG to a non-trivial cached PDF.

    NOTE(review): an identically-named test (with a SkipTest) appears later in
    this module; only the last definition is collected by the test runner.
    """
    # TODO find smaller file - make test go faster?
    with wrap() as wrapper:
        svg_fixture = os.path.join(TEST_DATA_DIR, 'butterfly.svg')
        shutil.copyfile(svg_fixture, 'butterfly.svg')
        from dexy.wrapper import Wrapper
        wrapper = Wrapper()
        node = Doc("butterfly.svg|svg2pdf", wrapper)
        wrapper.run_docs(node)
        assert node.output_data().is_cached()
        assert node.output_data().filesize() > 1000
def test_hard_tabs_in_config():
    """A config containing a hard tab should produce a helpful UserFeedback
    error naming the offending file."""
    with wrap():
        with capture_stderr() as stderr:
            os.makedirs("abc/def")
            # Literal \t is the point of the test - keep it.
            with open("abc/def/dexy.yaml", "w") as fh:
                fh.write("""foo:\t- .txt""")

            w = Wrapper()
            try:
                w.run_from_new()
            except UserFeedback as e:
                assert "hard tabs" in str(e)

            assert "abc/def/dexy.yaml" in stderr.getvalue()
def test_old_cache_dir_with_settings():
    """If .cache is explicitly configured as the artifacts dir, it must be
    left in place and a dexy.conf hint printed."""
    with capture_stdout() as stdout:
        with tempdir():
            os.mkdir(".cache")
            with open(".cache/.dexy-generated", 'w') as fh:
                fh.write("")

            w = Wrapper(artifacts_dir = ".cache")
            w.assert_dexy_dirs_exist()

            assert os.path.exists(".cache")
            assert not os.path.exists(".dexy")
            assert "You may have a dexy.conf file" in stdout.getvalue()
def test_casperjs_svg2pdf_filter():
    """svg2pdf should render the sample SVG to a non-trivial cached PDF.

    Deliberately skipped: if casperjs is missing the run errors before the
    assertions are reached (see TODO below).
    """
    raise SkipTest()
    # TODO fix this - if casper is missing should raise error before reach assertions
    # TODO find smaller file - make test go faster?
    with wrap() as wrapper:
        svg_fixture = os.path.join(TEST_DATA_DIR, 'butterfly.svg')
        shutil.copyfile(svg_fixture, 'butterfly.svg')
        from dexy.wrapper import Wrapper
        wrapper = Wrapper()
        node = Doc("butterfly.svg|svg2pdf", wrapper)
        wrapper.run_docs(node)
        assert node.output_data().is_cached()
        assert node.output_data().filesize() > 1000
def __enter__(self): # Create a temporary working dir and move to it self.tempdir = tempfile.mkdtemp() self.location = os.path.abspath(os.curdir) os.chdir(self.tempdir) # Create a document. Skip testing documents with inactive filters. try: doc_key = "subdir/example%s|%s" % (self.ext, self.filter_alias) doc_spec = [doc_key, {"contents": self.doc_contents}] wrapper = Wrapper(doc_spec) wrapper.run() except InactiveFilter: print "Skipping tests for inactive filter", self.filter_alias raise SkipTest return wrapper.docs[0]
def grep_command( expr=None, # The expression to search for keyexpr="", # Only search for keys matching this expression, implies keys=True keys=False, # if True, try to list the keys in any found files recurse=False, # if True, recurse into keys to look for sub keys (implies keys=True) artifactsdir=Wrapper.DEFAULT_ARTIFACTS_DIR, # location of directory in which to store artifacts logsdir=Wrapper.DEFAULT_LOG_DIR # location of directory in which to store logs ): """ Search for a Dexy document in the database matching the expression. For sqlite the expression will be wrapped in % for you. """ wrapper = Wrapper(artifactsdir=artifactsdir, logsdir=logsdir) wrapper.setup_read() for row in wrapper.db.query_like("%%%s%%" % expr): print row['key']
def test_config_for_directory():
    """A root .abc config plus an s1-local .def|dexy config should yield 6
    nodes, with subdir docs attached to the root pattern node."""
    with wrap() as wrapper:
        with open("dexy.yaml", "w") as fh:
            fh.write(""".abc""")

        for name in ("root.abc", "root.def"):
            with open(name, "w") as fh:
                fh.write("hello")

        os.makedirs("s1")
        os.makedirs("s2")

        for name in ("s1/s1.abc", "s1/s1.def", "s2/s2.abc", "s2/s2.def"):
            with open(name, "w") as fh:
                fh.write("hello")

        # s1 gets its own config applying |dexy to .def files.
        with open(os.path.join('s1', 'dexy.yaml'), 'w') as fh:
            fh.write(""".def|dexy""")

        wrapper = Wrapper()
        wrapper.to_valid()
        wrapper.to_walked()
        wrapper.to_checked()
        wrapper.run()

        assert len(wrapper.nodes) == 6

        p = wrapper.nodes["pattern:*.abc"]
        c = wrapper.nodes["doc:s2/s2.abc"]
        assert c in p.children
def test_pattern_node_one_filter():
    """A "*.txt|dexy" PatternNode should expand to one child doc carrying
    the dexy filter alias and pointing back at its parent."""
    with wrap() as wrapper:
        with open("foo.txt", "w") as fh:
            fh.write("foo!")

        wrapper = Wrapper(log_level='DEBUG')
        wrapper.to_valid()
        # Reset graph state by hand so the pattern expansion runs cleanly.
        wrapper.nodes = {}
        wrapper.roots = []
        wrapper.batch = dexy.batch.Batch(wrapper)
        wrapper.filemap = wrapper.map_files()

        node = PatternNode("*.txt|dexy", wrapper=wrapper)
        doc = node.children[0]
        assert doc.key == "foo.txt|dexy"
        assert doc.filter_aliases == ['dexy']
        assert doc.parent == node
def run_from_cache_a_bunch_of_times(): n = random.randint(2, 10) print "running %s times:" % n for i in range(n): print '', i + 1 wrapper = Wrapper(log_level=LOGLEVEL, debug=True) wrapper.run_from_new() for node in wrapper.nodes.values(): assert_node_state(node, 'consolidated', "In iter %s" % i) wrapper.report()
def test_batch_with_docs():
    """After a run, the most recent batch should expose input and output
    data for every doc it contains."""
    with tempdir():
        w = Wrapper(log_level='DEBUG', debug=True)
        w.create_dexy_dirs()

        with open("hello.txt", "w") as fh:
            fh.write("hello")
        with open("dexy.yaml", "w") as fh:
            fh.write("hello.txt")

        w = Wrapper()
        w.run_from_new()

        batch = dexy.batch.Batch.load_most_recent(w)
        assert batch

        for doc_key in batch.docs:
            assert batch.input_data(doc_key)
            assert batch.output_data(doc_key)
def test_cache_and_dexy_dirs_present():
    """When both .cache and .dexy carry dexy markers, the user is asked to
    remove .cache; an unmarked .cache is simply ignored."""
    with tempdir():
        os.mkdir(".dexy")
        os.mkdir(".cache")
        for marker in (".dexy/.dexy-generated", ".cache/.dexy-generated"):
            with open(marker, 'w') as fh:
                fh.write("")

        w = Wrapper()
        try:
            w.assert_dexy_dirs_exist()
        except UserFeedback as e:
            assert "Please remove '.cache'" in str(e)

        os.remove(".cache/.dexy-generated")
        w.assert_dexy_dirs_exist()

        # Cache still exists but dexy just ignores it.
        assert os.path.exists(".cache")
        # Dexy uses .dexy dir
        assert os.path.exists(".dexy")
def test_run_dexy(stdout):
    """The dexy.commands.run entry point should work in an initialized dir.

    The stdout parameter is supplied by a capture decorator applied where
    this test is defined/registered.
    """
    with tempdir():
        w = Wrapper()
        w.create_dexy_dirs()
        dexy.commands.run()
def make_wrapper():
    """Return a debug-enabled Wrapper with DEBUG logging for tests."""
    from dexy.wrapper import Wrapper
    return Wrapper(log_level='DEBUG', debug=True)