def test_zip_archive_filter():
    """The zip filter should bundle docs (and their filtered outputs) into a zip archive."""
    with tempdir():
        # Create two tiny source files to be archived.
        for filename, source in (("hello.py", "print 'hello'"), ("hello.rb", "puts 'hello'")):
            with open(filename, "w") as f:
                f.write(source)

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        # Fresh wrapper for the actual run.
        wrapper = Wrapper()
        children = [
            Doc("hello.py", wrapper),
            Doc("hello.rb", wrapper),
            Doc("hello.py|pyg", wrapper),
            Doc("hello.rb|pyg", wrapper),
        ]
        doc = Doc("archive.zip|zip", wrapper, children, contents=" ")
        wrapper.run_docs(doc)
        wrapper.report()

        path_exists = os.path.exists("output/archive.zip")
        assert path_exists

        # Verify the archive holds the raw files plus the pygmentized HTML versions.
        z = zipfile.ZipFile("output/archive.zip", "r")
        names = z.namelist()
        assert "archive/hello.py" in names
        assert "archive/hello.rb" in names
        assert "archive/hello.py-pyg.html" in names
        assert "archive/hello.rb-pyg.html" in names
        z.close()
def test_unprocessed_directory_archive_filter():
    """The tgzdir filter should tar up raw files from a directory without processing them."""
    with wrap() as wrapper:
        # Two plain text files in the working dir, not passed through any filter.
        for filename, text in (("abc.txt", "this is abc"), ("def.txt", "this is def")):
            with open(filename, "w") as f:
                f.write(text)

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        doc = Doc("archive.tgz|tgzdir", wrapper, [],
                  contents="ignore",
                  tgzdir={'dir': '.'})
        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")

        # Entries may or may not carry a leading "./" depending on tar behavior.
        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        names = tar.getnames()
        assert ("./abc.txt" in names) or ("abc.txt" in names)
        assert ("./def.txt" in names) or ("def.txt" in names)
        tar.close()
def test_archive_filter_with_short_names():
    """With 'use-short-names', archived filter outputs keep plain names (e.g. .py.html)."""
    with wrap() as wrapper:
        for filename, source in (("hello.py", "print 'hello'"), ("hello.rb", "puts 'hello'")):
            with open(filename, "w") as f:
                f.write(source)

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        doc = Doc("archive.tgz|archive",
                  wrapper,
                  [
                      Doc("hello.py", wrapper),
                      Doc("hello.rb", wrapper),
                      Doc("hello.py|pyg", wrapper),
                      Doc("hello.rb|pyg", wrapper),
                  ],
                  contents=" ",
                  archive={'use-short-names': True})
        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")

        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        names = tar.getnames()
        # Short names: ".py.html" rather than ".py-pyg.html".
        assert "archive/hello.py" in names
        assert "archive/hello.rb" in names
        assert "archive/hello.py.html" in names
        assert "archive/hello.rb.html" in names
        tar.close()
# NOTE(review): a test with this exact name is defined earlier in this file;
# the later definition shadows the earlier one at import time, so only one of
# the two ever runs under nose/pytest collection. Consider renaming one copy.
def test_zip_archive_filter():
    """The zip filter should bundle docs (and their filtered outputs) into a zip archive."""
    with tempdir():
        with open("hello.py", "w") as f:
            f.write("print 'hello'")
        with open("hello.rb", "w") as f:
            f.write("puts 'hello'")

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        doc = Doc(
            "archive.zip|zip",
            wrapper,
            [
                Doc("hello.py", wrapper),
                Doc("hello.rb", wrapper),
                Doc("hello.py|pyg", wrapper),
                Doc("hello.rb|pyg", wrapper),
            ],
            contents=" ",
        )
        wrapper.run_docs(doc)
        wrapper.report()

        path_exists = os.path.exists("output/archive.zip")
        assert path_exists

        archive = zipfile.ZipFile("output/archive.zip", "r")
        entries = archive.namelist()
        expected = (
            "archive/hello.py",
            "archive/hello.rb",
            "archive/hello.py-pyg.html",
            "archive/hello.rb-pyg.html",
        )
        for entry in expected:
            assert entry in entries
        archive.close()
def test_archive_filter():
    """The archive filter should tar/gzip docs and their pygmentized outputs."""
    with wrap() as wrapper:
        for filename, source in (("hello.py", "print 'hello'"), ("hello.rb", "puts 'hello'")):
            with open(filename, "w") as f:
                f.write(source)

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        doc = Doc("archive.tgz|archive",
                  wrapper,
                  [
                      Doc("hello.py", wrapper),
                      Doc("hello.rb", wrapper),
                      Doc("hello.py|pyg", wrapper),
                      Doc("hello.rb|pyg", wrapper),
                  ],
                  contents=" ")
        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")

        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        names = tar.getnames()
        assert "archive/hello.py" in names
        assert "archive/hello.rb" in names
        assert "archive/hello.py-pyg.html" in names
        assert "archive/hello.rb-pyg.html" in names
        tar.close()
# NOTE(review): a test with this exact name is defined earlier in this file;
# the later definition shadows the earlier one, so only one copy ever runs.
# Consider renaming one of them.
def test_unprocessed_directory_archive_filter():
    """The tgzdir filter should tar up raw files from a directory without processing them."""
    with wrap() as wrapper:
        with open("abc.txt", "w") as f:
            f.write('this is abc')
        with open("def.txt", "w") as f:
            f.write('this is def')

        wrapper = Wrapper()
        wrapper.create_dexy_dirs()

        wrapper = Wrapper()
        doc = Doc(
            "archive.tgz|tgzdir",
            wrapper,
            [],
            contents="ignore",
            tgzdir={'dir': '.'},
        )
        wrapper.run_docs(doc)
        wrapper.report()

        assert os.path.exists("output/archive.tgz")

        tar = tarfile.open("output/archive.tgz", mode="r:gz")
        entries = tar.getnames()
        # Accept entries with or without a leading "./" prefix.
        for basename in ("abc.txt", "def.txt"):
            assert ("./" + basename in entries) or (basename in entries)
        tar.close()
def run_from_cache_a_bunch_of_times(): n = random.randint(2, 10) print "running %s times:" % n for i in range(n): print '', i + 1 wrapper = Wrapper(log_level=LOGLEVEL, debug=True) wrapper.run_from_new() for node in wrapper.nodes.values(): assert_node_state(node, 'consolidated', "In iter %s" % i) wrapper.report()
def run_from_cache_a_bunch_of_times(): n = random.randint(2, 10) print "running %s times:" % n for i in range(n): print '', i+1 wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() for node in wrapper.nodes.values(): assert_node_state(node, 'consolidated', "In iter %s" % i) wrapper.report()
def test_example_project(): with tempdir(): def run_from_cache_a_bunch_of_times(): n = random.randint(2, 10) print "running %s times:" % n for i in range(n): print '', i + 1 wrapper = Wrapper(log_level=LOGLEVEL, debug=True) wrapper.run_from_new() for node in wrapper.nodes.values(): assert_node_state(node, 'consolidated', "In iter %s" % i) wrapper.report() example_src = os.path.join(TEST_DATA_DIR, 'example') shutil.copytree(example_src, "example") os.chdir("example") wrapper = Wrapper(log_level=LOGLEVEL) wrapper.create_dexy_dirs() wrapper.run_from_new() wrapper.report() for node in wrapper.nodes.values(): assert_node_state(node, 'ran', "in first run") run_from_cache_a_bunch_of_times() # touch this file so it triggers cache updating os.utime("multiply.py", None) unaffected_keys = ( 'latex', 'pygments.sty|pyg', 's1/loop.py|pycon', 's1/loop.py|py', 'main.rst|idio|h', 'main.rst|idio|l', 'main.rst|pyg|l', 'main.rst|pyg|h', 's1/loop.py|idio|h', 's1/loop.py|idio|l', 's1/loop.py|pyg|l', 's1/loop.py|pyg|h', 'dexy.yaml|idio|h', 'dexy.yaml|idio|l', 'dexy.yaml|pyg|l', 'dexy.yaml|pyg|h', ) affected_keys = ( 'code', 'docs', "*|pyg|l", "*|pyg|h", "*|idio|l", "*|idio|h", "main.rst|jinja|rst|latex", "*.rst|jinja|rst|latex", "*.py|pycon", "*.py|py", "main.rst|jinja|rstbody|easyhtml", "*.rst|jinja|rstbody|easyhtml", "foo.txt", "multiply.py|idio|h", "multiply.py|idio|l", "multiply.py|pycon", "multiply.py|py", "multiply.py|pyg|h", "multiply.py|pyg|l", ) wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() wrapper.report() for node in wrapper.nodes.values(): if node.key in unaffected_keys: assert_node_state(node, 'consolidated', "after touching multiply.py") else: assert node.key in affected_keys, node.key assert_node_state(node, 'ran', "after touchimg multiply.py") run_from_cache_a_bunch_of_times() import time time.sleep(0.5) with open("multiply.py", "r") as f: old_content = f.read() with open("multiply.py", "w") as f: f.write("raise") wrapper = Wrapper(log_level=LOGLEVEL) 
wrapper.run_from_new() assert wrapper.state == 'error' import time time.sleep(0.9) with open("multiply.py", "w") as f: f.write(old_content) wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() for node in wrapper.nodes.values(): if node.key in unaffected_keys: assert_node_state(node, 'consolidated', "after restoring old multiply.py content") else: assert node.key in affected_keys, node.key assert_node_state(node, 'ran', "after restoring old multiply.py contnet") wrapper.remove_dexy_dirs() wrapper.remove_reports_dirs(keep_empty_dir=True) wrapper.create_dexy_dirs() assert len(os.listdir(".dexy")) == 1 wrapper = Wrapper(log_level=LOGLEVEL, dry_run=True) wrapper.run_from_new() wrapper.report() assert len(os.listdir(".dexy")) == 6 with open(".dexy/reports/graph.txt", "r") as f: graph_text = f.read() assert "BundleNode(docs) (uncached)" in graph_text os.chdir("..")
def test_example_project(): with tempdir(): def run_from_cache_a_bunch_of_times(): n = random.randint(2, 10) print "running %s times:" % n for i in range(n): print '', i+1 wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() for node in wrapper.nodes.values(): assert_node_state(node, 'consolidated', "In iter %s" % i) wrapper.report() example_src = os.path.join(TEST_DATA_DIR, 'example') shutil.copytree(example_src, "example") os.chdir("example") wrapper = Wrapper(log_level=LOGLEVEL) wrapper.create_dexy_dirs() wrapper.run_from_new() wrapper.report() for node in wrapper.nodes.values(): assert_node_state(node, 'ran') run_from_cache_a_bunch_of_times() # touch this file so it triggers cache updating os.utime("multiply.py", None) unaffected_keys = ('latex', 'pygments.sty|pyg', 's1/loop.py|pycon', 's1/loop.py|py', 'main.rst|idio|h', 'main.rst|idio|l', 'main.rst|pyg|l', 'main.rst|pyg|h', 's1/loop.py|idio|h', 's1/loop.py|idio|l', 's1/loop.py|pyg|l', 's1/loop.py|pyg|h', 'dexy.yaml|idio|h', 'dexy.yaml|idio|l', 'dexy.yaml|pyg|l', 'dexy.yaml|pyg|h', ) affected_keys = ('code', 'docs', "*|pyg|l", "*|pyg|h", "*|idio|l", "*|idio|h", "main.rst|jinja|rst|latex", "*.rst|jinja|rst|latex", "*.py|pycon", "*.py|py", "main.rst|jinja|rstbody|easyhtml", "*.rst|jinja|rstbody|easyhtml", "foo.txt", "multiply.py|idio|h", "multiply.py|idio|l", "multiply.py|pycon", "multiply.py|py", "multiply.py|pyg|h", "multiply.py|pyg|l", ) wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() wrapper.report() for node in wrapper.nodes.values(): if node.key in unaffected_keys: assert_node_state(node, 'consolidated') else: assert node.key in affected_keys, node.key assert_node_state(node, 'ran') run_from_cache_a_bunch_of_times() import time time.sleep(0.5) with open("multiply.py", "r") as f: old_content = f.read() with open("multiply.py", "w") as f: f.write("raise") wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() assert wrapper.state == 'error' import time time.sleep(0.9) with 
open("multiply.py", "w") as f: f.write(old_content) wrapper = Wrapper(log_level=LOGLEVEL) wrapper.run_from_new() for node in wrapper.nodes.values(): if node.key in unaffected_keys: assert_node_state(node, 'consolidated') else: assert node.key in affected_keys, node.key assert_node_state(node, 'ran') wrapper.remove_dexy_dirs() wrapper.remove_reports_dirs(keep_empty_dir=True) wrapper.create_dexy_dirs() assert len(os.listdir(".dexy")) == 1 wrapper = Wrapper(log_level=LOGLEVEL, dry_run=True) wrapper.run_from_new() wrapper.report() assert len(os.listdir(".dexy")) == 6 with open(".dexy/reports/graph.txt", "r") as f: graph_text = f.read() assert "BundleNode(docs) (uncached)" in graph_text os.chdir("..")
def run(*args, **kwargs):
    """Build a dexy Wrapper with the given arguments, run it, and report."""
    from dexy.wrapper import Wrapper
    w = Wrapper(*args, **kwargs)
    w.run()
    w.report()
def dexy_command(
        artifactsdir=Wrapper.DEFAULT_ARTIFACTS_DIR, # location of directory in which to store artifacts
        conf=Wrapper.DEFAULT_CONFIG_FILE, # name to use for configuration file
        danger=False, # whether to allow running remote files
        dbalias=Wrapper.DEFAULT_DB_ALIAS, # type of database to use
        dbfile=Wrapper.DEFAULT_DB_FILE, # name of the database file (it lives in the logs dir)
        # directory=".", # the directory to process, you can just process a subdirectory of your project
        disabletests=False, # Whether to disable the dexy 'test' filter
        dryrun=Wrapper.DEFAULT_DRYRUN, # if True, just parse config and print batch info, don't run dexy
        exclude=Wrapper.DEFAULT_EXCLUDE, # directories to exclude from dexy processing
        globals=Wrapper.DEFAULT_GLOBALS, # global values to make available within dexy documents, should be KEY=VALUE pairs separated by spaces
        help=False, # for people who type -help out of habit
        h=False, # for people who type -h out of habit
        hashfunction=Wrapper.DEFAULT_HASHFUNCTION, # What hash function to use, set to crc32 or adler32 for more speed but less reliability
        ignore=Wrapper.DEFAULT_IGNORE_NONZERO_EXIT, # whether to ignore nonzero exit status or raise an error - may not be supported by all filters
        logfile=Wrapper.DEFAULT_LOG_FILE, # name of log file
        logformat=Wrapper.DEFAULT_LOG_FORMAT, # format of log entries
        loglevel=Wrapper.DEFAULT_LOG_LEVEL, # log level
        logsdir=Wrapper.DEFAULT_LOG_DIR, # location of directory in which to store logs
        nocache=Wrapper.DEFAULT_DONT_USE_CACHE, # whether to force artifacts to run even if there is a matching file in the cache
        # output=False, # Shortcut to mean "I just want the OutputReporter, nothing else"
        recurse=Wrapper.DEFAULT_RECURSE, # whether to recurse into subdirectories when running Dexy
        reports=Wrapper.DEFAULT_REPORTS, # reports to be run after dexy runs, enclose in quotes and separate with spaces
        # reset=False, # whether to purge existing artifacts and logs before running Dexy
        # run="", # specific document to run. if specified, this document + its dependencies will be all that is run
        silent=False, # Whether to not print any output when running dexy
        # uselocals=True, # use cached local copies of remote URLs, faster but might not be up to date, 304 from server will override this setting
        version=False # For people who type -version out of habit
    ):
    """
    Runs Dexy, by processing your .dexy configuration file and running content
    through the filters you have specified. Results are cached in the
    artifacts/ directory but are presented in a more usable format by
    reporters. Basic reports are run automatically but you can specify
    additional reports. Type 'dexy reporters' for a list of available
    reporters.

    If your project is large, then running reports will start to take up a lot
    of time, so you should specify only the reports you really need. You can
    always run more reports after a batch has finished running (you can run
    historical reports as far back as the last time you cleared out your
    artifacts cache with a 'dexy reset' or similar).

    After running Dexy, the output/ directory will hold what dexy thinks are
    the most important generated files (with pretty filenames), the
    output-long directory will hold all of your generated files (with ugly
    filenames), and the logs/ directory will hold the basic dexy.log logfile
    and also a more colorful and descriptive HTML log file in logs/run-latest/.
    Please look at these logfiles to learn more about how dexy works, and if
    you run into problems the dexy.log file might provide clues as to what has
    gone wrong.

    Your original files will be copied to logs/source-batch-00001/ by the
    SourceReporter (enabled by default). Each time you run dexy, your source
    code files will be copied so you have a mini-version history. (You can
    also use the 'dexy history' command to get a history for a given file, and
    you can run the SourceReporter again at any time to restore a given
    batch's source files.)

    If you run into trouble, visit http://dexy.it/help
    """
    # Alternate modes: -h/-help prints usage, -version prints the version.
    if h or help:
        help_command()
    elif version:
        version_command()
    else:
        # locals() here captures every keyword argument above, which is how
        # the full CLI option set is forwarded to the Wrapper constructor.
        wrapper = Wrapper(**locals())
        import time
        start_time = time.time()
        try:
            wrapper.setup_config()
            wrapper.run()
            wrapper.report()
            print "finished in %0.4f" % (time.time() - start_time)
        except dexy.exceptions.UserFeedback as e:
            # On a user-facing error, clean up any partial state and report
            # the message on stderr before exiting nonzero.
            wrapper.cleanup_partial_run()
            sys.stderr.write(e.message)
            if not e.message.endswith("\n"):
                sys.stderr.write("\n")
            sys.stderr.write("Dexy is stopping.\n")
            sys.exit(1)