def get_ternjs_files(project, config=None):
    """
    Returns a list of absolute paths of .js files that match the given
    TernJS config. This method locates all .js files in the project dir
    and applies the "include" and "exclude" patterns from the TernJS config.
    """
    project_path = None
    if isinstance(project, dict):
        project_path = project['id']
        config = project['config']
    else:
        project_path = project

    if config is None:
        config = get_ternjs_config(project_path)

    proj_dir = os.path.dirname(project_path)
    fileset = FileSet(directory=proj_dir,
                      include=config.get('include', ['**/*.js']),
                      exclude=config.get('exclude', None))

    return [resolve_project_file_path(f, proj_dir)
            for f in fileset.qualified_files()]
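# A hedged usage sketch for get_ternjs_files() above; the project path and
# config values are hypothetical placeholders, not part of the original code.
if __name__ == '__main__':
    # Passing a dict supplies both the project file path ('id') and an
    # inline TernJS config, so get_ternjs_config() is never consulted.
    js_files = get_ternjs_files({
        'id': '/path/to/project/.tern-project',
        'config': {'include': ['**/*.js'], 'exclude': ['node_modules/**']},
    })
    print(js_files)
    # Alternatively, pass just the path and let the config be loaded:
    #   js_files = get_ternjs_files('/path/to/project/.tern-project')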
def test_filename_case(self, tmpdir):
    test_dir = tmpdir.mkdir('test')
    lower_dir = test_dir.mkdir('lower')
    lower_dir.join('lower.txt').write('')
    lower_dir.join('UPPER.txt').write('')
    upper_dir = test_dir.mkdir('UPPER')
    upper_dir.join('lower.txt').write('')
    upper_dir.join('UPPER.txt').write('')
    test_dir.join('formic.py').write('')
    test_dir.join('VERSION.txt').write('')
    for test in ["/lower/lower.txt", "lower/UPPER.txt",
                 "UPPER/lower.txt", "UPPER/UPPER.txt"]:
        print("Testing", test)
        found = [f for f in FileSet(include=test, directory=str(test_dir))]
        assert len(found) == 1
        print(" ... found", test)
    if os.name == "posix":
        for test in ["Formic.py", "VERSION.Txt"]:
            print("Testing for non-match of", test)
            found = [f for f in FileSet(include=test, directory=str(test_dir))]
            assert len(found) == 0
def formic_count(directory, pattern):
    if pattern is None:
        pattern = "*"
    fs = FileSet(directory=directory,
                 include="/**/" + pattern,
                 default_excludes=False,
                 symlinks=False)
    lines = sum(1 for file in fs.files())
    print("FileSet found", lines, "files")
    return lines
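# Illustrative calls of formic_count() above; the directory is a placeholder,
# and passing None for the pattern falls back to "*" (count every file):
#
#   n_py = formic_count("/path/to/project", "*.py")
#   n_all = formic_count("/path/to/project", None)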
def test_alternate_walk(self):
    files = ["CVS/error.py", "silly/silly1.txt", "1/2/3.py",
             "silly/silly3.txt", "1/2/4.py", "silly/silly3.txt"]
    fileset = FileSet(include="*.py", walk=walk_from_list(files))
    found = [(dir, file) for dir, file in fileset.files()]
    assert len(found) == 2
    assert ("CVS", "error.py") not in found
    assert (os.path.join("1", "2"), "3.py") in found
    assert (os.path.join("1", "2"), "4.py") in found
def test_rooted(self):
    curdir = os.getcwd()
    full = os.path.dirname(os.path.dirname(__file__))
    drive, dir = os.path.splitdrive(full)
    wild = "**" + os.path.sep + "*.rst"
    os.chdir(full)
    try:
        fileset = FileSet(include=wild, directory=full)
        for filename in fileset.qualified_files():
            print(filename)
        absolute = [filename for filename in
                    FileSet(include=wild, directory=full)]
        relative = [filename for filename in FileSet(include=wild)]
        rooted = [filename for filename in
                  FileSet(include=os.path.join(dir, wild),
                          directory=drive + os.path.sep)]
        assert len(relative) == len(absolute) == len(rooted)
        combined = zip(rooted, relative, absolute)
        for root, rel, abso in combined:
            print(root, "<->", rel, "<->", abso)
            assert root.endswith(rel)
            assert abso.endswith(rel)
    finally:
        os.chdir(curdir)
def test_search_prune_efficiency(self, tmpdir):
    test_dir = tmpdir.mkdir('test')
    lower_dir = test_dir.mkdir('lower')
    lower_dir.join('lower.txt').write('')
    another_dir = tmpdir.mkdir('another')
    yet_another_dir = another_dir.mkdir('yet_another')
    yet_another_dir.join('yet_another.txt').write('')
    print("Absolute, starting at ", tmpdir)
    rooted = FileSet(include="/test/lower/lower.txt",
                     directory=str(tmpdir),
                     default_excludes=False)
    files = [f for f in rooted]
    assert len(files) == 1
    floating = FileSet(include="/*/lower/lower.txt",
                       directory=str(tmpdir),
                       default_excludes=False)
    files = [f for f in floating]
    assert len(files) == 1
    assert rooted._received < floating._received
def get_ternjs_files(project, config=None):
    """
    Returns a list of absolute paths of .js files that match the given
    TernJS config. This method locates all .js files in the project dir
    and applies the "include" and "exclude" patterns from the TernJS config.
    """
    project_path = None
    if isinstance(project, dict):
        project_path = project['id']
        config = project['config']
    else:
        project_path = project

    if config is None:
        config = get_ternjs_config(project_path)

    proj_dir = os.path.dirname(project_path)
    fileset = FileSet(directory=proj_dir,
                      include=config.get('include', ['**/*.js']),
                      exclude=config.get('exclude', None))

    return [f for f in fileset.qualified_files()]
def test_cwd(self):
    fs = FileSet(include="*")
    assert fs.directory is None
    assert os.getcwd() == fs.get_directory()
    directory = os.path.dirname(__file__) + os.path.sep + os.path.sep + os.path.sep
    fs = FileSet(directory=directory, include="*")
    assert fs.directory == os.path.dirname(__file__)
    assert fs.get_directory() == os.path.dirname(__file__)
def make_livereload_server(wsgi_app):
    server = Server(wsgi_app)
    watch_patterns = ("index.rst", "/_static/**")
    build_cmd = "make slides"
    print("Files being monitored:")
    cwd = getcwd()
    for pattern in watch_patterns:
        print("Pattern: ", pattern)
        for filepath in FileSet(include=pattern):
            print("=>", path.relpath(filepath, cwd))
            server.watch(filepath, build_cmd)
    print()
    return server
def make_livereload_server(wsgi_app):
    server = Server(wsgi_app)
    # XXX: a build step could be useful, e.g. making it
    # `python app.py build`, but in this use case it is not
    # really necessary
    build_cmd = "true"
    print("Files being monitored:")
    cwd = getcwd()
    for pattern in WATCH_PATTERNS:
        print("Pattern: ", pattern)
        for filepath in FileSet(include=pattern):
            print("=>", path.relpath(filepath, cwd))
            server.watch(filepath, build_cmd)
    print()
    return server
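# How a server built by make_livereload_server() above is typically started,
# using livereload's Server.serve(); the wsgi_app, port and host values here
# are illustrative assumptions, not part of the original snippet:
#
#   server = make_livereload_server(wsgi_app)
#   server.serve(port=5500, host='localhost')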
def test_bound_root(self):
    """Unit test to pick up Issue #1"""
    original_dir = os.getcwd()
    curdir = os.path.dirname(os.path.dirname(__file__))
    os.chdir(curdir)
    try:
        import glob
        actual = glob.glob("*.py")
        fs = FileSet(include="/*.py", default_excludes=False)
        count = 0
        for file in fs:
            count += 1
            print("File:", file)
            head, tail = os.path.split(file)
            assert curdir == head
            assert tail in actual
            assert tail.endswith(".py")
        assert len(actual) == count
    finally:
        os.chdir(original_dir)
def test_glob_starstar(self):
    files = ["in/test/1.py", "in/a/b/test/2.py", "in/a/b/test",
             "out/a/3.py", "out/a/test.py"]
    fileset = FileSet(include="in/**/test/", walk=walk_from_list(files))
    found = [(dir, file) for dir, file in fileset.files()]
    assert len(found) == 3
    assert (os.path.join("in", "a", "b"), "test") in found
    assert (os.path.join("out", "a"), "test.py") not in found

    files = ["in/test/1test1.py", "in/a/b/test/2test2.py", "in/a/b/4test4",
             "out/a/3.py", "out/a/test.py"]
    fileset = FileSet(include="in/**/*test*/", walk=walk_from_list(files))
    found = [(dir, file) for dir, file in fileset.files()]
    assert len(found) == 3
    assert (os.path.join("in", "a", "b"), "4test4") in found
    assert (os.path.join("out", "a"), "test.py") not in found
def test_basic(self):
    root = os.path.dirname(os.path.dirname(__file__))
    pattern_all = os.path.sep + os.path.join("**", "*")
    pattern_py = os.path.sep + os.path.join("**", "*.py")
    pattern_pyc = os.path.sep + os.path.join("**", "*.pyc")
    pattern_txt = os.path.sep + os.path.join("**", "*.txt")
    print("Formic directory=", root, "include=", pattern_all)
    definitive_count = find_count(root, "*.py")

    fs = FileSet(directory=root, include=pattern_py, symlinks=False)
    files = [os.path.join(root, dir, file) for dir, file in fs.files()]
    assert definitive_count == len(files)
    assert [] == [file for file in files if not os.path.isfile(file)]
    assert files == [file for file in files if file.endswith(".py")]

    fs = FileSet(directory=root,
                 include=pattern_all,
                 exclude=[pattern_pyc, pattern_txt])
    files = [os.path.join(root, dir, file) for dir, file in fs.files()]
    assert definitive_count <= len(files)
    assert [] == [file for file in files if not os.path.isfile(file)]
    assert [] == [file for file in files if file.endswith(".pyc")]
    assert [] == [file for file in files if file.endswith(".txt")]
def main(*kw):
    """Command line entry point; arguments must match those defined in
    :meth:`create_parser()`; returns 0 for success, else 1.

    Example::

        command.main("-i", "**/*.py", "--no-default-excludes")

    Runs formic printing out all .py files in the current working directory
    and its children to ``sys.stdout``.

    If *kw* is None, :func:`main()` will use ``sys.argv``."""
    parser = create_parser()
    args = parser.parse_args(kw if kw else None)
    if args.help:
        parser.print_help()
    elif args.usage:
        print("""Ant Globs
=========

Apache Ant fileset is documented at the Apache Ant project:

    * http://ant.apache.org/manual/dirtasks.html#patterns

Examples
--------

Ant Globs are like simple file globs (they use ? and * in the same way),
but include powerful ways for selecting directories. The examples below
use the Ant glob naming, so a leading slash represents the top of the
search, *not* the root of the file system.

    *.py
        Selects every matching file anywhere in the whole tree.
        Matches /foo.py and /bar/foo.py
        but not /foo.pyc or /bar/foo.pyc

    /*.py
        Selects every matching file in the root of the directory
        (but no deeper).
        Matches /foo.py but not /bar/foo.py

    /myapp/**
        Matches all files under /myapp and below.

    /myapp/**/__init__.py
        Matches all __init__.py files in /myapp and below.

    dir1/__init__.py
        Selects every __init__.py in directory dir1. The dir1
        directory can be anywhere in the directory tree.
        Matches /dir1/__init__.py, /dir3/dir1/__init__.py and
        /dir3/dir2/dir1/__init__.py but not /dir1/another/__init__.py.

    **/dir1/__init__.py
        Same as above.

    /**/dir1/__init__.py
        Same as above.

    /myapp/**/dir1/__init__.py
        Selects every __init__.py in dir1 in the directory tree
        /myapp under the root.
        Matches /myapp/dir1/__init__.py and /myapp/dir2/dir1/__init__.py
        but not /myapp/file.txt and /dir1/file.txt

Default excludes
----------------

Ant FileSet (and Formic) has built-in patterns to screen out a lot of
development 'noise', such as hidden VCS files and directories. The full
list is at:

    * https://formic.readthedocs.io/en/latest/api.html#formic.formic.get_initial_default_excludes

Default excludes can be simply switched off on both the command line and
the API, for example::

    $ formic -i "*.py" -e "__init__.py" "**/*test*/" "test_*" --no-default-excludes
""")
    elif args.version:
        print("formic", get_version())
    elif args.license:
        print(resource_string(__name__, "LICENSE.txt"))
    else:
        try:
            fileset = FileSet(directory=args.directory,
                              include=args.include if args.include else ["*"],
                              exclude=args.exclude,
                              default_excludes=args.default_excludes,
                              symlinks=not args.no_symlinks,
                              casesensitive=not args.insensitive)
        except FormicError as exception:
            parser.print_usage()
            print(exception.message)
            return 1

        prefix = fileset.get_directory()
        for directory, file_name in fileset.files():
            if args.relative:
                sys.stdout.write(".")
            else:
                sys.stdout.write(prefix)
            if directory:
                sys.stdout.write(os.path.sep)
                sys.stdout.write(directory)
            sys.stdout.write(os.path.sep)
            sys.stdout.write(file_name)
            sys.stdout.write("\n")
    return 0
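# A minimal sketch of the Ant glob semantics described in the usage text of
# main() above, using the same FileSet API seen throughout these snippets.
# It assumes formic is installed and is run from some project checkout; the
# patterns mirror the documented "*.py" vs "/*.py" examples.
from formic import FileSet

# "*.py" matches .py files anywhere below the search root ...
everywhere = [f for f in FileSet(include="*.py")]

# ... while a leading slash anchors the pattern at the top of the search,
# so "/*.py" only matches .py files directly inside the starting directory.
top_level_only = [f for f in FileSet(include="/*.py")]

assert set(top_level_only) <= set(everywhere)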
def process_rules(config, basedir, files, **kwargs):
    rules = config.get('mappings', [])
    verbose = config.get('verbose', False)
    dry_run = config.get('dry_run', False)

    # reset state for each file and rule as false/not used
    file_used_state = {}
    for filename in files:
        file_used_state[os.path.join(basedir, filename)] = False
    rule_used_state = []

    # reset all counters
    files_added = 0
    files_skipped = 0
    files_updated = 0
    files_ignored = 0

    # setup jinja2 environment
    loader = jinja2.loaders.FileSystemLoader(basedir)
    env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined)

    # walk through all rules
    for idx, rule in enumerate(rules):
        logger.info('Processing rule {}/{}'.format(idx + 1, len(rules)))
        logger.debug(rule)

        # the base directory we are going to search as specified in the rule
        reference_dir = os.path.abspath(os.path.join(basedir, rule['directory']))
        logger.debug('# reference dir: {}'.format(reference_dir))

        # search files matching rule pattern
        rule_used_state.append(False)
        fileset = FileSet(directory=reference_dir,
                          include=rule.get('include', '*'),
                          exclude=rule.get('exclude', None))

        # walk through all matches
        for filename in fileset:
            logger.debug('# source: {}'.format(filename))
            target = rule.get('target', None)

            # as we found a match, mark file and rule as used
            file_used_state[filename] = True
            rule_used_state[idx] = True

            # filename is absolute, sub_path contains only the part relative to basedir
            sub_path = filename[len(reference_dir) + 1:]
            logger.debug('subpath: {}'.format(sub_path))

            # skip further processing if rule sets ignore=True
            ignore = rule.get('ignore', False)
            if ignore:
                files_ignored += 1
                if verbose:
                    logger.info(' I {}'.format(sub_path))
                continue

            # render source file if template
            is_template = rule.get('template', False)
            if is_template:
                tpl = env.get_template(filename[len(basedir) + 1:])
                rendered = tpl.render(config)
                filename_rendered = filename + '.rendered'
                with open(filename_rendered, 'w') as f:
                    f.write(rendered.encode('utf-8'))
                filename = filename_rendered

            # identify target path including filename
            if target is None:
                target = os.path.join(os.getcwd(), sub_path)
            if isinstance(target, str) or isinstance(target, unicode):
                target = [target]
            logger.debug(target)

            # walk through all targets (more than one destination is allowed)
            for t in target:
                # make sure target is a file
                if t.endswith('/') or os.path.isdir(t):
                    if rule.get('flatten', False):
                        t = os.path.join(t, os.path.basename(sub_path))
                    else:
                        t = os.path.join(t, sub_path)
                logger.debug('# target: {}'.format(t))

                # case 1: target already exists
                if os.path.exists(t):
                    if filecmp.cmp(filename, t):
                        if verbose:
                            logger.info(' S {}'.format(t))
                        files_skipped += 1
                    else:
                        logger.info(' U {}'.format(t))
                        files_updated += 1
                        if not dry_run:
                            copyfile(filename, t)
                # case 2: target is missing and will be added
                else:
                    logger.info(' A {}'.format(t))
                    files_added += 1
                    target_path = os.path.dirname(t)
                    if not os.path.isdir(target_path):
                        os.makedirs(target_path)
                    if not dry_run:
                        copyfile(filename, t)

        # print warning if rule was not used
        if rule_used_state[idx] == 0:
            logger.warn('No files found matching rule')
            logger.warn(' + {}'.format(rule))

    # end of loop over rules

    # print files summary
    extra_text = 'WOULD BE ' if dry_run else ''
    logger.info(
        '{} file(s) {}added, {} updated, {} skipped and {} ignored'.format(
            files_added, extra_text, files_updated, files_skipped,
            files_ignored))

    # print rules summary
    rules_not_used = len(rules) - sum(rule_used_state)
    files_untouched = len(file_used_state) - sum(file_used_state.values())
    level = logging.INFO if rules_not_used == 0 and files_untouched == 0 else logging.WARN
    logger.log(
        level,
        '{} rule(s) found, {} not used, {} file(s) did not match any rule'.
        format(len(rules), rules_not_used, files_untouched))

    # show details if we are in verbose mode
    if verbose:
        for filename in sorted(file_used_state.keys()):
            if not file_used_state[filename]:
                logger.warn(' + {}'.format(filename))

    return
def test_iterator(self):
    fs = FileSet(include="*.py")
    i = fs.__iter__()
    assert {f for f in fs.qualified_files()} == {f for f in i}