Example 1
def uncrustify_sources(svn_path, svn_co_root, uncrustify_config, uncrustify_path_accept, uncrustify_path_reject):
    ## @brief Uncrustify code before import
    #  @param svn_path Filesystem path to cleaned up SVN checkout
    #  @param svn_co_root Base directory of SVN checkout
    #  @param uncrustify_config Config file for uncrustify pass
    #  @param uncrustify_path_accept Paths to force
    #  @param uncrustify_path_reject Paths to exclude

    for root, dirs, files in os.walk(svn_path):
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_veto = False
            for filter in uncrustify_path_reject:
                if re.match(filter, svn_filename):
                    logger.debug("File {0} will not go through uncrustify".format(svn_filename, filter.pattern))
                    path_veto = True
                    break
            for filter in uncrustify_path_accept:
                if re.match(filter, svn_filename):
                    logger.debug("File {0} will be passed to uncrustify".format(svn_filename, filter.pattern))
                    path_veto = False
                    break
            if path_veto:
                continue
            extension = filename.rsplit(".", 1)[1] if "." in filename else ""
            if extension in ("cxx", "cpp", "icc", "cc", "c", "C", "h", "hpp", "hh"):
                logger.debug("Uncrustifying {0}".format(filename))
                cmd = ("uncrustify", "-c", uncrustify_config, "--no-backup", "-l", "CPP", filename)
                # We do not consider uncrustify errors as fatal for the import... this can
                # happen because of a source file issue or picking the wrong language
                try:
                    check_output_with_retry(cmd, retries=0)
                except RuntimeError:
                    logger.warning("Uncrustify failed on {0}".format(filename))
Example 2
def remove_gpl_block(source_lines, keep_lines, depth):
    '''Get rid of GPL license block'''
    gpl_idx = search_from(source_lines, "GNU General Public License", 0, depth)
    if gpl_idx == -1:
        return

    # OK, found a matching line, now search for start and end of block
    start_idx = search_from(source_lines,
                            r"\*\*\*\*\*\*\*\*",
                            gpl_idx,
                            max=10,
                            backwards=True)
    stop_idx = search_from(source_lines,
                           r"\*\*\*\*\*\*\*\*",
                           gpl_idx,
                           max=10,
                           backwards=False)

    if start_idx == -1 or stop_idx == -1:
        logger.warning(
            "Found GPL trigger line, but failed to find start/end of license block"
        )
        return

    logger.info(
        "Found GPL license block to suppress from lines {0} to {1}".format(
            start_idx + 1, stop_idx + 1))
    for idx in range(start_idx, stop_idx + 1):
        keep_lines[idx] = False
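The search_from() helper used above is not part of this example. Judging only from how it is called (a regex pattern, a start index, a maximum search distance passed positionally or via the max keyword, an optional backwards flag, and -1 returned on failure), a plausible sketch could be:

import re

def search_from(source_lines, pattern, start_idx, max=10, backwards=False):
    # Hypothetical helper: look for a regex match within 'max' lines of
    # start_idx, scanning forwards or backwards; return the index or -1.
    # ('max' mirrors the keyword used in the calls above.)
    step = -1 if backwards else 1
    idx = start_idx
    for _ in range(max):
        if idx < 0 or idx >= len(source_lines):
            break
        if re.search(pattern, source_lines[idx]):
            return idx
        idx += step
    return -1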
Example 3
def scan_svn_tags_and_get_metadata(svnroot, svn_packages, svn_metadata_cache, author_metadata_cache,
                                   all_package_tags=False, package_veto=[]):
    ## @brief Get SVN metadata for each of the package tags we're interested in
    #  @param svnroot URL of SVN repository
    #  @param svn_packages Dictionary of packages and tags to process
    #  @param svn_metadata_cache SVN metadata cache
    #  @param author_metadata_cache author metadata cache with name and email for commits
    #  @param all_package_tags Boolean flag triggering import of all package tags in SVN
    #  @param package_veto List of packages to just plain refuse to handle

    # First we establish the list of tags which we need to deal with.
    for package, package_tags in svn_packages.iteritems():
        logger.info("Preparing package {0} (base tags: {1})".format(package, package_tags))
        if all_package_tags:
            oldest_tag = svn_packages[package][0]
            tags = get_all_package_tags(svnroot, package)
            try:
                package_tags.extend(tags[tags.index(oldest_tag) + 1:])
            except ValueError:
                logger.error("Oldest release tag ({0}) for package {1} not found in SVN!".format(oldest_tag, package))
                sys.exit(1)
        # We need to now sort the package tags and remove any duplicates
        ordered_tags = list(set(package_tags))
        ordered_tags.sort(cmp=svn_tag_cmp)
        svn_packages[package] = ordered_tags

    # Now iterate over the required tags and ensure we have the necessary metadata
    for package, package_tags in svn_packages.iteritems():
        if package in package_veto:
            logger.info("Package {0} is vetoed - skipping SVN metadata import".format(package))
            continue
        package_name = os.path.basename(package)
        package_path = os.path.dirname(package)
        for tag in package_tags:
            # Do we have metadata?
            if package_name not in svn_metadata_cache:
                svn_metadata_cache[package_name] = {"path": package_path, "svn": {}}
            try:
                if tag == "trunk":
                    # We always need to get the metadata for trunk tags as we need to
                    # know the current revision
                    svn_metadata = svn_get_path_metadata(svnroot, package, tag)
                    if tag not in svn_metadata_cache[package_name]["svn"]:
                        svn_metadata_cache[package_name]["svn"][tag] = {svn_metadata["revision"]: svn_metadata}
                    elif svn_metadata["revision"] not in svn_metadata_cache[package_name]["svn"][tag]:
                        svn_metadata_cache[package_name]["svn"][tag][svn_metadata["revision"]] = svn_metadata
                elif tag not in svn_metadata_cache[package_name]["svn"]:
                    svn_metadata = svn_get_path_metadata(svnroot, package, tag)
                    svn_metadata_cache[package_name]["svn"][tag] = {svn_metadata["revision"]: svn_metadata}
                else:
                    svn_metadata = svn_metadata_cache[package_name]["svn"][tag].values()[0]
                if svn_metadata["author"] not in author_metadata_cache:
                    try:
                        author_metadata_cache[svn_metadata["author"]] = author_info_lookup(svn_metadata["author"])
                    except RuntimeError, e:
                        logger.info("Failed to get author information for {0}: {1}".format(package, e))
                        author_metadata_cache[svn_metadata["author"]] = {"name": svn_metadata["author"],
                                                                         "email": "{0}@cern.ch".format(svn_metadata["author"])}
            except RuntimeError:
                logger.warning("Failed to get SVN metadata for {0}".format(os.path.join(package, tag)))
Example 4
def read_project_packages(packages_file, project=None):
    ## @brief Read CMake made packages file
    #  @param packages_file File containing project package tags
    #  @param project Project name
    packages_dict = {}
    package_name_dict = {}
    with open(packages_file) as pfile:
        for line in pfile:
            line = line.strip()
            if line.startswith("#") or line == "":
                continue
            try:
                package, package_tag = line.split(" ")
                # These are odd fish, but keep them if they exist...
                if (package == project + "RunTime" or package
                        == project + "Release") and "/" not in package:
                    package = os.path.join("Projects", package)
                packages_dict[package] = {
                    "svn_tag": package_tag,
                    "project": project,
                    "package_name": os.path.basename(package)
                }
            except ValueError:
                logger.warning(
                    "Problem splitting line '{0}' into package and package tag"
                    .format(line))
    return packages_dict
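The packages file parsed here is expected to hold one "package-path package-tag" pair per line, with blank lines and '#' comments skipped. A hypothetical fragment and call (file content and project name invented) might be:

# packages.txt might contain, for example:
#
#   # Packages built into this project
#   Tools/PyUtils PyUtils-00-14-22
#   Control/AthenaCommon AthenaCommon-02-01-05
#
packages = read_project_packages("packages.txt", project="AtlasCore")
for package, info in packages.items():
    print("{0} -> {1}".format(package, info["svn_tag"]))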
Example 5
def find_cmake_tags(base_path, release, project_path):
    ## @brief Find the tags that went into a CMake release found
    #  at the path specified
    #  @param base_path Starting base path for the release number and flavour
    #  @param release The Athena release number
    #  @param project_path The path element inside each project where the
    #  project is installed
    release_packages = {}
    project_directories = [
        dir for dir in os.listdir(base_path)
        if dir.startswith("Atlas") or dir == "DetCommon"
    ]
    for project in project_directories:
        packages_file = os.path.join(base_path, project, release, project_path,
                                     "packages.txt")
        if not os.path.exists(packages_file):
            logger.warning(
                "Project packages file {0} doesn't exist - skipping this project"
                .format(packages_file))
            continue
        project_packages = read_project_packages(packages_file, project)
        logger.debug("Found {0} packages for project {1}".format(
            len(project_packages), project))
        release_packages.update(project_packages)
    logger.info("Found {0} packages in release {1}".format(
        len(release_packages), release))
    return release_packages
Example 6
def check_output_with_retry(cmd,
                            retries=2,
                            wait=10,
                            ignore_fail=False,
                            dryrun=False):
    ## @brief Multiple attempt wrapper for subprocess.check_output (remote SVN commands in particular can fail transiently)
    #  @param cmd list or tuple of command line parameters
    #  @param retries Number of attempts to execute successfully
    #  @param wait Sleep time after an unsuccessful execution attempt
    #  @param ignore_fail Do not raise an exception if the command fails
    #  @param dryrun If @c True do not actually execute the command, only print it and return an empty string
    #  @return String containing command output
    if dryrun:
        logger.info("Dryrun mode: {0}".format(cmd))
        return ""
    success = failure = False
    tries = 0
    start = time.time()
    while not success and not failure:
        tries += 1
        try:
            logger.debug("Calling {0}".format(cmd))
            output = subprocess.check_output(cmd)
            success = True
        except subprocess.CalledProcessError:
            if ignore_fail:
                success = True
                output = ""
                continue
            logger.warning("Attempt {0} to execute {1} failed".format(
                tries, cmd))
            if tries > retries:
                failure = True
            else:
                time.sleep(wait)
    if failure:
        raise RuntimeError("Repeated failures to execute {0}".format(cmd))
    logger.debug("Executed in {0}s".format(time.time() - start))
    return output
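A typical use, as seen in the other examples on this page, wraps a git or SVN command and treats a RuntimeError as the final failure (the repository URL below is invented):

try:
    # Initial attempt plus two retries, sleeping 10 s between attempts
    output = check_output_with_retry(("svn", "info", "https://svn.example.org/repos/project"),
                                     retries=2, wait=10)
except RuntimeError:
    logger.error("svn info failed after repeated attempts")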
Example 7
def svn_license_injector(svn_path, svn_co_root, license_text, license_path_accept=[], license_path_reject=[]):
    ## @brief Add license statements to code before import
    #  @param svn_path Filesystem path to cleaned up SVN checkout
    #  @param svn_co_root Base directory of SVN checkout
    #  @param license_text List of strings that comprise the license to apply
    #  @param license_path_accept Paths to force include in license file addition (NOT IMPLEMENTED YET)
    #  @param license_path_reject Paths to exclude from license file addition
    for root, dirs, files in os.walk(svn_path):
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_veto = False
            for filter in license_path_reject:
                if re.match(filter, svn_filename):
                    logger.info("File {0} will not have a license file applied".format(svn_filename, filter.pattern))
                    path_veto = True
                    break
            for filter in license_path_accept:
                if re.match(filter, svn_filename):
                    logger.info("File {0} will have a license file applied".format(svn_filename, filter.pattern))
                    path_veto = False
                    break
            if path_veto:
                continue
            # Get the file's mode here to then restore it
            try:
                fmode = os.stat(filename).st_mode
                extension = svn_filename.rsplit(".", 1)[1] if "." in svn_filename else ""
                if extension in ("cxx", "cpp", "icc", "cc", "c", "C", "h", "hpp", "hh"):
                    inject_c_license(filename, license_text)
                    os.chmod(filename, fmode)
                elif extension in ("py", "cmake"):
                    inject_py_license(filename, license_text)
                    os.chmod(filename, fmode)
            except OSError, e:
                # Can happen if a file is a softlink to nowhere
                logger.warning("Got an exception on stating {0}: {1}".format(filename, e))
Example 8
def license_check_file(filename, git_filename, quiet=False):
    with open(filename) as fh:
        counter = 0
        license_concern = False
        for line in fh:
            counter += 1
            if re.search(r"[Cc]opyright", line):
                # Filter out Apache license statements or ATLAS direct copyright
                # (even though the latter is invalid)
                if not ("CERN for the benefit of the ATLAS collaboration"
                        in line or "Atlas Collaboration" in line
                        or "for more information" in line):
                    if quiet:
                        print "- {0}".format(git_filename)
                    else:
                        logger.warning(
                            "Found copyright line in {0} at line {1}: {2}".
                            format(git_filename, counter, line.strip()))
                    license_concern = True
            if re.search(r"[Ll]icense", line):
                # Filter on Apache license statements and misc log messages that
                # contain license
                if not ("Licensed under the Apache License" in line
                        or "You may obtain a copy of the License" in line
                        or "you may not use this file except in compliance"
                        in line or "http://www.apache.org/licenses" in line
                        or "under the License is distributed" in line
                        or "See the License for the specific" in line
                        or "limitations under the License" in line
                        or 'for more information.' in line):
                    if quiet:
                        print "- {0}".format(git_filename)
                    else:
                        logger.warning(
                            "Found license line in {0} at line {1}: {2}".
                            format(git_filename, counter, line.strip()))
                    license_concern = True
    return 1 if license_concern else 0
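The 0/1 return value is designed to be summed by the caller (see Example 12). A minimal call, with invented paths, would be:

worry_files = 0
worry_files += license_check_file("/work/import/MyPackage/src/Tool.cxx",
                                  "MyPackage/src/Tool.cxx", quiet=False)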
Example 9
def main():
    parser = argparse.ArgumentParser(description='git branch constructor')
    parser.add_argument('gitrepo', metavar='GITDIR',
                        help="Location of git repository")
    parser.add_argument('branchname',
                        help="Git branch name to build")
    parser.add_argument('tagfiles', metavar="TAGFILE", nargs="+", 
                        help="Tag files to use to build git branch from")
    parser.add_argument('--parentbranch', metavar="BRANCH:COMMIT or BRANCH:@TIMESTAMP",
                        help="If branch does not yet exist, use this BRANCH to make it from at COMMIT "
                        "(otherwise an orphaned branch is created). The syntax BRANCH:@TIMESTAMP will "
                        "find the commit closest to the given TIMESTAMP.")
    parser.add_argument('--baserelease', metavar="FILE",
                        help="For cache releases, use this tag file as the content of the base release on which "
                        "the release was a cache")
    parser.add_argument('--svnmetadata', metavar="FILE",
                        help="File with SVN metadata per SVN tag in the git repository. "
                        "By default GITREPO.svn.metadata will be used, if it exists.")
    parser.add_argument('--authorcachefile', metavar='FILE',
                        help="File containing cache of author name and email information - default '[gitrepo].author.metadata'")
    parser.add_argument('--skipreleasetag', action="store_true",
                        help="Do not create a git tag for this release, nor skip processing if a release tag "
                        "exists - use this option to add packages to a branch encapsulating an entire "
                        "release series, like 'master'.")
    parser.add_argument('--onlyforward', action="store_true",
                        help="Process tag files as usual, but never "
                        "downgrade a tag to a previous version. This can be used to reconstruct a master branch "
                        "that only goes forward in revision history (it is very useful for the initial master "
                        "branch constuction). In addition branch series releases that overlap with later releases "
                        "will not be imported so that (again) the master branch does not go backwards in time.")
    parser.add_argument("--commitdate", choices=["now", "release", "author"],
                        help="Strategy for setting git committer date: now - leave as current time; "
                        "release - set to time of the current release being processed; author - "
                        "set to author date, as found from SVN (default %(default)s)", default = "release")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="Switch logging into DEBUG mode")
    parser.add_argument('--dryrun', action="store_true",
                        help="Perform no actions, but print what would happen")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)
        
    gitrepo = os.path.abspath(args.gitrepo)
    branch = args.branchname
    
    # If the onlyforward option is set, then we need to preprocess the list of tag content
    # files in order to ensure that we never jump back in time to a previous release series
    # when making the master branch. This is because the earlier release series will be
    # branched off from, and the later releases on that series only make sense on that branch.
    if args.onlyforward:
        logger.debug("Tag files before backskip filter: {0}".format(args.tagfiles))
        args.tagfiles = backskip_filter(args.tagfiles)
        logger.debug("Tag files after backskip filter: {0}".format(args.tagfiles))
    tag_files = [ os.path.abspath(fname) for fname in args.tagfiles ]
    
    # If we have a baserelease tag content, then load that here
    if args.baserelease:
        with open(args.baserelease) as br_tags_fh:
            base_tags = json.load(br_tags_fh)
    else:
        base_tags = None
        
    # Load SVN metadata cache - this is the fastest way to query the SVN ordering in which tags
    # were made
    if not args.svnmetadata:
        if os.access(args.gitrepo + ".svn.metadata", os.R_OK):
            args.svnmetadata = args.gitrepo + ".svn.metadata"
        else:
            logger.error("No SVN metadata cache found - cannot proceed")
            sys.exit(1)
    with open(args.svnmetadata) as cache_fh:
        svn_metadata_cache = json.load(cache_fh)
    logger.info("Loaded SVN metadata from {0}".format(args.svnmetadata))
    
    # Load author cache info
    if not args.authorcachefile:
        args.authorcachefile = args.gitrepo + ".author.metadata"
    try:
        with open(args.authorcachefile) as cache_fh:
            author_metadata_cache = json.load(cache_fh)
    except (IOError, OSError):
        logger.warning("No author metadata found - will proceed without")
        author_metadata_cache = {}
    
    # Main branch reconstruction function
    branch_builder(gitrepo, args.branchname, tag_files, svn_metadata_cache, author_metadata_cache, parentbranch=args.parentbranch, 
                   baserelease=base_tags, skipreleasetag=args.skipreleasetag, dryrun=args.dryrun, only_forward=args.onlyforward,
                   commit_date=args.commitdate)
Example 10
def do_package_import(pkg_import, svn_metadata_cache, author_metadata_cache, release_name="unknown", branch="unknown", 
                      dryrun=False, commit_date="now"):
    ## @brief Import a package's SVN tag onto the current git branch
    #  updating the corresponding git tags
    #  @param pkg_import package import dictionary (see find_packages_for_update for the
    #  structure)
    #  @param svn_metadata_cache The standard metadata cache from SVN
    #  @param author_metadata_cache Cached author data
    #  @param release_name Name of current release being built (used only for generating log messages)
    #  @param branch Current branch name (used only for generating log messages)
    #  @param dryrun Boolean, if @c true then don't actually act
    #  @param commit_date Choices for setting committer date 
    logger.info("Migrating {0} from {1} to {2} for {3}...".format(pkg_import["package"], 
                                                          pkg_import["current_branch_import_tag"], 
                                                          pkg_import["svn_tag"], release_name))
    # Need to wipe out all contents in case files were removed from package
    if not dryrun:
        recursive_delete(pkg_import["package"])
    check_output_with_retry(("git", "checkout", pkg_import["git_import_tag"], pkg_import["package"]), dryrun=dryrun)
    # Splat Changelog file - we do not want these on the production branches
    try:
        os.remove(os.path.join(pkg_import["package"], "ChangeLog"))
    except OSError:
        pass
    # Done - now commit and tag
    if logger.level <= logging.DEBUG:
        cmd = ["git", "status"]
        logger.debug(check_output_with_retry(cmd))
    check_output_with_retry(("git", "add", "-A", pkg_import["package"]), dryrun=dryrun)
    staged = check_output_with_retry(("git", "diff", "--name-only", "--staged"), dryrun=dryrun)
    if len(staged) == 0 and (not dryrun): 
        # Nothing staged, so skip doing any commit, but do make the import tag for this branch
        # so that we don't repeat this step again
        logger.warning("Package {0} - no changes staged for {1}, " 
                       "git tagging and skipping commit".format(pkg_import["package"], release_name))
        check_output_with_retry(("git", "tag", pkg_import["branch_import_tag"]), retries=1, dryrun=dryrun)
        return

    rev_meta = svn_metadata_cache[pkg_import["package_name"]]["svn"][pkg_import["svn_meta_tag_key"]][pkg_import["svn_revision"]]
    msg = rev_meta["msg"]
    if pkg_import["svn_tag"] == "trunk":
        msg += " (trunk r{0})".format(rev_meta["revision"])
    else:
        msg += " ({0})".format(pkg_import["svn_tag"])
    cl_diff = changelog_diff(pkg_import["package"],
                             from_tag="/".join(pkg_import["current_branch_import_tag"].split("/")[1:]) if pkg_import["current_branch_import_tag"] else None,
                             to_tag=pkg_import["git_import_tag"])
    if cl_diff:
        msg += "\n\n" + "\n".join(cl_diff)
    cmd = ["git", "commit", "-m", msg]
    author = author_string(rev_meta["author"], author_metadata_cache)
    cmd.append("--author='{0}'".format(author))
    cmd.append("--date={0}".format(rev_meta["date"]))
    
    if commit_date == "author":
        os.environ["GIT_COMMITTER_DATE"] = rev_meta["date"]
    check_output_with_retry(cmd, retries=1, dryrun=dryrun)
    if commit_date == "author":
        del os.environ["GIT_COMMITTER_DATE"]
    
    check_output_with_retry(("git", "tag", pkg_import["branch_import_tag"]), retries=1, dryrun=dryrun)
    if pkg_import["current_branch_import_tag"]:
        check_output_with_retry(("git", "tag", "-d", pkg_import["current_branch_import_tag"]), retries=1, dryrun=dryrun)
    logger.info("Committed {0} ({1}) onto {2} for {3}".format(pkg_import["package"], 
                                                              pkg_import["svn_tag"], branch, release_name))
Example 11
def svn_license_injector(svn_path,
                         svn_co_root,
                         license_text,
                         license_path_accept=[],
                         license_path_reject=[]):
    ## @brief Add license statements to code before import
    #  @param svn_path Filesystem path to cleaned up SVN checkout
    #  @param svn_co_root Base directory of SVN checkout
    #  @param license_text List of strings that comprise the license to apply
    #  @param license_path_accept Paths to force include in license file addition (NOT IMPLEMENTED YET)
    #  @param license_path_reject Paths to exclude from license file addition
    for root, dirs, files in os.walk(svn_path):
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_veto = False
            for filter in license_path_reject:
                if re.match(filter, svn_filename):
                    logger.debug(
                        "File {0} will not have a license file applied (matched {1})".format(
                            svn_filename, filter.pattern))
                    path_veto = True
                    break
            for filter in license_path_accept:
                if re.match(filter, svn_filename):
                    logger.debug(
                        "File {0} will have a license file applied (matched {1})".format(
                            svn_filename, filter.pattern))
                    path_veto = False
                    break
            if path_veto:
                continue
            # Now see if the license file is already in SVN, as this is happening sometimes
            with open(filename) as fh:
                lines = 0
                licensed = False
                while lines < 10 and not licensed:
                    fline = fh.readline()
                    # Checking the first line of the file should be enough
                    if license_text[0] in fline:
                        licensed = True
                        break
                    lines += 1
            if licensed:
                logger.debug(
                    "File {0} appears to already have a copyright/license "
                    "statement in it".format(svn_filename))
                continue
            # Get the file's mode here to then restore it
            try:
                fmode = os.stat(filename).st_mode
                extension = svn_filename.rsplit(
                    ".", 1)[1] if "." in svn_filename else ""
                if extension in ("cxx", "cpp", "icc", "cc", "c", "C", "h",
                                 "hpp", "hh"):
                    inject_c_license(filename, license_text)
                    os.chmod(filename, fmode)
                elif extension in ("py", "cmake"):
                    inject_py_license(filename, license_text)
                    os.chmod(filename, fmode)
            except OSError, e:
                # Can happen if a file is a softlink to nowhere
                logger.warning("Got an exception on stating {0}: {1}".format(
                    filename, e))
Example 12
def main():
    parser = argparse.ArgumentParser(
        description="License file checker, parsing a git import and "
        "checking for any files that may have had the new ATLAS copyright "
        "and license applied in error. All files are listed, filtered by the current "
        "exceptions and then checked for statements of license or copyright that "
        "indicate a problem.")
    parser.add_argument("--path", help="Path to check (by default check cwd)")
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply license file to (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="Switch logging into DEBUG mode")
    parser.add_argument(
        '--quiet',
        action="store_true",
        default=False,
        help=
        "Only print filenames that have issues for adding to the filter file")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Where to check
    if args.path:
        check_path = args.path
    else:
        check_path = os.getcwd()
    license_path_accept, license_path_reject = load_exceptions_file(
        args.licenseexceptions)

    worry_files = 0
    for root, dirs, files in os.walk(check_path):
        if os.path.basename(root) == ".git":
            continue
        for name in files:
            extension = name.rsplit(".", 1)[1] if "." in name else ""
            if extension not in ("cxx", "cpp", "icc", "cc", "c", "C", "h",
                                 "hpp", "hh", "py", "cmake"):
                continue
            if name == "AtlasInternals.cmake":  # Many false matches, so skip...
                continue
            filename = os.path.join(root, name)
            git_filename = filename[len(check_path) + 1:]
            path_veto = False
            for filter in license_path_reject:
                if re.match(filter, git_filename):
                    logger.debug("File {0} was license file vetoed".format(
                        git_filename))
                    path_veto = True
                    break
            for filter in license_path_accept:
                if re.match(filter, git_filename):
                    logger.debug("File {0} was license file forced".format(
                        git_filename))
                    path_veto = False
                    break
            if path_veto:
                continue
            worry_files += license_check_file(filename, git_filename,
                                              args.quiet)

    if worry_files:
        logger.warning("Found {0} concerning files".format(worry_files))
        sys.exit(1)

    return 0