Exemplo n.º 1
0
def main():
    parser = argparse.ArgumentParser(description='Diff tag content of tagdiff files')
    parser.add_argument('action', choices=["missing", "versions"],
                        help="missing: show package paths in release 1, but not in release 2; "
                        "versions: show versions that are different between 1 and 2 (for packages in both)")
    parser.add_argument('tagfile1', metavar='RELEASE',
                        help="Tagfile of first release")
    parser.add_argument('tagfile2', metavar='RELEASE',
                        help="Tagfile of second release")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    with open(args.tagfile1) as tf:
        rel_content1 = json.load(tf)
    with open(args.tagfile2) as tf:
        rel_content2 = json.load(tf)

    package_paths1 = set([ tag_info for tag_info in rel_content1["tags"] ])
    package_paths2 = set([ tag_info for tag_info in rel_content2["tags"] ])

    if args.action == "missing":
        missing_packages = package_paths1 - package_paths2
        for pkg in missing_packages:
            print pkg
    elif args.action == "versions":
        common_packages = package_paths1 & package_paths2
        for pkg in common_packages:
            if rel_content1["tags"][pkg]["svn_tag"] != rel_content2["tags"][pkg]["svn_tag"]:
                print rel_content1["tags"][pkg]["svn_tag"], rel_content2["tags"][pkg]["svn_tag"]
Exemplo n.º 2
0
def main():
    """Entry point: relicense each file named on the command line to CERN."""
    parser = argparse.ArgumentParser(description='Relicense source file to CERN')
    parser.add_argument('files', nargs="+", help="Files to relicense")
    parser.add_argument('--depth', type=int, default=20,
                        help="Number of lines from start of the file which can be processed (default %(default)s)")
    parser.add_argument('--nolicense', action="store_true",
                        help="If the standard CERN (C) should not be added to the file. This is only "
                             "applied to C, C++ and python files (default apply license)")
    parser.add_argument('--rename', action="store_true",
                        help="If the new file should overwrite the old one (original file renamed .bak) or "
                             "be left as .relicense (default %(default)s)")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Apply the relicensing operation to every requested file
    for target in args.files:
        relicense(target, not args.nolicense, args.rename, depth=args.depth)
Exemplo n.º 3
0
def main():
    """Build tag evolution files for a CMake release or nightly series."""
    parser = argparse.ArgumentParser(
        description='ATLAS tag munger, calculating tag evolution across '
        'a releases series for CMake releases')
    parser.add_argument(
        'release',
        metavar='RELEASE',
        help="Release to build tagdiff files from, e.g., 21.0 or 21.0.6 or 21.0.X")
    parser.add_argument(
        '--tagdir',
        default="tagdir",
        help="output directory for tag files, each release will generate an entry here")
    parser.add_argument(
        '--installpath',
        help="path to CMake release installation location (defaults to cvmfs path "
        "/cvmfs/atlas.cern.ch/repo/sw/software for releases, "
        "/afs/cern.ch/atlas/software/builds/nightlies for nightlies)")
    parser.add_argument(
        '--nightly',
        help="Generate tag file for the named nightly build in the nightly "
        "release series")
    parser.add_argument(
        '--overwrite',
        action="store_true",
        default=False,
        help="Overwrite any existing configuration files (otherwise, just skip over)")
    parser.add_argument('--arch', help='Force architecture to this value')
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Default install path depends on whether we target a nightly build
    if not args.installpath:
        if args.nightly:
            args.installpath = "/afs/cern.ch/atlas/software/builds/nightlies"
        else:
            args.installpath = "/cvmfs/atlas.cern.ch/repo/sw/software"

    # Ensure the output directory exists before any tag files are written
    if not os.path.exists(args.tagdir):
        try:
            os.makedirs(args.tagdir)
        except OSError as e:  # "except E, e" is deprecated; "as" works on py2.6+
            logger.error("Failed to make directory {0}: {1}".format(
                args.tagdir, e))
            sys.exit(1)
Exemplo n.º 4
0
def main():
    """Parse a CMT requirements file and write its tag content as JSON."""
    parser = argparse.ArgumentParser(
        description='ATLAS CMT tag parser, grabbing tag content for a CMT cache release. '
        'This is quite a hacky script, only filling in the gaps in NICOS knowledge for '
        'ATLAS P1HLT caches.')
    parser.add_argument('release',
                        metavar='RELEASE',
                        help="CMT requirements file to parse")
    parser.add_argument(
        '--tagdir',
        default="tagdir",
        help="output directory for tag files, each release will generate an entry here (default \"tagdir\")")
    parser.add_argument(
        '--overwrite',
        action="store_true",
        default=False,
        help="Overwrite any existing configuration files (otherwise, just skip over)")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Extract release metadata and per-package tags from the requirements file
    release_description = parse_release_data(args.release)
    release_tags = parse_tag_file(args.release)
    logger.info("Processing tags for release {0}".format(
        release_description["name"]))
    output_file = os.path.join(args.tagdir, release_description["name"])
    if args.overwrite or not os.path.exists(output_file):
        # Reuse output_file rather than recomputing the join (previously the
        # path was rebuilt inline, risking divergence from the existence check)
        with open(output_file, "w") as tag_output:
            json.dump({"release": release_description,
                       "tags": release_tags},
                      tag_output,
                      indent=2)
    else:
        logger.debug(
            "Skipped writing to {0} - overwrite is false".format(output_file))
Exemplo n.º 5
0
def main():
    """Merge the tag content of several releases into a target release file."""
    parser = argparse.ArgumentParser(
        description='Merge releases to create a super-release')
    parser.add_argument('targetrelease',
                        metavar='RELEASE',
                        help="Target release")
    parser.add_argument('mergerelease',
                        metavar='RELEASE',
                        nargs="+",
                        help="Releases to merge into target")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    with open(args.targetrelease) as target:
        target_release_data = json.load(target)

    # Merge tags from each extra release; existing target entries always win
    for release in args.mergerelease:
        with open(release) as merge:
            merge_release_data = json.load(merge)
        for package_path, package_data in merge_release_data["tags"].iteritems():
            if package_path not in target_release_data["tags"]:
                target_release_data["tags"][package_path] = package_data
                logger.info("Merged {0} at tag {1} from {2}".format(
                    package_path, package_data["svn_tag"], release))
            else:
                logger.debug("Package {0} already exists in target".format(
                    package_path))

    # Rewrite the target file, keeping the original as a .bak backup
    try:
        os.rename(args.targetrelease, args.targetrelease + ".bak")
        with open(args.targetrelease, "w") as output_fh:
            json.dump(target_release_data, output_fh, indent=2)
    except (IOError, OSError) as e:
        # Python 2 open() raises IOError, which the old "except OSError, e"
        # clause missed; catch both so rename and write failures are reported
        logger.error("Error while rewriting target file {0}: {1}".format(
            args.targetrelease, e))
Exemplo n.º 6
0
def main():
    parser = argparse.ArgumentParser(
        description='Return release list chronologically ordered')
    parser.add_argument('release',
                        metavar='RELEASE',
                        nargs="+",
                        help="Release tag files")
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    release_list = []
    for release in args.release:
        with open(release) as rel_fh:
            release_data = json.load(rel_fh)
        release_list.append((release, release_data))

    release_list.sort(cmp=lambda x, y: cmp(x[1]["release"]["timestamp"], y[1][
        "release"]["timestamp"]))

    if logger.isEnabledFor(logging.DEBUG):
        for release_tuple in release_list:
            logger.debug("Release {0} built {1}".format(
                release_tuple[1]["release"]["name"],
                time.asctime(
                    time.localtime(release_tuple[1]["release"]["timestamp"]))))

    for release_tuple in release_list:
        print release_tuple[0],
    print
Exemplo n.º 7
0
def main():
    parser = argparse.ArgumentParser(description='git branch constructor')
    parser.add_argument('gitrepo', metavar='GITDIR',
                        help="Location of git repository")
    parser.add_argument('branchname',
                        help="Git branch name to build")
    parser.add_argument('tagfiles', metavar="TAGFILE", nargs="+", 
                        help="Tag files to use to build git branch from")
    parser.add_argument('--parentbranch', metavar="BRANCH:COMMIT or BRANCH:@TIMESTAMP",
                        help="If branch does not yet exist, use this BRANCH to make it from at COMMIT "
                        "(otherwise an orphaned branch is created). The syntax BRANCH:@TIMESTAMP will "
                        "find the commit closest to the given TIMESTAMP.")
    parser.add_argument('--baserelease', metavar="FILE",
                        help="For cache releases, use this tag file as the content of the base release on which "
                        "the release was a cache")
    parser.add_argument('--svnmetadata', metavar="FILE",
                        help="File with SVN metadata per SVN tag in the git repository. "
                        "By default GITREPO.svn.metadata will be used, if it exists.")
    parser.add_argument('--authorcachefile', metavar='FILE',
                        help="File containing cache of author name and email information - default '[gitrepo].author.metadata'")
    parser.add_argument('--skipreleasetag', action="store_true",
                        help="Do not create a git tag for this release, nor skip processing if a release tag "
                        "exists - use this option to add packages to a branch encapsulating an entire "
                        "release series, like 'master'.")
    parser.add_argument('--onlyforward', action="store_true",
                        help="Process tag files as usual, but never "
                        "downgrade a tag to a previous version. This can be used to reconstruct a master branch "
                        "that only goes forward in revision history (it is very useful for the initial master "
                        "branch constuction). In addition branch series releases that overlap with later releases "
                        "will not be imported so that (again) the master branch does not go backwards in time.")
    parser.add_argument("--commitdate", choices=["now", "release", "author"],
                        help="Strategy for setting git committer date: now - leave as current time; "
                        "release - set to time of the current release being processed; author - "
                        "set to author date, as found from SVN (default %(default)s)", default = "release")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="Switch logging into DEBUG mode")
    parser.add_argument('--dryrun', action="store_true",
                        help="Perform no actions, but print what would happen")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)
        
    gitrepo = os.path.abspath(args.gitrepo)
    branch = args.branchname
    
    # If the onlyforward option is set, then we need to preprocess the list of tag content
    # files in order to ensure that we never jump across time to a previous release series 
    # when making the master branch. This is because the earlier release series will be 
    # branched off from and the later releases on that series really only make sense 
    # for the branch
    if args.onlyforward:
        print args.tagfiles
        args.tagfiles = backskip_filter(args.tagfiles)
        print args.tagfiles    
    tag_files = [ os.path.abspath(fname) for fname in args.tagfiles ]
    
    # If we have a baserelease tag content, then load that here
    if args.baserelease:
        with open(args.baserelease) as br_tags_fh:
            base_tags = json.load(br_tags_fh)
    else:
        base_tags = None
        
    # Load SVN metadata cache - this is the fastest way to query the SVN ordering in which tags
    # were made
    if not args.svnmetadata and os.access(args.gitrepo + ".svn.metadata", os.R_OK):
        args.svnmetadata = args.gitrepo + ".svn.metadata"
    else:
        logger.error("No SVN metadata cache found - cannot proceed")
        sys.exit(1)
    with open(args.svnmetadata) as cache_fh:
        svn_metadata_cache = json.load(cache_fh)
    logger.info("Loaded SVN metadata from {0}".format(args.svnmetadata))
    
    # Load author cache info
    if not args.authorcachefile:
        args.authorcachefile = args.gitrepo + ".author.metadata"
    try:
        with open(args.authorcachefile) as cache_fh:
            author_metadata_cache = json.load(cache_fh)
    except OSError:
        logger.warning("No author metadata found - will proceed without")
        author_metadata_cache = {}
    
    # Main branch reconstruction function
    branch_builder(gitrepo, args.branchname, tag_files, svn_metadata_cache, author_metadata_cache, parentbranch=args.parentbranch, 
                   baserelease=base_tags, skipreleasetag=args.skipreleasetag, dryrun=args.dryrun, only_forward=args.onlyforward,
                   commit_date=args.commitdate)
Exemplo n.º 8
0
def main():
    """Pull one or more SVN package revisions into the current athena git clone."""
    parser = argparse.ArgumentParser(
        description=textwrap.dedent('''\
                                    Pull a package revision from SVN and apply to the current athena
                                    git repository.

                                    Run this script from inside the athena git repository clone to
                                    be updated.

                                    SVN package revisions are usually specified as
                                    - A simple package name, which means import the package trunk,
                                      e.g., xAODMuon imports Event/xAOD/xAODMuon/trunk

                                    - A package tag, which imports that SVN tag, e.g., xAODMuon-00-18-01
                                      imports Event/xAOD/xAODMuon/tags/xAODMuon-00-18-01

                                    Some more advanced specifiers can be used for special cases:
                                    - A tag name + "-branch" will import the corresponding development
                                      branch, e.g., xAODMuon-00-11-04-branch will import
                                      Event/xAOD/xAODMuon/branches/xAODMuon-00-11-04-branch

                                    - A package path + SVN sub path, PACKAGEPATH+SVNSUBPATH, where
                                      PACKAGEPATH is the path to the package root in SVN and git and
                                      SVNSUBPATH is the path to the SVN version to import; e.g.,
                                      Reconstruction/RecJobTransforms+devbranches/RecJobTransforms_RAWtoALL
                                      (note the plus sign!) will import the SVN path
                                      Reconstruction/RecJobTransforms/devbranches/RecJobTransforms_RAWtoALL
                                      to Reconstruction/RecJobTransforms

                                    The final specifier is only needed if the package to be imported is
                                    not in your current git checkout or if you want to import an unusual
                                    SVN revision, such as a development branch.

                                    The --files specifier can be used to import only some files or paths
                                    to git, with globs supported, e.g.,

                                      --files src/* MyPackage/*.h share/important_file.py

                                    For consistency all options applied during the primary ATLAS SVN to
                                    git migration are re-applied by default.
                                    '''),
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        'svnpackage',
        nargs="+",
        help=
        "SVN package to import, usually a plain package name or tag (see above)"
    )
    parser.add_argument(
        '--files',
        nargs="+",
        help=
        "Only package files matching the values specified here are imported (globs allowed). "
        "This can be used to import only some files from the SVN package and will "
        "disable the normal --svnfilterexceptions matching.",
        default=[])
    parser.add_argument(
        '--revision',
        type=int,
        default=0,
        help="Work at specific SVN revision number instead of HEAD")
    parser.add_argument(
        '--svnroot',
        metavar='SVNDIR',
        help="Location of the SVN repository (defaults to %(default)s)",
        default="svn+ssh://svn.cern.ch/reps/atlasoff")
    parser.add_argument(
        '--svnfilterexceptions',
        '--sfe',
        metavar="FILE",
        help=
        "File listing path globs to exempt from SVN import filter (lines with '+PATH') or "
        "to always reject (lines with '-PATH'); default %(default)s. "
        "It is strongly recommended to keep the default value to ensure consistency "
        "with the official ATLAS migration.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasoffline-exceptions.txt"))
    parser.add_argument(
        '--licensefile',
        metavar="FILE",
        help="License file to add to C++ and python source code "
        "files (default %(default)s). "
        "It is strongly recommended to keep the default value to ensure consistency "
        "with the official ATLAS migration. Use NONE to disable if it is really necessary.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "cerncopy.txt"))
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply license file to (same format as --svnfilterexceptions). "
        "It is strongly recommended to keep the default value to ensure consistency "
        "with the official ATLAS migration.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument(
        '--debug',
        '--verbose',
        "-v",
        action="store_true",
        help="Switch logging into DEBUG mode (default is WARNING)")
    parser.add_argument(
        '--info',
        action="store_true",
        help="Switch logging into INFO mode (default is WARNING)")

    # Parse and handle initial arguments
    # (--debug is checked after --info, so --debug wins if both are given)
    args = parser.parse_args()
    if args.info:
        logger.setLevel(logging.INFO)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    # Load the SVN import accept/reject globs; ChangeLog files are always rejected
    svn_path_accept, svn_path_reject = load_exceptions_file(
        args.svnfilterexceptions, reject_changelog=True)

    # --files filtering only makes sense for a single package import
    if len(args.svnpackage) > 1 and args.files:
        logger.error(
            "You have specified multiple SVN packages and to filter on package files "
            "to import, which almost certainly will not work - aborting")
        sys.exit(1)

    # Check that we do seem to be in a git repository
    gitrepo = find_git_root()
    if not gitrepo:
        logger.fatal(
            "Not a git repository (or any of the parent directories), run from inside a clone of the athena repository."
        )
        sys.exit(1)
    os.chdir(gitrepo)

    # License file loading ("NONE" disables license injection entirely)
    if args.licensefile and args.licensefile != "NONE":
        with open(args.licensefile) as lfh:
            license_text = [line.rstrip() for line in lfh.readlines()]
    else:
        license_text = None
    if args.licenseexceptions:
        license_path_accept, license_path_reject = load_exceptions_file(
            args.licenseexceptions)
    else:
        license_path_accept = license_path_reject = []

    # Map package names to paths
    package_path_dict = map_package_names_to_paths()

    # Now loop over each package we were given
    try:
        for svn_package in args.svnpackage:
            full_clobber = True
            package_name, package, svn_package_path = get_svn_path_from_tag_name(
                svn_package, package_path_dict)
            # If we have a --files option then redo the accept/reject paths here
            # (as the package path needs to be prepended it needs to happen in this loop)
            if args.files:
                # Reject everything by default, then accept only the requested globs
                full_clobber = False
                svn_path_reject = [re.compile(fnmatch.translate("*"))]
                svn_path_accept = []
                for glob in args.files:
                    package_glob = os.path.join(
                        package_path_dict[package_name], glob)
                    logger.debug(
                        "Will accept files matching {0}".format(package_glob))
                    svn_path_accept.append(
                        re.compile(fnmatch.translate(package_glob)))
                logger.debug("{0}".format([m.pattern
                                           for m in svn_path_accept]))
            logger.debug("Will import {0} to {1}, SVN revision {2}".format(
                os.path.join(package, svn_package_path),
                package_path_dict[package_name],
                "HEAD" if args.revision == 0 else args.revision))
            # Checkout from SVN and commit the imported content onto the git repo
            svn_co_tag_and_commit(
                args.svnroot,
                gitrepo,
                package,
                svn_package_path,
                full_clobber,
                svn_path_accept=svn_path_accept,
                svn_path_reject=svn_path_reject,
                revision=args.revision,
                license_text=license_text,
                license_path_accept=license_path_accept,
                license_path_reject=license_path_reject,
            )
    except RuntimeError as e:
        logger.error(
            "Got a RuntimeError raised when processing package {0} ({1}). "
            "Usually this is caused by a failure to checkout from SVN, meaning you "
            "specified a package tag that does not exist, or even a package that "
            "does not exist. See --help for how to specify what to import.".
            format(svn_package, e))
        sys.exit(1)

    # Final guidance to the user on reviewing and committing the changes
    print textwrap.fill(
        "Pull from SVN succeeded. Use 'git status' to check which files "
        "have been changed and 'git diff' to review the changes in detail. "
        "When you are happy with your changes commit with a good commit message - "
        "as an update has been done from SVN it is recommended to give the "
        "SVN tag in the one line commit summary.")
Exemplo n.º 9
0
import argparse
import fnmatch
import logging
import os
import os.path
import shutil
import stat
import subprocess
import sys
import re
import textwrap
import tempfile
import time

from glogger import logger
logger.setLevel(logging.WARNING)


def check_output_with_retry(cmd,
                            retries=2,
                            wait=10,
                            ignore_fail=False,
                            dryrun=False):
    ## @brief Multiple attempt wrapper for subprocess.check_call (especially remote SVN commands can bork)
    #  @param cmd list or tuple of command line parameters
    #  @param retries Number of attempts to execute successfully
    #  @param wait Sleep time after an unsuccessful execution attempt
    #  @param ignore_fail Do not raise an exception if the command fails
    #  @param dryrun If @c True do not actually execute the command, only print it and return an empty string
    #  @return String containing command output
    if dryrun:
Exemplo n.º 10
0
def main():
    parser = argparse.ArgumentParser(description='Plotter for release dates')
    parser.add_argument(
        'tagfiles',
        nargs="+",
        metavar='TAGFILE',
        help="List of release tag content files to add to the plot")
    parser.add_argument('--text',
                        action='store_true',
                        help="Output text summary of release dates")
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="Switch logging into DEBUG mode")

    # Parse and handle arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    summary = []
    by_series = {}
    for release in args.tagfiles:
        with open(release) as release_fh:
            release_data = json.load(release_fh)
        mini_dict = {
            "series":
            "{0}.{1}".format(release_data["release"]["series"],
                             release_data["release"]["flavour"]),
            "name":
            release_data["release"]["name"],
            "timestamp":
            release_data["release"]["timestamp"],
            "date":
            datetime.date.fromtimestamp(release_data["release"]["timestamp"])
        }
        summary.append(mini_dict)
        s = mini_dict["series"]
        if s not in by_series:
            by_series[s] = {"x": [], "y": [], "name": []}
        by_series[s]["x"].append(
            datetime.date.fromtimestamp(mini_dict["timestamp"]))
        by_series[s]["y"].append(float(mini_dict["series"]))
        by_series[s]["name"].append(".".join(mini_dict["name"].split(".")[2:]))

    if args.text:
        for r in summary:
            print r

    # Now arrange by release...
    for series, data in by_series.iteritems():
        print data
        plt.plot(data["x"], data["y"], "ro")
        plt.text(data["x"][0] - datetime.timedelta(21), data["y"][0] + 0.1,
                 series)
        for x, y, n in zip(data["x"], data["y"], data["name"]):
            plt.text(x, y + 0.1, "." + n)
    plt.xlabel("Date")
    plt.ylabel("Release Series")
    plt.title("Base Release Build Dates")
    plt.show()
Exemplo n.º 11
0
def main():
    """Build tag evolution files for a release series from NICOS tag lists."""
    parser = argparse.ArgumentParser(
        description=
        'ATLAS tag munger, calculating tag evolution across a releases series')
    parser.add_argument(
        'release',
        metavar='RELEASE',
        nargs="+",
        help=
        "Files containing tag lists (NICOS format). If a release series/major is given (e.g., 20.1 or 20.1.5) "
        "the script will search for the base release and all caches to build the tag files in "
        "a simple way, without worrying about the details of the NICOS tag files and paths (N.B. "
        "in the rare cases when there is more than one tag file for a release, the last one will "
        "be used).")
    parser.add_argument(
        '--tagdir',
        default="tagdir",
        help=
        "output directory for tag files, each release will generate an entry here (default \"tagdir\")"
    )
    parser.add_argument(
        '--prefix',
        help=
        "Prefix for the name of the release, when the NICOS information is insufficient"
    )
    parser.add_argument(
        '--nicospath',
        default="/afs/cern.ch/atlas/software/dist/nightlies/nicos_work/tags/",
        help="path to NICOS tag files (defaults to usual CERN AFS location)")
    parser.add_argument(
        '--analysispkgfilter',
        action="store_true",
        help=
        "Special post processing for the (Ath)AnalysisBase-2.6.X release series, which "
        "filters tags to be only those which are missing from standard Athena releases"
    )
    parser.add_argument(
        '--overwrite',
        action="store_true",
        default=False,
        help="Overwrite any existing configuration files (otherwise, just skip over)")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Case when a single base release is given - we have to expand this
    # to the base release plus all of its caches
    if len(args.release) == 1 and re.match(r"(\d+)\.(\d+)(\.(\d+))?$",
                                           args.release[0]):
        nicos_paths = find_nicos_from_base(args.nicospath, args.release[0])
    else:
        # Otherwise accept each argument as-is, or relative to the NICOS path
        nicos_paths = []
        for path in args.release:
            if os.path.exists(path):
                nicos_paths.append(path)
            elif os.path.exists(os.path.join(args.nicospath, path)):
                nicos_paths.append(os.path.join(args.nicospath, path))
            else:
                logger.error(
                    "Path {0} doesn't exist (even after prepending NICOS path)"
                    .format(path))
                sys.exit(1)

    for release in nicos_paths:
        release_description = parse_release_data(release, args.prefix)
        release_tags = parse_tag_file(release, args.analysispkgfilter)
        logger.info("Processing tags for release {0}".format(
            release_description["name"]))
        output_file = os.path.join(args.tagdir, release_description["name"])
        if args.overwrite or not os.path.exists(output_file):
            # Reuse output_file rather than recomputing the join (previously the
            # path was rebuilt inline, risking divergence from the existence check)
            with open(output_file, "w") as tag_output:
                json.dump(
                    {
                        "release": release_description,
                        "tags": release_tags
                    },
                    tag_output,
                    indent=2)
        else:
            logger.debug("Skipped writing to {0} - overwrite is false".format(
                output_file))
Exemplo n.º 12
0
def main():
    """Migrate ATLAS SVN package tags into a git repository.

    Parses the command line, loads the various filter/veto/license/
    uncrustify configuration files, scans SVN for tag metadata (re-using
    cached metadata where available), then imports each tag into git in
    ascending SVN revision order.  Optionally dumps per-revision timing
    information to a JSON file.

    Exits via sys.exit(1) indirectly through helpers on fatal errors.
    """
    parser = argparse.ArgumentParser(
        description='SVN to git migrator, ATLAS style')
    parser.add_argument('svnroot',
                        metavar='SVNDIR',
                        help="Location of svn repository root")
    parser.add_argument('gitrepo',
                        metavar='GITDIR',
                        help="Location of git repository")
    parser.add_argument(
        'tagfiles',
        nargs="+",
        metavar='TAGFILE',
        help=
        "List of release tag content files to process - all tags found in these files will "
        "be imported (any already imported tags will be skipped)")
    parser.add_argument(
        '--targetbranch',
        default="package",
        help=
        "Target git branch for import. Default is the special value 'package' in which "
        "each package is imported onto its own branch")
    parser.add_argument(
        '--svnpath',
        metavar='PATH',
        nargs='+',
        default=[],
        help="Restrict actions to this list of paths in the SVN tree (use to "
        "make small scale tests of the import workflow).")
    parser.add_argument(
        '--intermediatetags',
        action="store_true",
        help=
        "Import all tags from oldest release tag found, instead of just release tags"
    )
    parser.add_argument(
        '--processtrunk',
        action="store_true",
        help=
        "Update trunk versions during the import (False by default, the trunk will be skipped)."
    )
    parser.add_argument(
        '--svncachefile',
        metavar='FILE',
        help=
        "File containing cache of SVN information - default '[gitrepo].svn.metadata'"
    )
    parser.add_argument(
        '--authorcachefile',
        metavar='FILE',
        help=
        "File containing cache of author name and email information - default '[gitrepo].author.metadata'"
    )
    parser.add_argument(
        '--importtimingfile',
        metavar="FILE",
        help=
        "File to dump SVN->git import timing information - default '[gitrepo]-timing.json'"
    )
    parser.add_argument(
        '--svnfilterexceptions',
        '--sfe',
        metavar="FILE",
        help=
        "File listing path globs to exempt from SVN import filter (lines with '+PATH') or "
        "to always reject (lines with '-PATH'); default %(default)s. Use NONE to have no exceptions.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasoffline-exceptions.txt"))
    parser.add_argument(
        '--packageveto',
        metavar="FILE",
        help="File listing packages that will be skipped completely on import.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaspackage-exceptions.txt"))
    parser.add_argument(
        '--licensefile',
        metavar="FILE",
        help="License file to add to source code files (default "
        "is to add %(default)s license file)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "cerncopy.txt"))
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply license file to (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument(
        '--uncrustify',
        metavar="FILE",
        help="Uncrustify configuration file to use to process C++ "
        "sources through before git import (by default uncrustify will not be used)"
    )
    parser.add_argument(
        '--uncrustifyexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply uncrustify to (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasuncrustify-exceptions.txt"))
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="Switch logging into DEBUG mode")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Massage default values that depend on the git repo name
    if not args.svncachefile:
        args.svncachefile = os.path.basename(args.gitrepo) + ".svn.metadata"
    if not args.authorcachefile:
        args.authorcachefile = os.path.basename(
            args.gitrepo) + ".author.metadata"
    if not args.importtimingfile:
        args.importtimingfile = os.path.basename(args.gitrepo) + "-timing.json"

    # Set svnroot and git repo, get some starting values
    svnroot = args.svnroot
    gitrepo = os.path.abspath(args.gitrepo)
    start_cwd = os.getcwd()
    start_timestamp_string = time.strftime("%Y%m%dT%H%M.%S")
    logger.debug("Set SVN root to {0} and git repo to {1}".format(
        svnroot, gitrepo))

    # Load accept/reject globs for the SVN import filter
    svn_path_accept, svn_path_reject = load_exceptions_file(
        args.svnfilterexceptions)

    # Load package vetos (packages skipped completely on import)
    if args.packageveto:
        package_veto = load_package_veto(args.packageveto)
    else:
        package_veto = []

    # License file loading - license_text of None means "do not add a license"
    if args.licensefile:
        with open(args.licensefile) as lfh:
            license_text = [line.rstrip() for line in lfh.readlines()]
    else:
        license_text = None
    if args.licenseexceptions:
        license_path_accept, license_path_reject = load_exceptions_file(
            args.licenseexceptions)
    else:
        license_path_accept = license_path_reject = []

    # Uncrustify exceptions file (only relevant when --uncrustify is given)
    if args.uncrustify:
        uncrustify_path_accept, uncrustify_path_reject = load_exceptions_file(
            args.uncrustifyexceptions)
    else:
        uncrustify_path_accept = uncrustify_path_reject = []

    ### Main actions start here
    # Setup the git repository
    init_git(gitrepo)
    # Pull current list of tags here, to fast skip any work already done
    if args.targetbranch != "package":
        switch_to_branch(args.targetbranch, orphan=True)
    current_git_tags = get_current_git_tags(gitrepo)
    os.chdir(start_cwd)

    ## SVN interactions and reloading state
    # Decide which svn packages we will import
    # Note that if we're pulling the packages from a tag diff file, we also get tags
    # at this point, otherwise the tag list is empty.
    svn_packages = get_tags(args.tagfiles, args.svnpath)
    # Add "trunk" packages, if required
    if args.processtrunk:
        # items() instead of iteritems() keeps this working on Python 2 and 3
        for package, tags in svn_packages.items():
            if "trunk" not in tags:
                tags.append("trunk")

    # Initialise SVN and author metadata cache with any stored values
    svn_metadata_cache = initialise_metadata(args.svncachefile)
    author_metadata_cache = initialise_metadata(args.authorcachefile)

    # Prepare package import
    scan_svn_tags_and_get_metadata(svnroot,
                                   svn_packages,
                                   svn_metadata_cache,
                                   author_metadata_cache,
                                   args.intermediatetags,
                                   package_veto=package_veto)

    # Now persist the metadata caches to disk (with a timestamped backup)
    backup_metadata(svn_metadata_cache, start_cwd, args.svncachefile,
                    start_timestamp_string)
    backup_metadata(author_metadata_cache, start_cwd, args.authorcachefile,
                    start_timestamp_string)

    # Setup dictionary for keying by SVN revision number
    svn_cache_revision_dict = svn_cache_revision_dict_init(svn_metadata_cache)

    ## git processing actions
    # Process each SVN tag in ascending (numeric) revision order.
    # key=int replaces the old cmp= lambda - revision keys are strings,
    # so a plain sort would order them lexicographically ("10" < "9").
    ordered_revisions = sorted(svn_cache_revision_dict.keys(), key=int)
    logger.info("Will process {0} SVN revisions in total".format(
        len(ordered_revisions)))
    counter = 0
    processed_tags = 0
    timing = []
    os.chdir(gitrepo)

    for rev in ordered_revisions:
        counter += 1
        start = time.time()
        logger.info("SVN Revision {0} ({1} of {2})".format(
            rev, counter, len(ordered_revisions)))
        for pkg_tag in svn_cache_revision_dict[rev]:
            # Skip tags that were imported in a previous run
            if get_flattened_git_tag(pkg_tag["package"], pkg_tag["tag"],
                                     rev) in current_git_tags:
                logger.info("Tag {0} exists already - skipping".format(
                    os.path.join(pkg_tag["package"], pkg_tag["tag"])))
                continue
            if args.targetbranch == "package":
                switch_to_branch(os.path.basename(pkg_tag["package"]),
                                 orphan=True)
            svn_co_tag_and_commit(
                svnroot,
                gitrepo,
                pkg_tag["package"],
                pkg_tag["tag"],
                svn_metadata_cache[os.path.basename(
                    pkg_tag["package"])]["svn"][pkg_tag["tag"]][rev],
                author_metadata_cache,
                svn_path_accept=svn_path_accept,
                svn_path_reject=svn_path_reject,
                package_veto=package_veto,
                license_text=license_text,
                license_path_accept=license_path_accept,
                license_path_reject=license_path_reject,
                uncrustify_config=args.uncrustify,
                uncrustify_path_accept=uncrustify_path_accept,
                uncrustify_path_reject=uncrustify_path_reject)
            processed_tags += 1
        elapsed = time.time() - start
        logger.info(
            "{0} processed in {1}s ({2} packages really processed)".format(
                counter, elapsed, processed_tags))
        timing.append(elapsed)

    # Last task, clean all empty directories (git does not track these, but they are clutter)
    check_output_with_retry(("git", "clean", "-f", "-d"))

    if args.importtimingfile:
        os.chdir(start_cwd)
        with open(args.importtimingfile, "w") as time_file:
            json.dump(timing, time_file)
Exemplo n.º 13
0
def main():
    """Check a directory tree for files that may have been wrongly relicensed.

    Walks the tree (default: cwd, or --path), filters files by source-code
    extension and by the accept/reject globs from the license exceptions
    file, then runs license_check_file() on each survivor.

    Returns 0 when no concerning files are found; calls sys.exit(1) if any
    are found.
    """
    parser = argparse.ArgumentParser(
        description="License file checker, parsing a git import and "
        "checking for any files that may have had the new ATLAS copyright "
        "and license applied in error. All files are listed, filtered by the current "
        "exceptions and then checked for statements of license or copyright that "
        "indicate a problem.")
    parser.add_argument("--path", help="Path to check (by default check cwd)")
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply license file to (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="Switch logging into DEBUG mode")
    parser.add_argument(
        '--quiet',
        action="store_true",
        default=False,
        help=
        "Only print filenames that have issues for adding to the filter file")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Where to check
    check_path = args.path if args.path else os.getcwd()
    license_path_accept, license_path_reject = load_exceptions_file(
        args.licenseexceptions)

    # Only these source file extensions are subject to the license check
    checked_extensions = ("cxx", "cpp", "icc", "cc", "c", "C", "h", "hpp",
                          "hh", "py", "cmake")

    worry_files = 0
    for root, dirs, files in os.walk(check_path):
        # Prune .git so os.walk never descends into it (the old basename
        # test only skipped the .git directory itself, not its subtrees)
        if ".git" in dirs:
            dirs.remove(".git")
        if os.path.basename(root) == ".git":
            continue
        for name in files:
            extension = name.rsplit(".", 1)[1] if "." in name else ""
            if extension not in checked_extensions:
                continue
            if name == "AtlasInternals.cmake":  # Many false matches, so skip...
                continue
            filename = os.path.join(root, name)
            # Path relative to the checked root, as used by the filter globs
            git_filename = filename[len(check_path) + 1:]
            path_veto = False
            for pattern in license_path_reject:
                if re.match(pattern, git_filename):
                    logger.debug("File {0} was license file vetoed".format(
                        git_filename))
                    path_veto = True
                    break
            for pattern in license_path_accept:
                # Bug fix: this test previously used the undefined name
                # "svn_filename", which raised NameError as soon as any
                # accept glob was configured.
                if re.match(pattern, git_filename):
                    logger.debug("File {0} was license file forced".format(
                        git_filename))
                    path_veto = False
                    break
            if path_veto:
                continue
            worry_files += license_check_file(filename, git_filename,
                                              args.quiet)

    if worry_files:
        logger.warning("Found {0} concerning files".format(worry_files))
        sys.exit(1)

    return 0