Example #1
def find_cmake_tags(base_path, release, project_path):
    ## @brief Find the tags that went into a CMake release found
    #  at the path specified
    #  @param base_path Starting base path for the release number and flavour
    #  @param release The Athena release number
    #  @param project_path The path element inside each project where the
    #  project is installed
    release_packages = {}
    project_directories = [
        entry for entry in os.listdir(base_path)
        if entry.startswith("Atlas") or entry == "DetCommon"
    ]
    for project in project_directories:
        packages_file = os.path.join(base_path, project, release, project_path,
                                     "packages.txt")
        if not os.path.exists(packages_file):
            logger.warning(
                "Project packages file {0} doesn't exist - skipping this project"
                .format(packages_file))
            continue
        project_packages = read_project_packages(packages_file, project)
        logger.debug("Found {0} packages for project {1}".format(
            len(project_packages), project))
        release_packages.update(project_packages)
    logger.info("Found {0} packages in release {1}".format(
        len(release_packages), release))
    return release_packages
Example #2
def get_tags(tag_files, svn_path_accept):
    ## @brief Parse packages and package tags from release diff files
    #  @param tag_files List of release tag files to query
    #  @param svn_path_accept List of paths to filter on
    #  @return dictionary keyed by package (including path) and value as sorted list of tags
    svn_package_tags = {}
    for tag_file in tag_files:
        with open(tag_file) as tag_fh:
            tag_dict = json.load(tag_fh)
            logger.info("Getting tag lists from {0}".format(
                tag_dict["release"]["name"]))
            for package, package_info in tag_dict["tags"].iteritems():
                if len(svn_path_accept) > 0:
                    accept = False
                    for path in svn_path_accept:
                        if package.startswith(path):
                            accept = True
                            break
                    if not accept:
                        continue
                svn_tag = package_info["svn_tag"]
                if svn_tag != "trunk":
                    svn_tag = os.path.join("tags", svn_tag)
                if package in svn_package_tags:
                    svn_package_tags[package].add(svn_tag)
                else:
                    svn_package_tags[package] = set((svn_tag, ))
    # Now convert back to list and sort tags...
    for package in svn_package_tags:
        svn_package_tags[package] = list(svn_package_tags[package])
        svn_package_tags[package].sort()
    return svn_package_tags
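For reference, a minimal sketch of the tag file structure that get_tags consumes, inferred from the key lookups above (package paths and tag names are hypothetical):

# Minimal tag file content, as inferred from the lookups in get_tags
# (all names hypothetical):
example_tag_file = {
    "release": {"name": "21.0.1"},
    "tags": {
        "Tools/PyJobTransforms": {"svn_tag": "PyJobTransforms-01-02-03"},
        "Control/AthenaCommon": {"svn_tag": "trunk"},
    },
}
# With svn_path_accept=["Tools/"], get_tags would keep only the first
# package and return {"Tools/PyJobTransforms": ["tags/PyJobTransforms-01-02-03"]}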
Example #3
def find_nicos_from_base(nicos_path, base_release):
    ## @brief Find base release and cache release tag files when only a base release number
    #  is given
    #  @param nicos_path Base path to NICOS tag file area
    #  @param base_release Base release number A.B or A.B.X (e.g., 21.0[.1])
    #  @return list of matching tag files, in release numbered order
    release_list = []
    dir_list = os.listdir(nicos_path)
    release_match = "{0}(\.(\d+))*$".format(
        os.path.basename(base_release).replace(".", r"\."))
    logger.debug(
        "Matching releases against pattern '{0}'".format(release_match))
    for entry in dir_list:
        if re.match(release_match, entry):
            release_list.append(entry)
    logger.debug("Matching releases: {0}".format(release_list))
    # It's not actually necessary to sort the releases, but it does no harm
    release_list.sort(cmp=release_compare)
    logger.info(
        "Found ordered list of production caches: {0}".format(release_list))

    tag_files = []
    for release in release_list:
        tag_files.append(get_tag_file(os.path.join(nicos_path, release)))

    return tag_files
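The release_compare comparator used above is not shown in these examples; a plausible sketch, assuming release names are dot-separated integers like 21.0.1 (Python 2 cmp-style, matching the sort(cmp=...) call):

def release_compare(rel_a, rel_b):
    # Hypothetical sketch: compare dot-separated release numbers field by
    # field, so that 21.0 sorts before 21.0.1, which sorts before 21.0.2
    fields_a = [int(f) for f in rel_a.split(".")]
    fields_b = [int(f) for f in rel_b.split(".")]
    return cmp(fields_a, fields_b)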
Example #4
def remove_gpl_block(source_lines, keep_lines, depth):
    '''Get rid of GPL license block'''
    gpl_idx = search_from(source_lines, "GNU General Public License", 0, depth)
    if gpl_idx == -1:
        return

    # OK, found a matching line, now search for start and end of block
    start_idx = search_from(source_lines,
                            r"\*\*\*\*\*\*\*\*",
                            gpl_idx,
                            max=10,
                            backwards=True)
    stop_idx = search_from(source_lines,
                           r"\*\*\*\*\*\*\*\*",
                           gpl_idx,
                           max=10,
                           backwards=False)

    if start_idx == -1 or stop_idx == -1:
        logger.warning(
            "Found GPL trigger line, but failed to find start/end of license block"
        )
        return

    logger.info(
        "Found GPL license block to suppress from lines {0} to {1}".format(
            start_idx + 1, stop_idx + 1))
    for idx in range(start_idx, stop_idx + 1):
        keep_lines[idx] = False
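The search_from helper is used throughout the relicensing examples but never shown; a minimal sketch consistent with the calls above (the signature is inferred, so treat it as an assumption):

import re

def search_from(source_lines, pattern, start=0, max=-1, backwards=False):
    # Hypothetical sketch: scan source_lines for a regexp match, starting
    # at index 'start', moving backwards if requested, examining at most
    # 'max' lines (unlimited if max is -1); return the index or -1
    step = -1 if backwards else 1
    idx = start
    examined = 0
    while 0 <= idx < len(source_lines):
        if max != -1 and examined >= max:
            break
        if re.search(pattern, source_lines[idx]):
            return idx
        idx += step
        examined += 1
    return -1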
Example #5
def load_exceptions_file(filename, reject_changelog=False):
    ## @brief Parse and return path globbing exceptions file
    #  @param filename File containing exceptions
    #  @param reject_changelog Special flag used by svnpull to ensure that
    #  ChangeLog files are rejected (in a normal svn2git import they are
    #  accepted onto the import branches, but then specially excluded from
    #  the release branches)
    #  @return Tuple of path globs to accept and globs to reject, converted to regexps
    path_accept = []
    path_reject = []
    if filename != "NONE":
        with open(filename) as filter_file:
            logger.info("Loaded import exceptions from {0}".format(filename))
            for line in filter_file:
                line = line.strip()
                if reject_changelog and ("ChangeLog" in line):
                    logger.debug(
                        "Found ChangeLog line, which will be forced to reject: {0}"
                        .format(line))
                    line = "- */ChangeLog"
                if line.startswith("#") or line == "":
                    continue
                if line.startswith("-"):
                    path_reject.append(
                        re.compile(fnmatch.translate(line.lstrip("- "))))
                else:
                    path_accept.append(
                        re.compile(fnmatch.translate(line.lstrip("+ "))))
    logger.debug("Glob accept: {0}".format([m.pattern for m in path_accept]))
    logger.debug("Glob reject: {0}".format([m.pattern for m in path_reject]))
    return path_accept, path_reject
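As an illustration, an exceptions file in the format parsed above might look like this (paths hypothetical): lines starting with '-' are rejects, other non-comment lines are accepts, and '#' lines and blanks are skipped.

# Example import exceptions file (hypothetical paths)
+ Tools/Scripts/share/*
- */doc/mainpage.h
- */ChangeLog

Calling load_exceptions_file on such a file returns the accept and reject lists as compiled regexps, e.g. accept, reject = load_exceptions_file("my-exceptions.txt").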
Example #6
def initialise_metadata(cachefile):
    ## @brief Load existing cache file, if it exists, or return empty cache
    #  @param cachefile Name of cache file (serialised in JSON)
    if os.path.exists(cachefile):
        logger.info("Reloading cache from {0}".format(cachefile))
        with open(cachefile) as md_load:
            svn_metadata_cache = json.load(md_load)
    else:
        svn_metadata_cache = {}
    return svn_metadata_cache
Example #7
def init_git(gitrepo):
    ## @brief Initialise git repo, if needed
    #  @param gitrepo Git repository path
    if not os.path.exists(gitrepo):
        os.makedirs(gitrepo)
    os.chdir(gitrepo)
    if os.path.exists(os.path.join(gitrepo, ".git")):
        logger.info("Found existing git repo, {0}".format(gitrepo))
        check_output_with_retry(("git", "reset", "--hard"))
    else:
        logger.info("Initialising git repo: {0}".format(gitrepo))
        check_output_with_retry(("git", "init"))
Example #8
def find_youngest_tag(tag_diff, svn_metadata_cache):
    '''Use the svn metadata cache to find the youngest tag in the release'''
    youngest_tag = None
    youngest_svn_revision = -1
    if svn_metadata_cache:
        for package, tag in tag_diff[0]["diff"]["add"].iteritems():
            if (package in svn_metadata_cache and 
                svn_metadata_cache[package][os.path.join("tags", tag)]["revision"] > youngest_svn_revision):
                youngest_svn_revision = svn_metadata_cache[package][os.path.join("tags", tag)]["revision"]
                yougest_tag = os.path.join("import", "tag", tag)
    logger.info("Tag to branch from master at is {0} (SVN revision {1})".format(yougest_tag, youngest_svn_revision))
    return yougest_tag
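For illustration, a minimal sketch of the tag_diff structure this function consumes, inferred from the lookups above (names hypothetical):

# Only the first entry's "add" diff section is inspected; it maps
# package paths to tag names (all values hypothetical):
example_tag_diff = [
    {"diff": {"add": {"Control/AthenaCommon": "AthenaCommon-01-02-03"}}},
]
# The metadata cache is then expected to provide
# svn_metadata_cache["Control/AthenaCommon"]["tags/AthenaCommon-01-02-03"]["revision"]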
Example #9
def remove_plain_copyright(source_lines, keep_lines, depth):
    '''Get rid of plain vanilla copyright lines'''
    copyre = re.compile(r"//.*copyright.*\([Cc]\)")
    for idx, line in enumerate(source_lines):
        if idx > depth:
            break
        if copyre.search(line):
            logger.info(
                "Found copyright line to suppress at index {0}: {1}".format(
                    idx + 1, line))
            keep_lines[idx] = False
Example #10
def prepare_branch_point(branch, parentbranch=None):
    ## @brief Using information about the target branch and any parent
    #  switch to the correct point in history to start/continue
    #  @param branch Target branch name
    #  @param parentbranch If creating a new branch, this is the @c BRANCH:COMMIT_ID of 
    #  where to make the new branch from; syntax @c BRANCH:@FILE and @c BRANCH:@TIMESTAMP
    #  is also supported, where the timestamp will be used to find the branch directly
    #  (and can be taken from @c JSON release data in @c FILE) 
    if not parentbranch or branch_exists(branch):
        logger.info("Switching to branch {0}".format(branch))
        switch_to_branch(branch, orphan=True)
    else:
        parent, commit = parentbranch.split(":")
        check_output_with_retry(("git", "checkout", parent), retries=1) # needed?
        if commit.startswith("@"):
            timestamp = commit[1:]
            # If this maps to a file, try to open it as a release JSON, otherwise treat it as
            # a plain timestamp
            if os.access(timestamp, os.F_OK):
                logger.info("Taking branching timestamp from file {0}".format(timestamp))
                with open(timestamp) as fh:
                    branch_point_release = json.load(fh)
                timestamp = branch_point_release["release"]["timestamp"]
            logger.info("Using timestamp {0} for branch point".format(timestamp))
            commit = check_output_with_retry(["git", "log", "--until", str(timestamp), "-n1", "--pretty=format:%H"],
                                             retries=1).strip()
            logger.info("Mapped timestamp {0} to commit {1}".format(timestamp, commit))
        check_output_with_retry(("git", "checkout", commit), retries=1)
        check_output_with_retry(("git", "checkout", "-b", branch), retries=1)
Example #11
def main():
    parser = argparse.ArgumentParser(
        description=
        'ATLAS CMT tag parser, grabbing tag content for a CMT cache release. '
        'This is quite a hacky script, only filling in the gaps in NICOS knowledge for '
        'ATLAS P1HLT caches.')
    parser.add_argument('release',
                        metavar='RELEASE',
                        help="CMT requirements file to parse")
    parser.add_argument(
        '--tagdir',
        default="tagdir",
        help=
        "output directory for tag files, each release will generate an entry here (default \"tagdir\")"
    )
    parser.add_argument(
        '--overwrite',
        action="store_true",
        default=False,
        help=
        "Overwrite any exisitng configuration files (otherwise, just skip over)"
    )
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    release_description = parse_release_data(args.release)
    release_tags = parse_tag_file(args.release)
    logger.info("Processing tags for release {0}".format(
        release_description["name"]))
    output_file = os.path.join(args.tagdir, release_description["name"])
    if args.overwrite or not os.path.exists(output_file):
        with open(output_file, "w") as tag_output:
            json.dump({
                "release": release_description,
                "tags": release_tags
            },
                      tag_output,
                      indent=2)
    else:
        logger.debug(
            "Skipped writing to {0} - overwrite is false".format(output_file))
Example #12
def svn_get_path_metadata(svnroot, package, package_path, revision=None):
    ## @brief Get SVN metadata and return it as a simple dictionary keyed on date, author and commit revision
    logger.info("Querying SVN metadata for {0}".format(os.path.join(package, package_path)))
    cmd = ["svn", "info", os.path.join(svnroot, package, package_path), "--xml"]
    svn_info = check_output_with_retry(cmd)
    tree = eltree.fromstring(svn_info)
    info = {"date": tree.find(".//date").text.rsplit(".",1)[0], # Strip off sub-second part
            "author": tree.find(".//author").text,
            "revision": tree.find(".//commit").attrib['revision']}

    cmd = ["svn", "log", os.path.join(svnroot, package, package_path), "-r", info["revision"], "--xml"]
    svn_log = check_output_with_retry(cmd)
    tree = eltree.fromstring(svn_log)
    info["msg"] = tree.find(".//msg").text.strip().encode('ascii', 'ignore')
    return info
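The returned dictionary would look something like this (values hypothetical):

example_info = {
    "date": "2016-07-14T12:34:56",  # sub-second part stripped
    "author": "jbloggs",
    "revision": "789123",           # commit revision, as a string
    "msg": "Fix compiler warning",  # ASCII-sanitised log message
}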
Example #13
def svn_cleanup(svn_path, svn_co_root, svn_path_accept=[], svn_path_reject=[]):
    ## @brief Clean out files we do not want to import into git
    #  @param svn_path Full path to checkout of SVN package
    #  @param svn_co_root Base directory of SVN checkout
    #  @param svn_path_accept List of file path globs to always import to git
    #  @param svn_path_reject List of file path globs to never import to git

    # File size veto
    for root, dirs, files in os.walk(svn_path):
        if ".svn" in dirs:
            shutil.rmtree(os.path.join(root, ".svn"))
            dirs.remove(".svn")
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_accept_match = False
            for filter in svn_path_accept:
                if re.match(filter, svn_filename):
                    logger.info("{0} imported from globbed exception {1}".format(svn_filename, filter.pattern))
                    path_accept_match = True
                    break
            if path_accept_match:
                continue
            try:
                # Rejection always takes precedence; flag the removal so the
                # removed file is not processed any further below
                rejected = False
                for filter in svn_path_reject:
                    if re.match(filter, svn_filename):
                        logger.info("{0} not imported due to {1} filter".format(svn_filename, filter.pattern))
                        os.remove(filename)
                        rejected = True
                        break
                if rejected:
                    continue

                if os.lstat(filename).st_size > 100 * 1024:
                    if "." in name and name.rsplit(".", 1)[1] in ("cxx", "py", "h", "java", "cc", "c", "icc", "cpp",
                                                                  "hpp", "hh", "f", "F"):
                        logger.info("Source file {0} is too large, but importing anyway (source files always imported)".format(filename))
                    else:
                        logger.info("File {0} is too large - not importing".format(filename))
                        os.remove(filename)
                        continue
                if name.startswith("."):
                    logger.info("File {0} starts with a '.' - not importing".format(filename))
                    os.remove(filename)
                    continue

            except OSError, e:
                logger.debug("Got OSError (usually harmless) treating {0}: {1}".format(filename, e))
Example #14
def main():
    parser = argparse.ArgumentParser(
        description='Merge releases to create a super-release')
    parser.add_argument('targetrelease',
                        metavar='RELEASE',
                        help="Target release")
    parser.add_argument('mergerelease',
                        metavar='RELEASE',
                        nargs="+",
                        help="Releases to merge into target")
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    with open(args.targetrelease) as target:
        target_release_data = json.load(target)

    for release in args.mergerelease:
        with open(release) as merge:
            merge_release_data = json.load(merge)
        for package_path, package_data in merge_release_data["tags"].iteritems(
        ):
            if package_path not in target_release_data["tags"]:
                target_release_data["tags"][package_path] = package_data
                logger.info("Merged {0} at tag {1} from {2}".format(
                    package_path, package_data["svn_tag"], release))
            else:
                logger.debug("Package {0} already exists in target".format(
                    package_path))

    try:
        os.rename(args.targetrelease, args.targetrelease + ".bak")
        with open(args.targetrelease, "w") as output_fh:
            json.dump(target_release_data, output_fh, indent=2)
    except OSError, e:
        logger.error("Error while rewriting target file {0}: {1}".format(
            args.targetrelease, e))
Example #15
def relicense(fname, license=True, rename=False, depth=20):
    logger.info("Processing file {0}".format(fname))
    tmpname = fname + ".relicense"
    fmode = os.stat(fname).st_mode
    with open(fname) as infile, open(tmpname, "w") as outfile:
        source_lines = infile.readlines()

        keep_lines = [True] * len(source_lines)

        remove_plain_copyright(source_lines, keep_lines, depth)
        remove_gpl_block(source_lines, keep_lines, depth)

        if license:
            add_standard_license(fname, source_lines, keep_lines)

        write_relicensed_file(source_lines, keep_lines, outfile)
        os.chmod(tmpname, fmode)

        if rename:
            os.rename(fname, fname + ".bak")
            os.rename(tmpname, fname)
Example #16
def backskip_filter(tagfiles):
    ## @brief Reverse-order parse a set of tagfiles and reject those
    #  where processing would require a backskip in time
    #  @param tagfiles List of tag files
    #  @return Filtered list of tagfiles with backskips removed
    last_timestamp = 0
    last_release = ""
    tagfiles_copy = tagfiles[:]
    tagfiles_copy.reverse()
    for tagfile in tagfiles_copy:
        with open(tagfile) as tagfile_fh:
            release_data = json.load(tagfile_fh)
        if last_timestamp and release_data["release"]["timestamp"] > last_timestamp:
            logger.info("Vetoing release {0} (from {1}) because of "
                        "backskip for {2}".format(release_data["release"]["name"], tagfile, last_release))
            tagfiles.remove(tagfile)
            continue
        last_timestamp = release_data["release"]["timestamp"]
        last_release = release_data["release"]["name"]
        logger.info("Accepted release {0} at {1}".format(last_release, last_timestamp))
    return tagfiles
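As a worked example of the veto, suppose three tag files arrive in the order 21.0.1 (timestamp 100), 21.0.2 (timestamp 300) and 21.0.3 (timestamp 200). The reverse scan accepts 21.0.3 (timestamp 200), vetoes 21.0.2 (timestamp 300 is later than 200, so processing it would require a backskip in time), then accepts 21.0.1 (timestamp 100). The filtered list is [21.0.1, 21.0.3], which only moves forward in time.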
Example #17
def scan_svn_tags_and_get_metadata(svnroot, svn_packages, svn_metadata_cache, author_metadata_cache,
                                   all_package_tags=False, package_veto=[]):
    ## @brief Get SVN metadata for each of the package tags we're interested in
    #  @param svnroot URL of SVN repository
    #  @param svn_packages Dictionary of packages and tags to process
    #  @param svn_metadata_cache SVN metadata cache
    #  @param author_metadata_cache author metadata cache with name and email for commits
    #  @param all_package_tags Boolean flag triggering import of all package tags in SVN
    #  @param package_veto List of packages to just plain refuse to handle

    # First we establish the list of tags which we need to deal with.
    for package, package_tags in svn_packages.iteritems():
        logger.info("Preparing package {0} (base tags: {1})".format(package, package_tags))
        if all_package_tags:
            oldest_tag = svn_packages[package][0]
            tags = get_all_package_tags(svnroot, package)
            try:
                package_tags.extend(tags[tags.index(oldest_tag) + 1:])
            except ValueError:
                logger.error("Oldest release tag ({0}) for package {1} not found in SVN!".format(oldest_tag, package))
                sys.exit(1)
        # We need to now sort the package tags and remove any duplicates
        ordered_tags = list(set(package_tags))
        ordered_tags.sort(cmp=svn_tag_cmp)
        svn_packages[package] = ordered_tags

    # Now iterate over the required tags and ensure we have the necessary metadata
    for package, package_tags in svn_packages.iteritems():
        if package in package_veto:
            logger.info("Package {0} is vetoed - skipping SVN metadata import".format(package))
            continue
        package_name = os.path.basename(package)
        package_path = os.path.dirname(package)
        for tag in package_tags:
            # Do we have metadata?
            if package_name not in svn_metadata_cache:
                svn_metadata_cache[package_name] = {"path": package_path, "svn": {}}
            try:
                if tag == "trunk":
                    # We always need to get the metadata for trunk tags as we need to
                    # know the current revision
                    svn_metadata = svn_get_path_metadata(svnroot, package, tag)
                    if tag not in svn_metadata_cache[package_name]["svn"]:
                        svn_metadata_cache[package_name]["svn"][tag] = {svn_metadata["revision"]: svn_metadata}
                    elif svn_metadata["revision"] not in svn_metadata_cache[package_name]["svn"][tag]:
                        svn_metadata_cache[package_name]["svn"][tag][svn_metadata["revision"]] = svn_metadata
                elif tag not in svn_metadata_cache[package_name]["svn"]:
                    svn_metadata = svn_get_path_metadata(svnroot, package, tag)
                    svn_metadata_cache[package_name]["svn"][tag] = {svn_metadata["revision"]: svn_metadata}
                else:
                    svn_metadata = svn_metadata_cache[package_name]["svn"][tag].values()[0]
                if svn_metadata["author"] not in author_metadata_cache:
                    try:
                        author_metadata_cache[svn_metadata["author"]] = author_info_lookup(svn_metadata["author"])
                    except RuntimeError, e:
                        logger.info("Failed to get author information for {0}: {1}".format(package, e))
                        author_metadata_cache[svn_metadata["author"]] = {"name": svn_metadata["author"],
                                                                         "email": "{0}@cern.ch".format(svn_metadata["author"])}
            except RuntimeError:
                logger.warning("Failed to get SVN metadata for {0}".format(os.path.join(package, tag)))
Example #18
def check_output_with_retry(cmd,
                            retries=2,
                            wait=10,
                            ignore_fail=False,
                            dryrun=False):
    ## @brief Multiple attempt wrapper for subprocess.check_output (especially remote SVN commands can bork)
    #  @param cmd list or tuple of command line parameters
    #  @param retries Number of attempts to execute successfully
    #  @param wait Sleep time after an unsuccessful execution attempt
    #  @param ignore_fail Do not raise an exception if the command fails
    #  @param dryrun If @c True do not actually execute the command, only print it and return an empty string
    #  @return String containing command output
    if dryrun:
        logger.info("Dryrun mode: {0}".format(cmd))
        return ""
    success = failure = False
    tries = 0
    start = time.time()
    while not success and not failure:
        tries += 1
        try:
            logger.debug("Calling {0}".format(cmd))
            output = subprocess.check_output(cmd)
            success = True
        except subprocess.CalledProcessError:
            if ignore_fail:
                success = True
                output = ""
                continue
            logger.warning("Attempt {0} to execute {1} failed".format(
                tries, cmd))
            if tries > retries:
                failure = True
            else:
                time.sleep(wait)
    if failure:
        raise RuntimeError("Repeated failures to execute {0}".format(cmd))
    logger.debug("Executed in {0}s".format(time.time() - start))
    return output
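Typical usage, following the call patterns seen elsewhere in these examples (the SVN URL and tag name are hypothetical):

# Retry a flaky remote SVN command a few extra times:
output = check_output_with_retry(("svn", "ls", "https://svn.example.org/repo"),
                                 retries=3, wait=30)
# Best-effort git tag deletion, tolerating failure:
check_output_with_retry(("git", "tag", "-d", "import/tag/Foo-00-00-01"),
                        ignore_fail=True)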
Example #19
def svn_license_injector(svn_path, svn_co_root, license_text, license_path_accept=[], license_path_reject=[]):
    ## @brief Add license statements to code before import
    #  @param svn_path Filesystem path to cleaned up SVN checkout
    #  @param svn_co_root Base directory of SVN checkout
    #  @param license_text List of strings that comprise the license to apply
    #  @param license_path_accept Paths to force include in license file addition
    #  @param license_path_reject Paths to exclude from license file addition
    for root, dirs, files in os.walk(svn_path):
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_veto = False
            for filter in license_path_reject:
                if re.match(filter, svn_filename):
                    logger.info("File {0} will not have a license file applied".format(svn_filename, filter.pattern))
                    path_veto = True
                    break
            for filter in license_path_accept:
                if re.match(filter, svn_filename):
                    logger.info("File {0} will have a license file applied".format(svn_filename, filter.pattern))
                    path_veto = False
                    break
            if path_veto:
                continue
            # Get the file's mode here to then restore it
            try:
                fmode = os.stat(filename).st_mode
                extension = svn_filename.rsplit(".", 1)[1] if "." in svn_filename else ""
                if extension in ("cxx", "cpp", "icc", "cc", "c", "C", "h", "hpp", "hh"):
                    inject_c_license(filename, license_text)
                    os.chmod(filename, fmode)
                elif extension in ("py", "cmake"):
                    inject_py_license(filename, license_text)
                    os.chmod(filename, fmode)
            except OSError, e:
                # Can happen if a file is a softlink to nowhere
                logger.warning("Got an exception on stating {0}: {1}".format(filename, e))
Example #20
def add_standard_license(fname, source_lines, keep_lines):
    '''Add the standard CERN license if it is missing'''
    if search_from(
            source_lines,
            r"Copyright \(C\) 2002-2017 CERN for the benefit of the ATLAS collaboration"
    ) != -1:
        logger.info("Standard license already present")
        return
    extension = fname.rsplit(".", 1)[1] if "." in fname else ""
    if extension in ("cxx", "cpp", "icc", "cc", "c", "C", "h", "hpp", "hh"):
        logger.info("Adding C style license")
        add_c_license(source_lines, keep_lines)
    elif extension in ("py", "cmake"):
        logger.info("Adding py style license")
        add_py_license(source_lines, keep_lines)
Example #21
def find_packages_for_update(release_data, tag_list, branch, svn_metadata_cache, release_tag_unprocessed, only_forward=False):
    ## @brief Find packages that need updates, comparing release tag content with
    #  git import tags already made
    #  @param release_data Release tag content dictionary
    #  @param tag_list List of current git tags
    #  @param branch Git branch being constructed
    #  @param svn_metadata_cache Usual cache data for SVN stuff
    #  @param release_tag_unprocessed Dictionary with current "tag" metadata, useful to mark up import
    #  @param only_forward If @c True then never revert a package to a previous version or import a branch tag
    #  @return Tuple of tag import dictionary, and a list of all "processed" packages
    
    ## Loop over all packages in a release and see if the package
    #  - is missing from the svn metadata cache (so skip)
    #  - is already imported at current HEAD (so skip)
    #  - is new or updated (so mark for import)
    #  The end result is the import_list dictionary
    import_list = {}
    packages_considered = []
    for package, package_data in release_data["tags"].iteritems():
        package_name = os.path.basename(package)
        packages_considered.append(package_name)
        package_tag = package_data["svn_tag"]
        if package_name not in svn_metadata_cache:
            logger.debug("Package {0} not found - assuming restricted import".format(package_name))
            continue
        if package_tag == "trunk":
            svn_meta_tag_key = "trunk"
        else:
            svn_meta_tag_key = os.path.join("tags", package_tag)

        for revision in svn_metadata_cache[package_name]["svn"][svn_meta_tag_key]:
            git_import_tag = get_flattened_git_tag(package, package_tag, revision)
            if git_import_tag not in tag_list:
                logger.debug("Import tag {0} not found - assuming restricted import".format(git_import_tag))
                continue
            branch_import_tag = get_flattened_git_tag(package, package_tag, revision, branch)
            logger.debug("Considering import of {0} ({1}@r{2}) to {3} "
                         "for release {4}".format(branch_import_tag, package_tag, revision, branch, release_data["release"]["name"]))
            if branch_import_tag in tag_list:
                logger.info("Import of {0} ({1} r{2}) onto {3} done - skipping".format(package, package_tag, revision, branch))
                continue
            if only_forward:
                if package_name in release_tag_unprocessed and package_compare(release_tag_unprocessed[package_name]["svn_tag"], package_tag) != -1:
                    logger.info("Import of {0} onto {1} is blocked - onlyforward option will not downgrade tags".format(package_tag, branch))
                    continue
            ## @note The structure of the dictionary used to direct the import of a package is:
            #  "package": full package path
            #  "package_name": package basename (for convenience)
            #  "git_import_tag": the git import tag that identifies the import of the version of this package
            #  "svn_tag": the SVN tag corresponding to this package version
            #  "svn_revision": the SVN revision for this package version
            #  "branch_import_tag": the git import tag that will be created to stamp this import as done
            #  "svn_meta_tag_key": the key used in the SVN metadata dictionary to get SVN metadata
            #  "current_branch_import_tag": the git tag indicating the current version of this package 
            import_element = {"package": package, 
                              "package_name": os.path.basename(package),
                              "git_import_tag": git_import_tag, 
                              "svn_tag": package_tag, 
                              "svn_revision": revision,
                              "branch_import_tag": branch_import_tag, "svn_meta_tag_key": svn_meta_tag_key, 
                              "current_branch_import_tag": release_tag_unprocessed[package_name]["git_tag"] if package_name in release_tag_unprocessed else None}
            logger.debug("Will import {0} to {1}".format(import_element, branch))
            if revision in import_list:
                import_list[revision].append(import_element)
            else:
                import_list[revision] = [import_element]

    return import_list, packages_considered
Example #22
def find_cmake_releases(install_path, release, nightly=None, arch=None):
    ## @brief Find the base path and project sub-path for a CMake release
    #  @param install_path Base release area for CMake installed releases
    #  @param release Athena release series + release flavour number (e.g., 21.0)
    #  @param nightly Nightly series to search (otherwise look for installed release)
    #  @param arch Manually specify architecture
    #  @return Tuple with full base release path, all matching releases and project sub-path
    base_path = os.path.join(install_path, release)
    if not os.path.isdir(base_path):
        logger.error(
            "Directory {0} is missing - cannot find CMake package data".format(
                base_path))
        sys.exit(1)
    logger.info("Using base path for release {0} of {1}".format(
        release, base_path))

    sample_project = find_cmake_sample_project(base_path)
    if not sample_project:
        logger.error(
            "Could not find any sample project from {0} - cannot find CMake package data"
            .format(base_path))
        sys.exit(1)
    logger.debug("Found build project {0} to build architecture with".format(
        sample_project))

    if nightly:
        releases = [nightly]
        if not os.path.isdir(
                os.path.join(base_path, sample_project, nightly)):
            logger.error("Could not find release {0} - "
                         "cannot find CMake package data".format(
                             os.path.join(base_path, sample_project, nightly)))
            sys.exit(1)
    else:
        releases = [
            d for d in os.listdir(os.path.join(base_path, sample_project))
            if os.path.isdir(os.path.join(base_path, sample_project, d))
            and d.startswith(release)
        ]
        if len(releases) == 0:
            logger.error("Could not find any releases in {0} - "
                         "cannot find CMake package data".format(
                             os.path.join(base_path, sample_project)))
            sys.exit(1)
    logger.debug("Found releases: {0}".format(releases))

    if arch:
        project_path = os.path.join("InstallArea", arch)
    else:
        if os.path.isdir(
                os.path.join(base_path, sample_project, releases[0],
                             "InstallArea")):
            arch = find_best_arch(
                os.path.join(base_path, sample_project, releases[0],
                             "InstallArea"))
            project_path = os.path.join("InstallArea", arch)
        else:
            arch = find_best_arch(
                os.path.join(base_path, sample_project, releases[0]))
            project_path = arch
    logger.debug("Using build architecture {0}".format(arch))

    return base_path, releases, project_path
Example #23
def main():
    parser = argparse.ArgumentParser(
        description=
        'ATLAS tag munger, calculating tag evolution across a release series')
    parser.add_argument(
        'release',
        metavar='RELEASE',
        nargs="+",
        help=
        "Files containing tag lists (NICOS format). If a release series/major is given (e.g., 20.1 or 20.1.5) "
        "the script will search for the base release and all caches to build the tag files in "
        "a simple way, without worrying about the details of the NICOS tag files and paths (N.B. "
        "in the rare cases when there is more than one tag file for a release, the last one will "
        "be used).")
    parser.add_argument(
        '--tagdir',
        default="tagdir",
        help=
        "output directory for tag files, each release will generate an entry here (default \"tagdir\")"
    )
    parser.add_argument(
        '--prefix',
        help=
        "Prefix for the name of the release, when the NICOS information is insufficient"
    )
    parser.add_argument(
        '--nicospath',
        default="/afs/cern.ch/atlas/software/dist/nightlies/nicos_work/tags/",
        help="path to NICOS tag files (defaults to usual CERN AFS location)")
    parser.add_argument(
        '--analysispkgfilter',
        action="store_true",
        help=
        "Special post processing for the (Ath)AnalysisBase-2.6.X release series, which "
        "filters tags to be only those which are missing from standard Athena releases"
    )
    parser.add_argument(
        '--overwrite',
        action="store_true",
        default=False,
        help=
        "Overwrite any exisitng configuration files (otherwise, just skip over)"
    )
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Case when a single base release is given - we have to expand this
    if len(args.release) == 1 and re.match(r"(\d+)\.(\d+)(\.(\d+))?$",
                                           args.release[0]):
        nicos_paths = find_nicos_from_base(args.nicospath, args.release[0])
    else:
        nicos_paths = []
        for path in args.release:
            if os.path.exists(path):
                nicos_paths.append(path)
            elif os.path.exists(os.path.join(args.nicospath, path)):
                nicos_paths.append(os.path.join(args.nicospath, path))
            else:
                logger.error(
                    "Path {0} doesn't exist (even after prepending NICOS path)"
                    .format(path))
                sys.exit(1)

    for release in nicos_paths:
        release_description = parse_release_data(release, args.prefix)
        release_tags = parse_tag_file(release, args.analysispkgfilter)
        logger.info("Processing tags for release {0}".format(
            release_description["name"]))
        output_file = os.path.join(args.tagdir, release_description["name"])
        if args.overwrite or not os.path.exists(output_file):
            with open(output_file, "w") as tag_output:
                json.dump(
                    {
                        "release": release_description,
                        "tags": release_tags
                    },
                    tag_output,
                    indent=2)
        else:
            logger.debug("Skipped writing to {0} - overwrite is false".format(
                output_file))
Example #24
def main():
    parser = argparse.ArgumentParser(
        description='SVN to git migrator, ATLAS style')
    parser.add_argument('svnroot',
                        metavar='SVNDIR',
                        help="Location of svn repository root")
    parser.add_argument('gitrepo',
                        metavar='GITDIR',
                        help="Location of git repository")
    parser.add_argument(
        'tagfiles',
        nargs="+",
        metavar='TAGFILE',
        help=
        "List of release tag content files to process - all tags found in these files will "
        "be imported (any already imported tags will be skipped)")
    parser.add_argument(
        '--targetbranch',
        default="package",
        help=
        "Target git branch for import. Default is the special value 'package' in which "
        "each package is imported onto its own branch")
    parser.add_argument(
        '--svnpath',
        metavar='PATH',
        nargs='+',
        default=[],
        help="Restrict actions to this list of paths in the SVN tree (use to "
        "make small scale tests of the import workflow).")
    parser.add_argument(
        '--intermediatetags',
        action="store_true",
        help=
        "Import all tags from oldest release tag found, instead of just release tags"
    )
    parser.add_argument(
        '--processtrunk',
        action="store_true",
        help=
        "Update trunk versions during the import (False by default, the trunk will be skipped)."
    )
    parser.add_argument(
        '--svncachefile',
        metavar='FILE',
        help=
        "File containing cache of SVN information - default '[gitrepo].svn.metadata'"
    )
    parser.add_argument(
        '--authorcachefile',
        metavar='FILE',
        help=
        "File containing cache of author name and email information - default '[gitrepo].author.metadata'"
    )
    parser.add_argument(
        '--importtimingfile',
        metavar="FILE",
        help=
        "File to dump SVN->git import timing information - default '[gitrepo]-timing.json'"
    )
    parser.add_argument(
        '--svnfilterexceptions',
        '--sfe',
        metavar="FILE",
        help=
        "File listing path globs to exempt from SVN import filter (lines with '+PATH') or "
        "to always reject (lines with '-PATH'); default %(default)s. Use NONE to have no exceptions.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasoffline-exceptions.txt"))
    parser.add_argument(
        '--packageveto',
        metavar="FILE",
        help="File listing packages that will be skipped completely on import.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaspackage-exceptions.txt"))
    parser.add_argument(
        '--licensefile',
        metavar="FILE",
        help="License file to add to source code files (default "
        "is to add %(default)s license file)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "cerncopy.txt"))
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply license file to (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument(
        '--uncrustify',
        metavar="FILE",
        help="Uncrustify configuration file to use to process C++ "
        "sources through before git import (by default uncrustify will not be used)"
    )
    parser.add_argument(
        '--uncrustifyexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply uncrustify to (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasuncrustify-exceptions.txt"))
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="Switch logging into DEBUG mode")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Massage default values
    if not args.svncachefile:
        args.svncachefile = os.path.basename(args.gitrepo) + ".svn.metadata"
    if not args.authorcachefile:
        args.authorcachefile = os.path.basename(
            args.gitrepo) + ".author.metadata"
    if not args.importtimingfile:
        args.importtimingfile = os.path.basename(args.gitrepo) + "-timing.json"

    # Set svnroot and git repo, get some starting values
    svnroot = args.svnroot
    gitrepo = os.path.abspath(args.gitrepo)
    start_cwd = os.getcwd()
    start_timestamp_string = time.strftime("%Y%m%dT%H%M.%S")
    logger.debug("Set SVN root to {0} and git repo to {1}".format(
        svnroot, gitrepo))

    # Load exception globs
    svn_path_accept, svn_path_reject = load_exceptions_file(
        args.svnfilterexceptions)

    # Load package vetos
    if args.packageveto:
        package_veto = load_package_veto(args.packageveto)
    else:
        package_veto = []

    # License file loading
    if args.licensefile:
        with open(args.licensefile) as lfh:
            license_text = [line.rstrip() for line in lfh.readlines()]
    else:
        license_text = None
    if args.licenseexceptions:
        license_path_accept, license_path_reject = load_exceptions_file(
            args.licenseexceptions)
    else:
        license_path_accept = license_path_reject = []

    # Uncrustify exceptions file
    if args.uncrustify:
        uncrustify_path_accept, uncrustify_path_reject = load_exceptions_file(
            args.uncrustifyexceptions)
    else:
        uncrustify_path_accept = uncrustify_path_reject = []

    ### Main actions start here
    # Setup the git repository
    init_git(gitrepo)
    # Pull the current list of tags here, to quickly skip any work already done
    if args.targetbranch != "package":
        switch_to_branch(args.targetbranch, orphan=True)
    current_git_tags = get_current_git_tags(gitrepo)
    os.chdir(start_cwd)

    ## SVN interactions and reloading state
    # Decide which svn packages we will import
    # Note that if we're pulling the packages from a tag diff file, we also get tags
    # at this point, otherwise the tag list is empty.
    svn_packages = get_tags(args.tagfiles, args.svnpath)
    # Add "trunk" packages, if required
    if args.processtrunk:
        for package, tags in svn_packages.iteritems():
            if "trunk" not in tags:
                tags.append("trunk")

    # Initialise SVN and author metadata cache with any stored values
    svn_metadata_cache = initialise_metadata(args.svncachefile)
    author_metadata_cache = initialise_metadata(args.authorcachefile)

    # Prepare package import
    scan_svn_tags_and_get_metadata(svnroot,
                                   svn_packages,
                                   svn_metadata_cache,
                                   author_metadata_cache,
                                   args.intermediatetags,
                                   package_veto=package_veto)

    # Now persist the metadata cache
    backup_metadata(svn_metadata_cache, start_cwd, args.svncachefile,
                    start_timestamp_string)
    backup_metadata(author_metadata_cache, start_cwd, args.authorcachefile,
                    start_timestamp_string)

    # Setup dictionary for keying by SVN revision number
    svn_cache_revision_dict = svn_cache_revision_dict_init(svn_metadata_cache)

    ## git processing actions
    # Process each SVN tag in order
    ordered_revisions = svn_cache_revision_dict.keys()
    ordered_revisions.sort(cmp=lambda x, y: cmp(int(x), int(y)))
    logger.info("Will process {0} SVN revisions in total".format(
        len(ordered_revisions)))
    counter = 0
    processed_tags = 0
    timing = []
    os.chdir(gitrepo)

    for rev in ordered_revisions:
        counter += 1
        start = time.time()
        logger.info("SVN Revsion {0} ({1} of {2})".format(
            rev, counter, len(ordered_revisions)))
        for pkg_tag in svn_cache_revision_dict[rev]:
            if get_flattened_git_tag(pkg_tag["package"], pkg_tag["tag"],
                                     rev) in current_git_tags:
                logger.info("Tag {0} exists already - skipping".format(
                    os.path.join(pkg_tag["package"], pkg_tag["tag"])))
                continue
            if args.targetbranch == "package":
                switch_to_branch(os.path.basename(pkg_tag["package"]),
                                 orphan=True)
            svn_co_tag_and_commit(
                svnroot,
                gitrepo,
                pkg_tag["package"],
                pkg_tag["tag"],
                svn_metadata_cache[os.path.basename(
                    pkg_tag["package"])]["svn"][pkg_tag["tag"]][rev],
                author_metadata_cache,
                svn_path_accept=svn_path_accept,
                svn_path_reject=svn_path_reject,
                package_veto=package_veto,
                license_text=license_text,
                license_path_accept=license_path_accept,
                license_path_reject=license_path_reject,
                uncrustify_config=args.uncrustify,
                uncrustify_path_accept=uncrustify_path_accept,
                uncrustify_path_reject=uncrustify_path_reject)
            processed_tags += 1
        elapsed = time.time() - start
        logger.info(
            "Processed revision {0} in {1:.2f}s ({2} package tags imported so far)".format(
                rev, elapsed, processed_tags))
        timing.append(elapsed)

    # Last task, clean all empty directories (git does not track these, but they are clutter)
    check_output_with_retry(("git", "clean", "-f", "-d"))

    if args.importtimingfile:
        os.chdir(start_cwd)
        with open(args.importtimingfile, "w") as time_file:
            json.dump(timing, time_file)
Example #25
def main():
    parser = argparse.ArgumentParser(description='git branch constructor')
    parser.add_argument('gitrepo', metavar='GITDIR',
                        help="Location of git repository")
    parser.add_argument('branchname',
                        help="Git branch name to build")
    parser.add_argument('tagfiles', metavar="TAGFILE", nargs="+", 
                        help="Tag files to use to build git branch from")
    parser.add_argument('--parentbranch', metavar="BRANCH:COMMIT or BRANCH:@TIMESTAMP",
                        help="If branch does not yet exist, use this BRANCH to make it from at COMMIT "
                        "(otherwise an orphaned branch is created). The syntax BRANCH:@TIMESTAMP will "
                        "find the commit closest to the given TIMESTAMP.")
    parser.add_argument('--baserelease', metavar="FILE",
                        help="For cache releases, use this tag file as the content of the base release on which "
                        "the release was a cache")
    parser.add_argument('--svnmetadata', metavar="FILE",
                        help="File with SVN metadata per SVN tag in the git repository. "
                        "By default GITREPO.svn.metadata will be used, if it exists.")
    parser.add_argument('--authorcachefile', metavar='FILE',
                        help="File containing cache of author name and email information - default '[gitrepo].author.metadata'")
    parser.add_argument('--skipreleasetag', action="store_true",
                        help="Do not create a git tag for this release, nor skip processing if a release tag "
                        "exists - use this option to add packages to a branch encapsulating an entire "
                        "release series, like 'master'.")
    parser.add_argument('--onlyforward', action="store_true",
                        help="Process tag files as usual, but never "
                        "downgrade a tag to a previous version. This can be used to reconstruct a master branch "
                        "that only goes forward in revision history (it is very useful for the initial master "
                        "branch constuction). In addition branch series releases that overlap with later releases "
                        "will not be imported so that (again) the master branch does not go backwards in time.")
    parser.add_argument("--commitdate", choices=["now", "release", "author"],
                        help="Strategy for setting git committer date: now - leave as current time; "
                        "release - set to time of the current release being processed; author - "
                        "set to author date, as found from SVN (default %(default)s)", default = "release")
    parser.add_argument('--debug', '--verbose', "-v", action="store_true",
                        help="Switch logging into DEBUG mode")
    parser.add_argument('--dryrun', action="store_true",
                        help="Perform no actions, but print what would happen")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)
        
    gitrepo = os.path.abspath(args.gitrepo)
    branch = args.branchname
    
    # If the onlyforward option is set, then we need to preprocess the list of tag content
    # files in order to ensure that we never jump back in time to a previous release series
    # when making the master branch. This is because the earlier release series will be
    # branched off from, and the later releases on that series only make sense on the
    # branch itself
    if args.onlyforward:
        logger.debug("Tag files before backskip filter: {0}".format(args.tagfiles))
        args.tagfiles = backskip_filter(args.tagfiles)
        logger.debug("Tag files after backskip filter: {0}".format(args.tagfiles))
    tag_files = [os.path.abspath(fname) for fname in args.tagfiles]
    
    # If we have a baserelease tag content, then load that here
    if args.baserelease:
        with open(args.baserelease) as br_tags_fh:
            base_tags = json.load(br_tags_fh)
    else:
        base_tags = None
        
    # Load SVN metadata cache - this is the fastest way to query the SVN ordering in which tags
    # were made
    if not args.svnmetadata:
        if os.access(args.gitrepo + ".svn.metadata", os.R_OK):
            args.svnmetadata = args.gitrepo + ".svn.metadata"
        else:
            logger.error("No SVN metadata cache found - cannot proceed")
            sys.exit(1)
    with open(args.svnmetadata) as cache_fh:
        svn_metadata_cache = json.load(cache_fh)
    logger.info("Loaded SVN metadata from {0}".format(args.svnmetadata))
    
    # Load author cache info
    if not args.authorcachefile:
        args.authorcachefile = args.gitrepo + ".author.metadata"
    try:
        with open(args.authorcachefile) as cache_fh:
            author_metadata_cache = json.load(cache_fh)
    except (IOError, OSError):
        logger.warning("No author metadata found - will proceed without")
        author_metadata_cache = {}
    
    # Main branch reconstruction function
    branch_builder(gitrepo, args.branchname, tag_files, svn_metadata_cache, author_metadata_cache, parentbranch=args.parentbranch, 
                   baserelease=base_tags, skipreleasetag=args.skipreleasetag, dryrun=args.dryrun, only_forward=args.onlyforward,
                   commit_date=args.commitdate)
Пример #26
0
def branch_builder(gitrepo, branch, tag_files, svn_metadata_cache, author_metadata_cache,
                   parentbranch=None, baserelease=None,
                   skipreleasetag=False, dryrun=False, only_forward=False, commit_date="now"):
    ## @brief Main branch builder function
    #  @param gitrepo The git repository location
    #  @param branch The git branch to work on
    #  @param tag_files The plain tag content files to process
    #  @param svn_metadata_cache The standard metadata cache from SVN
    #  @param author_metadata_cache Cached author data
    #  @param parentbranch If creating a new branch, this is the BRANCH:COMMIT_ID of where to make the new branch from
    #  @param baserelease For cache releases, the tag content of the base release on which the cache was built
    #  @param skipreleasetag If @c True then skip creating git tags for each processed release
    #  @param dryrun If @c True, do nothing except print commands that would have been executed
    #  @param only_forward If @c True then never revert a package to a previous version or import a branch tag 
    #  @param commit_date Choice for commit date when building branches
    
    # Prepare - chdir and then make sure we are on the correct branch
    os.chdir(gitrepo)
    prepare_branch_point(branch, parentbranch)            

    # Main loop starts here, with one pass for each tag file we are processing
    for tag_file in tag_files:
        with open(tag_file) as tag_file_fh:
            release_data = json.load(tag_file_fh)

        tag_list = get_current_git_tags(gitrepo)
        current_release_tags = get_current_release_tag_dict(tag_list, branch) # Markers for which packages have been processed
        logger.info("Processing release {0} ({1} current tags)".format(release_data["release"]["name"], len(current_release_tags)))
        release_tag = git_release_tag(release_data["release"], branch)
        if release_tag in tag_list and not skipreleasetag:
            logger.info("Release tag {0} already made - skipping".format(release_tag))
            continue

        if commit_date == "release":
            logger.info("Setting committer date to {0:.0f}".format(release_data["release"]["timestamp"]))
            os.environ["GIT_COMMITTER_DATE"] = "{0:.0f}".format(release_data["release"]["timestamp"])
        
        # Find which packages need to be updated for this new tag content file
        import_list, packages_considered = find_packages_for_update(release_data, tag_list, branch, 
                                                                    svn_metadata_cache, current_release_tags, only_forward)

        ## Sort the list of tags to be imported by SVN revision number for a
        #  more or less sensible package-by-package commit history
        sorted_import_revisions = sorted(import_list.keys(), key=int)

        ## Now loop over all the packages we have to import and update them
        pkg_processed = 0
        total_imports = sum(len(pkgs) for pkgs in import_list.values())
        for revision in sorted_import_revisions:
            for pkg_import in import_list[revision]:
                pkg_processed += 1
                do_package_import(pkg_import, svn_metadata_cache, author_metadata_cache, release_name=release_data["release"]["name"],
                                  branch=branch, dryrun=dryrun, commit_date=commit_date)
                logger.info("Processed {0}/{1} package imports".format(pkg_processed, total_imports))


        ## After all packages are updated, look for packages which were present in the last
        #  release, but not in this one, as these need to be removed
        new_current_release_tags = get_current_release_tag_dict(get_current_git_tags(gitrepo), branch) # Refreshed package list after the imports
        packages_to_remove = []
        packages_to_revert = {}
        for package_name, old_package_state in current_release_tags.iteritems():
            if package_name in packages_considered:
                logger.debug("Package {0} was processed for {1}".format(package_name, release_data["release"]["name"]))
                continue
            ## @note We have a package that was not "considered" in the current release.
            #  If we don't have a baserelease then this has been removed, so we zap it.
            #  If there is a baserelease...
            #   ...and this package is not in it, then it was in the cache but has been
            #      removed, so zap it.
            #   ...and this package is in it, then compare the versions and "revert" to the base
            #      release version if they are different.
            if baserelease:
                base_package_version = None
                for package, base_package_data in baserelease["tags"].iteritems():
                    if base_package_data["package_name"] == package_name:
                        base_package_version = base_package_data
                        break
                if base_package_version:
                    if base_package_version["svn_tag"] == old_package_state["svn_tag"]:
                        logger.debug("Package {0} remains at base release version {1}".format(base_package_data["package_name"],
                                                                                              base_package_version["svn_tag"]))
                        packages_considered.append(package_name) # Flag that we have dealt with this package
                    else:
                        logger.info("Package {0} was removed from cache - reverting to base "
                                    "release version {1}".format(base_package_data["package_name"],
                                                     base_package_version["svn_tag"]))
                        package_name = base_package_data["package_name"]
                        svn_meta_tag_key = os.path.join("tags", base_package_version["svn_tag"])
                        svn_revision = svn_metadata_cache[package_name]["svn"][svn_meta_tag_key].keys()[0]
                        git_import_tag = get_flattened_git_tag(package, base_package_version["svn_tag"], svn_revision)
                        packages_to_revert[package_name] = {"package": package,
                                                            "package_name": os.path.basename(package),
                                                            "git_import_tag": get_flattened_git_tag(package, base_package_version["svn_tag"], svn_revision),
                                                            "svn_tag": base_package_version["svn_tag"],
                                                            "svn_revision": svn_revision,
                                                            "branch_import_tag": get_flattened_git_tag(package, base_package_version["svn_tag"], svn_revision, branch),
                                                            "svn_meta_tag_key": svn_meta_tag_key,
                                                            "current_branch_import_tag": current_release_tags[package_name]["git_tag"]}
                else:
                    logger.info("Package {0} was removed from the cache and is not in the base release".format(package_name))
                    packages_to_remove.append(package_name)
            else:
                logger.info("Package {0} has been removed from the release".format(package_name))
                packages_to_remove.append(package_name)

        if baserelease:
            logger.info("{0} packages have been reverted to their base SVN state".format(len(packages_to_revert)))
            for package_name, revert_data in packages_to_revert.iteritems():
                do_package_import(revert_data, svn_metadata_cache, author_metadata_cache, release_name=release_data["release"]["name"], 
                                  branch=branch, dryrun=dryrun, commit_date=commit_date)

        logger.info("{0} packages have been removed from the release".format(len(packages_to_remove)))
        for package in packages_to_remove:
            logger.info("Removing {0} from {1}".format(package, branch))
            package_path = os.path.join(svn_metadata_cache[package]["path"], package)
            if not dryrun:
                recursive_delete(package_path)
            check_output_with_retry(("git", "add", "-A"), dryrun=dryrun)
            cmd = ["git", "commit", "--allow-empty", "-m", "{0} deleted from {1}".format(package_path, branch)]
            check_output_with_retry(cmd, dryrun=dryrun)
            check_output_with_retry(("git", "tag", "-d", current_release_tags[package]["git_tag"]), retries=1, dryrun=dryrun)
            pkg_processed += 1

        ## Now, finally, tag the release as done
        if not skipreleasetag:
            if release_data["release"]["nightly"]:
                check_output_with_retry(("git", "tag", release_tag), retries=1, dryrun=dryrun)
            else:
                check_output_with_retry(("git", "tag", release_tag, "-a",
                                         "-m", "Tagging release {0}".format(release_data["release"]["name"])), 
                                        retries=1, dryrun=dryrun)
            logger.info("Tagged release {0} as {1} "
                        "({2} packages processed)".format(release_data["release"]["name"],
                                                          release_tag, pkg_processed))
        else:
            logger.info("Processed release {0} (no tag; {1} packages processed)".format(release_data["release"]["name"], pkg_processed))
Example #27
0
def do_package_import(pkg_import, svn_metadata_cache, author_metadata_cache, release_name="unknown", branch="unknown", 
                      dryrun=False, commit_date="now"):
    ## @brief Import a package's SVN tag onto the current git branch,
    #  updating the corresponding git tags
    #  @param pkg_import package import dictionary (see find_packages_for_update for the
    #  structure)
    #  @param svn_metadata_cache The standard metadata cache from SVN
    #  @param author_metadata_cache Cached author data
    #  @param release_name Name of current release being built (used only for generating log messages)
    #  @param branch Current branch name (used only for generating log messages)
    #  @param dryrun Boolean, if @c True then don't actually act
    #  @param commit_date Choice for setting the committer date
    logger.info("Migrating {0} from {1} to {2} for {3}...".format(pkg_import["package"],
                                                                  pkg_import["current_branch_import_tag"],
                                                                  pkg_import["svn_tag"], release_name))
    # Need to wipe out all contents in case files were removed from package
    if not dryrun:
        recursive_delete(pkg_import["package"])
    check_output_with_retry(("git", "checkout", pkg_import["git_import_tag"], pkg_import["package"]), dryrun=dryrun)
    # Splat the ChangeLog file - we do not want these on the production branches
    try:
        os.remove(os.path.join(pkg_import["package"], "ChangeLog"))
    except OSError:
        pass
    # Done - now commit and tag
    if logger.level <= logging.DEBUG:
        cmd = ["git", "status"]
        logger.debug(check_output_with_retry(cmd))
    check_output_with_retry(("git", "add", "-A", pkg_import["package"]), dryrun=dryrun)
    staged = check_output_with_retry(("git", "diff", "--name-only", "--staged"), dryrun=dryrun)
    if len(staged) == 0 and (not dryrun): 
        # Nothing staged, so skip doing any commit, but do make the import tag for this branch
        # so that we don't repeat this step again
        logger.warning("Package {0} - no changes staged for {1}, " 
                       "git tagging and skipping commit".format(pkg_import["package"], release_name))
        check_output_with_retry(("git", "tag", pkg_import["branch_import_tag"]), retries=1, dryrun=dryrun)
        return

    rev_meta = svn_metadata_cache[pkg_import["package_name"]]["svn"][pkg_import["svn_meta_tag_key"]][pkg_import["svn_revision"]]
    msg = rev_meta["msg"]
    if pkg_import["svn_tag"] == "trunk":
        msg += " (trunk r{0})".format(rev_meta["revision"])
    else:
        msg += " ({0})".format(pkg_import["svn_tag"])
    cl_diff = changelog_diff(pkg_import["package"],
                             from_tag="/".join(pkg_import["current_branch_import_tag"].split("/")[1:]) if pkg_import["current_branch_import_tag"] else None,
                             to_tag=pkg_import["git_import_tag"])
    if cl_diff:
        msg += "\n\n" + "\n".join(cl_diff)
    cmd = ["git", "commit", "-m", msg]
    author = author_string(rev_meta["author"], author_metadata_cache)
    cmd.append("--author={0}".format(author))  # list exec, so no extra shell quoting needed
    cmd.append("--date={0}".format(rev_meta["date"]))
    
    if commit_date == "author":
        os.environ["GIT_COMMITTER_DATE"] = rev_meta["date"]
    check_output_with_retry(cmd, retries=1, dryrun=dryrun)
    if commit_date == "author":
        del os.environ["GIT_COMMITTER_DATE"]
    
    check_output_with_retry(("git", "tag", pkg_import["branch_import_tag"]), retries=1, dryrun=dryrun)
    if pkg_import["current_branch_import_tag"]:
        check_output_with_retry(("git", "tag", "-d", pkg_import["current_branch_import_tag"]), retries=1, dryrun=dryrun)
    logger.info("Committed {0} ({1}) onto {2} for {3}".format(pkg_import["package"], 
                                                              pkg_import["svn_tag"], branch, release_name))
Example #28
0
def svn_co_tag_and_commit(svnroot, gitrepo, package, tag, svn_metadata=None, author_metadata_cache=None, branch=None,
                          svn_path_accept=[], svn_path_reject=[], package_veto=[], commit=True, revision=None,
                          license_text=None, license_path_accept=[], license_path_reject=[],
                          uncrustify_config=None, uncrustify_path_accept=[], uncrustify_path_reject=[]):
    ## @brief Make a temporary space, check out from svn, clean-up, copy and then git commit and tag
    #  @param svnroot Base path to SVN repository
    #  @param gitrepo Path to git repository to import to
    #  @param package Path to package root (in git and svn)
    #  @param tag Package tag to import (i.e., path after base package path)
    #  @param svn_metadata SVN metadata cache
    #  @param author_metadata_cache Author name/email cache
    #  @param branch Git branch to switch to before import
    #  @param svn_path_accept Paths to force import to git
    #  @param svn_path_reject Paths to force reject from the import
    #  @param package_veto List of packages to just plain refuse to handle
    #  @param commit Boolean flag to manage commit (can be set to @c False to only checkout and process)
    #  @param license_text List of strings containing the license text to add (if @c False, then no
    #  license file is added)
    #  @param revision Force SVN revision number (useful for svnpull.py, where
    #  no svn metadata is available)
    #  @param license_path_accept Paths to force include in license file addition
    #  @param license_path_reject Paths to exclude from license file addition
    #  @param uncrustify_config Uncrustify configuration file
    #  @param uncrustify_path_accept Paths to force uncrustify to run on
    #  @param uncrustify_path_reject Paths to exclude from uncrustify
    if package in package_veto:
        logger.info("Package {0} is vetoed - skipping import".format(package))
        return

    msg = "Importing SVN path {0}/{1} to {0}".format(package, tag)
    if svn_metadata and tag == "trunk":
        msg += " (r{0})".format(svn_metadata["revision"])
    logger.info(msg)

    if branch:
        logger.info("Switching to branch {0}".format(branch))
        switch_to_branch(branch)

    tempdir = tempfile.mkdtemp()
    full_svn_path = os.path.join(tempdir, package)
    cmd = ["svn", "checkout"]
    if revision:
        cmd.extend(["-r", str(revision)])
    elif svn_metadata:
        cmd.extend(["-r", svn_metadata["revision"]])
    cmd.extend([os.path.join(svnroot, package, tag), os.path.join(tempdir, package)])
    check_output_with_retry(cmd, retries=1, wait=3)

    # Clean out directory of things we don't want to import
    svn_cleanup(full_svn_path, svn_co_root=tempdir,
                svn_path_accept=svn_path_accept, svn_path_reject=svn_path_reject)
    
    # If desired, inject a licence into the source code
    if license_text:
        svn_license_injector(full_svn_path, svn_co_root=tempdir, license_text=license_text,
                             license_path_accept=license_path_accept, license_path_reject=license_path_reject)

    # Pass C++ sources through uncrustify
    if uncrustify_config:
        uncrustify_sources(full_svn_path, svn_co_root=tempdir, uncrustify_config=uncrustify_config,
                           uncrustify_path_accept=uncrustify_path_accept, uncrustify_path_reject=uncrustify_path_reject)

    # Copy to git
    full_git_path = os.path.join(gitrepo, package)
    package_root, package_name = os.path.split(full_git_path)
    try:
        if os.path.isdir(full_git_path):
            shutil.rmtree(full_git_path, ignore_errors=True)
        os.makedirs(package_root)
    except OSError:
        pass
    shutil.move(full_svn_path, package_root)

    if commit:
        # get ChangeLog diff
        cl_diff = changelog_diff(package)

        # Commit
        check_output_with_retry(("git", "add", "-A", package))
        if logger.level <= logging.DEBUG:
            logger.debug(check_output_with_retry(("git", "status")))


        cmd = ["git", "commit", "--allow-empty", "-m", "{0} ({1} - r{2})".
               format(svn_metadata['msg'], tag.replace('tags/','',1), svn_metadata['revision'])]
        if svn_metadata:
            cmd.extend(("--author='{0}'".format(author_string(svn_metadata["author"], author_metadata_cache)),
                        "--date={0}".format(svn_metadata["date"])))
        if cl_diff:
            cmd.extend(("-m", "Diff in ChangeLog:\n" + '\n'.join(cl_diff)))
        check_output_with_retry(cmd)
        cmd = ["git", "tag", "-a", get_flattened_git_tag(package, tag, svn_metadata["revision"]), "-m", ""]
        check_output_with_retry(cmd)

    # Clean up
    shutil.rmtree(tempdir)
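
Most git and svn invocations in these examples go through check_output_with_retry(). Its implementation is not shown here; the sketch below is one plausible shape, assuming only the retries/wait/dryrun keywords visible at the call sites.

import logging
import subprocess
import time

logger = logging.getLogger(__name__)

def check_output_with_retry_sketch(cmd, retries=2, wait=10, dryrun=False):
    # Honour dry runs by only logging the command
    if dryrun:
        logger.info("Dry run: {0}".format(" ".join(cmd)))
        return ""
    for attempt in range(retries + 1):
        try:
            return subprocess.check_output(cmd)
        except subprocess.CalledProcessError:
            if attempt == retries:
                raise
            time.sleep(wait)  # back off, then retry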
Example #29
0
def svn_co_tag_and_commit(svnroot,
                          gitrepo,
                          package,
                          tag,
                          full_clobber=True,
                          svn_path_accept=[],
                          svn_path_reject=[],
                          revision=None,
                          license_text=None,
                          license_path_accept=[],
                          license_path_reject=[]):
    ## @brief Make a temporary space, check out from svn, clean-up and copy into git checkout
    #  @param svnroot Base path to SVN repository
    #  @param gitrepo Path to git repository to import to
    #  @param package Path to package root (in git and svn)
    #  @param tag Package tag to import (i.e., path after base package path)
    #  @param full_clobber If @c True then all current files are deleted, if false then
    #  only newly imported files are copied to checkout
    #  @param svn_path_accept Paths to force import to git
    #  @param svn_path_reject Paths to force reject from the import
    #  @param license_text List of strings containing the license text to add (if @c False, then no
    #  license file is added)
    #  @param revision Force SVN revision number
    #  @param license_path_accept Paths to force include in license file addition
    #  @param license_path_reject Paths to exclude from license file addition
    msg = "Importing SVN path {0}/{1} to {2}/{0}".format(package, tag, gitrepo)
    logger.info(msg)

    tempdir = tempfile.mkdtemp()
    full_svn_path = os.path.join(tempdir, package)
    cmd = ["svn", "checkout"]
    if revision:
        cmd.extend(["-r", str(revision)])
    cmd.extend(
        [os.path.join(svnroot, package, tag),
         os.path.join(tempdir, package)])
    check_output_with_retry(cmd, retries=1, wait=3)

    # Clean out directory of things we don't want to import
    svn_cleanup(full_svn_path,
                svn_co_root=tempdir,
                svn_path_accept=svn_path_accept,
                svn_path_reject=svn_path_reject)

    # If desired, inject a licence into the source code
    if license_text:
        svn_license_injector(full_svn_path,
                             svn_co_root=tempdir,
                             license_text=license_text,
                             license_path_accept=license_path_accept,
                             license_path_reject=license_path_reject)

    # Copy to git
    full_git_path = os.path.join(gitrepo, package)
    package_root, package_name = os.path.split(full_git_path)
    if full_clobber:
        try:
            # We need to be a little more sophisticated here,
            # as the doxygen change on the master branch of
            # git rewrote docs/mainpage.h to docs/packagedoc.h.
            # So we do need to keep that file at least, if it is
            # not in the SVN pull area
            pkgdoc = os.path.join(package, "doc", "packagedoc.h")
            if os.access(pkgdoc, os.R_OK):
                dest_dir = os.path.join(full_svn_path, "doc")
                if not os.access(os.path.join(dest_dir, "packagedoc.h"),
                                 os.R_OK):
                    try:
                        os.makedirs(os.path.join(full_svn_path, "doc"))
                    except OSError:
                        pass
                    shutil.copy2(pkgdoc, dest_dir)
                    logger.info(
                        "Doxygen file packagedoc.h was backed up before overwrite"
                    )
            shutil.rmtree(full_git_path, ignore_errors=True)
            os.makedirs(package_root)
        except OSError:
            pass
        logger.info("Replacing complete current git checkout with {0}".format(
            os.path.join(package, tag)))
        shutil.move(full_svn_path, package_root)
    else:
        for root, dirs, files in os.walk(full_svn_path):
            for name in files:
                src_filename = os.path.join(root, name)
                dst_filename = src_filename[len(tempdir) + 1:]
                logger.info(
                    "Pulling {0} into git checkout".format(dst_filename))
                dst_dir = os.path.dirname(dst_filename)
                if dst_dir and not os.path.isdir(dst_dir):
                    os.makedirs(dst_dir)
                shutil.copy2(src_filename, dst_filename)

    # Clean up
    shutil.rmtree(tempdir)
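
The incremental copy above turns an absolute checkout path back into a repository-relative destination by stripping the tempdir prefix plus the path separator. With invented paths:

tempdir = "/tmp/tmpAbC123"                                   # invented
src_filename = "/tmp/tmpAbC123/Tools/Scripts/share/run.py"   # invented
dst_filename = src_filename[len(tempdir) + 1:]
print(dst_filename)  # -> Tools/Scripts/share/run.py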
Example #30
0
            sys.exit(1)

    base_path, releases, project_path = find_cmake_releases(
        args.installpath, args.release, nightly=args.nightly, arch=args.arch)
    for release in releases:
        release_description = get_cmake_release_data(base_path,
                                                     args.release,
                                                     release,
                                                     project_path,
                                                     nightly=args.nightly)
        logger.debug("Release {0} parsed as {1}/PROJECT/{2}".format(
            base_path, release, project_path))
        release_tags = find_cmake_tags(base_path, release, project_path)
        output_file = os.path.join(args.tagdir, release_description["name"])
        if args.overwrite or not os.path.exists(output_file):
            with open(output_file, "w") as tag_output:
                my_release_data = {
                    "release": release_description,
                    "tags": release_tags
                }
                json.dump(my_release_data, tag_output, indent=2)
                logger.info("Wrote {0}".format(output_file))
        else:
            logger.debug("Skipped writing to {0} - overwrite is false".format(
                output_file))


if __name__ == '__main__':
    main()
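
For reference, the tag content files written above (and consumed by branch_builder in Example #26) are JSON carrying at least the fields referenced throughout these examples; the concrete values below are invented.

example_release_data = {
    "release": {
        "name": "21.0.1",           # invented release name
        "timestamp": 1490000000.0,  # seconds since the epoch
        "nightly": False,
    },
    "tags": {
        "Tools/PyJobTransforms": {                  # package path
            "svn_tag": "PyJobTransforms-01-02-03",  # invented SVN tag
            "package_name": "PyJobTransforms",
        },
    },
}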