Example #1
def changelog_diff(package, from_tag=None, to_tag=None):
    ## @brief Return a cleaned up ChangeLog diff - this is only as useful as what the developer wrote.
    #  If @c from_tag and @c to_tag are given then the diff is done with these references, otherwise
    #  a diff in place is done
    #  @param package Path to package
    #  @param from_tag Import tag to use as the original ChangeLog version
    #  @param to_tag Import tag to use as the updated ChangeLog version
    #  @return ChangeLog diff (truncated if needed)
    truncate_lines = 20
    o_lines = []
    logger.debug("Finding ChangeLog diff for {0} (from {1} to {2})".format(
        package, from_tag, to_tag))
    cl_file = os.path.join(package, 'ChangeLog')
    cmd = ["git", "diff", "-U0"]
    if from_tag and to_tag:
        cmd.append(from_tag + ".." + to_tag)
    elif to_tag:
        cmd.append(to_tag)
    cmd.extend(("--", cl_file))
    try:
        o_lines = check_output_with_retry(cmd, retries=1).split("\n")
        o_lines = [
            line.lstrip("+").decode('ascii', 'ignore') for line in o_lines[6:]
            if line.startswith("+")
            and not re.search(r"(\s[MADR]\s+[\w\/\.]+)|(@@)", line)
        ]
        if len(o_lines) > truncate_lines:
            o_lines = o_lines[:truncate_lines]
            o_lines.append("...")
            o_lines.append("(Long ChangeLog diff - truncated)")
    except RuntimeError:
        o_lines = ["No ChangeLog diff available"]
    logger.debug("Found {0} line ChangeLog diff".format(len(o_lines)))
    return o_lines
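
A minimal usage sketch (the package path and tag names here are hypothetical, and assume the import tags already exist in the git checkout):

diff_lines = changelog_diff("Tools/PyJobTransforms",
                            from_tag="import/Tools/PyJobTransforms-00-01-01",
                            to_tag="import/Tools/PyJobTransforms-00-01-02")
for line in diff_lines:
    print line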
Example #2
def find_nicos_from_base(nicos_path, base_release):
    ## @brief Find base release and cache release tag files when only a base release number
    #  is given
    #  @param nicos_path Base path to NICOS tag file area
    #  @param base_release Base release number A.B or A.B.X (e.g., 21.0[.1])
    #  @return list of matching tag files, in release numbered order
    release_list = []
    dir_list = os.listdir(nicos_path)
    release_match = r"{0}(\.(\d+))*$".format(
        os.path.basename(base_release).replace(".", r"\."))
    logger.debug(
        "Matching releases against pattern '{0}'".format(release_match))
    for entry in dir_list:
        if re.match(release_match, entry):
            release_list.append(entry)
    logger.debug("Matching releases: {0}".format(release_list))
    # It's not actually necessary to sort the releases, but it does no harm
    release_list.sort(cmp=release_compare)
    logger.info(
        "Found ordered list of production caches: {0}".format(release_list))

    tag_files = []
    for release in release_list:
        tag_files.append(get_tag_file(os.path.join(nicos_path, release)))

    return tag_files
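
release_compare is defined elsewhere in these tools; purely for illustration, a minimal Python 2 comparator consistent with the sort above might look like this (an assumption, not the original implementation):

def release_compare(rel1, rel2):
    # Hypothetical sketch: order dotted release strings numerically,
    # element by element, so that "21.0.10" sorts after "21.0.9"
    return cmp([int(e) for e in rel1.split(".")],
               [int(e) for e in rel2.split(".")])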
Example #3
def find_cmake_tags(base_path, release, project_path):
    ## @brief Find the tags that went into a CMake release found
    #  at the path specified
    #  @param base_path Starting base path for the release number and flavour
    #  @param release The Athena release number
    #  @param project_path The path element inside each project where the
    #  project is installed
    release_packages = {}
    project_directories = [
        dir for dir in os.listdir(base_path)
        if dir.startswith("Atlas") or dir == "DetCommon"
    ]
    for project in project_directories:
        packages_file = os.path.join(base_path, project, release, project_path,
                                     "packages.txt")
        if not os.path.exists(packages_file):
            logger.warning(
                "Project packages file {0} doesn't exist - skipping this project"
                .format(packages_file))
            continue
        project_packages = read_project_packages(packages_file, project)
        logger.debug("Found {0} packages for project {1}".format(
            len(project_packages), project))
        release_packages.update(project_packages)
    logger.info("Found {0} packages in release {1}".format(
        len(release_packages), release))
    return release_packages
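
read_project_packages is also defined elsewhere; as a rough illustration only, a sketch compatible with the call above might be the following (the one-entry-per-line packages.txt format and the value dictionary keys are assumptions, not confirmed by the source):

def read_project_packages(packages_file, project):
    # Hypothetical sketch - assumes each line of packages.txt holds a full
    # package path followed by its version tag
    project_packages = {}
    with open(packages_file) as pfile:
        for line in pfile:
            tokens = line.split()
            if len(tokens) < 2 or tokens[0].startswith("#"):
                continue
            package_path, version = tokens[0], tokens[1]
            project_packages[package_path] = {"svn_tag": version,
                                              "project": project,
                                              "package_name": os.path.basename(package_path)}
    return project_packages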
Example #4
def find_best_tagfile(arch_path):
    ## @brief Find the newest tag file when various NICOS tag files are available
    #  for a particular release
    #  @param arch_path Directory path to NICOS tag files
    #  @return Chosen tag file
    tag_files = os.listdir(arch_path)
    if len(tag_files) == 0:
        raise RuntimeError("No tag files found in {0}".format(arch_path))
    tag_files.sort()
    logger.debug("Best tagfile for {0} is {1} (chosen from {2})".format(
        arch_path, tag_files[-1], len(tag_files)))
    return tag_files[-1]
Example #5
def main():
    parser = argparse.ArgumentParser(
        description=
        'ATLAS CMT tag parser, grabbing tag content for a CMT cache release. '
        'This is quite a hacky script, only filling in the gaps in NICOS knowledge for '
        'ATLAS P1HLT caches.')
    parser.add_argument('release',
                        metavar='RELEASE',
                        help="CMT requirements file to parse")
    parser.add_argument(
        '--tagdir',
        default="tagdir",
        help=
        "output directory for tag files, each release will generate an entry here (default \"tagdir\")"
    )
    parser.add_argument(
        '--overwrite',
        action="store_true",
        default=False,
        help=
        "Overwrite any existing configuration files (otherwise, just skip over)"
    )
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    release_description = parse_release_data(args.release)
    release_tags = parse_tag_file(args.release)
    logger.info("Processing tags for release {0}".format(
        release_description["name"]))
    output_file = os.path.join(args.tagdir, release_description["name"])
    if args.overwrite or not os.path.exists(output_file):
        with open(output_file, "w") as tag_output:
            json.dump({
                "release": release_description,
                "tags": release_tags
            },
                      tag_output,
                      indent=2)
    else:
        logger.debug(
            "Skipped writing to {0} - overwrite is false".format(output_file))
Example #6
def svn_cleanup(svn_path, svn_co_root, svn_path_accept=[], svn_path_reject=[]):
    ## @brief Clean out files we do not want to import into git
    #  @param svn_path Full path to checkout of SVN package
    #  @param svn_co_root Base directory of SVN checkout
    #  @param svn_path_accept List of file path globs to always import to git
    #  @param svn_path_reject List of file path globs to never import to git

    # File size veto
    for root, dirs, files in os.walk(svn_path):
        if ".svn" in dirs:
            shutil.rmtree(os.path.join(root, ".svn"))
            dirs.remove(".svn")
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_accept_match = False
            for filter in svn_path_accept:
                if re.match(filter, svn_filename):
                    logger.info("{0} imported from globbed exception {1}".format(svn_filename, filter.pattern))
                    path_accept_match = True
                    break
            if path_accept_match:
                continue
            try:
                # Rejection always takes precedence
                rejected = False
                for filter in svn_path_reject:
                    if re.match(filter, svn_filename):
                        logger.info("{0} not imported due to {1} filter".format(svn_filename, filter.pattern))
                        os.remove(filename)
                        rejected = True
                        break
                if rejected:
                    continue

                if os.lstat(filename).st_size > 100 * 1024:
                    if "." in name and name.rsplit(".", 1)[1] in ("cxx", "py", "h", "java", "cc", "c", "icc", "cpp",
                                                                  "hpp", "hh", "f", "F"):
                        logger.info("Source file {0} is too large, but importing anyway (source files always imported)".format(filename))
                    else:
                        logger.info("File {0} is too large - not importing".format(filename))
                        os.remove(filename)
                        continue
                if name.startswith("."):
                    logger.info("File {0} starts with a '.' - not importing".format(filename))
                    os.remove(filename)
                    continue

            except OSError as e:
                logger.debug("Got OSError (usually harmless) treating {0}: {1}".format(filename, e))
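
A usage sketch combining this with load_exceptions_file from these tools (the checkout paths are illustrative):

svn_path_accept, svn_path_reject = load_exceptions_file("atlasoffline-exceptions.txt")
svn_cleanup("/tmp/svn-co/Tools/PyJobTransforms", "/tmp/svn-co",
            svn_path_accept=svn_path_accept, svn_path_reject=svn_path_reject)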
Example #7
def search_from(source_lines, re_string, start_idx=0, max=20, backwards=False):
    '''Flexibly search for a regexp in a list of lines'''
    current_idx = start_idx
    for i in range(max):
        if re.search(re_string, source_lines[current_idx]):
            logger.debug("Found search line {0} at index {1}".format(
                re_string, current_idx))
            return current_idx
        if backwards:
            current_idx -= 1
        else:
            current_idx += 1
        if current_idx < 0 or current_idx == len(source_lines):
            break

    # Not found
    return -1
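
For example, to locate a "use" statement scanning forward from the top of a file (the data here is illustrative):

lines = ["package PyJobTransforms", "", "use AtlasPolicy AtlasPolicy-01-02-03 Control"]
idx = search_from(lines, r"^use\s", start_idx=0, max=10)
if idx != -1:
    print "Found at index", idx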
Example #8
def get_cmake_release_data(base_path,
                           base_release,
                           release,
                           project_path,
                           nightly=None):
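    ## @brief Parse release data for a CMake release from its install path
    #  @param base_path Starting base path for the release number and flavour
    #  @param base_release Base release number (used to name nightlies)
    #  @param release The release number of the release to parse
    #  @param project_path The path element inside each project where the project is installed
    #  @param nightly Nightly tag, if this is a nightly build
    #  @return Dictionary of values for the different release properties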
    logger.debug("Parsing release data for {0} - {1} - {2} - {3} - {4}".format(
        base_path, base_release, release, project_path, nightly))
    if nightly:
        release_number_elements = base_release.split(".")
    else:
        release_number_elements = release.split(".")
    series = release_number_elements[0]
    flavour = release_number_elements[1]
    major = release_number_elements[2]
    if len(release_number_elements) == 3:
        release_type = "base"
        minor = None
        subminor = None
    elif len(release_number_elements) == 4:
        release_type = "cache"
        minor = release_number_elements[3]
        subminor = None
    else:
        raise RuntimeError("Weird release: {0}".format(".".join(release_number_elements)))

    sample_project = find_cmake_sample_project(base_path, release)
    timestamp = os.stat(
        os.path.join(base_path, sample_project, release,
                     "InstallArea")).st_mtime

    release_desc = {
        "name": release,
        "series": series,
        "flavour": flavour,
        "major": major,
        "minor": minor,
        "subminor": subminor,
        "type": release_type,
        "timestamp": timestamp,
        "nightly": False,
        "author": "ATLAS Librarian <*****@*****.**>"
    }
    if nightly:
        release_desc["nightly"] = True
        release_desc["name"] = base_release + "-" + time.strftime(
            "%Y-%m-%d", time.localtime(timestamp)) + "-" + nightly
    logger.debug(release_desc)
    return release_desc
Example #9
def main():
    parser = argparse.ArgumentParser(
        description='Merge releases to create a super-release')
    parser.add_argument('targetrelease',
                        metavar='RELEASE',
                        help="Target release")
    parser.add_argument('mergerelease',
                        metavar='RELEASE',
                        nargs="+",
                        help="Releases to merge into target")
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    with open(args.targetrelease) as target:
        target_release_data = json.load(target)

    for release in args.mergerelease:
        with open(release) as merge:
            merge_release_data = json.load(merge)
        for package_path, package_data in merge_release_data["tags"].iteritems():
            if package_path not in target_release_data["tags"]:
                target_release_data["tags"][package_path] = package_data
                logger.info("Merged {0} at tag {1} from {2}".format(
                    package_path, package_data["svn_tag"], release))
            else:
                logger.debug("Package {0} already exists in target".format(
                    package_path))

    try:
        os.rename(args.targetrelease, args.targetrelease + ".bak")
        with open(args.targetrelease, "w") as output_fh:
            json.dump(target_release_data, output_fh, indent=2)
    except OSError as e:
        logger.error("Error while rewriting target file {0}: {1}".format(
            args.targetrelease, e))
Example #10
def parse_release_data(release_file_path, name_prefix=None):
    ## @brief Parse release data from the NICOS tag file
    #  @param release_file_path Path to file with NICOS tags for the release of interest
    #  @return Dictionary of values for the different release properties
    timestamp = os.stat(release_file_path).st_mtime
    release_name, nightly_flag = get_release_name(release_file_path)
    if name_prefix:
        full_name = name_prefix + "-" + release_name
    else:
        full_name = release_name
    release_elements = release_name.split(".")
    if len(release_elements) < 3:
        raise RuntimeError("Weird release: {0}".format(release_name))
    if len(release_elements) == 3:
        rel_type = "base"
        minor = None
        subminor = None
    elif len(release_elements) == 4:
        rel_type = "cache"
        minor = release_elements[3]
        subminor = None
    else:
        raise RuntimeError("Weird release: {0}".format(release_name))
    release_desc = {
        "name": full_name,
        "series": release_elements[0],
        "flavour": release_elements[1],
        "major": release_elements[2],
        "minor": minor,
        "subminor": subminor,
        "type": rel_type,
        "timestamp": timestamp,
        "nightly": nightly_flag,
        "author": "ATLAS Librarian <*****@*****.**>"
    }
    if nightly_flag:
        release_desc["name"] += "-{0}".format(
            time.strftime("%Y-%m-%d", time.localtime(timestamp)))
    logger.debug(release_desc)
    return release_desc
Example #11
def load_exceptions_file(filename, reject_changelog=False):
    ## @brief Parse and return path globbing exceptions file
    #  @param filename File containing exceptions
    #  @param reject_changelog Special flag used by svnpull to ensure that
    #  ChangeLog files are rejected (in a normal svn2git import they are
    #  accepted onto the import branches, but then specially excluded from
    #  the release branches)
    #  @return Tuple of path globs to accept and globs to reject, converted to regexps
    path_accept = []
    path_reject = []
    if filename != "NONE":
        with open(filename) as filter_file:
            logger.info("Loaded import exceptions from {0}".format(filename))
            for line in filter_file:
                line = line.strip()
                if reject_changelog and ("ChangeLog" in line):
                    logger.debug(
                        "Found ChangeLog line, which will be forced to reject: {0}"
                        .format(line))
                    line = "- */ChangeLog"
                if line.startswith("#") or line == "":
                    continue
                if line.startswith("-"):
                    path_reject.append(
                        re.compile(fnmatch.translate(line.lstrip("- "))))
                else:
                    path_accept.append(
                        re.compile(fnmatch.translate(line.lstrip("+ "))))
    logger.debug("Glob accept: {0}".format([m.pattern for m in path_accept]))
    logger.debug("Glob reject: {0}".format([m.pattern for m in path_reject]))
    return path_accept, path_reject
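
The file format implied by the parser: one path glob per line, a leading "-" rejects, anything else (conventionally a leading "+") accepts, and "#" starts a comment. An illustrative file:

# Example import exceptions (illustrative entries)
+ Tools/PyJobTransforms/share/*
- */test/data/*
- */*.root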
Example #12
def uncrustify_sources(svn_path, svn_co_root, uncrustify_config, uncrustify_path_accept, uncrustify_path_reject):
    ## @brief Uncrustify code before import
    #  @param svn_path Filesystem path to the SVN checkout
    #  @param svn_co_root Base directory of SVN checkout
    #  @param uncrustify_config Config file for uncrustify pass
    #  @param uncrustify_path_accept Paths to force
    #  @param uncrustify_path_reject Paths to exclude

    for root, dirs, files in os.walk(svn_path):
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_veto = False
            for filter in uncrustify_path_reject:
                if re.match(filter, svn_filename):
                    logger.debug("File {0} will not go through uncrustify (matched {1})".format(svn_filename, filter.pattern))
                    path_veto = True
                    break
            for filter in uncrustify_path_accept:
                if re.match(filter, svn_filename):
                    logger.debug("File {0} will be passed to uncrustify (matched {1})".format(svn_filename, filter.pattern))
                    path_veto = False
                    break
            if path_veto:
                continue
            extension = filename.rsplit(".", 1)[1] if "." in filename else ""
            if extension in ("cxx", "cpp", "icc", "cc", "c", "C", "h", "hpp", "hh"):
                logger.debug("Uncrustifying {0}".format(filename))
                cmd = ("uncrustify", "-c", uncrustify_config, "--no-backup", "-l", "CPP", filename)
                # We do not consider uncrustify errors as fatal for the import... this can
                # happen because of a source file issue or picking the wrong language
                try:
                    check_output_with_retry(cmd, retries=0)
                except RuntimeError:
                    logger.warning("Uncrustify failed on {0}".format(filename))
Example #13
def check_output_with_retry(cmd,
                            retries=2,
                            wait=10,
                            ignore_fail=False,
                            dryrun=False):
    ## @brief Multiple attempt wrapper for subprocess.check_output (especially remote SVN commands can bork)
    #  @param cmd list or tuple of command line parameters
    #  @param retries Number of attempts to execute successfully
    #  @param wait Sleep time after an unsuccessful execution attempt
    #  @param ignore_fail Do not raise an exception if the command fails
    #  @param dryrun If @c True do not actually execute the command, only print it and return an empty string
    #  @return String containing command output
    if dryrun:
        logger.info("Dryrun mode: {0}".format(cmd))
        return ""
    success = failure = False
    tries = 0
    start = time.time()
    while not success and not failure:
        tries += 1
        try:
            logger.debug("Calling {0}".format(cmd))
            output = subprocess.check_output(cmd)
            success = True
        except subprocess.CalledProcessError:
            if ignore_fail:
                success = True
                output = ""
                continue
            logger.warning("Attempt {0} to execute {1} failed".format(
                tries, cmd))
            if tries > retries:
                failure = True
            else:
                time.sleep(wait)
    if failure:
        raise RuntimeError("Repeated failures to execute {0}".format(cmd))
    logger.debug("Executed in {0}s".format(time.time() - start))
    return output
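
A usage sketch (the SVN URL is illustrative):

# Three attempts in total (initial try plus 2 retries), waiting 5s between failures
svn_info = check_output_with_retry(("svn", "info", "https://svn.example.org/reps/atlas"),
                                   retries=2, wait=5)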
Example #14
def find_git_root():
    ## @brief Search for a git repository root from cwd
    found_git = False
    start = searching = os.getcwd()
    while True:
        if os.path.isdir(".git"):
            found_git = True
            break
        else:
            os.chdir("..")
            if os.getcwd() == searching:  # Hit filesystem root
                break
            searching = os.getcwd()

    os.chdir(start)
    if found_git and git_repo_ok(os.path.join(searching, ".git")):
        logger.debug(
            "Found .git for repository root here: {0}".format(searching))
        return searching

    logger.debug("No valid .git found searching upwards from {0}".format(start))
    return None
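
A usage sketch:

repo_root = find_git_root()
if repo_root is None:
    logger.error("Not inside a valid git repository")
    sys.exit(1)
os.chdir(repo_root)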
Example #15
def parse_release_data(release_file_path):
    ## @brief Parse release data from the CMT requirements file
    #  @param release_file_path Path to file for the release of interest
    #  @return Dictionary of values for the different release properties
    timestamp = os.stat(release_file_path).st_mtime
    path_elements = release_file_path.split("/")
    release_name = path_elements[-4]  # This is a bit hacky - limited use case only
    release_elements = release_name.split(".")
    if len(release_elements) < 3:
        raise RuntimeError("Weird release: {0}".format(release_name))
    if len(release_elements) == 3:
        rel_type = "base"
        minor = None
        subminor = None
    elif len(release_elements) == 4:
        rel_type = "cache"
        minor = release_elements[3]
        subminor = None
    else:
        raise RuntimeError("Weird release: {0}".format(release_name))
    release_desc = {
        "name": release_name,
        "series": release_elements[0],
        "flavour": release_elements[1],
        "major": release_elements[2],
        "minor": minor,
        "subminor": subminor,
        "type": rel_type,
        "timestamp": timestamp,
        "nightly": False,
        "author": "ATLAS Librarian <*****@*****.**>"
    }
    logger.debug(release_desc)
    return release_desc
Example #16
def parse_tag_file(release_file_path):
    ## @brief Open a CMT requirements file and extract the package tags
    #  @param release_file_path Path to requirements file for the release of interest
    #  @return Dictionary keyed by package, with each value a dictionary with @c svn_tag and @c project
    #  information for the package
    release_package_dict = {}
    with open(release_file_path) as tag_file:
        for line in tag_file:
            line = line.strip()
            logger.debug(line)
            if len(line) == 0 or line.startswith("#"):
                continue
            try:
                (use, package_name, tag, package_path) = line.split()
            except ValueError:
                continue
            if use != "use":
                continue
            release_package_dict[os.path.join(package_path, package_name)] = {
                "svn_tag": tag,
                "project": "",
                "package_name": package_name
            }
    return release_package_dict
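
For a CMT requirements line such as (content illustrative):

use AtlasPolicy AtlasPolicy-01-02-03 Control

the parser produces an entry keyed by the joined package path:

{"Control/AtlasPolicy": {"svn_tag": "AtlasPolicy-01-02-03",
                         "project": "",
                         "package_name": "AtlasPolicy"}}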
Example #17
def main():
    parser = argparse.ArgumentParser(
        description='Return release list chronologically ordered')
    parser.add_argument('release',
                        metavar='RELEASE',
                        nargs="+",
                        help="Release tag files")
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    release_list = []
    for release in args.release:
        with open(release) as rel_fh:
            release_data = json.load(rel_fh)
        release_list.append((release, release_data))

    release_list.sort(cmp=lambda x, y: cmp(x[1]["release"]["timestamp"],
                                           y[1]["release"]["timestamp"]))

    if logger.isEnabledFor(logging.DEBUG):
        for release_tuple in release_list:
            logger.debug("Release {0} built {1}".format(
                release_tuple[1]["release"]["name"],
                time.asctime(
                    time.localtime(release_tuple[1]["release"]["timestamp"]))))

    for release_tuple in release_list:
        print release_tuple[0],
    print
Example #18
def find_best_arch(base_path):
    ## @brief Find the "best" architecture when various install architectures are available
    #  for a particular release ("opt" release is preferred)
    #  @param base_path Directory path to architecture subdirectories
    #  @return Chosen architecture
    best_arch = None
    logger.debug("Finding best architecture in {0}".format(base_path))
    arch = os.listdir(base_path)
    logger.debug("Choices: {0}".format(" ".join(arch)))
    if len(arch) == 1:
        best_arch = arch[0]
    else:
        opt_arch = [a for a in arch if a.endswith("opt")]
        if len(opt_arch) == 1:
            best_arch = opt_arch[0]
        elif opt_arch:
            # Guard against an empty opt_arch list, so the RuntimeError
            # below fires instead of an IndexError here
            opt_arch.sort()
            best_arch = opt_arch[0]
    if not best_arch:
        raise RuntimeError(
            "Failed to find a good architecture from {0}".format(base_path))
    logger.debug("Best architecture for {0} is {1} (chosen from {2})".format(
        base_path, best_arch, len(arch)))
    return best_arch
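
A usage sketch, mirroring how find_cmake_releases calls this (the path is illustrative):

arch = find_best_arch("/install/21.0/AtlasCore/21.0.16/InstallArea")
project_path = os.path.join("InstallArea", arch)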
Example #19
        try:
            os.makedirs(args.tagdir)
        except OSError as e:
            logger.error("Failed to make directory {0}: {1}".format(
                args.tagdir, e))
            sys.exit(1)

    base_path, releases, project_path = find_cmake_releases(
        args.installpath, args.release, nightly=args.nightly, arch=args.arch)
    for release in releases:
        release_description = get_cmake_release_data(base_path,
                                                     args.release,
                                                     release,
                                                     project_path,
                                                     nightly=args.nightly)
        logger.debug("Release {0} parsed as {1}/PROJECT/{2}".format(
            base_path, release, project_path))
        release_tags = find_cmake_tags(base_path, release, project_path)
        output_file = os.path.join(args.tagdir, release_description["name"])
        if args.overwrite or not os.path.exists(output_file):
            with open(output_file, "w") as tag_output:
                my_release_data = {
                    "release": release_description,
                    "tags": release_tags
                }
                json.dump(my_release_data, tag_output, indent=2)
                logger.info("Wrote {0}".format(output_file))
        else:
            logger.debug("Skipped writing to {0} - overwrite is false".format(
                output_file))
Example #20
def find_cmake_releases(install_path, release, nightly=None, arch=None):
    ## @brief Find the base path and project sub-path for a CMake release
    #  @param install_path Base release area for CMake installed releases
    #  @param release Athena release series + release flavour number (e.g., 21.0)
    #  @param nightly Nightly series to search (otherwise look for installed release)
    #  @param arch Manually specify architecture
    #  @return Tuple with full base release path, all matching releases and project sub-path
    base_path = os.path.join(install_path, release)
    if not os.path.isdir(base_path):
        logger.error(
            "Directory {0} is missing - cannot find CMake package data".format(
                base_path))
        sys.exit(1)
    logger.info("Using base path for release {0} of {1}".format(
        release, base_path))

    sample_project = find_cmake_sample_project(base_path)
    if not sample_project:
        logger.error(
            "Could not find any sample project from {0} - cannot find CMake package data"
            .format(base_path))
        sys.exit(1)
    logger.debug("Found build project {0} to build architecture with".format(
        sample_project))

    if nightly:
        releases = [nightly]
        if not os.path.isdir(
                os.path.join(base_path, sample_project, nightly)):
            logger.error("Could not find release {0} - "
                         "cannot find CMake package data".format(
                             os.path.join(base_path, sample_project, nightly)))
            sys.exit(1)
    else:
        releases = [
            d for d in os.listdir(os.path.join(base_path, sample_project))
            if os.path.isdir(os.path.join(base_path, sample_project, d))
            and d.startswith(release)
        ]
        if len(releases) == 0:
            logger.error("Could not find any releases in {0} - "
                         "cannot find CMake package data".format(
                             os.path.join(base_path, sample_project)))
            sys.exit(1)
    logger.debug("Found releases: {0}".format(releases))

    if arch:
        project_path = os.path.join("InstallArea", arch)
    else:
        if os.path.isdir(
                os.path.join(base_path, sample_project, releases[0],
                             "InstallArea")):
            arch = find_best_arch(
                os.path.join(base_path, sample_project, releases[0],
                             "InstallArea"))
            project_path = os.path.join("InstallArea", arch)
        else:
            arch = find_best_arch(
                os.path.join(base_path, sample_project, releases[0]))
            project_path = arch
    logger.debug("Using build architecture {0}".format(arch))

    return base_path, releases, project_path
Example #21
def main():
    parser = argparse.ArgumentParser(
        description='SVN to git migrator, ATLAS style')
    parser.add_argument('svnroot',
                        metavar='SVNDIR',
                        help="Location of svn repository root")
    parser.add_argument('gitrepo',
                        metavar='GITDIR',
                        help="Location of git repository")
    parser.add_argument(
        'tagfiles',
        nargs="+",
        metavar='TAGFILE',
        help=
        "List of release tag content files to process - all tags found in these files will "
        "be imported (any already imported tags will be skipped)")
    parser.add_argument(
        '--targetbranch',
        default="package",
        help=
        "Target git branch for import. Default is the special value 'package' in which "
        "each package is imported onto its own branch")
    parser.add_argument(
        '--svnpath',
        metavar='PATH',
        nargs='+',
        default=[],
        help="Restrict actions to this list of paths in the SVN tree (use to "
        "make small scale tests of the import workflow).")
    parser.add_argument(
        '--intermediatetags',
        action="store_true",
        help=
        "Import all tags from oldest release tag found, instead of just release tags"
    )
    parser.add_argument(
        '--processtrunk',
        action="store_true",
        help=
        "Update trunk versions during the import (False by default, the trunk will be skipped)."
    )
    parser.add_argument(
        '--svncachefile',
        metavar='FILE',
        help=
        "File containing cache of SVN information - default '[gitrepo].svn.metadata'"
    )
    parser.add_argument(
        '--authorcachefile',
        metavar='FILE',
        help=
        "File containing cache of author name and email information - default '[gitrepo].author.metadata'"
    )
    parser.add_argument(
        '--importtimingfile',
        metavar="FILE",
        help=
        "File to dump SVN->git import timing information - default '[gitrepo]-timing.json'"
    )
    parser.add_argument(
        '--svnfilterexceptions',
        '--sfe',
        metavar="FILE",
        help=
        "File listing path globs to exempt from SVN import filter (lines with '+PATH') or "
        "to always reject (lines with '-PATH'); default %(default)s. Use NONE to have no exceptions.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasoffline-exceptions.txt"))
    parser.add_argument(
        '--packageveto',
        metavar="FILE",
        help="File listing packages that will be skipped completely on import.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaspackage-exceptions.txt"))
    parser.add_argument(
        '--licensefile',
        metavar="FILE",
        help="License file to add to source code files (default "
        "is to add %(default)s license file)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "cerncopy.txt"))
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from, or always apply, "
        "the license file (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument(
        '--uncrustify',
        metavar="FILE",
        help="Uncrustify configuration file to use to process C++ "
        "sources through before git import (by default uncrustify will not be used)"
    )
    parser.add_argument(
        '--uncrustifyexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from, or always apply, "
        "uncrustify (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasuncrustify-exceptions.txt"))
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="Switch logging into DEBUG mode")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Massage default values
    if not args.svncachefile:
        args.svncachefile = os.path.basename(args.gitrepo) + ".svn.metadata"
    if not args.authorcachefile:
        args.authorcachefile = os.path.basename(
            args.gitrepo) + ".author.metadata"
    if not args.importtimingfile:
        args.importtimingfile = os.path.basename(args.gitrepo) + "-timing.json"

    # Set svnroot and git repo, get some starting values
    svnroot = args.svnroot
    gitrepo = os.path.abspath(args.gitrepo)
    start_cwd = os.getcwd()
    start_timestamp_string = time.strftime("%Y%m%dT%H%M.%S")
    logger.debug("Set SVN root to {0} and git repo to {1}".format(
        svnroot, gitrepo))

    # Load exception globs
    svn_path_accept, svn_path_reject = load_exceptions_file(
        args.svnfilterexceptions)

    # Load package vetos
    if args.packageveto:
        package_veto = load_package_veto(args.packageveto)
    else:
        package_veto = []

    # License file loading
    if args.licensefile:
        with open(args.licensefile) as lfh:
            license_text = [line.rstrip() for line in lfh.readlines()]
    else:
        license_text = None
    if args.licenseexceptions:
        license_path_accept, license_path_reject = load_exceptions_file(
            args.licenseexceptions)
    else:
        license_path_accept = license_path_reject = []

    # Uncrustify exceptions file
    if args.uncrustify:
        uncrustify_path_accept, uncrustify_path_reject = load_exceptions_file(
            args.uncrustifyexceptions)
    else:
        uncrustify_path_accept = uncrustify_path_reject = []

    ### Main actions start here
    # Setup the git repository
    init_git(gitrepo)
    # Pull current list of tags here, to fast skip any work already done
    if args.targetbranch != "package":
        switch_to_branch(args.targetbranch, orphan=True)
    current_git_tags = get_current_git_tags(gitrepo)
    os.chdir(start_cwd)

    ## SVN interactions and reloading state
    # Decide which svn packages we will import
    # Note that if we're pulling the packages from a tag diff file, we also get tags
    # at this point, otherwise the tag list is empty.
    svn_packages = get_tags(args.tagfiles, args.svnpath)
    # Add "trunk" packages, if required
    if args.processtrunk:
        for package, tags in svn_packages.iteritems():
            if "trunk" not in tags:
                tags.append("trunk")

    # Initialise SVN and author metadata cache with any stored values
    svn_metadata_cache = initialise_metadata(args.svncachefile)
    author_metadata_cache = initialise_metadata(args.authorcachefile)

    # Prepare package import
    scan_svn_tags_and_get_metadata(svnroot,
                                   svn_packages,
                                   svn_metadata_cache,
                                   author_metadata_cache,
                                   args.intermediatetags,
                                   package_veto=package_veto)

    # Now persist the metadata caches
    backup_metadata(svn_metadata_cache, start_cwd, args.svncachefile,
                    start_timestamp_string)
    backup_metadata(author_metadata_cache, start_cwd, args.authorcachefile,
                    start_timestamp_string)

    # Setup dictionary for keying by SVN revision number
    svn_cache_revision_dict = svn_cache_revision_dict_init(svn_metadata_cache)

    ## git processing actions
    # Process each SVN tag in order
    ordered_revisions = sorted(svn_cache_revision_dict.keys(), key=int)
    logger.info("Will process {0} SVN revisions in total".format(
        len(ordered_revisions)))
    counter = 0
    processed_tags = 0
    timing = []
    os.chdir(gitrepo)

    for rev in ordered_revisions:
        counter += 1
        start = time.time()
        logger.info("SVN Revision {0} ({1} of {2})".format(
            rev, counter, len(ordered_revisions)))
        for pkg_tag in svn_cache_revision_dict[rev]:
            if get_flattened_git_tag(pkg_tag["package"], pkg_tag["tag"],
                                     rev) in current_git_tags:
                logger.info("Tag {0} exists already - skipping".format(
                    os.path.join(pkg_tag["package"], pkg_tag["tag"])))
                continue
            if args.targetbranch == "package":
                switch_to_branch(os.path.basename(pkg_tag["package"]),
                                 orphan=True)
            svn_co_tag_and_commit(
                svnroot,
                gitrepo,
                pkg_tag["package"],
                pkg_tag["tag"],
                svn_metadata_cache[os.path.basename(
                    pkg_tag["package"])]["svn"][pkg_tag["tag"]][rev],
                author_metadata_cache,
                svn_path_accept=svn_path_accept,
                svn_path_reject=svn_path_reject,
                package_veto=package_veto,
                license_text=license_text,
                license_path_accept=license_path_accept,
                license_path_reject=license_path_reject,
                uncrustify_config=args.uncrustify,
                uncrustify_path_accept=uncrustify_path_accept,
                uncrustify_path_reject=uncrustify_path_reject)
            processed_tags += 1
        elapsed = time.time() - start
        logger.info(
            "{0} processed in {1}s ({2} packages really processed)".format(
                counter, elapsed, processed_tags))
        timing.append(elapsed)

    # Last task, clean all empty directories (git does not track these, but they are clutter)
    check_output_with_retry(("git", "clean", "-f", "-d"))

    if args.importtimingfile:
        os.chdir(start_cwd)
        with open(args.importtimingfile, "w") as time_file:
            json.dump(timing, time_file)
Example #22
def branch_builder(gitrepo, branch, tag_files, svn_metadata_cache, author_metadata_cache,
                   parentbranch=None, baserelease=None,
                   skipreleasetag=False, dryrun=False, only_forward=False, commit_date="now"):
    ## @brief Main branch builder function
    #  @param gitrepo The git repository location
    #  @param branch The git branch to work on
    #  @param tag_files The plain tag content files to process
    #  @param svn_metadata_cache The standard metadata cache from SVN
    #  @param author_metadata_cache Cached author data
    #  @param parentbranch If creating a new branch, this is the BRANCH:COMMIT_ID of where to make the new branch from
    #  @param baserelease Base release tag content, used when the releases being built are caches on top of a base release
    #  @param skipreleasetag If @c True then skip creating git tags for each processed release
    #  @param dryrun If @c True, do nothing except print commands that would have been executed
    #  @param only_forward If @c True then never revert a package to a previous version or import a branch tag 
    #  @param commit_date Choice for commit date when building branches
    
    # Prepare - chdir and then make sure we are on the correct branch
    os.chdir(gitrepo)
    prepare_branch_point(branch, parentbranch)            

    # Main loop starts here, with one pass for each tag file we are processing
    for tag_file in tag_files:
        with open(tag_file) as tag_file_fh:
            release_data = json.load(tag_file_fh)

        tag_list = get_current_git_tags(gitrepo)
        current_release_tags = get_current_release_tag_dict(tag_list, branch) # Markers for which packages have been processed
        logger.info("Processing release {0} ({1} current tags)".format(release_data["release"]["name"], len(current_release_tags)))
        release_tag = git_release_tag(release_data["release"], branch)
        if release_tag in tag_list and not skipreleasetag:
            logger.info("Release tag {0} already made - skipping".format(release_tag))
            continue

        if commit_date == "release":
            logger.info("Setting committer date to {0:.0f}".format(release_data["release"]["timestamp"]))
            os.environ["GIT_COMMITTER_DATE"] = "{0:.0f}".format(release_data["release"]["timestamp"])
        
        # Find which packages need to be updated for this new tag content file
        import_list, packages_considered = find_packages_for_update(release_data, tag_list, branch, 
                                                                    svn_metadata_cache, current_release_tags, only_forward)

        ## Sort the list of tags to be imported by SVN revision number for a
        #  more or less sensible package by package commit history
        sorted_import_revisions = sorted(import_list.keys(), key=int)

        ## Now loop over all the packages we have to import and update them
        pkg_processed = 0
        for revision in sorted_import_revisions:
            for pkg_import in import_list[revision]:
                pkg_processed += 1
                do_package_import(pkg_import, svn_metadata_cache, author_metadata_cache, release_name=release_data["release"]["name"], 
                                  branch=branch, dryrun=dryrun, commit_date=commit_date)
                logger.info("Processed {0} package imports ({1} revisions in total)".format(pkg_processed, len(import_list)))


        ## After all packages are updated, look for packages which were present in
        #  the last release, but not in this one, so they need to be removed
        new_current_release_tags = get_current_release_tag_dict(tag_list, branch) # Updated package list after upgrade
        packages_to_remove = []
        packages_to_revert = {}
        for package_name, old_package_state in current_release_tags.iteritems():
            if package_name in packages_considered:
                logger.debug("Package {0} was processed for {1}".format(package_name, release_data["release"]["name"]))
                continue
            ## @note We have a package that was not "considered" in the current release.
            #  If we don't have a baserelease then this has been removed, so we zap it.
            #  If there is a baserelease... 
            #   ...and this package is not in it, it was in the cache, then was removed, so zap it.
            #   ...and this package is in it, then compare the versions and "revert" to the base 
            #      release version if they are different.
            if baserelease:
                base_package_version = None
                for package, base_package_data in baserelease["tags"].iteritems():
                    if base_package_data["package_name"] == package_name:
                        base_package_version = base_package_data
                        break
                if base_package_version:
                    if base_package_version["svn_tag"] == old_package_state["svn_tag"]:
                        logger.debug("Package {0} remains at base release version {1}".format(base_package_data["package_name"],
                                                                                              base_package_version["svn_tag"]))
                        packages_considered.append(package_name) # Flag that we dealt with this package
                    else:
                        logger.info("Package {0} was removed from cache - reverting to base "
                                    "release version {1}".format(base_package_data["package_name"],
                                                     base_package_version["svn_tag"]))
                        package_name = base_package_data["package_name"]
                        svn_meta_tag_key = os.path.join("tags", base_package_version["svn_tag"])
                        svn_revision = svn_metadata_cache[package_name]["svn"][svn_meta_tag_key].keys()[0]
                        git_import_tag = get_flattened_git_tag(package, base_package_version["svn_tag"], svn_revision)
                        packages_to_revert[package_name] = {"package": package,
                                                            "package_name": os.path.basename(package),
                                                            "git_import_tag": get_flattened_git_tag(package, base_package_version["svn_tag"], svn_revision),
                                                            "svn_tag": base_package_version["svn_tag"],
                                                            "svn_revision": svn_revision,
                                                            "branch_import_tag": get_flattened_git_tag(package, base_package_version["svn_tag"], svn_revision, branch),
                                                            "svn_meta_tag_key": svn_meta_tag_key,
                                                            "current_branch_import_tag": current_release_tags[package_name]["git_tag"]}
                else:
                    logger.info("Package {0} was removed from the cache and is not in the base release".format(package_name))
                    packages_to_remove.append(package_name)
            else:
                logger.info("Package {0} has been removed from the release".format(package_name))
                packages_to_remove.append(package_name)

        if baserelease:
            logger.info("{0} packages have been reverted to their base SVN state".format(len(packages_to_revert)))
            for package_name, revert_data in packages_to_revert.iteritems():
                do_package_import(revert_data, svn_metadata_cache, author_metadata_cache, release_name=release_data["release"]["name"], 
                                  branch=branch, dryrun=dryrun, commit_date=commit_date)

        logger.info("{0} packages have been removed from the release".format(len(packages_to_remove)))
        for package in packages_to_remove:
            logger.info("Removing {0} from {1}".format(package, branch))
            package_path = os.path.join(svn_metadata_cache[package]["path"], package)
            if not dryrun:
                recursive_delete(package_path)
            check_output_with_retry(("git", "add", "-A"), dryrun=dryrun)
            cmd = ["git", "commit", "--allow-empty", "-m", "{0} deleted from {1}".format(package_path, branch)]
            check_output_with_retry(cmd, dryrun=dryrun)
            check_output_with_retry(("git", "tag", "-d", current_release_tags[package]["git_tag"]), retries=1, dryrun=dryrun)
            pkg_processed += 1

        ## Now, finally, tag the release as done
        if not skipreleasetag:
            if release_data["release"]["nightly"]:
                check_output_with_retry(("git", "tag", release_tag), retries=1, dryrun=dryrun)
            else:
                check_output_with_retry(("git", "tag", release_tag, "-a",
                                         "-m", "Tagging release {0}".format(release_data["release"]["name"])), 
                                        retries=1, dryrun=dryrun)
            logger.info("Tagged release {0} as {1} "
                        "({2} packages processed)".format(release_data["release"]["name"],
                                                          release_tag, pkg_processed))
        else:
            logger.info("Processed release {0} (no tag; {1} packages processed)".format(release_data["release"]["name"], pkg_processed))
Example #23
def do_package_import(pkg_import, svn_metadata_cache, author_metadata_cache, release_name="unknown", branch="unknown", 
                      dryrun=False, commit_date="now"):
    ## @brief Import a package's SVN tag onto the current git branch
    #  updating the corresponding git tags
    #  @param pkg_import package import dictionary (see find_packages_for_update for the
    #  structure)
    #  @param svn_metadata_cache The standard metadata cache from SVN
    #  @param author_metadata_cache Cached author data
    #  @param release_name Name of current release being built (used only for generating log messages)
    #  @param branch Current branch name (used only for generating log messages)
    #  @param dryrun Boolean, if @c True then don't actually act
    #  @param commit_date Choices for setting committer date 
    logger.info("Migrating {0} from {1} to {2} for {3}...".format(pkg_import["package"], 
                                                          pkg_import["current_branch_import_tag"], 
                                                          pkg_import["svn_tag"], release_name))
    # Need to wipe out all contents in case files were removed from package
    if not dryrun:
        recursive_delete(pkg_import["package"])
    check_output_with_retry(("git", "checkout", pkg_import["git_import_tag"], pkg_import["package"]), dryrun=dryrun)
    # Splat the ChangeLog file - we do not want these on the production branches
    try:
        os.remove(os.path.join(pkg_import["package"], "ChangeLog"))
    except OSError:
        pass
    # Done - now commit and tag
    if logger.level <= logging.DEBUG:
        cmd = ["git", "status"]
        logger.debug(check_output_with_retry(cmd))
    check_output_with_retry(("git", "add", "-A", pkg_import["package"]), dryrun=dryrun)
    staged = check_output_with_retry(("git", "diff", "--name-only", "--staged"), dryrun=dryrun)
    if len(staged) == 0 and (not dryrun): 
        # Nothing staged, so skip doing any commit, but do make the import tag for this branch
        # so that we don't repeat this step again
        logger.warning("Package {0} - no changes staged for {1}, " 
                       "git tagging and skipping commit".format(pkg_import["package"], release_name))
        check_output_with_retry(("git", "tag", pkg_import["branch_import_tag"]), retries=1, dryrun=dryrun)
        return

    rev_meta = svn_metadata_cache[pkg_import["package_name"]]["svn"][pkg_import["svn_meta_tag_key"]][pkg_import["svn_revision"]]
    msg = rev_meta["msg"]
    if pkg_import["svn_tag"] == "trunk":
        msg += " (trunk r{0})".format(rev_meta["revision"])
    else:
        msg += " ({0})".format(pkg_import["svn_tag"])
    cl_diff = changelog_diff(pkg_import["package"],
                             from_tag="/".join(pkg_import["current_branch_import_tag"].split("/")[1:]) if pkg_import["current_branch_import_tag"] else None,
                             to_tag=pkg_import["git_import_tag"])
    if cl_diff:
        msg += "\n\n" + "\n".join(cl_diff)
    cmd = ["git", "commit", "-m", msg]
    author = author_string(rev_meta["author"], author_metadata_cache)
    cmd.append("--author='{0}'".format(author))
    cmd.append("--date={0}".format(rev_meta["date"]))
    
    if commit_date == "author":
        os.environ["GIT_COMMITTER_DATE"] = rev_meta["date"]
    check_output_with_retry(cmd, retries=1, dryrun=dryrun)
    if commit_date == "author":
        del os.environ["GIT_COMMITTER_DATE"]
    
    check_output_with_retry(("git", "tag", pkg_import["branch_import_tag"]), retries=1, dryrun=dryrun)
    if pkg_import["current_branch_import_tag"]:
        check_output_with_retry(("git", "tag", "-d", pkg_import["current_branch_import_tag"]), retries=1, dryrun=dryrun)
    logger.info("Committed {0} ({1}) onto {2} for {3}".format(pkg_import["package"], 
                                                              pkg_import["svn_tag"], branch, release_name))
Example #24
def find_packages_for_update(release_data, tag_list, branch, svn_metadata_cache, release_tag_unprocessed, only_forward=False):
    ## @brief Find packages that need updates, comparing release tag content with
    #  git import tags already made
    #  @param release_data Release tag content dictionary
    #  @param tag_list List of current git tags
    #  @param branch Git branch being constructed
    #  @param svn_metadata_cache Usual cache data for SVN stuff
    #  @param release_tag_unprocessed Dictionary of current release tag metadata, used to mark which packages are already imported
    #  @param only_forward If @c True then never revert a package to a previous version or import a branch tag
    #  @return Tuple of tag import dictionary, and a list of all "processed" packages
    
    ## Loop over all packages in a release and see if the package
    # - is missing from the svn metadata cache (so skip)
    # - is already imported at current HEAD (so skip)
    # - is new or updated (so mark for import)
    # Final construct is the import_list dictionary
    import_list = {}
    packages_considered = []
    for package, package_data in release_data["tags"].iteritems():
        package_name = os.path.basename(package)
        packages_considered.append(package_name)
        package_tag = package_data["svn_tag"]
        if package_name not in svn_metadata_cache:
            logger.debug("Package {0} not found - assuming restricted import".format(package_name))
            continue
        if package_tag == "trunk":
            svn_meta_tag_key = "trunk"
        else:
            svn_meta_tag_key = os.path.join("tags", package_tag)

        for revision in svn_metadata_cache[package_name]["svn"][svn_meta_tag_key]:
            git_import_tag = get_flattened_git_tag(package, package_tag, revision)
            if git_import_tag not in tag_list:
                logger.debug("Import tag {0} not found - assuming restricted import".format(git_import_tag))
                continue
            branch_import_tag = get_flattened_git_tag(package, package_tag, revision, branch)
            logger.debug("Considering import of {0} ({1}@r{2}) to {3} "
                         "for release {4}".format(branch_import_tag, package_tag, revision, branch, release_data["release"]["name"]))
            if branch_import_tag in tag_list:
                logger.info("Import of {0} ({1} r{2}) onto {3} done - skipping".format(package, package_tag, revision, branch))
                continue
            if only_forward:
                if package_name in release_tag_unprocessed and package_compare(release_tag_unprocessed[package_name]["svn_tag"], package_tag) != -1:
                    logger.info("Import of {0} onto {1} is blocked - onlyforward option will not downgrade tags".format(package_tag, branch))
                    continue
            ## @note The structure of the dictionary used to direct the import of a package is:
            #  "package": full package path
            #  "package_name": package basename (for convenience)
            #  "git_import_tag": the git import tag that identifies the import of the version of this package
            #  "svn_tag": the SVN tag corresponding to this package version
            #  "svn_revision": the SVN revision for this package version
            #  "branch_import_tag": the git import tag that will be created to stamp this import as done
            #  "svn_meta_tag_key": the key used in the SVN metadata dictionary to get SVN metadata
            #  "current_branch_import_tag": the git tag indicating the current version of this package 
            import_element = {"package": package, 
                              "package_name": os.path.basename(package),
                              "git_import_tag": git_import_tag, 
                              "svn_tag": package_tag, 
                              "svn_revision": revision,
                              "branch_import_tag": branch_import_tag, "svn_meta_tag_key": svn_meta_tag_key, 
                              "current_branch_import_tag": release_tag_unprocessed[package_name]["git_tag"] if package_name in release_tag_unprocessed else None}
            logger.debug("Will import {0} to {1}".format(import_element, branch))
            if revision in import_list:
                import_list[revision].append(import_element)
            else:
                import_list[revision] = [import_element]

    return import_list, packages_considered
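
The revision-keyed dictionary returned here drives the actual import pass. A minimal sketch of how a caller might walk it in revision order (the call arguments are placeholders and import_package_element() is a hypothetical stand-in for the real import step, which lives elsewhere in the script):

import_list, processed = find_packages_for_update(release_data, tag_list, branch,
                                                  svn_metadata_cache,
                                                  release_tag_unprocessed)
for revision in sorted(import_list):  # walk imports in ascending SVN revision order
    for element in import_list[revision]:
        # element carries the fields documented in the @note above
        import_package_element(element)  # hypothetical helper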
Example No. 25
def main():
    parser = argparse.ArgumentParser(
        description=
        'ATLAS tag munger, calculating tag evolution across a release series')
    parser.add_argument(
        'release',
        metavar='RELEASE',
        nargs="+",
        help=
        "Files containing tag lists (NICOS format). If a release series/major is given (e.g., 20.1 or 20.1.5) "
        "the script will search for the base release and all caches to build the tag files in "
        "a simple way, without worrying about the details of the NICOS tag files and paths (N.B. "
        "in the rare cases when there is more than one tag file for a release, the last one will "
        "be used).")
    parser.add_argument(
        '--tagdir',
        default="tagdir",
        help=
        "output directory for tag files, each release will generate an entry here (default \"tagdir\")"
    )
    parser.add_argument(
        '--prefix',
        help=
        "Prefix for the name of the release, when the NICOS information is insufficient"
    )
    parser.add_argument(
        '--nicospath',
        default="/afs/cern.ch/atlas/software/dist/nightlies/nicos_work/tags/",
        help="path to NICOS tag files (defaults to usual CERN AFS location)")
    parser.add_argument(
        '--analysispkgfilter',
        action="store_true",
        help=
        "Special post processing for the (Ath)AnalysisBase-2.6.X release series, which "
        "filters tags to be only those which are missing from standard Athena releases"
    )
    parser.add_argument(
        '--overwrite',
        action="store_true",
        default=False,
        help=
        "Overwrite any exisitng configuration files (otherwise, just skip over)"
    )
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="switch logging into DEBUG mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Case when a single base release is given - we have to expand it
    if len(args.release) == 1 and re.match(r"(\d+)\.(\d+)(\.(\d+))?$",
                                           args.release[0]):
        nicos_paths = find_nicos_from_base(args.nicospath, args.release[0])
    else:
        nicos_paths = []
        for path in args.release:
            if os.path.exists(path):
                nicos_paths.append(path)
            elif os.path.exists(os.path.join(args.nicospath, path)):
                nicos_paths.append(os.path.join(args.nicospath, path))
            else:
                logger.error(
                    "Path {0} doesn't exist (even after prepending NICOS path)"
                    .format(path))
                sys.exit(1)

    for release in nicos_paths:
        release_description = parse_release_data(release, args.prefix)
        release_tags = parse_tag_file(release, args.analysispkgfilter)
        logger.info("Processing tags for release {0}".format(
            release_description["name"]))
        output_file = os.path.join(args.tagdir, release_description["name"])
        if args.overwrite or not os.path.exists(output_file):
            with open(output_file, "w") as tag_output:
                json.dump(
                    {
                        "release": release_description,
                        "tags": release_tags
                    },
                    tag_output,
                    indent=2)
        else:
            logger.debug("Skipped writing to {0} - overwrite is false".format(
                output_file))
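
For reference, the dictionary passed to json.dump() above has two top-level keys: "release" holds the description returned by parse_release_data() and "tags" the per-package dictionary returned by parse_tag_file() (shown in Example No. 28 below). A sketch of the layout, with an invented release name and package entry:

tag_file_content = {
    "release": {"name": "21.0.15"},          # from parse_release_data(); name invented
    "tags": {
        "Event/xAOD/xAODMuon": {             # one entry per package
            "svn_tag": "xAODMuon-00-18-01",
            "project": "AtlasEvent",
            "package_name": "xAODMuon",
        }
    },
}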
Example No. 26
def main():
    parser = argparse.ArgumentParser(
        description=textwrap.dedent('''\
                                    Pull a package revision from SVN and apply to the current athena
                                    git repository.

                                    Run this script from inside the athena git repository clone to
                                    be updated.

                                    SVN package revisions are usually specified as
                                    - A simple package name, which means import the package trunk,
                                      e.g., xAODMuon imports Event/xAOD/xAODMuon/trunk

                                    - A package tag, which imports that SVN tag, e.g., xAODMuon-00-18-01
                                      imports Event/xAOD/xAODMuon/tags/xAODMuon-00-18-01

                                    Some more advanced specifiers can be used for special cases:
                                    - A tag name + "-branch" will import the corresponding development
                                      branch, e.g., xAODMuon-00-11-04-branch will import
                                      Event/xAOD/xAODMuon/branches/xAODMuon-00-11-04-branch

                                    - A package path + SVN sub path, PACKAGEPATH+SVNSUBPATH, where
                                      PACKAGEPATH is the path to the package root in SVN and git and
                                      SVNSUBPATH is the path to the SVN version to import; e.g.,
                                      Reconstruction/RecJobTransforms+devbranches/RecJobTransforms_RAWtoALL
                                      (note the plus sign!) will import the SVN path
                                      Reconstruction/RecJobTransforms/devbranches/RecJobTransforms_RAWtoALL
                                      to Reconstruction/RecJobTransforms

                                    The final specifier is only needed if the package to be imported is
                                    not in your current git checkout or if you want to import an unusual
                                    SVN revision, such as a development branch.

                                    The --files specifier can be used to import only some files or paths
                                    to git, with globs supported, e.g.,

                                      --files src/* MyPackage/*.h share/important_file.py

                                    For consistency all options applied during the primary ATLAS SVN to
                                    git migration are re-applied by default.
                                    '''),
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        'svnpackage',
        nargs="+",
        help=
        "SVN package to import, usually a plain package name or tag (see above)"
    )
    parser.add_argument(
        '--files',
        nargs="+",
        help=
        "Only package files matching the values specified here are imported (globs allowed). "
        "This can be used to import only some files from the SVN package and will "
        "disable the normal --svnfilterexceptions matching.",
        default=[])
    parser.add_argument(
        '--revision',
        type=int,
        default=0,
        help="Work at specific SVN revision number instead of HEAD")
    parser.add_argument(
        '--svnroot',
        metavar='SVNDIR',
        help="Location of the SVN repository (defaults to %(default)s)",
        default="svn+ssh://svn.cern.ch/reps/atlasoff")
    parser.add_argument(
        '--svnfilterexceptions',
        '--sfe',
        metavar="FILE",
        help=
        "File listing path globs to exempt from SVN import filter (lines with '+PATH') or "
        "to always reject (lines with '-PATH'); default %(default)s. "
        "It is strongly recommended to keep the default value to ensure consistency "
        "with the official ATLAS migration.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlasoffline-exceptions.txt"))
    parser.add_argument(
        '--licensefile',
        metavar="FILE",
        help="License file to add to C++ and python source code "
        "files (default %(default)s). "
        "It is strongly recommended to keep the default value to ensure consistency "
        "with the official ATLAS migration. Use NONE to disable if it is really necessary.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "cerncopy.txt"))
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply license file to (same format as --svnfilterexceptions). "
        "It is strongly recommended to keep the default value to ensure consistency "
        "with the official ATLAS migration.",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument(
        '--debug',
        '--verbose',
        "-v",
        action="store_true",
        help="Switch logging into DEBUG mode (default is WARNING)")
    parser.add_argument(
        '--info',
        action="store_true",
        help="Switch logging into INFO mode (default is WARNING)")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.info:
        logger.setLevel(logging.INFO)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    svn_path_accept, svn_path_reject = load_exceptions_file(
        args.svnfilterexceptions, reject_changelog=True)

    if len(args.svnpackage) > 1 and args.files:
        logger.error(
            "You have specified multiple SVN packages and to filter on package files "
            "to import, which almost certainly will not work - aborting")
        sys.exit(1)

    # Check that we do seem to be in a git repository
    gitrepo = find_git_root()
    if not gitrepo:
        logger.fatal(
            "Not a git repository (or any of the parent directories), run from inside a clone of the athena repository."
        )
        sys.exit(1)
    os.chdir(gitrepo)

    # License file loading
    if args.licensefile and args.licensefile != "NONE":
        with open(args.licensefile) as lfh:
            license_text = [line.rstrip() for line in lfh.readlines()]
    else:
        license_text = None
    if args.licenseexceptions:
        license_path_accept, license_path_reject = load_exceptions_file(
            args.licenseexceptions)
    else:
        license_path_accept = license_path_reject = []

    # Map package names to paths
    package_path_dict = map_package_names_to_paths()

    # Now loop over each package we were given
    try:
        for svn_package in args.svnpackage:
            full_clobber = True
            package_name, package, svn_package_path = get_svn_path_from_tag_name(
                svn_package, package_path_dict)
            # If we have a --files option then redo the accept/reject paths here
            # (as the package path needs to be prepended it needs to happen in this loop)
            if args.files:
                full_clobber = False
                svn_path_reject = [re.compile(fnmatch.translate("*"))]
                svn_path_accept = []
                for glob in args.files:
                    package_glob = os.path.join(
                        package_path_dict[package_name], glob)
                    logger.debug(
                        "Will accept files matching {0}".format(package_glob))
                    svn_path_accept.append(
                        re.compile(fnmatch.translate(package_glob)))
                logger.debug("{0}".format([m.pattern
                                           for m in svn_path_accept]))
            logger.debug("Will import {0} to {1}, SVN revision {2}".format(
                os.path.join(package, svn_package_path),
                package_path_dict[package_name],
                "HEAD" if args.revision == 0 else args.revision))
            svn_co_tag_and_commit(
                args.svnroot,
                gitrepo,
                package,
                svn_package_path,
                full_clobber,
                svn_path_accept=svn_path_accept,
                svn_path_reject=svn_path_reject,
                revision=args.revision,
                license_text=license_text,
                license_path_accept=license_path_accept,
                license_path_reject=license_path_reject,
            )
    except RuntimeError as e:
        logger.error(
            "Got a RuntimeError raised when processing package {0} ({1}). "
            "Usually this is caused by a failure to checkout from SVN, meaning you "
            "specified a package tag that does not exist, or even a package that "
            "does not exist. See --help for how to specify what to import.".
            format(svn_package, e))
        sys.exit(1)

    print textwrap.fill(
        "Pull from SVN succeeded. Use 'git status' to check which files "
        "have been changed and 'git diff' to review the changes in detail. "
        "When you are happy with your changes commit with a good commit message - "
        "as an update has been done from SVN it is recommended to give the "
        "SVN tag in the one line commit summary.")
Example No. 27
def svn_license_injector(svn_path,
                         svn_co_root,
                         license_text,
                         license_path_accept=[],
                         license_path_reject=[]):
    ## @brief Add license statements to code before import
    #  @param svn_path Filesystem path to cleaned up SVN checkout
    #  @param svn_co_root Base directory of SVN checkout
    #  @param license_text List of strings that comprise the license to apply
    #  @param license_path_accept Paths to force include in license file addition
    #  @param license_path_reject Paths to exclude from license file addition
    for root, dirs, files in os.walk(svn_path):
        for name in files:
            filename = os.path.join(root, name)
            svn_filename = filename[len(svn_co_root) + 1:]
            path_veto = False
            for filter in license_path_reject:
                if re.match(filter, svn_filename):
                    logger.debug(
                        "File {0} will not have a license applied "
                        "(matched reject filter {1})".format(
                            svn_filename, filter.pattern))
                    path_veto = True
                    break
            for filter in license_path_accept:
                if re.match(filter, svn_filename):
                    logger.debug(
                        "File {0} will have a license applied "
                        "(matched accept filter {1})".format(
                            svn_filename, filter.pattern))
                    path_veto = False
                    break
            if path_veto:
                continue
            # Now see if the license file is already in SVN, as this is happening sometimes
            with open(filename) as fh:
                lines = 0
                licensed = False
                while lines < 10 and not licensed:
                    fline = fh.readline()
                    # Matching the first line of the license text should be enough
                    if license_text[0] in fline:
                        licensed = True
                        break
                    lines += 1
            if licensed:
                logger.debug(
                    "File {0} appears to already have a copyright/license "
                    "statement in it".format(svn_filename))
                continue
            # Get the file's mode here to then restore it
            try:
                fmode = os.stat(filename).st_mode
                extension = svn_filename.rsplit(
                    ".", 1)[1] if "." in svn_filename else ""
                if extension in ("cxx", "cpp", "icc", "cc", "c", "C", "h",
                                 "hpp", "hh"):
                    inject_c_license(filename, license_text)
                    os.chmod(filename, fmode)
                elif extension in ("py", "cmake"):
                    inject_py_license(filename, license_text)
                    os.chmod(filename, fmode)
            except OSError, e:
                # Can happen if a file is a softlink to nowhere
                logger.warning("Got an exception when stat'ing {0}: {1}".format(
                    filename, e))
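
inject_c_license() and inject_py_license() are defined elsewhere in this module and not shown here. The following is only a plausible sketch of the C-style variant, assuming the license lines are wrapped in a block comment and prepended to the file:

def inject_c_license_sketch(filename, license_text):
    # Illustrative only - the real inject_c_license() may treat shebangs,
    # include guards and encodings more carefully
    with open(filename) as fh:
        original = fh.read()
    with open(filename, "w") as fh:
        fh.write("/*\n")
        for line in license_text:
            fh.write(" * {0}\n".format(line))
        fh.write(" */\n\n")
        fh.write(original)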
Example No. 28
def parse_tag_file(release_file_path, analysis_filter=False):
    ## @brief Open a NICOS tag file and extract the package tags
    #  @param release_file_path Path to file with NICOS tags for the release of interest
    #  @param analysis_filter Apply a filter to take only packages that are in the analysis
    #         release, but are missing from Athena releases
    #  @return Dictionary keyed by package, with each value a dictionary with @c tag and @c project
    #  information for the package
    analysis_packages = [
        "AsgExternal/Asg_Test",
        #"PhysicsAnalysis/AnalysisCommon/AssociationUtils",    # Have to remove this for now because of the name clash with PhysicsAnalysis/AssociationBuilder/AssociationUtils
        "PhysicsAnalysis/AnalysisCommon/CPAnalysisExamples",
        "PhysicsAnalysis/AnalysisCommon/PMGTools",
        "PhysicsAnalysis/D3PDTools/EventLoop",
        "PhysicsAnalysis/D3PDTools/EventLoopAlgs",
        "PhysicsAnalysis/D3PDTools/EventLoopGrid",
        "PhysicsAnalysis/D3PDTools/MultiDraw",
        "PhysicsAnalysis/D3PDTools/SampleHandler",
        "PhysicsAnalysis/ElectronPhotonID/PhotonEfficiencyCorrection",
        "PhysicsAnalysis/ElectronPhotonID/PhotonVertexSelection",
        "PhysicsAnalysis/HiggsPhys/Run2/HZZ/Tools/ZMassConstraint",
        "PhysicsAnalysis/Interfaces/AsgAnalysisInterfaces",
        "PhysicsAnalysis/JetPhys/SemileptonicCorr",
        "PhysicsAnalysis/SUSYPhys/SUSYTools",
        "PhysicsAnalysis/TauID/DiTauMassTools",
        "PhysicsAnalysis/TauID/TauCorrUncert",
        "PhysicsAnalysis/TopPhys/QuickAna",
        "PhysicsAnalysis/TrackingID/InDetTrackSystematicsTools",
        "Reconstruction/Jet/JetAnalysisTools/JetTileCorrection",
        "Reconstruction/Jet/JetJvtEfficiency",
        "Reconstruction/Jet/JetReclustering",
        "Trigger/TrigAnalysis/TrigMuonEfficiency",
        "Trigger/TrigAnalysis/TrigTauAnalysis/TrigTauMatching",
    ]
    release_package_dict = {}
    with open(release_file_path) as tag_file:
        for line in tag_file:
            line = line.strip()
            logger.debug(line)
            if len(line) == 0 or line.startswith("#"):
                continue
            try:
                (package, tag, project) = line.split(" ")
            except ValueError:
                continue
            # Gaudi packages live in a separate project, so don't add them
            if project == "GAUDI":
                continue
            # "Release" and "RunTime" packages live inside the Release path, but in fact
            # we ignore them for git . Except for TriggerRelease, which is a real package!
            if package != "Trigger/TriggerRelease" and (
                    package.endswith("Release")
                    or package.endswith("RunTime")):
                logger.debug(
                    "Vetoing package auto-generated package {0}".format(
                        package))
                continue
            if package in [
                    "AtlasEvent", "AtlasAnalysis", "AtlasCore", "AtlasTrigger",
                    "AtlasProduction", "AtlasOffline", "DetCommon",
                    "AtlasReconstruction", "AtlasConditions", "AtlasExternals",
                    "AtlasSimulation", "AtlasHLT"
            ]:
                logger.debug(
                    "Vetoing fake 'project' package {0}".format(package))
                continue
            # Fake packages made by tag collector
            if "/" not in package and "22-00-00" in tag:
                continue
            logger.debug("Found package {0}, tag {1} in project {2}".format(
                package, tag, project))
            if analysis_filter and package not in analysis_packages:
                continue
            release_package_dict[package] = {
                "svn_tag": tag,
                "project": project,
                "package_name": os.path.basename(package)
            }
    return release_package_dict
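
Each NICOS tag line is simply "package tag project" separated by single spaces. A worked illustration of the parsing above (the line itself is invented):

import os

line = "Event/xAOD/xAODMuon xAODMuon-00-18-01 AtlasEvent"
package, tag, project = line.strip().split(" ")
entry = {"svn_tag": tag,
         "project": project,
         "package_name": os.path.basename(package)}
# entry: svn_tag "xAODMuon-00-18-01", project "AtlasEvent", package_name "xAODMuon"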
Example No. 29
def svn_co_tag_and_commit(svnroot, gitrepo, package, tag, svn_metadata=None, author_metadata_cache=None, branch=None,
                          svn_path_accept=[], svn_path_reject=[], package_veto=[], commit=True, revision=None,
                          license_text=None, license_path_accept=[], license_path_reject=[],
                          uncrustify_config=None, uncrustify_path_accept=[], uncrustify_path_reject=[]):
    ## @brief Make a temporary space, check out from svn, clean-up, copy and then git commit and tag
    #  @param svnroot Base path to SVN repository
    #  @param gitrepo Path to git repository to import to
    #  @param package Path to package root (in git and svn)
    #  @param tag Package tag to import (i.e., path after base package path)
    #  @param svn_metadata SVN metadata cache
    #  @param author_metadata_cache Author name/email cache
    #  @param branch Git branch to switch to before import
    #  @param svn_path_accept Paths to force import to git
    #  @param svn_path_reject Paths to force reject from the import
    #  @param package_veto List of packages to just plain refuse to handle
    #  @param commit Boolean flag to manage commit (can be set to @c False to only checkout and process)
    #  @param license_text List of strings containing the license text to add (if @c False, then no
    #  license file is added)
    #  @param revision Force SVN revision number (useful for svnpull.py, where
    #  no svn metadata is available)
    #  @param license_path_accept Paths to force include in license file addition
    #  @param license_path_reject Paths to exclude from license file addition
    #  @param uncrustify_config Uncrustify configuration file
    #  @param uncrustify_path_accept Paths to force uncrustify to run on
    #  @param uncrustify_path_reject Paths to exclude from uncrustify
    if package in package_veto:
        logger.info("Package {0} is vetoed - skipping import".format(package))
        return

    msg = "Importing SVN path {0}/{1} to {0}".format(package, tag)
    if svn_metadata and tag == "trunk":
        msg += " (r{0})".format(svn_metadata["revision"])
    logger.info(msg)

    if branch:
        logger.info("Switching to branch {0}".format(branch))
        switch_to_branch(branch)

    tempdir = tempfile.mkdtemp()
    full_svn_path = os.path.join(tempdir, package)
    cmd = ["svn", "checkout"]
    if revision:
        cmd.extend(["-r", str(revision)])
    elif svn_metadata:
        cmd.extend(["-r", svn_metadata["revision"]])
    cmd.extend([os.path.join(svnroot, package, tag), os.path.join(tempdir, package)])
    check_output_with_retry(cmd, retries=1, wait=3)

    # Clean out directory of things we don't want to import
    svn_cleanup(full_svn_path, svn_co_root=tempdir,
                svn_path_accept=svn_path_accept, svn_path_reject=svn_path_reject)
    
    # If desired, inject a license into the source code
    if license_text:
        svn_license_injector(full_svn_path, svn_co_root=tempdir, license_text=license_text,
                             license_path_accept=license_path_accept, license_path_reject=license_path_reject)

    # Pass C++ sources through uncrustify
    if uncrustify_config:
        uncrustify_sources(full_svn_path, svn_co_root=tempdir, uncrustify_config=uncrustify_config,
                           uncrustify_path_accept=uncrustify_path_accept, uncrustify_path_reject=uncrustify_path_reject)

    # Copy to git
    full_git_path = os.path.join(gitrepo, package)
    package_root, package_name = os.path.split(full_git_path)
    try:
        if os.path.isdir(full_git_path):
            shutil.rmtree(full_git_path, ignore_errors=True)
        os.makedirs(package_root)
    except OSError:
        pass
    shutil.move(full_svn_path, package_root)

    if commit:
        # get ChangeLog diff
        cl_diff = changelog_diff(package)

        # Commit
        check_output_with_retry(("git", "add", "-A", package))
        if logger.level <= logging.DEBUG:
            logger.debug(check_output_with_retry(("git", "status")))


        cmd = ["git", "commit", "--allow-empty", "-m", "{0} ({1} - r{2})".
               format(svn_metadata['msg'], tag.replace('tags/','',1), svn_metadata['revision'])]
        if svn_metadata:
            cmd.extend(("--author='{0}'".format(author_string(svn_metadata["author"], author_metadata_cache)),
                        "--date={0}".format(svn_metadata["date"])))
        if cl_diff:
            cmd.extend(("-m", "Diff in ChangeLog:\n" + '\n'.join(cl_diff)))
        check_output_with_retry(cmd)
        cmd = ["git", "tag", "-a", get_flattened_git_tag(package, tag, svn_metadata["revision"]), "-m", ""]
        check_output_with_retry(cmd)

    # Clean up
    shutil.rmtree(tempdir)
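
A minimal usage sketch of the function above, performing a checkout-and-copy without committing (the repository path and tag are illustrative):

svn_co_tag_and_commit(
    "svn+ssh://svn.cern.ch/reps/atlasoff",   # svnroot
    "/path/to/athena",                       # git clone to import into
    "Event/xAOD/xAODMuon",                   # package root in SVN and git
    "tags/xAODMuon-00-18-01",                # SVN path element to import
    commit=False,                            # checkout, clean up and copy only
)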
Example No. 30
def main():
    parser = argparse.ArgumentParser(
        description="License file checker, parsing a git import and "
        "checking for any files that may have had the new ATLAS copyright "
        "and license applied in error. All files are listed, filtered by the current "
        "exceptions and then checked for statements of license or copyright that "
        "indicate a problem.")
    parser.add_argument("--path", help="Path to check (by default check cwd)")
    parser.add_argument(
        '--licenseexceptions',
        metavar="FILE",
        help="File listing path globs to exempt from or  "
        "always apply license file to (same format as --svnfilterexceptions)",
        default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "atlaslicense-exceptions.txt"))
    parser.add_argument('--debug',
                        '--verbose',
                        "-v",
                        action="store_true",
                        help="Switch logging into DEBUG mode")
    parser.add_argument(
        '--quiet',
        action="store_true",
        default=False,
        help=
        "Only print filenames that have issues for adding to the filter file")

    # Parse and handle initial arguments
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    # Where to check
    if args.path:
        check_path = args.path
    else:
        check_path = os.getcwd()
    license_path_accept, license_path_reject = load_exceptions_file(
        args.licenseexceptions)

    worry_files = 0
    for root, dirs, files in os.walk(check_path):
        if os.path.basename(root) == ".git":
            continue
        for name in files:
            extension = name.rsplit(".", 1)[1] if "." in name else ""
            if extension not in ("cxx", "cpp", "icc", "cc", "c", "C", "h",
                                 "hpp", "hh", "py", "cmake"):
                continue
            if name == "AtlasInternals.cmake":  # Many false matches, so skip...
                continue
            filename = os.path.join(root, name)
            git_filename = filename[len(check_path) + 1:]
            path_veto = False
            for filter in license_path_reject:
                if re.match(filter, git_filename):
                    logger.debug("File {0} was license file vetoed".format(
                        git_filename))
                    path_veto = True
                    break
            for filter in license_path_accept:
                if re.match(filter, git_filename):
                    logger.debug("File {0} was license file forced".format(
                        git_filename))
                    path_veto = False
                    break
            if path_veto:
                continue
            worry_files += license_check_file(filename, git_filename,
                                              args.quiet)

    if worry_files:
        logger.warning("Found {0} concerning files".format(worry_files))
        sys.exit(1)

    return 0
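
license_check_file() is defined elsewhere in the repository. The sketch below shows the kind of heuristic such a check might apply (an assumption, not the actual implementation), returning 1 for a suspect file and 0 otherwise:

def license_check_file_sketch(filename, git_filename, quiet=False):
    # Illustrative heuristic: flag files whose head carries a copyright or
    # license statement other than the standard CERN/ATLAS one
    with open(filename) as fh:
        head = fh.read(2048)
    if ("Copyright" in head or "General Public License" in head) and "CERN" not in head:
        if quiet:
            print(git_filename)
        else:
            logger.warning("Suspect license text in {0}".format(git_filename))
        return 1
    return 0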