Пример #1
0
def update_master():
    """
    Update the checked-out master branch after a release.

    Step 4 of 5: confirms the branch, rewrites the master files to
    reference the latest release, commits them, and lists the manual
    review/push/build-verification follow-ups.
    """
    para("Step 4 of 5: Commit versions and push changes to master.")
    utils.check_or_exit("Is your git repository now on master")

    versions = release_data["versions"]
    # Rewrite the version strings in the master files (not a release tree).
    utils.update_files(MASTER_VERSION_REPLACE, versions, is_release=False)

    new_version = versions["version"]
    para("The master codebase has now been updated to reference the "
         "latest release.")
    para("Commit changes to master")
    run("git add --all")
    run('git commit -m "Update docs to version %s"' % new_version)

    actions()
    bullet("Self review the latest changes to master")
    bullet("Run: git diff origin/master", level=1)
    bullet("Push changes to master")
    bullet("Run: git push origin master", level=1)
    bullet("Verify builds are working")
    next("Once complete, re-run the script")
Пример #2
0
def update_files():
    """
    Apply the release-candidate version strings across the code tree.

    Confirms the candidate branch is checked out, rewrites the files,
    then prints the manual commit/push, DockerHub, CI-monitoring and
    demonstration-testing steps.
    """
    utils.check_or_exit("Is your git repository now on the candidate release "
                        "branch")

    versions = release_data["versions"]
    # Substitute the candidate version strings throughout the tree.
    utils.update_files(CANDIDATE_VERSION_REPLACE, versions)

    new_version = versions["version"]
    para("The codebase has been updated to reference the release candidate "
         "artifacts.")
    actions()
    bullet("Add, commit and push the updated files to origin/%s-candidate"
           % new_version)
    bullet("git add --all", level=1)
    bullet('git commit -m "Update version strings for release candidate %s"'
           % new_version, level=1)
    bullet("git push origin %s-candidate" % new_version, level=1)
    bullet("Create a DockerHub release called '%s'" % new_version)
    bullet("Monitor the semaphore, CircleCI and Docker builds for this "
           "branch until all have successfully completed.  Fix any issues "
           "with the build.")
    bullet("Run through a subset of the demonstrations.  When running "
           "the vagrant instructions, make sure you are using the "
           "candidate branch (e.g. git checkout %s-candidate):" % new_version)
    bullet("Ubuntu libnetwork", level=1)
    bullet("CoreOS default networking", level=1)
    para("Follow the URL below to view the correct demonstration "
         "instructions for this release candidate.")
    bullet("https://github.com/projectcalico/calico-docker/tree/%s-candidate"
           % new_version)
    next("Once you have completed the testing, re-run the script.")
Пример #3
0
def update_master():
    """
    Commit the new version strings to the master branch.

    Step 4 of 5.  Rewrites the master files so they reference the
    latest release, commits the change, and prints the remaining
    manual review and push steps.
    """
    para("Step 4 of 5: Commit versions and push changes to master.")
    utils.check_or_exit("Is your git repository now on master")

    # Rewrite version strings; this is master, so not a release tree.
    utils.update_files(MASTER_VERSION_REPLACE, release_data["versions"],
                       is_release=False)

    released_version = release_data["versions"]["version"]
    para("The master codebase has now been updated to reference the "
         "latest release.")
    para("Commit changes to master")
    run("git add --all")
    run('git commit -m "Update docs to version %s"' % released_version)

    actions()
    bullet("Self review the latest changes to master")
    bullet("Run: git diff origin/master", level=1)
    bullet("Push changes to master")
    bullet("Run: git push origin master", level=1)
    bullet("Verify builds are working")
    next("Once complete, re-run the script")
Пример #4
0
    def test_step(self, batch, batch_idx):
        """Generate output sequences for one test batch and record them.

        Decodes the generated ids together with the targets and inputs,
        then hands everything to ``update_files`` for writing out.
        """
        source_ids = batch['input_ids']
        source_mask = batch['input_attention_mask']
        target_ids = batch['target_ids']

        test_conf = self.hparams.conf.testing
        output_ids = self.bart.generate(
            input_ids=source_ids,
            attention_mask=source_mask,
            decoder_start_token_id=self.tokenizer.pad_token_id,
            num_beams=test_conf.num_beams,
            max_length=test_conf.max_length_generation,
        )

        predictions, targets, inputs = self._decode(
            output_ids, target_ids, source_ids)
        update_files(self.hparams.conf, predictions, targets, inputs)
Пример #5
0
def cut_release():
    """
    Cut the actual release from the tested candidate branch.

    Step 2 of 5.  Points the tree at the final GitHub artifacts,
    commits and pushes the candidate branch, then walks the operator
    through the GitHub release, binary attachment and final testing.
    """
    para("Step 2 of 5: Push final branch, then cut release with binary.")
    utils.check_or_exit("Have you successfully tested your release candidate")

    # Update the code tree once more to set the final GitHub URLs.
    utils.update_files(FINAL_VERSION_REPLACE, release_data["versions"])

    new_version = release_data["versions"]["version"]
    candidate = "%s-candidate" % new_version
    para("The codebase has been updated to reference the GitHub release "
         "artifacts.")
    bullet("Adding, committing and pushing the updated files to "
           "origin/" + candidate)
    run("git add --all")
    run('git commit -m "Update version strings for release %s"' % new_version)
    run("git push origin " + candidate)

    actions()
    bullet("Monitor the semaphore, CircleCI and Docker builds for this "
           "branch until all have successfully completed.  Fix any issues "
           "with the build.")
    bullet("Create a Pull Request against master and review the changes "
           "(or run `git diff origin/master` from the candidate branch). "
           "Delete the pull request after comparing.")
    bullet("Create a GitHub release called '%s'" % new_version)

    para("Attach the calicoctl binaries to the release.  For linux, It "
         "can be downloaded from the following URL:")
    print("http://www.projectcalico.org/builds/calicoctl?circleci-branch="
          + candidate)
    para("For Windows and Mac it can be downloaded from")
    print("http://capitalship:8080/job/calicoctl-mac/")
    print("http://capitalship:8080/job/calicoctl-windows/")

    para("Once the release has been created on GitHub, perform a final "
         "test of the release:")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the tagged "
           "release (e.g. git checkout tags/%s):" % new_version)
    bullet("CoreOS libnetwork", level=1)
    bullet("Ubuntu default networking", level=1)
    bullet("Make sure to check the reported versions of all artifacts.")
    next("Once you have completed the testing, re-run the script.")
Пример #6
0
def cut_release():
    """
    Cut the final release once the candidate branch has been tested.

    Re-points the code tree at the final GitHub artifacts, then lists
    the manual commit/push, review, release-creation and final testing
    steps for the operator.
    """
    utils.check_or_exit("Have you successfully tested your release candidate")

    versions = release_data["versions"]
    # One last pass over the tree to set the final GitHub URLs.
    utils.update_files(FINAL_VERSION_REPLACE, versions)

    new_version = versions["version"]
    candidate = "%s-candidate" % new_version
    commit_msg = 'git commit -m "Update version strings for release %s"'
    para("The codebase has been updated to reference the GitHub release "
         "artifacts.")
    actions()
    bullet("Add, commit and push the updated files to origin/" + candidate)
    bullet("git add --all", level=1)
    bullet(commit_msg % new_version, level=1)
    bullet("git push origin " + candidate, level=1)
    bullet("[ideally squash the two commits into one]", level=1)
    bullet("Monitor the semaphore, CircleCI and Docker builds for this "
           "branch until all have successfully completed.  Fix any issues "
           "with the build.")
    bullet("Create a Pull Request and review the changes")
    bullet("Create a GitHub release called '%s'" % new_version)

    para("Attach the calicoctl binary to the release.  It can be "
         "downloaded from the following URL:")
    bullet("http://www.projectcalico.org/builds/calicoctl"
           "?circleci-branch=" + candidate)

    para("Once the release has been created on GitHub, perform a final "
         "test of the release:")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the tagged "
           "release (e.g. git checkout tags/%s):" % new_version)
    bullet("CoreOS libnetwork", level=1)
    bullet("Ubuntu default networking", level=1)
    next("Once you have completed the testing, re-run the script.")
Пример #7
0
def cut_release():
    """
    Produce the final release from a fully-tested candidate branch.

    Updates the tree with the final GitHub URLs and then prints the
    remaining manual steps (commit/push, PR review, GitHub release,
    binary attachment and a last round of testing).
    """
    utils.check_or_exit("Have you successfully tested your release candidate")

    # Final pass: point the code tree at the GitHub release URLs.
    utils.update_files(FINAL_VERSION_REPLACE, release_data["versions"])

    version = release_data["versions"]["version"]
    para("The codebase has been updated to reference the GitHub release "
         "artifacts.")
    actions()
    bullet("Add, commit and push the updated files to "
           "origin/%s-candidate" % version)
    bullet("git add --all", level=1)
    bullet('git commit -m "Update version strings for release %s"' % version,
           level=1)
    bullet("git push origin %s-candidate" % version, level=1)
    bullet("[ideally squash the two commits into one]", level=1)
    bullet("Monitor the semaphore, CircleCI and Docker builds for this "
           "branch until all have successfully completed.  Fix any issues "
           "with the build.")
    bullet("Create a Pull Request and review the changes")
    bullet("Create a GitHub release called '%s'" % version)

    para("Attach the calicoctl binary to the release.  It can be "
         "downloaded from the following URL:")
    bullet("http://www.projectcalico.org/latest/calicoctl"
           "?circleci-branch=%s-candidate" % version)

    para("Once the release has been created on GitHub, perform a final "
         "test of the release:")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the tagged "
           "release (e.g. git checkout tags/%s):" % version)
    bullet("CoreOS libnetwork", level=1)
    bullet("Ubuntu default networking", level=1)
    next("Once you have completed the testing, re-run the script.")
Пример #8
0
def update_files():
    """
    Update the version information throughout the tree for the
    release candidate.

    After rewriting the files, prints the manual commit/push,
    DockerHub and testing instructions for the candidate branch.
    """
    utils.check_or_exit("Is your git repository now on the candidate release "
                        "branch")

    # Rewrite the candidate version strings across the code tree.
    utils.update_files(CANDIDATE_VERSION_REPLACE, release_data["versions"])

    new_version = release_data["versions"]["version"]
    branch = "%s-candidate" % new_version
    para("The codebase has been updated to reference the release candidate "
         "artifacts.")
    actions()
    bullet("Add, commit and push the updated files to origin/" + branch)
    bullet("git add --all", level=1)
    bullet('git commit -m "Update version strings for release candidate %s"'
           % new_version, level=1)
    bullet("git push origin " + branch, level=1)
    bullet("Create a DockerHub release called '%s'" % new_version)
    bullet("Monitor the semaphore, CircleCI and Docker builds for this "
           "branch until all have successfully completed.  Fix any issues "
           "with the build.")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the candidate "
           "branch (e.g. git checkout %s):" % branch)
    bullet("Ubuntu libnetwork", level=1)
    bullet("CoreOS default networking", level=1)
    para("Follow the URL below to view the correct demonstration "
         "instructions for this release candidate.")
    bullet("https://github.com/projectcalico/calico-docker/tree/" + branch)
    next("Once you have completed the testing, re-run the script.")
Пример #9
0
def start_release():
    """
    Start the release process, asking user for version information.

    Creates versioned copies of the master documentation directories,
    resolves the component versions (from command-line arguments or the
    GitHub releases pages), rewrites the version strings across the
    tree and commits the result in two commits.

    :return: None.  Returns early -- copying and committing nothing --
        when a versioned directory for the requested version already
        exists.
    """
    new_version = arguments.get("--calico")
    if not new_version:
        new_version = raw_input("New Calico version? (vX.Y): ")

    # Check if any of the new version dirs exist already.
    # NOTE(review): this checks ./_layouts/<ver>, but the copies below
    # write ./_includes/<ver> and ./_data/<ver with dots replaced> --
    # confirm the intended set of directories.
    new_dirs = [
        "./%s" % new_version,
        "./_data/%s" % new_version,
        "./_layouts/%s" % new_version
    ]
    for new_dir in new_dirs:
        if os.path.isdir(new_dir):
            # Quit instead of making assumptions.  The early return is
            # the fix: previously the code only printed this warning
            # and fell through, so shutil.copytree below would raise
            # against the existing directory (or mix trees).
            para(
                "A versioned folder for %s already exists. Remove and rerun this script?"
                % new_dir)
            return

    # Create the versioned directories.
    shutil.copytree("./master", new_version)
    # Temporary workdown, use vX_Y instead of vX.Y
    # https://github.com/jekyll/jekyll/issues/5429
    shutil.copytree("./_data/master",
                    "./_data/%s" % new_version.replace(".", "_"))
    shutil.copytree("./_includes/master", "./_includes/%s" % new_version)

    run("git add --all")
    run('git commit -m "Copy Master for release %s"' % new_version)

    actions()
    para("Created commit of the raw, unchanged release files.")
    para("Moving on to Version replacement of files.")

    # Resolve each component version: take the command-line value when
    # given, otherwise look it up from the GitHub releases page.
    calico_containers_version = arguments["--calico-containers"]
    if not calico_containers_version:
        calico_containers_version = \
            utils.get_github_library_version("calico-containers", "https://github.com/projectcalico/calico-containers")

    felix_version = arguments["--felix"]
    if not felix_version:
        felix_version = \
            utils.get_github_library_version("felix", "https://github.com/projectcalico/felix")

    libnetwork_version = arguments["--libnetwork"]
    if not libnetwork_version:
        libnetwork_version = \
            utils.get_github_library_version("libnetwork-plugin", "https://github.com/projectcalico/libnetwork-plugin")

    calico_cni_version = arguments["--calico-cni"]
    if not calico_cni_version:
        # NOTE(review): the library name "calico-cni-version" looks
        # inconsistent with the other names ("calico-cni"?) -- confirm.
        calico_cni_version = \
            utils.get_github_library_version("calico-cni-version", "https://github.com/projectcalico/calico-cni")

    kube_policy_controller_version = arguments["--k8s-policy-controller"]
    if not kube_policy_controller_version:
        kube_policy_controller_version = \
            utils.get_github_library_version("kube-policy-controller", "https://github.com/projectcalico/k8s-policy")

    # Mapping of template placeholder names to the resolved versions.
    versions = {
        "calico-version": new_version,
        "calico-containers-version": calico_containers_version,
        "calico-containers-version-no-v": calico_containers_version[1:],
        "felix-version": felix_version,
        "libnetwork-version": libnetwork_version,
        "kube-policy-controller-version": kube_policy_controller_version,
        "calico-cni-version": calico_cni_version
    }

    actions()
    para("Using:")
    para(str(versions))
    check_or_exit("Continue?")

    # Update the code tree
    utils.update_files(VERSION_REPLACE, versions)

    para("The codebase has been updated to reference the release artifacts.")
    bullet("Adding, and committing the updated files")
    run("git add --all")
    run('git commit -m "Update version strings for release %s"' % new_version)
    actions()
    para(
        "You are done with release preparation. You now have two new commits on your branch which add the "
        "necessary files. Please: ")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the release "
           "folder (e.g. ./%s):" % new_version)
    bullet("Ubuntu libnetwork", level=1)
    bullet("CoreOS default networking", level=1)
    bullet("CoreOS libnetwork", level=1)
    bullet("Ubuntu default networking", level=1)
    bullet("Make sure to check the reported versions of all artifacts.")
    bullet("Create a Pull Request against master and review the changes (or "
           "run `git diff origin/master` from the candidate branch). "
           "Merge when ready.")
Пример #10
0
def start_release():
    """
    Start the release process, asking user for version information.

    Step 1 of 5: confirms the checkout, validates the markdown URIs,
    collects the new calicoctl version and the pinned library versions
    (from the ``arguments`` mapping or interactively), creates a
    ``<version>-candidate`` branch, rewrites the version strings and
    pushes the branch to origin.
    :return:
    """
    para("Step 1 of 5: Create and push release branch with new versions.")
    para("Your git repository should be checked out to the correct revision "
         "that you want to cut a release with.  This is usually the HEAD of "
         "the master branch.")
    utils.check_or_exit("Are you currently on the correct revision")

    # Before asking for version info, perform validation on the current code.
    utils.validate_markdown_uris()

    old_version = utils.get_calicoctl_version()
    para("Current version is: %s" % old_version)

    new_version = arguments["CALICO_DOCKER_VERSION"]
    if not new_version:
        # Keep prompting until the entered version is a valid increment
        # over the current one.
        while True:
            new_version = raw_input("New calicoctl version?: ")
            release_type = utils.check_version_increment(old_version, new_version)
            if release_type:
                para("Release type: %s" % release_type)
                break

    calico_version = arguments["CALICO_VERSION"]
    libcalico_version = arguments["LIBCALICO_VERSION"]
    libnetwork_version = arguments["LIBNETWORK_VERSION"]
    kubernetes_version = arguments["KUBERNETES_VERSION"]


    # If any pinned version is missing, look them all up from the GitHub
    # releases pages instead.
    if not (calico_version and libcalico_version and libnetwork_version and kubernetes_version):
        para("To pin the calico libraries used by calico-docker, please specify "
             "the name of the requested versions as they appear in the GitHub "
             "releases.")

        calico_version = \
            utils.get_github_library_version("calico (felix)", __felix_version__,
                                             "https://github.com/projectcalico/calico")
        libcalico_version = \
            utils.get_github_library_version("libcalico", __libcalico_version__,
                                             "https://github.com/projectcalico/libcalico")
        libnetwork_version = \
            utils.get_github_library_version("libnetwork-plugin", __libnetwork_plugin_version__,
                                             "https://github.com/projectcalico/libnetwork-plugin")

        kubernetes_version = \
            utils.get_github_library_version("kubernetes-plugin", __kubernetes_plugin_version__,
                                             "https://github.com/projectcalico/calico-kubernetes")

    # Record the resolved versions for the later release steps.
    release_data["versions"] = {"version": new_version,
                                "version-no-v": new_version[1:],
                                "calico-version": calico_version,
                                "libcalico-version": libcalico_version,
                                "libnetwork-version": libnetwork_version,
                                "kubernetes-version": kubernetes_version,
                                }

    bullet("Creating a candidate release branch called "
           "'%s-candidate'." % new_version)
    if arguments['--force']:
        # --force: recreate the candidate branch from scratch.
        run("git branch -D %s-candidate" % new_version)
    run("git checkout -b %s-candidate" % new_version)

    # Update the code tree
    utils.update_files(CANDIDATE_VERSION_REPLACE, release_data["versions"])

    new_version = release_data["versions"]["version"]
    para("The codebase has been updated to reference the release candidate "
         "artifacts.")

    bullet("Adding, committing and pushing the updated files to "
           "origin/%s-candidate" % new_version)
    run("git add --all")
    run('git commit -m "Update version strings for release '
           'candidate %s"' % new_version)
    if arguments['--force']:
        run("git push -f origin %s-candidate" % new_version)
    else:
        run("git push origin %s-candidate" % new_version)
    actions()
    bullet("Create a DockerHub calico/node release tagged '%s'.  Use the "
           "candidate branch as the name and /calico_node as the Dockerfile "
           "location" % new_version)
    bullet("Monitor the semaphore, CircleCI and Docker builds for this branch "
           "until all have successfully completed.  Fix any issues with the "
           "build.")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the candidate "
           "branch (e.g. git checkout %s-candidate):" % new_version)
    bullet("Ubuntu libnetwork", level=1)
    bullet("CoreOS default networking", level=1)
    para("Follow the URL below to view the correct demonstration instructions "
         "for this release candidate.")
    bullet("https://github.com/projectcalico/calico-docker/tree/%s-candidate" % new_version)
    # NOTE: next() here is a project helper (shadows the builtin).
    next("Once you have completed the testing, re-run the script.")
Пример #11
0
def start_release():
    """
    Start the release process, asking user for version information.

    Step 1 of 5: confirms the checkout, validates the markdown URIs,
    collects the new calicoctl version and the pinned library versions
    (from the ``arguments`` mapping or interactively), creates a
    ``<version>-candidate`` branch, rewrites the version strings and
    pushes the branch to origin.
    :return:
    """
    para("Step 1 of 5: Create and push release branch with new versions.")
    para("Your git repository should be checked out to the correct revision "
         "that you want to cut a release with.  This is usually the HEAD of "
         "the master branch.")
    utils.check_or_exit("Are you currently on the correct revision")

    # Before asking for version info, perform validation on the current code.
    utils.validate_markdown_uris()

    old_version = utils.get_calicoctl_version()
    para("Current version is: %s" % old_version)

    new_version = arguments["CALICO_DOCKER_VERSION"]
    if not new_version:
        # Keep prompting until the entered version is a valid increment
        # over the current one.
        while True:
            new_version = raw_input("New calicoctl version?: ")
            release_type = utils.check_version_increment(
                old_version, new_version)
            if release_type:
                para("Release type: %s" % release_type)
                break

    calico_version = arguments["CALICO_VERSION"]
    libcalico_version = arguments["LIBCALICO_VERSION"]
    libnetwork_version = arguments["LIBNETWORK_VERSION"]
    kubernetes_version = arguments["KUBERNETES_VERSION"]

    # If any pinned version is missing, look them all up from the GitHub
    # releases pages instead.
    if not (calico_version and libcalico_version and libnetwork_version
            and kubernetes_version):
        para(
            "To pin the calico libraries used by calico-docker, please specify "
            "the name of the requested versions as they appear in the GitHub "
            "releases.")

        calico_version = \
            utils.get_github_library_version("calico (felix)", __felix_version__,
                                             "https://github.com/projectcalico/calico")
        libcalico_version = \
            utils.get_github_library_version("libcalico", __libcalico_version__,
                                             "https://github.com/projectcalico/libcalico")
        libnetwork_version = \
            utils.get_github_library_version("libnetwork-plugin", __libnetwork_plugin_version__,
                                             "https://github.com/projectcalico/libnetwork-plugin")

        kubernetes_version = \
            utils.get_github_library_version("kubernetes-plugin", __kubernetes_plugin_version__,
                                             "https://github.com/projectcalico/calico-kubernetes")

    # Record the resolved versions for the later release steps.
    release_data["versions"] = {
        "version": new_version,
        "version-no-v": new_version[1:],
        "calico-version": calico_version,
        "libcalico-version": libcalico_version,
        "libnetwork-version": libnetwork_version,
        "kubernetes-version": kubernetes_version,
    }

    bullet("Creating a candidate release branch called "
           "'%s-candidate'." % new_version)
    if arguments['--force']:
        # --force: recreate the candidate branch from scratch.
        run("git branch -D %s-candidate" % new_version)
    run("git checkout -b %s-candidate" % new_version)

    # Update the code tree
    utils.update_files(CANDIDATE_VERSION_REPLACE, release_data["versions"])

    new_version = release_data["versions"]["version"]
    para("The codebase has been updated to reference the release candidate "
         "artifacts.")

    bullet("Adding, committing and pushing the updated files to "
           "origin/%s-candidate" % new_version)
    run("git add --all")
    run('git commit -m "Update version strings for release '
        'candidate %s"' % new_version)
    if arguments['--force']:
        run("git push -f origin %s-candidate" % new_version)
    else:
        run("git push origin %s-candidate" % new_version)
    actions()
    bullet("Create a DockerHub calico/node release tagged '%s'.  Use the "
           "candidate branch as the name and /calico_node as the Dockerfile "
           "location" % new_version)
    bullet("Monitor the semaphore, CircleCI and Docker builds for this branch "
           "until all have successfully completed.  Fix any issues with the "
           "build.")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the candidate "
           "branch (e.g. git checkout %s-candidate):" % new_version)
    bullet("Ubuntu libnetwork", level=1)
    bullet("CoreOS default networking", level=1)
    para("Follow the URL below to view the correct demonstration instructions "
         "for this release candidate.")
    bullet("https://github.com/projectcalico/calico-docker/tree/%s-candidate" %
           new_version)
    # NOTE: next() here is a project helper (shadows the builtin).
    next("Once you have completed the testing, re-run the script.")
Пример #12
0
def start_release():
    """
    Start the release process, asking user for version information.

    Copies the master documentation directories into versioned copies,
    resolves the component versions (from command-line arguments or the
    GitHub releases pages), rewrites the version strings and commits
    the result in two commits.

    :return: None.  Returns early -- copying and committing nothing --
        when a versioned directory for the requested version already
        exists.
    """
    new_version = arguments.get("--calico")
    if not new_version:
        new_version = raw_input("New Calico version? (vX.Y): ")

    # Check if any of the new version dirs exist already.
    # NOTE(review): this checks ./_layouts/<ver>, but the copies below
    # write ./_includes/<ver> -- confirm the intended set of dirs.
    new_dirs = ["./%s" % new_version,
            "./_data/%s" % new_version,
            "./_layouts/%s" % new_version]
    for new_dir in new_dirs:
        if os.path.isdir(new_dir):
            # Quit instead of making assumptions.  The early return is
            # the fix: previously the warning was printed and execution
            # fell through, so shutil.copytree below would raise
            # against the existing directory.
            para("A versioned folder for %s already exists. Remove and rerun this script?" % new_dir)
            return

    # Create the versioned directories.
    shutil.copytree("./master", new_version)
    # Temporary workdown, use vX_Y instead of vX.Y
    # https://github.com/jekyll/jekyll/issues/5429
    shutil.copytree("./_data/master", "./_data/%s" % new_version.replace(".","_"))
    shutil.copytree("./_includes/master", "./_includes/%s" % new_version)

    run("git add --all")
    run('git commit -m "Copy Master for release %s"' % new_version)

    actions()
    para("Created commit of the raw, unchanged release files.")
    para("Moving on to Version replacement of files.")

    # Resolve each component version: take the command-line value when
    # given, otherwise look it up from the GitHub releases page.
    calico_containers_version = arguments["--calico-containers"]
    if not calico_containers_version:
        calico_containers_version = \
            utils.get_github_library_version("calico-containers", "https://github.com/projectcalico/calico-containers")

    felix_version = arguments["--felix"]
    if not felix_version:
        felix_version = \
            utils.get_github_library_version("felix", "https://github.com/projectcalico/felix")

    libnetwork_version = arguments["--libnetwork"]
    if not libnetwork_version:
        libnetwork_version = \
            utils.get_github_library_version("libnetwork-plugin", "https://github.com/projectcalico/libnetwork-plugin")

    calico_cni_version = arguments["--calico-cni"]
    if not calico_cni_version:
        # NOTE(review): the library name "calico-cni-version" looks
        # inconsistent with the other names ("calico-cni"?) -- confirm.
        calico_cni_version = \
            utils.get_github_library_version("calico-cni-version", "https://github.com/projectcalico/calico-cni")

    kube_policy_controller_version = arguments["--k8s-policy-controller"]
    if not kube_policy_controller_version:
        kube_policy_controller_version = \
            utils.get_github_library_version("kube-policy-controller", "https://github.com/projectcalico/k8s-policy")

    # Mapping of template placeholder names to the resolved versions.
    versions = {
        "calico-version": new_version,
        "calico-containers-version": calico_containers_version,
        "calico-containers-version-no-v": calico_containers_version[1:],
        "felix-version": felix_version,
        "libnetwork-version": libnetwork_version,
        "kube-policy-controller-version": kube_policy_controller_version,
        "calico-cni-version": calico_cni_version
    }

    actions()
    para("Using:")
    para(str(versions))
    check_or_exit("Continue?")

    # Update the code tree
    utils.update_files(VERSION_REPLACE, versions)

    para("The codebase has been updated to reference the release artifacts.")
    bullet("Adding, and committing the updated files")
    run("git add --all")
    run('git commit -m "Update version strings for release %s"' % new_version)
    actions()
    para("You are done with release preparation. You now have two new commits on your branch which add the "
         "necessary files. Please: ")
    bullet("Run through a subset of the demonstrations.  When running the "
           "vagrant instructions, make sure you are using the release "
           "folder (e.g. ./%s):" % new_version)
    bullet("Ubuntu libnetwork", level=1)
    bullet("CoreOS default networking", level=1)
    bullet("CoreOS libnetwork", level=1)
    bullet("Ubuntu default networking", level=1)
    bullet("Make sure to check the reported versions of all artifacts.")
    bullet("Create a Pull Request against master and review the changes (or "
           "run `git diff origin/master` from the candidate branch). "
           "Merge when ready.")
    def test_step(self, batch, batch_idx):
        """Run one end-to-end test batch.

        Selects supporting-fact sentences with the BERT sentence
        classifier, extracts the context answer from the first sentence
        group, re-encodes facts + answer with the BART tokenizer, and
        generates questions, which are handed to ``update_files``.

        Note (assumptions to confirm against the dataloader): each
        element of ``src_ids``/``src_mask`` looks like a
        (num_examples, seq_len) tensor of BERT token ids, one tensor
        per candidate sentence position.
        """
        src_ids, src_mask, tgt_ids = batch['classifier_input_ids'], batch[
            'classifier_input_attention_mask'], batch['generator_target_ids']
        # One accumulating string of selected sentences per example.
        supporting_facts = [''] * src_ids[0].shape[0]

        # classify sentences to select supporting facts
        for sentences, masks in zip(src_ids, src_mask):
            logits, = self.sentence_classifier.bert(sentences,
                                                    attention_mask=masks)
            # Probability of the "supporting fact" class (column 1).
            probs = torch.nn.functional.softmax(logits, dim=1)[:, 1]
            predictions = probs >= self.threshold

            # decode
            for idx, sentence in enumerate(sentences):
                if predictions[idx]:
                    sentence = self.bert_tokenizer.decode(
                        sentence,
                        skip_special_tokens=False,
                        clean_up_tokenization_spaces=True)
                    # Keep only the text between [CLS] and the first [SEP].
                    sentence = sentence[sentence.find('[CLS]') +
                                        len('[CLS]'):sentence.find('[SEP]')]
                    supporting_facts[idx] += sentence

        # get context answer
        answers = []
        for sentence in src_ids[0]:
            sentence = self.bert_tokenizer.decode(
                sentence,
                skip_special_tokens=False,
                clean_up_tokenization_spaces=True)
            # Slice out the segment between the first and second [SEP];
            # presumably this segment holds the answer text -- verify
            # against the dataset construction.
            sentence = sentence[sentence.find('[SEP]') + len('[SEP]'):sentence.
                                find('[SEP]',
                                     sentence.find('[SEP]') +
                                     len('[SEP]'), len(sentence))]
            answers.append(sentence.strip())

        # encode supporting facts
        sf_ids = []
        sf_mask = []
        for idx, sf in enumerate(supporting_facts):
            input_dict = self.bart_tokenizer(text=sf,
                                             text_pair=answers[idx],
                                             add_special_tokens=True,
                                             max_length=100,
                                             padding="max_length",
                                             truncation=True)
            ids, masks = input_dict['input_ids'], input_dict['attention_mask']
            sf_ids.append(ids)
            sf_mask.append(masks)
        # NOTE(review): device is hard-coded to 'cuda' here.
        sf_ids, sf_mask = torch.tensor(sf_ids).to('cuda'), torch.tensor(
            sf_mask).to('cuda')

        # generate questions with selected supporting facts
        generated_ids = self.question_generator.bart.generate(
            input_ids=sf_ids,
            attention_mask=sf_mask,
            decoder_start_token_id=self.bart_tokenizer.pad_token_id,
            num_beams=self.hparams.conf.testing.num_beams,
            max_length=self.hparams.conf.testing.max_length_generation)

        predictions, targets = self._decode(generated_ids, tgt_ids)
        update_files(self.hparams.conf, predictions, targets, supporting_facts)
Пример #14
0
def update_backup(commands):
    """Update an existing backup stored on a VeraCrypt volume.

    Mounts the volume (unless it is already mounted), loads the previous
    metadata and file list, diffs them against the current contents of the
    backed-up directory, and — when enough changes have accumulated or
    ``commands.force`` is set — archives new/updated files, flags deleted
    ones and rewrites the backup metadata.

    Parameters:
        commands: parsed CLI namespace with attributes ``virtual_drive``,
            ``name``, ``volume``, ``password``, ``blacklist``,
            ``recursion_level``, ``verbosity`` and ``force``.

    Returns:
        A colour-prefixed status message describing success or the reason
        the update was aborted.
    """
    if (commands.virtual_drive):
        # If drive V is already taken the user may pick another letter.
        virtual_drive = commands.virtual_drive
    else:
        virtual_drive = DEFAULT_VIRTUAL_DRIVE

    # Normalise "V" -> "V:" so it can be used as a Windows drive spec.
    if (not virtual_drive.endswith(':')):
        virtual_drive = virtual_drive + ':'

    if (commands.name):
        volume = utils.find_path_to_volume_by_backup_name(
            commands.name, program_directory)

    elif (commands.volume):
        volume = commands.volume

    else:
        return Font.YELLOW + '[!] Введите имя бэкапа или путь к тому бэкапа для обновления'

    if (commands.password):
        if (len(commands.password) < MIN_PASSWORD_LENGTH):
            return Font.YELLOW + '[!] Пароль слишком короткий. Минимум 25 символов'
    else:
        return Font.YELLOW + '[!] Пароль не найден'

    if (utils.volume_is_mount(virtual_drive)):
        print(Font.YELLOW +
              '[!] Том уже смонтирован или диск с таким именем уже существует')
    else:
        # Mount the VeraCrypt volume.
        if (not utils.mount_veracrypt_volume(DEFAULT_VERACRYPT_PATH, volume,
                                             commands.password,
                                             virtual_drive)):
            return Font.YELLOW + '[!] Возникла ошибка при монтировании тома'

    # Verify the mounted volume actually contains the backup files.
    if (not utils.is_backup_drive(virtual_drive)):
        return Font.YELLOW + '[i] Диск не является бэкапом'

    print(Font.YELLOW + '[i] Загрузка старых метаданных...')
    try:
        backup_metadata = utils.load_metadata_from_json(
            os.path.join(virtual_drive, 'metadata.json'))
    except utils.CastomException as exc:
        # BUG FIX: str + Exception raises TypeError in Python 3;
        # stringify the exception before concatenating.
        return Font.YELLOW + str(exc)
    except Exception as exc:
        return Font.YELLOW + '[!] Возникла непредвиденая ошибка: %s' % exc

    metadata = backup_metadata['metadata']
    backup_name = backup_metadata['backup_name']
    backup_directory = backup_metadata['directory']
    compression_level = backup_metadata['compression_level']
    amount_files_in_backup = backup_metadata['amount_files_in_backup']

    # Load the previous file list.
    last_filelist = utils.read_file(os.path.join(
        virtual_drive, 'filelist.txt'))

    if (commands.blacklist):
        blacklist = utils.read_file(commands.blacklist)
    else:
        # Fall back to the blacklist stored inside the backup.
        blacklist = utils.read_file(
            os.path.join(virtual_drive, 'blacklist.txt'))

    if (commands.recursion_level):
        max_recursion_level = commands.recursion_level
    else:
        max_recursion_level = backup_metadata['recursion_level']

    # --- Collect the current state, diff it against the stored state,
    # --- classify every change and distribute them into the proper lists.
    print(Font.YELLOW + '[i] Начало сбора данных...')
    # List of files currently present in the backed-up directory.
    new_filelist = collect_backup_files(backup_directory, blacklist,
                                        max_recursion_level)
    print(Font.YELLOW + '[i] Найдено файлов: %i шт.' % len(new_filelist))

    # Compare the old and new file lists to find changes.
    deleted_files, appended_files = utils.cmp_lists(last_filelist,
                                                    new_filelist)
    # WORKAROUND: the placeholder "dummy folder" lives in the program
    # directory, not in the archived one, so the diff flags it as deleted;
    # drop it from the deleted list.
    if (IGNORED_EMPTY_DIR in deleted_files):
        deleted_files.remove(IGNORED_EMPTY_DIR)

    # Classify changes (this also mutates the deleted-files list).
    changes_list = utils.identify_changes(metadata, appended_files,
                                          deleted_files)

    # Add metadata entries for brand-new files.
    for change in changes_list:
        parent, filename, status = change
        if (not parent and not status):
            metadata.update({filename: get_information(filename)})

    # Detect updates to files that already exist in the archive
    # (by comparing on-disk mtime against the stored mtime).
    updated_files = []
    for filename in metadata:
        if (os.path.exists(filename)):
            if (os.stat(filename).st_mtime > metadata[filename]['st_mtime']):
                updated_files.append(filename)
                metadata.update({filename: get_information(filename)})
                if (commands.verbosity):
                    print(Font.GREEN + '[>] Файл %s нужно обновить' % filename)

    for data in changes_list:
        if (all(data)):
            updated_files.append(data[0])
            if (commands.verbosity):
                print(Font.GREEN + '[>] Файл %s %s в %s' %
                      (data[0], CHANGES_STATUSES.get(data[2], 'Неизвестно'),
                       data[1]))

    for file in deleted_files:
        if (commands.verbosity):
            print(Font.GREEN + '[>] Файл %s был удален!' % file)

    # Detect copies / moves / renames to create dependencies and shrink
    # the amount of data that has to be re-archived.
    appended_files = optimize_metadata(changes_list, metadata)

    amount_appended_files = len(appended_files)
    amount_updated_files = len(updated_files)
    amount_deleted_files = len(deleted_files)

    amount_changes = amount_updated_files + amount_appended_files + amount_deleted_files

    if (amount_changes >= MIN_AMOUNT_CHANGES_TO_UPDATE or commands.force):

        if (amount_appended_files > 0):
            print(Font.CYAN +
                  '[i] Добавлено файлов: %i шт.' % amount_appended_files)

        if (amount_updated_files > 0):
            print(Font.CYAN +
                  '[i] Обновлено файлов: %i шт.' % amount_updated_files)

        if (amount_deleted_files):
            print(Font.CYAN +
                  '[i] Удалено файлов: %i шт.' % amount_deleted_files)

        # Ensure the per-backup "updates" folder exists.
        utils.create_if_not_exists(os.path.join(virtual_drive, 'updates'))
        # Before updating, snapshot the old data and the change list so
        # the update can be rolled back later.
        updates_directory = os.path.join(
            virtual_drive, 'updates',
            time.ctime(time.time()).replace(':', '-'))
        # Make sure the snapshot directory exists.
        utils.create_if_not_exists(updates_directory)
        # Persist the old metadata / file list / change list.
        try:
            utils.dump_metadata_to_json(
                os.path.join(updates_directory, 'metadata.json'),
                backup_metadata)
            utils.dump_metadata_to_txt(
                os.path.join(updates_directory, 'filelist.txt'), last_filelist)
            utils.dump_metadata_to_json(
                os.path.join(updates_directory, 'changes.json'), changes_list)
        except utils.CastomException as exc:
            # BUG FIX: stringify the exception before concatenation.
            print(Font.YELLOW + str(exc))

        # Update changed files inside the archive.
        if (updated_files):
            asigned_updated_files = utils.asign_unf(updated_files, metadata)
            utils.update_files(virtual_drive, backup_name, compression_level,
                               asigned_updated_files, Font)
        # Add new files to the archive.
        if (appended_files):
            asigned_appended_files = utils.asign_unf(appended_files, metadata)
            utils.compress_files(virtual_drive, backup_name, compression_level,
                                 asigned_appended_files, Font)
        # Mark deleted files in the metadata.
        if (deleted_files):
            utils.set_flags_is_deleted_files(metadata, deleted_files)

        # NOTE(review): 'amount_appended_filse' is misspelled, but readers
        # elsewhere may depend on this exact key — fix in lockstep with them.
        backup_metadata.update({
            'last_update':
            time.ctime(time.time()),
            'amount_appended_filse':
            amount_appended_files,
            'amount_updated_files':
            amount_updated_files,
            'amount_deleted_files':
            amount_deleted_files,
            'amount_files_in_backup':
            amount_files_in_backup + amount_appended_files -
            amount_deleted_files,
            'metadata':
            metadata,
        })

        update_backup_metadata(virtual_drive, backup_metadata, new_filelist,
                               blacklist)
        auto_dismount_veracrypt_volume_or_open_backup_drive(
            commands, virtual_drive)

        return Font.CYAN + '[>] Бэкап успешно обновлен!'

    else:
        return Font.YELLOW + '[!] Бэкап не требует обовления'
Пример #15
0
def energy(user_func, *args, powerLoss = 0.8, year, printToScreen, timeseries):
    """ Evaluates the kwh needed for your code to run

    Samples a baseline wattage for a few seconds, then runs *user_func*
    in a child process while sampling CPU (and, on Nvidia systems, GPU)
    wattage, and converts the baseline-corrected power into kilowatt-hours.

    Parameters:
       user_func (function): user's function
       *args: positional arguments forwarded to user_func
       powerLoss (float): power-supply efficiency divisor applied to readings
       year: not referenced in this function; kept for interface
           compatibility — NOTE(review): confirm callers still need it
       printToScreen (bool): live-log readings to stdout
       timeseries (bool): dump baseline/process wattage CSV files

    Returns:
        (process_kwh, return_value, watt_averages, files, total_time,
         time_baseline, reading_baseline_wattage, time_process,
         reading_process_wattage)

    Raises:
        Exception: if the process finished before any sample was gathered.
    """

    baseline_check_seconds = 5
    files, multiple_cpus = utils.get_files()
    is_nvidia_gpu = utils.valid_gpu()
    is_valid_cpu = utils.valid_cpu()

    # GPU handling if Nvidia; seed with 0 so [-1] is safe before any sample.
    gpu_baseline = [0]
    gpu_process = [0]
    bash_command = "nvidia-smi -i 0 --format=csv,noheader --query-gpu=power.draw"

    time_baseline = []
    reading_baseline_wattage = []

    time_process = []
    reading_process_wattage = []

    for i in range(int(baseline_check_seconds / DELAY)):
        if is_nvidia_gpu:
            output = subprocess.check_output(['bash', '-c', bash_command])
            # Strip the trailing " W" unit from nvidia-smi's output.
            output = float(output.decode("utf-8")[:-2])
            gpu_baseline.append(output)
        if is_valid_cpu:
            files = utils.measure_files(files, DELAY)
            files = utils.update_files(files)
        else:
            time.sleep(DELAY)
        # Adds the most recent value of GPU; 0 if not Nvidia
        last_reading = utils.get_total(files, multiple_cpus) + gpu_baseline[-1]
        # NOTE(review): readings are only recorded when printToScreen is
        # set, so timeseries CSVs come out empty without it — confirm intent.
        if last_reading >= 0 and printToScreen:
            utils.log("Baseline wattage", last_reading)
            # BUG FIX: this local was named `time`, shadowing the `time`
            # module and breaking time.sleep() on the next iteration.
            elapsed = round(i * DELAY, 1)
            time_baseline.append(elapsed)
            reading_baseline_wattage.append(last_reading)
    if timeseries:
        with open('baseline_wattage.csv', 'w') as baseline_wattage_file:
            baseline_wattage_writer = csv.writer(baseline_wattage_file)
            baseline_wattage_writer.writerow(["time", "baseline wattage reading"])
            for i in range(len(time_baseline)):
                baseline_wattage_writer.writerow([time_baseline[i], reading_baseline_wattage[i]])
    if printToScreen:
        utils.newline()

    # Running the process and measuring wattage
    q = Queue()
    p = Process(target=func, args=(user_func, q, *args,))

    start = timer()
    small_delay_counter = 0
    return_value = None
    p.start()

    while (p.is_alive()):
        # Checking at a faster rate for quick processes
        # NOTE(review): small_delay_counter starts at 0 and is only
        # incremented inside this branch, so the condition is never true
        # and the fast-polling path is dead code — confirm intended
        # cadence before changing it.
        if (small_delay_counter > DELAY):
            delay = DELAY / 10
            small_delay_counter += 1
        else:
            delay = DELAY

        if is_nvidia_gpu:
            output = subprocess.check_output(['bash', '-c', bash_command])
            output = float(output.decode("utf-8")[:-2])
            gpu_process.append(output)
        if is_valid_cpu:
            files = utils.measure_files(files, delay)
            files = utils.update_files(files, True)
        else:
            time.sleep(delay)
        # Just output, not added
        last_reading = (utils.get_total(files, multiple_cpus) + gpu_process[-1]) / powerLoss
        if last_reading >= 0 and printToScreen:
            utils.log("Process wattage", last_reading)
            # BUG FIX: renamed from `time` to stop shadowing the module.
            elapsed = round(timer() - start, 1)
            time_process.append(elapsed)
            reading_process_wattage.append(last_reading)
        # Getting the return value of the user's function
        try:
            return_value = q.get_nowait()
            break
        except queue.Empty:
            pass
    if timeseries:
        with open('process_wattage.csv', 'w') as process_wattage_file:
            process_wattage_writer = csv.writer(process_wattage_file)
            process_wattage_writer.writerow(["time", "process wattage reading"])
            for i in range(len(time_process)):
                process_wattage_writer.writerow([time_process[i], reading_process_wattage[i]])
    p.join()
    end = timer()
    # BUG FIX: loop variable renamed from `file` so it no longer shadows
    # the module-level `file` class used for the GPU entry below.
    for f in files:
        # Drop the first and last (partial) samples.
        f.process = f.process[1:-1]
        f.baseline = f.baseline[1:-1]
    if is_nvidia_gpu:
        gpu_baseline_average = statistics.mean(gpu_baseline[2:-1])
        gpu_process_average = statistics.mean(gpu_process[2:-1])
    else:
        gpu_baseline_average = 0
        gpu_process_average = 0

    total_time = end - start  # seconds
    # Formatting the time nicely
    timedelta = str(datetime.timedelta(seconds=total_time)).split('.')[0]

    if files[0].process == []:
        raise Exception("Process executed too fast to gather energy consumption")
    files = utils.average_files(files)

    process_average = utils.get_process_average(files, multiple_cpus, gpu_process_average)
    baseline_average = utils.get_baseline_average(files, multiple_cpus, gpu_baseline_average)
    difference_average = process_average - baseline_average
    watt_averages = [baseline_average, process_average, difference_average, timedelta]

    # Subtracting baseline wattage to get more accurate result
    process_kwh = convert.to_kwh((process_average - baseline_average) * total_time) / powerLoss

    if is_nvidia_gpu:
        gpu_file = file("GPU", "")
        gpu_file.create_gpu(gpu_baseline_average, gpu_process_average)
        # BUG FIX: append the populated gpu_file instead of a fresh,
        # empty file("GPU", "") whose GPU averages were never set.
        files.append(gpu_file)

    # Logging
    if printToScreen:
        utils.log("Final Readings", baseline_average, process_average, difference_average, timedelta)
    return (process_kwh, return_value, watt_averages, files, total_time, time_baseline, reading_baseline_wattage, time_process, reading_process_wattage)