Example #1
    def make_new_pypi_release(self):
        def release_handler(success):
            result = "released" if success else "failed to release"
            msg = f"I just {result} version {self.new_release['version']} on PyPI"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            self.github.comment.append(msg)

        latest_pypi = self.pypi.latest_version()
        if Version.coerce(latest_pypi) >= Version.coerce(
                self.new_release['version']):
            self.logger.info(
                f"{self.new_release['version']} has already been released on PyPi"
            )
            return False
        self.git.fetch_tags()
        self.git.checkout(self.new_release['version'])
        try:
            self.pypi.release()
            release_handler(success=True)
        except ReleaseException:
            release_handler(success=False)
            raise

        return True
Example #2
def setup_ceph_debug():
    """
    Set Ceph to run in debug log level using a ConfigMap.
    This functionality is available starting OCS 4.7.

    """
    ceph_debug_log_configmap_data = templating.load_yaml(
        constants.CEPH_CONFIG_DEBUG_LOG_LEVEL_CONFIGMAP
    )
    ocs_version = config.ENV_DATA["ocs_version"]
    if Version.coerce(ocs_version) < Version.coerce("4.8"):
        stored_values = constants.ROOK_CEPH_CONFIG_VALUES.split("\n")
    else:
        stored_values = constants.ROOK_CEPH_CONFIG_VALUES_48.split("\n")
    ceph_debug_log_configmap_data["data"]["config"] = (
        stored_values + constants.CEPH_DEBUG_CONFIG_VALUES
    )

    ceph_configmap_yaml = tempfile.NamedTemporaryFile(
        mode="w+", prefix="config_map", delete=False
    )
    templating.dump_data_to_temp_yaml(
        ceph_debug_log_configmap_data, ceph_configmap_yaml.name
    )
    log.info("Setting Ceph to work in debug log level using a new ConfigMap resource")
    run_cmd(f"oc create -f {ceph_configmap_yaml.name}")
Example #3
def clone_openshift_installer():
    """
    Clone the openshift installer repo
    """
    # git clone the repo from the openshift installer
    # ( https://github.com/openshift/installer ). The structure of master and
    # of the branches newer than release-4.3 has changed; use the appropriate
    # branch once ocs-ci is ready with those changes.
    # Note: currently the release-4.3 branch is used for the OCP versions
    # greater than 4.3
    upi_repo_path = os.path.join(
        constants.EXTERNAL_DIR,
        'installer'
    )
    ocp_version = get_ocp_version()
    if Version.coerce(ocp_version) >= Version.coerce('4.4'):
        clone_repo(
            constants.VSPHERE_INSTALLER_REPO, upi_repo_path,
            constants.VSPHERE_INSTALLER_BRANCH
        )
    else:
        clone_repo(
            constants.VSPHERE_INSTALLER_REPO, upi_repo_path,
            f'release-{ocp_version}'
        )
Example #4
def generate_terraform_vars_and_update_machine_conf():
    """
    Generates the terraform.tfvars file and updates the machine configurations
    """
    ocp_version = get_ocp_version()
    folder_structure = False
    if Version.coerce(ocp_version) >= Version.coerce("4.5"):

        folder_structure = True
        # export AWS_REGION
        set_aws_region()

        # generate terraform variable file
        generate_terraform_vars_with_folder()

        # update the machine configurations
        update_machine_conf(folder_structure)

        if Version.coerce(ocp_version) >= Version.coerce("4.5"):
            modify_haproxyservice()
    else:
        # generate terraform variable file
        generate_terraform_vars_with_out_folder()

        # update the machine configurations
        update_machine_conf(folder_structure)
Example #5
    def destroy_scaleup_nodes(self, scale_up_terraform_data_dir,
                              scale_up_terraform_var):
        """
        Destroy the scale-up nodes

        Args:
            scale_up_terraform_data_dir (str): Path to scale-up terraform
                data directory
            scale_up_terraform_var (str): Path to scale-up
                terraform.tfvars file

        """
        clone_repo(constants.VSPHERE_SCALEUP_REPO, self.upi_scale_up_repo_path)
        # git clone repo from cluster-launcher
        clone_repo(constants.VSPHERE_CLUSTER_LAUNCHER,
                   self.cluster_launcer_repo_path)

        # modify scale-up repo
        helpers = VSPHEREHELPERS()
        helpers.modify_scaleup_repo()

        vsphere_dir = constants.SCALEUP_VSPHERE_DIR
        if Version.coerce(self.ocp_version) >= Version.coerce("4.5"):
            vsphere_dir = os.path.join(
                constants.CLUSTER_LAUNCHER_VSPHERE_DIR,
                f"aos-{get_ocp_version('_')}",
                "vsphere",
            )

        terraform_scale_up = Terraform(vsphere_dir)
        os.chdir(scale_up_terraform_data_dir)
        terraform_scale_up.initialize(upgrade=True)
        terraform_scale_up.destroy(scale_up_terraform_var)
Example #6
        def __init__(self):
            super(VSPHEREUPI.OCPDeployment, self).__init__()
            self.public_key = {}
            self.upi_repo_path = os.path.join(constants.EXTERNAL_DIR,
                                              "installer")
            self.previous_dir = os.getcwd()

            # get OCP version
            ocp_version = get_ocp_version()

            # create terraform_data directory
            self.terraform_data_dir = os.path.join(
                self.cluster_path, constants.TERRAFORM_DATA_DIR)
            create_directory_path(self.terraform_data_dir)

            # Download terraform binary based on ocp version and
            # update the installer path in ENV_DATA
            # use "0.11.14" for releases below OCP 4.5
            terraform_version = config.DEPLOYMENT["terraform_version"]
            terraform_installer = get_terraform(version=terraform_version)
            config.ENV_DATA["terraform_installer"] = terraform_installer

            # Download terraform ignition provider
            # ignition provider dependency from OCP 4.6
            if Version.coerce(ocp_version) >= Version.coerce("4.6"):
                get_terraform_ignition_provider(self.terraform_data_dir)

            # Initialize Terraform
            self.terraform_work_dir = constants.VSPHERE_DIR
            self.terraform = Terraform(self.terraform_work_dir)

            self.folder_structure = False
            if Version.coerce(ocp_version) >= Version.coerce("4.5"):
                self.folder_structure = True
                config.ENV_DATA["folder_structure"] = self.folder_structure
Example #7
def create_external_secret(ocs_version=None, apply=False):
    """
    Creates secret data for external cluster

    Args:
         ocs_version (str): OCS version
         apply (bool): True to use the apply command instead of create

    """
    ocs_version = ocs_version or config.ENV_DATA["ocs_version"]
    secret_data = templating.load_yaml(constants.EXTERNAL_CLUSTER_SECRET_YAML)
    if Version.coerce(ocs_version) >= Version.coerce("4.8"):
        external_cluster_details = config.EXTERNAL_MODE.get(
            "external_cluster_details_ocs48", "")
    else:
        external_cluster_details = config.EXTERNAL_MODE.get(
            "external_cluster_details", "")
    if not external_cluster_details:
        raise ExternalClusterDetailsException("No external cluster data found")
    secret_data["data"]["external_cluster_details"] = external_cluster_details
    secret_data_yaml = tempfile.NamedTemporaryFile(
        mode="w+", prefix="external_cluster_secret", delete=False)
    templating.dump_data_to_temp_yaml(secret_data, secret_data_yaml.name)
    logger.info(
        f"Creating external cluster secret for OCS version: {ocs_version}")
    oc_type = "apply" if apply else "create"
    run_cmd(f"oc {oc_type} -f {secret_data_yaml.name}")
Example #8
    def make_new_github_release(self):
        def release_handler(success):
            result = "released" if success else "failed to release"
            msg = f"I just {result} version {self.new_release['version']} on Github"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            self.github.comment.append(msg)

        try:
            latest_github = self.github.latest_release()
            if Version.coerce(latest_github) >= Version.coerce(
                    self.new_release['version']):
                self.logger.info(
                    f"{self.new_release['version']} has already been released on Github"
                )
                # to fill in new_release['fs_path'] so that we can continue with PyPi upload
                self.new_release = self.github.download_extract_zip(
                    self.new_release)
                return self.new_release
        except ReleaseException as exc:
            raise ReleaseException(
                f"Failed getting latest Github release (zip).\n{exc}")

        try:
            released, self.new_release = self.github.make_new_release(
                self.new_release)
            if released:
                release_handler(success=True)
        except ReleaseException:
            release_handler(success=False)
            raise

        return self.new_release
Example #9
    def test_validate_ceph_config_values_in_rook_config_override(self):
        """
        Test case comparing the cluster's Ceph config values, set by the
        ceph-config-override configMap, with the static set of configuration
        values saved in ocs-ci

        """
        cm_obj = OCP(
            kind="configmap",
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            resource_name=constants.ROOK_CONFIG_OVERRIDE_CONFIGMAP,
        )
        config_data = cm_obj.get()["data"]["config"]
        config_data = config_data.split("\n")
        log.info(
            "Validating that the Ceph values, configured by the ceph-config-override "
            "configMap, match the ones stored in ocs-ci")
        ocs_version = config.ENV_DATA["ocs_version"]
        if Version.coerce(ocs_version) < Version.coerce("4.8"):
            stored_values = constants.ROOK_CEPH_CONFIG_VALUES.split("\n")
        else:
            stored_values = constants.ROOK_CEPH_CONFIG_VALUES_48.split("\n")
        assert collections.Counter(config_data) == collections.Counter(
            stored_values
        ), (f"The Ceph config, set by {constants.ROOK_CONFIG_OVERRIDE_CONFIGMAP} "
            f"is different than the expected. Please inform OCS-QE about this discrepancy. "
            f"The expected values are:\n{stored_values}\n"
            f"The cluster's Ceph values are:{config_data}")
Example #10
        def deploy_prereq(self):
            """
            Pre-Requisites for vSphere UPI Deployment
            """
            super(VSPHEREUPI.OCPDeployment, self).deploy_prereq()
            # generate manifests
            self.generate_manifests()

            # create chrony resource
            if Version.coerce(get_ocp_version()) >= Version.coerce("4.4"):
                add_chrony_to_ocp_deployment()

            # create ignitions
            self.create_ignitions()
            self.kubeconfig = os.path.join(
                self.cluster_path, config.RUN.get("kubeconfig_location"))
            self.terraform_var = os.path.join(
                config.ENV_DATA["cluster_path"],
                constants.TERRAFORM_DATA_DIR,
                "terraform.tfvars",
            )

            # git clone repo from openshift installer
            clone_openshift_installer()

            # generate terraform variable file
            generate_terraform_vars_and_update_machine_conf()

            # sync guest time with host
            vm_file = (constants.VM_MAIN if self.folder_structure else
                       constants.INSTALLER_MACHINE_CONF)
            if config.ENV_DATA.get("sync_time_with_host"):
                sync_time_with_host(vm_file, True)
Example #11
        def __init__(self):
            super(VSPHEREUPI.OCPDeployment, self).__init__()
            self.public_key = {}
            self.upi_repo_path = os.path.join(constants.EXTERNAL_DIR,
                                              'installer')
            self.previous_dir = os.getcwd()

            # Download terraform binary based on ocp version and
            # update the installer path in ENV_DATA
            # use "0.11.14" for releases below OCP 4.5
            terraform_version = config.DEPLOYMENT['terraform_version']
            terraform_installer = get_terraform(version=terraform_version)
            config.ENV_DATA['terraform_installer'] = terraform_installer

            # Initialize Terraform
            self.terraform_data_dir = os.path.join(
                self.cluster_path, constants.TERRAFORM_DATA_DIR)
            create_directory_path(self.terraform_data_dir)
            self.terraform_work_dir = constants.VSPHERE_DIR
            self.terraform = Terraform(self.terraform_work_dir)
            ocp_version = get_ocp_version()
            self.folder_structure = False
            if Version.coerce(ocp_version) >= Version.coerce('4.5'):
                self.folder_structure = True
                config.ENV_DATA['folder_structure'] = self.folder_structure
Example #12
    def test_check_mon_pdb_post_upgrade(self):
        """
        Test case to check the disruptions_allowed, minimum available and
        maximum unavailable mon counts

        """
        ceph_obj = CephCluster()

        # Check for mon count
        mons_after_upgrade = ceph_obj.get_mons_from_cluster()
        log.info(f"Mons after upgrade {mons_after_upgrade}")

        disruptions_allowed, min_available_mon, max_unavailable_mon = get_mon_pdb()
        log.info(f"Number of Mons Disruptions_allowed {disruptions_allowed}")
        log.info(f"Minimum_available mon count {min_available_mon}")
        log.info(f"Maximum_unavailable mon count {max_unavailable_mon}")

        # The PDB values are considered from OCS 4.5 onwards.
        assert disruptions_allowed == 1, "Mon Disruptions_allowed count not matching"
        ocs_version = config.ENV_DATA["ocs_version"]
        if Version.coerce(ocs_version) < Version.coerce("4.6"):
            assert min_available_mon == 2, "Minimum available mon count is not matching"
        else:
            # This mon pdb change applies from 4.6.5 and 4.7 onwards, please refer to bz1946573, bz1935065
            # (https://bugzilla.redhat.com/show_bug.cgi?id=1946573)
            # (https://bugzilla.redhat.com/show_bug.cgi?id=1935065)
            assert (max_unavailable_mon == 1
                    ), "Maximum unavailable mon count is not matching"
Example #13
def check_version():
    endpoint = "https://api.github.com/repos/n2qzshce/ham-radio-sync/tags"
    try:
        result = requests.get(endpoint)
        latest_version = result.json()[0]['name']
        latest = Version.coerce(latest_version)
    except Exception as e:
        logging.info("Unable to fetch version info.")
        logging.debug("Unable to fetch version info: %s", e)
        return

    try:
        current = Version.coerce(version)
    except Exception as e:
        if version == 'DEVELOPMENT':
            logging.info("You're running a dev version of the code!")
        else:
            logging.error("Could not parse version number: %s", e)
        return

    if latest > current:
        logging.warning(
            f"You are running version `{version}`. Latest is `{latest_version}`"
        )
        logging.info(
            f"Update at: `https://github.com/n2qzshce/ham-radio-sync/releases`"
        )
    else:
        logging.info("You are on the latest version")
    return
Example #14
    def make_new_github_release(self):
        def release_handler(success):
            result = "released" if success else "failed to release"
            msg = f"I just {result} version {self.new_release.version} on {self.git_service.name}"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            self.github.comment.append(msg)

        try:
            latest_release = self.github.latest_release()
        except ReleaseException as exc:
            raise ReleaseException(
                f"Failed getting latest {self.git_service.name} release (zip).\n{exc}"
            )

        if Version.coerce(latest_release) >= Version.coerce(
                self.new_release.version):
            self.logger.info(
                f"{self.new_release.version} has already been released on {self.git_service.name}"
            )
        else:
            try:
                if self.conf.dry_run:
                    return None
                released, self.new_release = self.github.make_new_release(
                    self.new_release)
                if released:
                    release_handler(success=True)
            except ReleaseException:
                release_handler(success=False)
                raise
        return self.new_release
Example #15
    def make_new_pypi_release(self):
        def release_handler(success):
            result = "released" if success else "failed to release"
            msg = f"I just {result} version {self.new_release['version']} on PyPI"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            self.github.comment.append(msg)

        latest_pypi = self.pypi.latest_version()

        # if there are no previous releases, set version to 0.0.0
        latest_pypi = latest_pypi if latest_pypi else '0.0.0'
        if Version.coerce(latest_pypi) >= Version.coerce(
                self.new_release['version']):
            self.logger.info(
                f"{self.new_release['version']} has already been released on PyPi"
            )
            return False

        try:
            self.pypi.release(self.new_release)
            release_handler(success=True)
        except ReleaseException:
            release_handler(success=False)
            raise

        return True
Example #16
    def make_new_pypi_release(self):
        if not self.new_release.pypi:
            self.logger.debug('Skipping PyPi release')
            return False

        def release_handler(success):
            result = "released" if success else "failed to release"
            if self.conf.dry_run:
                msg = f"I would have {result} version {self.new_release.version} on PyPI now."
            else:
                msg = f"I just {result} version {self.new_release.version} on PyPI"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            self.github.comment.append(msg)

        latest_pypi = self.pypi.latest_version()
        if Version.coerce(latest_pypi) >= Version.coerce(
                self.new_release.version):
            msg = f"{self.conf.pypi_project}-{self.new_release.version} " \
                  f"or higher version has already been released on PyPi"
            self.logger.info(msg)
            return False
        self.git.fetch_tags()
        self.git.checkout(self.new_release.version)
        try:
            if self.pypi.release() is False:
                return False
            release_handler(success=True)
        except ReleaseException:
            release_handler(success=False)
            raise
        finally:
            self.git.checkout('master')

        return True
Example #17
    def make_new_github_release(self):
        def release_handler(success):
            result = "released" if success else "failed to release"
            msg = f"I just {result} version {self.new_release['version']} on Github"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            self.github.comment.append(msg)

        try:
            latest_release = self.github.latest_release()
        except ReleaseException as exc:
            raise ReleaseException(
                f"Failed getting latest Github release (zip).\n{exc}")

        if Version.coerce(latest_release) >= Version.coerce(
                self.new_release['version']):
            self.logger.info(
                f"{self.new_release['version']} has already been released on Github"
            )
        else:
            try:
                released, self.new_release = self.github.make_new_release(
                    self.new_release)
                if released:
                    release_handler(success=True)
            except ReleaseException:
                release_handler(success=False)
                raise
        self.github.update_changelog(self.new_release['version'])
        return self.new_release
Example #18
    def test_version_incremented(self):
        ref = 'DEV'
        headers = {}
        if 'GITHUB_REF' in os.environ.keys():
            ref = os.environ['GITHUB_REF']
        if 'GITHUB_TOKEN' in os.environ.keys():
            headers['Authorization'] = f"Bearer {os.environ['GITHUB_TOKEN']}"
        else:
            logging.critical('GITHUB_TOKEN not set')

        if 'CI' not in os.environ.keys():
            return

        logging.critical(f"Current ref is `{ref}`")
        is_master = ref == 'refs/heads/master'
        if is_master:
            logging.critical("Skipping version increment check on master.")
            return
        endpoint = "https://api.github.com/repos/n2qzshce/cw_typist/tags"
        result = requests.get(endpoint, headers=headers)
        result_json = result.json()
        if len(result_json) == 0:
            return
        latest_version = result_json[0]['name']
        latest = Version.coerce(latest_version)
        current = Version.coerce(cw_typist_version.version)
        self.assertGreater(current, latest,
                           "Version has not been incremented.")
Example #19
def test_rgw_unavailable(measure_stop_rgw):
    """
    Test that there is appropriate alert when RGW is unavailable and that
    this alert is cleared when the RGW interface is back online.

    """
    api = prometheus.PrometheusAPI()

    # get alerts from the time when the RGW deployment was scaled down
    alerts = measure_stop_rgw.get("prometheus_alerts")
    target_label = constants.ALERT_CLUSTEROBJECTSTORESTATE
    # The alert message is changed since OCS 4.7
    ocs_version = config.ENV_DATA["ocs_version"]
    if Version.coerce(ocs_version) < Version.coerce("4.7"):
        target_msg = (
            "Cluster Object Store is in unhealthy state for more than 15s. "
            "Please check Ceph cluster health or RGW connection.")
    else:
        target_msg = "Cluster Object Store is in unhealthy state. Please check Ceph cluster health."
    states = ["pending", "firing"]

    prometheus.check_alert_list(
        label=target_label,
        msg=target_msg,
        alerts=alerts,
        states=states,
        severity="error",
    )
    api.check_alert_cleared(label=target_label,
                            measure_end_time=measure_stop_rgw.get("stop"))
Example #20
    def subscribe_ocs(self):
        """
        This method creates the subscription manifest and subscribes to the OCS operator.

        """
        live_deployment = config.DEPLOYMENT.get("live_deployment")
        if (config.ENV_DATA["platform"] == constants.IBMCLOUD_PLATFORM
                and not live_deployment):
            link_all_sa_and_secret_and_delete_pods(constants.OCS_SECRET,
                                                   self.namespace)
        operator_selector = get_selector_for_ocs_operator()
        # wait for package manifest
        # For OCS version >= 4.9, we have odf-operator
        ocs_version = config.ENV_DATA["ocs_version"]
        if Version.coerce(ocs_version) >= Version.coerce("4.9"):
            ocs_operator_name = defaults.ODF_OPERATOR_NAME
            subscription_file = constants.SUBSCRIPTION_ODF_YAML
        else:
            ocs_operator_name = defaults.OCS_OPERATOR_NAME
            subscription_file = constants.SUBSCRIPTION_YAML

        package_manifest = PackageManifest(
            resource_name=ocs_operator_name,
            selector=operator_selector,
        )
        # Wait for the package manifest to be ready
        package_manifest.wait_for_resource(timeout=300)
        default_channel = package_manifest.get_default_channel()
        subscription_yaml_data = templating.load_yaml(subscription_file)
        subscription_plan_approval = config.DEPLOYMENT.get(
            "subscription_plan_approval")
        if subscription_plan_approval:
            subscription_yaml_data["spec"][
                "installPlanApproval"] = subscription_plan_approval
        custom_channel = config.DEPLOYMENT.get("ocs_csv_channel")
        if custom_channel:
            logger.info(f"Custom channel will be used: {custom_channel}")
            subscription_yaml_data["spec"]["channel"] = custom_channel
        else:
            logger.info(f"Default channel will be used: {default_channel}")
            subscription_yaml_data["spec"]["channel"] = default_channel
        if config.DEPLOYMENT.get("stage"):
            subscription_yaml_data["spec"][
                "source"] = constants.OPERATOR_SOURCE_NAME
        if config.DEPLOYMENT.get("live_deployment"):
            subscription_yaml_data["spec"]["source"] = config.DEPLOYMENT.get(
                "live_content_source", defaults.LIVE_CONTENT_SOURCE)
        subscription_manifest = tempfile.NamedTemporaryFile(
            mode="w+", prefix="subscription_manifest", delete=False)
        templating.dump_data_to_temp_yaml(subscription_yaml_data,
                                          subscription_manifest.name)
        run_cmd(f"oc create -f {subscription_manifest.name}")
        logger.info("Sleeping for 15 seconds after subscribing OCS")
        if subscription_plan_approval == "Manual":
            wait_for_install_plan_and_approve(self.namespace)
Example #21
def no_downgrade(version, service):
    # always upgrade if it's the latest version available
    if version == 'latest' or version['version'] == 'latest':
        return version.get('available', True)
    if service['version'] == 'latest':
        return False
    new_v = Version.coerce(version['version'])
    current_version = Version.coerce(service['version'])

    # always allow the current version,
    # or a new version that is an upgrade and is available
    return new_v == current_version \
        or (new_v >= current_version and version.get('available', True))
Example #22
def parse_next_version(version: Text) -> Text:
    """Find the next version as a proper semantic version string."""
    if version == "major":
        return str(Version.coerce(get_current_version()).next_major())
    elif version == "minor":
        return str(Version.coerce(get_current_version()).next_minor())
    elif version == "patch":
        return str(Version.coerce(get_current_version()).next_patch())
    elif semantic_version.validate(version):
        return version
    else:
        raise Exception(
            f"Invalid version number '{version}'.")
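A minimal, hedged sketch of the bumping behavior this helper relies on; get_current_version() is part of the surrounding project, so a made-up version string stands in for it here, and only the next_*() helpers of the semantic_version package are exercised:

from semantic_version import Version

current = "2.3.1"  # hypothetical output of get_current_version()
print(Version.coerce(current).next_major())  # 3.0.0
print(Version.coerce(current).next_minor())  # 2.4.0
print(Version.coerce(current).next_patch())  # 2.3.2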
Example #23
    def make_new_pypi_release(self):
        # check if a new release was made
        latest_pypi = self.pypi.latest_version()
        if Version.coerce(latest_pypi) < Version.coerce(self.new_release['version']):
            self.logger.info("Newer version on github, triggering PyPi release")
            # load release configuration from release-conf.yaml in repository
            release_conf = self.conf.load_release_conf(
                os.path.join(self.new_release['fs_path'], 'release-conf.yaml'))
            self.new_release.update(release_conf)
            self.pypi.release(self.new_release)
        else:
            self.logger.debug((f"PyPi version {latest_pypi} | "
                               f"Github version {self.github.latest_version()} -> nothing to do"))
Example #24
    def deploy_with_external_mode(self):
        """
        This function handles the deployment of OCS on an
        external/independent RHCS cluster

        """
        live_deployment = config.DEPLOYMENT.get("live_deployment")
        logger.info("Deploying OCS with external mode RHCS")
        ui_deployment = config.DEPLOYMENT.get("ui_deployment")
        if not ui_deployment:
            logger.info("Creating namespace and operator group.")
            run_cmd(f"oc create -f {constants.OLM_YAML}")
        if not live_deployment:
            create_catalog_source()
        self.subscribe_ocs()
        operator_selector = get_selector_for_ocs_operator()
        subscription_plan_approval = config.DEPLOYMENT.get(
            "subscription_plan_approval")
        ocs_version = config.ENV_DATA["ocs_version"]
        if Version.coerce(ocs_version) >= Version.coerce("4.9"):
            ocs_operator_names = [
                defaults.ODF_OPERATOR_NAME,
                defaults.OCS_OPERATOR_NAME,
            ]
        else:
            ocs_operator_names = [defaults.OCS_OPERATOR_NAME]
        channel = config.DEPLOYMENT.get("ocs_csv_channel")
        for ocs_operator_name in ocs_operator_names:
            package_manifest = PackageManifest(
                resource_name=ocs_operator_name,
                selector=operator_selector,
                subscription_plan_approval=subscription_plan_approval,
            )
            package_manifest.wait_for_resource(timeout=300)
            csv_name = package_manifest.get_current_csv(channel=channel)
            csv = CSV(resource_name=csv_name, namespace=self.namespace)
            csv.wait_for_phase("Succeeded", timeout=720)

        # Create secret for external cluster
        create_external_secret()

        cluster_data = templating.load_yaml(
            constants.EXTERNAL_STORAGE_CLUSTER_YAML)
        cluster_data["metadata"]["name"] = config.ENV_DATA[
            "storage_cluster_name"]
        cluster_data_yaml = tempfile.NamedTemporaryFile(
            mode="w+", prefix="external_cluster_storage", delete=False)
        templating.dump_data_to_temp_yaml(cluster_data, cluster_data_yaml.name)
        run_cmd(f"oc create -f {cluster_data_yaml.name}", timeout=2400)
        self.external_post_deploy_validation()
        setup_ceph_toolbox()
Example #25
    def make_release_pull_request(self):
        """
        Makes release pull request and handles outcome
        :return: whether making PR was successful
        """

        def pr_handler(success):
            """
            Handler for the outcome of making a PR
            :param success: whether making PR was successful
            :return:
            """
            result = "made" if success else "failed to make"
            msg = f"I just {result} a PR request for a release version {self.new_pr.version}"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            if success:
                msg += f"\n Here's a [link to the PR]({self.new_pr.pr_url})"
            comment_backup = self.github.comment.copy()
            self.github.comment = [msg]
            self.project.get_issue(self.new_pr.issue_number).comment(msg)
            self.github.comment = comment_backup
            if success:
                self.project.get_issue(self.new_pr.issue_number).close()
                self.logger.debug(f"Closed issue #{self.new_pr.issue_number}")

        latest_gh_str = self.github.latest_release()
        self.new_pr.previous_version = latest_gh_str
        if Version.coerce(latest_gh_str) >= Version.coerce(self.new_pr.version):
            msg = f"Version ({latest_gh_str}) is already released and this issue is ignored."
            self.logger.warning(msg)
            return False
        msg = (
            f"Making a new PR for release of version "
            f"{self.new_pr.version} based on the issue."
        )
        if not self.conf.dry_run:
            self.logger.info(msg)

        try:
            self.new_pr.repo = self.git
            if not self.new_pr.repo:
                raise ReleaseException("Couldn't clone repository!")
            if self.github.make_release_pr(self.new_pr, self.conf.gitchangelog):
                pr_handler(success=True)
                return True
        except ReleaseException:
            pr_handler(success=False)
            raise
        return False
Example #26
    def make_release_pull_request(self):
        """
        Makes release pull request and handles outcome
        :return: whether making PR was successful
        """
        def pr_handler(success):
            """
            Handler for the outcome of making a PR
            :param success: whether making PR was successful
            :return:
            """
            result = 'made' if success else 'failed to make'
            msg = f"I just {result} a PR request for a release version {self.new_pr['version']}"
            level = logging.INFO if success else logging.ERROR
            self.logger.log(level, msg)
            if success:
                msg += f"\n Here's a [link to the PR]({self.new_pr['pr_url']})"
            comment_backup = self.github.comment.copy()
            self.github.comment = [msg]
            self.github.add_comment(self.new_pr['issue_id'])
            self.github.comment = comment_backup
            if success:
                self.github.close_issue(self.new_pr['issue_number'])
            self.new_pr['repo'].cleanup()

        prev_version = self.github.latest_release()

        # if there are no previous releases, set version to 0.0.0
        prev_version = prev_version if prev_version else '0.0.0'
        self.new_pr['previous_version'] = prev_version
        if Version.coerce(prev_version) >= Version.coerce(
                self.new_pr['version']):
            msg = f"Version ({prev_version}) is already released and this issue is ignored."
            self.logger.warning(msg)
            return False
        msg = f"Making a new PR for release of version {self.new_pr['version']} based on an issue."
        self.logger.info(msg)

        try:
            self.new_pr['repo'] = self.github.clone_repository()
            if not self.new_pr['repo']:
                raise ReleaseException("Couldn't clone repository!")

            if self.github.make_release_pr(self.new_pr):
                pr_handler(success=True)
                return True
        except ReleaseException:
            pr_handler(success=False)
            raise
        return False
Example #27
    def get_package(self, spec, parent_channels=()):

        res = req.get('https://pypi.python.org/pypi/%s/json' % spec.package)
        info = res.json()
        versions = {Version.coerce(v):r for v, r in info['releases'].items()}
        version = max(spec.version_spec.filter(versions.keys()))
        releases = versions[version]

        data = info['info']
        data['files'] = []

        for file_info in releases:
            file_info['basename'] = file_info['filename']
            file_info['attrs'] = {'packagetype': file_info['packagetype']}
            file_info['distribution_type'] = 'pypi'
            file_info['version'] = str(version)
            file_info['md5'] = file_info['md5_digest']

            data['files'].append(file_info)

        pkg = Package(self.env, data)

        self.fetch(pkg.file)

        with open(pkg.file.cache_path) as fileobj:
            _, _, data = pypi_inspect.inspect_pypi_package(pkg.file.cache_path, fileobj)
            file_info['dependencies'] = data.get('dependencies', [])

        return pkg
Example #28
def list_packages(android):
    packages = []
    separator = '----------'
    out = subprocess.check_output(
        [android, 'list', 'sdk', '--all', '--extended']).decode()
    fields = out.split(separator)[1:]
    p_id = re.compile(r'^id: (\d+) or "(.+)"$', flags=re.MULTILINE)
    p_revision = re.compile(r'[Rr]evision (.+)')
    p_type = re.compile(r'Type: (\w+)')
    for field in fields:
        m = p_id.search(field)
        if m is None:
            print("Failed to parse package ID:", field, file=sys.stderr)
            continue
        num, name = m.groups()
        m = p_revision.search(field)
        if m is None:
            print("Failed to parse revision:", field, file=sys.stderr)
            continue
        revision, = m.groups()
        revision = revision.replace(' (Obsolete)', '')
        semver = Version.coerce(revision)

        m = p_type.search(field)
        if m is None:
            print("Failed to parse type:", field, file=sys.stderr)
            continue
        ptype, = m.groups()
        category = categories[ptype]
        if category is None:
            print("Unrecognized type:", ptype, file=sys.stderr)
            category = ptype.lower()
        packages.append(Package(category, name, revision, semver, num))
    return packages
Example #29
def parse(top, root):
    path = os.path.relpath(root, top)

    parts = path.split(os.path.sep)

    if parts[0] in blacklist:
        print('WARNING: Ignoring \'{:s}\' as it is blacklisted.'.format(path), file=sys.stderr)
        return None

    props = parse_properties(os.path.join(root, 'source.properties'))
    name = {
        'add-ons': add_ons,
        'build-tools': build_tools,
        'docs': docs,
        'extras': extras,
        'platforms': platforms,
        'platform-tools': platform_tools,
        'samples': samples,
        'sources': sources,
        'system-images': system_images,
        'tools': tools
    }.get(parts[0], default)(props, parts)
    if not name:
        print("WARNING: Failed to parse package:", path, file=sys.stderr)
        return None
    return Package(parts[0], name, props['revision'], Version.coerce(props['revision']))
Example #30
File: util.py Project: scztt/qpm
def sort_versions(versions):
    sem_version_map = dict()
    for v in versions:
        sem_version_map[Version.coerce(v)] = v

    sorted_sem_versions = sorted(sem_version_map.keys())
    return map(lambda v: sem_version_map[v], sorted_sem_versions)
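A quick usage sketch with made-up input strings; it shows why mapping the coerced keys back to the raw strings matters, since the coerced versions sort semantically rather than lexicographically:

print(list(sort_versions(["1.10.0", "1.2", "1.9.1"])))
# ['1.2', '1.9.1', '1.10.0']  -- semantic order; a plain string sort would put 1.10.0 first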
Example #31
def _get_solc_version(solc_binary: Union[Path, str]) -> Version:
    # private wrapper function to get `solc` version
    stdout_data = subprocess.check_output([solc_binary, "--version"],
                                          encoding="utf8")
    version_str = re.findall(r"(?<=Version: ).*?(?=\+)", stdout_data)[0]
    version_str = re.sub(r"\.0(?=[1-9])", ".", version_str)
    return Version.coerce(version_str)
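For reference, the re.sub step above rewrites zero-padded components into plain integers before coercion; a standalone check of just that transformation, with a made-up solc version string:

import re

version_str = "0.04.11"  # made-up, zero-padded version string
print(re.sub(r"\.0(?=[1-9])", ".", version_str))  # 0.4.11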
Example #32
    def __init__(self, name='default', path=None):
        """Initialize the handler

        name: name of the file, without the extension
        """

        # Saving the name
        self._name = name

        # Move path in cache if set
        if path is not None:
            if not os.path.exists(path):
                raise NotInstalledError
            self._path = path

        # Read the version file
        version = self.read()

        # Initialize the version management
        semver = str(Version.coerce(version))
        Version.__init__(self, semver)

        # Make build and prerelease as list to improve UX
        self.build = list(self.build)
        self.prerelease = list(self.prerelease)
Example #33
    def __init__(self, *args, **kwargs):
        os_name = kwargs.pop("os_override", None)
        super(PytorchRequirement, self).__init__(*args, **kwargs)
        self.log = self._session.get_logger(__name__)
        self.package_manager = self.config["agent.package_manager.type"].lower()
        self.os = os_name or self.get_platform()
        self.cuda = "cuda{}".format(self.cuda_version).lower()
        self.python_version_string = str(self.config["agent.default_python"])
        self.python_semantic_version = Version.coerce(
            self.python_version_string, partial=True)
        self.python = "python{}.{}".format(self.python_semantic_version.major,
                                           self.python_semantic_version.minor)

        self.exceptions = [
            PytorchResolutionError(message) for message in (
                None,
                'cuda version "{}" is not supported'.format(self.cuda),
                'python version "{}" is not supported'.format(
                    self.python_version_string),
            )
        ]

        try:
            self.validate_python_version()
        except PytorchResolutionError as e:
            self.log.warn("will not be able to install pytorch wheels: %s",
                          e.args[0])
Example #34
    def vrt_version(self, version):
        if version in self.vrt_versions:
            return self.vrt_versions[version]
        elif Version.coerce(version) > self.last_tagged_version:
            return self.vrt_versions['current']
        else:
            self.fail('Unknown version: %s' % version)
Example #35
    def load_ocp_version_config_file(self, ocp_upgrade_version):
        """
        Loads config file to the ocs-ci config with upgrade version

        Args:
            ocp_upgrade_version (str): version to be upgraded

        """

        version = Version.coerce(ocp_upgrade_version)
        short_ocp_upgrade_version = ".".join([str(version.major), str(version.minor)])
        version_before_upgrade = parse_version(
            config.DEPLOYMENT.get("installer_version")
        )
        version_post_upgrade = parse_version(ocp_upgrade_version)
        version_change = version_post_upgrade > version_before_upgrade
        if version_change:
            version_config_file = os.path.join(
                constants.OCP_VERSION_CONF_DIR,
                f"ocp-{short_ocp_upgrade_version}-config.yaml",
            )
            logger.debug(f"config file to be loaded: {version_config_file}")
            load_config_file(version_config_file)
        else:
            logger.info(
                f"Upgrade version {version_post_upgrade} is not higher than old version:"
                f" {version_before_upgrade}, new config file will not be loaded"
            )
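The short-version construction above boils down to the integer major/minor attributes that Version.coerce exposes; a standalone illustration with a made-up OCP version:

from semantic_version import Version

version = Version.coerce("4.10.23")  # made-up upgrade version
short_version = ".".join([str(version.major), str(version.minor)])
print(short_version)  # 4.10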
Example #36
File: util.py Project: scztt/qpm
def select_versions(spec, versions):
    spec = to_spec(spec)

    sem_version_map = dict()
    for v in versions:
        sem_version_map[Version.coerce(v)] = v

    selected_version = spec.select(sem_version_map.keys())
    return sem_version_map[selected_version]
Example #37
def check_latest_version_property_value(context, property_name, expected_value):
    """Check if the latest_version property contains expected value."""
    value = read_property_value_from_gremlin_response(context, property_name)
    try:
        assert Version.coerce(value) >= Version(expected_value)
    except Exception:
        data, meta = get_results_from_gremlin(context)
        print("Metadata returned by Gremlin:")
        pprint.pprint(meta)
        print("Data returned by Gremlin:")
        pprint.pprint(data)
        raise
Example #38
def is_compatible_with_framework(version):
    """
    Returns ``True`` if the supplied version is compatible with the current framework version,
    otherwise the function returns ``False``. Evaluation of versions is performed
    using the `semantic_version`_-package:

    .. sourcecode:: Python

        is_compatible_with_framework('2.0.0')

    All whitespace is stripped from the string prior to evaluation.

    :param version: A version to validate against the framework version.
    :return: True if framework version is compatible with specification, False otherwise.

    .. _semantic_version: https://pypi.python.org/pypi/semantic_version/
    """
    if version is None:
        return None

    lewis_version = Version.coerce(__version__)

    return lewis_version == Version.coerce(version.strip())
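A usage sketch, assuming the module-level __version__ is "2.0.0" (the real value comes from the surrounding package): whitespace is tolerated, but the coerced versions must match exactly.

print(is_compatible_with_framework(" 2.0.0 "))  # True  (whitespace stripped before coercion)
print(is_compatible_with_framework("2.0.1"))    # False (coerced versions compared for equality)
print(is_compatible_with_framework(None))       # None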
Example #39
def gen_version(version_str):
    """Generates a :class:`Version` object

    Takes a SemVer string and returns a :class:`Version`;
    if it is not a proper SemVer string, it is coerced.

    Args:
        version_str (str): version string to use
    """
    try:
        ver = Version(version_str)
    except ValueError:
        ver = Version.coerce(version_str)
    return ver
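A short check of the fallback path, with illustrative inputs: a strict SemVer string goes straight through Version(), while a partial one only parses via coerce().

print(gen_version("1.4.2"))  # 1.4.2  (valid SemVer, no coercion needed)
print(gen_version("1.4"))    # 1.4.0  (Version("1.4") raises ValueError, so it is coerced)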
Example #40
    def __init__(self, *args, **kwargs):
        requirement = kwargs.pop('requirement', None)
        super(PackageVersion, self).__init__(*args, **kwargs)
        if requirement is None:
            return
        self.raw = requirement.line
        self.package_name = requirement.name
        self.is_editable = requirement.editable
        if requirement.editable:
            self.url = requirement.uri
        else:
            # HACK: we only take the first version.
            self.current_version = Version.coerce(requirement.specs[0][1])
            self.url = package_url(requirement.name)
Example #41
def update_version_number(update_level='patch'):
    """Update version number

    Returns a semantic_version object"""

    """Find current version"""
    temp_file = version_file().parent / ("~" + version_file().name)
    with open(str(temp_file), 'w') as g:
        with open(str(version_file()), 'r') as f:
            for line in f:
                version_matches = bare_version_re.match(line)
                if version_matches:
                    bare_version_str = version_matches.groups(0)[0]
                    if semantic_version.validate(bare_version_str):
                        current_version = Version(bare_version_str)
                        print("{}Current version is {}".format(" "*4, current_version))
                    else:
                        current_version = Version.coerce(bare_version_str)
                        if not text.query_yes_quit("{}I think the version is {}. Use it?".format(" "*4, current_version), default="yes"):
                            exit(colorama.Fore.RED + 'Please set an initial version number to continue')

                    """Determine new version number"""
                    if update_level == 'major':
                        current_version = current_version.next_major()
                    elif update_level == 'minor':
                        current_version = current_version.next_minor()
                    elif update_level == 'patch':
                        current_version = current_version.next_patch()
                    elif update_level == 'prerelease':
                        if not current_version.prerelease:
                            current_version = current_version.next_patch()
                            current_version.prerelease = ('dev', )
                    elif update_level is None:
                        # don't update version
                        pass
                    else:
                        exit(colorama.Fore.RED + 'Cannot update version in {} mode'.format(update_level))

                    print("{}New version is     {}".format(" "*4, current_version))

                    """Update version number"""
                    line = '__version__ = "{}"\n'.format(current_version)
                print(line, file=g, end="")
        #print('', file=g)  # add a blank line at the end of the file
    shutil.copyfile(str(temp_file), str(version_file()))
    os.remove(str(temp_file))
    return current_version
Example #42
def get_current_version(repo):
    latest = None
    for tag in repo.tags:
        v = tag.name
        if v.startswith('v.'):
            v = v[2:]
        elif v.startswith('v'):
            v = v[1:]

        v = Version.coerce(v)

        if not latest:
            latest = v
        else:
            if v > latest:
                latest = v
    return latest
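The loop above relies on Version.coerce giving true semantic ordering for the stripped tag names; a small standalone comparison with made-up tags shows why a plain string comparison would not do:

from semantic_version import Version

print(Version.coerce("1.10.0") > Version.coerce("1.9.2"))  # True
print("1.10.0" > "1.9.2")                                  # False -- lexicographic comparison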
Example #43
    def get_versions(self):

        # strip an optional trailing '.git' suffix from the repo URL
        repo_url = self.url[:-4] if self.url.endswith('.git') else self.url
        repo_owner, repo_name = repo_url.rsplit('/', 2)[-2:]
        repo_ident = '%s/%s' % (repo_owner, repo_name)
        url = '%s/repos/%s/tags' % (self.BASE_URL, repo_ident)

        LOGGER.debug('Getting version list from %s ...', url)

        versions = OrderedDict()
        for version_data in get_json(url):
            version_name = version_data['name']
            version_num = Version.coerce(version_name.lstrip('v'), partial=True)
            versions[version_num] = {
                'name': version_name,
                'url_pack': version_data['tarball_url'],
                'url_root': '%s/%s/%s' % (self.RAW_URL, repo_ident, version_name),
            }

        return versions
Example #44
def parse(top, root):
    path = root[len(top):]
    parts = path.split(os.path.sep)
    props = parse_properties(os.path.join(root, 'source.properties'))
    name = {
        'add-ons': add_ons,
        'build-tools': build_tools,
        'docs': docs,
        'extras': extras,
        'platforms': platforms,
        'platform-tools': platform_tools,
        'samples': samples,
        'sources': sources,
        'system-images': system_images,
        'tools': tools
    }.get(parts[0], default)(props, parts)
    if not name:
        print("Package parse failed:", path, file=sys.stderr)
        return None
    return Package(parts[0], name, props['revision'], Version.coerce(props['revision']))
Example #45
    def __init__(self, *args, **kwargs):
        requirement = kwargs.pop('requirement', None)
        super(PackageVersion, self).__init__(*args, **kwargs)
        if requirement is None:
            return
        self.raw = requirement.line
        self.package_name = requirement.name
        self.is_editable = requirement.editable
        if requirement.editable:
            self.url = ''
            self.current_version = None
        else:
            # HACK: we only take the first version.
            try:
                self.current_version = Version.coerce(requirement.specs[0][1])
                self.is_parseable = True
            except ValueError as ex:
                self.current_version = None
                self.is_parseable = False
                logger.debug("Unparseable package version (%s): %s", requirement.specs[0][1], ex)
            self.url = pypi.package_url(requirement.name)
Example #46
def parse_version(string):
    if string[0] == 'v':
        return Version.coerce(string[1:])
    return Version(string)
Example #47
def semantic_version_key(file_data):
    return Version.coerce(make_safe_version(file_data['version']))
Example #48
    def __init__(self, name, version):
        self.name = name
        self.version_string = version
        self.version = Version.coerce(version)
Example #49
def parse_versioned_name(name):
    if '@' not in name:
        return name, None
    name, version = name.split('@', 1)
    return name, Version.coerce(version)
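Usage sketch with made-up package names:

print(parse_versioned_name("requests"))       # ('requests', None)
print(parse_versioned_name("requests@2.25"))  # ('requests', Version('2.25.0'))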
Example #50
def parse_version(version_string):
    """Parse a string into a PackageVersion."""
    try:
        return Version.coerce(version_string)
    except (TypeError, ValueError):
        return None
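A closing usage sketch for this last helper, with illustrative inputs: coercible strings come back as Version objects, anything else collapses to None instead of raising.

print(parse_version("3.2"))        # 3.2.0
print(parse_version("not-a-ver"))  # None (Version.coerce raises ValueError, swallowed above)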