Example 1
def test_get_deployments(mock_zone):
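    # mock_zone is injected by a mock.patch decorator on this test (omitted from
    # this excerpt); it presumably replaces the zone lookup used by
    # _get_deployments so the test does not call GCP.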
    mock_zone.return_value = "us-central1-a"

    test_dir = os.path.join(os.path.dirname(__file__), "test_data")

    with open(os.path.join(test_dir, "deployments.yaml")) as hf:
        deployments = yaml.safe_load(hf)

    dm_reconciler = reconciler.Reconciler()

    dm_reconciler._get_deployments(deployments=deployments)  # pylint: disable=protected-access

    vmaster = [
        auto_deploy_util.AutoDeployment(
            deployment_name="kf-vmaster-0126-1c1",
            manifests_branch="master",
            create_time="2020-01-26 04:04:19.267000-08:00"),
        auto_deploy_util.AutoDeployment(
            deployment_name="kf-vmaster-0127-502",
            manifests_branch="master",
            create_time="2020-01-26 16:04:13.855000-08:00"),
        auto_deploy_util.AutoDeployment(
            deployment_name="kf-vmaster-0127-082",
            manifests_branch="master",
            create_time="2020-01-27 04:15:52.111000-08:00"),
    ]
    expected = {
        "vmaster": vmaster,
    }
    assertions.assert_dicts_equal(dm_reconciler._deployments, expected)  # pylint: disable=protected-access
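The test_data/deployments.yaml fixture is not reproduced on this page. Judging from the fields _get_deployments reads in Example 2 (name, labels, insertTime, manifest) and the expected result above, each entry presumably looks roughly like the sketch below; the label keys/values and the manifest path are illustrative assumptions, not values copied from the real fixture.

# Hypothetical shape of one entry in test_data/deployments.yaml, written as the
# Python structure yaml.safe_load would return. Field names mirror the
# Deployment Manager fields consumed by _get_deployments; label keys/values and
# the manifest path are assumed for illustration.
deployments = [
    {
        "name": "kf-vmaster-0126-1c1",
        "insertTime": "2020-01-26T04:04:19.267000-08:00",
        "labels": [
            {"key": "auto-deploy", "value": "true"},
            # Assumed keys for auto_deploy_util.AUTO_NAME_LABEL and BRANCH_LABEL:
            {"key": "name", "value": "vmaster"},
            {"key": "manifests-branch", "value": "master"},
        ],
        "manifest": "projects/some-project/global/deployments/kf-vmaster-0126-1c1/manifests/manifest-1",
    },
    # Two more entries follow for kf-vmaster-0127-502 and kf-vmaster-0127-082.
]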
Example 2
    def _get_deployments(self, deployments=None):
        """Build a map of all deployments

    Args:
      deployments: (Optional) Iterator over GCP deployments.
    """
        logging.info("Building map of auto deployments")

        self._deployments = collections.defaultdict(list)
        if not deployments:
            deployments = gcp_util.deployments_iterator(self.config["project"])

        for d in deployments:
            is_auto_deploy = False
            # Use labels to identify auto-deployed instances
            labels = {}
            for label_pair in d.get("labels", []):
                # Newer clusters
                if label_pair["key"] == "auto-deploy":
                    is_auto_deploy = True
                # Older clusters
                if (label_pair["key"] == "purpose"
                        and label_pair["value"] == "kf-test-cluster"):
                    is_auto_deploy = True
                labels[label_pair["key"]] = label_pair["value"]

            if not is_auto_deploy:
                logging.info(
                    "Skipping deployment %s; its missing the label "
                    "auto-deploy", d["name"])
                continue

            if d.get("operation", {}).get("operationType") == "delete":
                logging.info(
                    f"Skipping deployment {d['name']} it is being deleted.")
            if auto_deploy_util.is_storage_deployment(d["name"]):
                logging.info(f"Skipping deployment {d['name']}; it is storage")
                continue

            version_name = labels.get(auto_deploy_util.AUTO_NAME_LABEL,
                                      "unknown")

            if not "manifest" in d:
                # Since we don't know the manifest we can't get the zone.
                # It looks like the manfiest might also be stored in the operation.
                # However, it looks like the reason the manifest isn't there
                # is because the deployment failed. So we can just set zone to the
                # empty string. I think zone only matters for getting cluster
                # credentials but since the deployment failed that shouldn't matter.
                logging.error(f"Deployment {d['name']} doesn't "
                              "have a manifest. This typically indicates the "
                              "deployment failed")
                zone = ""
            else:
                dm_manifest_name = d["manifest"].split("/")[-1]

                zone = self._get_deployment_zone(d["name"], dm_manifest_name)

            context = {
                "deployment_name": d['name'],
                "version_name": version_name,
            }

            manifests_branch = labels.get(auto_deploy_util.BRANCH_LABEL,
                                          "unknown")

            create_time = date_parser.parse(d.get("insertTime"))
            deployment = auto_deploy_util.AutoDeployment(
                manifests_branch=manifests_branch,
                create_time=create_time,
                deployment_name=d["name"],
                labels=labels)
            deployment.zone = zone
            logging.info(
                f"Found auto deployment={d['name']} for version={version_name}",
                extra=context)
            self._deployments[version_name].append(deployment)

        # Sort the values by timestamp
        branches = self._deployments.keys()
        for b in branches:
            self._deployments[b] = sorted(self._deployments[b],
                                          key=lambda x: x.create_time)

        self._save_deployments()
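A minimal way to exercise this method outside the unit test, assuming (as in Example 1) that Reconciler takes no constructor arguments; stubbing _get_deployment_zone and _save_deployments below is purely for illustration and is not part of the original class.

# Hedged usage sketch: pass an in-memory deployment list so the method does not
# call gcp_util.deployments_iterator. The stubs are assumptions for illustration.
dm_reconciler = reconciler.Reconciler()
dm_reconciler._get_deployment_zone = lambda name, manifest: "us-central1-a"  # stub zone lookup
dm_reconciler._save_deployments = lambda: None  # skip persisting state
dm_reconciler._get_deployments(deployments=[{
    "name": "kf-vmaster-0126-1c1",
    "insertTime": "2020-01-26T04:04:19.267000-08:00",
    "labels": [{"key": "auto-deploy", "value": "true"}],
    "manifest": "projects/some-project/global/deployments/kf-vmaster-0126-1c1/manifests/manifest-1",
}])
# dm_reconciler._deployments now maps each version label (or "unknown") to a
# list of AutoDeployment objects sorted by create_time.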
Example 3
    def _get_deployments(self, deployments=None):
        """Build a map of all deployments

    Args:
      deployments: (Optional) Iterator over GCP deployments.
    """
        logging.info("Building map of auto deployments")

        self._deployments = collections.defaultdict(list)
        if not deployments:
            deployments = gcp_util.deployments_iterator(self.config["project"])

        for d in deployments:
            is_auto_deploy = False
            # Use labels to identify auto-deployed instances
            labels = {}
            for label_pair in d.get("labels", []):
                # Newer clusters
                if label_pair["key"] == "auto-deploy":
                    is_auto_deploy = True
                # Older clusters
                if (label_pair["key"] == "purpose"
                        and label_pair["value"] == "kf-test-cluster"):
                    is_auto_deploy = True
                labels[label_pair["key"]] = label_pair["value"]

            if not is_auto_deploy:
                logging.info(
                    "Skipping deployment %s; its missing the label "
                    "auto-deploy", d["name"])
                continue

            if d.get("operation", {}).get("operationType") == "delete":
                logging.info(
                    f"Skipping deployment {d['name']} it is being deleted.")
            if auto_deploy_util.is_storage_deployment(d["name"]):
                logging.info(f"Skipping deployment {d['name']}; it is storage")
                continue

            version_name = labels.get(auto_deploy_util.AUTO_NAME_LABEL,
                                      "unknown")

            dm_manifest_name = d["manifest"].split("/")[-1]

            zone = self._get_deployment_zone(d["name"], dm_manifest_name)

            context = {
                "deployment_name": d['name'],
                "version_name": version_name,
            }

            manifests_branch = labels.get(auto_deploy_util.BRANCH_LABEL,
                                          "unknown")

            create_time = date_parser.parse(d.get("insertTime"))
            deployment = auto_deploy_util.AutoDeployment(
                manifests_branch=manifests_branch,
                create_time=create_time,
                deployment_name=d["name"],
                labels=labels)
            deployment.zone = zone
            logging.info(
                f"Found auto deployment={d['name']} for version={version_name}",
                extra=context)
            self._deployments[version_name].append(deployment)

        # Sort the values by timestamp
        branches = self._deployments.keys()
        for b in branches:
            self._deployments[b] = sorted(self._deployments[b],
                                          key=lambda x: x.create_time)

        self._save_deployments()