Example #1
    def __init__(self, config: Config, project_name, channel_name=None, notes=None):
        self._config = config
        self._common = Common(config=self._config)

        assert project_name, "project name must not be empty!"
        project = self._common.get_single_item_by_name(item_type=item_type_projects, item_name=project_name)
        assert project, f"Project {project_name} cannot be found or you do not have permission to access it!"
        self._project_id = project.get(id_key)
        self._deployment_process_id = project.get(deployment_process_id_key)
        self._notes = notes
        self._release_request_payload = {project_id_key: self._project_id, release_notes_key: notes}

        self._channel_id = ""
        if channel_name:
            channel = self._common.find_sub_by_item(item_type=item_type_projects, item_id=self._project_id,
                                                    sub_type=item_type_channels, sub_name=channel_name)
            assert channel, f"Cannot find channel {channel_name} in project {project_name}"
            self._channel_id = channel.get(id_key)
        if self._channel_id:
            self._release_request_payload[channel_id_key] = self._channel_id

        # package versions read from a variable set,
        # e.g. release_versions = {"Name": "package.near", "Value": "20.0225.1714"}
        self._packages_variable_set_name = None
        # user selected package versions, e.g. "{'packages': {'package.near': '20.0225.1714'}}"
        self._package_version_dict = None

        self._template = None
        self._selected_packages = None
        self._release_response = None
        self._release_id = None
        self._commits_variable_set_name = "configuration_commits" + dot_sign + project_name
        self._gitlab_url_prefix = self._get_url_prefix(set_name="gitlab_info")
        self._latest_commit_dict = None
Example #2
 def __init__(self, width: int = 800, height: int = 600):
     self.width = width
     self.height = height
     self.config = Config()
     self.server = Common(config=self.config)
     self.source_config = Config(is_source_server=True)
     self.source = Common(config=self.source_config)
 def set_radio_spaces_frame(self,
                            server: Common,
                            title: str = "Select a space: "):
     list_spaces = server.get_list_spaces()
     return CommonWidgets.set_radio_items_frame(
         parent=self,
         list_items=list_spaces,
         default_id=server.config.space_id,
         title=title)
 @staticmethod
 def verify_spaces(server: Common):
     if not server.get_list_spaces():
         messagebox.showerror(
             title="No Spaces!",
             message=
             f"No spaces can be found on {'source' if server.config.is_source_server else 'target'} "
             f"server {server.config.endpoint}. Please check your permission and/or credential"
         )
         return False
     return True
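
For reference, here is a minimal standalone sketch of the same space check outside the GUI frame. It builds the target and source Common instances exactly as in the __init__ above and mirrors the verify_spaces logic; the import path octopus_python_client.common is an assumption, so adjust it to wherever Config and Common live in your installation.

from tkinter import messagebox

# assumed import path for the Config and Common classes used throughout these examples
from octopus_python_client.common import Common, Config


def check_spaces(server: Common) -> bool:
    # mirrors verify_spaces above: show an error dialog if no spaces are visible
    if not server.get_list_spaces():
        messagebox.showerror(
            title="No Spaces!",
            message=f"No spaces can be found on "
                    f"{'source' if server.config.is_source_server else 'target'} "
                    f"server {server.config.endpoint}. "
                    f"Please check your permissions and/or credentials.")
        return False
    return True


if __name__ == "__main__":
    target = Common(config=Config())
    source = Common(config=Config(is_source_server=True))
    print("target server has spaces:", check_spaces(server=target))
    print("source server has spaces:", check_spaces(server=source))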
Example #5
 def __init__(self):
     self._target_config = Config()
     self._target_common = Common(config=self._target_config)
     self._source_config = Config(is_source_server=True)
     self._source_common = Common(config=self._source_config)
Example #6
class OctopusClient:
    def __init__(self):
        self._target_config = Config()
        self._target_common = Common(config=self._target_config)
        self._source_config = Config(is_source_server=True)
        self._source_common = Common(config=self._source_config)

    @staticmethod
    def _parse_args():
        parser = argparse.ArgumentParser()
        parser.add_argument("-v",
                            "--version",
                            help="if present, print the version information",
                            action="store_true")
        parser.add_argument("-o", "--endpoint", help="octopus endpoint")
        parser.add_argument("-s",
                            "--space_id_name",
                            help="octopus space id or name")
        parser.add_argument(
            "-m",
            "--pem",
            help="octopus endpoint root pem file path; -m=false to disable pem"
        )
        parser.add_argument(
            "-sps",
            "--spaces",
            help=
            'list of octopus space id or name, like "my space,Spaces-1,Spaces-2"'
        )
        parser.add_argument(
            "-d",
            "--data_path",
            help=
            "the local path for the Octopus server data, 'current' = the current work path"
        )
        parser.add_argument(
            "-k",
            "--api_key",
            help=
            "api key for octopus; either api_key or user_name and password are required"
        )
        parser.add_argument(
            "-user",
            "--user_name",
            help=
            "user_name for octopus; either api_key or user_name and password are required"
        )
        parser.add_argument(
            "-pass",
            "--password",
            help=
            "password for octopus; either api_key or user_name and password are required"
        )
        parser.add_argument("-sre",
                            "--source_endpoint",
                            help="source octopus endpoint for clone")
        parser.add_argument(
            "-srd",
            "--source_data_path",
            help=
            "the local path for the source Octopus server data, 'current' = the current work path"
        )
        parser.add_argument(
            "-srk",
            "--source_api_key",
            help=
            "api key for octopus; either api_key or user_name and password are required"
        )
        parser.add_argument(
            "-sru",
            "--source_user_name",
            help=
            "user_name for octopus; either api_key or user_name and password are required"
        )
        parser.add_argument(
            "-srp",
            "--source_password",
            help=
            "password for octopus; either api_key or user_name and password are required"
        )
        parser.add_argument(
            "-srs",
            "--source_space_id_name",
            help="source octopus space id or name for clone/migration")
        parser.add_argument(
            "-srm",
            "--source_pem",
            help=
            "source octopus endpoint root pem file path; -srm=false to disable pem"
        )
        parser.add_argument(
            "-ld",
            "--local_data",
            help="if present, local_data = True; the source server/space "
            "data are stored as YAML files locally",
            action="store_true")
        parser.add_argument("-a",
                            "--action",
                            help=str(Actions.__dict__.values()))
        parser.add_argument("-ow",
                            "--overwrite",
                            help="if present, overwrite = True",
                            action="store_true")
        parser.add_argument(
            "-ns",
            "--no_stdout",
            help="if present, no_stdout = True, means no stdout",
            action="store_true")
        parser.add_argument(
            "-ts",
            "--item_types",
            help=
            "if not item_types and not octopus_space_id, get all item types "
            "regardless whether they are above Spaces; if (not item_types) and octopus_space_id, "
            "get all item types below octopus_space_id; "
            'list like "accounts,actiontemplates,artifacts" is also accepted; '
            "item types above Spaces: " +
            ", ".join(outer_space_download_types) +
            "; \nitem types above and under Spaces: " +
            ", ".join(inside_space_download_types))
        parser.add_argument("-tp",
                            "--item_type",
                            help="one of item types above Spaces: " +
                            ", ".join(outer_space_download_types) +
                            "; \nitem types above and under Spaces: " +
                            ", ".join(inside_space_download_types))
        parser.add_argument("-nm",
                            "--item_name",
                            help="item name: e.g. project_name")
        parser.add_argument("-id",
                            "--item_id",
                            help="item id: e.g. Lifecycles-1")
        parser.add_argument(
            "-bn",
            "--base_item_name",
            help="base item name, either local or on Octopus server")
        parser.add_argument("-pn",
                            "--parent_name",
                            help="parent item name: e.g. project_name")
        parser.add_argument("-pt",
                            "--parent_type",
                            help="parent item type: e.g. projects")
        parser.add_argument(
            "-ct",
            "--child_type",
            help=f"child item type: e.g. {item_type_deployment_processes}")
        parser.add_argument(
            "-ck",
            "--child_id_key",
            help=f"child id key: e.g. {deployment_process_id_key}")
        parser.add_argument(
            "-sk",
            "--sub_item_key",
            help=
            f"sub-item key: this sub-item is for copy/clone e.g. {steps_key}")
        parser.add_argument(
            "-bp",
            "--base_parent_name",
            help="base parent item name: e.g. a base project_name")
        parser.add_argument("-sn",
                            "--step_name",
                            help="step name as in octopus process")
        parser.add_argument(
            "-bs",
            "--base_step_name",
            help="base step name as in octopus process for cloning a step")
        parser.add_argument(
            "-ps",
            "--prev_step_name",
            help=
            "previous step name in octopus process for the step insertion location"
        )
        parser.add_argument("-tl",
                            "--time_limit_second",
                            help="time limit in second")
        parser.add_argument("-rv",
                            "--release_version",
                            help="release version for creating a new release")
        parser.add_argument(
            "-as",
            "--add_suffix",
            help="if present, add suffix to variable sets name")
        parser.add_argument(
            "-rs",
            "--remove_suffix",
            help="if present, remove suffix from variable sets name")
        parser.add_argument("-pj", "--project_name", help="project name")
        parser.add_argument("-cn", "--channel_name", help="channel name")
        parser.add_argument("-nt", "--notes", help="notes")
        parser.add_argument("-ri",
                            "--release_id",
                            help="release id for deployment")
        parser.add_argument("-en",
                            "--environment_name",
                            help="environment name, like Integration")
        parser.add_argument("-tn",
                            "--tenant_name",
                            help="tenant name, like cd-near")
        parser.add_argument("-cm", "--comments", help="comments")
        parser.add_argument("-eps",
                            "--excluded_projects",
                            help="comma delimited project names")
        parser.add_argument("-pgs",
                            "--project_groups",
                            help="comma delimited project group names")
        parser.add_argument("-pkg",
                            "--package_history",
                            help="if present, package_history = True",
                            action="store_true")

        args, unknown = parser.parse_known_args()
        return args

    def _process_args_to_configs(self):
        args = self._parse_args()
        if args.action:
            self._target_config.action = args.action
        elif args.version:
            print(SystemConfig.TITLE)
            sys.exit()
        else:
            MainGUI().set_gui()
            sys.exit()

        if args.endpoint:
            self._target_config.endpoint = args.endpoint
        assert self._target_config.endpoint.endswith("/api/"), \
            f"octopus endpoint must end with /api/; {self._target_config.endpoint} is invalid"

        if args.data_path == Config.USE_CURRENT_DATA_PATH:
            self._target_config.data_path = os.getcwd()
        elif args.data_path:
            self._target_config.data_path = args.data_path

        if args.api_key and args.api_key.startswith("API-"):
            self._target_config.api_key = args.api_key
        elif args.api_key or args.api_key == "":
            self._target_common.log_info_print(
                msg=
                f"The octopus API-KEY does not start with 'API-'; so use user/password instead"
            )
            self._target_config.api_key = ""

        if args.user_name:
            self._target_config.user_name = args.user_name
        if args.password:
            self._target_config.password = args.password
        assert self._target_config.api_key or (self._target_config.user_name and self._target_config.password), \
            f"either api_key or user_name and password are required"

        if args.pem and args.pem.lower() == "false":
            self._target_config.pem = False
        elif args.pem:
            self._target_config.pem = args.pem

        self._target_config.overwrite = args.overwrite
        logger.info(
            f"self._target_config.overwrite: {self._target_config.overwrite}")
        self._target_config.no_stdout = args.no_stdout
        logger.info(
            f"self._target_config.no_stdout: {self._target_config.no_stdout}")
        self._target_config.package_history = args.package_history
        logger.info(
            f"self._target_config.package_history: {self._target_config.package_history}"
        )

        if args.space_id_name:
            self._target_config.space_id = self._target_common.verify_space(
                space_id_name=args.space_id_name)
            if self._target_config.space_id:
                logger.info(
                    f"the target space_id is: {self._target_config.space_id}")
            else:
                raise ValueError(
                    f"the space id/name {args.space_id_name} you specified does not exist or "
                    f"you do not have permission to access it on server {self._target_config.endpoint}"
                )

        if args.action != Actions.ACTION_GET_SPACES and not self._target_config.space_id \
                and input(f"Are you sure you want to run a command against {None} space [Y/n]? ") != "Y":
            return

        if args.action in MIGRATION_LIST:
            self._target_common.log_info_print(
                msg=
                f"===== Action: {args.action}; processing the source config ====="
            )

            self._source_config.local_data = args.local_data
            logger.info(
                f"self._source_config.local_data: {self._source_config.local_data}"
            )
            if args.source_endpoint:
                self._source_config.endpoint = args.source_endpoint
            assert self._source_config.local_data or self._source_config.endpoint.endswith("/api/"), \
                f"octopus endpoint must end with /api/; {self._source_config.endpoint} is invalid"

            if args.source_data_path == Config.USE_CURRENT_DATA_PATH:
                self._source_config.data_path = os.getcwd()
            elif args.source_data_path:
                self._source_config.data_path = args.source_data_path
            if args.local_data and self._target_config.endpoint != self._source_config.endpoint \
                    and self._target_config.data_path == self._source_config.data_path:
                raise ValueError(
                    f"the source Octopus server {self._source_config.endpoint} and the target "
                    f"Octopus server {self._target_config.endpoint} cannot use the same local "
                    f"path {self._target_config.data_path} if cloning source is the local data"
                )

            if args.source_api_key and args.source_api_key.startswith("API-"):
                self._source_config.api_key = args.source_api_key
            elif args.source_api_key or args.source_api_key == "":
                self._target_common.log_info_print(
                    msg=
                    f"The source octopus API-KEY does not start with 'API-'; so use user/password instead"
                )
                self._source_config.api_key = ""

            if args.source_user_name:
                self._source_config.user_name = args.source_user_name
            if args.source_password:
                self._source_config.password = args.source_password
            assert self._source_config.api_key or (self._source_config.user_name and self._source_config.password), \
                f"either api_key or user_name and password are required"

            if args.source_pem and args.source_pem.lower() == "false":
                self._source_config.pem = False
            elif args.source_pem:
                self._source_config.pem = args.source_pem

            if args.source_space_id_name:
                self._source_config.space_id = self._source_common.verify_space(
                    space_id_name=args.source_space_id_name)
                if self._source_config.space_id:
                    self._target_common.log_info_print(
                        msg=f"The source octopus space_id is: "
                        f"{self._source_config.space_id}")
                elif self._source_config.local_data:
                    self._target_common.log_info_print(
                        msg=
                        f"{args.action} from local source data {args.source_space_id_name}"
                    )
                    self._source_config.space_id = args.source_space_id_name
            if not self._source_config.space_id and not self._source_config.local_data:
                raise ValueError(
                    f"On Octopus server {self._source_config.endpoint}, the space id/name "
                    f"{args.source_space_id_name} does not exist or you do not have permission to "
                    f"access it.")

            if self._source_config.endpoint == self._target_config.endpoint:
                if args.action == Actions.ACTION_CLONE_SERVER:
                    raise ValueError(
                        f"Cannot {args.action} from an endpoint to the same one: "
                        f"{self._source_config.endpoint}")
                elif self._source_config.space_id == self._target_config.space_id:
                    raise ValueError(
                        f"Cannot {args.action} from a space to the same space "
                        f"{self._source_config.space_id} on the same Octopus server "
                        f"{self._source_config.endpoint}")

            if args.action == Actions.ACTION_CLONE_SERVER:
                self._target_common.log_info_print(
                    msg=f"{args.action} from {self._source_config.endpoint} to "
                    f"{self._target_config.endpoint}; space ids are cleared")
                self._source_config.space_id = None
                self._target_config.space_id = None
            elif not self._source_config.space_id or not self._target_config.space_id:
                raise ValueError(
                    f"Cannot {args.action} from space {self._source_config.space_id} of the source "
                    f"Octopus server {self._source_config.endpoint} to space "
                    f"{self._target_config.space_id} of the target Octopus server "
                    f"{self._target_config.endpoint}")

            self._source_config.save_config()

        self._target_config.save_config()

        return args

    def run(self):
        args = self._process_args_to_configs()

        if self._target_config.action == Actions.ACTION_GET_SPACES:
            self._target_common.get_spaces_save(
                item_types_comma_delimited=args.item_types,
                space_id_or_name_comma_delimited=args.spaces)
        elif self._target_config.action == Actions.ACTION_GET_TYPES:
            self._target_common.get_types_save(
                item_types_comma_delimited=args.item_types)
        elif self._target_config.action == Actions.ACTION_GET_TYPE:
            self._target_common.get_one_type_save(item_type=args.item_type)
        elif self._target_config.action == Actions.ACTION_DELETE_TYPE:
            self._target_common.delete_one_type(item_type=args.item_type)
        elif self._target_config.action == Actions.ACTION_DELETE_TYPES:
            self._target_common.delete_types(
                item_types_comma_delimited=args.item_types)
        elif self._target_config.action == Actions.ACTION_GET:
            self._target_common.get_single_item_by_name_or_id_save(
                item_type=args.item_type,
                item_name=args.item_name,
                item_id=args.item_id)
        elif self._target_config.action == Actions.ACTION_UPDATE:
            self._target_common.update_single_item_save(
                item_type=args.item_type,
                item_name=args.item_name,
                item_id=args.item_id)
        elif self._target_config.action == Actions.ACTION_UPDATE_MERGE:
            self._target_common.merge_single_item_save(
                item_type=args.item_type,
                item_name=args.item_name,
                item_id=args.item_id,
                child_id_key=args.child_id_key)
        elif self._target_config.action == Actions.ACTION_CREATE:
            self._target_common.create_single_item_from_local_file(
                item_type=args.item_type,
                item_name=args.item_name,
                local_item_name=args.base_item_name)
        elif self._target_config.action == Actions.ACTION_CLONE:
            self._target_common.clone_single_item_from_remote_item(
                item_type=args.item_type,
                item_name=args.item_name,
                base_item_name=args.base_item_name)
        elif self._target_config.action == Actions.ACTION_DELETE:
            self._target_common.delete_single_item_by_name_or_id(
                item_type=args.item_type,
                item_name=args.item_name,
                item_id=args.item_id)
        elif self._target_config.action == Actions.ACTION_GET_CHILD:
            self._target_common.get_child_item_save(
                parent_name=args.parent_name,
                parent_type=args.parent_type,
                child_id_key=args.child_id_key,
                child_type=args.child_type)
        elif self._target_config.action == Actions.ACTION_UPDATE_CHILD:
            self._target_common.update_child_item_from_local_save(
                parent_name=args.parent_name,
                parent_type=args.parent_type,
                child_id_key=args.child_id_key,
                child_type=args.child_type)
        elif self._target_config.action == Actions.ACTION_CLONE_CHILD:
            self._target_common.clone_child_item_from_another_parent_save(
                parent_name=args.parent_name,
                base_parent_name=args.base_parent_name,
                parent_type=args.parent_type,
                child_id_key=args.child_id_key,
                child_type=args.child_type,
                sub_item_key=args.sub_item_key)
        elif self._target_config.action == Actions.ACTION_CLONE_PROCESS_STEP:
            DeploymentProcesses(config=self._target_config).clone_process_step(
                project_literal_name=args.project_name,
                step_name=args.step_name,
                base_step_name=args.base_step_name,
                prev_step_name=args.prev_step_name)
        elif self._target_config.action == Actions.ACTION_DELETE_PROCESS_STEP:
            DeploymentProcesses(
                config=self._target_config).delete_process_step(
                    project_literal_name=args.project_name,
                    step_name=args.step_name)
        elif self._target_config.action == Actions.ACTION_CLONE_PROJECT:
            Projects(config=self._target_config).clone_project(
                project_literal_name=args.project_name,
                base_project_name=args.base_item_name)
        elif self._target_config.action == Actions.ACTION_DELETE_PROJECT:
            Projects(config=self._target_config).delete_project(
                project_literal_name=args.project_name)
        elif self._target_config.action == Actions.ACTION_DELETE_PROJECTS:
            Projects(config=self._target_config).delete_projects(
                project_groups_comma_delimited=args.project_groups,
                excluded_projects_comma_delimited=args.excluded_projects)
        elif self._target_config.action == Actions.ACTION_GET_PROJECT:
            Projects(config=self._target_config).get_project(
                project_literal_name=args.project_name)
        elif self._target_config.action == Actions.ACTION_PROJECT_UPDATE_VARIABLE_SETS:
            Projects(config=self._target_config).project_update_variable_sets(
                project_literal_name=args.project_name,
                remove_suffix=args.remove_suffix,
                add_suffix=args.add_suffix)
        elif self._target_config.action == Actions.ACTION_CLONE_SERVER:
            Migration(src_config=self._source_config,
                      dst_config=self._target_config).clone_server(
                          space_id_or_name_comma_delimited=args.spaces,
                          item_types_comma_delimited=args.item_types)
        elif self._target_config.action == Actions.ACTION_CLONE_SPACE:
            Migration(src_config=self._source_config,
                      dst_config=self._target_config).clone_space(
                          item_types_comma_delimited=args.item_types)
        elif self._target_config.action == Actions.ACTION_CLONE_SPACE_ITEM:
            Migration(src_config=self._source_config,
                      dst_config=self._target_config).clone_space_item(
                          item_type=args.item_type,
                          item_name=args.item_name,
                          item_id=args.item_id)
        elif self._target_config.action == Actions.ACTION_TASK_STATUS:
            self._target_common.get_task_status(task_id=args.item_id)
        elif self._target_config.action == Actions.ACTION_WAIT_TASK:
            self._target_common.wait_task(
                task_id=args.item_id, time_limit_second=args.time_limit_second)
        elif self._target_config.action == Actions.ACTION_CREATE_RELEASE:
            ReleaseDeployment.create_release_direct(
                config=self._target_config,
                release_version=args.release_version,
                project_name=args.project_name,
                channel_name=args.channel_name,
                notes=args.notes)
        elif self._target_config.action == Actions.ACTION_CREATE_DEPLOYMENT:
            ReleaseDeployment.create_deployment_direct(
                config=self._target_config,
                release_id=args.release_id,
                environment_name=args.environment_name,
                tenant_name=args.tenant_name,
                comments=args.comments,
                project_name=args.project_name)
        elif self._target_config.action == Actions.ACTION_CREATE_RELEASE_DEPLOYMENT:
            ReleaseDeployment.create_release_deployment(
                config=self._target_config,
                release_version=args.release_version,
                project_name=args.project_name,
                channel_name=args.channel_name,
                notes=args.notes,
                environment_name=args.environment_name,
                tenant_name=args.tenant_name,
                comments=args.comments)
        elif self._target_config.action == Actions.ACTION_GUI:
            MainGUI().set_gui()
        else:
            log_raise_value_error(local_logger=logger,
                                  err="We only support actions: " +
                                  str(Actions.__dict__.values()))
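
The class above is the command-line front end of the client: _parse_args defines all flags, _process_args_to_configs validates them into the target and source Config objects, and run dispatches on the chosen action. A minimal entry-point sketch, assuming OctopusClient is importable (the module path below is an assumption):

# assumed import path for the OctopusClient class shown above
from octopus_python_client.main import OctopusClient

if __name__ == "__main__":
    # with no -a/--action and no -v/--version, run() falls back to MainGUI().set_gui()
    OctopusClient().run()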
Example #7
 def create_release_direct(config: Config, release_version, project_name, channel_name=None, notes=None):
     common = Common(config=config)
     release = ReleaseDeployment(config=config, project_name=project_name, channel_name=channel_name, notes=notes)
     release.create_release(release_version=release_version)
     common.log_info_print(local_logger=logger, msg=json.dumps(release.release_response))
     return release
Example #8
    def create_deployment_direct(config: Config, environment_name, tenant_name, release_id=None, project_name=None,
                                 comments=None):
        logger.info(f"creating a deployment for {release_id} in space {config.space_id} with environment "
                    f"{environment_name}, tenant {tenant_name} and comments: {comments}")
        common = Common(config=config)

        # TODO project_name
        assert (release_id or project_name), "either release_id or project_name must exist!"
        assert environment_name, "environment_name must not be empty!"
        assert tenant_name, "tenant_name must not be empty!"

        if not release_id:
            logger.info(f"Get the latest release id for project {project_name}")
            project_id = common.get_item_id_by_name(item_type=item_type_projects, item_name=project_name)
            releases_list = common.get_project_releases_sorted_list(project_id=project_id)
            if not releases_list:
                raise ValueError(f"Project {project_name} does not have any releases. Please create a release first")
            release_id = releases_list[0].get(id_key)
            logger.info(f"The latest release id is {release_id}")

        deployment_request_payload = \
            {release_id_key: release_id,
             environment_id_key: common.get_item_id_by_name(item_type=item_type_environments,
                                                            item_name=environment_name),
             tenant_id_key: common.get_item_id_by_name(item_type=item_type_tenants, item_name=tenant_name),
             comments_key: comments}
        logger.info("the request deployment payload is")
        logger.info(pformat(deployment_request_payload))
        deployment_response_payload = common.request_octopus_item(address=item_type_deployments,
                                                                  payload=deployment_request_payload,
                                                                  operation=operation_post)
        logger.info("the response deployment payload is")
        common.log_info_print(local_logger=logger, msg=json.dumps(deployment_response_payload))
        common.save_single_item(item_type=item_type_deployments, item=deployment_response_payload)
        return deployment_response_payload
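
A usage sketch for create_deployment_direct as defined above (it is a static method of ReleaseDeployment, shown in full in Example #9 below). The import paths, project name, environment name and tenant name are placeholders; the environment and tenant values are borrowed from the CLI help text in Example #6.

# assumed import paths
from octopus_python_client.common import Config
from octopus_python_client.release_deployment import ReleaseDeployment

config = Config()  # endpoint, space and credentials come from the saved configuration
deployment = ReleaseDeployment.create_deployment_direct(
    config=config,
    environment_name="Integration",  # placeholder environment name
    tenant_name="cd-near",           # placeholder tenant name
    project_name="my_project",       # no release_id, so the latest release of this project is deployed
    comments="deployed by the python client")
print(deployment)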
Example #9
class ReleaseDeployment:
    def __init__(self, config: Config, project_name, channel_name=None, notes=None):
        self._config = config
        self._common = Common(config=self._config)

        assert project_name, "project name must not be empty!"
        project = self._common.get_single_item_by_name(item_type=item_type_projects, item_name=project_name)
        assert project, f"Project {project_name} cannot be found or you do not have permission to access it!"
        self._project_id = project.get(id_key)
        self._deployment_process_id = project.get(deployment_process_id_key)
        self._notes = notes
        self._release_request_payload = {project_id_key: self._project_id, release_notes_key: notes}

        self._channel_id = ""
        if channel_name:
            channel = self._common.find_sub_by_item(item_type=item_type_projects, item_id=self._project_id,
                                                    sub_type=item_type_channels, sub_name=channel_name)
            assert channel, f"Cannot find channel {channel_name} in project {project_name}"
            self._channel_id = channel.get(id_key)
        if self._channel_id:
            self._release_request_payload[channel_id_key] = self._channel_id

        # package versions read from a variable set,
        # e.g. release_versions = {"Name": "package.near", "Value": "20.0225.1714"}
        self._packages_variable_set_name = None
        # user selected package versions, e.g. "{'packages': {'package.near': '20.0225.1714'}}"
        self._package_version_dict = None

        self._template = None
        self._selected_packages = None
        self._release_response = None
        self._release_id = None
        self._commits_variable_set_name = "configuration_commits" + dot_sign + project_name
        self._gitlab_url_prefix = self._get_url_prefix(set_name="gitlab_info")
        self._latest_commit_dict = None

    def _get_url_prefix(self, set_name):
        info_service_list_variables = self._common.get_list_variables_by_set_name_or_id(set_name=set_name)
        if info_service_list_variables:
            url_prefix_variable = find_item(lst=info_service_list_variables, key=name_key, value=url_prefix_key)
            if url_prefix_variable:
                return url_prefix_variable.get(value_key)
        return ""

    def _get_deployment_process_template(self):
        logger.info(f"Fetching deployment process template from {self._deployment_process_id} with channel "
                    f"{self._channel_id} in {self._config.space_id}, which is used for defining release and deployment")
        address = f"{item_type_deployment_processes}/{self._deployment_process_id}/template?channel=" \
                  f"{self._channel_id}"
        self._template = self._common.request_octopus_item(address=address)

    def _get_selected_packages(self):
        logger.info(f"getting package information for each step")
        self._selected_packages = []
        for package in self._template.get(packages_key):
            logger.info(f"getting package information for {package.get(action_name_key)} with package "
                        f"{package.get(package_reference_name_key)}")
            address = f"{item_type_feeds}/{package.get(feed_id_key)}/{item_type_packages}/versions?packageId=" \
                      f"{package.get(package_id_key)}&take=1"
            package_detail = self._common.request_octopus_item(address=address)
            selected_package = {action_name_key: package.get(action_name_key),
                                package_reference_name_key: package.get(package_reference_name_key),
                                version_key: package_detail.get(items_key)[0].get(version_key)}
            self._selected_packages.append(selected_package)

    def _update_package_version(self, package, version):
        match_dict = {package_reference_name_key: package}
        replace_dict = {version_key: version}
        replace_list_new_value(lst=self._selected_packages, match_dict=match_dict, replace_dict=replace_dict)

    def _update_selected_packages(self):
        logger.info("update package versions...")
        if self._packages_variable_set_name:
            list_release_versions = self._common.get_list_variables_by_set_name_or_id(
                set_name=self._packages_variable_set_name)
            for package_version in list_release_versions:
                self._update_package_version(package=package_version.get(name_key),
                                             version=package_version.get(value_key))
        if self._package_version_dict:
            for package, version in self._package_version_dict.items():
                self._update_package_version(package=package, version=version)

    def _form_single_commit_note(self, commit_variable):
        date_time = commit_variable.get(name_key)
        commit_json = commit_variable.get(value_key)
        commit_dict = json.loads(commit_json)
        title = ". ".join(commit_dict.get(title_key)) if isinstance(commit_dict.get(title_key), list) else \
            str(commit_dict.get(title_key))
        return f"- {date_time} - [{title}]({self._gitlab_url_prefix}" \
               f"{commit_dict.get(sha_key)}) - {commit_dict.get(author_key)}"

    def _get_prev_release_match_commit_date_time(self, list_releases: list):
        if list_releases:
            for release in list_releases:
                logger.info(f"checking {release.get(id_key)} for project {self._project_id}...")
                if release.get(release_notes_key):
                    logger.info(f"found notes in {release.get(id_key)} and try to get the commit timestamp...")
                    notes_last_line = release.get(release_notes_key).splitlines()[-1]
                    last_line_parsed = parse_string(local_logger=logger, string=notes_last_line)
                    if isinstance(last_line_parsed, dict) and last_line_parsed.get(self._commits_variable_set_name):
                        prev_release_match_commit_date_time = last_line_parsed.get(self._commits_variable_set_name)
                        topic_note = f"\nThe previous release with the commit timestamp " \
                                     f"{prev_release_match_commit_date_time} is {release.get(id_key)} " \
                                     f"(release version: {release.get(version_key)}). "
                        logger.info(topic_note)
                        return topic_note, prev_release_match_commit_date_time
                    else:
                        logger.warning(f"the commit timestamp in {release.get(id_key)} not exist")
                else:
                    logger.warning(f"{release.get(id_key)} has no release notes")
            topic_note = f"\nNo previous release with the commit timestamp in notes was found"
            logger.warning(topic_note)
            return topic_note, ""
        else:
            topic_note = f"\nThis is the first release for project {self._project_id}. "
            logger.info(topic_note)
            return topic_note, ""

    def _generate_commits_notes(self):
        logger.info("generating the release notes for the commits history")
        # find the latest/previous release for this project
        list_releases = self._common.get_project_releases_sorted_list(project_id=self._project_id)
        topic_note, prev_release_match_commit_date_time = \
            self._get_prev_release_match_commit_date_time(list_releases=list_releases)
        list_notes = ["\n========== below is auto-generated notes ==========", topic_note]

        # historical commits since the latest release
        list_configuration_commits = self._common.get_list_variables_by_set_name_or_id(
            set_name=self._commits_variable_set_name)
        if not list_configuration_commits:
            msg = f"\nVariable set {self._commits_variable_set_name} contains NONE historical commits. No commits " \
                  f"can be matched to the releases."
            logger.error(msg)
            list_notes.append(msg)
            return newline_sign.join(list_notes)

        list_configuration_commits_sorted = sorted(list_configuration_commits, key=lambda k: k.get(name_key))
        list_commit_notes = []
        latest_commit_variable = None
        for commit_variable in list_configuration_commits_sorted:
            latest_commit_variable = commit_variable
            commit_note = self._form_single_commit_note(commit_variable=commit_variable)
            list_commit_notes.append(commit_note)
            # if prev release has no matched commit or the commit could not be found, append all commits
            # once the prev release matched commit is found, only append the commits after it
            if prev_release_match_commit_date_time == commit_variable.get(name_key):
                logger.info(f"found a matched timestamp {prev_release_match_commit_date_time} in commits history, "
                            f"so will start to record all the commits after it")
                list_commit_notes = []
        list_notes.append("\nThe gitlab commits since the previous release are: ")
        if not list_commit_notes:
            list_notes.append("None")
        else:
            list_commit_notes.reverse()
            list_notes.extend(list_commit_notes)

        # matched latest commit for the current release
        if latest_commit_variable:
            latest_timestamp = latest_commit_variable.get(name_key)
            self._latest_commit_dict = json.loads(latest_commit_variable.get(value_key))
            self._latest_commit_dict[timestamp_key] = latest_timestamp
            latest_commit_note = self._form_single_commit_note(commit_variable=latest_commit_variable)
            list_notes.append(f"\nThe matched latest gitlab commit for this release is {latest_commit_note}")
            list_notes.append(f"\nBelow is a python dictionary read by Octopus python client in the succeeding "
                              f"releases to identify the gitlab commit for the preceding release and it must be the "
                              f"last line in the release notes. '{self._commits_variable_set_name}' is the variable "
                              f"set name for the commits history and the value is the matched commit timestamp for "
                              f"this release")
            list_notes.append("\n{'" + f"{self._commits_variable_set_name}" + "': '" + f"{latest_timestamp}" + "'}")
        return newline_sign.join(list_notes)

    def _process_notes(self):
        logger.info("process notes...")
        notes = parse_string(local_logger=logger, string=self._notes)
        if isinstance(notes, dict):
            logger.info("the notes is a dictionary, so further process...")
            logger.info(pformat(notes))
            self._packages_variable_set_name = notes.get(release_versions_key)
            self._package_version_dict = notes.get(item_type_packages)
            self._update_selected_packages()
        commit_notes = self._generate_commits_notes()
        if self._notes:
            self._release_request_payload[release_notes_key] = newline_sign.join([self._notes, commit_notes])
        else:
            self._release_request_payload[release_notes_key] = commit_notes

    def _process_package_versions_notes(self):
        self._get_deployment_process_template()
        self._get_selected_packages()
        self._process_notes()

    # release version must be unique for each release
    def create_release(self, release_version=None):
        self._process_package_versions_notes()
        if not release_version:
            release_version = self._template.get(next_version_increment_key)
        self._release_request_payload[version_key] = release_version
        self._release_request_payload[selected_packages_key] = self._selected_packages
        logger.info("the request release payload is")
        logger.info(pformat(self._release_request_payload))
        self._release_response = self._common.request_octopus_item(address=item_type_releases,
                                                                   payload=self._release_request_payload,
                                                                   operation=operation_post)
        if self._latest_commit_dict:
            self._release_response[latest_commit_sha_key] = self._latest_commit_dict.get(sha_key)
        logger.info("the response release payload is")
        logger.info(pformat(self._release_response))
        self._common.save_single_item(item_type=item_type_releases, item=self._release_response)
        self._release_id = self._release_response.get(id_key)
        return self._release_response

    @staticmethod
    def create_deployment_direct(config: Config, environment_name, tenant_name, release_id=None, project_name=None,
                                 comments=None):
        logger.info(f"creating a deployment for {release_id} in space {config.space_id} with environment "
                    f"{environment_name}, tenant {tenant_name} and comments: {comments}")
        common = Common(config=config)

        # TODO project_name
        assert (release_id or project_name), "either release_id or project_name must exist!"
        assert environment_name, "environment_name must not be empty!"
        assert tenant_name, "tenant_name must not be empty!"

        if not release_id:
            logger.info(f"Get the latest release id for project {project_name}")
            project_id = common.get_item_id_by_name(item_type=item_type_projects, item_name=project_name)
            releases_list = common.get_project_releases_sorted_list(project_id=project_id)
            if not releases_list:
                raise ValueError(f"Project {project_name} does not have any releases. Please create a release first")
            release_id = releases_list[0].get(id_key)
            logger.info(f"The latest release id is {release_id}")

        deployment_request_payload = \
            {release_id_key: release_id,
             environment_id_key: common.get_item_id_by_name(item_type=item_type_environments,
                                                            item_name=environment_name),
             tenant_id_key: common.get_item_id_by_name(item_type=item_type_tenants, item_name=tenant_name),
             comments_key: comments}
        logger.info("the request deployment payload is")
        logger.info(pformat(deployment_request_payload))
        deployment_response_payload = common.request_octopus_item(address=item_type_deployments,
                                                                  payload=deployment_request_payload,
                                                                  operation=operation_post)
        logger.info("the response deployment payload is")
        common.log_info_print(local_logger=logger, msg=json.dumps(deployment_response_payload))
        common.save_single_item(item_type=item_type_deployments, item=deployment_response_payload)
        return deployment_response_payload

    def create_deployment_for_current_release(self, config, environment_name=None, tenant_name=None, comments=None):
        return ReleaseDeployment.create_deployment_direct(config=config, release_id=self._release_id,
                                                          environment_name=environment_name, tenant_name=tenant_name,
                                                          comments=comments)

    @property
    def release_id(self):
        logger.info(f"get the release id {self._release_id}")
        return self._release_id

    @property
    def release_response(self):
        logger.info(f"get the release response for {self._release_id}")
        return self._release_response

    def _extract_package_versions(self):
        package_versions_dict = {}
        for package in self._selected_packages:
            if package.get(package_reference_name_key):
                package_versions_dict[package.get(package_reference_name_key)] = package.get(version_key)
        return package_versions_dict

    @staticmethod
    def get_package_versions(config: Config, project_name, channel_name=None, notes=None):
        release = ReleaseDeployment(config=config, project_name=project_name, channel_name=channel_name, notes=notes)
        release._process_package_versions_notes()
        return release._extract_package_versions()

    @staticmethod
    def create_release_direct(config: Config, release_version, project_name, channel_name=None, notes=None):
        common = Common(config=config)
        release = ReleaseDeployment(config=config, project_name=project_name, channel_name=channel_name, notes=notes)
        release.create_release(release_version=release_version)
        common.log_info_print(local_logger=logger, msg=json.dumps(release.release_response))
        return release

    @staticmethod
    def create_release_deployment(config: Config, release_version, project_name, comments, channel_name=None,
                                  notes=None, environment_name=None, tenant_name=None):
        release = ReleaseDeployment.create_release_direct(
            config=config, release_version=release_version, project_name=project_name, channel_name=channel_name,
            notes=notes)
        return release.create_deployment_for_current_release(config=config, environment_name=environment_name,
                                                             tenant_name=tenant_name, comments=comments)
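
A usage sketch for the static helpers above, mirroring what the CLI does for a create-release action: build the release (passing release_version=None lets create_release fall back to the template's next version increment) and then inspect the response. Import paths and the project name are placeholders.

import json

# assumed import paths
from octopus_python_client.common import Config
from octopus_python_client.release_deployment import ReleaseDeployment

config = Config()
release = ReleaseDeployment.create_release_direct(
    config=config,
    release_version=None,           # None -> create_release uses the template's next version increment
    project_name="my_project",      # placeholder project name
    channel_name=None,              # default channel
    notes="created by the python client")
print(release.release_id)
print(json.dumps(release.release_response))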
Example #10
 def __init__(self, config, logger=None):
     self.logger = logger if logger else logging.getLogger(
         self.__class__.__name__)
     self.config = config
     self.common = Common(config=config)
Example #11
class DeploymentProcesses:
    def __init__(self, config, logger=None):
        self.logger = logger if logger else logging.getLogger(
            self.__class__.__name__)
        self.config = config
        self.common = Common(config=config)

    def get_all_deployment_processes(self):
        return self.common.get_one_type_save(
            item_type=item_type_deployment_processes)

    def get_deployment_process(self, project_literal_name):
        return self.common.get_child_item_save(
            parent_name=project_literal_name,
            parent_type=item_type_projects,
            child_id_key=deployment_process_id_key,
            child_type=item_type_deployment_processes)

    def update_deployment_process(self, project_literal_name):
        self.common.update_child_item_from_local_save(
            parent_name=project_literal_name,
            parent_type=item_type_projects,
            child_id_key=deployment_process_id_key,
            child_type=item_type_deployment_processes)

    # clone deployment process from a base project to a project
    def clone_deployment_process(self, project_literal_name,
                                 base_project_name):
        self.common.clone_child_item_from_another_parent_save(
            parent_name=project_literal_name,
            base_parent_name=base_project_name,
            parent_type=item_type_projects,
            child_id_key=deployment_process_id_key,
            child_type=item_type_deployment_processes,
            sub_item_key=steps_key)

    def clone_process_step(self,
                           project_literal_name,
                           step_name,
                           base_step_name,
                           prev_step_name=None):
        if not project_literal_name or not step_name or not base_step_name:
            raise ValueError(
                'Project literal name, step name, and base step name must not be empty'
            )
        self.common.log_info_print(
            local_logger=self.logger,
            msg=
            f"clone project {project_literal_name} step from base step {base_step_name} to new step "
            f"{step_name} and place it after step {prev_step_name}")
        process = self.common.get_child_item(
            parent_name=project_literal_name,
            parent_type=item_type_projects,
            child_id_key=deployment_process_id_key,
            child_type=item_type_deployment_processes)
        steps = process[steps_key]
        step = copy.deepcopy(
            find_item(lst=steps, key=name_key, value=base_step_name))
        step[name_key] = step_name
        step[id_key] = ""
        for action in step[actions_key]:
            action[name_key] = step_name
            action[id_key] = ""
        if prev_step_name:
            prev_step_name_index = find_index(lst=steps,
                                              key=name_key,
                                              value=prev_step_name)
            if prev_step_name_index < 0:
                log_raise_value_error(
                    local_logger=self.logger,
                    err=f"previous step {prev_step_name} does not exist")
            steps.insert(prev_step_name_index + 1, step)
        else:
            steps.append(step)
        child_item = self.common.put_child_item_save(
            parent_name=project_literal_name,
            child_type=item_type_deployment_processes,
            payload=process)
        return child_item

    def delete_process_step(self, project_literal_name, step_name):
        if not project_literal_name or not step_name:
            raise ValueError(
                'Project literal name or step name must not be empty')
        self.common.log_info_print(
            local_logger=self.logger,
            msg=
            f"deleting step {step_name} of project {project_literal_name} in space {self.config.space_id}"
        )
        process = self.common.get_child_item(
            parent_name=project_literal_name,
            parent_type=item_type_projects,
            child_id_key=deployment_process_id_key,
            child_type=item_type_deployment_processes)
        steps = process[steps_key]
        step_index = find_index(lst=steps, key=name_key, value=step_name)
        if step_index < 0:
            log_raise_value_error(local_logger=self.logger,
                                  err=f"step {step_name} does not exit")
        steps.pop(step_index)
        child_item = self.common.put_child_item_save(
            parent_name=project_literal_name,
            child_type=item_type_deployment_processes,
            payload=process)
        return child_item
class Projects:
    def __init__(self, config, logger=None):
        self.logger = logger if logger else logging.getLogger(self.__class__.__name__)
        self.config = config
        self.common = Common(config=config)
        self.deployment_processes = DeploymentProcesses(config=config)

    def get_all_projects(self):
        return self.common.get_one_type_save(item_type=item_type_projects)

    def get_project(self, project_literal_name):
        self.common.log_info_print(local_logger=self.logger,
                                   msg=f"get project {project_literal_name} in space {self.config.space_id}...")
        project = self.common.get_single_item_by_name_or_id_save(item_type=item_type_projects,
                                                                 item_name=project_literal_name)
        return project

    def update_project(self, project_literal_name):
        self.common.update_single_item_save(item_type=item_type_projects, item_name=project_literal_name)

    def create_project_from_local_file(self, project_literal_name=None, local_project_name=None):
        return self.common.create_single_item_from_local_file(
            item_type=item_type_projects, item_name=project_literal_name, local_item_name=local_project_name)

    def clone_project(self, project_literal_name, base_project_name):
        self.common.log_info_print(
            local_logger=self.logger,
            msg=f"clone project from {base_project_name} to {project_literal_name} inside space {self.config.space_id}")
        new_project_dict = self.common.clone_single_item_from_remote_item(
            item_type=item_type_projects, item_name=project_literal_name, base_item_name=base_project_name)
        self.deployment_processes.clone_deployment_process(
            project_literal_name=project_literal_name, base_project_name=base_project_name)
        return new_project_dict

    def delete_projects(self, project_groups_comma_delimited, excluded_projects_comma_delimited=None):
        assert project_groups_comma_delimited, "comma-delimited project group names must not be empty"
        list_project_group_ids = []
        list_project_group_names = project_groups_comma_delimited.split(comma_sign)
        for project_group_name in list_project_group_names:
            project_group_id = self.common.get_single_item_by_name(item_type=item_type_project_groups,
                                                                   item_name=project_group_name).get(id_key)
            if not project_group_id:
                raise ValueError(f"{project_group_name} does not match any project group in {self.config.space_id}")
            list_project_group_ids.append(project_group_id)

        list_excluded_project_names = []
        if excluded_projects_comma_delimited:
            list_excluded_project_names = excluded_projects_comma_delimited.split(comma_sign)
            for excluded_project_name in list_excluded_project_names:
                excluded_project = self.common.get_single_item_by_name(item_type=item_type_projects,
                                                                       item_name=excluded_project_name)
                if not excluded_project:
                    raise ValueError(f"Excluded project {excluded_project_name} does not match any project in "
                                     f"{self.config.space_id}")

        if not self.config.overwrite and input(
                f"Are you sure to delete all projects inside project groups {list_project_group_names} in "
                f"{self.config.space_id}) excluding projects {list_excluded_project_names}? [Y/n]: ") != 'Y':
            return

        self.config.overwrite = True

        for project_group_id in list_project_group_ids:
            address = slash_sign.join([item_type_project_groups, project_group_id, item_type_projects])
            group_projects = self.common.request_octopus_item(address=address)
            list_group_projects = self.common.get_list_items_from_all_items(group_projects)
            for project in list_group_projects:
                if project.get(name_key) not in list_excluded_project_names:
                    self.common.log_info_print(local_logger=self.logger, msg=f"deleting {project.get(name_key)}...")
                    self.common.get_or_delete_single_item_by_id(
                        item_type=item_type_projects, item_id=project.get(id_key), action=operation_delete)

    def delete_project(self, project_literal_name):
        self.common.log_info_print(local_logger=self.logger,
                                   msg=f"delete project {project_literal_name} in space {self.config.space_id}")
        self.common.delete_single_item_by_name_or_id(item_type=item_type_projects, item_name=project_literal_name)

    @staticmethod
    def process_suffix(name, remove_suffix, add_suffix):
        if remove_suffix and name.endswith(remove_suffix):
            name = name[:-len(remove_suffix)]
        if add_suffix:
            name += add_suffix
        return name

    def project_update_variable_sets(self, project_literal_name, remove_suffix, add_suffix):
        if not project_literal_name:
            raise ValueError("project name must not be empty")
        if not add_suffix and not remove_suffix:
            raise ValueError("add_suffix and remove_suffix can not be both empty")
        self.common.log_info_print(
            local_logger=self.logger,
            msg=f"===== updating {self.config.space_id}'s project {project_literal_name}'s variable sets by the "
                f"following operation(s)")
        if remove_suffix:
            self.common.log_info_print(local_logger=self.logger, msg=f"removing a suffix {remove_suffix}")
        if add_suffix:
            self.common.log_info_print(local_logger=self.logger, msg=f"adding a suffix {add_suffix}")

        all_variable_sets = self.common.get_one_type_ignore_error(item_type=item_type_library_variable_sets)
        library_variable_sets = self.common.get_list_items_from_all_items(all_items=all_variable_sets)
        project = self.get_project(project_literal_name)
        project_variable_sets_ids = project.get(included_library_variable_set_ids_key, [])
        self.logger.info("original variable sets id:")
        self.logger.info(project_variable_sets_ids)
        mapped_ids = copy.deepcopy(project_variable_sets_ids)
        for index, variable_sets_id in enumerate(project_variable_sets_ids):
            variable_set = find_item(lst=library_variable_sets, key=id_key, value=variable_sets_id)
            variable_set_name = variable_set.get(name_key)
            variable_set_name_updated = self.process_suffix(
                name=variable_set_name, remove_suffix=remove_suffix, add_suffix=add_suffix)
            new_variable_set_in_library_variable_sets = \
                find_item(lst=library_variable_sets, key=name_key, value=variable_set_name_updated)
            if new_variable_set_in_library_variable_sets:
                self.logger.info(f"{new_variable_set_in_library_variable_sets.get(id_key)} found in variable sets")
                mapped_ids[index] = new_variable_set_in_library_variable_sets.get(id_key)
        self.logger.info("mapped variable sets id:")
        self.logger.info(mapped_ids)
        no_change = compare_lists(project_variable_sets_ids, mapped_ids)
        if no_change:
            self.logger.info(f"The variable sets have no change")
            return project
        project[included_library_variable_set_ids_key] = mapped_ids
        return self.common.put_single_item_save(item_type=item_type_projects, payload=project)
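
A short usage sketch for the Projects and DeploymentProcesses helpers above: clone a project from a base project (clone_project also clones the deployment process) and then clone a single step inside the new project's process. Import paths and all project/step names are placeholders.

# assumed import paths
from octopus_python_client.common import Config
from octopus_python_client.deployment_processes import DeploymentProcesses
from octopus_python_client.projects import Projects

config = Config()

# clone a whole project, including its deployment process
Projects(config=config).clone_project(
    project_literal_name="new_project",  # placeholder name for the clone
    base_project_name="base_project")    # placeholder existing project

# clone one step and insert it right after an existing step
DeploymentProcesses(config=config).clone_process_step(
    project_literal_name="new_project",
    step_name="deploy api copy",   # name for the cloned step (placeholder)
    base_step_name="deploy api",   # existing step to copy (placeholder)
    prev_step_name="deploy api")   # insert the clone after this step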