Example #1
    def _start_job(self, job_details: dict):
        # Validate the deployment and fill in optional values
        K8sAzureExecutor._standardize_start_job_deployment(start_job_deployment=job_details)
        job_name = job_details['name']

        # Create the job folder and save job details
        os.makedirs(os.path.expanduser(f"{GlobalPaths.MARO_CLUSTERS}/{self.cluster_name}/jobs/{job_name}"),
                    exist_ok=True)
        save_job_details(
            cluster_name=self.cluster_name,
            job_name=job_name,
            job_details=job_details
        )

        # Set job id
        self._set_job_id(job_name=job_name)

        # Create folder for k8s configs
        os.makedirs(os.path.expanduser(
            f"{GlobalPaths.MARO_CLUSTERS}/{self.cluster_name}/jobs/{job_name}/k8s_configs"), exist_ok=True)

        # Create and save k8s config
        k8s_job_config = self._create_k8s_job_config(job_name=job_name)
        with open(os.path.expanduser(
                f"{GlobalPaths.MARO_CLUSTERS}/{self.cluster_name}/jobs/{job_name}/k8s_configs/jobs.yml"), "w") as fw:
            yaml.safe_dump(k8s_job_config, fw)

        # Apply k8s config
        command = f"kubectl apply -f " \
                  f"{GlobalPaths.MARO_CLUSTERS}/{self.cluster_name}/jobs/{job_name}/k8s_configs/jobs.yml"
        _ = SubProcess.run(command)
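
For context, the jobs.yml written above is a standard Kubernetes Job manifest that kubectl apply consumes. A minimal sketch of the kind of dictionary _create_k8s_job_config might return is shown below, assuming a plain batch/v1 Job; the image name, entrypoint, and restart policy are illustrative assumptions, not MARO's actual schema.

def _create_k8s_job_config_sketch(job_name: str) -> dict:
    # Hypothetical sketch only: field values below (image, command) are
    # assumptions for illustration and may differ from the real MARO config.
    return {
        "apiVersion": "batch/v1",
        "kind": "Job",
        "metadata": {"name": job_name},
        "spec": {
            "template": {
                "spec": {
                    "containers": [{
                        "name": job_name,
                        "image": "maro-runtime:latest",     # assumed image name
                        "command": ["python", "-m", "maro"]  # assumed entrypoint
                    }],
                    "restartPolicy": "Never"
                }
            }
        }
    }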
Example #2
    def _set_job_id(self, job_name: str):
        # Load details
        job_details = load_job_details(cluster_name=self.cluster_name,
                                       job_name=job_name)

        # Set job id
        job_details['id'] = generate_job_id()

        # Set component ids
        for component_details in job_details['components'].values():
            component_details['id'] = generate_component_id()

        # Save details
        save_job_details(cluster_name=self.cluster_name,
                         job_name=job_name,
                         job_details=job_details)
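
generate_job_id and generate_component_id are MARO helpers imported elsewhere; their exact format is not shown here. A minimal sketch of the idea, assuming UUID-based identifiers (an assumption for illustration), could look like this:

import uuid

def generate_job_id_sketch() -> str:
    # Assumed: a random, collision-resistant hex string.
    return uuid.uuid4().hex

def generate_component_id_sketch() -> str:
    # Same assumption for per-component identifiers.
    return uuid.uuid4().hex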
Example #3
    def _start_job(self, job_details: dict):
        logger.info(f"Start sending job ticket {job_details['name']}")

        # Load details
        cluster_details = self.cluster_details
        admin_username = cluster_details['user']['admin_username']
        master_public_ip_address = cluster_details['master']['public_ip_address']
        job_name = job_details['name']

        # Create the job folder on the master node
        sync_mkdir(
            remote_path=f"{GlobalPaths.MARO_CLUSTERS}/{self.cluster_name}/jobs/{job_name}",
            admin_username=admin_username,
            node_ip_address=master_public_ip_address)

        # Save job details
        save_job_details(cluster_name=self.cluster_name,
                         job_name=job_name,
                         job_details=job_details)

        # Set job id
        self._set_job_id(job_name=job_name)

        # Sync job details to master
        copy_files_to_node(
            local_path=f"{GlobalPaths.MARO_CLUSTERS}/{self.cluster_name}/jobs/{job_name}/details.yml",
            remote_dir=f"{GlobalPaths.MARO_CLUSTERS}/{self.cluster_name}/jobs/{job_name}",
            admin_username=admin_username,
            node_ip_address=master_public_ip_address)

        # Remotely start the job: create job details and a pending job ticket on the master
        self.grass_executor.remote_create_job_details(job_name=job_name)
        self.grass_executor.remote_create_pending_job_ticket(job_name=job_name)

        logger.info_green(f"Job ticket {job_details['name']} is sent")
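
sync_mkdir and copy_files_to_node are remote-node utilities whose implementations are not shown here. A minimal sketch of the underlying idea, assuming plain ssh/scp calls with key-based authentication already configured (the function names and behavior below are assumptions for illustration), could look like this:

import subprocess

def sync_mkdir_sketch(remote_path: str, admin_username: str, node_ip_address: str):
    # Create the directory on the remote node if it does not exist.
    subprocess.run(
        ["ssh", f"{admin_username}@{node_ip_address}", f"mkdir -p {remote_path}"],
        check=True
    )

def copy_files_to_node_sketch(local_path: str, remote_dir: str,
                              admin_username: str, node_ip_address: str):
    # Copy a local file into the remote directory.
    subprocess.run(
        ["scp", local_path, f"{admin_username}@{node_ip_address}:{remote_dir}"],
        check=True
    )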