def create_pytorch_experiment_jobs(experiment, spawner):
    """Create the Kubernetes job records for a PyTorch experiment.

    One job is created for the master task, then one per worker task. Worker
    resources/node-selectors/affinities/tolerations are resolved per worker
    index from the experiment's PyTorch environment spec.
    """
    # Master job: spec-level settings apply directly, no role/sequence needed.
    create_job(
        job_uuid=spawner.job_uuids[TaskType.MASTER][0],
        experiment=experiment,
        resources=spawner.spec.master_resources,
        node_selector=spawner.spec.master_node_selector,
        affinity=spawner.spec.master_affinity,
        tolerations=spawner.spec.master_tolerations,
    )

    cluster, is_distributed = spawner.spec.cluster_def
    # All four worker lookups take the same arguments; build them once.
    spec_kwargs = dict(
        environment=spawner.spec.config.pytorch,
        cluster=cluster,
        is_distributed=is_distributed,
    )
    worker_resources = PytorchSpecification.get_worker_resources(**spec_kwargs)
    worker_node_selectors = PytorchSpecification.get_worker_node_selectors(**spec_kwargs)
    worker_affinities = PytorchSpecification.get_worker_affinities(**spec_kwargs)
    worker_tolerations = PytorchSpecification.get_worker_tolerations(**spec_kwargs)

    # One job per worker; per-index settings may be absent, hence .get().
    for sequence, worker_job_uuid in enumerate(spawner.job_uuids[TaskType.WORKER]):
        create_job(
            job_uuid=worker_job_uuid,
            experiment=experiment,
            role=TaskType.WORKER,
            sequence=sequence,
            resources=worker_resources.get(sequence),
            node_selector=worker_node_selectors.get(sequence),
            affinity=worker_affinities.get(sequence),
            tolerations=worker_tolerations.get(sequence),
        )
def tolerations(self):
    """Return tolerations keyed by task type, then by task sequence.

    The master entry always has a single sequence (0); worker entries come
    from the PyTorch environment spec resolved per worker index.
    """
    cluster, is_distributed = self.spec.cluster_def
    worker_tolerations = PytorchSpecification.get_worker_tolerations(
        environment=self.spec.config.pytorch,
        cluster=cluster,
        is_distributed=is_distributed,
    )
    return {
        TaskType.MASTER: {0: self.spec.master_tolerations},
        TaskType.WORKER: worker_tolerations,
    }