import uuid

# TaskType, create_job, get_job_definition, and PytorchSpecification are assumed
# to be provided by the surrounding codebase.


def handle_pytorch_experiment(experiment, spawner, response):
    # Create the master job record from the pod returned in the spawner response.
    master = response[TaskType.MASTER]
    job_uuid = master['pod']['metadata']['labels']['job_uuid']
    job_uuid = uuid.UUID(job_uuid)
    create_job(job_uuid=job_uuid,
               experiment=experiment,
               definition=get_job_definition(master),
               resources=spawner.spec.master_resources,
               node_selector=spawner.spec.master_node_selector,
               affinity=spawner.spec.master_affinity,
               tolerations=spawner.spec.master_tolerations)

    # Resolve per-worker scheduling settings from the specification.
    cluster, is_distributed = spawner.spec.cluster_def
    worker_resources = PytorchSpecification.get_worker_resources(
        environment=spawner.spec.environment,
        cluster=cluster,
        is_distributed=is_distributed)
    worker_node_selectors = PytorchSpecification.get_worker_node_selectors(
        environment=spawner.spec.environment,
        cluster=cluster,
        is_distributed=is_distributed)
    worker_affinities = PytorchSpecification.get_worker_affinities(
        environment=spawner.spec.environment,
        cluster=cluster,
        is_distributed=is_distributed)
    worker_tolerations = PytorchSpecification.get_worker_tolerations(
        environment=spawner.spec.environment,
        cluster=cluster,
        is_distributed=is_distributed)

    # Create a job record for every worker pod in the response.
    for i, worker in enumerate(response[TaskType.WORKER]):
        job_uuid = worker['pod']['metadata']['labels']['job_uuid']
        job_uuid = uuid.UUID(job_uuid)
        create_job(job_uuid=job_uuid,
                   experiment=experiment,
                   definition=get_job_definition(worker),
                   role=TaskType.WORKER,
                   sequence=i,
                   resources=worker_resources.get(i),
                   node_selector=worker_node_selectors.get(i),
                   affinity=worker_affinities.get(i),
                   tolerations=worker_tolerations.get(i))
def create_pytorch_experiment_jobs(experiment, spawner):
    # Create the master job; here the spawner already carries the job uuids,
    # so there is no k8s response to parse.
    master_job_uuid = spawner.job_uuids[TaskType.MASTER][0]
    create_job(job_uuid=master_job_uuid,
               experiment=experiment,
               k8s_replica=None,
               resources=spawner.spec.master_resources,
               node_selector=spawner.spec.master_node_selector,
               affinity=spawner.spec.master_affinity,
               tolerations=spawner.spec.master_tolerations)

    # Resolve per-worker scheduling settings from the specification.
    cluster, is_distributed = spawner.spec.cluster_def
    environment = spawner.spec.config.pytorch
    worker_resources = PytorchSpecification.get_worker_resources(
        environment=environment,
        cluster=cluster,
        is_distributed=is_distributed)
    worker_node_selectors = PytorchSpecification.get_worker_node_selectors(
        environment=environment,
        cluster=cluster,
        is_distributed=is_distributed)
    worker_affinities = PytorchSpecification.get_worker_affinities(
        environment=environment,
        cluster=cluster,
        is_distributed=is_distributed)
    worker_tolerations = PytorchSpecification.get_worker_tolerations(
        environment=environment,
        cluster=cluster,
        is_distributed=is_distributed)

    # Create a job record for every worker uuid the spawner knows about.
    for i, worker_job_uuid in enumerate(spawner.job_uuids[TaskType.WORKER]):
        create_job(job_uuid=worker_job_uuid,
                   experiment=experiment,
                   role=TaskType.WORKER,
                   sequence=i,
                   resources=worker_resources.get(i),
                   node_selector=worker_node_selectors.get(i),
                   affinity=worker_affinities.get(i),
                   tolerations=worker_tolerations.get(i))
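In both functions above, the PytorchSpecification.get_worker_* helpers are expected to return mappings keyed by worker index, so .get(i) yields None for any worker without an explicit override and create_job falls back to its defaults. A minimal sketch of that lookup pattern, using hypothetical resource values:

# Minimal sketch (not part of the codebase): per-worker overrides keyed by index.
worker_resources = {0: {'cpu': 2}, 2: {'cpu': 4}}  # hypothetical values
for i in range(3):
    # Worker 1 has no override, so .get(1) returns None.
    print(i, worker_resources.get(i))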
def tolerations(self):
    # Map task type -> task index -> tolerations for that replica.
    cluster, is_distributed = self.spec.cluster_def
    worker_tolerations = PytorchSpecification.get_worker_tolerations(
        environment=self.spec.config.pytorch,
        cluster=cluster,
        is_distributed=is_distributed)
    return {
        TaskType.MASTER: {0: self.spec.master_tolerations},
        TaskType.WORKER: worker_tolerations,
    }
def affinities(self):
    cluster, is_distributed = self.spec.cluster_def
    worker_affinities = PytorchSpecification.get_worker_affinities(
        environment=self.spec.environment,
        cluster=cluster,
        is_distributed=is_distributed)
    return {
        TaskType.MASTER: {0: self.spec.master_affinity},
        TaskType.WORKER: worker_affinities,
    }
def node_selectors(self):
    cluster, is_distributed = self.spec.cluster_def
    worker_node_selectors = PytorchSpecification.get_worker_node_selectors(
        environment=self.spec.environment,
        cluster=cluster,
        is_distributed=is_distributed)
    return {
        TaskType.MASTER: {0: self.spec.master_node_selector},
        TaskType.WORKER: worker_node_selectors,
    }
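Taken together, tolerations, affinities, and node_selectors return the same two-level shape: a dict keyed by task type whose values map a task index to that replica's setting. A hypothetical consumer might resolve worker i's settings as below, assuming spawner is an instance of the class defining these methods (if they are decorated as properties in the surrounding class, drop the call parentheses):

# Hypothetical usage sketch; spawner and the worker index i are assumed to exist.
node_selector = spawner.node_selectors()[TaskType.WORKER].get(i)
affinity = spawner.affinities()[TaskType.WORKER].get(i)
tolerations = spawner.tolerations()[TaskType.WORKER].get(i)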