def run(self): """ Create a stateful container, and wait for it to be running. """ volumes = [ MountedDataset(dataset_id=self.dataset_id, mountpoint=self.mountpoint) ] d = create_container(self.reactor, self.control_service, self.node_uuid, self.name, self.image, volumes) return d
def run(self): """ Create a stateful container, and wait for it to be running. """ volumes = [ MountedDataset( dataset_id=self.dataset_id, mountpoint=self.mountpoint ) ] d = create_container( self.reactor, self.control_service, self.node_uuid, self.name, self.image, volumes ) return d
def parallel_setup(node):
    # Ensure the Docker image is cached by starting and stopping a
    # container.
    name = unicode(uuid4())
    container_setup = create_container(reactor, control_service, node.uuid,
                                       name, image)
    container_setup.addCallback(
        partial(delete_container, reactor, control_service))

    # Create the dataset
    dataset_id = uuid4()
    dataset_setup = create_dataset(reactor, control_service, node.uuid,
                                   dataset_id, volume_size)

    d = gather_deferreds((container_setup, dataset_setup))

    # Return only the dataset state
    d.addCallback(lambda results: results[1])
    return d

def start_container(dataset):
    volume = MountedDataset(dataset_id=dataset.dataset_id,
                            mountpoint=self.mountpoint)
    d = create_container(self.reactor, control_service=self.client,
                         node_uuid=node.uuid, name=unicode(uuid4()),
                         image=self.image, volumes=[volume],
                         timeout=self.timeout)

    # If container creation fails, delete dataset as well
    def delete_dataset(failure):
        d = self.client.delete_dataset(dataset.dataset_id)
        d.addErrback(write_failure)
        d.addBoth(lambda _ignore: failure)
        return d
    d.addErrback(delete_dataset)
    return d

def start_container(dataset):
    volume = MountedDataset(dataset_id=dataset.dataset_id,
                            mountpoint=self.mountpoint)
    d = create_container(
        self.reactor,
        control_service=self.client,
        node_uuid=node.uuid,
        name=unicode(uuid4()),
        image=self.image,
        volumes=[volume],
        timeout=self.timeout,
    )

    # If container creation fails, delete dataset as well
    def delete_dataset(failure):
        d = self.client.delete_dataset(dataset.dataset_id)
        d.addErrback(write_failure)
        d.addBoth(lambda _ignore: failure)
        return d
    d.addErrback(delete_dataset)
    return d

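The cleanup step in start_container is a general Twisted pattern: the errback runs the cleanup, logs any error raised by the cleanup itself, and then returns the original failure so the caller still sees that container creation failed. A minimal standalone sketch of that pattern, using only stock twisted.internet.defer and twisted.python.log; the names create_resource_with_cleanup, create_primary, and delete_secondary are hypothetical stand-ins, not part of the code above:

from twisted.internet import defer
from twisted.python import log


def create_resource_with_cleanup(create_primary, delete_secondary):
    """
    Run ``create_primary``; if it fails, run ``delete_secondary`` and
    then re-raise the original failure.
    """
    d = defer.maybeDeferred(create_primary)

    def cleanup(failure):
        cleanup_d = defer.maybeDeferred(delete_secondary)
        # Log cleanup errors without masking the original failure.
        cleanup_d.addErrback(log.err)
        # Whatever cleanup did, hand the original failure back out.
        cleanup_d.addBoth(lambda _ignore: failure)
        return cleanup_d

    d.addErrback(cleanup)
    return d

Because the errback returns the original failure at the end, any errback the caller adds later still fires with the creation error rather than with the cleanup's result.
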
def parallel_setup(node):
    # Ensure the Docker image is cached by starting and stopping a
    # container.
    name = unicode(uuid4())
    container_setup = create_container(
        reactor, control_service, node.uuid, name, image
    )
    container_setup.addCallback(
        partial(delete_container, reactor, control_service)
    )

    # Create the dataset
    dataset_id = uuid4()
    dataset_setup = create_dataset(
        reactor, control_service, node.uuid, dataset_id, volume_size
    )

    d = gather_deferreds((container_setup, dataset_setup))

    # Return only the dataset state
    d.addCallback(lambda results: results[1])
    return d
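
The gather_deferreds call in parallel_setup waits for both setup steps to finish, and the final callback keeps only the dataset result. The same shape can be sketched with stock twisted.internet.defer.gatherResults, assuming its plainer error reporting is acceptable; make_warmup and make_dataset are hypothetical callables standing in for the container warm-up and dataset creation above:

from twisted.internet import defer


def concurrent_setup(make_warmup, make_dataset):
    # Start both setup steps; gatherResults waits for both and fails
    # if either fails (consumeErrors avoids "Unhandled error" noise).
    d = defer.gatherResults(
        [defer.maybeDeferred(make_warmup), defer.maybeDeferred(make_dataset)],
        consumeErrors=True,
    )
    # Callers only care about the dataset state, so drop the warm-up result.
    d.addCallback(lambda results: results[1])
    return d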