Exemplo n.º 1
0
 def __init__(self, task_id, processor, workspace, docker_image=None):
     """Initialize the task with its processor, workspace, and helpers.

     Args:
         task_id: Identifier for this task.
         processor: Processor the task (and its helpers) run on.
         workspace: Workspace associated with the task.
         docker_image: Optional docker image the task executes in.
     """
     # Record the task identity and its execution resources
     self.task_id = task_id
     self.processor = processor
     self.workspace = workspace
     self.docker_image = docker_image

     # Storage/docker helpers perform their work through the same processor
     self.storage_helper = StorageHelper(processor)
     self.docker_helper = DockerHelper(processor)
Exemplo n.º 2
0
    def __init__(self, task_id, processor, workspace, docker_image=None):
        """Initialize task state, platform helpers, and the workspace layout.

        Args:
            task_id: Identifier for this task.
            processor: Processor the task (and its helpers) run on.
            workspace: Workspace associated with the task.
            docker_image: Optional docker image the task executes in.
        """
        # Record the task identity and its execution resources
        self.task_id = task_id
        self.processor = processor
        self.workspace = workspace
        self.docker_image = docker_image

        # Storage/docker helpers perform their work through the same processor
        self.storage_helper = StorageHelper(processor)
        self.docker_helper = DockerHelper(processor)

        # Create workspace directory structure
        self.__create_workspace()
Exemplo n.º 3
0
    def validate(self):
        """Validate the complete pipeline configuration before execution.

        Runs the sample-sheet, graph, and platform validators, then creates a
        helper processor to verify that all declared inputs exist on the
        platform, and finally checks that the task workspace directories can
        be created.

        Raises:
            SystemError: If any validation stage reports errors (details are
                in the preceding log output).
        """
        # Assume all validations are working
        has_errors = False

        # Validate the sample set (validate() is truthy when errors were found)
        sample_validator = SampleValidator(self.sample_data)
        has_errors = sample_validator.validate() or has_errors
        if not has_errors:
            logging.debug("Sample sheet validated!")

        # Validate the graph
        graph_validator = GraphValidator(self.graph, self.resource_kit, self.sample_data)
        has_errors = graph_validator.validate() or has_errors
        if not has_errors:
            logging.debug("Graph validated!")

        # Validate the platform
        self.platform.validate()

        # Stop the pipeline before launching if there are any errors
        if has_errors:
            raise SystemError("One or more errors have been encountered during validation. "
                              "See the above logs for more information")

        # Create helper processor and storage/docker helpers for checking input files
        self.helper_processor   = self.platform.get_helper_processor()
        self.helper_processor.create()

        self.storage_helper     = StorageHelper(self.helper_processor)
        self.docker_helper      = DockerHelper(self.helper_processor)

        # Validate all pipeline inputs can be found on platform
        input_validator = InputValidator(self.resource_kit, self.sample_data, self.storage_helper, self.docker_helper)
        has_errors = input_validator.validate() or has_errors

        # Stop the pipeline if there are any errors
        if has_errors:
            raise SystemError("One or more errors have been encountered during validation. "
                              "See the above logs for more information")

        # Validate that pipeline workspace can be created
        workspace = self.datastore.get_task_workspace()
        for dir_type, dir_path in workspace.get_workspace().items():
            self.storage_helper.mkdir(dir_path=str(dir_path), job_name="mkdir_%s" % dir_type, wait=True)

        # BUGFIX: corrected typo "CloudCounductor" -> "CloudConductor" in log message
        logging.info("CloudConductor run validated! Beginning pipeline execution.")
Exemplo n.º 4
0
    def __init__(self, task_id, processor, final_output_dir, final_tmp_dir,
                 docker_image=None):
        """Initialize task state, output locations, and platform helpers.

        Args:
            task_id: Identifier for this task.
            processor: Processor the task (and its helpers) run on.
            final_output_dir: Destination directory for final outputs.
            final_tmp_dir: Destination directory for temporary files.
            docker_image: Optional docker image the task executes in.
        """
        # Record the task identity and its execution resources
        self.task_id = task_id
        self.processor = processor
        self.docker_image = docker_image

        # Final destinations for pipeline outputs and temporary files
        self.final_output_dir = final_output_dir
        self.final_tmp_dir = final_tmp_dir

        # Storage/docker helpers perform their work through the same processor
        self.storage_helper = StorageHelper(processor)
        self.docker_helper = DockerHelper(processor)

        # Create workspace directory structure
        self.__create_workspace()
Exemplo n.º 5
0
    def validate(self):
        """Validate the pipeline configuration before execution.

        Runs the sample-sheet, graph, and platform validators, then verifies
        that all declared pipeline inputs can be located via the storage and
        docker helpers.

        Raises:
            SystemError: If any validation stage reports errors (details are
                in the preceding log output).
        """
        # Assume all validations are working
        has_errors = False

        # Validate the sample set (validate() is truthy when errors were found)
        sample_validator = SampleValidator(self.sample_data)
        has_errors = sample_validator.validate() or has_errors
        if not has_errors:
            logging.debug("Sample sheet validated!")

        # Validate the graph
        graph_validator = GraphValidator(self.graph, self.resource_kit,
                                         self.sample_data)
        has_errors = graph_validator.validate() or has_errors
        if not has_errors:
            logging.debug("Graph validated!")

        # Validate the platform
        self.platform.validate()

        # Stop the pipeline before launching if there are any errors
        if has_errors:
            raise SystemError(
                "One or more errors have been encountered during validation. "
                "See the above logs for more information")

        # Create storage/docker helpers for checking input files
        # (no processor attached — helpers run without one here)
        self.storage_helper = StorageHelper(None)
        self.docker_helper = DockerHelper(None)

        # Validate all pipeline inputs can be found on platform
        input_validator = InputValidator(self.resource_kit, self.sample_data,
                                         self.storage_helper,
                                         self.docker_helper)
        has_errors = input_validator.validate() or has_errors

        # Stop the pipeline if there are any errors
        if has_errors:
            raise SystemError(
                "One or more errors have been encountered during validation. "
                "See the above logs for more information")

        # BUGFIX: corrected typo "CloudCounductor" -> "CloudConductor" in log message
        logging.info(
            "CloudConductor run validated! Beginning pipeline execution.")
Exemplo n.º 6
0
# Ad-hoc integration script: run InputValidator against a live Google Cloud
# processor. Python 2 syntax (bare `print` statements).

# Build the resource kit from a known-good config (rk_good defined elsewhere)
rk = ResourceKit(rk_good)

# Load the sample sheet from a local test fixture
ss_file = "/home/alex/Desktop/projects/gap/test/ss.json"
ss = SampleSet(ss_file)

# Provision a standard GCP processor for the validation run
# (positional args presumably CPUs/memory/disk — confirm against the
# GoogleStandardProcessor signature)
proc = GoogleStandardProcessor(
    "test-proc-4",
    4,
    12,
    75,
    zone="us-east1-c",
    service_acct="*****@*****.**",
    boot_disk_size=75,
    disk_image="davelab-image-docker")

# Helpers and validator all share the same processor
sh = StorageHelper(proc)
dh = DockerHelper(proc)
iv = InputValidator(rk, ss, sh, dh)

try:
    # Spin up the instance and validate inputs against it
    proc.create()
    # NOTE(review): calling a double-underscore method from outside the class
    # is subject to name mangling — if __configure_SSH is defined inside the
    # processor class this raises AttributeError; verify this actually runs.
    proc.__configure_SSH()
    print "We validatin'"
    print iv.validate()
    print "We done validatin'"

finally:
    # Always tear the instance down, even if validation blew up
    proc.destroy(wait=False)

exit(0)