Example #1
    def get(self, workflow_id, filename):
        # Fail fast: uuid.UUID raises a ValueError if workflow_id is not a
        # well-formed UUID
        uuid.UUID(workflow_id)
        workflow = Workflow.find_by_id(self.base_path, workflow_id)
        zstream = workflow.bag.package_as_zipstream(compression=None)

        self.set_status(200)
        self.set_header('Content-type', 'application/zip')
        self.set_header('Content-length',
                        str(util.calculate_zipsize(zstream.paths_to_write)))

        self.zstream_iter = iter(zstream)

        self.send_next_chunk()
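
send_next_chunk() is referenced but not defined in this snippet. Assuming the handler is a pre-6.0 Tornado RequestHandler (where flush() still accepted a completion callback), a minimal sketch of the missing method might look like this:

    def send_next_chunk(self):
        # Hypothetical sketch: write one chunk of the zipstream, then
        # reschedule ourselves once it has been flushed to the socket,
        # so the whole archive never sits in memory at once
        try:
            self.write(next(self.zstream_iter))
            # The callback form of flush() exists only in Tornado < 6.0
            self.flush(callback=self.send_next_chunk)
        except StopIteration:
            self.finish()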
Example #2
def upload_workflow(wf_id,
                    base_path,
                    endpoint,
                    user_config,
                    start_process=False,
                    start_output=False):
    logger.debug("Uploading workflow to postprocessing server")

    workflow = Workflow.find_by_id(base_path, wf_id)
    # NOTE: This is kind of nasty.... We temporarily write the user-supplied
    # configuration to the bag, update the tag-payload, create the zip, and
    # once everything is done, we restore the old version
    tmp_cfg = copy.deepcopy(workflow.config)
    tmp_cfg.set(user_config)
    tmp_cfg_path = workflow.path / 'config.yml'
    tmp_cfg.dump(filename=unicode(tmp_cfg_path),
                 sections=(user_config['plugins'] + ["plugins", "device"]))
    workflow.bag.add_tagfiles(unicode(tmp_cfg_path))

    # Create a zipstream from the workflow-bag
    zstream = workflow.bag.package_as_zipstream(compression=None)
    zsize = calculate_zipsize(zstream.paths_to_write)

    def zstream_wrapper():
        """ Wrapper around our zstream so we can emit a signal when all data
        has been streamed to the client.
        """
        transferred = 0
        progress = "0.00"
        for data in zstream:
            yield data
            transferred += len(data)
            # Only update the progress if it has advanced by at least 0.01,
            # so subscribers are not flooded with signals. The float() cast
            # avoids integer truncation under Python 2 division.
            new_progress = "{0:.2f}".format(float(transferred) / zsize)
            if new_progress != progress:
                progress = new_progress
                signals['submit:progressed'].send(
                    workflow,
                    progress=float(progress),
                    status="Uploading workflow...")

    # NOTE: This is necessary since requests makes a chunked upload when
    #       passed a plain generator, which is not supported by the WSGI
    #       protocol that receives it. Hence we wrap it inside of a
    #       GeneratorIO to make it appear as a file-like object with a
    #       known size.
    zstream_fp = GeneratorIO(zstream_wrapper(), zsize)
    logger.debug("Projected size for upload: {}".format(zsize))
    signals['submit:started'].send(workflow)
    resp = requests.post(endpoint,
                         data=zstream_fp,
                         headers={'Content-Type': 'application/zip'})
    # A requests.Response is falsy when its status code signals an error (>= 400)
    if not resp:
        error_msg = "Upload failed: {0}".format(resp.content)
        signals['submit:error'].send(workflow,
                                     message=error_msg,
                                     data=resp.content)
        logger.error(error_msg)
    else:
        wfid = resp.json()['id']
        if start_process:
            requests.post(endpoint + "/{0}/process".format(wfid))
        if start_output:
            requests.post(endpoint + "/{0}/output".format(wfid))
        signals['submit:completed'].send(workflow, remote_id=wfid)

    # Restore the original configuration (the temporary dump above overwrote
    # config.yml on disk)
    workflow._save_config()
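
The submit:* events sent above follow the blinker signal pattern that the send(sender, **kwargs) calls suggest. Assuming signals is a blinker Namespace, a receiver could subscribe like this (on_progress is a hypothetical handler, not part of the project):

def on_progress(sender, progress=None, status=None):
    # sender is the Workflow instance passed to send(); the keyword
    # arguments carry the payload from the emitting side
    print("{0}: {1:.0%}".format(status, progress))

signals['submit:progressed'].connect(on_progress)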
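
GeneratorIO itself is imported from elsewhere in the project. A minimal sketch of such a wrapper, assuming all requests needs is a buffered read() plus __len__ (so it can set a Content-Length header instead of falling back to chunked encoding), might look like this:

class GeneratorIO(object):
    """File-like view over a generator of byte chunks with a known size
    (a sketch, not the project's actual implementation)."""

    def __init__(self, generator, length):
        self._generator = generator
        self._buffer = b''
        self._length = length

    def __len__(self):
        return self._length

    def read(self, size=-1):
        if size < 0:
            # Drain the buffer and whatever the generator has left
            data = self._buffer + b''.join(self._generator)
            self._buffer = b''
            return data
        # Pull chunks until we can satisfy the requested size (or run dry)
        while len(self._buffer) < size:
            try:
                self._buffer += next(self._generator)
            except StopIteration:
                break
        data, self._buffer = self._buffer[:size], self._buffer[size:]
        return data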