示例#1
0
def get_workflow(workflow_id):
    """ Load a single workflow, preferring the in-memory cache.

    :param workflow_id:  database id of the workflow to load
    :return:             the cached or freshly loaded ``Workflow`` instance,
                         or ``None`` if no row with that id exists
    """
    # See if the workflow is among our cached instances
    if workflow_id in WorkflowCache:
        return WorkflowCache[workflow_id]
    # Lazy %-style args: the message is only formatted if DEBUG is enabled
    logger.debug("Loading workflow %s from database", workflow_id)
    with open_connection() as con:
        db_data = con.execute("SELECT * FROM workflow WHERE workflow.id=?",
                              (workflow_id, )).fetchone()
    if db_data is None:
        # `Logger.warn` is a deprecated alias; use `warning`
        logger.warning("Workflow %s was not found.", workflow_id)
        return None

    db_workflow = DbWorkflow(*db_data)

    # Try to load configuration from database
    if db_workflow.config is not None:
        config = json.loads(db_workflow.config)
    else:
        config = None
    workflow = Workflow(path=Path(app.config['base_path']) / db_workflow.name,
                        config=config,
                        step=db_workflow.step,
                        step_done=bool(db_workflow.step_done),
                        id=workflow_id)
    WorkflowCache[workflow_id] = workflow
    return workflow
示例#2
0
文件: web.py 项目: randyamiel/spreads
def create_workflow():
    """ Create a new workflow.

    Payload should be a JSON object. The only required attribute is 'name' for
    the desired workflow name. Optionally, 'config' can be set to a
    configuration object in the form "plugin_name: { setting: value, ...}".

    Returns the newly created workflow as a JSON object.
    """
    payload = json.loads(request.data)
    target_path = Path(app.config['base_path']) / unicode(payload['name'])

    # Start from the application defaults and layer any user-supplied
    # settings on top of them.
    config = app.config['default_config']
    overrides = payload.get('config', None)
    if overrides is not None:
        config = config.with_overlay(overrides)

    workflow = Workflow(config=config,
                        path=target_path,
                        step=payload.get('step', None),
                        step_done=payload.get('step_done', None))
    try:
        workflow.id = persistence.save_workflow(workflow)
    except persistence.ValidationError as e:
        return make_response(json.dumps(dict(errors=e.errors)), 400,
                             {'Content-Type': 'application/json'})
    return make_response(json.dumps(workflow), 200,
                         {'Content-Type': 'application/json'})
示例#3
0
def output(config):
    """ Produce output for the workflow stored at the configured path,
    drawing a progress bar while the step runs.
    """
    workflow = Workflow(config=config, path=config['path'].get())
    draw_progress(0)

    def _redraw(sender, **kwargs):
        # Mirror step progress onto the terminal progress bar
        draw_progress(kwargs['progress'])

    # weak=False keeps the locally-defined receiver from being
    # garbage-collected while the signal is still live.
    workflow.on_step_progressed.connect(_redraw, sender=workflow, weak=False)
    workflow.output()
示例#4
0
    def post(self):
        """ Finalize an uploaded workflow archive: extract it, announce the
        new workflow and return it as JSON.
        """
        # The upload is complete; release our handle on the temp file.
        self.fp.close()
        archive = zipfile.ZipFile(self.fname)
        try:
            # The first entry's directory component is the workflow name.
            wfname = os.path.dirname(archive.namelist()[0])
            archive.extractall(path=self.base_path)
        finally:
            archive.close()
        os.unlink(self.fname)

        workflow = Workflow(path=os.path.join(self.base_path, wfname))
        from spreads.workflow import on_created
        on_created.send(workflow, workflow=workflow)

        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(workflow, cls=util.CustomJSONEncoder))
示例#5
0
def create_workflow():
    """ Create a new workflow.

    Payload should be a JSON object. The only required attribute is 'name' for
    the desired workflow name. Optionally, 'config' can be set to a
    configuration object in the form "plugin_name: { setting: value, ...}".

    Returns the newly created workflow as a JSON object.
    """
    if request.content_type == 'application/zip':
        # A zipped workflow was uploaded: unpack it into the base path and
        # announce its creation.
        archive = zipfile.ZipFile(StringIO.StringIO(request.data))
        archive.extractall(path=app.config['base_path'])
        wfname = os.path.dirname(archive.filelist[0].filename)
        workflow = Workflow(path=os.path.join(app.config['base_path'], wfname))
        from spreads.workflow import on_created
        on_created.send(workflow, workflow=workflow)
    else:
        data = json.loads(request.data)

        # Overlay user-supplied settings on the application defaults.
        user_config = data.get('config')
        if user_config:
            config = app.config['default_config'].with_overlay(user_config)
        else:
            config = app.config['default_config']

        try:
            workflow = Workflow.create(location=app.config['base_path'],
                                       name=unicode(data['name']),
                                       config=config,
                                       metadata=data.get('metadata', {}))
        except ValidationError as e:
            return make_response(json.dumps(dict(errors=e.errors)), 400,
                                 {'Content-Type': 'application/json'})
    return make_response(json.dumps(workflow),
                         200, {'Content-Type': 'application/json'})
示例#6
0
def workflow(config):
    """ Build a workflow rooted at a fixed temporary path. """
    from spreads.workflow import Workflow
    return Workflow(path="/tmp/foobar", config=config)
示例#7
0
def workflow(config, tmpdir):
    """ Build a workflow rooted in the per-test temporary directory. """
    from spreads.workflow import Workflow
    return Workflow(path=unicode(tmpdir), config=config)
示例#8
0
def capture(config):
    """ Run an interactive capture session on the command line.

    Sets up a workflow with exactly two capture devices, then reads single
    keypresses in a loop: any of the configured capture keys triggers a
    shot, 'r' retakes the last shot and 'f' finishes the session.

    :param config:  configuration providing 'path' and
                    'core.capture_keys' settings
    :raises DeviceException: if not exactly two devices are connected or
                             a device lacks a target page
    """
    path = config['path'].get()
    workflow = Workflow(config=config, path=path)
    workflow.on_created.send(workflow=workflow)
    capture_keys = workflow.config['core']['capture_keys'].as_str_seq()

    # Some closures
    def refresh_stats():
        # Callback to print statistics
        if refresh_stats.start_time is not None:
            pages_per_hour = ((3600 /
                               (time.time() - refresh_stats.start_time)) *
                              workflow.pages_shot)
        else:
            # First invocation: start the clock and report a zero rate
            pages_per_hour = 0.0
            refresh_stats.start_time = time.time()
        # Leading '\r' rewrites the same terminal line on each update
        status = ("\rShot {0: >3} pages [{1: >4.0f}/h] ".format(
            unicode(workflow.pages_shot), pages_per_hour))
        sys.stdout.write(status)
        sys.stdout.flush()

    # State lives on the function object instead of a module global
    refresh_stats.start_time = None

    def trigger_loop():
        # Poll for single keypresses without waiting for Enter; the
        # mechanism differs between POSIX terminals and the Windows console.
        is_posix = sys.platform != 'win32'
        old_count = workflow.pages_shot
        if is_posix:
            import select
            # Remember terminal settings so we can restore them on exit
            old_settings = termios.tcgetattr(sys.stdin)
            # Zero timeout turns select() into a non-blocking readiness probe
            data_available = lambda: (select.select([sys.stdin], [], [], 0) ==
                                      ([sys.stdin], [], []))
            read_char = lambda: sys.stdin.read(1)
        else:
            data_available = msvcrt.kbhit
            read_char = msvcrt.getch

        try:
            if is_posix:
                # cbreak mode delivers keypresses immediately, unbuffered
                tty.setcbreak(sys.stdin.fileno())
            while True:
                time.sleep(0.01)
                # Pages may have been shot elsewhere (e.g. device trigger);
                # refresh the on-screen statistics if the count changed.
                if workflow.pages_shot != old_count:
                    old_count = workflow.pages_shot
                    refresh_stats()
                if not data_available():
                    continue
                char = read_char()
                if char in tuple(capture_keys) + ('r', ):
                    # 'r' means retake: replace the previous shot
                    workflow.capture(retake=(char == 'r'))
                    refresh_stats()
                elif char == 'f':
                    break
        finally:
            if is_posix:
                # Always restore the terminal, even on error or interrupt
                termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)

    if len(workflow.devices) != 2:
        raise DeviceException("Please connect and turn on two"
                              " pre-configured devices! ({0} were"
                              " found)".format(len(workflow.devices)))
    print(
        colorize("Found {0} devices!".format(len(workflow.devices)),
                 colorama.Fore.GREEN))
    if any(not x.target_page for x in workflow.devices):
        raise DeviceException("At least one of the devices has not been"
                              " properly configured, please re-run the"
                              " program with the \'configure\' option!")
    # Set up for capturing
    print("Setting up devices for capturing.")
    workflow.prepare_capture()

    print("({0}) capture | (r) retake last shot | (f) finish ".format(
        "/".join(capture_keys)))
    # Start trigger loop
    trigger_loop()

    workflow.finish_capture()