Example #1
    def __call__(self, f):
        # Token derived from the decorated method name; it is appended to the
        # call arguments so a worker-side re-entry can be detected.
        token = sha1(f.__name__).hexdigest()

        def f_job(*args, **kwargs):
            current_job = get_current_job()
            if args[-1] != token and not current_job:
                # Add the token as a last argument
                args += (token,)
                # Standard OpenERP call signature: model, cursor, uid
                osv_object = args[0]._name
                dbname = args[1].dbname
                uid = args[2]
                fname = f.__name__
                redis_conn = setup_redis_connection()
                # Pre-1.0 RQ queue; async=False makes enqueue() run the job
                # immediately instead of handing it to a worker
                q = Queue(self.queue, default_timeout=self.timeout,
                          connection=redis_conn, async=self.async)
                # Pass OpenERP server config to the worker
                conf_attrs = dict(config.options.items())
                job = q.enqueue(execute, conf_attrs, dbname, uid, osv_object,
                                fname, *args[3:])
                set_hash_job(job)
                log('Enqueued job (id:%s): [%s] pool(%s).%s%s'
                    % (job.id, dbname, osv_object, fname, args[2:]))
                return job
            # Already running inside a worker (or the token is present):
            # strip the token and fall through to the original method.
            if args[-1] == token:
                args = args[:-1]
            return f(*args, **kwargs)
        return f_job
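
For context, a minimal usage sketch follows. It assumes the class that owns this __call__ is exposed by oorq as a decorator named job, configured through the queue, timeout and async attributes read from self above; the decorator name, its import path and the OpenERP 6.x model base are assumptions, not shown in the snippet.

# Hypothetical usage sketch -- the `job` name, its import path and the
# OpenERP 6.x `osv` base class are assumptions, not confirmed above.
from osv import osv
from oorq.decorators import job

class res_partner(osv.osv):
    _name = 'res.partner'

    @job(queue='default', timeout=300, async=True)
    def heavy_recompute(self, cr, uid, ids, context=None):
        # Called normally, the wrapper only enqueues an RQ job and returns
        # it; this body runs later inside the worker process.
        return True

res_partner()  # old-style OpenERP model registration
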
Example #2
    def __call__(self, f):
        token = sha1(f.__name__).hexdigest()

        def f_job(*args, **kwargs):
            current_job = get_current_job()
            if args[-1] != token and not current_job:
                # Add the token as a last argument
                args += (token,)
                # Standard OpenERP call signature: model, cursor, uid, ids
                osv_object = args[0]._name
                dbname = args[1].dbname
                uid = args[2]
                ids = args[3]
                # Split jobs only make sense over a list of record ids
                if not isinstance(ids, (list, tuple)):
                    raise OORQNotIds()

                fname = f.__name__
                redis_conn = setup_redis_connection()
                q = Queue(self.queue, default_timeout=self.timeout,
                          connection=redis_conn, async=self.async)
                # Pass OpenERP server configuration to the worker
                conf_attrs = dict(config.options.items())
                jobs = []
                if self.isolated:
                    task = isolated_execute
                    mode = 'isolated'
                else:
                    task = execute
                    mode = 'not isolated'
                # args must be mutable so each chunk can replace the ids slot
                args = list(args)
                chunks = make_chunks(ids, n_chunks=self.n_chunks,
                                     size=self.chunk_size)
                for idx, chunk in enumerate(chunks):
                    args[3] = chunk
                    job = q.enqueue(task, conf_attrs, dbname, uid, osv_object,
                                    fname, *args[3:])
                    set_hash_job(job)
                    log('Enqueued split job (%s/%s) in %s mode (id:%s): [%s] '
                        'pool(%s).%s%s' % (
                            idx + 1, len(chunks), mode, job.id,
                            dbname, osv_object, fname, tuple(args[2:])
                        )
                    )
                    jobs.append(job.id)
                return jobs
            # Already running inside a worker (or the token is present):
            # strip the token and run the original method on its chunk.
            if args[-1] == token:
                args = args[:-1]
            return f(*args, **kwargs)
        return f_job
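
The second variant enqueues one job per chunk of ids, optionally through isolated_execute so each chunk runs on its own. A hypothetical usage sketch follows; the split_job name, its import path and the keyword arguments (n_chunks, chunk_size, isolated) simply mirror the self attributes read above and are assumptions.

# Hypothetical usage sketch -- `split_job`, its import path and the keyword
# names below mirror the self.* attributes used above; all are assumptions.
from osv import osv
from oorq.decorators import split_job

class account_invoice(osv.osv):
    _name = 'account.invoice'

    @split_job(n_chunks=4, isolated=True)
    def recompute_taxes(self, cr, uid, ids, context=None):
        # `ids` must be a list or tuple (otherwise OORQNotIds is raised);
        # each enqueued job receives only its own chunk of ids.
        return True

account_invoice()  # old-style OpenERP model registration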