Example #1
from hashlib import md5

def set_options(self, opts):
    """
    pickle dump the options.
    """
    p = obj_to_pickle(opts)
    self.options = p
    # fingerprint the pickled blob so changed options can be detected later
    self.options_hash = str(md5(p).hexdigest())
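Every example on this page relies on a helper named obj_to_pickle, whose definition is not shown. As a point of reference only, a minimal sketch of that helper (and of an inverse, obj_from_pickle, which the notes below assume) could be a thin wrapper around the standard pickle module:

import pickle

def obj_to_pickle(obj):
    """
    Serialize any python object to a pickle bytestring.
    (Sketch only; the real helper may pin a protocol or add error handling.)
    """
    return pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)

def obj_from_pickle(s):
    """
    Assumed inverse of obj_to_pickle.
    """
    return pickle.loads(s)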
Example #2
def stash_kw(self, job_id):
    """
    Stash the kwargs and return the redis key.
    """
    kw_key = "{}:{}".format(copy.copy(self.kw_prefix), job_id)
    kw = copy.copy(self.sous_chef_kwargs)
    rds.set(kw_key, obj_to_pickle(kw), ex=self.kw_ttl)
    return kw_key
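The read side of this stash is not shown here. A plausible counterpart, assuming the same rds client and the obj_from_pickle sketch above (the name unstash_kw is hypothetical), would be:

def unstash_kw(self, job_id):
    """
    Fetch and delete the kwargs previously stashed for a job.
    (Hypothetical counterpart to stash_kw.)
    """
    kw_key = "{}:{}".format(self.kw_prefix, job_id)
    raw = rds.get(kw_key)
    if raw is None:
        raise ValueError('No stashed kwargs found for job {}'.format(job_id))
    # remove the key once it has been consumed
    rds.delete(kw_key)
    return obj_from_pickle(raw)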
Example #3
def bulkload(data, **kw):
    """
    Bulk Load any data.
    """
    kw['src'] = kw.pop('q_src', kw.pop('src', None))
    if not kw['src']:
        raise ValueError('Missing src.')

    job_id = gen_uuid()

    # set queue defaults
    qkw = dict(
        queued=kw.pop('queued', True),
        job_id=job_id,
        timeout=kw.pop('q_timeout', 1000),
        serializer=kw.pop('q_serializer', 'json'),
        result_ttl=kw.pop('q_result_ttl', 60),
        kwargs_ttl=kw.pop('q_kwargs_ttl', 120),
        name=kw.pop('q_name', 'bulk'),
        max_workers=kw.pop('q_max_workers', MAX_WORKERS),
        job_key_fmt=kw.pop('q_job_key', 'rq:{src}:bulk:'.format(**kw)+"{}"),
        chunk_size=kw.pop('q_chunk_size', MAX_CHUNK_SIZE)
    )
    kw.update({'queued': qkw.get('queued', True)})

    # if this is not a queued job, just run ingest.
    if not qkw.get('queued'):
        return ingest.source(data, **kw)

    q = queues.get(qkw.pop('name', 'bulk'))

    # store the data + kwargs in redis temporarily.
    # this makes the enqueuing process much, much more
    # efficient by allowing us to only pass a single key
    # into the queue rather than a massive dump of data.
    # however, it also means that everything in data and kw
    # must be serializable by the chosen serializer (json by default).
    job_key = qkw['job_key_fmt'].format(job_id)
    job = {'data': data, 'kw': kw}

    if qkw['serializer'] == 'json':
        job = obj_to_json(job)

    elif qkw['serializer'] == 'pickle':
        job = obj_to_pickle(job)

    rds.set(job_key, job, ex=qkw['kwargs_ttl'])

    q.enqueue(bulkworker, job_id, **qkw)
    return job_id
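bulkworker, the function placed on the queue here, is not included among these examples. A rough sketch of the consuming side, assuming the queue forwards the remaining q* options to the worker and that obj_from_json / obj_from_pickle are the inverses of the serializers used above, might look like:

def bulkworker(job_id, **qkw):
    """
    Hypothetical consumer for bulkload: rebuild the redis key,
    pull the stashed payload, deserialize it, and hand it off to ingest.
    """
    job_key = qkw['job_key_fmt'].format(job_id)
    raw = rds.get(job_key)
    if raw is None:
        raise ValueError('Job data for {} is missing or expired.'.format(job_id))

    if qkw['serializer'] == 'json':
        job = obj_from_json(raw)    # assumed inverse of obj_to_json
    else:
        job = obj_from_pickle(raw)  # assumed inverse of obj_to_pickle

    return ingest.source(job['data'], **job['kw'])

Whatever the real worker does, the design point stands: only job_id crosses the queue, while the heavy payload waits in redis under kwargs_ttl.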
Example #4
    def run(self, data, **kw):

        # store the data + kwargs in redis temporarily.
        # this makes the enqueuing process much, much more
        # efficient by allowing us to only pass a single key
        # into the queue rather than a massive dump of data.
        # however, it also means that everything in data and kw
        # must be picklable.
        job_id = gen_uuid()
        kwargs_key = self.kwargs_key.format(job_id)
        kwargs = {'data': data, 'kw': kw}
        self.redis.set(kwargs_key, obj_to_pickle(kwargs), ex=self.kwargs_ttl)

        # send the job to the task queue
        self.q.enqueue(
            self.load_all, kwargs_key,
            job_id=job_id, timeout=self.timeout,
            result_ttl=self.result_ttl)

        return job_id
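load_all, the target enqueued here, is not shown either. A minimal sketch of the receiving end, assuming the obj_from_pickle helper sketched above and a hypothetical self.load method that does the actual work:

def load_all(self, kwargs_key):
    """
    Hypothetical worker entry point: recover the payload stashed by run()
    and process it.
    """
    raw = self.redis.get(kwargs_key)
    if raw is None:
        raise ValueError('Stashed kwargs expired: {}'.format(kwargs_key))
    kwargs = obj_from_pickle(raw)
    return self.load(kwargs['data'], **kwargs['kw'])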
Example #5
def set_options(self, opts):
    """
    pickle dump the options.
    """
    self.options = obj_to_pickle(opts)
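A matching reader for the pickled options is not part of this example. A plausible counterpart, again assuming the obj_from_pickle sketch above (get_options is a hypothetical name):

def get_options(self):
    """
    Hypothetical inverse of set_options: unpickle the stored options.
    """
    if not self.options:
        return {}
    return obj_from_pickle(self.options)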
Example #6
def serialize(self, obj):
    """
    The function for serializing the object
    returned from `get` to a string.
    """
    return obj_to_pickle(obj)
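A serializer hook like this is normally paired with a deserializer for reading values back out of the store. A minimal sketch of the inverse, assuming pickle on both sides:

def deserialize(self, s):
    """
    The function for deserializing a stored string back into
    the object originally returned from `get`.
    (Hypothetical counterpart to serialize.)
    """
    return obj_from_pickle(s)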