def test_pmq():
    """Smoke-test the poor-man's-queue (pmq): insert a probe document,
    then defer a reader task so the result shows up in the pmq log.

    Returns:
        Response confirming the inserted key name.

    NOTE(review): relies on module-level `data_class`, `app`, and
    `Response`, and on a project-local `queue.filesystemqueue` package
    that shadows the stdlib `queue` module — confirm import order.
    """
    import time
    current_time = time.asctime()
    collection = 'pmq_test_collection'
    # Fixed _id + replace=True makes the probe idempotent across runs.
    keyname = data_class.put(
        collection,
        {'_id': 'pmq_test_keyname',
         'nested': {'time': {'info': current_time}}},
        replace=True)
    from queue import filesystemqueue
    from queue.consumers import read_textdb_func
    filesystemqueue.defer(read_textdb_func, collection,
                          {'_id': keyname}, app.config)
    return Response("Inserted keyname: %s! Check pmq log!!\n" % (keyname))
def replicate_collection(collection, metadata, replication_id,
                         destination_hostname, data_class):
    """Fan a collection's replication out into one deferred batch per
    key-distribution segment.

    Args:
        collection: name of the collection to replicate.
        metadata: dict with a 'key_distribution' list of boundary keys.
        replication_id: identifier tying the batches to one replication run.
        destination_hostname: host receiving the replicated data.
        data_class: data-access object; only its `.config` is forwarded.

    Returns:
        A human-readable confirmation string.
    """
    for idx, key_name in enumerate(metadata['key_distribution']):
        _range = {'prop': '_id'}
        # The first segment is open at the start; every later segment
        # starts at its boundary key.
        if idx != 0:
            _range['start'] = key_name
        # The last segment is open at the end (EAFP on the lookup).
        try:
            _range['stop'] = metadata['key_distribution'][idx + 1]
        except IndexError:
            pass
        deferred.defer(replicate_batch, data_class.config, collection,
                       metadata, replication_id, _range,
                       destination_hostname)
    return "Replicated!! %s %s %s" % (collection, destination_hostname,
                                      replication_id)
def init_replication(data_class, destination_hostname, replication_id=None):
    """Start replicating every user collection to another host.

    Args:
        data_class: data-access object exposing `.ns`, `.config`, and
            `get_collection_names()`.
        destination_hostname: host receiving the replicated data.
        replication_id: optional run identifier; when falsy, one is
            generated as '<YYYYmmddHHMMSS>.<namespace>'.

    Returns:
        The replication_id used for this run.
    """
    if not replication_id:
        replication_id = "%s.%s" % (
            time.strftime("%Y%m%d%H%M%S", time.localtime()), data_class.ns)
    # TODO(review): original left an open question here — if the
    # replication_id already exists in a replication collection, should
    # we resume from its stop point or raise? (Was a stray string
    # literal, not a docstring; made it a comment.)
    collections = data_class.get_collection_names()
    for collection in collections:
        # Internal bookkeeping collections are never replicated.
        if collection in ['tokenmaps', 'metadata']:
            continue
        deferred.defer(build_replication_metadata, data_class.config,
                       collection, destination_hostname, replication_id,
                       batch_size=1000)
    return replication_id
def replicate_collection(collection, metadata, replication_id,
                         destination_hostname, data_class):
    """Defer one replicate_batch task per segment of the collection's
    key distribution, returning a confirmation string."""
    boundaries = metadata['key_distribution']
    for position, boundary in enumerate(boundaries):
        batch_range = {'prop': '_id'}
        # Every segment after the first begins at its boundary key.
        if position > 0:
            batch_range['start'] = boundary
        # Every segment before the last stops at the next boundary.
        if position + 1 < len(boundaries):
            batch_range['stop'] = boundaries[position + 1]
        deferred.defer(replicate_batch, data_class.config, collection,
                       metadata, replication_id, batch_range,
                       destination_hostname)
    return "Replicated!! %s %s %s" % (collection, destination_hostname,
                                      replication_id)
def init_replication(data_class, destination_hostname, replication_id=None):
    """Kick off replication of every non-internal collection to
    destination_hostname and return the run's replication_id."""
    if not replication_id:
        stamp = time.strftime("%Y%m%d%H%M%S", time.localtime())
        replication_id = "%s.%s" % (stamp, data_class.ns)
    # Open question carried over from the original (was a stray string
    # literal): if this replication_id already exists in a replication
    # collection, should we resume from its stop point or raise?
    for name in data_class.get_collection_names():
        if name in ['tokenmaps', 'metadata']:
            continue  # bookkeeping collections stay local
        deferred.defer(build_replication_metadata, data_class.config,
                       name, destination_hostname, replication_id,
                       batch_size=1000)
    return replication_id
def test_pmq():
    """Probe the filesystem queue: write a marker document, defer a
    reader task for it, and report the key that was inserted."""
    import time
    probe_doc = {
        '_id': 'pmq_test_keyname',
        'nested': {'time': {'info': time.asctime()}},
    }
    collection = 'pmq_test_collection'
    keyname = data_class.put(collection, probe_doc, replace=True)
    from queue import filesystemqueue
    from queue.consumers import read_textdb_func
    filesystemqueue.defer(read_textdb_func, collection,
                          {'_id': keyname}, app.config)
    return Response("Inserted keyname: %s! Check pmq log!!\n" % (keyname))
def insert_work():
    """Enqueue a demo task on the filesystem queue.

    Returns:
        A plain-text Response confirming the insert.

    NOTE(review): `_name` presumably names (and possibly dedupes) the
    queued task — confirm against the filesystemqueue docs. The
    project-local `queue.filesystemqueue` package shadows the stdlib
    `queue` module.
    """
    from queue import filesystemqueue
    from queue.consumers import my_func
    filesystemqueue.defer(my_func, "hello from heroku", _name='myfunctest')
    return Response("Inserted stuff to pmq.\n", content_type="text/plain")
def insert_work():
    """Drop a single named demo task onto the filesystem queue and
    acknowledge it with a plain-text response."""
    from queue import filesystemqueue
    from queue.consumers import my_func
    payload = "hello from heroku"
    filesystemqueue.defer(my_func, payload, _name='myfunctest')
    return Response("Inserted stuff to pmq.\n", content_type="text/plain")