Example no. 1
0
def publish_output(feeders,messageid,output):
  """Publish a processing result onto the given MQ feeder.

  Args:
    feeders: a single queue connection; wrapped in a list for `rabbit.feed`.
    messageid: identifier of the originating message.
    output: the processed payload to publish.
  """
  feed = rabbit.feed([feeders])
  # Bug fix: the original used the bare names `id` (the builtin function,
  # silently used as a key object) and `out` (undefined -> NameError) as
  # dict keys; string keys were clearly intended.
  pack = {
    'id': messageid,
    'out': output
  }
  feed(pack)
Example no. 2
0
def publish_output(feeders,messageid,output):
	"""Publish a processing result onto the given MQ feeder.

	Args:
		feeders: a single queue connection; wrapped in a list for `rabbit.feed`.
		messageid: identifier of the originating message.
		output: the processed payload to publish.
	"""
	feed = rabbit.feed([feeders])
	# Bug fix: the original used the bare names `id` (the builtin function,
	# silently used as a key object) and `out` (undefined -> NameError) as
	# dict keys; string keys were clearly intended.
	pack = {
		'id': messageid,
		'out': output
	}
	feed(pack)
Example no. 3
0
Source MQ requeue task
@starcolon projects
"""

from pypipe import pipe as Pipe
from pypipe.operations import rabbit
import json

if __name__ == '__main__':
	# Open the source queue and every destination queue we requeue into.
	source = rabbit.create('localhost','pantip-x0')
	dest_names = ['pantip-x1','pantip-x2','pantip-x3','pantip-x00']
	destinations = [rabbit.create('localhost',name) for name in dest_names]

	# Re-deliver each message from the source into all destinations.
	print('Requeuing ...')
	for msg in rabbit.iter(source):
		rabbit.feed(destinations)(msg)

	# Release the connections used so far.
	rabbit.end_multiple(destinations)
	rabbit.end(source)

	# Drain the temporary MQ#00 back into MQ#0.
	temp_q = rabbit.create('localhost','pantip-x00')
	main_q = rabbit.create('localhost','pantip-x0')
	for msg in rabbit.iter(temp_q):
		rabbit.feed([main_q])(msg)

	# Release the remaining connections.
	rabbit.end_multiple([main_q,temp_q])

	print('[DONE] All input queues are recycled.')
Example no. 4
0
        'ruby {0}/core/tokenizer/tokenizer.rb'.format(REPO_DIR),
        ##'python3 {0}/core/textprocess.py'.format(REPO_DIR)
    ]
    workers = execute_background_services(services)

    # Delayed start
    time.sleep(1)

    # These are MQs we'll push preprocessed records to
    qs = ['pantip-x1', 'pantip-x2', 'pantip-x3', 'pantip-x0']
    mqs = [rabbit.create('localhost', q) for q in qs]

    # Prepare the processing pipeline (order matters)
    pipe = Pipe.new('preprocess', [])
    Pipe.push(pipe, preprocess.take)
    Pipe.push(pipe, rabbit.feed(mqs))
    Pipe.push(pipe, wordbag.feed(bag))
    Pipe.then(pipe, lambda out: print(colored('[DONE!]', 'cyan')))

    # Iterate through each record and processing
    couch.each_do(db, process_with(pipe), limit=40000)

    # Disconnect from the MQs
    [rabbit.end(mq) for mq in mqs]

    # Waiting for the background services
    # and kill `em
    terminate_background_services(workers)

    # Report the collected word bag
    print(colored('[Word bag]', 'green'))
Example no. 5
0
    'ruby {0}/core/tokenizer/tokenizer.rb'.format(REPO_DIR),
    ##'python3 {0}/core/textprocess.py'.format(REPO_DIR)
    ]
  workers  = execute_background_services(services)

  # Delayed start
  time.sleep(1)

  # These are MQs we'll push preprocessed records to
  qs = ['pantip-x1','pantip-x2','pantip-x3','pantip-x0']
  mqs = [rabbit.create('localhost',q) for q in qs]

  # Prepare the processing pipeline (order matters)
  pipe = Pipe.new('preprocess',[])
  Pipe.push(pipe,preprocess.take)
  Pipe.push(pipe,rabbit.feed(mqs))
  Pipe.push(pipe,wordbag.feed(bag))
  Pipe.then(pipe,lambda out: print(colored('[DONE!]','cyan')))

  # Iterate through each record and processing
  couch.each_do(db,process_with(pipe),limit=40000)

  # Disconnect from the MQs
  [rabbit.end(mq) for mq in mqs]

  # Waiting for the background services
  # and kill `em
  terminate_background_services(workers)

  # Report the collected word bag
  print(colored('[Word bag]','green'))
Example no. 6
0
from pypipe import pipe as Pipe
from pypipe.operations import rabbit
import json

if __name__ == '__main__':
    # Connect the source queue and the set of destination queues.
    src = rabbit.create('localhost', 'pantip-x0')
    dst = []
    for queue_name in ['pantip-x1', 'pantip-x2', 'pantip-x3', 'pantip-x00']:
        dst.append(rabbit.create('localhost', queue_name))

    # Push every source message into all destination queues.
    print('Requeuing ...')
    for record in rabbit.iter(src):
        rabbit.feed(dst)(record)

    # Close the connections used above.
    rabbit.end_multiple(dst)
    rabbit.end(src)

    # Move everything from the temporary MQ#00 back into MQ#0.
    temp_queue = rabbit.create('localhost', 'pantip-x00')
    main_queue = rabbit.create('localhost', 'pantip-x0')
    for record in rabbit.iter(temp_queue):
        rabbit.feed([main_queue])(record)

    # Close the remaining connections.
    rabbit.end_multiple([main_queue, temp_queue])

    print('[DONE] All input queues are recycled.')