def __init__(self, interval, processes, topic, partition, **consumer):
    self._logger = logging.getLogger('SHIELD.SIMPLE.WORKER')
    self._logger.info('Initializing Simple Worker process...')
    self._interval = interval
    self._isalive = True
    self._processes = processes
    # .............................init Kafka Consumer
    self.Consumer = Consumer(**consumer)
    self.Consumer.assign(topic, [int(partition)])
    # .............................set up local staging area
    self._tmpdir = tempfile.mkdtemp(prefix='_SW.', dir=tempfile.gettempdir())
    self._logger.info('Use directory "{0}" as local staging area.'.format(self._tmpdir))
    # .............................define a process pool object
    self._pool = Pool(self._processes, init_child)
    self._logger.info('Master Collector will use {0} parallel processes.'.format(self._processes))
    signal.signal(signal.SIGUSR1, self.kill)
    self._logger.info('Initialization completed successfully!')

class TestConsumer(unittest.TestCase):

    def setUp(self):
        broker_address = cfg.BROKER_ADDRESS
        broker_exchange_name = cfg.EXCHANGE_NAME
        broker_queue_name = cfg.QUEUE_NAME
        broker_client_obj = BrokerClient(broker_address, broker_exchange_name, broker_queue_name)
        self._consumer = Consumer(broker_client_obj)

    def test_connect_to_broker(self):
        try:
            loop = asyncio.get_event_loop()
            loop.run_until_complete(self._consumer.connect_to_broker())
            self.assertTrue(True)
        except ConnectionError:
            self.assertTrue(False)

    def test_consume(self):
        try:
            loop = asyncio.get_event_loop()
            loop.run_until_complete(self._consumer.connect_to_broker())
            loop.run_until_complete(self._consumer.consume(pv_simulator.process_message))
            self.assertTrue(True)
        except ConnectionError:
            self.assertTrue(False)

    def test_disconnect_from_broker(self):
        try:
            loop = asyncio.get_event_loop()
            loop.run_until_complete(self._consumer.connect_to_broker())
            loop.run_until_complete(self._consumer.disconnect_from_broker())
            self.assertTrue(True)
        except ConnectionError:
            self.assertTrue(False)

def test_create_file_name(self):
    file_name = Consumer.create_file_name(self.consumer, "http://www.netflix.com")
    self.assertEqual(file_name, "netflix.com")
    file_name = Consumer.create_file_name(self.consumer, "http://www.python.org/doc/")
    self.assertEqual(file_name, "python.org")

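# A minimal sketch of the create_file_name helper the test above assumes: derive a
# file name from the URL's host with any leading "www." stripped. Only the method
# name and the expected outputs come from the test; the urllib-based implementation
# below is an assumption.
from urllib.parse import urlparse

def create_file_name(self, url):
    # "http://www.python.org/doc/" -> "www.python.org" -> "python.org"
    host = urlparse(url).netloc
    if host.startswith("www."):
        host = host[len("www."):]
    return host
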
def event_loop():
    """This is the main event loop where everything happens."""
    # this isn't being called during sys.exit :/
    atexit.register(config.STOP_EVENT.set)
    tailer_threads = []
    # initiate threads to tail from files
    for fdict in config.FILES:
        for tailer in path_tailers(fdict):
            tailer.start()
            tailer_threads.append(tailer)
    # initiate threads to consume logs pushed into queue
    consumer_threads = []
    for i in range(config.NUM_CONSUMERS):
        consumer = Consumer(config.LOG_QUEUE, config.STOP_EVENT,
                            poll_interval=config.POLL_INTERVAL,
                            name=CONSUMER_MAP.get(i % 4))
        consumer.start()
        consumer_threads.append(consumer)
    # this part continues to block even though all
    # queue items were processed :/
    # LOG_QUEUE.join()  # Commenting for now...
    # logging.debug('finished processing queue')
    while True:
        try:
            time.sleep(10)
        except KeyboardInterrupt:
            config.STOP_EVENT.set()
            print()
            for consumer in consumer_threads:
                logging.info('{0.name} sent {0.sent_records} records!'.format(consumer))
            sys.exit('shutting down streamer...')

def publish_and_get_response(self, message, payload, timeout_secs=5):
    sent_correlation = str(uuid.uuid1())

    consumer_ready = Event()

    def on_consumer_ready():
        consumer_ready.set()

    consumer = Consumer(self.broker_url, self._queue_prefix, self.exchange)
    consumer.on_connection_setup_finished = on_consumer_ready

    response = {}
    response_received = Event()

    def response_callback(response_payload, **kwargs):
        if not sent_correlation == kwargs['properties'].correlation_id:
            return
        response['payload'] = response_payload
        response_received.set()

    def wait_for_response():
        consumer.subscribe(message + '.answered', response_callback, transient_queue=True)
        consumer.start()

    thread = Thread(target=wait_for_response)
    thread.daemon = True
    thread.start()

    consumer_ready.wait(2)
    self._publish(message, payload, correlation_id=sent_correlation)

    timed_out = not response_received.wait(timeout_secs)
    if timed_out:
        raise MessageBusTimeoutError()

    consumer.stop()
    return response.get('payload')

def main():
    config = {}
    # the with-block closes the file, so no explicit close() is needed
    with open('./config.txt', 'r') as file:
        for line in file:
            line = line.rstrip()
            key, val = line.split('=')
            config[key] = val

    captchaList = []
    condition = threading.Condition()
    producer = Producer(sitekey=config['sitekey'],
                        twocaptchaKey=config['twocaptchaKey'],
                        condition=condition,
                        producerNo=int(config['producerThread']),
                        captchaList=captchaList)
    producer.start()

    for i in range(int(config['consumerThread'])):
        consumer = Consumer(url=config['url'],
                            productNo=config['productNo'],
                            raffleNo=config['raffleNo'],
                            areCode=config['phoneAreaCode'],
                            domain=config['catchAllDomain'],
                            prefix=config['catchAllPrefix'],
                            condition=condition,
                            captchaList=captchaList)
        consumer.start()

def main():
    # create the server socket object
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # get the local host name
    host = socket.gethostname()
    port = 9999
    # bind the port
    server_socket.bind((host, port))
    # set the maximum number of queued connections
    server_socket.listen(5)
    # work queue for the thread pool
    work_queue = queue.Queue()
    # message queue
    message_queue = queue.Queue()
    # create a thread pool with 4 threads
    thread_pool = ThreadPoolManger(4, work_queue)
    # start the producer process
    p = Producer()
    p.start()
    # start the consumer process
    c = Consumer()
    c.start()
    while True:
        # accept a client connection
        client_socket, addr = server_socket.accept()
        t = threading.Thread(target=tcp_link,
                             args=(client_socket, addr, thread_pool, message_queue))
        t.start()

def main():
    # datetime can be removed
    retrieve = Consumer(CONNECTION_URL, EXCHANGE, HOST, ROUTING_KEY, EXCHANGE_TYPE)
    try:
        retrieve.run()
    except KeyboardInterrupt:
        retrieve.stop()

def run(self): self.logger.info("Running Kafka Consumer") consumer_thread = Consumer(self.kafka_broker, self.topic) consumer_thread.register_kafka_listener()
def __init__(self):
    self.spark = SparkSession.builder.appName('kmeans').getOrCreate()
    self.conf = SparkConf().setMaster('local').setAppName('kmeans')
    self.sc = SparkContext(conf=self.conf)
    self.consumer = Consumer('bus', 'localhost')
    self.stream = self.consumer.get_stream()
    self.kafka_stream = ConsumerKafka('bus', 'localhost')

def main(flag, host):
    message = Consumer(flag, host)
    message = message.pull_message()
    print(message.callback())
    res = es.index(index="bus", doc_type='json', body=message)
    # filter(res)
    # print(res)

def test_consumer_no_commits_returns_unix_start(db):
    conn, cur = db
    consumer = Consumer(conn, "foo", "offsets")
    cur.execute(consumer.create_query)
    conn.commit()
    offset = consumer.get_offset()
    assert offset == datetime(1970, 1, 1)

def __init__(self, broker_url='amqp://localhost', queue_prefix=None, exchange='messagebus'):
    self.broker_url = broker_url
    self.consumer = Consumer(self.broker_url, queue_prefix, exchange)
    self._queue_prefix = queue_prefix
    self.exchange = exchange

def __init__(self):
    self.consumer = Consumer('bus', 'localhost')
    self.stream = self.consumer.get_stream()
    self.kafka_stream = ConsumerKafka('bus', 'localhost')
    self.cleaned_stream = self.stream.map(self.clean_up)
    self.conf = SparkConf().setMaster('local').setAppName('linear_regression')
    self.sc = SparkContext(conf=self.conf)
    self.spark = SparkSession(self.sc)

def test_consumer_creates_table_if_not_exists(db):
    conn, _ = db
    consumer = Consumer(conn, "foo", "offsets")
    offset = consumer.get_offset()
    assert offset == datetime(1970, 1, 1)
    offset = consumer.get_offset()
    assert offset == datetime(1970, 1, 1)

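# A minimal sketch of the offset-tracking Consumer the two tests above assume: it is
# built from a DB connection, a consumer name, and an offsets table name, exposes the
# table-creation SQL as `create_query`, and reports the Unix epoch when no offset has
# been committed yet. The column names, SQL, and psycopg2-style cursor usage are
# assumptions.
from datetime import datetime

class Consumer:
    def __init__(self, conn, name, table):
        self.conn = conn
        self.name = name
        self.table = table
        self.create_query = (
            "CREATE TABLE IF NOT EXISTS {0} "
            "(name TEXT PRIMARY KEY, committed_at TIMESTAMP)".format(table)
        )

    def get_offset(self):
        # Create the table on first use, then look up the last committed offset.
        with self.conn.cursor() as cur:
            cur.execute(self.create_query)
            cur.execute(
                "SELECT committed_at FROM {0} WHERE name = %s".format(self.table),
                (self.name,),
            )
            row = cur.fetchone()
        self.conn.commit()
        return row[0] if row else datetime(1970, 1, 1)
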
def wake_consumer(self):
    """Wake up the consumers."""
    self.consumer_poison_list = []
    self.consumer_list = []
    for _ in range(self.consumer_count):
        tmp_poison = Event()
        consumer = Consumer(queue=self.queue, poison=tmp_poison, consume=self.consume)
        consumer.start()
        self.consumer_poison_list.append(tmp_poison)
        self.consumer_list.append(consumer)

def test_consumer_invalid_data(db_session, invalid_data):
    """Test that an invalid data schema does not load anything into the database."""
    c = Consumer()
    task_id = c.s(**invalid_data).apply_async()
    all_users = db_session.query(User).all()
    assert isinstance(task_id, celery.result.EagerResult)
    assert len(all_users) == 0

def test_consumer_valid_data(db_session, valid_data):
    """Test the consumer class with valid data and verify the result by querying the database."""
    c = Consumer()
    task_id = c.s(**valid_data).apply_async()
    all_users = db_session.query(User).all()
    assert len(all_users) == 1
    assert all_users[0].email == valid_data['email']

def test_consumer_duplicate_email(db_session, duplicate_email):
    """Test that a unique-constraint violation on email is handled properly."""
    c = Consumer()
    for data in duplicate_email:
        task_id = c.s(**data).apply_async()
    all_users = db_session.query(User).all()
    assert len(all_users) == len(duplicate_email) - 1

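# A minimal sketch of the Celery-task Consumer the three tests above assume: a task
# that validates the incoming payload, inserts a User row, and swallows duplicate
# email addresses instead of failing. `Session` and `User` stand in for the
# application's SQLAlchemy session factory and model; the schema check and the
# IntegrityError handling are assumptions.
import celery
from sqlalchemy.exc import IntegrityError

class Consumer(celery.Task):
    name = 'consumer.load_user'

    def run(self, **payload):
        if 'email' not in payload:
            # Invalid schema: write nothing to the database.
            return None
        session = Session()
        try:
            session.add(User(**payload))
            session.commit()
        except IntegrityError:
            # Duplicate email: roll back and keep only the first row.
            session.rollback()
        return payload.get('email')
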
def test_parse_links_from_html(self):
    with open("netflix.com.html", "r") as file:
        html_1 = file.read()
    html_2 = ""

    result_1 = Consumer.parse_links_from_html(self.consumer, html_1)
    self.assertEqual(len(result_1), 1, "Equal lengths in first example")
    self.assertEqual(result_1[0].get("href"), "http://ir.netflix.com/",
                     "Equal contents in first example")

    result_2 = Consumer.parse_links_from_html(self.consumer, html_2)
    self.assertEqual(len(result_2), 0, "Equal lengths in second example")
    self.assertEqual(result_2, [], "Equal contents in second example")

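# A minimal sketch of the parse_links_from_html helper this test assumes: return the
# anchor tags that carry an href attribute, so callers can read link.get("href"), and
# an empty list for empty input. The BeautifulSoup-based implementation is an
# assumption.
from bs4 import BeautifulSoup

def parse_links_from_html(self, html):
    if not html:
        return []
    soup = BeautifulSoup(html, "html.parser")
    return soup.find_all("a", href=True)
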
def consume_data(topic, broker, group_id):
    consumer = Consumer(brokers=[broker], group_id=group_id)
    consumer.subscribe([topic, "thing-event"])
    for msg in consumer:
        try:
            value = msg.value
            print(value)
        except Exception as err:
            print(err)
            continue

def start():
    logging.basicConfig(level=logging.INFO)
    logging.info("Started multiprocessing...")
    product_queue = Queue()
    task_queue = Queue()
    producer = Producer(task_queue)
    consumer = Consumer(task_queue, product_queue)
    producer.start()
    consumer.start()
    while True:
        product = product_queue.get()
        print("Received: " + str(product))

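# A minimal sketch of the Producer and Consumer processes start() assumes: the
# producer puts work items on task_queue and the consumer turns them into products on
# product_queue. The use of multiprocessing, the item format, and the absence of a
# stop sentinel are assumptions.
from multiprocessing import Process, Queue

class Producer(Process):
    def __init__(self, task_queue):
        super().__init__()
        self.task_queue = task_queue

    def run(self):
        # Emit a fixed batch of tasks for illustration.
        for i in range(10):
            self.task_queue.put(i)

class Consumer(Process):
    def __init__(self, task_queue, product_queue):
        super().__init__()
        self.task_queue = task_queue
        self.product_queue = product_queue

    def run(self):
        while True:
            task = self.task_queue.get()
            self.product_queue.put(task * 2)
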
def __init__(self, config, queue_name=None, routing_key=None):
    if 'ckan.api_key' not in config:
        log.warn("No CKAN API key has been specified")
    base_location = self.base_location
    if 'ckan.site_url' in config:
        base_location = urljoin(config.get('ckan.site_url'), "api")
    CkanClient.__init__(self, base_location=base_location,
                        api_key=config.get('ckan.api_key'))
    Consumer.__init__(self, config, queue_name=queue_name, routing_key=routing_key)

def sources(self, cb):
    for foreign in self.filtered_sources():
        try:
            consumer = Consumer(self.translate(foreign), environment=self.environment)
            if self.avoid_flv and '.flv' in consumer.asset_url():
                raise Exception('Avoiding .flv')
            cb(consumer)
            break
        except Exception as e:
            # util.print_exception(e)
            continue

def createAndRunConsumer(self, doc):
    triggerFQN = doc['_id']
    # Create a representation for this trigger, even if it is disabled.
    # This allows it to appear in /health as well as allow it to be deleted.
    # Creating this object is lightweight and does not initialize any connections.
    consumer = Consumer(triggerFQN, doc)
    self.consumers.addConsumerForTrigger(triggerFQN, consumer)
    if self.__isTriggerDocActive(doc):
        logging.info('[{}] Trigger was determined to be active, starting...'.format(triggerFQN))
        consumer.start()
    else:
        logging.info('[{}] Trigger was determined to be disabled, not starting...'.format(triggerFQN))

def createAndRunConsumer(triggerFQN, params, record=True):
    if app.config['TESTING']:
        logging.debug("Just testing")
    else:
        # generate a random uuid for new triggers
        if 'uuid' not in params:
            params['uuid'] = str(uuid.uuid4())
        consumer = Consumer(triggerFQN, params)
        consumer.start()
        consumers.addConsumerForTrigger(triggerFQN, consumer)
        if record:
            database.recordTrigger(triggerFQN, params)

def __init__(self, opc):
    ConfigOp.__init__(self, 'mqtt', CONFIG_NAME)
    Consumer.__init__(self)
    self.add_command(self.__get_info, GET)
    self.__client = None
    self.__pub_queue = queue.Queue(10)
    self.__connected = False
    self.__last_ping_tm = 0
    self.__valid_config = False
    self.__with_auth = False
    self.__opc = opc
    self.__topic = set()
    self.__sub_cb = None
    self.add_command(self.__reconnect, SET, 'reconnect')

class HedwigWorker(multiprocessing.Process):

    def __init__(self, settings, *args, **kwargs):
        self.hedwig_consumer = Consumer(settings)
        super(HedwigWorker, self).__init__(*args, **kwargs)

    def run(self):
        LOGGER.info("Hedwig consumer: starting")
        self.hedwig_consumer.consume()
        LOGGER.info("Hedwig consumer: stopped")

    def shutdown(self):
        LOGGER.info("Hedwig consumer: shutting down")
        self.hedwig_consumer.shutdown()
        LOGGER.info("Hedwig consumer: shutdown complete")

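# A short usage sketch for the worker above: run it as a child process and stop it
# from the parent. The empty settings dict is a placeholder; note that shutdown()
# would act on the parent's copy of the consumer, so the child is stopped here with
# terminate() instead.
if __name__ == '__main__':
    worker = HedwigWorker(settings={})
    worker.start()
    try:
        worker.join()
    except KeyboardInterrupt:
        worker.terminate()
        worker.join(timeout=5)
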
def __init__(self, opc, name="ybb", rxbuf=1024): Consumer.__init__(self) self._ble = BLE() self._ble.active(True) self._ble.irq(self._irq) self._ble.config(gap_name = name[:16]) ((self._tx_handle, self._rx_handle),) = self._ble.gatts_register_services((_UART_SERVICE,)) # Increase the size of the rx buffer and enable append mode. self._ble.gatts_set_buffer(self._rx_handle, rxbuf, True) self._connections = {} self._rx_buffer = bytearray() self._opc = opc # Optionally add services=[_UART_UUID], but this is likely to make the payload too large. self._payload = advertising_payload(name=name[:16], appearance=_ADV_APPEARANCE_GENERIC_COMPUTER) self._advertise()
def run():
    consumer = Consumer('amqp://*****:*****@127.0.0.1:5672/%2F',
                        queue='text', routing_key='example.text')
    consumer.run()
    print('ran')
    # kk = yield w.fetch("http://www.abercrombie.cn/on/demandware.store/Sites-abercrombie_cn-Site/en_CN/Product-Variation?pid=anf-87741&dwvar_anf-87741_4MPrmry=4080&dwvar_anf-87741_color=01&Quantity=1&format=ajax&_=1431591378963")
    kk = yield w.fetch(
        'http://www.abercrombie.cn/en_CN/mens-shorts-twill-classic-fit/aandf-classic-fit-shorts/anf-87743.html?dwvar_anf-87743_color=01#ict=ICT%3ASUM15%3AM%3AHT%3A1%3AT%3ASEA%3AShorts&start=1'
    )
    # kk = yield w.fetch('http://127.0.0.1:8000', method="POST", headers={'User-Agent': 'F**K'})
    Handler.carters = Carters()
    Handler.sixpm = SixPM()
    application.listen(WEBSERVER_PORT)

def run(numOfConsumer, conCoeffA, conCoeffB, numOfPro, proCoeffAList, proCoeffBList, maxSlot):
    consumerList = []
    for i in range(numOfConsumer):
        valueDict = {}
        valueDict[1] = randint(10, 60)
        valueDict[2] = valueDict[1] + randint(10, 60)
        valueDict[3] = valueDict[2] + randint(10, 60)
        valueDict[4] = valueDict[3] + randint(10, 60)
        valueDict[5] = valueDict[4] + randint(10, 60)
        # print(valueDict)
        consumer = Consumer(i, valueDict)
        consumer.set_bidList(conCoeffA, conCoeffB)
        consumerList.append(consumer)

    propertyDict = {1: [], 2: [], 3: [], 4: [], 5: []}
    numOfPro = 20  # NOTE: overrides the numOfPro argument
    for n in range(numOfPro):
        star = randint(1, 5)
        if star == 1:
            cost = randint(10, 40)
        elif star == 2:
            cost = randint(30, 100)
        elif star == 3:
            cost = randint(50, 160)
        elif star == 4:
            cost = randint(70, 220)
        elif star == 5:
            cost = randint(90, 280)
        # print(cost)
        pro = Property(n, star, cost, randint(1, 5))
        pro.set_priceList(proCoeffAList, proCoeffBList, maxSlot)
        propertyDict[star].append(pro)

    priceline = NYOP()
    priceline.compute(consumerList, propertyDict)
    # print("priceline's profit", priceline.profit)
    aveConU = sum(c.utility for c in consumerList) / numOfConsumer
    # print("average consumer utility", aveConU)
    sumProProfit = 0
    for k in propertyDict.keys():
        sumProProfit += sum(p.profit for p in propertyDict[k])
    aveProProfit = sumProProfit / numOfPro
    # print("average property profit", aveProProfit)
    return (priceline.profit, aveConU, aveProProfit)

def __init__(self):
    super().__init__()
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    # buffer for messages that have been sent but not yet processed
    self.buffer = queue.Queue()
    # threads for processing the data
    self.sentThread = QThread()
    self.sentObj = Producer(self.sem, self.buffer)
    self.sentObj.moveToThread(self.sentThread)
    self.n = 1
    self.getThreadsPool = [QThread()]
    self.getObjs = [Consumer(self.sem, self.sem_forBuffer, 1, self.buffer)]
    self.getObjs[0].moveToThread(self.getThreadsPool[0])
    self.ui.sendBt.clicked.connect(self.sentObj.run)
    self.ui.sendBt.clicked.connect(self.check)
    self.sentObj.message_sented.connect(self.getObjs[0].run)
    self.sentObj.message_sented.connect(self.addSendedMessage)
    self.getObjs[0].message_getted.connect(self.addGettedMessage)
    self.ui.okBt.clicked.connect(self.change_threadNumber)
    self.sem_forBuffer.release()
    self.sentThread.start()
    self.getThreadsPool[0].start()

def main():
    rospy.init_node('iot_consumer', anonymous=True, disable_signals=True)
    params = wrap_namespace(rospy.get_param('~'))

    control_pub = rospy.Publisher(params.topic.control_cmd, Control, queue_size=10)
    mission_pub = rospy.Publisher(params.topic.mission_cmd, Mission, queue_size=10)
    cmdexe_pub = rospy.Publisher(params.topic.navi_cmdexe, String, queue_size=10)

    naviCommand = NaviCommand(control_pub, mission_pub, cmdexe_pub)
    dispatcher = Dispatcher(naviCommand)
    consumer = Consumer(dispatcher.dispatch_cb)

    def handler(signum, frame):
        rospy.loginfo('shutting down...')
        consumer.shutdown()

    signal.signal(signal.SIGINT, handler)
    Container(consumer).run()
    rospy.signal_shutdown('finish')

def do_GET(self):
    if self.path == "/favicon.ico":
        self.favicon()
    elif "/metrics" in self.path:
        q = parse_qs(urlsplit(self.path).query)
        target = q.get("target")
        if target is None:
            self.target_missing()
        else:
            self.send_response(HTTPStatus.OK)
            self.send_header(contentType, typePrometheus)
            self.end_headers()
            consumer = Consumer(target)
            success = 0
            if consumer.load():
                success = 1
                sn = consumer.serial_number()
                subject = consumer.subject()
                san = consumer.alternative_name()
                not_before = consumer.not_before()
                not_after = consumer.not_after()
                self.metric_write(
                    h_ssl_tls_version_info, t_ssl_tls_version_info,
                    ssl_tls_version_info % consumer.version())
                self.metric_write(
                    none, none,
                    ssl_tls_version_info % consumer.protocol())
                self.metric_write(
                    h_ssl_cert_not_after, t_ssl_cert_not_after,
                    ssl_cert_not_after % (subject, san, sn, not_after))
                self.metric_write(
                    h_ssl_cert_not_before, t_ssl_cert_not_before,
                    ssl_cert_not_before % (subject, san, sn, not_before))
            # if load
            self.metric_write(h_ssl_tls_connect_success, t_ssl_tls_connect_success,
                              ssl_tls_connect_success % success)
            self.wfile.write(NL)
        # if target is None
    elif self.path == "/":
        self.main_page()
    else:
        self.send_response(HTTPStatus.INTERNAL_SERVER_ERROR)
        self.end_headers()

class Crawler:
    MAX_URL = 10

    def __init__(self):
        self.url_counter = 1
        self.document_client = DocumentClient()
        self.indexing_client = IndexingClient()
        self.pagerank_client = PagerankClient()
        self.producer = Producer('url_queue')
        self.consumer = Consumer('url_queue')

    def run(self):
        self.consumer.subscribe(self.run_for_url)

    def run_for_url(self, ch, method, properties, body):
        doc_url = body.decode("utf-8")
        print("[Crawler] Received %r" % doc_url)

        document_text = WebScraper.get_text(doc_url)
        document_links = WebScraper.get_links(doc_url)
        hash_object = hashlib.sha256(document_text.encode("utf-8"))
        digest = hash_object.hexdigest()

        doc_record = self.document_client.get_by_url(doc_url)
        if "id" not in doc_record:
            doc_record = self.document_client.create(doc_url, digest)

        doc_indexed = self.indexing_client.get_by_id(doc_record["id"])
        if "url" not in doc_indexed:
            self.indexing_client.index(doc_record["id"], doc_url, document_text)

        if doc_record["digest"] != digest:
            self.document_client.update_digest(doc_record["id"], digest)
            self.indexing_client.update_content(doc_record["id"], document_text)

        for link in document_links:
            if self.url_counter < Crawler.MAX_URL:
                self.url_counter += 1
                child_doc_record = self.document_client.get_by_url(link.geturl())
                if "id" not in child_doc_record:
                    child_doc_record = self.document_client.create(link.geturl(), "digest")
                self.document_client.create_link(doc_record["id"], child_doc_record["id"])
                self.producer.publish(link.geturl())

        self.pagerank_client.update(doc_record["id"])

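# A short usage sketch for the Crawler above: seed the 'url_queue' with a starting
# URL via the crawler's own producer, then block on the consumer subscription.
# The seed URL is an assumption.
if __name__ == "__main__":
    crawler = Crawler()
    crawler.producer.publish("https://example.com/")
    crawler.run()
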
def consume_example():
    consumer = Consumer(
        host="192.168.1.13",
        # host="10.0.0.13",
        consumer_exchange="exchange.push.in",
        consumer_routing_keys=["topic.push.in"],
        consumer_queue_name="queue.push",
        producer_exchange="exchange.push.out",
        producer_routing_key="topic.push.out")
    try:
        consumer.start_consuming()
    except KeyboardInterrupt:
        # Someone pressed CTRL-C, stop consuming and close
        consumer.close()
    except Exception as e:
        consumer.close()
        raise e

def __init__(self, nickname, email, phone, name, eid):
    Consumer.__init__(self, nickname, email, phone, name, eid)
    self.posts = []

def run(self):
    consumer = Consumer()
    while True:
        consumer.progress()

from produceur_class import Producer
from consumer import Consumer
from collector import Collector

prod = Producer()
cons = Consumer()
coll = Collector()

prod.producer()
cons.consumer()
cons.consumer()
coll.collector()

def run(self):
    # import pdb; pdb.set_trace()
    consumer = Consumer()
    while True:
        consumer.progress()

def test():
    consumer = Consumer()
    while True:
        consumer.progress()

import sys
from multiprocessing import Process

from consumer import Consumer
from nlp_consumer import NLPConsumer
from reddit_consumer import RedditConsumer

q = Consumer.make_queue()
c = Consumer("twitter")
if len(sys.argv) > 1:
    c.consumer.seek(int(sys.argv[1]), 0)
else:
    c.consumer.seek(0, 2)
p = Process(target=c.run, args=(q,))
p.start()

def reddit():
    reddit = RedditConsumer()
    while True:
        reddit.consume(q.get(True))

def nlp():
    nlp = NLPConsumer()
    while True:
        nlp.consume(q.get(True))

nlp()

def readqueue(self):
    queue = Queue.Queue()
    try:
        f = open('user.queue', 'r')
        for line in f:
            queue.queue = deque(eval(line))
    except:
        pass
    finally:
        return queue


if __name__ == "__main__":
    bullyalgorithm(opt=False)

    producer = Producer()
    producer.setDaemon(True)
    producer.start()

    consumer = Consumer(producer.queue)
    consumer.setDaemon(True)
    consumer.start()

    acceptor = Acceptor()
    acceptor.setDaemon(True)
    acceptor.start()

    while threading.active_count() > 0:
        time.sleep(0.1)

""" Dummy client to simulate the worker nodes """ import time import stomp import sys from workflow.settings import brokers, icat_user, icat_passcode from consumer import Consumer queues = ['CATALOG.DATA_READY', 'REDUCTION.DATA_READY', 'REDUCTION_CATALOG.DATA_READY', 'LIVEDATA.UPDATE'] print brokers c = Consumer(brokers, icat_user, icat_passcode, queues) c.processing_loop()