async def home(request):
    from producer import Producer
    pro = Producer()
    fids = ['40', '41', '43', '44', '45', '46', '47']
    for fid in fids:
        pro.produce(fid=fid, page=2)
    # `file` here is presumably sanic.response.file, imported elsewhere
    return await file('home.html')
def __init__(self, functional_connectivity, patient_data, **kwargs):
    self.types = [
        'ConcentrationLinear', 'Constant', 'ConcentrationSigmoid',
        'WeightedDegreeLinear', 'WeightedDegreeSigmoid'
    ]
    self.producer = Producer(self.types)
    self.params = self.producer.params
    for key, value in kwargs.items():
        if key == "nodeCoordinates":
            self.nodeCoordinates = value
        elif key == "optimizer":
            self.optimizer = value
        elif key == "loss":
            self.loss = value
        elif key == "euclideanAdjacency":
            self.euclideanAdjacency = value
        elif key == "producer":
            self.producer = value
        elif key == "diffuser":
            self.diffuser = value
        elif key == "params":
            self.params.update(value)
        else:
            raise TypeError("Illegal Keyword '" + str(key) + "'")
    self.functionalConnectivity = functional_connectivity
    self.patientData = patient_data
    self.numNodes, _ = np.shape(functional_connectivity)
    # note: this unconditionally rebuilds self.loss, overriding any "loss" kwarg
    self.loss = Loss("mse", self.patientData)
    self.lastloss = 0
    self.reset()
def main():
    config = {}
    with open('./config.txt', 'r') as config_file:
        for line in config_file:
            line = line.rstrip()
            key, val = line.split('=')
            config[key] = val

    captchaList = []
    condition = threading.Condition()
    producer = Producer(sitekey=config['sitekey'],
                        twocaptchaKey=config['twocaptchaKey'],
                        condition=condition,
                        producerNo=int(config['producerThread']),
                        captchaList=captchaList)
    producer.start()
    for i in range(int(config['consumerThread'])):
        consumer = Consumer(url=config['url'],
                            productNo=config['productNo'],
                            raffleNo=config['raffleNo'],
                            areCode=config['phoneAreaCode'],
                            domain=config['catchAllDomain'],
                            prefix=config['catchAllPrefix'],
                            condition=condition,
                            captchaList=captchaList)
        consumer.start()
def test_seperator(file_with_seperator):
    """ Test Producer class with a custom CSV separator. """
    p = Producer()
    task_id = p.s(file_with_seperator, sep='|').apply_async()
    assert isinstance(task_id, celery.result.EagerResult)
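# The file_with_seperator fixture is not shown in this snippet. A minimal
# sketch of what it might look like, matching the sibling fixtures that hand
# the test an open file object (the field layout below is hypothetical):
import pytest

@pytest.fixture
def file_with_seperator(tmp_path):
    # write a small pipe-separated file and yield it opened for reading
    path = tmp_path / "contacts.csv"
    path.write_text("alice|alice@example.com\nbob|bob@example.com\n")
    with open(path, newline='') as f:
        yield f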
def __init__(self):
    super().__init__()
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    # buffer for messages that have been sent but not yet processed
    self.buffer = queue.Queue()
    # threads for processing the data
    self.sentThread = QThread()
    self.sentObj = Producer(self.sem, self.buffer)
    self.sentObj.moveToThread(self.sentThread)
    self.n = 1
    self.getThreadsPool = [QThread()]
    self.getObjs = [Consumer(self.sem, self.sem_forBuffer, 1, self.buffer)]
    self.getObjs[0].moveToThread(self.getThreadsPool[0])
    self.ui.sendBt.clicked.connect(self.sentObj.run)
    self.ui.sendBt.clicked.connect(self.check)
    self.sentObj.message_sented.connect(self.getObjs[0].run)
    self.sentObj.message_sented.connect(self.addSendedMessage)
    self.getObjs[0].message_getted.connect(self.addGettedMessage)
    self.ui.okBt.clicked.connect(self.change_threadNumber)
    self.sem_forBuffer.release()
    self.sentThread.start()
    self.getThreadsPool[0].start()
def main():
    # create the server socket object
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # get the local host name
    host = socket.gethostname()
    port = 9999
    # bind to the port
    server_socket.bind((host, port))
    # maximum number of queued connections before refusing
    server_socket.listen(5)
    # thread-pool work queue
    work_queue = queue.Queue()
    # message queue
    message_queue = queue.Queue()
    # create a thread pool with 4 worker threads
    thread_pool = ThreadPoolManger(4, work_queue)
    # start the producer process
    p = Producer()
    p.start()
    # start the consumer process
    c = Consumer()
    c.start()
    while True:
        # accept a client connection
        client_socket, addr = server_socket.accept()
        t = threading.Thread(target=tcp_link,
                             args=(client_socket, addr, thread_pool, message_queue))
        t.start()
def main(start, end, fakeid):
    """Create the table, then produce and consume."""
    sql.create_table()
    # produce
    do_producer = Producer(start, end, fakeid)
    do_producer.run()
    # consume
    do_consumer()
def publish():
    try:
        producer = Producer(bootstrap_servers, url, topic)
        values = request.json["val"]
        response = producer.publish(key=values, values=values)
        del producer
        return response
    except Exception as e:
        return format(e)
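# A minimal sketch of how this endpoint might be exercised, assuming it is
# registered on a Flask app reachable at localhost:5000 under /publish
# (route, host, and port are assumptions, not shown in the snippet above):
import requests

resp = requests.post(
    "http://localhost:5000/publish",
    json={"val": "hello-kafka"},  # becomes request.json["val"] in publish()
)
print(resp.status_code, resp.text)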
def reset(self):
    self.initializer = Initializer("braak1", self.numNodes, self.params)
    self.concentration = self.initializer.get()
    self.concentrationHistory = np.copy(self.concentration)
    self.producer = Producer(self.types)
    self.diffusor = Diffusor("euclidean", self.params,
                             EuclideanAdjacency=self.euclideanAdjacency)
def working_logic(channel, method_frame, header_frame, body):
    # Pull maintenance_window_name out of queue payload
    print("Received from queue: %s" % body.decode('utf-8'))
    payload = json.loads(body.decode('utf-8'))
    message = payload.pop('message', [])
    prod = Producer()
    prod.create_message(message)
    print("Produced response and Ack'ing")
    rec.ack_message(method_frame)
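# A minimal sketch of how working_logic might be wired up as a pika consumer
# callback; the queue name and connection parameters here are assumptions,
# not taken from the snippet above:
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
channel = connection.channel()
# working_logic's (channel, method, properties, body) signature matches
# pika's on_message_callback contract
channel.basic_consume(queue="maintenance_windows",
                      on_message_callback=working_logic)
channel.start_consuming()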
def test_read_urls_from_file(self):
    urls = Producer.read_urls_from_file(self.producer, "urls/urls.txt")
    self.assertEqual(len(urls), 2, "Equal sizes")
    self.assertEqual(
        urls,
        ["http://www.python.org/doc/", "http://www.netflix.com"],
        "Equal content")
    urls = Producer.read_urls_from_file(self.producer, "urls/empty_urls.txt")
    self.assertEqual(len(urls), 0, "Equal sizes for empty file")
    self.assertEqual(urls, [], "Equal content for empty file")
def post():
    try:
        con = request.json
        con['auth'] = request.headers['Authorization']
        Producer.post_to_queue(con)
        REQUESTS.labels(method='POST', endpoint="/post", status_code=200).inc()
        return "OK", 200
    except Exception:
        REQUESTS.labels(method='POST', endpoint="/post", status_code=400).inc()
        return "ERROR", 400
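# REQUESTS is not defined in this snippet. A minimal sketch of the counter it
# appears to rely on, using the real prometheus_client API (the metric name
# and help text here are assumptions):
from prometheus_client import Counter

REQUESTS = Counter(
    "http_requests_total",  # metric name (assumed)
    "Count of HTTP requests by method, endpoint and status code",
    ["method", "endpoint", "status_code"],
)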
def test_column_map(file_with_header):
    """ Test Producer class with custom column mappings of CSV files. """
    p = Producer()
    reader = csv.reader(file_with_header)
    p.column_map = {'name': 1, 'email': 2}
    for line in reader:
        with pytest.raises(IndexError):
            p._parse_row(line)
def run(self):
    producer = Producer(self.kafka_broker, self.topic)
    self.logger.info("Running Kafka Producer")
    for i in range(1, 5):
        data1 = {'message': {"dataObjectID": "test" + str(i)}}
        producer.send_data(data1)
        time.sleep(4)
def read_transactions():
    message_list = []
    p = Producer()
    transaction_list = p.read_transaction_list()
    for i in range(traffic_interval):
        for _ in range(traffic_frequency):
            transaction = int(transaction_list[i] / traffic_frequency)
            message = transaction * traffic_ratio
            message_list.append(message)
    return message_list
def test_header_rows(file_with_header):
    """ Test Producer class with header rows parameter values. """
    p = Producer()
    p.header_rows = 1
    total_rows = sum(1 for row in csv.reader(file_with_header))
    file_with_header.seek(0)
    reader = p._skip_headers(file_with_header)
    out_rows = sum(1 for row in reader)
    assert total_rows == (out_rows + p.header_rows)
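# Producer._skip_headers is private and not shown here. One implementation
# consistent with this test (a sketch, not the actual code) simply drops the
# first header_rows lines with itertools.islice:
import itertools

def _skip_headers(self, fileobj):
    # yield everything after the first self.header_rows lines
    return itertools.islice(fileobj, self.header_rows, None)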
def test_successful_execution(custom_file_format):
    """ Test a valid CSV file with all three parameters. """
    p = Producer()
    sep = '|'
    header = 1
    column_map = {'email': 0, 'name': 1}
    task_id = p.s(custom_file_format, header_rows=header,
                  column_map=column_map, sep=sep).apply_async()
    assert isinstance(task_id, celery.result.EagerResult)
def main(algo, count):
    p = Producer()
    c = Client()
    interval = traffic_frequency
    c.describe_topic(topic_name)
    c.describe_consumer_group(group_name)
    for i in range(interval):
        transaction_list = p.read_transaction_list()
        count1 = count % len(transaction_list)
        transaction = int(transaction_list[count1])
        if workload_type == "fix":
            transaction = transaction_value
        message_count = traffic_ratio * int(transaction / interval)
        start_time = time.time()
        producer_info = p.producer_per_test(topic_name, message_count)
        end_time = time.time()
        time_diff = end_time - start_time
        print("producers took %s seconds" % time_diff)
        p.write_latency(algo, producer_info)
        # output = c.describe_consumer_group(group_name)
        # print(output)
        duration = int(60 / interval)
        for _ in range(duration):
            p.wait_time(1)
            inter_end_time = time.time()
            if inter_end_time - start_time >= duration:
                break
class Main(object):

    def __init__(self, is_continue=False):
        self.net = Net()
        self.is_continue = is_continue
        if self.is_continue:
            logger.info("Interrupt loading!")
            self.net.load(conf.model_path)
            with open(conf.data_path, "rb") as fp:
                self.data = pickle.load(fp)
            logger.info("load %s data!" % len(self.data))
            logger.info("Interrupt loaded!")
        else:
            self.data = []
        self.producer = Producer()
        self.game_num = 0

    def get_sample_data(self, n):
        mini_batch = [
            self.data.pop(random.randrange(len(self.data)))
            for _ in range(n)
        ]
        return [item[0] for item in mini_batch], \
               [item[1] for item in mini_batch], \
               [item[2] for item in mini_batch]

    def run(self):
        try:
            if not self.is_continue:
                logger.info("Init data start!")
                self.data.extend(
                    self.producer.playn(conf.init_game_num, self.net))
                self.game_num += conf.batch_game_num
                logger.info("Played %s games, produce %s data"
                            % (self.game_num, len(self.data)))
            while True:
                batch_data = self.producer.playn(conf.batch_game_num, self.net)
                self.data.extend(batch_data)
                self.game_num += conf.batch_game_num
                logger.info(
                    "all games: %s, batch games: %s, this batch produce: %s"
                    % (self.game_num, conf.batch_game_num, len(batch_data)))
                x, y1, y2 = self.get_sample_data(len(batch_data))
                self.net.train(x, y1, y2)
        except KeyboardInterrupt:
            # persist progress so the run can resume with is_continue=True
            with open(conf.data_path, "wb") as fwp:
                pickle.dump(self.data, fwp)
            self.net.save(conf.model_path)
            logger.info("Interrupt saved!")
def producer(self, *args, **kwargs):
    if not callable(args[0]):
        raise TypeError("Producer must be a callable object")
    producer_pioneer_instance = Producer(args[0])
    self._producers.append(producer_pioneer_instance)
    return producer_pioneer_instance
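# Because producer() validates that args[0] is callable and returns the
# wrapping Producer instance, it can plausibly be used as a decorator. A
# hedged usage sketch, assuming `broker` is an instance of the class that
# defines producer():
@broker.producer
def emit_event():
    return {"event": "something_happened"}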
def __init__(self, host, consumer_exchange, consumer_routing_keys,
             consumer_queue_name, producer_exchange, producer_routing_key):
    try:
        self.handler = Handler()
        log.info("init consumer for host=%s, exchange=%s, routing_keys=%s"
                 % (host, consumer_exchange, repr(consumer_routing_keys)))
        self.host = host
        self.exchange = consumer_exchange
        self.routing_keys = consumer_routing_keys
        self.queue_name = consumer_queue_name
        if not consumer_routing_keys:
            log.error("routing_keys can't be null...")
            sys.exit(1)
        # init producer
        self.resend_producer = Producer(self.host,
                                        producer_exchange,
                                        producer_routing_key)
        # init pika connection
        self._connection = self._connect()
        self._closing = False
        self._consumer_tag = None
    except Exception as e:
        log.error("Unexpected error: %r", e)
def get_producer():
    if 'producer' not in g:
        uri = app.config['AMQP_URI']
        producer = Producer(uri, 'photo-processor')
        g.producer = producer
    return g.producer
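# The usual companion to this app-context caching pattern is a teardown hook
# that releases the producer with the context. A sketch, assuming Producer
# exposes a close() method (not shown above):
@app.teardown_appcontext
def close_producer(exc):
    producer = g.pop('producer', None)
    if producer is not None:
        producer.close()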
async def initialize(app: aiohttp.web.Application):
    """Initialize the application context."""
    global task_provider, _db_engine, _rmq_connection_pool, _rmq_channel_pool, \
        producer, was_initialized, logger

    _db_engine = await gino.create_engine(configs.DATABASE_URI)
    task_provider = TaskProvider(_db_engine)

    loop = asyncio.get_running_loop()
    _rmq_connection_pool = aio_pika.pool.Pool(_get_connection, loop=loop)
    _rmq_channel_pool = aio_pika.pool.Pool(_get_channel, loop=loop)
    async with _rmq_channel_pool.acquire() as channel:
        await channel.declare_queue(configs.TASK_Q_NAME, durable=True)
    producer = Producer(_rmq_channel_pool)

    logger = JsonLogger.with_default_handlers(
        level=logging.DEBUG,
        exclude_fields=[FUNCTION_NAME_FIELDNAME, 'file_path', 'line_number'])
    was_initialized = True
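# _get_connection and _get_channel are referenced above but not shown.
# Plausible pool factories, sketched with the real aio_pika API (the
# configs.AMQP_URI key is an assumption):
async def _get_connection():
    return await aio_pika.connect_robust(configs.AMQP_URI)

async def _get_channel():
    # borrow a pooled connection and open a fresh channel on it
    async with _rmq_connection_pool.acquire() as connection:
        return await connection.channel()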
class Crawler:
    MAX_URL = 10

    def __init__(self):
        self.url_counter = 1
        self.document_client = DocumentClient()
        self.indexing_client = IndexingClient()
        self.pagerank_client = PagerankClient()
        self.producer = Producer('url_queue')
        self.consumer = Consumer('url_queue')

    def run(self):
        self.consumer.subscribe(self.run_for_url)

    def run_for_url(self, ch, method, properties, body):
        doc_url = body.decode("utf-8")
        print("[Crawler] Received %r" % doc_url)
        document_text = WebScraper.get_text(doc_url)
        document_links = WebScraper.get_links(doc_url)
        hash_object = hashlib.sha256(document_text.encode("utf-8"))
        digest = hash_object.hexdigest()
        # create the document record if this URL has not been seen yet
        doc_record = self.document_client.get_by_url(doc_url)
        if "id" not in doc_record:
            doc_record = self.document_client.create(doc_url, digest)
        # index the content if it is not indexed yet
        doc_indexed = self.indexing_client.get_by_id(doc_record["id"])
        if "url" not in doc_indexed:
            self.indexing_client.index(doc_record["id"], doc_url, document_text)
        # refresh digest and indexed content when the page has changed
        if doc_record["digest"] != digest:
            self.document_client.update_digest(doc_record["id"], digest)
            self.indexing_client.update_content(doc_record["id"], document_text)
        # enqueue outgoing links until the crawl budget is exhausted
        for link in document_links:
            if self.url_counter < Crawler.MAX_URL:
                self.url_counter += 1
                child_doc_record = self.document_client.get_by_url(link.geturl())
                if "id" not in child_doc_record:
                    # placeholder digest until the child page is crawled
                    child_doc_record = self.document_client.create(link.geturl(), "digest")
                self.document_client.create_link(doc_record["id"], child_doc_record["id"])
                self.producer.publish(link.geturl())
        self.pagerank_client.update(doc_record["id"])
def test_read_html_from_url(self):
    html = Producer.read_html_from_url(self.producer, "http://www.python.org")
    self.assertEqual(html["url_html"][0:15], "<!doctype html>", "Equal start")
    self.assertEqual(
        str(html["url_html"][-8:]).strip(), "</html>", "Equal end")
def callback(cmd):
    cmd_name = next(iter(cmd['cmd']))
    payload = {
        'cmdexe': {
            cmd_name: f'processed {cmd["cmd"][cmd_name]}',
        }
    }
    producer = Producer(payload)
    Container(producer).run()
def test_message(self):
    topic = "test"
    # Make the test message
    test_json = {}
    test_json["id"] = 1
    # round-trip through JSON to normalize the message
    json_msg = json.loads(json.dumps(test_json))
    key = bytes(str(json_msg["id"]), 'utf-8')
    # Send the message
    producer = Producer(config.bootstrap_servers)
    p_msg = producer.send_json_to_kafka(json_msg, key, topic)
    # Consume the message
    consumer = Consumer(config.bootstrap_servers, [topic])
    c_msg = consumer.consume_message()
    # Check that the messages are the same
    self.assertEqual(p_msg, c_msg)
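# Producer.send_json_to_kafka is not shown in this snippet. A minimal sketch
# of what it might look like on top of the real kafka-python API (returning
# the sent payload so the test's equality check can pass is an assumption):
from kafka import KafkaProducer

class Producer:
    def __init__(self, bootstrap_servers):
        self._producer = KafkaProducer(
            bootstrap_servers=bootstrap_servers,
            value_serializer=lambda v: json.dumps(v).encode('utf-8'))

    def send_json_to_kafka(self, json_msg, key, topic):
        self._producer.send(topic, key=key, value=json_msg)
        self._producer.flush()  # block until delivery completes
        return json_msg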
def get_config_parser():
    parser = ArgumentParser()
    parser.add_argument(
        "--producer",
        action=ActionParser(parser=Producer.get_config_parser()))
    parser.add_argument(
        "--transformer",
        action=ActionParser(parser=Transformer.get_config_parser()))
    return parser
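# ArgumentParser and ActionParser here are presumably jsonargparse's, which
# nest a whole sub-parser under a dotted prefix. A hedged usage sketch (the
# nested option names depend on Producer/Transformer, not shown here):
cfg = get_config_parser().parse_args()
producer_cfg = cfg.producer        # namespace built by Producer's parser
transformer_cfg = cfg.transformer  # namespace built by Transformer's parser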