def test_set_timeout(self):
    """The having_timeout() context manager sets and reverts the socket timeout."""
    with patch.object(self.t, 'sock') as sock_mock:
        sock_mock.gettimeout.return_value = 3
        with self.t.having_timeout(5) as actual_sock:
            assert actual_sock == self.t.sock
        sock_mock.gettimeout.assert_called()
        # Timeout is first raised to 5, then restored to the original 3.
        sock_mock.settimeout.assert_has_calls([
            call(5),
            call(3),
        ])
def test_close_open_fds(patching):
    """close_open_fds() closes fds below fdmax and tolerates EBADF errors."""
    _close = patching('os.close')
    fdmax = patching('celery.platforms.get_fdmax')
    with patch('os.closerange', create=True) as closerange:
        fdmax.return_value = 3
        close_open_fds()
        if not closerange.called:
            # Fallback path closes each fd individually, highest first.
            _close.assert_has_calls([call(2), call(1), call(0)])
        # Already-closed fds (EBADF) must be ignored silently.
        _close.side_effect = OSError()
        _close.side_effect.errno = errno.EBADF
        close_open_fds()
def test_blocking_read__timeout(self):
    """blocking_read(timeout) applies the timeout and restores the original."""
    sock = self.conn.transport.sock = Mock(name='sock')
    sock.gettimeout.return_value = 1
    self.conn.on_inbound_frame = Mock(name='on_inbound_frame')
    self.conn.blocking_read(3)
    sock.gettimeout.assert_called_with()
    # Set to the requested timeout, then back to the previous value.
    sock.settimeout.assert_has_calls([call(3), call(1)])
    self.conn.transport.read_frame.assert_called_with()
    self.conn.on_inbound_frame.assert_called_with(
        self.conn.transport.read_frame(),
    )
    # When the current timeout already matches, no change is needed.
    sock.gettimeout.return_value = 3
    self.conn.blocking_read(3)
def test_mget(self):
    """mget() fetches each task id from the server and returns results in order."""
    x = ElasticsearchBackend(app=self.app)
    x._server = Mock()
    x._server.get.side_effect = [
        {'found': True, '_id': sentinel.task_id1,
         '_source': {'result': sentinel.result1}},
        {'found': True, '_id': sentinel.task_id2,
         '_source': {'result': sentinel.result2}},
    ]
    assert x.mget([sentinel.task_id1, sentinel.task_id2]) == [
        sentinel.result1, sentinel.result2,
    ]
    x._server.get.assert_has_calls([
        call(index=x.index, doc_type=x.doc_type, id=sentinel.task_id1),
        call(index=x.index, doc_type=x.doc_type, id=sentinel.task_id2),
    ])
def test_mget(self, mock_client):
    """mget() reads each document and extracts its 'value' field."""
    keys = [b"mykey1", b"mykey2"]
    self.backend.mget(keys)
    mock_client.ReadDocument.assert_has_calls([
        call("dbs/celerydb/colls/celerycol/docs/mykey1",
             {"partitionKey": "mykey1"}),
        call().get("value"),
        call("dbs/celerydb/colls/celerycol/docs/mykey2",
             {"partitionKey": "mykey2"}),
        call().get("value"),
    ])
def test_connect_missing_capabilities(self):
    """Handshake against a broker that supports only a subset of capabilities."""
    frame_writer_cls_mock = Mock()
    on_open_mock = Mock()
    frame_writer_mock = frame_writer_cls_mock()
    conn = Connection(
        frame_writer=frame_writer_cls_mock, on_open=on_open_mock,
    )
    with patch.object(conn, 'Transport') as transport_mock:
        server_properties = dict(SERVER_PROPERTIES)
        server_properties['capabilities'] = {
            # This capability is not supported by the client.
            'basic.nack': True,
            'consumer_cancel_notify': True,
            'connection.blocked': False,
            # The server does not support 'authentication_failure_close',
            # which is supported by the client.
        }
        client_properties = dict(CLIENT_PROPERTIES)
        client_properties['capabilities'] = {
            'consumer_cancel_notify': True,
        }
        handshake(
            conn, transport_mock, server_properties=server_properties)
        on_open_mock.assert_called_once_with(conn)
        security_mechanism = sasl.AMQPLAIN(
            'guest', 'guest',
        ).start(conn).decode('utf-8', 'surrogatepass')
        # Expected responses from the client:
        frame_writer_mock.assert_has_calls([
            call(
                1, 0, spec.Connection.StartOk,
                # Due to the Table type we cannot compare the bytestream
                # directly.
                DataComparator(
                    'FsSs',
                    (client_properties, 'AMQPLAIN',
                     security_mechanism, 'en_US')),
                None),
            call(
                1, 0, spec.Connection.TuneOk,
                dumps(
                    'BlB',
                    (conn.channel_max, conn.frame_max, conn.heartbeat)),
                None),
            call(
                1, 0, spec.Connection.Open,
                dumps('ssb', (conn.virtual_host, '', False)),
                None),
        ])
        # Only the intersection of capabilities is negotiated.
        assert conn.client_properties == client_properties
def test_channel_ignore_methods_during_close(self):
    """After sending Channel.Close, py-amqp discards all received methods
    except Close and Close-OK."""
    frame_writer_cls_mock = Mock()
    conn = Connection(frame_writer=frame_writer_cls_mock)
    consumer_tag = 'amq.ctag-PCmzXGkhCw_v0Zq7jXyvkg'
    with patch.object(conn, 'Transport') as transport_mock:
        handshake(conn, transport_mock)
        channel_id = 1
        transport_mock().read_frame.side_effect = [
            # Inject Open handshake.
            build_frame_type_1(
                spec.Channel.OpenOk, channel=channel_id,
                args=(1, False), arg_format='Lb'),
            # Inject a basic-deliver response that must be ignored.
            build_frame_type_1(
                spec.Basic.Deliver, channel=1, arg_format='sLbss',
                args=(
                    # consumer-tag, delivery-tag, redelivered,
                    consumer_tag, 1, False,
                    # exchange-name, routing-key
                    'foo_exchange', 'routing-key')),
            build_frame_type_2(
                channel=1, body_len=12,
                properties=b'0\x00\x00\x00\x00\x00\x01'),
            build_frame_type_3(channel=1, body=b'Hello World!'),
            # Inject the close confirmation.
            build_frame_type_1(spec.Channel.CloseOk, channel=channel_id),
        ]
        frame_writer_mock = frame_writer_cls_mock()
        frame_writer_mock.reset_mock()
        with patch('amqp.Channel._on_basic_deliver') as on_deliver_mock:
            ch = conn.channel(channel_id=channel_id)
            ch.close()
            # The delivery arriving mid-close must be dropped.
            on_deliver_mock.assert_not_called()
        frame_writer_mock.assert_has_calls([
            call(1, 1, spec.Channel.Open, dumps('s', ('', )), None),
            call(
                1, 1, spec.Channel.Close,
                dumps('BsBB', (0, '', 0, 0)), None),
        ])
        assert ch.is_open is False
def test_set_timeout_exception_raised(self):
    """The timeout is reverted even when the managed block raises."""
    with patch.object(self.t, 'sock') as sock_mock:
        sock_mock.gettimeout.return_value = 3
        with pytest.raises(DummyException):
            with self.t.having_timeout(5) as actual_sock:
                assert actual_sock == self.t.sock
                raise DummyException()
        sock_mock.gettimeout.assert_called()
        # Set to 5 on entry, restored to 3 despite the exception.
        sock_mock.settimeout.assert_has_calls([
            call(5),
            call(3),
        ])
def test_on_chord_part_return(self, restore):
    """Each chord part pushes its result; group keys are deleted and expired."""
    tasks = [self.create_task() for i in range(10)]
    for i in range(10):
        self.b.on_chord_part_return(tasks[i].request, states.SUCCESS, i)
        assert self.b.client.rpush.call_count
        self.b.client.rpush.reset_mock()
    assert self.b.client.lrange.call_count
    jkey = self.b.get_key_for_group('group_id', '.j')
    tkey = self.b.get_key_for_group('group_id', '.t')
    self.b.client.delete.assert_has_calls([call(jkey), call(tkey)])
    # Default result expiry of one day (86400 s) is applied to both keys.
    self.b.client.expire.assert_has_calls([
        call(jkey, 86400),
        call(tkey, 86400),
    ])
def test_init(self):
    """Client() wires up the curl multi handle and its timer/socket callbacks."""
    with patch('kombu.asynchronous.http.curl.pycurl') as _pycurl:
        x = self.Client()
        assert x._multi is not None
        assert x._pending is not None
        assert x._free_list is not None
        assert x._fds is not None
        assert x._socket_action == x._multi.socket_action
        assert len(x._curls) == x.max_clients
        assert x._timeout_check_tref
        x._multi.setopt.assert_has_calls([
            call(_pycurl.M_TIMERFUNCTION, x._set_timeout),
            call(_pycurl.M_SOCKETFUNCTION, x._handle_socket),
        ])
def test_connect(self):
    """Full connection handshake produces the expected client frames."""
    frame_writer_cls_mock = Mock()
    on_open_mock = Mock()
    frame_writer_mock = frame_writer_cls_mock()
    conn = Connection(
        frame_writer=frame_writer_cls_mock, on_open=on_open_mock,
    )
    with patch.object(conn, 'Transport') as transport_mock:
        handshake(conn, transport_mock)
        on_open_mock.assert_called_once_with(conn)
        security_mechanism = sasl.AMQPLAIN(
            'guest', 'guest',
        ).start(conn).decode('utf-8', 'surrogatepass')
        # Expected responses from the client:
        frame_writer_mock.assert_has_calls([
            call(
                1, 0, spec.Connection.StartOk,
                # Due to the Table type we cannot compare the bytestream
                # directly.
                DataComparator(
                    'FsSs',
                    (CLIENT_PROPERTIES, 'AMQPLAIN',
                     security_mechanism, 'en_US')),
                None),
            call(
                1, 0, spec.Connection.TuneOk,
                dumps(
                    'BlB',
                    (conn.channel_max, conn.frame_max, conn.heartbeat)),
                None),
            call(
                1, 0, spec.Connection.Open,
                dumps('ssb', (conn.virtual_host, '', False)),
                None),
        ])
        assert conn.client_properties == CLIENT_PROPERTIES
def test_get_many(self):
    """get_many() yields a result for every id, with and without dict returns."""
    for is_dict in True, False:
        self.b.mget_returns_dict = is_dict
        ids = {uuid(): i for i in range(10)}
        for id, i in items(ids):
            self.b.mark_as_done(id, i)
        it = self.b.get_many(list(ids), interval=0.01)
        for i, (got_id, got_state) in enumerate(it):
            assert got_state['result'] == ids[got_id]
        assert i == 9
        # A second pass is served from the cache.
        assert list(self.b.get_many(list(ids), interval=0.01))
        self.b._cache.clear()
        callback = Mock(name='callback')
        it = self.b.get_many(
            list(ids), on_message=callback, interval=0.05,
        )
        for i, (got_id, got_state) in enumerate(it):
            assert got_state['result'] == ids[got_id]
        assert i == 9
        assert list(
            self.b.get_many(list(ids), interval=0.01)
        )
        # on_message fires once per result.
        callback.assert_has_calls([
            call(ANY) for id in ids
        ])
def main():
    """Drive the sequencing pipeline (incomplete sketch).

    Pipeline is as follows:
      Alignment and filtering
        * picard IlluminaBasecallsToFastq
        * bwa mem to align samples
        * samtools view to compress samples to .bam
        * samtools sort
        * samtools index
        * picard MarkDuplicates
      Variant calling
        * Vardict
        * GATK - Mutect2
      Variant annotation
        * Annovar
    """
    # TODO: only the first two pipeline stages are wired up below.
    # NOTE(review): the original passed "|" inside an argv list; subprocess
    # does not interpret pipe characters there, so no pipeline was formed.
    # Joining into a single shell command (shell=True) makes the pipe real.
    # picardMem / picardOptions / bwaOptions are assumed to be module-level
    # option strings -- TODO confirm.
    call(" ".join([
        "java", picardMem, "-jar", "picard", "IlluminaBasecallsToFastq",
        picardOptions,
        "|",
        "bwa", "mem", bwaOptions,
    ]), shell=True)
def test_send_buffer_group(self):
    """Events in a buffer group are batched and flushed at buffer_limit."""
    buf_received = [None]
    producer = MockProducer()
    producer.connection = self.app.connection_for_write()
    connection = Mock()
    connection.transport.driver_type = 'amqp'
    eventer = self.app.events.Dispatcher(
        connection, enabled=False,
        buffer_group={'task'}, buffer_limit=2,
    )
    eventer.producer = producer
    eventer.enabled = True
    eventer._publish = Mock(name='_publish')

    def on_eventer_publish(events, *args, **kwargs):
        # Snapshot the batch: the buffer is cleared in place afterwards.
        buf_received[0] = list(events)
    eventer._publish.side_effect = on_eventer_publish

    assert not eventer._group_buffer['task']
    eventer.on_send_buffered = Mock(name='on_send_buffered')
    eventer.send('task-received', uuid=1)
    prev_buffer = eventer._group_buffer['task']
    assert eventer._group_buffer['task']
    eventer.on_send_buffered.assert_called_with()
    # Second event hits buffer_limit=2 and triggers a flush.
    eventer.send('task-received', uuid=1)
    assert not eventer._group_buffer['task']
    eventer._publish.assert_has_calls([
        call([], eventer.producer, 'task.multi'),
    ])
    # Cleared in place: same buffer object survives the flush.
    assert eventer._group_buffer['task'] is prev_buffer
    assert len(buf_received[0]) == 2
    # A missing on_send_buffered callback must not break sending.
    eventer.on_send_buffered = None
    eventer.send('task-received', uuid=1)
def test_on_chord_part_return_no_expiry(self, restore):
    """With expires=None the group keys are deleted but never expired."""
    old_expires = self.b.expires
    self.b.expires = None
    tasks = [self.create_task(i) for i in range(10)]
    for i in range(10):
        self.b.on_chord_part_return(tasks[i].request, states.SUCCESS, i)
        assert self.b.client.zadd.call_count
        self.b.client.zadd.reset_mock()
    assert self.b.client.zrangebyscore.call_count
    jkey = self.b.get_key_for_group('group_id', '.j')
    tkey = self.b.get_key_for_group('group_id', '.t')
    self.b.client.delete.assert_has_calls([call(jkey), call(tkey)])
    self.b.client.expire.assert_not_called()
    # Restore the backend's original expiry for other tests.
    self.b.expires = old_expires
def test_exchange_declare_error(self, reply_code, reply_text, exception):
    """A broker error during exchange_declare raises the mapped exception."""
    frame_writer_cls_mock = Mock()
    conn = Connection(frame_writer=frame_writer_cls_mock)
    with patch.object(conn, 'Transport') as transport_mock:
        handshake(conn, transport_mock)
        ch = create_channel(1, conn, transport_mock)
        # Broker replies to the declare with Connection.Close.
        transport_mock().read_frame.return_value = build_frame_type_1(
            spec.Connection.Close,
            args=(reply_code, reply_text) + spec.Exchange.Declare,
            arg_format='BsBB')
        frame_writer_mock = frame_writer_cls_mock()
        frame_writer_mock.reset_mock()
        with pytest.raises(exception) as excinfo:
            ch.exchange_declare('exchange', 'exchange-type')
        assert excinfo.value.code == reply_code
        assert excinfo.value.message == reply_text
        assert excinfo.value.method == 'Exchange.declare'
        assert excinfo.value.method_name == 'Exchange.declare'
        assert excinfo.value.method_sig == spec.Exchange.Declare
        # The client sends to the broker:
        # 1. Exchange.Declare
        # 2. Connection.CloseOk as reply to the received Connection.Close
        frame_writer_calls = [
            call(
                1, 1, spec.Exchange.Declare,
                dumps(
                    'BssbbbbbF',
                    (
                        0,
                        # exchange, type, passive, durable,
                        'exchange', 'exchange-type', False, False,
                        # auto_delete, internal, nowait, arguments
                        True, False, False, None)),
                None),
            call(1, 0, spec.Connection.CloseOk, '', None),
        ]
        frame_writer_mock.assert_has_calls(frame_writer_calls)
def test_channel_open_close(self):
    """Opening then closing a channel sends Channel.Open and Channel.Close."""
    frame_writer_cls_mock = Mock()
    conn = Connection(frame_writer=frame_writer_cls_mock)
    with patch.object(conn, 'Transport') as transport_mock:
        handshake(conn, transport_mock)
        channel_id = 1
        transport_mock().read_frame.side_effect = [
            # Inject Open handshake.
            build_frame_type_1(
                spec.Channel.OpenOk, channel=channel_id,
                args=(1, False), arg_format='Lb'),
            # Inject close confirmation.
            build_frame_type_1(spec.Channel.CloseOk, channel=channel_id),
        ]
        frame_writer_mock = frame_writer_cls_mock()
        frame_writer_mock.reset_mock()
        on_open_mock = Mock()
        ch = conn.channel(channel_id=channel_id, callback=on_open_mock)
        on_open_mock.assert_called_once_with(ch)
        assert ch.is_open is True
        ch.close()
        frame_writer_mock.assert_has_calls([
            call(1, 1, spec.Channel.Open, dumps('s', ('',)), None),
            call(
                1, 1, spec.Channel.Close,
                dumps('BsBB', (0, '', 0, 0)), None),
        ])
        assert ch.is_open is False
def assert_context(self, default, full, get_returns=None, set_effect=None):
    """Yield patched (getsignal, signal) mocks after running reset_signals().

    Asserts that every signal in ``default`` had its current handler queried.
    """
    with termsigs(default, full):
        with patch('signal.getsignal') as GET:
            with patch('signal.signal') as SET:
                GET.return_value = get_returns
                SET.side_effect = set_effect
                reset_signals()
                GET.assert_has_calls([
                    call(signo(sig)) for sig in default
                ])
                yield GET, SET
def test_send_all(self):
    """send_all() signals every node returned by getpids()."""
    nodes = [Mock(name='n1'), Mock(name='n2')]
    self.cluster.getpids = Mock(name='getpids')
    self.cluster.getpids.return_value = nodes
    self.cluster.send_all(15)
    # Signal 15 is reported by name (TERM) for each node.
    self.cluster.on_node_signal.assert_has_calls(
        call(node, 'TERM') for node in nodes)
    for node in nodes:
        node.send.assert_called_with(15, self.cluster.on_node_signal_dead)
def test_on_chord_part_return_no_expiry__unordered(self, restore):
    """Unordered chords with expires=None delete keys without expiring them."""
    self.app.conf.result_backend_transport_options = dict(
        result_chord_ordered=False,
    )
    old_expires = self.b.expires
    self.b.expires = None
    tasks = [self.create_task(i) for i in range(10)]
    for i in range(10):
        self.b.on_chord_part_return(tasks[i].request, states.SUCCESS, i)
        # Unordered mode uses a plain list (rpush/lrange), not a zset.
        assert self.b.client.rpush.call_count
        self.b.client.rpush.reset_mock()
    assert self.b.client.lrange.call_count
    jkey = self.b.get_key_for_group('group_id', '.j')
    tkey = self.b.get_key_for_group('group_id', '.t')
    self.b.client.delete.assert_has_calls([call(jkey), call(tkey)])
    self.b.client.expire.assert_not_called()
    # Restore the backend's original expiry for other tests.
    self.b.expires = old_expires
def test_on_chord_part_return__ordered(self, restore):
    """Ordered chords use a sorted set and expire the group keys."""
    self.app.conf.result_backend_transport_options = dict(
        result_chord_ordered=True,
    )
    tasks = [self.create_task(i) for i in range(10)]
    # Completion order should not matter for ordered chords.
    random.shuffle(tasks)
    for i in range(10):
        self.b.on_chord_part_return(tasks[i].request, states.SUCCESS, i)
        assert self.b.client.zadd.call_count
        self.b.client.zadd.reset_mock()
    assert self.b.client.zrangebyscore.call_count
    jkey = self.b.get_key_for_group('group_id', '.j')
    tkey = self.b.get_key_for_group('group_id', '.t')
    self.b.client.delete.assert_has_calls([call(jkey), call(tkey)])
    # Default result expiry of one day (86400 s) is applied to both keys.
    self.b.client.expire.assert_has_calls([
        call(jkey, 86400),
        call(tkey, 86400),
    ])
def test_close_deletes_autodelete_fanout_queues(self):
    """close() deletes only the fanout queues marked auto-delete."""
    self.channel._fanout_queues = {'foo': ('foo', ''), 'bar': ('bar', '')}
    self.channel.auto_delete_queues = ['foo']
    self.channel.queue_delete = Mock(name='queue_delete')
    client = self.channel.client
    self.channel.close()
    # 'bar' is not auto-delete, so only 'foo' is removed.
    self.channel.queue_delete.assert_has_calls([
        call('foo', client=client),
    ])
def result():
    """Handle the payment-success callback from Robokassa (server-side CURL).

    Verifies the request signature, marks the order paid, and queues the
    prank call.  Returns "Success" for Robokassa, or "Refused" when the
    signature check fails.
    """
    # Successful payment: server-to-server request from Robokassa via CURL.
    if check_hash(mrh_pass2):
        payment_id = request.args["InvId"]
        print(payment_id)
        with closing(pymysql.connect(host='localhost', user='******',
                                     password='', db='callprank',
                                     charset='utf8mb4',
                                     cursorclass=DictCursor)) as conn:
            with conn.cursor() as cursor:
                # SECURITY: payment_id comes from the request; it must be
                # passed as a bound parameter, never %-interpolated into
                # the SQL string (the original was injectable).
                cursor.execute(
                    'UPDATE orders SET isPaid=1 WHERE id=%s',
                    (payment_id,))
                conn.commit()
                cursor.execute(
                    'SELECT * FROM orders WHERE id=%s',
                    (payment_id,))
                order = cursor.fetchall()
                # Queue the call for the paid order.
                call(order[0]["chat_id"],
                     campaigns[order[0]["prank_id"] - 1],
                     order[0]["phone"], payment_id)
        return "Success"
    return "Refused"
def test_register_with_event_loop(self):
    """The transport registers its cycle fds and restore timer with the loop."""
    transport = self.connection.transport
    transport.cycle = Mock(name='cycle')
    transport.cycle.fds = {12: 'LISTEN', 13: 'BRPOP'}
    conn = Mock(name='conn')
    loop = Mock(name='loop')
    redis.Transport.register_with_event_loop(transport, conn, loop)
    transport.cycle.on_poll_init.assert_called_with(loop.poller)
    # Unacked messages are periodically restored every 10 seconds.
    loop.call_repeatedly.assert_called_with(
        10, transport.cycle.maybe_restore_messages,
    )
    loop.on_tick.add.assert_called()
    on_poll_start = loop.on_tick.add.call_args[0][0]
    on_poll_start()
    transport.cycle.on_poll_start.assert_called_with()
    loop.add_reader.assert_has_calls([
        call(12, transport.on_readable, 12),
        call(13, transport.on_readable, 13),
    ])
def test_connect_short_curcuit_on_INET_fails(self, getaddrinfo, sock_mock):
    """When the AF_INET attempt fails, connect() retries with AF_INET6."""
    self.t.sock = Mock()
    self.t.close()
    # First connect() raises socket.error, the second succeeds.
    with patch.object(sock_mock.return_value, 'connect',
                      side_effect=(socket.error, None)):
        self.t.connect()
    getaddrinfo.assert_has_calls([
        call('localhost', 5672, addr_type, ANY, ANY)
        for addr_type in (socket.AF_INET, socket.AF_INET6)
    ])
def test_delete(self):
    """_delete() removes every priority queue and the routing-table entry."""
    x = self.channel
    x._create_client = Mock()
    x._create_client.return_value = x.client
    delete = x.client.delete = Mock()
    srem = x.client.srem = Mock()
    x._delete('queue', 'exchange', 'routing_key', None)
    # One DEL per priority level of the queue.
    delete.assert_has_calls([
        call(x._q_for_pri('queue', pri)) for pri in redis.PRIORITY_STEPS
    ])
    # The binding is removed from the exchange's routing set.
    srem.assert_called_with(
        x.keyprefix_queue % ('exchange', ),
        x.sep.join(['routing_key', '', 'queue']),
    )
def test_do_restore_message(self):
    """_do_restore_message() pushes the payload to every routed queue,
    marks redelivery in the headers, and logs push failures."""
    client = Mock(name='client')
    pl1 = {'body': 'BODY'}
    spl1 = dumps(pl1)
    lookup = self.channel._lookup = Mock(name='_lookup')
    lookup.return_value = {'george', 'elaine'}
    self.channel._do_restore_message(
        pl1, 'ex', 'rkey', client,
    )
    client.rpush.assert_has_calls([
        call('george', spl1),
        call('elaine', spl1),
    ], any_order=True)

    # With existing headers, redelivered=True is merged in.
    client = Mock(name='client')
    pl2 = {'body': 'BODY2', 'headers': {'x-funny': 1}}
    headers_after = dict(pl2['headers'], redelivered=True)
    spl2 = dumps(dict(pl2, headers=headers_after))
    self.channel._do_restore_message(
        pl2, 'ex', 'rkey', client,
    )
    client.rpush.assert_any_call('george', spl2)
    client.rpush.assert_any_call('elaine', spl2)

    # Push failures must be logged as critical, not raised.
    client.rpush.side_effect = KeyError()
    with patch('kombu.transport.redis.crit') as crit:
        self.channel._do_restore_message(
            pl2, 'ex', 'rkey', client,
        )
        crit.assert_called()
def test_has_queue(self):
    """_has_queue() checks existence of every priority variant of the queue."""
    self.channel._create_client = Mock()
    self.channel._create_client.return_value = self.channel.client
    exists = self.channel.client.exists = Mock()
    exists.return_value = True
    assert self.channel._has_queue('foo')
    exists.assert_has_calls([
        call(self.channel._q_for_pri('foo', pri))
        for pri in redis.PRIORITY_STEPS
    ])
    exists.return_value = False
    assert not self.channel._has_queue('foo')
def test_qos_restore_visible(self):
    """restore_visible() is throttled via _vrestore_count and respects the mutex."""
    client = self.channel._create_client = Mock(name='client')
    client = client()

    def pipe(*args, **kwargs):
        return Pipeline(client)
    client.pipeline = pipe
    client.zrevrangebyscore.return_value = [
        (1, 10),
        (2, 20),
        (3, 30),
    ]
    qos = redis.QoS(self.channel)
    restore = qos.restore_by_tag = Mock(name='restore_by_tag')

    # Odd counter value: the restore cycle is skipped entirely.
    qos._vrestore_count = 1
    qos.restore_visible()
    client.zrevrangebyscore.assert_not_called()
    assert qos._vrestore_count == 2

    # Even counter: each unacked tag is restored.
    qos._vrestore_count = 0
    qos.restore_visible()
    restore.assert_has_calls([
        call(1, client),
        call(2, client),
        call(3, client),
    ])
    assert qos._vrestore_count == 1

    # Nothing to restore: restore_by_tag is not called.
    qos._vrestore_count = 0
    restore.reset_mock()
    client.zrevrangebyscore.return_value = []
    qos.restore_visible()
    restore.assert_not_called()
    assert qos._vrestore_count == 1

    # Another process holding the mutex must not raise.
    qos._vrestore_count = 0
    client.setnx.side_effect = redis.MutexHeld()
    qos.restore_visible()
def test_start(self):
    """Mingle.start() merges peer clocks and revoked sets from hello replies,
    skipping replies that report an error."""
    c = Mock()
    c.app.connection_for_read = _amqp_connection()
    mingle = Mingle(c)
    assert mingle.enabled
    Aig = LimitedSet()
    Big = LimitedSet()
    Aig.add('Aig-1')
    Aig.add('Aig-2')
    Big.add('Big-1')
    I = c.app.control.inspect.return_value = Mock()
    # NOTE(review): the redacted original used the same '*****@*****.**'
    # key three times, so the dict literal collapsed to a single entry and
    # the clock/revoked assertions below could never pass.  Distinct
    # hostnames restore the intended three replies.
    I.hello.return_value = {
        'george@vandelay.com': {
            'clock': 312,
            'revoked': Aig._data,
        },
        'elaine@vandelay.com': {
            'clock': 29,
            'revoked': Big._data,
        },
        'cosmo@vandelay.com': {
            'error': 'unknown method',
        },
    }
    our_revoked = c.controller.state.revoked = LimitedSet()
    mingle.start(c)
    I.hello.assert_called_with(c.hostname, our_revoked._data)
    # The logical clock is adjusted for every well-formed reply.
    c.app.clock.adjust.assert_has_calls([
        call(312), call(29),
    ], any_order=True)
    # Revoked sets from all peers are merged into ours.
    assert 'Aig-1' in our_revoked
    assert 'Aig-2' in our_revoked
    assert 'Big-1' in our_revoked
# NOTE(review): the original `import call` imported a module object and then
# tried to call it, which raises TypeError; the intended callable is
# subprocess.call -- TODO confirm there is no local module named `call`.
from subprocess import call
import time

print('bla bla bla')
time.sleep(1)
# Clear the terminal screen.
call("clear")
print('bla2')
def init():
    """Start gpsd on the USB GPS receiver with the default control socket."""
    call("gpsd /dev/ttyUSB0 -F /var/run/gpsd.sock", shell=True)
yield from self.sender.sendMessage('Done') response = yield from bot.sendPhoto(chat_id, f) # Let admin know :) #pprint(response) if (chat_id != admin_chat_id): yield from bot.sendPhoto(admin_chat_id, response['photo'][-1]['file_id']) self._count = 0 # Do some simple stuff for every message, to be paired with per_message() def simple_function(seed_tuple): bot, msg, id = seed_tuple content_type, chat_type, chat_id = telepot.glance(msg) if (chat_id != admin_chat_id): yield from bot.sendMessage(admin_chat_id,'Request from @'+msg['from']['username']) yield from bot.forwardMessage(admin_chat_id,chat_id, msg['message_id']) TOKEN = sys.argv[1] # get token from command-line bot = telepot.async.DelegatorBot(TOKEN, [ (per_chat_id(), create_open(MessageCounter, timeout=10)), (per_message(), call(simple_function)), ]) loop = asyncio.get_event_loop() loop.create_task(bot.message_loop()) print('Listening ...') loop.run_forever()