Example #1
    def test_check_destroy(self):
        # Only the first and last node in the circuit may check a destroy message
        with self.assertRaises(StopIteration):
            next(self.tunnel_community.check_destroy([]))
        sock_addr = ("127.0.0.1", 1234)

        meta = self.tunnel_community.get_meta_message(u"destroy")
        msg1 = meta.impl(authentication=(self.member, ),
                         distribution=(self.tunnel_community.global_time, ),
                         candidate=Candidate(sock_addr, False),
                         payload=(42, 43))
        msg2 = meta.impl(authentication=(self.member, ),
                         distribution=(self.tunnel_community.global_time, ),
                         candidate=Candidate(sock_addr, False),
                         payload=(43, 44))

        self.tunnel_community.exit_sockets[42] = TunnelExitSocket(
            42, self.tunnel_community, sock_addr=("128.0.0.1", 1234))

        for i in self.tunnel_community.check_destroy([msg1]):
            self.assertIsInstance(i, DropMessage)

        self.tunnel_community.exit_sockets = {}
        circuit = Circuit(42L, first_hop=("128.0.0.1", 1234))
        self.tunnel_community.circuits[42] = circuit

        for i in self.tunnel_community.check_destroy([msg1, msg2]):
            self.assertIsInstance(i, DropMessage)

        self.tunnel_community.relay_from_to[42] = RelayRoute(42, sock_addr)
        for i in self.tunnel_community.check_destroy([msg1]):
            self.assertIsInstance(i, type(msg1))
Example #2
    def on_created_e2e(self, messages):
        for message in messages:
            cache = self.request_cache.pop(u"e2e-request", message.payload.identifier)
            shared_secret = self.crypto.verify_and_generate_shared_secret(cache.hop.dh_secret,
                                                                          message.payload.key,
                                                                          message.payload.auth,
                                                                          cache.hop.public_key.key.pk)
            session_keys = self.crypto.generate_session_keys(shared_secret)

            _, decoded = decode(self.crypto.decrypt_str(message.payload.rp_sock_addr,
                                                        session_keys[EXIT_NODE],
                                                        session_keys[EXIT_NODE_SALT]))
            rp_info, cookie = decoded

            if self.notifier:
                self.notifier.notify(NTFY_TUNNEL, NTFY_ONCREATED_E2E, cache.info_hash.encode('hex')[:6], rp_info)

            # Since it is the seeder that chose the rendezvous_point, we're essentially losing 1 hop of anonymity
            # at the downloader end. To compensate we add an extra hop.
            required_exit = Candidate(rp_info[:2], False)
            required_exit.associate(self.get_member(public_key=rp_info[2]))
            self.create_circuit(self.hops[cache.info_hash] + 1,
                                CIRCUIT_TYPE_RENDEZVOUS,
                                callback=lambda circuit, cookie=cookie, session_keys=session_keys,
                                info_hash=cache.info_hash, sock_addr=cache.sock_addr: self.create_link_e2e(circuit,
                                                                                                           cookie,
                                                                                                           session_keys,
                                                                                                           info_hash,
                                                                                                           sock_addr),
                                required_exit=required_exit,
                                info_hash=cache.info_hash)
Example #3
    def setup_peer(self):
        """
        Setup a second peer that contains some search results.
        """
        self.setUpPreSession()

        self.config2 = self.config.copy()
        self.config2.set_state_dir(self.getStateDir(2))

        self.session2 = Session(self.config2)

        yield self.session2.start()
        self.dispersy2 = self.session2.get_dispersy_instance()

        @inlineCallbacks
        def unload_communities():
            for community in self.dispersy.get_communities():
                if isinstance(community, SearchCommunity) or isinstance(community, AllChannelCommunity):
                    yield community.unload_community()

            for community in self.dispersy2.get_communities():
                if isinstance(community, SearchCommunity) or isinstance(community, AllChannelCommunity):
                    yield community.unload_community()

        def load_communities():
            self.search_community = \
            self.dispersy.define_auto_load(SearchCommunityTests, self.session.dispersy_member, load=True,
                                           kargs={'tribler_session': self.session})[0]
            self.dispersy2.define_auto_load(SearchCommunityTests, self.session2.dispersy_member, load=True,
                                            kargs={'tribler_session': self.session2})

            self.allchannel_community = \
            self.dispersy.define_auto_load(AllChannelCommunityTests, self.session.dispersy_member, load=True,
                                           kargs={'tribler_session': self.session})[0]
            self.dispersy2.define_auto_load(AllChannelCommunityTests, self.session2.dispersy_member, load=True,
                                            kargs={'tribler_session': self.session2})

        yield unload_communities()
        load_communities()

        self.search_community.add_discovered_candidate(Candidate(self.dispersy2.lan_address, tunnel=False))
        self.allchannel_community.add_discovered_candidate(Candidate(self.dispersy2.lan_address, tunnel=False))

        # Add some content to second session
        torrent_db_handler = self.session2.open_dbhandler(NTFY_TORRENTS)
        torrent_db_handler.addExternalTorrentNoDef(str(unichr(97)) * 20, 'test test', [('Test.txt', 1337)], [], 1337)
        torrent_db_handler.updateTorrent(str(unichr(97)) * 20, is_collected=1)

        channel_db_handler = self.session2.open_dbhandler(NTFY_CHANNELCAST)
        channel_db_handler.on_channel_from_dispersy('f' * 20, 42, "test", "channel for unit tests")
        torrent_list = [
            [1, 1, 1, ('a' * 40).decode('hex'), 1460000000, "ubuntu-torrent.iso", [['file1.txt', 42]], []]
        ]
        channel_db_handler.on_torrents_from_dispersy(torrent_list)

        # We also need to add the channel to the database of the session initiating the search
        channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST)
        channel_db_handler.on_channel_from_dispersy('f' * 20, 42, "test", "channel for unit tests")
Example #4
    def test_e2e_transaction(self):
        """
        Test whether a full transaction is executed between two nodes.
        """
        bid_session = yield self.create_session(1)
        test_deferred = Deferred()

        ask_community = self.market_communities[self.session]
        bid_community = self.market_communities[bid_session]

        @inlineCallbacks
        def on_tx_done(_):
            self.assertEqual(ask_community.wallets['DUM1'].balance, 1100)
            self.assertEqual(bid_community.wallets['DUM1'].balance, 900)

            balance_ask = yield ask_community.wallets['MC'].get_balance()
            balance_bid = yield bid_community.wallets['MC'].get_balance()
            self.assertEqual(balance_ask['available'], -10)
            self.assertEqual(balance_bid['available'], 10)

            # Verify whether everything is cleaned up correctly
            order_ask = ask_community.order_manager.order_repository.find_all()[0]
            order_bid = bid_community.order_manager.order_repository.find_all()[0]
            self.assertEqual(order_ask.reserved_quantity, Quantity(0, 'MC'))
            self.assertEqual(order_ask.traded_quantity, Quantity(10, 'MC'))
            self.assertEqual(order_bid.reserved_quantity, Quantity(0, 'MC'))
            self.assertEqual(order_bid.traded_quantity, Quantity(10, 'MC'))
            self.assertEqual(len(order_ask.reserved_ticks.keys()), 0)
            self.assertEqual(len(order_bid.reserved_ticks.keys()), 0)
            self.assertEqual(len(ask_community.order_book.asks), 0)
            self.assertEqual(len(ask_community.order_book.bids), 0)
            self.assertEqual(len(bid_community.order_book.asks), 0)
            self.assertEqual(len(bid_community.order_book.bids), 0)

        ask_community.add_discovered_candidate(
            Candidate(bid_session.get_dispersy_instance().lan_address,
                      tunnel=False))
        bid_community.add_discovered_candidate(
            Candidate(self.session.get_dispersy_instance().lan_address,
                      tunnel=False))
        yield self.async_sleep(10)
        bid_community.create_bid(10, 'DUM1', 10, 'MC', 3600)
        ask_community.create_ask(10, 'DUM1', 10, 'MC', 3600)

        ask_community.test_deferred.chainDeferred(test_deferred)
        bid_community.test_deferred.chainDeferred(test_deferred)
        yield test_deferred.addCallback(on_tx_done)
Example #5
        def start_community():
            if self.crawl_keypair_filename:
                keypair = read_keypair(self.crawl_keypair_filename)
                member = self.dispersy.get_member(
                    private_key=self.dispersy.crypto.key_to_bin(keypair))
                cls = TunnelCommunityCrawler
            else:
                if self.settings.enable_multichain:
                    from Tribler.community.multichain.community import MultiChainCommunity
                    member = self.dispersy.get_member(
                        private_key=self.session.multichain_keypair.key_to_bin())
                    self.dispersy.define_auto_load(MultiChainCommunity,
                                                   member,
                                                   load=True)
                else:
                    member = self.dispersy.get_new_member(u"curve25519")
                cls = HiddenTunnelCommunity

            self.community = self.dispersy.define_auto_load(
                cls, member, (self.session, self.settings), load=True)[0]

            self.session.set_anon_proxy_settings(
                2, ("127.0.0.1",
                    self.session.get_tunnel_community_socks5_listen_ports()))
            if introduce_port:
                self.community.add_discovered_candidate(
                    Candidate(('127.0.0.1', introduce_port), tunnel=False))
Example #6
    def test_load_other_tunnel_community(self):
        """
        Testing whether we do not load two different tunnel communities in the same session
        """

        # Load/unload this community so we have a classification
        dispersy = self.session.lm.dispersy
        master_member = DummyTunnelCommunity.get_master_members(dispersy)[0]
        keypair = self.session.multichain_keypair
        dispersy_member = dispersy.get_member(private_key=keypair.key_to_bin())
        community = DummyTunnelCommunity.init_community(
            dispersy,
            master_member,
            dispersy_member,
            tribler_session=self.session,
            settings=TunnelSettings())
        yield community.unload_community()

        some_candidate = Candidate(("1.2.3.4", 1234), False)
        some_packet = self.create_valid_packet(community)
        dispersy.on_incoming_packets([
            (some_candidate, some_packet),
        ])

        tunnel_communities = 0
        for community in dispersy.get_communities():
            if isinstance(community, HiddenTunnelCommunity):
                tunnel_communities += 1

        # We should only have one tunnel community, not multiple
        self.assertEqual(tunnel_communities, 1)
Example #7
    def test_unload_receive(self):
        """
        Testing whether the TunnelCommunity does not reload itself after unloading
        """
        tunnel_community = self.session.lm.tunnel_community
        dispersy = self.session.lm.dispersy

        some_candidate = Candidate(("1.2.3.4", 1234), False)
        some_packet = self.create_valid_packet(tunnel_community)

        dispersy.on_incoming_packets([
            (some_candidate, some_packet),
        ])

        # We should have a functional TunnelCommunity
        self.assertIn(tunnel_community, dispersy.get_communities())

        yield tunnel_community.unload_community()

        # We no longer have a functional TunnelCommunity
        self.assertNotIn(tunnel_community, dispersy.get_communities())

        # There should be no TunnelCommunity classes loaded
        for community in dispersy.get_communities():
            self.assertNotIsInstance(community, HiddenTunnelCommunity)

        dispersy.on_incoming_packets([
            (some_candidate, some_packet),
        ])

        # The TunnelCommunity should not reload itself
        for community in dispersy.get_communities():
            self.assertNotIsInstance(community, HiddenTunnelCommunity)
Example #8
    def test_orderbook_sync(self):
        """
        Test whether the order books of two nodes are synchronized
        """
        def check_orderbook_size():
            if len(ask_community.order_book.bids) == 1 and len(
                    bid_community.order_book.asks) == 1:
                check_lc.stop()
                test_deferred.callback(None)

        test_deferred = Deferred()
        bid_session = yield self.create_session(1)
        ask_community = self.market_communities[self.session]
        bid_community = self.market_communities[bid_session]

        ask_community.create_ask(10, 'DUM1', 2, 'DUM2', 3600)
        bid_community.create_bid(1, 'DUM1', 2, 'DUM2',
                                 3600)  # Does not match the ask

        ask_community.add_discovered_candidate(
            Candidate(bid_session.get_dispersy_instance().lan_address,
                      tunnel=False))
        check_lc = LoopingCall(check_orderbook_size)
        check_lc.start(0.2)

        yield test_deferred
Example #9
 def _start_download():
     candidate = Candidate(("127.0.0.1", self.session1_port), False)
     self.session2.lm.rtorrent_handler.download_torrent(
         candidate, self.infohashes[0])
     self.session2.lm.rtorrent_handler.download_torrent(
         candidate,
         self.infohashes[1],
         user_callback=do_check_download)
Example #10
        def setup_proxies():
            tunnel_communities = []
            baseindex = 3
            for i in range(baseindex, baseindex + nr_relays):  # Normal relays
                tunnel_communities.append(
                    create_proxy(i, False, crypto_enabled))

            baseindex += nr_relays + 1
            for i in range(baseindex, baseindex + nr_exitnodes):  # Exit nodes
                tunnel_communities.append(create_proxy(i, True,
                                                       crypto_enabled))

            if bypass_dht:
                # Replace pymdht with a fake one
                class FakeDHT(object):
                    def __init__(self, dht_dict, mainline_dht):
                        self.dht_dict = dht_dict
                        self.mainline_dht = mainline_dht

                    def get_peers(self, lookup_id, _, callback_f, bt_port=0):
                        if bt_port != 0:
                            self.dht_dict[lookup_id] = self.dht_dict.get(
                                lookup_id, []) + [('127.0.0.1', bt_port)]
                        callback_f(lookup_id,
                                   self.dht_dict.get(lookup_id, None), None)

                    def stop(self):
                        self.mainline_dht.stop()

                dht_dict = {}
                for session in self.sessions + [self.session]:
                    session.lm.mainline_dht = FakeDHT(dht_dict,
                                                      session.lm.mainline_dht)

            # Connect the proxies to the Tribler instance
            for community in self.lm.dispersy.get_communities():
                if isinstance(community, HiddenTunnelCommunity):
                    self._logger.debug(
                        "Hidden tunnel community appended to the list")
                    tunnel_communities.append(community)

            candidates = []
            for session in self.sessions:
                self._logger.debug(
                    "Appending candidate from this session to the list")
                dispersy = session.get_dispersy_instance()
                candidates.append(Candidate(dispersy.lan_address,
                                            tunnel=False))

            for community in tunnel_communities:
                for candidate in candidates:
                    self._logger.debug(
                        "Add appended candidate as discovered candidate to this community"
                    )
                    # We are letting dispersy deal with adding the community's candidate to itself.
                    community.add_discovered_candidate(candidate)

            callback(tunnel_communities)
Example #11
 def test_schedule_block_invalid_candidate(self):
     """
     Test the schedule_block function with an invalid candidate to cover all branches
     """
     # Arrange
     [node] = self.create_nodes(1)
     candidate = Candidate(("127.0.0.1", 10), False)
     # Act
     node.call(node.community.schedule_block, candidate, 0, 0)
Example #12
    def test_check_pong(self):
        circuit = Circuit(42L)
        ping_request_cache = PingRequestCache(self.tunnel_community, circuit)
        ping_num = self.tunnel_community.request_cache.add(ping_request_cache).number
        meta = self.tunnel_community.get_meta_message(u"ping")
        msg1 = meta.impl(distribution=(self.tunnel_community.global_time,),
                         candidate=Candidate(("127.0.0.1", 1234), False), payload=(42, ping_num - 1))
        msg2 = meta.impl(distribution=(self.tunnel_community.global_time,),
                         candidate=Candidate(("127.0.0.1", 1234), False), payload=(42, ping_num))

        # Invalid ping identifier
        for i in self.tunnel_community.check_pong([msg1]):
            self.assertIsInstance(i, DropMessage)

        for i in self.tunnel_community.check_pong([msg2]):
            self.assertIsInstance(i, type(msg2))

        self.tunnel_community.request_cache.pop(u"ping", ping_num)
Example #13
        def callback(circuit):
            # We got a circuit, now let's create an introduction point
            circuit_id = circuit.circuit_id
            self.my_intro_points[circuit_id].append(info_hash)

            cache = self.request_cache.add(IPRequestCache(self, circuit))
            self.send_cell([Candidate(circuit.first_hop, False)],
                           u'establish-intro', (circuit_id, cache.number, info_hash))
            self.tunnel_logger.info("Established introduction tunnel %s", circuit_id)
            if self.notifier:
                self.notifier.notify(NTFY_TUNNEL, NTFY_IP_CREATED, info_hash.encode('hex')[:6], circuit_id)
Example #14
        def callback(circuit):
            # We got a circuit, now let's create a rendezvous point
            circuit_id = circuit.circuit_id
            rp = RendezvousPoint(circuit, os.urandom(20), finished_callback)

            cache = self.request_cache.add(RPRequestCache(self, rp))
            if self.notifier:
                self.notifier.notify(NTFY_TUNNEL, NTFY_IP_CREATED, info_hash.encode('hex')[:6], circuit_id)

            self.send_cell([Candidate(circuit.first_hop, False)],
                           u'establish-rendezvous', (circuit_id, cache.number, rp.cookie))
Example #15
 def get_offer_sync(self, tick):
     meta = self.market_community.get_meta_message(u"offer-sync")
     candidate = Candidate(
         self.market_community.lookup_ip(TraderId(
             self.market_community.mid)), False)
     return meta.impl(
         authentication=(self.market_community.my_member, ),
         distribution=(self.market_community.claim_global_time(), ),
         destination=(candidate, ),
         payload=tick.to_network() + (Ttl(1), ) + ("127.0.0.1", 1234) +
         (isinstance(tick, Ask), ))
Example #16
    def create_link_e2e(self, circuit, cookie, session_keys, info_hash,
                        sock_addr):
        self.my_download_points[circuit.circuit_id] = (info_hash,
                                                       circuit.goal_hops,
                                                       sock_addr)
        circuit.hs_session_keys = session_keys

        cache = self.request_cache.add(
            LinkRequestCache(self, circuit, info_hash))
        self.send_cell([Candidate(circuit.first_hop, False)], u'link-e2e',
                       (circuit.circuit_id, cache.number, cookie))
Example #17
        def do_start_download():
            self.setup_metadata_downloader()

            timeout = 10

            candidate = Candidate(("127.0.0.1", self.session1_port), False)
            self.session2.lm.rtorrent_handler.download_metadata(
                candidate, self.thumb_hash, usercallback=do_check_download)
            self.CallConditional(
                timeout, self.download_event.is_set, do_check_download,
                u"Failed to download metadata within %s seconds" % timeout)
Example #18
 def get_half_block_message(self, block):
     """
     Create a message from a given block
     """
     candidate = Candidate(self.dispersy.lan_address, False)
     meta = self.market_community.get_meta_message(HALF_BLOCK)
     message = meta.impl(
         distribution=(self.market_community.claim_global_time(), ),
         destination=(candidate, ),
         payload=(block, ))
     return message
Example #19
 def get_proposed_trade_msg(self):
     destination, payload = self.proposed_trade.to_network()
     payload += ("127.0.0.1", 1234)
     candidate = Candidate(self.market_community.lookup_ip(destination),
                           False)
     meta = self.market_community.get_meta_message(u"proposed-trade")
     message = meta.impl(
         authentication=(self.market_community.my_member, ),
         distribution=(self.market_community.claim_global_time(), ),
         destination=(candidate, ),
         payload=payload)
     return message
Example #20
    def _send_packet(self, session, packet):
        packet_buff = encode_packet(packet)
        extra_msg = u" block_number = %s" % packet['block_number'] if packet.get('block_number') is not None else ""
        extra_msg += u" block_size = %s" % len(packet['data']) if packet.get('data') is not None else ""

        self._logger.debug(u"SEND OP[%s] -> %s:%s %s",
                           packet['opcode'], session.address[0], session.address[1], extra_msg)
        self._endpoint.send_packet(Candidate(session.address, False), packet_buff, prefix=self._prefix)

        # update information
        session.last_contact_time = time()
        session.last_sent_packet = packet
Example #21
    def take_step(self):
        if not self._live_edges_enabled:
            return super(TrustChainCommunity, self).take_step()

        now = time()
        self._logger.debug(
            "previous sync was %.1f seconds ago",
            now - self._last_sync_time if self._last_sync_time else -1)

        if not self._live_edge or len(
                self._live_edge) == 5 or self._live_edge_next is None:
            self._live_edge_id += 1

            # New live edges always start with our member
            my_candidate = Candidate(
                ("127.0.0.1", self.dispersy.endpoint.get_address()[1]), False)
            my_candidate.associate(self.my_member)
            self._live_edge = [my_candidate]

            # Callback our live edge handler
            if self._live_edge_cb:
                self._live_edge_cb(self._live_edge_id, self._live_edge)

        if self._live_edge_next:
            candidate = self._live_edge_next
            self._live_edge_next = None
        else:
            candidate = self.dispersy_get_walk_candidate()

        if candidate:
            self._logger.debug("%s %s taking step towards %s",
                               self.cid.encode("HEX"),
                               self.get_classification(), candidate)
            self.create_introduction_request(
                candidate, self.dispersy_enable_bloom_filter_sync)
        else:
            self._logger.debug("%s %s no candidate to take step",
                               self.cid.encode("HEX"),
                               self.get_classification())
        self._last_sync_time = time()
Example #22
    def do_dht_lookup(self, info_hash):
        # Select a circuit from the pool of exit circuits
        self.tunnel_logger.info("Do DHT request: select circuit")
        circuit = self.selection_strategy.select(None, self.hops[info_hash])
        if not circuit:
            self.tunnel_logger.info("No circuit for dht-request")
            return False

        # Send a dht-request message over this circuit
        self.tunnel_logger.info("Do DHT request: send dht request")
        self.last_dht_lookup[info_hash] = time.time()
        cache = self.request_cache.add(
            DHTRequestCache(self, circuit, info_hash))
        self.send_cell([Candidate(circuit.first_hop, False)], u"dht-request",
                       (circuit.circuit_id, cache.number, info_hash))
Example #23
    def tunnel_data(self, destination, payload):
        """
        Convenience method to tunnel data over this circuit
        @param (str, int) destination: the destination of the packet
        @param str payload: the packet's payload
        """

        self._logger.info("Tunnel data (len %d) to end for circuit %s with ultimate destination %s", len(payload),
                           self.circuit_id, destination)

        num_bytes = self.proxy.send_data([Candidate(self.first_hop, False)], self.circuit_id, destination, ('0.0.0.0', 0), payload)
        self.proxy.increase_bytes_sent(self, num_bytes)

        if num_bytes == 0:
            self._logger.warning("Should send %d bytes over circuit %s, zero bytes were sent",
                                 len(payload), self.circuit_id)
Example #24
    def on_key_response(self, messages):
        for message in messages:
            if not message.source.startswith(u"circuit_"):
                cache = self.request_cache.pop(u"key-request",
                                               message.payload.identifier)
                self.tunnel_logger.info(
                    'On key response: forward message because received over socket'
                )
                meta = self.get_meta_message(u'key-response')
                relay_message = meta.impl(distribution=(self.global_time, ),
                                          payload=(cache.identifier,
                                                   message.payload.public_key,
                                                   message.payload.pex_peers))
                self.send_packet([Candidate(cache.return_sock_addr, False)],
                                 u"key-response",
                                 TUNNEL_PREFIX + relay_message.packet)
            else:
                # pop key-request cache and notify gui
                self.tunnel_logger.info("On key response: received keys")
                cache = self.request_cache.pop(u"key-request",
                                               message.payload.identifier)
                _, pex_peers = decode(message.payload.pex_peers)
                if self.notifier:
                    self.notifier.notify(NTFY_TUNNEL, NTFY_KEY_RESPONSE,
                                         cache.info_hash.encode('hex')[:6],
                                         cache.circuit.circuit_id)

                # Cache this peer and key for pex via key-response
                self.tunnel_logger.info("Added key to peer exchange cache")
                self.infohash_pex[cache.info_hash].add(
                    (cache.sock_addr, message.payload.public_key))

                # Add received pex_peers to own list of known peers for this infohash
                for pex_peer in pex_peers:
                    pex_peer_sock, pex_peer_key = pex_peer
                    self.infohash_pex[cache.info_hash].add(
                        (pex_peer_sock, pex_peer_key))

                # Initiate end-to-end circuits for all known peers in the pex list
                for peer in self.infohash_pex[cache.info_hash]:
                    peer_sock, peer_key = peer
                    if cache.info_hash not in self.infohash_ip_circuits:
                        self.tunnel_logger.info(
                            "Create end-to-end on pex_peer %s" %
                            repr(peer_sock))
                        self.create_e2e(cache.circuit, peer_sock,
                                        cache.info_hash, peer_key)
Example #25
    def test_torrent_collecting(self):
        infohash3, roothash3 = self._create_and_save_torrent(
            self.session, 'file2.wmv', False)

        from Tribler.dispersy.candidate import Candidate
        candidate = Candidate(
            ("127.0.0.1", self.session.get_swift_tunnel_listen_port()), True)

        event = threading.Event()
        starttime = time.time()
        self.session2.lm.rtorrent_handler.download_torrent(candidate,
                                                           infohash3,
                                                           roothash3,
                                                           lambda: event.set(),
                                                           prio=1,
                                                           timeout=60)
        assert event.wait(60)
        print >> sys.stderr, "took", time.time() - starttime
Example #26
    def tunnel_data(self, destination, payload):
        """
        Convenience method to tunnel data over this circuit
        @param (str, int) destination: the destination of the packet
        @param str payload: the packet's payload
        @return bool: whether the tunnel request has succeeded; this is in no
         way an acknowledgement of delivery!
        """

        self._logger.info(
            "Tunnel data (len %d) to end for circuit %s with ultimate destination %s",
            len(payload), self.circuit_id, destination)

        num_bytes = self.proxy.send_data([Candidate(self.first_hop, False)],
                                         self.circuit_id, destination,
                                         ('0.0.0.0', 0), payload)
        self.proxy.increase_bytes_sent(self, num_bytes)

        return num_bytes > 0
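
The docstring above stresses that the returned bool only says whether the request was handed to the proxy, not whether the data was delivered. A minimal caller sketch under that reading (the circuit, destination and payload values below are illustrative, not part of the original snippet):

    # Hypothetical caller: `circuit` is some Circuit instance backed by a live proxy.
    destination = ("203.0.113.5", 6881)  # assumed ultimate destination
    payload = "\x00" * 64                # assumed packet payload
    if not circuit.tunnel_data(destination, payload):
        # Zero bytes were handed to the proxy; retrying or selecting another
        # circuit is up to the caller, since there is no delivery acknowledgement.
        pass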
Example #27
    def get_start_transaction_msg(self):
        transaction = self.market_community.transaction_manager.create_from_proposed_trade(
            self.proposed_trade, 'abcd')
        start_transaction = StartTransaction(
            self.market_community.message_repository.next_identity(),
            transaction.transaction_id, transaction.order_id,
            self.proposed_trade.order_id, self.proposed_trade.proposal_id,
            self.proposed_trade.price, self.proposed_trade.quantity,
            Timestamp.now())

        meta = self.market_community.get_meta_message(u"start-transaction")
        candidate = Candidate(
            self.market_community.lookup_ip(TraderId(
                self.market_community.mid)), False)
        return meta.impl(
            authentication=(self.market_community.my_member, ),
            distribution=(self.market_community.claim_global_time(), ),
            destination=(candidate, ),
            payload=start_transaction.to_network())
Example #28
    def setup_nodes(self, num_relays=1, num_exitnodes=1, seed_hops=0):
        """
        Setup all required nodes, including the relays, exit nodes and seeder.
        """
        assert isInIOThread()
        baseindex = 3
        for i in xrange(baseindex, baseindex + num_relays):  # Normal relays
            proxy = yield self.create_proxy(i)
            self.tunnel_communities.append(proxy)

        baseindex += num_relays + 1
        for i in xrange(baseindex, baseindex + num_exitnodes):  # Exit nodes
            proxy = yield self.create_proxy(i, exitnode=True)
            self.tunnel_communities.append(proxy)

        # Setup the seeder session
        self.setup_tunnel_seeder(seed_hops)

        # Add the tunnel community of the downloader session
        self.tunnel_communities.append(self.tunnel_community)

        # Connect the candidates with each other in all available tunnel communities
        candidates = []
        for session in self.sessions:
            self._logger.debug(
                "Appending candidate from this session to the list")
            candidates.append(
                Candidate(session.get_dispersy_instance().lan_address,
                          tunnel=False))

        communities_to_inject = self.tunnel_communities
        if self.tunnel_community_seeder is not None:
            communities_to_inject.append(self.tunnel_community_seeder)

        for community in communities_to_inject:
            for candidate in candidates:
                self._logger.debug(
                    "Add appended candidate as discovered candidate to this community"
                )
                # We are letting dispersy deal with adding the community's candidate to itself.
                community.add_discovered_candidate(candidate)
Example #29
    def lineReceived(self, line):
        anon_tunnel = self.anon_tunnel

        if line == 'threads':
            for thread in threading.enumerate():
                logger.debug("%s \t %d", thread.name, thread.ident)
        elif line == 'c':
            logger.debug(
                "========\nCircuits\n========\nid\taddress\t\t\t\t\tgoal\thops\tIN (MB)\tOUT (MB)\tinfohash\ttype"
            )
            for circuit_id, circuit in anon_tunnel.community.circuits.items():
                info_hash = circuit.info_hash.encode(
                    'hex')[:10] if circuit.info_hash else '?'
                logger.debug(
                    "%d\t%s:%d\t%d\t%d\t\t%.2f\t\t%.2f\t\t%s\t%s",
                    circuit_id, circuit.first_hop[0], circuit.first_hop[1],
                    circuit.goal_hops, len(circuit.hops),
                    circuit.bytes_down / 1024.0 / 1024.0,
                    circuit.bytes_up / 1024.0 / 1024.0, info_hash,
                    circuit.ctype)

        elif line.startswith('s'):
            cur_path = os.getcwd()
            line_split = line.split(' ')
            filename = 'test_file' if len(line_split) == 1 else line_split[1]

            if not os.path.exists(filename):
                logger.info("Creating torrent..")
                with open(filename, 'wb') as fp:
                    fp.write(os.urandom(50 * 1024 * 1024))
                tdef = TorrentDef()
                tdef.add_content(os.path.join(cur_path, filename))
                tdef.set_tracker("udp://localhost/announce")
                tdef.set_private()
                tdef.finalize()
                tdef.save(os.path.join(cur_path, filename + '.torrent'))
            else:
                logger.info("Loading existing torrent..")
                tdef = TorrentDef.load(filename + '.torrent')
            logger.info("loading torrent done, infohash of torrent: %s" %
                        (tdef.get_infohash().encode('hex')[:10]))

            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()
            dscfg.set_hops(1)
            dscfg.set_dest_dir(cur_path)

            reactor.callFromThread(
                anon_tunnel.session.start_download_from_tdef, tdef, dscfg)
        elif line.startswith('i'):
            # Introduce dispersy port from other main peer to this peer
            line_split = line.split(' ')
            to_introduce_ip = line_split[1]
            to_introduce_port = int(line_split[2])
            self.anon_tunnel.community.add_discovered_candidate(
                Candidate((to_introduce_ip, to_introduce_port), tunnel=False))
        elif line.startswith('d'):
            line_split = line.split(' ')
            filename = 'test_file' if len(line_split) == 1 else line_split[1]

            logger.info("Loading torrent..")
            tdef = TorrentDef.load(filename + '.torrent')
            logger.info("Loading torrent done")

            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()
            dscfg.set_hops(1)
            dscfg.set_dest_dir(
                os.path.join(
                    os.getcwd(),
                    'downloader%s' % anon_tunnel.session.get_dispersy_port()))

            def start_download():
                def cb(ds):
                    logger.info(
                        'Download infohash=%s, down=%s, progress=%s, status=%s, seedpeers=%s, candidates=%d'
                        % (tdef.get_infohash().encode('hex')[:10],
                           ds.get_current_speed('down'), ds.get_progress(),
                           dlstatus_strings[ds.get_status()],
                           sum(ds.get_num_seeds_peers()),
                           sum(1 for _ in anon_tunnel.community.
                               dispersy_yield_verified_candidates())))
                    return 1.0, False

                download = anon_tunnel.session.start_download_from_tdef(
                    tdef, dscfg)
                download.set_state_callback(cb)

            reactor.callFromThread(start_download)

        elif line == 'q':
            anon_tunnel.should_run = False

        elif line == 'r':
            logger.debug("circuit\t\t\tdirection\tcircuit\t\t\tTraffic (MB)")
            from_to = anon_tunnel.community.relay_from_to
            for key in from_to.keys():
                relay = from_to[key]
                logger.info("%s-->\t%s\t\t%.2f" % (
                    (key[0], key[1]),
                    (relay.sock_addr, relay.circuit_id),
                    relay.bytes[1] / 1024.0 / 1024.0,
                ))
Example #30
    def update_book_from_address(self,
                                 swift_address,
                                 timestamp,
                                 bytes_up,
                                 bytes_down,
                                 delayed=True):
        """
        Updates the book associated with SWIFT_ADDRESS.

        When we do not yet know the book associated with SWIFT_ADDRESS, we will attempt to
        retrieve this information; the update will only occur when this is successful.
        """
        assert self._dispersy.callback.is_current_thread, "Must be called on the dispersy.callback thread"

        def _update(member):
            if member:
                book = self.get_book(member)
                book.cycle = max(book.cycle, int(timestamp / CYCLE_SIZE))
                book.upload += bytes_up
                book.download += bytes_down
                logger.debug("update book for %s +%d -%d",
                             member.mid.encode("HEX"), book.upload,
                             book.download)

                # associated_{up,down} is from our viewpoint while bytes_{up,down} is from the other
                # peers' viewpoint
                self._associated_up += bytes_down
                self._associated_down += bytes_up
                return True
            return False

        def _delayed_update(response):
            member = response.authentication.member
            logger.debug(
                "retrieved member %s from swift address %s:%d [id:%d]",
                member.mid.encode("HEX"), swift_address[0], swift_address[1],
                identifier)
            association = self._address_association.setdefault(
                swift_address, Association())
            association.member = member
            if len(self._address_association) > self._address_association_length:
                self._address_association.popitem(False)
            return _update(response.authentication.member)

        # total_{up,down} is from our viewpoint while bytes_{up,down} is from the other peers'
        # viewpoint
        self._total_up += bytes_down
        self._total_down += bytes_up

        association = self._address_association.setdefault(
            swift_address, Association())
        if association.member:
            _update(association.member)

        elif delayed and association.retrieve():
            # we do not have the member associated to the address, we will attempt to retrieve it
            cache = MemberRequestCache(_delayed_update)
            identifier = self._dispersy.request_cache.claim(cache)
            meta = self._meta_messages[u"member-request"]
            request = meta.impl(
                distribution=(self.global_time, ),
                destination=(Candidate(swift_address,
                                       True), ),  # assume tunnel=True
                payload=(identifier, ))
            logger.debug(
                "trying to obtain member from swift address %s:%d [id:%d]",
                swift_address[0], swift_address[1], identifier)
            self._dispersy.store_update_forward([request], False, False, True)
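
The docstring of Example #30 explains that the book update happens immediately when the member behind SWIFT_ADDRESS is already associated, and is otherwise deferred until a member-request round trip completes. A minimal caller sketch, with the address and byte counts invented for illustration (community stands for an instance of the class defining this method):

    from time import time

    # Hypothetical caller; per the assert above, this must run on the dispersy.callback thread.
    swift_address = ("203.0.113.7", 7758)  # illustrative swift endpoint
    community.update_book_from_address(swift_address,
                                       time(),  # timestamp of the transfer
                                       bytes_up=512 * 1024,
                                       bytes_down=2048 * 1024,
                                       delayed=True)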