async def asyncSetUp(self):
    """Set up a client stream manager plus a separate server blob manager.

    Creates 20 MB of random plaintext, writes it to a temp file, and builds a
    stream from it; all temp directories are removed via addCleanup.
    """
    self.loop = asyncio.get_event_loop()
    self.key = b'deadbeef' * 4
    # 20 MB of random plaintext used as the stream payload
    self.cleartext = os.urandom(20000000)

    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    self.conf = Config()
    self.storage = SQLiteStorage(self.conf, os.path.join(tmp_dir, "lbrynet.sqlite"))
    await self.storage.open()
    self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage, self.conf)
    # reuse self.conf instead of constructing a second, identical Config()
    # (the original passed a fresh Config() here, inconsistent with the rest
    # of this setup, which threads self.conf through storage/blob manager)
    self.stream_manager = StreamManager(self.loop, self.conf, self.blob_manager, None, self.storage, None)

    server_tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
    self.server_conf = Config()
    self.server_storage = SQLiteStorage(self.server_conf, os.path.join(server_tmp_dir, "lbrynet.sqlite"))
    await self.server_storage.open()
    self.server_blob_manager = BlobManager(self.loop, server_tmp_dir, self.server_storage, self.server_conf)

    download_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(download_dir))

    # create the stream from the plaintext file
    file_path = os.path.join(tmp_dir, "test_file")
    with open(file_path, 'wb') as f:
        f.write(self.cleartext)

    self.stream = await self.stream_manager.create_stream(file_path)
Example #2
0
async def get_logger(argv, **conf_options):
    """Yield a test logger configured the same way lbrynet startup would do it.

    Builds a temporary config (applying ``conf_options`` overrides), runs the
    normal argument-parsing/logging setup against it, yields the logger, and
    finally tears down every handler attached to the per-module child loggers.
    Must run in an async context because loggly logging needs an event loop.
    """
    logger = logging.getLogger('test-root-logger')
    temp_dir = tempfile.mkdtemp()
    temp_config = os.path.join(temp_dir, 'settings.yml')

    try:
        # write a config file that startup will load
        base_conf = Config.create_from_arguments(
            SimpleNamespace(config=temp_config))
        with base_conf.update_config():
            for option, value in conf_options.items():
                setattr(base_conf, option, value)

        # mimic what happens on startup
        argv += ['--data-dir', temp_dir]
        argv += ['--wallet-dir', temp_dir]
        argv += ['--config', temp_config]
        parser = cli.get_argument_parser()
        args, command_args = parser.parse_known_args(argv)
        conf: Config = Config.create_from_arguments(args)
        setup_logging(logger, args, conf)
        yield logger

    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)
        # detach and close every handler that setup_logging attached
        for module_name in cli.LOG_MODULES:
            child = logger.getChild(module_name)
            child.setLevel(logging.NOTSET)
            while child.handlers:
                handler = child.handlers[0]
                child.removeHandler(handler)
                handler.close()
Example #3
0
    async def asyncSetUp(self):
        """Start a blob server and a client blob manager, each in its own temp dir."""
        self.loop = asyncio.get_event_loop()

        self.client_dir = tempfile.mkdtemp()
        self.server_dir = tempfile.mkdtemp()
        for scratch_dir in (self.client_dir, self.server_dir):
            self.addCleanup(shutil.rmtree, scratch_dir)

        self.server_config = Config(
            data_dir=self.server_dir, download_dir=self.server_dir,
            wallet=self.server_dir, reflector_servers=[]
        )
        self.server_storage = SQLiteStorage(
            self.server_config, os.path.join(self.server_dir, "lbrynet.sqlite"))
        self.server_blob_manager = BlobManager(
            self.loop, self.server_dir, self.server_storage, self.server_config)
        self.server = BlobServer(
            self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')

        self.client_config = Config(
            data_dir=self.client_dir, download_dir=self.client_dir,
            wallet=self.client_dir, reflector_servers=[]
        )
        self.client_storage = SQLiteStorage(
            self.client_config, os.path.join(self.client_dir, "lbrynet.sqlite"))
        self.client_blob_manager = BlobManager(
            self.loop, self.client_dir, self.client_storage, self.client_config)
        self.client_peer_manager = PeerManager(self.loop)
        self.server_from_client = make_kademlia_peer(
            b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)

        await self.client_storage.open()
        await self.server_storage.open()
        await self.client_blob_manager.setup()
        await self.server_blob_manager.setup()

        self.server.start_server(33333, '127.0.0.1')
        self.addCleanup(self.server.stop_server)
        await self.server.started_listening.wait()
Example #4
0
def get_argument_parser():
    """Build the top-level ``lbrynet`` argument parser.

    Adds the ``start`` subcommand, one subparser group per API group, and one
    subparser per API command (a few common commands are listed first).

    Returns:
        The fully-populated root ArgumentParser.
    """
    root = ArgumentParser(
        'lbrynet',
        description='An interface to the LBRY Network.',
        allow_abbrev=False,
    )
    root.add_argument(
        '-v', '--version', dest='cli_version', action="store_true",
        help='Show lbrynet CLI version and exit.'
    )
    root.set_defaults(group=None, command=None)
    CLIConfig.contribute_to_argparse(root)

    subcommands = root.add_subparsers(metavar='COMMAND')
    start = subcommands.add_parser(
        'start',
        usage=
        'lbrynet start [--config FILE] [--data-dir DIR] [--wallet-dir DIR] [--download-dir DIR] ...',
        help='Start LBRY Network interface.')
    start.add_argument(
        '--quiet', dest='quiet', action="store_true",
        help='Disable all console output.')
    start.add_argument(
        '--verbose', nargs="*",
        help=('Enable debug output. Optionally specify loggers for which debug output '
              'should selectively be applied.'))
    Config.contribute_to_argparse(start)
    start.set_defaults(command='start', start_parser=start, doc=start.format_help())

    api = Daemon.get_api_definitions()
    groups = {}
    for group_name in sorted(api['groups']):
        group_parser = subcommands.add_parser(
            group_name, group_name=group_name, help=api['groups'][group_name])
        groups[group_name] = group_parser.add_subparsers(metavar='COMMAND')

    # a handful of common commands come first; everything else alphabetical
    ordered_commands = ['stop', 'get', 'publish', 'resolve']
    ordered_commands.extend(
        name for name in sorted(api['commands']) if name not in ordered_commands)

    for command_name in ordered_commands:
        command = api['commands'][command_name]
        target = subcommands if command['group'] is None else groups[command['group']]
        add_command_parser(target, command)

    return root
Example #5
0
    def setUp(self):
        """Create a test daemon whose best-blockhash lookup is a no-op coroutine."""
        async def _return_none():
            return None

        test_utils.reset_time(self)
        self.test_daemon = get_test_daemon(Config())
        # stub out the network call so tests never touch a wallet server
        self.test_daemon.wallet_manager.get_best_blockhash = _return_none
Example #6
0
    async def test_old_key_sort_sd_blob(self):
        """A known sd blob hashes to its sd_hash only under the old key sort.

        Writes a fixed sd-blob fixture into a fresh blob manager, parses it into
        a StreamDescriptor, and checks that the old-sort hash matches the known
        sd_hash while the current-sort hash does not.
        """
        loop = asyncio.get_event_loop()
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.conf = Config()
        storage = SQLiteStorage(self.conf, ":memory:")
        await storage.open()
        blob_manager = BlobManager(loop, tmp_dir, storage, self.conf)

        # fixture: raw sd-blob JSON whose key ordering matches the legacy sort
        sd_bytes = b'{"stream_name": "4f62616d6120446f6e6b65792d322e73746c", "blobs": [{"length": 1153488, "blob_num' \
                   b'": 0, "blob_hash": "9fa32a249ce3f2d4e46b78599800f368b72f2a7f22b81df443c7f6bdbef496bd61b4c0079c7' \
                   b'3d79c8bb9be9a6bf86592", "iv": "0bf348867244019c9e22196339016ea6"}, {"length": 0, "blob_num": 1,' \
                   b' "iv": "9f36abae16955463919b07ed530a3d18"}], "stream_type": "lbryfile", "key": "a03742b87628aa7' \
                   b'228e48f1dcd207e48", "suggested_file_name": "4f62616d6120446f6e6b65792d322e73746c", "stream_hash' \
                   b'": "b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e1' \
                   b'60207"}'
        # expected hashes for the fixture above
        sd_hash = '9313d1807551186126acc3662e74d9de29cede78d4f133349ace846273ef116b9bb86be86c54509eb84840e4b032f6b2'
        stream_hash = 'b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e160207'

        # write the fixture bytes into the blob and wait until it verifies
        blob = blob_manager.get_blob(sd_hash)
        blob.set_length(len(sd_bytes))
        writer = blob.get_blob_writer()
        writer.write(sd_bytes)
        await blob.verified.wait()
        descriptor = await StreamDescriptor.from_stream_descriptor_blob(
            loop, blob_manager.blob_dir, blob
        )
        self.assertEqual(stream_hash, descriptor.get_stream_hash())
        # only the legacy key sort reproduces the original sd_hash
        self.assertEqual(sd_hash, descriptor.calculate_old_sort_sd_hash())
        self.assertNotEqual(sd_hash, descriptor.calculate_sd_hash())
Example #7
0
 async def asyncSetUp(self):
     """Create an in-memory SQLite storage and a BlobManager on a throwaway dir."""
     self.blob_dir = tempfile.mkdtemp()
     self.addCleanup(shutil.rmtree, self.blob_dir)
     self.conf = Config()
     self.storage = SQLiteStorage(self.conf, ':memory:')
     self.blob_manager = BlobManager(
         asyncio.get_event_loop(), self.blob_dir, self.storage, self.conf)
     await self.storage.open()
Example #8
0
    async def asyncSetUp(self):
        """Extend base setup with a funded wallet, a blob server and a reflector."""
        await super().asyncSetUp()

        for module in ('blob_exchange', 'daemon', 'stream', 'wallet'):
            logging.getLogger(f'lbry.{module}').setLevel(self.VERBOSITY)

        self.daemon = await self.add_daemon(self.wallet_node)

        # fund the account with 10 coins and confirm
        await self.account.ensure_address_gap()
        addresses = await self.account.receiving.get_addresses(limit=1, only_usable=True)
        sendtxid = await self.blockchain.send_to_address(addresses[0], 10)
        await self.confirm_tx(sendtxid)
        await self.generate(5)

        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, server_tmp_dir)
        self.server_config = Config()
        self.server_config.transaction_cache_size = 10000
        self.server_storage = SQLiteStorage(self.server_config, ':memory:')
        await self.server_storage.open()

        self.server_blob_manager = BlobManager(
            self.loop, server_tmp_dir, self.server_storage, self.server_config)
        self.server = BlobServer(
            self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
        self.server.start_server(5567, '127.0.0.1')
        await self.server.started_listening.wait()

        self.reflector = ReflectorServer(self.server_blob_manager)
        self.reflector.start_server(5566, '127.0.0.1')
        await self.reflector.started_listening.wait()
        self.addCleanup(self.reflector.stop_server)
Example #9
0
    def test_init_with_wrong_overrides(self):
        """Overriding with something that is not a proper Component raises SyntaxError."""
        class BogusComponent:
            component_name = "someComponent"
            depends_on = []

        with self.assertRaises(SyntaxError):
            ComponentManager(Config(), randomComponent=BogusComponent)
Example #10
0
async def main(host: str, port: int, db_file_path: str,
               bootstrap_node: Optional[str], prometheus_port: int):
    """Run a standalone DHT node forever, logging routing/storage stats.

    Args:
        host: address to announce/listen on.
        port: UDP port for the DHT protocol.
        db_file_path: path of the SQLite database file.
        bootstrap_node: optional "host:port" seed; falls back to config nodes.
        prometheus_port: if > 0, expose metrics on this port.
    """
    loop = asyncio.get_event_loop()
    conf = Config()
    storage = SQLiteStorage(conf, db_file_path, loop, loop.time)

    if bootstrap_node:
        seed_host, seed_port = bootstrap_node.split(':')
        nodes = [(seed_host, int(seed_port))]
    else:
        nodes = conf.known_dht_nodes

    await storage.open()
    node = Node(
        loop, PeerManager(loop), generate_id(), port, port, 3333, None,
        storage=storage)
    if prometheus_port > 0:
        metrics = SimpleMetrics(prometheus_port, node)
        await metrics.start()
    node.start(host, nodes)

    # report stats every 10 seconds, forever
    while True:
        await asyncio.sleep(10)
        PEERS.labels('main').set(len(node.protocol.routing_table.get_peers()))
        BLOBS_STORED.labels('main').set(
            len(node.protocol.data_store.get_storing_contacts()))
        log.info(
            "Known peers: %d. Storing contact information for %d blobs from %d peers.",
            len(node.protocol.routing_table.get_peers()),
            len(node.protocol.data_store),
            len(node.protocol.data_store.get_storing_contacts()))
Example #11
0
    async def test_host_different_blobs_to_multiple_peers_at_once(self):
        """The server can serve two different blobs to two clients concurrently.

        A second, independent client downloads the data blob while the primary
        client (via _test_transfer_blob) downloads the sd blob.
        """
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)

        sd_hash = "3e2706157a59aaa47ef52bc264fce488078b4026c0b9bab649a8f2fe1ecc5e5cad7182a2bb7722460f856831a1ac0f02"
        mock_sd_blob_bytes = b"""{"blobs": [{"blob_hash": "6f53c72de100f6f007aa1b9720632e2d049cc6049e609ad790b556dba262159f739d5a14648d5701afc84b991254206a", "blob_num": 0, "iv": "3b6110c2d8e742bff66e4314863dee7e", "length": 2097152}, {"blob_hash": "18493bc7c5164b00596153859a0faffa45765e47a6c3f12198a4f7be4658111505b7f8a15ed0162306a0672c4a9b505d", "blob_num": 1, "iv": "df973fa64e73b4ff2677d682cdc32d3e", "length": 2097152}, {"blob_num": 2, "iv": "660d2dc2645da7c7d4540a466fcb0c60", "length": 0}], "key": "6465616462656566646561646265656664656164626565666465616462656566", "stream_hash": "22423c6786584974bd6b462af47ecb03e471da0ef372fe85a4e71a78bef7560c4afb0835c689f03916105404653b7bdf", "stream_name": "746573745f66696c65", "stream_type": "lbryfile", "suggested_file_name": "746573745f66696c65"}"""

        second_client_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, second_client_dir)
        second_client_conf = Config()

        second_client_storage = SQLiteStorage(second_client_conf, os.path.join(second_client_dir, "lbrynet.sqlite"))
        second_client_blob_manager = BlobManager(
            self.loop, second_client_dir, second_client_storage, second_client_conf
        )
        server_from_second_client = make_kademlia_peer(b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)

        await second_client_storage.open()
        await second_client_blob_manager.setup()

        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        await self._add_blob_to_server(sd_hash, mock_sd_blob_bytes)

        # BUGFIX: fetch the blob through the *second* client's blob manager —
        # the original used self.client_blob_manager, leaving the freshly
        # set-up second client unused (compare the parallel "same blob" test).
        second_client_blob = second_client_blob_manager.get_blob(blob_hash)

        await asyncio.gather(
            request_blob(
                self.loop, second_client_blob, server_from_second_client.address,
                server_from_second_client.tcp_port, 2, 3
            ),
            self._test_transfer_blob(sd_hash),
            second_client_blob.verified.wait()
        )
        self.assertTrue(second_client_blob.get_is_verified())
Example #12
0
 async def setup_blob_manager(self, save_blobs=True):
     """Create storage and a blob manager in a temp dir, honoring ``save_blobs``."""
     tmp_dir = tempfile.mkdtemp()
     self.addCleanup(shutil.rmtree, tmp_dir)
     self.config = Config(save_blobs=save_blobs)
     self.storage = SQLiteStorage(
         self.config, os.path.join(tmp_dir, "lbrynet.sqlite"))
     self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage, self.config)
     await self.storage.open()
Example #13
0
async def main(blob_hash: str, url: str):
    """Download one blob from ``url`` ("host:port"), then delete it if verified.

    The host part may be an IP literal or a hostname (resolved via getaddrinfo).
    """
    conf = Config()
    loop = asyncio.get_running_loop()
    host_url, port = url.split(":")

    host = None
    try:
        # ip_address() raises ValueError for hostnames
        if ipaddress.ip_address(host_url):
            host = host_url
    except ValueError:
        host = None
    if not host:
        records = await loop.getaddrinfo(
            host_url, 'https',
            proto=socket.IPPROTO_TCP,
        )
        host = records[0][4][0]

    storage = SQLiteStorage(conf, os.path.join(conf.data_dir, "lbrynet.sqlite"))
    blob_manager = BlobManager(loop, os.path.join(conf.data_dir, "blobfiles"), storage)
    await storage.open()
    await blob_manager.setup()

    blob = blob_manager.get_blob(blob_hash)
    success, keep = await request_blob(
        loop, blob, host, int(port), conf.peer_connect_timeout, conf.blob_download_timeout)
    print(f"{'downloaded' if success else 'failed to download'} {blob_hash} from {host}:{port}\n"
          f"keep connection: {keep}")
    if blob.get_is_verified():
        # clean up: the download was only a connectivity check
        await blob_manager.delete_blobs([blob.blob_hash])
        print(f"deleted {blob_hash}")
    async def test_host_same_blob_to_multiple_peers_at_once(self):
        """The server can serve one blob to two independent clients concurrently."""
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2**20) - 1)

        # spin up a second, fully independent client
        second_client_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, second_client_dir)
        second_client_conf = Config()
        second_client_storage = SQLiteStorage(
            second_client_conf, os.path.join(second_client_dir, "lbrynet.sqlite"))
        second_client_blob_manager = BlobManager(
            self.loop, second_client_dir, second_client_storage, second_client_conf)
        server_from_second_client = make_kademlia_peer(
            b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)

        await second_client_storage.open()
        await second_client_blob_manager.setup()

        await self._add_blob_to_server(blob_hash, mock_blob_bytes)

        second_client_blob = second_client_blob_manager.get_blob(blob_hash)

        # both clients download the same blob at the same time
        await asyncio.gather(
            request_blob(
                self.loop, second_client_blob,
                server_from_second_client.address,
                server_from_second_client.tcp_port, 2, 3),
            self._test_transfer_blob(blob_hash))
        await second_client_blob.verified.wait()
        self.assertTrue(second_client_blob.get_is_verified())
async def get_mock_wallet(sd_hash, storage, balance=10.0, fee=None):
    """Build a WalletManager wired to fakes for stream download tests.

    Creates a claim pointing at ``sd_hash`` (optionally with an LBC/USD fee),
    a fake ledger/headers, and stubs for resolve and balance.

    Returns:
        (manager, permanent_url) — the wallet manager and the claim's URL.
    """
    claim = Claim()
    if fee:
        if fee['currency'] == 'LBC':
            claim.stream.fee.lbc = Decimal(fee['amount'])
        elif fee['currency'] == 'USD':
            claim.stream.fee.usd = Decimal(fee['amount'])
    claim.stream.title = "33rpm"
    claim.stream.languages.append("en")
    claim.stream.source.sd_hash = sd_hash
    claim.stream.source.media_type = "image/png"

    tx = get_claim_transaction("33rpm", claim.to_bytes())
    tx.height = 514081
    txo = tx.outputs[0]
    permanent_url = "33rpm#c49566d631226492317d06ad7fdbe1ed32925124"
    txo.meta.update({"permanent_url": permanent_url})

    class FakeHeaders:
        """Minimal headers stand-in: fixed timestamp, fixed height."""

        def __init__(self, height):
            self.height = height

        def estimated_timestamp(self, height):
            return 1984

        def __getitem__(self, item):
            return {'timestamp': 1984}

    wallet = Wallet()
    ledger = Ledger({
        'db': Database(':memory:'),
        'headers': FakeHeaders(514082)
    })
    await ledger.db.open()
    wallet.generate_account(ledger)
    manager = WalletManager()
    manager.config = Config()
    manager.wallets.append(wallet)
    manager.ledgers[Ledger] = ledger
    manager.ledger.network.client = ClientSession(
        network=manager.ledger.network, server=('fakespv.lbry.com', 50001)
    )

    async def mock_resolve(*args, **kwargs):
        # resolve always returns our single fake claim, persisting it first
        result = {permanent_url: txo}
        await storage.save_claims([
            StreamManager._convert_to_old_resolve_output(manager, result)[permanent_url]
        ])
        return result
    manager.ledger.resolve = mock_resolve

    async def get_balance(*_):
        return balance
    manager.get_balance = get_balance

    return manager, permanent_url
Example #16
0
 async def asyncSetUp(self):
     """In-memory storage plus a blob manager bound to the running event loop."""
     self.loop = asyncio.get_running_loop()
     self.tmp_dir = tempfile.mkdtemp()
     self.addCleanup(shutil.rmtree, self.tmp_dir)
     self.config = Config()
     self.storage = SQLiteStorage(self.config, ":memory:", self.loop)
     self.blob_manager = BlobManager(
         self.loop, self.tmp_dir, self.storage, self.config)
     await self.storage.open()
Example #17
0
 def test_linux_defaults(self):
     """Linux defaults follow the XDG layout with downloads in ~/Downloads."""
     c = Config()
     home = os.path.expanduser
     self.assertEqual(c.data_dir, home('~/.local/share/lbry/lbrynet'))
     self.assertEqual(c.wallet_dir, home('~/.local/share/lbry/lbryum'))
     self.assertEqual(c.download_dir, home('~/Downloads'))
     self.assertEqual(c.config, home('~/.local/share/lbry/lbrynet/daemon_settings.yml'))
     self.assertEqual(c.api_connection_url, 'http://localhost:5279/lbryapi')
     self.assertEqual(c.log_file_path, home('~/.local/share/lbry/lbrynet/lbrynet.log'))
 def test_ensure_default(self):
     """With no allowed_origin, any request carrying an Origin is forbidden."""
     conf = Config()
     # no Origin header: passes
     ensure(request('GET', '/'), conf)
     origin_request = request('GET', '/', headers={'Origin': 'localhost'})
     with self.assertLogs() as log:
         with self.assertRaises(HTTPForbidden):
             ensure(origin_request, conf)
         self.assertIn("'localhost' are not allowed", log.output[0])
Example #19
0
    def test_max_key_fee_from_args(self):
        """max_key_fee round-trips through argparse: default, disabled, explicit."""
        parser = argparse.ArgumentParser()
        Config.contribute_to_argparse(parser)

        def parse(cli_args):
            # parse the given argv fragment into a Config
            return Config.create_from_arguments(parser.parse_args(cli_args))

        # default
        self.assertEqual(parse([]).max_key_fee,
                         {'amount': 50.0, 'currency': 'USD'})

        # disabled
        self.assertEqual(parse(['--no-max-key-fee']).max_key_fee, None)

        # explicitly set
        self.assertEqual(parse(['--max-key-fee', '1.0', 'BTC']).max_key_fee,
                         {'amount': 1.0, 'currency': 'BTC'})
 def test_ensure_specific(self):
     """allowed_origin admits the configured Origin and forbids any other."""
     conf = Config(allowed_origin='localhost')
     # the configured origin passes
     ensure(request('GET', '/', headers={'Origin': 'localhost'}), conf)
     foreign_request = request('GET', '/', headers={'Origin': 'hackers.com'})
     with self.assertLogs() as log:
         with self.assertRaises(HTTPForbidden):
             ensure(foreign_request, conf)
         first_record = log.output[0]
         self.assertIn("'hackers.com' are not allowed", first_record)
         self.assertIn("'allowed_origin' limits requests to: 'localhost'",
                       first_record)
Example #21
0
 def setUp(self):
     """Component manager with delayed fakes; heavy components skipped."""
     skipped = [
         DATABASE_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT,
         PEER_PROTOCOL_SERVER_COMPONENT, UPNP_COMPONENT,
         EXCHANGE_RATE_MANAGER_COMPONENT
     ]
     self.component_manager = ComponentManager(
         Config(),
         skip_components=skipped,
         wallet=FakeDelayedWallet,
         stream_manager=FakeDelayedStreamManager,
         blob_manager=FakeDelayedBlobManager,
     )
Example #22
0
 def test_mac_defaults(self):
     """macOS keeps data in Application Support and the wallet in ~/.lbryum."""
     c = Config()
     support_dir = os.path.expanduser("~/Library/Application Support/LBRY")
     self.assertEqual(c.data_dir, support_dir)
     self.assertEqual(c.wallet_dir, os.path.expanduser('~/.lbryum'))
     self.assertEqual(c.download_dir, os.path.expanduser('~/Downloads'))
     self.assertEqual(c.config, os.path.join(c.data_dir, 'daemon_settings.yml'))
     self.assertEqual(c.api_connection_url, 'http://localhost:5279/lbryapi')
     self.assertEqual(c.log_file_path, os.path.join(c.data_dir, 'lbrynet.log'))
 def setUp(self):
     """Component manager using the file-manager fake; heavy components skipped."""
     skipped = [
         DATABASE_COMPONENT, DISK_SPACE_COMPONENT, DHT_COMPONENT,
         HASH_ANNOUNCER_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT,
         UPNP_COMPONENT, BACKGROUND_DOWNLOADER_COMPONENT,
         EXCHANGE_RATE_MANAGER_COMPONENT
     ]
     self.component_manager = ComponentManager(
         Config(),
         skip_components=skipped,
         wallet=FakeDelayedWallet,
         file_manager=FakeDelayedFileManager,
         blob_manager=FakeDelayedBlobManager,
     )
 def test_allowed_origin_specified(self):
     """Only origin-less requests and the configured Origin are allowed."""
     conf = Config(allowed_origin='localhost')
     # no Origin header is always fine
     self.assertTrue(allowed(request('GET', '/'), conf))
     for origin, expected in (
             ('localhost', True), ('null', False), ('hackers.com', False)):
         check = self.assertTrue if expected else self.assertFalse
         check(allowed(request('GET', '/', headers={'Origin': origin}), conf))
 def test_allowed_origin_star(self):
     """allowed_origin='*' admits every request regardless of Origin."""
     conf = Config(allowed_origin='*')
     self.assertTrue(allowed(request('GET', '/'), conf))
     for origin in ('null', 'localhost', 'hackers.com'):
         self.assertTrue(
             allowed(request('GET', '/', headers={'Origin': origin}), conf))
Example #26
0
async def main():
    """Curses UI loop: poll the daemon's routing table until q/Q is pressed."""
    conf = Config()
    try:
        init_curses()
        pressed = None
        while pressed not in (ord('q'), ord('Q')):
            routing_info = await daemon_rpc(conf, 'routing_table_get')
            refresh(routing_info)
            pressed = stdscr.getch()
            time.sleep(0.1)
    finally:
        # always restore the terminal, even on error
        teardown_curses()
 def test_allowed_origin_default(self):
     """Default config: origin-less requests pass, every explicit Origin is denied."""
     conf = Config()
     # lack of Origin is always allowed
     self.assertTrue(allowed(request('GET', '/'), conf))
     # deny all other Origins
     for origin in ('null', 'localhost', 'hackers.com'):
         self.assertFalse(
             allowed(request('GET', '/', headers={'Origin': origin}), conf))
Example #28
0
 def test_windows_defaults(self):
     """Windows keeps everything under the user's AppData/Local/lbry directory."""
     c = Config()
     user_dir = os.path.join(r"C:\Users", os.getlogin())
     prefix = os.path.join(user_dir, r"AppData\Local\lbry")
     self.assertEqual(c.data_dir, os.path.join(prefix, 'lbrynet'))
     self.assertEqual(c.wallet_dir, os.path.join(prefix, 'lbryum'))
     self.assertEqual(c.download_dir, os.path.join(user_dir, "Downloads"))
     self.assertEqual(c.config, os.path.join(c.data_dir, 'daemon_settings.yml'))
     self.assertEqual(c.api_connection_url, 'http://localhost:5279/lbryapi')
     self.assertEqual(c.log_file_path, os.path.join(c.data_dir, 'lbrynet.log'))
Example #29
0
 async def setup_node(self, peer_addresses, address, node_id):
     """Start a DHT node listening on ``address`` with ``node_id`` and add peers.

     Args:
         peer_addresses: iterable of (node_id, address) pairs to add as peers.
         address: address the node listens/announces on.
         node_id: this node's DHT id.
     """
     self.nodes: typing.Dict[int, Node] = {}
     self.advance = dht_mocks.get_time_accelerator(self.loop, self.loop.time())
     self.conf = Config()
     self.storage = SQLiteStorage(self.conf, ":memory:", self.loop, self.loop.time)
     await self.storage.open()
     self.peer_manager = PeerManager(self.loop)
     self.node = Node(self.loop, self.peer_manager, node_id, 4444, 4444, 3333, address)
     await self.node.start_listening(address)
     self.blob_announcer = BlobAnnouncer(self.loop, self.node, self.storage)
     # BUGFIX (hazard): the original loop reused the names node_id/address,
     # shadowing the parameters above; use distinct names so any code added
     # after the loop cannot silently read the last peer's values instead.
     for peer_id, peer_address in peer_addresses:
         await self.add_peer(peer_id, peer_address)
     self.node.joined.set()
     self.node._refresh_task = self.loop.create_task(self.node.refresh_node())
Example #30
0
def main(argv=None):
    """CLI entry point: dispatch to version, daemon start, API command, or help.

    Returns:
        0 always (errors surface as exceptions or printed help).
    """
    argv = argv or sys.argv[1:]
    parser = get_argument_parser()
    args, command_args = parser.parse_known_args(argv)

    conf = Config.create_from_arguments(args)
    for directory in (conf.data_dir, conf.download_dir, conf.wallet_dir):
        ensure_directory_exists(directory)

    if args.cli_version:
        print(f"lbrynet {lbrynet_version}")
    elif args.command == 'start':
        if args.help:
            args.start_parser.print_help()
            return 0
        if args.initial_headers:
            # seed the wallet's header chain from a provided file, if bigger
            ledger_path = os.path.join(conf.wallet_dir, 'lbc_mainnet')
            ensure_directory_exists(ledger_path)
            headers_path = os.path.join(ledger_path, 'headers')
            current_size = (
                os.stat(headers_path).st_size if os.path.exists(headers_path) else 0
            )
            if os.stat(args.initial_headers).st_size > current_size:
                log.info('Copying header from %s to %s',
                         args.initial_headers, headers_path)
                shutil.copy(args.initial_headers, headers_path)
        run_daemon(args, conf)
    elif args.command is not None:
        doc = args.doc
        api_method_name = args.api_method_name
        if args.replaced_by:
            print(
                f"{args.api_method_name} is deprecated, using {args.replaced_by['api_method_name']}."
            )
            doc = args.replaced_by['doc']
            api_method_name = args.replaced_by['api_method_name']
        if args.help:
            print(doc)
        else:
            params = set_kwargs(docopt(doc, command_args))
            asyncio.get_event_loop().run_until_complete(
                execute_command(conf, api_method_name, params))
    elif args.group is not None:
        args.group_parser.print_help()
    else:
        parser.print_help()

    return 0