Code Example #1
File: LBRYConsole.py | Project: hpierce1102/lbry
 def _setup_lbry_file_opener(self):
     stream_info_manager = TempLBRYFileMetadataManager()
     downloader_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                self.session.blob_manager, stream_info_manager,
                                                self.session.wallet)
     self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, downloader_factory)
     return defer.succeed(True)
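
A note on the pattern: _setup_lbry_file_opener() builds an LBRYFileOpenerFactory from the session's components, registers it on the stream descriptor identifier for the LBRY file stream type, and returns an already-fired Deferred (defer.succeed(True)) so it can slot into a Twisted callback chain. A minimal sketch of that chaining, with hypothetical step names standing in for the real console methods:

from twisted.internet import defer

def setup_stream_info():
    # stand-in for creating the metadata manager
    return defer.succeed(True)

def setup_file_opener():
    # stand-in for building and registering the downloader factory
    return defer.succeed(True)

def run_setup():
    d = defer.succeed(None)
    d.addCallback(lambda _: setup_stream_info())
    d.addCallback(lambda _: setup_file_opener())
    return d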
Code Example #2
File: LBRYDaemon.py | Project: jackrobison/lbry
        def _set_vars():
            self.fetcher = None
            self.current_db_revision = 1
            self.run_server = True
            self.session = None
            self.known_dht_nodes = [('104.236.42.182', 4000)]
            self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
            self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
            self.peer_port = 3333
            self.dht_node_port = 4444
            self.first_run = False
            if os.name == "nt":
                from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
                self.download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
                self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd")
            elif sys.platform == "darwin":
                self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
                self.wallet_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd")
            else:
                self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd")
                self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')

            self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
            self.wallet_user = None
            self.wallet_password = None
            self.sd_identifier = StreamDescriptorIdentifier()
            self.stream_info_manager = TempLBRYFileMetadataManager()
            self.wallet_rpc_port = 8332
            self.downloads = []
            self.stream_frames = []
            self.default_blob_data_payment_rate = MIN_BLOB_DATA_PAYMENT_RATE
            self.use_upnp = True
            self.start_lbrycrdd = True
            if os.name == "nt":
                self.lbrycrdd_path = "lbrycrdd.exe"
            else:
                self.lbrycrdd_path = "./lbrycrdd"
            self.delete_blobs_on_remove = True
            self.blob_request_payment_rate_manager = None
            self.lbry_file_metadata_manager = None
            self.lbry_file_manager = None
            self.settings = LBRYSettings(self.db_dir)
            self.wallet_type = "lbrycrd"
            self.lbrycrd_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
            self.autofetcher_conf = os.path.join(self.wallet_dir, "autofetcher.conf")
            self.rpc_conn = None
            self.files = []
            self.created_data_dir = False
            if not os.path.exists(self.db_dir):
                os.mkdir(self.db_dir)
                self.created_data_dir = True
            self.session_settings = None
            self.data_rate = 0.5
            self.max_key_fee = 100.0
            return defer.succeed(None)
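
_set_vars() mostly fills in defaults; the one piece of state it creates on disk is the data directory, and it remembers whether it had to create it so that _setup_data_directory() (see the next example) writes the initial db_revision file only on a first run. A minimal sketch of that idiom, using a hypothetical helper name:

import os

def ensure_data_dir(db_dir):
    # Create the directory if it is missing and report whether we created it,
    # mirroring the created_data_dir flag set in _set_vars().
    created = False
    if not os.path.exists(db_dir):
        os.mkdir(db_dir)
        created = True
    return created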
Code Example #3
File: LBRYDaemon.py | Project: jackrobison/lbry
class LBRYDaemon(xmlrpc.XMLRPC):
    """
    LBRYnet daemon
    """

    def setup(self):
        def _set_vars():
            self.fetcher = None
            self.current_db_revision = 1
            self.run_server = True
            self.session = None
            self.known_dht_nodes = [('104.236.42.182', 4000)]
            self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
            self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
            self.peer_port = 3333
            self.dht_node_port = 4444
            self.first_run = False
            if os.name == "nt":
                from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
                self.download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
                self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd")
            elif sys.platform == "darwin":
                self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
                self.wallet_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd")
            else:
                self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd")
                self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')

            self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
            self.wallet_user = None
            self.wallet_password = None
            self.sd_identifier = StreamDescriptorIdentifier()
            self.stream_info_manager = TempLBRYFileMetadataManager()
            self.wallet_rpc_port = 8332
            self.downloads = []
            self.stream_frames = []
            self.default_blob_data_payment_rate = MIN_BLOB_DATA_PAYMENT_RATE
            self.use_upnp = True
            self.start_lbrycrdd = True
            if os.name == "nt":
                self.lbrycrdd_path = "lbrycrdd.exe"
            else:
                self.lbrycrdd_path = "./lbrycrdd"
            self.delete_blobs_on_remove = True
            self.blob_request_payment_rate_manager = None
            self.lbry_file_metadata_manager = None
            self.lbry_file_manager = None
            self.settings = LBRYSettings(self.db_dir)
            self.wallet_type = "lbrycrd"
            self.lbrycrd_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
            self.autofetcher_conf = os.path.join(self.wallet_dir, "autofetcher.conf")
            self.rpc_conn = None
            self.files = []
            self.created_data_dir = False
            if not os.path.exists(self.db_dir):
                os.mkdir(self.db_dir)
                self.created_data_dir = True
            self.session_settings = None
            self.data_rate = 0.5
            self.max_key_fee = 100.0
            return defer.succeed(None)

        def _disp_startup():
            print "Started LBRYnet daemon"
            print "The daemon can be shut down by running 'stop-lbrynet-daemon' in a terminal"
            return defer.succeed(None)

        d = defer.Deferred()
        d.addCallback(lambda _: _set_vars())
        d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory))
        d.addCallback(lambda _: self._check_db_migration())
        d.addCallback(lambda _: self._get_settings())
        d.addCallback(lambda _: self._get_lbrycrdd_path())
        d.addCallback(lambda _: self._get_session())
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier))
        d.addCallback(lambda _: self._setup_stream_identifier())
        d.addCallback(lambda _: self._setup_lbry_file_manager())
        d.addCallback(lambda _: self._setup_lbry_file_opener())
        d.addCallback(lambda _: self._setup_fetcher())
        d.addCallback(lambda _: _disp_startup())
        d.callback(None)

        return defer.succeed(None)

    def _shutdown(self):
        print 'Closing lbrynet session'
        if self.session is not None:
            d = self.session.shut_down()
        else:
            d = defer.succeed(True)
        return d

    def _update_settings(self):
        self.data_rate = self.session_settings['data_rate']
        self.max_key_fee = self.session_settings['max_key_fee']

    def _setup_fetcher(self):
        self.fetcher = FetcherDaemon(self.session, self.lbry_file_manager, self.lbry_file_metadata_manager,
                                     self.session.wallet, self.sd_identifier, self.autofetcher_conf)
        return defer.succeed(None)

    def _setup_data_directory(self):
        print "Loading databases..."
        if self.created_data_dir:
            db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w')
            db_revision.write(str(self.current_db_revision))
            db_revision.close()
            log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision")))
        if not os.path.exists(self.blobfile_dir):
            os.mkdir(self.blobfile_dir)
            log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))

    def _check_db_migration(self):
        old_revision = 0
        db_revision_file = os.path.join(self.db_dir, "db_revision")
        if os.path.exists(db_revision_file):
            old_revision = int(open(db_revision_file).read().strip())
        if old_revision < self.current_db_revision:
            from lbrynet.db_migrator import dbmigrator
            print "Upgrading your databases..."
            d = threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision)

            def print_success(old_dirs):
                success_string = "Finished upgrading the databases. It is now safe to delete the"
                success_string += " following directories, if you feel like it. It won't make any"
                success_string += " difference.\nAnyway here they are: "
                for i, old_dir in enumerate(old_dirs):
                    success_string += old_dir
                    if i + 1 < len(old_dirs):
                        success_string += ", "
                print success_string

            d.addCallback(print_success)
            return d
        return defer.succeed(True)

    def _get_settings(self):
        d = self.settings.start()
        d.addCallback(lambda _: self.settings.get_lbryid())
        d.addCallback(self.set_lbryid)
        d.addCallback(lambda _: self._get_lbrycrdd_path())
        return d

    def set_lbryid(self, lbryid):
        if lbryid is None:
            return self._make_lbryid()
        else:
            self.lbryid = lbryid

    def _make_lbryid(self):
        self.lbryid = generate_id()
        d = self.settings.save_lbryid(self.lbryid)
        return d

    def _setup_lbry_file_manager(self):
        self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir)
        d = self.lbry_file_metadata_manager.setup()

        def set_lbry_file_manager():
            self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier)
            return self.lbry_file_manager.setup()

        d.addCallback(lambda _: set_lbry_file_manager())

        return d

    def _get_session(self):
        def get_default_data_rate():
            d = self.settings.get_default_data_payment_rate()
            d.addCallback(lambda rate: {"default_data_payment_rate":
                                            rate if rate is not None else MIN_BLOB_DATA_PAYMENT_RATE})
            return d

        def get_wallet():
            if self.wallet_type == "lbrycrd":
                lbrycrdd_path = None
                if self.start_lbrycrdd is True:
                    lbrycrdd_path = self.lbrycrdd_path
                    if not lbrycrdd_path:
                        lbrycrdd_path = self.default_lbrycrdd_path
                d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.lbrycrd_conf,
                                                lbrycrdd_path=lbrycrdd_path))
            else:
                d = defer.succeed(PTCWallet(self.db_dir))
            d.addCallback(lambda wallet: {"wallet": wallet})
            return d

        d1 = get_default_data_rate()
        d2 = get_wallet()

        def combine_results(results):
            r = {}
            for success, result in results:
                if success is True:
                    r.update(result)
            return r

        def create_session(results):
            self.session = LBRYSession(results['default_data_payment_rate'], db_dir=self.db_dir, lbryid=self.lbryid,
                                       blob_dir=self.blobfile_dir, dht_node_port=self.dht_node_port,
                                       known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port,
                                       use_upnp=self.use_upnp, wallet=results['wallet'])
            self.rpc_conn = self.session.wallet.get_rpc_conn_x()

        dl = defer.DeferredList([d1, d2], fireOnOneErrback=True)
        dl.addCallback(combine_results)
        dl.addCallback(create_session)
        dl.addCallback(lambda _: self.session.setup())
        return dl

    def _get_lbrycrdd_path(self):
        def get_lbrycrdd_path_conf_file():
            lbrycrdd_path_conf_path = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf")
            if not os.path.exists(lbrycrdd_path_conf_path):
                return ""
            lbrycrdd_path_conf = open(lbrycrdd_path_conf_path)
            lines = lbrycrdd_path_conf.readlines()
            return lines

        d = threads.deferToThread(get_lbrycrdd_path_conf_file)

        def load_lbrycrdd_path(conf):
            for line in conf:
                if len(line.strip()) and line.strip()[0] != "#":
                    self.lbrycrdd_path = line.strip()
                    print self.lbrycrdd_path

        d.addCallback(load_lbrycrdd_path)
        return d

    def _setup_stream_identifier(self):
        file_saver_factory = LBRYFileSaverFactory(self.session.peer_finder, self.session.rate_limiter,
                                                  self.session.blob_manager, self.stream_info_manager,
                                                  self.session.wallet, self.download_directory)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_saver_factory)
        file_opener_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                    self.session.blob_manager, self.stream_info_manager,
                                                    self.session.wallet)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_opener_factory)
        return defer.succeed(None)

    def _setup_lbry_file_manager(self):
        self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir)
        d = self.lbry_file_metadata_manager.setup()

        def set_lbry_file_manager():
            self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier)
            return self.lbry_file_manager.setup()

        d.addCallback(lambda _: set_lbry_file_manager())

        return d

    def _setup_lbry_file_opener(self):

        downloader_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                   self.session.blob_manager, self.stream_info_manager,
                                                   self.session.wallet)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, downloader_factory)
        return defer.succeed(True)

    def _download_name(self, name):
        def _disp_file(file):
            print '[' + str(datetime.now()) + ']' + ' Already downloaded: ' + str(file.stream_hash)
            d = self._path_from_lbry_file(file)
            return d

        def _get_stream(name):
            def _disp(stream):
                print '[' + str(datetime.now()) + ']' + ' Start stream: ' + stream['stream_hash']
                return stream

            d = self.session.wallet.get_stream_info_for_name(name)
            stream = GetStream(self.sd_identifier, self.session, self.session.wallet, self.lbry_file_manager,
                                                        max_key_fee=self.max_key_fee, data_rate=self.data_rate)
            d.addCallback(_disp)
            d.addCallback(lambda stream_info: stream.start(stream_info))
            d.addCallback(lambda _: self._path_from_name(name))

            return d

        d = self._check_history(name)
        d.addCallback(lambda lbry_file: _get_stream(name) if not lbry_file else _disp_file(lbry_file))
        d.addCallback(lambda _: self._check_history(name))
        d.addCallback(lambda lbry_file: self._path_from_lbry_file(lbry_file) if lbry_file else 'Not found')
        d.addErrback(lambda err: str(err))

        return d

    def _resolve_name(self, name):
        d = defer.Deferred()
        d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name))
        d.addErrback(lambda _: defer.fail(UnknownNameError))

        return d


    def _resolve_name_wc(self, name):
        d = defer.Deferred()
        d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name))
        d.addErrback(lambda _: defer.fail(UnknownNameError))
        d.callback(None)

        return d

    def _check_history(self, name):
        def _get_lbry_file(path):
            f = open(path, 'r')
            l = json.loads(f.read())
            f.close()
            file_name = l['stream_name'].decode('hex')
            lbry_file = [file for file in self.lbry_file_manager.lbry_files if file.stream_name == file_name][0]
            return lbry_file

        def _check(info):
            stream_hash = info['stream_hash']
            path = os.path.join(self.blobfile_dir, stream_hash)
            if os.path.isfile(path):
                print "[" + str(datetime.now()) + "] Search for lbry_file, returning: " + stream_hash
                return defer.succeed(_get_lbry_file(path))
            else:
                print  "[" + str(datetime.now()) + "] Search for lbry_file didn't return anything"
                return defer.succeed(False)

        d = self._resolve_name(name)
        d.addCallbacks(_check, lambda _: False)
        d.callback(None)

        return d

    def _delete_lbry_file(self, lbry_file):
        d = self.lbry_file_manager.delete_lbry_file(lbry_file)

        def finish_deletion(lbry_file):
            d = lbry_file.delete_data()
            d.addCallback(lambda _: _delete_stream_data(lbry_file))
            return d

        def _delete_stream_data(lbry_file):
            s_h = lbry_file.stream_hash
            d = self.lbry_file_manager.get_count_for_stream_hash(s_h)
            # TODO: could possibly be a timing issue here
            d.addCallback(lambda c: self.stream_info_manager.delete_stream(s_h) if c == 0 else True)
            return d

        d.addCallback(lambda _: finish_deletion(lbry_file))
        return d

    def _path_from_name(self, name):
        d = self._check_history(name)
        d.addCallback(lambda lbry_file: {'stream_hash': lbry_file.stream_hash,
                                         'path': os.path.join(self.download_directory, lbry_file.file_name)}
                                        if lbry_file else defer.fail(UnknownNameError))
        return d

    def _path_from_lbry_file(self, lbry_file):
        if lbry_file:
            r = {'stream_hash': lbry_file.stream_hash,
                 'path': os.path.join(self.download_directory, lbry_file.file_name)}
            return defer.succeed(r)
        else:
            return defer.fail(UnknownNameError)

    def xmlrpc_get_settings(self):
        """
        Get LBRY payment settings
        """

        if not self.session_settings:
            self.session_settings = {'data_rate': self.data_rate, 'max_key_fee': self.max_key_fee}

        print '[' + str(datetime.now()) + '] Get daemon settings'
        return self.session_settings

    def xmlrpc_set_settings(self, settings):
        self.session_settings = settings
        self._update_settings()

        print '[' + str(datetime.now()) + '] Set daemon settings'
        return 'Set'

    def xmlrpc_start_fetcher(self):
        """
        Start autofetcher
        """

        self.fetcher.start()
        print '[' + str(datetime.now()) + '] Start autofetcher'
        return str('Started autofetching')

    def xmlrpc_stop_fetcher(self):
        """
        Stop autofetcher
        """

        self.fetcher.stop()
        print '[' + str(datetime.now()) + '] Stop autofetcher'
        return str('Stopped autofetching')

    def xmlrpc_fetcher_status(self):
        """
        Get fetcher status
        """

        print '[' + str(datetime.now()) + '] Get fetcher status'
        return str(self.fetcher.check_if_running())

    def xmlrpc_get_balance(self):
        """
        Get LBC balance
        """

        print '[' + str(datetime.now()) + '] Get balance'
        return str(self.session.wallet.wallet_balance)

    def xmlrpc_stop(self):
        """
        Stop the reactor
        """

        def _disp_shutdown():
            print 'Shutting down lbrynet daemon'

        d = self._shutdown()
        d.addCallback(lambda _: _disp_shutdown())
        d.addCallback(lambda _: reactor.stop())
        d.callback(None)

        return d

    def xmlrpc_get_lbry_files(self):
        """
        Get LBRY files

        @return: Managed LBRY files
        """

        r = []
        for f in self.lbry_file_manager.lbry_files:
            if f.key:
                t = {'completed': f.completed, 'file_name': f.file_name, 'key': binascii.b2a_hex(f.key),
                     'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
                     'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
                     'upload_allowed': f.upload_allowed}

            else:
                t = {'completed': f.completed, 'file_name': f.file_name, 'key': None, 'points_paid': f.points_paid,
                     'stopped': f.stopped, 'stream_hash': f.stream_hash, 'stream_name': f.stream_name,
                     'suggested_file_name': f.suggested_file_name, 'upload_allowed': f.upload_allowed}

            r.append(json.dumps(t))

        print '[' + str(datetime.now()) + '] Get LBRY files'
        return r

    def xmlrpc_resolve_name(self, name):
        """
        Resolve stream info from a LBRY uri

        @param: name
        """

        def _disp(info):
            log.debug('[' + str(datetime.now()) + ']' + ' Resolved info: ' + str(info['stream_hash']))
            print '[' + str(datetime.now()) + ']' + ' Resolved info: ' + str(info['stream_hash'])
            return info

        d = self._resolve_name(name)
        d.addCallbacks(_disp, lambda _: str('UnknownNameError'))
        d.callback(None)
        return d

    def xmlrpc_get(self, name):
        """
        Download stream from a LBRY uri

        @param: name
        """

        d = self._download_name(name)

        return d

    def xmlrpc_stop_lbry_file(self, stream_hash):
        try:
            lbry_file = [f for f in self.lbry_file_manager.lbry_files if f.stream_hash == stream_hash][0]
        except IndexError:
            return defer.fail(UnknownNameError)

        if not lbry_file.stopped:
            d = self.lbry_file_manager.toggle_lbry_file_running(lbry_file)
            d.addCallback(lambda _: 'Stream has been stopped')
            d.addErrback(lambda err: str(err))
            return d
        else:
            return defer.succeed('Stream was already stopped')

    def xmlrpc_start_lbry_file(self, stream_hash):
        try:
            lbry_file = [f for f in self.lbry_file_manager.lbry_files if f.stream_hash == stream_hash][0]
        except IndexError:
            return defer.fail(UnknownNameError)

        if lbry_file.stopped:
            d = self.lbry_file_manager.toggle_lbry_file_running(lbry_file)
            d.addCallback(lambda _: 'Stream started')
            d.addErrback(lambda err: str(err))
            return d
        else:
            return defer.succeed('Stream was already running')


    def xmlrpc_render_html(self, html):
        def _make_file(html, path):
            f = open(path, 'w')
            f.write(html)
            f.close()
            return defer.succeed(None)

        def _disp_err(err):
            print str(err.getTraceback())
            return err

        path = os.path.join(self.download_directory, 'lbry.html')

        d = defer.Deferred()
        d.addCallback(lambda _: _make_file(html, path))
        d.addCallback(lambda _: webbrowser.open('file://' + path))
        d.addErrback(_disp_err)
        d.callback(None)

        return d

    def xmlrpc_render_gui(self):
        def _disp_err(err):
            print str(err.getTraceback())
            return err
        d = defer.Deferred()
        d.addCallback(lambda _: webbrowser.open(os.path.join(self.download_directory, "lbryio/view/page/gui.html")))
        d.addErrback(_disp_err)
        d.callback(None)

        return d

    def xmlrpc_search_nametrie(self, search):
        def _return_d(x):
            d = defer.Deferred()
            d.addCallback(lambda _: x)
            d.callback(None)

            return d

        def _clean(n):
            t = []
            for i in n:
                if i[0]:
                    if i[1][0][0] and i[1][1][0]:
                        i[1][0][1]['value'] = str(i[1][0][1]['value'])
                        t.append([i[1][0][1], i[1][1][1]])
            return t

        def _parse(results):
            f = []
            for chain, meta in results:
                t = {}
                if 'name' in chain.keys():
                    t['name'] = chain['name']
                if 'thumbnail' in meta.keys():
                    t['img'] = meta['thumbnail']
                if 'name' in meta.keys():
                    t['title'] = meta['name']
                if 'description' in meta.keys():
                    t['description'] = meta['description']
                if 'key_fee' in meta.keys():
                    t['cost_est'] = meta['key_fee']
                else:
                    t['cost_est'] = 0.0
                f.append(t)

            return f

        def _disp(results):
            print '[' + str(datetime.now()) + '] Found ' + str(len(results)) + ' results'
            return results

        print '[' + str(datetime.now()) + '] Search nametrie: ' + search

        filtered_results = [n for n in self.rpc_conn.getnametrie() if n['name'].startswith(search)]
        filtered_results = [n for n in filtered_results if 'txid' in n.keys()]
        resolved_results = [defer.DeferredList([_return_d(n), self._resolve_name_wc(n['name'])]) for n in filtered_results]

        d = defer.DeferredList(resolved_results)
        d.addCallback(_clean)
        d.addCallback(_parse)
        d.addCallback(_disp)

        return d

    def xmlrpc_delete_lbry_file(self, file_name):
        def _disp(file_name):
            print '[' + str(datetime.now()) + '] Deleted: ' + file_name
            return defer.succeed(str('Deleted: ' + file_name))

        lbry_files = [self._delete_lbry_file(f) for f in self.lbry_file_manager.lbry_files if file_name == f.file_name]
        d = defer.DeferredList(lbry_files)
        d.addCallback(lambda _: _disp(file_name))
        return d

    def xmlrpc_check(self, name):
        d = self._check_history(name)
        d.addCallback(lambda lbry_file: self._path_from_lbry_file(lbry_file) if lbry_file else 'Not found')
        d.addErrback(lambda err: str(err))

        return d

    def xmlrpc_publish(self, metadata):
        metadata = json.loads(metadata)

        required = ['name', 'file_path', 'bid']

        for r in required:
            if not r in metadata.keys():
                return defer.fail()

        # if not os.path.isfile(metadata['file_path']):
        #     return defer.fail()

        if not (isinstance(metadata['bid'], float) and metadata['bid'] > 0.0):
            return defer.fail()

        name = metadata['name']
        file_path = metadata['file_path']
        bid = metadata['bid']

        if 'title' in metadata.keys():
            title = metadata['title']
        else:
            title = None

        if 'description' in metadata.keys():
            description = metadata['description']
        else:
            description = None

        if 'thumbnail' in metadata.keys():
            thumbnail = metadata['thumbnail']
        else:
            thumbnail = None

        if 'key_fee' in metadata.keys():
            if not 'key_fee_address' in metadata.keys():
                return defer.fail()
            key_fee = metadata['key_fee']
        else:
            key_fee = None

        if 'key_fee_address' in metadata.keys():
            key_fee_address = metadata['key_fee_address']
        else:
            key_fee_address = None

        p = Publisher(self.session, self.lbry_file_manager, self.session.wallet)
        d = p.start(name, file_path, bid, title, description, thumbnail, key_fee, key_fee_address)

        return d
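
One pattern worth pulling out of this class is _get_session(): the default payment rate and the wallet are fetched as two independent Deferreds, joined with a DeferredList (fireOnOneErrback=True, so either failure aborts startup), and their dict results merged before the LBRYSession is built. A reduced sketch of that shape, with dummy values standing in for the real settings and wallet lookups:

from twisted.internet import defer

def get_rate():
    return defer.succeed({"default_data_payment_rate": 0.5})

def get_wallet():
    return defer.succeed({"wallet": object()})

def combine_results(results):
    # DeferredList hands back (success, value) pairs; keep the successful dicts.
    merged = {}
    for success, value in results:
        if success:
            merged.update(value)
    return merged

dl = defer.DeferredList([get_rate(), get_wallet()], fireOnOneErrback=True)
dl.addCallback(combine_results)
dl.addCallback(lambda settings: settings)  # the real code builds the LBRYSession here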
Code Example #4
File: LBRYGui.py | Project: jackrobison/lbry
 def _setup_stream_info_manager(self):
     self.stream_info_manager = TempLBRYFileMetadataManager()
     return defer.succeed(True)
Code Example #5
File: LBRYGui.py | Project: jackrobison/lbry
class LBRYDownloader(object):
    def __init__(self):
        self.session = None
        self.known_dht_nodes = [('104.236.42.182', 4000)]
        self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrydownloader")
        self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
        self.peer_port = 3333
        self.dht_node_port = 4444
        self.run_server = True
        self.first_run = False
        self.current_db_revision = 1
        if os.name == "nt":
            from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
            self.download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
            self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd")
        else:
            if sys.platform == 'darwin':
                self.download_directory = os.path.join(os.path.expanduser("~"), "Downloads")
                self.wallet_dir =  os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd")
            else:
                self.download_directory = os.getcwd()
                self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd")
        self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
        self.wallet_user = None
        self.wallet_password = None
        self.sd_identifier = StreamDescriptorIdentifier()
        self.wallet_rpc_port = 8332
        self.download_deferreds = []
        self.stream_frames = []
        self.default_blob_data_payment_rate = MIN_BLOB_DATA_PAYMENT_RATE
        self.use_upnp = True
        self.start_lbrycrdd = True
        if os.name == "nt":
            self.lbrycrdd_path = "lbrycrdd.exe"
        else:
            self.lbrycrdd_path = None
            self.default_lbrycrdd_path = "./lbrycrdd"
        self.delete_blobs_on_remove = True
        self.blob_request_payment_rate_manager = None
        self.lbry_server_port = None  # set by start_server() when the peer server is listening

    def start(self):
        d = self._load_conf_options()
        d.addCallback(lambda _: threads.deferToThread(self._create_directory))
        d.addCallback(lambda _: self._check_db_migration())
        d.addCallback(lambda _: self._get_session())
        d.addCallback(lambda _: self._setup_stream_info_manager())
        d.addCallback(lambda _: self._setup_stream_identifier())
        d.addCallback(lambda _: self.start_server())
        return d

    def stop(self):
        dl = defer.DeferredList(self.download_deferreds)
        for stream_frame in self.stream_frames:
            stream_frame.cancel_func()
        if self.session is not None:
            dl.addBoth(lambda _: self.stop_server())
            dl.addBoth(lambda _: self.session.shut_down())
        return dl

    def get_new_address(self):
        return self.session.wallet.get_new_address()

    def _check_db_migration(self):
        old_revision = 0
        db_revision_file = os.path.join(self.db_dir, "db_revision")
        if os.path.exists(db_revision_file):
            old_revision = int(open(db_revision_file).read().strip())
        if old_revision < self.current_db_revision:
            if os.name == "nt":
                import subprocess
                import sys

                def run_migrator():
                    migrator_exe = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])),
                                                "dbmigrator", "migrator.exe")
                    print "trying to find the migrator at", migrator_exe
                    si = subprocess.STARTUPINFO()
                    si.dwFlags = subprocess.STARTF_USESHOWWINDOW
                    si.wShowWindow = subprocess.SW_HIDE
                    print "trying to run the migrator"
                    migrator_proc = subprocess.Popen([migrator_exe, self.db_dir, str(old_revision),
                                                      str(self.current_db_revision)], startupinfo=si)
                    print "started the migrator"
                    migrator_proc.wait()
                    print "migrator has returned"

                return threads.deferToThread(run_migrator)
            else:
                from lbrynet.db_migrator import dbmigrator
                return threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision,
                                             self.current_db_revision)
        return defer.succeed(True)

    def _load_conf_options(self):

        def get_lbrycrdd_path_conf_file():
            if os.name == "nt":
                return ""
            lbrycrdd_path_conf_path = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf")
            if not os.path.exists(lbrycrdd_path_conf_path):
                return ""
            lbrycrdd_path_conf = open(lbrycrdd_path_conf_path)
            lines = lbrycrdd_path_conf.readlines()
            return lines

        d = threads.deferToThread(get_lbrycrdd_path_conf_file)

        def load_lbrycrdd_path(conf):
            for line in conf:
                if len(line.strip()) and line.strip()[0] != "#":
                    self.lbrycrdd_path = line.strip()

        d.addCallback(load_lbrycrdd_path)

        def get_configuration_file():
            if os.name == "nt":
                lbry_conf_path = "lbry.conf"
                if not os.path.exists(lbry_conf_path):
                    log.debug("Could not read lbry.conf")
                    return ""
            else:
                lbry_conf_path = os.path.join(os.path.expanduser("~"), ".lbrynetgui.conf")
                if not os.path.exists(lbry_conf_path):
                    clean_conf_path = os.path.join(os.path.dirname(__file__), "lbry.conf")
                    shutil.copy(clean_conf_path, lbry_conf_path)
            lbry_conf = open(lbry_conf_path)
            log.debug("Loading configuration options from %s", lbry_conf_path)
            lines = lbry_conf.readlines()
            log.debug("%s file contents:\n%s", lbry_conf_path, str(lines))
            return lines

        d.addCallback(lambda _: threads.deferToThread(get_configuration_file))

        def load_configuration_file(conf):
            for line in conf:
                if len(line.strip()) and line.strip()[0] != "#":
                    try:
                        field_name, field_value = map(lambda x: x.strip(), line.strip().split("=", 1))
                        field_name = field_name.lower()
                    except ValueError:
                        raise ValueError("Invalid configuration line: %s" % line)
                    if field_name == "known_dht_nodes":
                        known_nodes = []
                        nodes = field_value.split(",")
                        for n in nodes:
                            if n.strip():
                                try:
                                    ip_address, port_string = map(lambda x: x.strip(), n.split(":"))
                                    ip_numbers = ip_address.split(".")
                                    assert len(ip_numbers) == 4
                                    for ip_num in ip_numbers:
                                        num = int(ip_num)
                                        assert 0 <= num <= 255
                                    known_nodes.append((ip_address, int(port_string)))
                                except (ValueError, AssertionError):
                                    raise ValueError("Expected known nodes in format 192.168.1.1:4000,192.168.1.2:4001. Got %s" % str(field_value))
                        log.debug("Setting known_dht_nodes to %s", str(known_nodes))
                        self.known_dht_nodes = known_nodes
                    elif field_name == "run_server":
                        if field_value.lower() == "true":
                            run_server = True
                        elif field_value.lower() == "false":
                            run_server = False
                        else:
                            raise ValueError("run_server must be set to True or False. Got %s" % field_value)
                        log.debug("Setting run_server to %s", str(run_server))
                        self.run_server = run_server
                    elif field_name == "data_dir":
                        log.debug("Setting data_dir to %s", str(field_value))
                        self.db_dir = field_value
                        self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
                    elif field_name == "wallet_dir":
                        log.debug("Setting wallet_dir to %s", str(field_value))
                        self.wallet_dir = field_value
                    elif field_name == "wallet_conf":
                        log.debug("Setting wallet_conf to %s", str(field_value))
                        self.wallet_conf = field_value
                    elif field_name == "peer_port":
                        try:
                            peer_port = int(field_value)
                            assert 0 <= peer_port <= 65535
                            log.debug("Setting peer_port to %s", str(peer_port))
                            self.peer_port = peer_port
                        except (ValueError, AssertionError):
                            raise ValueError("peer_port must be set to an integer between 1 and 65535. Got %s" % field_value)
                    elif field_name == "dht_port":
                        try:
                            dht_port = int(field_value)
                            assert 0 <= dht_port <= 65535
                            log.debug("Setting dht_node_port to %s", str(dht_port))
                            self.dht_node_port = dht_port
                        except (ValueError, AssertionError):
                            raise ValueError("dht_port must be set to an integer between 1 and 65535. Got %s" % field_value)
                    elif field_name == "use_upnp":
                        if field_value.lower() == "true":
                            use_upnp = True
                        elif field_value.lower() == "false":
                            use_upnp = False
                        else:
                            raise ValueError("use_upnp must be set to True or False. Got %s" % str(field_value))
                        log.debug("Setting use_upnp to %s", str(use_upnp))
                        self.use_upnp = use_upnp
                    elif field_name == "default_blob_data_payment_rate":
                        try:
                            rate = float(field_value)
                            assert rate >= 0.0
                            log.debug("Setting default_blob_data_payment_rate to %s", str(rate))
                            self.default_blob_data_payment_rate = rate
                        except (ValueError, AssertionError):
                            raise ValueError("default_blob_data_payment_rate must be a positive floating point number, e.g. 0.5. Got %s" % str(field_value))
                    elif field_name == "start_lbrycrdd":
                        if field_value.lower() == "true":
                            start_lbrycrdd = True
                        elif field_value.lower() == "false":
                            start_lbrycrdd = False
                        else:
                            raise ValueError("start_lbrycrdd must be set to True or False. Got %s" % field_value)
                        log.debug("Setting start_lbrycrdd to %s", str(start_lbrycrdd))
                        self.start_lbrycrdd = start_lbrycrdd
                    elif field_name == "lbrycrdd_path":
                        self.lbrycrdd_path = field_value
                    elif field_name == "download_directory":
                        log.debug("Setting download_directory to %s", str(field_value))
                        self.download_directory = field_value
                    elif field_name == "delete_blobs_on_stream_remove":
                        if field_value.lower() == "true":
                            self.delete_blobs_on_remove = True
                        elif field_value.lower() == "false":
                            self.delete_blobs_on_remove = False
                        else:
                            raise ValueError("delete_blobs_on_stream_remove must be set to True or False")
                    else:
                        log.warning("Got unknown configuration field: %s", field_name)

        d.addCallback(load_configuration_file)
        return d

    def _create_directory(self):
        if not os.path.exists(self.db_dir):
            os.makedirs(self.db_dir)
            db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w')
            db_revision.write(str(self.current_db_revision))
            db_revision.close()
            log.debug("Created the configuration directory: %s", str(self.db_dir))
        if not os.path.exists(self.blobfile_dir):
            os.makedirs(self.blobfile_dir)
            log.debug("Created the data directory: %s", str(self.blobfile_dir))
        if os.name == "nt":
            if not os.path.exists(self.wallet_dir):
                os.makedirs(self.wallet_dir)
            if not os.path.exists(self.wallet_conf):
                lbrycrd_conf = open(self.wallet_conf, mode='w')
                self.wallet_user = "******"
                lbrycrd_conf.write("rpcuser=%s\n" % self.wallet_user)
                self.wallet_password = binascii.hexlify(Random.new().read(20))
                lbrycrd_conf.write("rpcpassword=%s\n" % self.wallet_password)
                lbrycrd_conf.write("server=1\n")
                lbrycrd_conf.close()
            else:
                lbrycrd_conf = open(self.wallet_conf)
                for l in lbrycrd_conf:
                    if l.startswith("rpcuser="******"rpcpassword="******"rpcport="):
                        self.wallet_rpc_port = int(l[8:-1].rstrip('\n'))

    def _get_session(self):
        lbrycrdd_path = None
        if self.start_lbrycrdd is True:
            lbrycrdd_path = self.lbrycrdd_path
            if not lbrycrdd_path:
                lbrycrdd_path = self.default_lbrycrdd_path

        if sys.platform == 'darwin':
            os.chdir("/Applications/LBRY.app/Contents/Resources")

        wallet = LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.wallet_conf,
                               lbrycrdd_path=lbrycrdd_path)

        peer_port = None
        if self.run_server:
            peer_port = self.peer_port
        self.session = LBRYSession(self.default_blob_data_payment_rate, db_dir=self.db_dir,
                                   blob_dir=self.blobfile_dir, use_upnp=self.use_upnp, wallet=wallet,
                                   known_dht_nodes=self.known_dht_nodes, dht_node_port=self.dht_node_port,
                                   peer_port=peer_port)
        return self.session.setup()

    def _setup_stream_info_manager(self):
        self.stream_info_manager = TempLBRYFileMetadataManager()
        return defer.succeed(True)

    def start_server(self):

        if self.run_server:
            self.blob_request_payment_rate_manager = PaymentRateManager(
                self.session.base_payment_rate_manager,
                self.default_blob_data_payment_rate
            )
            handlers = [
                BlobAvailabilityHandlerFactory(self.session.blob_manager),
                self.session.wallet.get_wallet_info_query_handler_factory(),
                BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
                                          self.blob_request_payment_rate_manager)
            ]

            server_factory = ServerProtocolFactory(self.session.rate_limiter,
                                                   handlers,
                                                   self.session.peer_manager)
            from twisted.internet import reactor
            self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory)

        return defer.succeed(True)

    def stop_server(self):
        if self.lbry_server_port is not None:
            self.lbry_server_port, p = None, self.lbry_server_port
            return defer.maybeDeferred(p.stopListening)
        else:
            return defer.succeed(True)

    def _setup_stream_identifier(self):
        add_lbry_file_to_sd_identifier(self.sd_identifier)
        file_saver_factory = LBRYFileSaverFactory(self.session.peer_finder, self.session.rate_limiter,
                                                  self.session.blob_manager, self.stream_info_manager,
                                                  self.session.wallet, self.download_directory)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_saver_factory)
        file_opener_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                    self.session.blob_manager, self.stream_info_manager,
                                                    self.session.wallet)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_opener_factory)

    def check_first_run(self):
        d = self.session.wallet.check_first_run()
        d.addCallback(lambda is_first_run: self._do_first_run() if is_first_run else 0.0)
        return d

    def _do_first_run(self):
        d = self.session.wallet.get_new_address()

        def send_request(url, data):
            r = requests.post(url, json=data)
            if r.status_code == 200:
                return r.json()['credits_sent']
            return 0.0

        def log_error(err):
            log.warning("unable to request free credits. %s", err.getErrorMessage())
            return 0.0

        def request_credits(address):
            url = "http://credreq.lbry.io/requestcredits"
            data = {"address": address}
            d = threads.deferToThread(send_request, url, data)
            d.addErrback(log_error)
            return d

        d.addCallback(request_credits)
        return d

    def _resolve_name(self, uri):
        return self.session.wallet.get_stream_info_for_name(uri)

    def download_stream(self, stream_frame, uri):
        resolve_d = self._resolve_name(uri)

        stream_frame.show_metadata_status("resolving name...")

        stream_frame.cancel_func = resolve_d.cancel
        payment_rate_manager = PaymentRateManager(self.session.base_payment_rate_manager)

        def update_stream_name(value):
            if 'name' in value:
                stream_frame.show_name(value['name'])
            if 'description' in value:
                stream_frame.show_description(value['description'])
            if 'thumbnail' in value:
                stream_frame.show_thumbnail(value['thumbnail'])
            return value

        def get_sd_hash(value):
            if 'stream_hash' in value:
                return value['stream_hash']
            raise UnknownNameError(uri)

        def get_sd_blob(sd_hash):
            stream_frame.show_metadata_status("name resolved, fetching metadata...")
            get_sd_d = StreamDescriptor.download_sd_blob(self.session, sd_hash,
                                                         payment_rate_manager)
            get_sd_d.addCallback(self.sd_identifier.get_metadata_for_sd_blob)
            get_sd_d.addCallbacks(choose_download_factory, bad_sd_blob)
            return get_sd_d

        def get_info_from_validator(info_validator):
            stream_name = None
            stream_size = None
            for field, val in info_validator.info_to_show():
                if field == "suggested_file_name":
                    stream_name = val
                elif field == "stream_name" and stream_name is None:
                    stream_name = val
                elif field == "stream_size":
                    stream_size = int(val)
            if stream_size is None:
                stream_size = "unknown"
            if stream_name is None:
                stream_name = "unknown"
            return stream_name, stream_size

        def choose_download_factory(metadata):
            #info_validator, options, factories = info_and_factories
            stream_name, stream_size = get_info_from_validator(metadata.validator)
            if isinstance(stream_size, (int, long)):
                price = payment_rate_manager.get_effective_min_blob_data_payment_rate()
                estimated_cost = stream_size * 1.0 / 2**20 * price
            else:
                estimated_cost = "unknown"

            stream_frame.show_stream_metadata(stream_name, stream_size, estimated_cost)

            available_options = metadata.options.get_downloader_options(metadata.validator,
                                                                        payment_rate_manager)

            stream_frame.show_download_options(available_options)

            get_downloader_d = defer.Deferred()

            def create_downloader(f, chosen_options):

                def fire_get_downloader_d(downloader):
                    if not get_downloader_d.called:
                        get_downloader_d.callback(downloader)

                stream_frame.disable_download_buttons()
                d = f.make_downloader(metadata, chosen_options,
                                      payment_rate_manager)
                d.addCallback(fire_get_downloader_d)

            for factory in metadata.factories:

                def choose_factory(f=factory):
                    chosen_options = stream_frame.get_chosen_options()
                    create_downloader(f, chosen_options)

                stream_frame.add_download_factory(factory, choose_factory)

            get_downloader_d.addCallback(start_download)

            return get_downloader_d

        def show_stream_status(downloader):
            total_bytes = downloader.get_total_bytes_cached()
            bytes_left_to_download = downloader.get_bytes_left_to_download()
            points_paid = payment_rate_manager.points_paid
            payment_rate = payment_rate_manager.get_effective_min_blob_data_payment_rate()
            points_remaining = 1.0 * bytes_left_to_download * payment_rate / 2**20
            stream_frame.show_progress(total_bytes, bytes_left_to_download,
                                       points_paid, points_remaining)

        def show_finished(arg, downloader):
            show_stream_status(downloader)
            stream_frame.show_download_done(payment_rate_manager.points_paid)
            return arg

        def start_download(downloader):
            stream_frame.stream_hash = downloader.stream_hash
            l = task.LoopingCall(show_stream_status, downloader)
            l.start(1)
            d = downloader.start()
            stream_frame.cancel_func = downloader.stop

            def stop_looping_call(arg):
                l.stop()
                stream_frame.cancel_func = resolve_d.cancel
                return arg

            d.addBoth(stop_looping_call)
            d.addCallback(show_finished, downloader)
            return d

        def lookup_failed(err):
            stream_frame.show_metadata_status("name lookup failed")
            return err

        def bad_sd_blob(err):
            stream_frame.show_metadata_status("Unknown type or badly formed metadata")
            return err

        resolve_d.addCallback(update_stream_name)
        resolve_d.addCallback(get_sd_hash)
        resolve_d.addCallbacks(get_sd_blob, lookup_failed)

        def show_err(err):
            tkMessageBox.showerror(title="Download Error", message=err.getErrorMessage())
            log.error(err.getErrorMessage())
            stream_frame.show_download_done(payment_rate_manager.points_paid)

        resolve_d.addErrback(lambda err: err.trap(defer.CancelledError, UnknownNameError,
                                                  UnknownStreamTypeError, InvalidStreamDescriptorError,
                                                  InvalidStreamInfoError))
        resolve_d.addErrback(show_err)

        def delete_associated_blobs():
            if stream_frame.stream_hash is None or self.delete_blobs_on_remove is False:
                return defer.succeed(True)
            d1 = self.stream_info_manager.get_blobs_for_stream(stream_frame.stream_hash)

            def get_blob_hashes(blob_infos):
                return [b[0] for b in blob_infos if b[0] is not None]

            d1.addCallback(get_blob_hashes)
            d2 = self.stream_info_manager.get_sd_blob_hashes_for_stream(stream_frame.stream_hash)

            def combine_blob_hashes(results):
                blob_hashes = []
                for success, result in results:
                    if success is True:
                        blob_hashes.extend(result)
                return blob_hashes

            def delete_blobs(blob_hashes):
                return self.session.blob_manager.delete_blobs(blob_hashes)

            dl = defer.DeferredList([d1, d2], fireOnOneErrback=True)
            dl.addCallback(combine_blob_hashes)
            dl.addCallback(delete_blobs)
            return dl

        resolve_d.addCallback(lambda _: delete_associated_blobs())
        self._add_download_deferred(resolve_d, stream_frame)

    def _add_download_deferred(self, d, stream_frame):
        self.download_deferreds.append(d)
        self.stream_frames.append(stream_frame)

        def remove_from_list():
            self.download_deferreds.remove(d)
            self.stream_frames.remove(stream_frame)

        d.addBoth(lambda _: remove_from_list())
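
The bulk of LBRYDownloader._load_conf_options() is a hand-rolled "key = value" parser: blank lines and '#' comments are skipped, each remaining line is split on the first '=', and the key is lower-cased before being matched against the known option names. A standalone sketch of just that parsing step (the per-option handling stays as in the class above; parse_conf_lines is a hypothetical name):

def parse_conf_lines(lines):
    options = {}
    for line in lines:
        stripped = line.strip()
        if not stripped or stripped[0] == "#":
            continue
        try:
            name, value = [part.strip() for part in stripped.split("=", 1)]
        except ValueError:
            raise ValueError("Invalid configuration line: %s" % line)
        options[name.lower()] = value
    return options

# parse_conf_lines(["peer_port=3333", "# a comment", "use_upnp = true"])
# -> {'peer_port': '3333', 'use_upnp': 'true'}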
Code Example #6
File: functional_tests.py | Project: Xuiquaxa/lbry
    def test_live_transfer(self):

        sd_hash_queue = Queue()
        kill_event = Event()
        dead_event = Event()
        server_args = (sd_hash_queue, kill_event, dead_event)
        server = Process(target=start_live_server, args=server_args)
        server.start()
        self.server_processes.append(server)

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 1)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        os.mkdir(db_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
                                   peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = TempLiveStreamMetadataManager(hash_announcer)

        d = self.wait_for_hash_from_queue(sd_hash_queue)

        def create_downloader(metadata, prm):
            info_validator = metadata.validator
            options = metadata.options
            factories = metadata.factories
            chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)]
            return factories[0].make_downloader(metadata, chosen_options, prm)

        def start_lbry_file(lbry_file):
            lbry_file = lbry_file
            logging.debug("Calling lbry_file.start()")
            return lbry_file.start()

        def download_stream(sd_blob_hash):
            logging.debug("Downloaded the sd blob. Reading it now")
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = download_sd_blob(self.session, sd_blob_hash, prm)
            d.addCallback(sd_identifier.get_metadata_for_sd_blob)
            d.addCallback(create_downloader, prm)
            d.addCallback(start_lbry_file)
            return d

        def do_download(sd_blob_hash):
            logging.debug("Starting the download")
            d = self.session.setup()
            d.addCallback(lambda _: enable_live_stream())
            d.addCallback(lambda _: download_stream(sd_blob_hash))
            return d

        def enable_live_stream():
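            # register the live-stream type and its downloader factory with the
            # stream descriptor identifier so the sd blob can be interpreted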
            base_live_stream_payment_rate_manager = BaseLiveStreamPaymentRateManager(
                MIN_BLOB_INFO_PAYMENT_RATE
            )
            add_live_stream_to_sd_identifier(sd_identifier,
                                             base_live_stream_payment_rate_manager)
            add_full_live_stream_downloader_to_sd_identifier(self.session, self.stream_info_manager,
                                                             sd_identifier,
                                                             base_live_stream_payment_rate_manager)

        d.addCallback(do_download)

        def check_md5_sum():
            f = open('test_file')
            hashsum = MD5.new()
            hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "215b177db8eed86d028b37e5cbad55c7")

        d.addCallback(lambda _: check_md5_sum())

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d = self.wait_for_event(dead_event, 15)

            def print_shutting_down():
                logging.info("Client is shutting down")

            d.addCallback(lambda _: print_shutting_down())
            d.addCallback(lambda _: arg)
            return d

        d.addBoth(stop)
        return d
Code Example #7
File: functional_tests.py Project: Xuiquaxa/lbry
    def test_lbry_transfer(self):
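        # Start an uploader process serving a test file, download the stream here,
        # and verify the received copy against a known MD5 sum.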

        sd_hash_queue = Queue()
        kill_event = Event()
        dead_event = Event()
        uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_event, 5209343))
        uploader.start()
        self.server_processes.append(uploader)

        logging.debug("Testing transfer")

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 1)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=blob_dir, peer_port=5553,
                                   use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = TempLBRYFileMetadataManager()

        self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)

        def make_downloader(metadata, prm):
            info_validator = metadata.validator
            options = metadata.options
            factories = metadata.factories
            chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)]
            return factories[0].make_downloader(metadata, chosen_options, prm)

        def download_file(sd_hash):
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = download_sd_blob(self.session, sd_hash, prm)
            d.addCallback(sd_identifier.get_metadata_for_sd_blob)
            d.addCallback(make_downloader, prm)
            d.addCallback(lambda downloader: downloader.start())
            return d

        def check_md5_sum():
            f = open('test_file')
            hashsum = MD5.new()
            hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

        def start_transfer(sd_hash):

            logging.debug("Starting the transfer")

            d = self.session.setup()
            d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
            d.addCallback(lambda _: self.lbry_file_manager.setup())
            d.addCallback(lambda _: download_file(sd_hash))
            d.addCallback(lambda _: check_md5_sum())

            return d

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d = self.wait_for_event(dead_event, 15)

            def print_shutting_down():
                logging.info("Client is shutting down")

            d.addCallback(lambda _: print_shutting_down())
            d.addCallback(lambda _: arg)
            return d

        d = self.wait_for_hash_from_queue(sd_hash_queue)
        d.addCallback(start_transfer)
        d.addBoth(stop)

        return d
Code Example #8
File: functional_tests.py Project: Xuiquaxa/lbry
class TestTransfer(TestCase):
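    # Functional tests that transfer blobs and streams between a client session
    # and uploader/server processes started with multiprocessing.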
    def setUp(self):
        self.server_processes = []
        self.session = None
        self.stream_info_manager = None
        self.lbry_file_manager = None
        self.addCleanup(self.take_down_env)

    def take_down_env(self):
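        # Stop the file manager, session and stream info manager, then remove the
        # test directories and files and terminate any uploader/server processes.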

        d = defer.succeed(True)
        if self.lbry_file_manager is not None:
            d.addCallback(lambda _: self.lbry_file_manager.stop())
        if self.session is not None:
            d.addCallback(lambda _: self.session.shut_down())
        if self.stream_info_manager is not None:
            d.addCallback(lambda _: self.stream_info_manager.stop())

        def delete_test_env():
            dirs = ['server', 'server1', 'server2', 'client']
            files = ['test_file']
            for di in dirs:
                if os.path.exists(di):
                    shutil.rmtree(di)
            for f in files:
                if os.path.exists(f):
                    os.remove(f)
            for p in self.server_processes:
                p.terminate()
            return True

        d.addCallback(lambda _: threads.deferToThread(delete_test_env))
        return d

    @staticmethod
    def wait_for_event(event, timeout):
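        # Poll the multiprocessing Event every 100ms and fire the returned Deferred
        # once it is set or the timeout expires.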

        from twisted.internet import reactor
        d = defer.Deferred()

        def stop():
            set_check.stop()
            if stop_call.active():
                stop_call.cancel()
            # fire the deferred whether the event was set or the timeout expired
            d.callback(True)

        def check_if_event_set():
            if event.is_set():
                logging.debug("Dead event has been found set")
                stop()

        def done_waiting():
            logging.warning("Event has not been found set and timeout has expired")
            stop()

        set_check = task.LoopingCall(check_if_event_set)
        set_check.start(.1)
        stop_call = reactor.callLater(timeout, done_waiting)
        return d

    @staticmethod
    def wait_for_hash_from_queue(hash_queue):
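        # Check the queue once per second and fire the Deferred with the first
        # hash that arrives.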
        logging.debug("Waiting for the sd_hash to come through the queue")

        d = defer.Deferred()

        def check_for_start():
            if not hash_queue.empty():
                logging.debug("Client start event has been found set")
                start_check.stop()
                d.callback(hash_queue.get(False))
            else:
                logging.debug("Client start event has NOT been found set")

        start_check = task.LoopingCall(check_for_start)
        start_check.start(1.0)

        return d

    def test_lbry_transfer(self):

        sd_hash_queue = Queue()
        kill_event = Event()
        dead_event = Event()
        uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_event, 5209343))
        uploader.start()
        self.server_processes.append(uploader)

        logging.debug("Testing transfer")

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 1)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=blob_dir, peer_port=5553,
                                   use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = TempLBRYFileMetadataManager()

        self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)

        def make_downloader(metadata, prm):
            info_validator = metadata.validator
            options = metadata.options
            factories = metadata.factories
            chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)]
            return factories[0].make_downloader(metadata, chosen_options, prm)

        def download_file(sd_hash):
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = download_sd_blob(self.session, sd_hash, prm)
            d.addCallback(sd_identifier.get_metadata_for_sd_blob)
            d.addCallback(make_downloader, prm)
            d.addCallback(lambda downloader: downloader.start())
            return d

        def check_md5_sum():
            f = open('test_file')
            hashsum = MD5.new()
            hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

        def start_transfer(sd_hash):

            logging.debug("Starting the transfer")

            d = self.session.setup()
            d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
            d.addCallback(lambda _: self.lbry_file_manager.setup())
            d.addCallback(lambda _: download_file(sd_hash))
            d.addCallback(lambda _: check_md5_sum())

            return d

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d = self.wait_for_event(dead_event, 15)

            def print_shutting_down():
                logging.info("Client is shutting down")

            d.addCallback(lambda _: print_shutting_down())
            d.addCallback(lambda _: arg)
            return d

        d = self.wait_for_hash_from_queue(sd_hash_queue)
        d.addCallback(start_transfer)
        d.addBoth(stop)

        return d

    def test_live_transfer(self):

        sd_hash_queue = Queue()
        kill_event = Event()
        dead_event = Event()
        server_args = (sd_hash_queue, kill_event, dead_event)
        server = Process(target=start_live_server, args=server_args)
        server.start()
        self.server_processes.append(server)

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 1)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        os.mkdir(db_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
                                   peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = TempLiveStreamMetadataManager(hash_announcer)

        d = self.wait_for_hash_from_queue(sd_hash_queue)

        def create_downloader(metadata, prm):
            info_validator = metadata.validator
            options = metadata.options
            factories = metadata.factories
            chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)]
            return factories[0].make_downloader(metadata, chosen_options, prm)

        def start_lbry_file(lbry_file):
            logging.debug("Calling lbry_file.start()")
            return lbry_file.start()

        def download_stream(sd_blob_hash):
            logging.debug("Downloaded the sd blob. Reading it now")
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = download_sd_blob(self.session, sd_blob_hash, prm)
            d.addCallback(sd_identifier.get_metadata_for_sd_blob)
            d.addCallback(create_downloader, prm)
            d.addCallback(start_lbry_file)
            return d

        def do_download(sd_blob_hash):
            logging.debug("Starting the download")
            d = self.session.setup()
            d.addCallback(lambda _: enable_live_stream())
            d.addCallback(lambda _: download_stream(sd_blob_hash))
            return d

        def enable_live_stream():
            base_live_stream_payment_rate_manager = BaseLiveStreamPaymentRateManager(
                MIN_BLOB_INFO_PAYMENT_RATE
            )
            add_live_stream_to_sd_identifier(sd_identifier,
                                             base_live_stream_payment_rate_manager)
            add_full_live_stream_downloader_to_sd_identifier(self.session, self.stream_info_manager,
                                                             sd_identifier,
                                                             base_live_stream_payment_rate_manager)

        d.addCallback(do_download)

        def check_md5_sum():
            f = open('test_file')
            hashsum = MD5.new()
            hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "215b177db8eed86d028b37e5cbad55c7")

        d.addCallback(lambda _: check_md5_sum())

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d = self.wait_for_event(dead_event, 15)

            def print_shutting_down():
                logging.info("Client is shutting down")

            d.addCallback(lambda _: print_shutting_down())
            d.addCallback(lambda _: arg)
            return d

        d.addBoth(stop)
        return d

    def test_last_blob_retrieval(self):
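        # Start one fast and one slow uploader serving the same blob, then download
        # that blob directly with a StandaloneBlobDownloader.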

        kill_event = Event()
        dead_event_1 = Event()
        blob_hash_queue_1 = Queue()
        blob_hash_queue_2 = Queue()
        fast_uploader = Process(target=start_blob_uploader,
                                args=(blob_hash_queue_1, kill_event, dead_event_1, False))
        fast_uploader.start()
        self.server_processes.append(fast_uploader)
        dead_event_2 = Event()
        slow_uploader = Process(target=start_blob_uploader,
                                args=(blob_hash_queue_2, kill_event, dead_event_2, True))
        slow_uploader.start()
        self.server_processes.append(slow_uploader)

        logging.debug("Testing transfer")

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 2)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=blob_dir, peer_port=5553,
                                   use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)

        d1 = self.wait_for_hash_from_queue(blob_hash_queue_1)
        d2 = self.wait_for_hash_from_queue(blob_hash_queue_2)
        d = defer.DeferredList([d1, d2], fireOnOneErrback=True)

        def get_blob_hash(results):
            self.assertEqual(results[0][1], results[1][1])
            return results[0][1]

        d.addCallback(get_blob_hash)

        def download_blob(blob_hash):
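            # download a single blob directly rather than through a stream downloader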
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            downloader = StandaloneBlobDownloader(blob_hash, self.session.blob_manager, peer_finder,
                                                  rate_limiter, prm, wallet)
            d = downloader.download()
            return d

        def start_transfer(blob_hash):

            logging.debug("Starting the transfer")

            d = self.session.setup()
            d.addCallback(lambda _: download_blob(blob_hash))

            return d

        d.addCallback(start_transfer)

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d1 = self.wait_for_event(dead_event_1, 15)
            d2 = self.wait_for_event(dead_event_2, 15)
            dl = defer.DeferredList([d1, d2])

            def print_shutting_down():
                logging.info("Client is shutting down")

            dl.addCallback(lambda _: print_shutting_down())
            dl.addCallback(lambda _: arg)
            return dl

        d.addBoth(stop)

        return d

    def test_double_download(self):
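        # Download the same stream twice, delete the first copy, and confirm the
        # second copy still reports every blob as complete.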

        sd_hash_queue = Queue()
        kill_event = Event()
        dead_event = Event()
        uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_event, 5209343))
        uploader.start()
        self.server_processes.append(uploader)

        logging.debug("Testing double download")

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 1)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        downloaders = []

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=blob_dir, peer_port=5553, use_upnp=False,
                                   rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = DBLBRYFileMetadataManager(self.session.db_dir)

        self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)

        def make_downloader(metadata, prm):
            info_validator = metadata.validator
            options = metadata.options
            factories = metadata.factories
            chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)]
            return factories[0].make_downloader(metadata, chosen_options, prm)

        def append_downloader(downloader):
            downloaders.append(downloader)
            return downloader

        def download_file(sd_hash):
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = download_sd_blob(self.session, sd_hash, prm)
            d.addCallback(sd_identifier.get_metadata_for_sd_blob)
            d.addCallback(make_downloader, prm)
            d.addCallback(append_downloader)
            d.addCallback(lambda downloader: downloader.start())
            return d

        def check_md5_sum():
            f = open('test_file')
            hashsum = MD5.new()
            hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

        def delete_lbry_file():
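            # delete the first download, and remove the stream metadata only if no
            # other lbry file still references that stream hash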
            logging.debug("deleting the file...")
            d = self.lbry_file_manager.delete_lbry_file(downloaders[0])
            d.addCallback(lambda _: self.lbry_file_manager.get_count_for_stream_hash(downloaders[0].stream_hash))
            d.addCallback(lambda c: self.stream_info_manager.delete_stream(downloaders[1].stream_hash) if c == 0 else True)
            return d

        def check_lbry_file():
            d = downloaders[1].status()
            d.addCallback(lambda _: downloaders[1].status())

            def check_status_report(status_report):
                self.assertEqual(status_report.num_known, status_report.num_completed)
                self.assertEqual(status_report.num_known, 3)

            d.addCallback(check_status_report)
            return d

        def start_transfer(sd_hash):

            logging.debug("Starting the transfer")

            d = self.session.setup()
            d.addCallback(lambda _: self.stream_info_manager.setup())
            d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
            d.addCallback(lambda _: self.lbry_file_manager.setup())
            d.addCallback(lambda _: download_file(sd_hash))
            d.addCallback(lambda _: check_md5_sum())
            d.addCallback(lambda _: download_file(sd_hash))
            d.addCallback(lambda _: delete_lbry_file())
            d.addCallback(lambda _: check_lbry_file())

            return d

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d = self.wait_for_event(dead_event, 15)

            def print_shutting_down():
                logging.info("Client is shutting down")

            d.addCallback(lambda _: print_shutting_down())
            d.addCallback(lambda _: arg)
            return d

        d = self.wait_for_hash_from_queue(sd_hash_queue)
        d.addCallback(start_transfer)
        d.addBoth(stop)
        return d

    def test_multiple_uploaders(self):
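        # Download a single stream while three uploader processes are serving it.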

        sd_hash_queue = Queue()
        num_uploaders = 3
        kill_event = Event()
        dead_events = [Event() for _ in range(num_uploaders)]
        ready_events = [Event() for _ in range(1, num_uploaders)]
        uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_events[0],
                                                             9373419, 2**22))
        uploader.start()
        self.server_processes.append(uploader)

        logging.debug("Testing multiple uploaders")

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, num_uploaders)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=None, peer_port=5553,
                                   use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = TempLBRYFileMetadataManager()

        self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)

        def start_additional_uploaders(sd_hash):
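            # once the sd hash is known, spawn the remaining uploader processes,
            # each seeded with that hash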
            for i in range(1, num_uploaders):
                uploader = Process(target=start_lbry_reuploader,
                                   args=(sd_hash, kill_event, dead_events[i], ready_events[i-1], i, 2**10))
                uploader.start()
                self.server_processes.append(uploader)
            return defer.succeed(True)

        def wait_for_ready_events():
            return defer.DeferredList([self.wait_for_event(ready_event, 60) for ready_event in ready_events])

        def make_downloader(metadata, prm):
            info_validator = metadata.validator
            options = metadata.options
            factories = metadata.factories
            chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)]
            return factories[0].make_downloader(metadata, chosen_options, prm)

        def download_file(sd_hash):
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = download_sd_blob(self.session, sd_hash, prm)
            d.addCallback(sd_identifier.get_metadata_for_sd_blob)
            d.addCallback(make_downloader, prm)
            d.addCallback(lambda downloader: downloader.start())
            return d

        def check_md5_sum():
            f = open('test_file')
            hashsum = MD5.new()
            hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "e5941d615f53312fd66638239c1f90d5")

        def start_transfer(sd_hash):

            logging.debug("Starting the transfer")

            d = start_additional_uploaders(sd_hash)
            d.addCallback(lambda _: wait_for_ready_events())
            d.addCallback(lambda _: self.session.setup())
            d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
            d.addCallback(lambda _: self.lbry_file_manager.setup())
            d.addCallback(lambda _: download_file(sd_hash))
            d.addCallback(lambda _: check_md5_sum())

            return d

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d = defer.DeferredList([self.wait_for_event(dead_event, 15) for dead_event in dead_events])

            def print_shutting_down():
                logging.info("Client is shutting down")

            d.addCallback(lambda _: print_shutting_down())
            d.addCallback(lambda _: arg)
            return d

        d = self.wait_for_hash_from_queue(sd_hash_queue)
        d.addCallback(start_transfer)
        d.addBoth(stop)

        return d
Code Example #9
File: functional_tests.py Project: Xuiquaxa/lbry
    def test_multiple_uploaders(self):
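        # Download a single stream while three uploader processes are serving it.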

        sd_hash_queue = Queue()
        num_uploaders = 3
        kill_event = Event()
        dead_events = [Event() for _ in range(num_uploaders)]
        ready_events = [Event() for _ in range(1, num_uploaders)]
        uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_events[0],
                                                             9373419, 2**22))
        uploader.start()
        self.server_processes.append(uploader)

        logging.debug("Testing multiple uploaders")

        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, num_uploaders)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=None, peer_port=5553,
                                   use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = TempLBRYFileMetadataManager()

        self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)

        def start_additional_uploaders(sd_hash):
            for i in range(1, num_uploaders):
                uploader = Process(target=start_lbry_reuploader,
                                   args=(sd_hash, kill_event, dead_events[i], ready_events[i-1], i, 2**10))
                uploader.start()
                self.server_processes.append(uploader)
            return defer.succeed(True)

        def wait_for_ready_events():
            return defer.DeferredList([self.wait_for_event(ready_event, 60) for ready_event in ready_events])

        def make_downloader(metadata, prm):
            info_validator = metadata.validator
            options = metadata.options
            factories = metadata.factories
            chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)]
            return factories[0].make_downloader(metadata, chosen_options, prm)

        def download_file(sd_hash):
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = download_sd_blob(self.session, sd_hash, prm)
            d.addCallback(sd_identifier.get_metadata_for_sd_blob)
            d.addCallback(make_downloader, prm)
            d.addCallback(lambda downloader: downloader.start())
            return d

        def check_md5_sum():
            f = open('test_file')
            hashsum = MD5.new()
            hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "e5941d615f53312fd66638239c1f90d5")

        def start_transfer(sd_hash):

            logging.debug("Starting the transfer")

            d = start_additional_uploaders(sd_hash)
            d.addCallback(lambda _: wait_for_ready_events())
            d.addCallback(lambda _: self.session.setup())
            d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
            d.addCallback(lambda _: self.lbry_file_manager.setup())
            d.addCallback(lambda _: download_file(sd_hash))
            d.addCallback(lambda _: check_md5_sum())

            return d

        def stop(arg):
            if isinstance(arg, Failure):
                logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
            else:
                logging.debug("Client is stopping normally.")
            kill_event.set()
            logging.debug("Set the kill event")
            d = defer.DeferredList([self.wait_for_event(dead_event, 15) for dead_event in dead_events])

            def print_shutting_down():
                logging.info("Client is shutting down")

            d.addCallback(lambda _: print_shutting_down())
            d.addCallback(lambda _: arg)
            return d

        d = self.wait_for_hash_from_queue(sd_hash_queue)
        d.addCallback(start_transfer)
        d.addBoth(stop)

        return d