示例#1
0
文件: commands.py 项目: lae/Servrhe
    def loadCommands(self):
        """
        Discover and (re)load all command modules in the configured commands
        directory, then replace ``self.commands`` atomically.

        A module is skipped when it is a package, when its ``command``
        callable does not begin with the standard five dispatch arguments,
        or when its config marks it disabled.  Generator-based commands are
        wrapped with ``inlineCallbacks`` so they always return a Deferred.
        """
        commands = {}
        path = yield self.config.get("path","commands")
        for loader, name, ispkg in pkgutil.iter_modules([path]):
            if ispkg:
                continue
            try:
                command = getattr(__import__(path, fromlist=[name.encode("utf8")]), name)
                # Reload so edits to command modules take effect without a restart.
                reload(command)
                command.config["name"] = name
                command.config["command"] = inlineCallbacks(command.command) if inspect.isgeneratorfunction(command.command) else command.command
                args, _, _, kwargs = inspect.getargspec(command.command)

                # Every command must accept the standard dispatch arguments first.
                if args[:5] != ["guid", "manager", "irc", "channel", "user"]:
                    continue

                if kwargs:
                    # Trailing parameters with defaults are the optional "kwargs".
                    boundary = -1 * len(kwargs)
                    command.config["args"] = args[5:boundary]
                    command.config["kwargs"] = args[boundary:]
                else:
                    command.config["args"] = args[5:]
                    command.config["kwargs"] = []

                if "disabled" in command.config and command.config["disabled"]:
                    continue

                commands[name] = command.config
            except Exception:
                # Narrowed from a bare ``except:`` which would also swallow
                # KeyboardInterrupt/SystemExit; only module failures are logged.
                self.err("Failed to load {}.{}", path, name)
        self.commands = commands
示例#2
0
def action(fn):
    """
    Decorator which wraps an action callback conforming to the twisted
    inlineCallbacks interface.  This lets us write synchronous-looking code
    that uses asynchronous methods from twisted.

    The returned trampoline takes a parsed namespace ``ns``, configures
    logging from the ``client`` config section, runs the wrapped callback
    once the reactor is up, and stops the reactor when the callback's
    Deferred fires (reporting the error message on failure).
    """
    from twisted.internet import reactor
    from twisted.internet.defer import inlineCallbacks
    from twisted.python.failure import Failure
    inlinefn = inlineCallbacks(fn)
    def on_return(ret):
        # A Failure means the action errbacked; report it before shutdown.
        if isinstance(ret, Failure):
            # print() call syntax: identical output on Python 2 for a single
            # argument, and valid (rather than a SyntaxError) on Python 3.
            print("action failed: " + ret.getErrorMessage())
        reactor.stop()
    def trampoline(ns):
        section = ns.get_section('client')
        debug = section.get_bool("debug", False)
        if debug:
            startLogging(StdoutHandler(), DEBUG)
        else:
            startLogging(None)
        def when_running():
            defer = inlinefn(ns)
            defer.addBoth(on_return)
        reactor.callWhenRunning(when_running)
        reactor.run()
    return trampoline
    def test_inlineCallbacksTracebacks(self):
        """
        inlineCallbacks that re-raise tracebacks into their deferred
        should not lose their tracebacks.
        """
        f = getDivisionFailure()
        d = defer.Deferred()
        try:
            # Re-raise inside an except block so errback() captures the live
            # traceback of the original division error.
            f.raiseException()
        except:
            d.errback()

        failures = []
        def collect_error(result):
            failures.append(result)

        # Route the failed Deferred through an inlineCallbacks generator.
        def ic(d):
            yield d
        ic = defer.inlineCallbacks(ic)
        ic(d).addErrback(collect_error)

        # Exactly one failure must have propagated through the generator.
        newFailure, = failures
        # The last traceback frame should still show the original "1/0" source.
        self.assertEquals(
            traceback.extract_tb(newFailure.getTracebackObject())[-1][-1],
            "1/0"
        )
示例#4
0
文件: protocol.py 项目: Dev0n/txMySQL
def operation(func):
    """
    Decorator: adapt a generator method with ``inlineCallbacks`` and route
    every invocation through the instance's ``_do_operation`` serializer.
    """
    wrapped = defer.inlineCallbacks(func)

    def runner(self, *args, **kwargs):
        return self._do_operation(wrapped, self, *args, **kwargs)

    return runner
示例#5
0
def delayed(func):
    """
    Decorator for Twisted web render methods written as generators.

    Wraps *func* with ``inlineCallbacks``, logs the call and its result,
    finishes the request when the Deferred fires — unless the client has
    already disconnected or the handler returned ``NOT_DONE_YET`` itself —
    and maps failures to an HTTP 500 response.
    """
    func = inlineCallbacks(func)

    @wraps(func)
    def wrapper(self, request, *args, **kwargs):
        # Mutable cell so the nested callbacks can flag a client disconnect.
        was_interrupted = [False]
        log = Logger('web')

        def on_cancel(failure):
            err(failure, 'Call to "%s" was interrupted' % request.path)
            was_interrupted[0] = True

        request.notifyFinish().addErrback(on_cancel)

        def finish_request(result):
            log.debug('%s(%r, args=%r, kwargs=%r)=%s' % (func.__name__, request, args, kwargs, result))

            if isinstance(result, Failure):
                request.setResponseCode(INTERNAL_SERVER_ERROR)
                log.exception('Call to %s(%r, args=%r, kwargs=%r) failed' % (func.__name__, request, args, kwargs), exc_info = (result.type, result.value, result.getTracebackObject()))

            # Only finish when the client is still connected and the handler
            # has not taken responsibility for finishing the request itself.
            if not was_interrupted[0] and result != NOT_DONE_YET:
                request.finish()

        log.debug('Calling %s(%r, args=%r, kwargs=%r)' % (func.__name__, request, args, kwargs))
        d = func(self, request, *args, **kwargs)
        log.debug('Result: %s' % d)
        log.debug('is returned deferred was called? %s' % d.called)
        d.addBoth(finish_request)
        return NOT_DONE_YET
    return wrapper
示例#6
0
文件: __init__.py 项目: corydodt/Noms
def wrapDatabaseAndCallbacks(fn):
    """
    Decorator; convenience for methods that need mock db and yield-Deferred syntax
    """
    decorated = mockConfig()(defer.inlineCallbacks(fn))
    return wraps(fn)(decorated)
示例#7
0
    def __init__(self, orig, max_entries=1000, num_args=1, lru=True,
                 inlineCallbacks=False):
        """
        Set up a cache descriptor around *orig*.

        The first *num_args* positional arguments (after ``self``) form the
        cache key; generator functions are adapted with ``inlineCallbacks``
        when requested.
        """
        self.orig = orig

        # Adapt generator-style functions so callers always get a Deferred.
        self.function_to_call = (
            defer.inlineCallbacks(orig) if inlineCallbacks else orig
        )

        self.max_entries = max_entries
        self.num_args = num_args
        self.lru = lru

        # Names of the explicit positional args used as the cache key.
        self.arg_names = inspect.getargspec(orig).args[1:num_args + 1]

        if len(self.arg_names) < self.num_args:
            raise Exception(
                "Not enough explicit positional arguments to key off of for %r."
                " (@cached cannot key off of *args or **kwars)"
                % (orig.__name__,)
            )

        self.cache = Cache(
            name=self.orig.__name__,
            max_entries=self.max_entries,
            keylen=self.num_args,
            lru=self.lru,
        )
示例#8
0
    def wrap(fn):
        """
        Wrap a test function: adapt generators with ``inlineCallbacks``,
        give the test a Deferred-aware nose wrapper with a timeout, reset
        the actor framework's global state, run the test under an error
        collector, stop all nodes, drain idle calls, and finally check for
        leaked Deferred garbage.
        """
        if inspect.isgeneratorfunction(fn):
            fn = inlineCallbacks(fn)

        # Per-test timeout: honor fn.timeout if the test set one, else 1s.
        @functools.wraps(fn)
        @twistedtools.deferred(timeout=fn.timeout if hasattr(fn, 'timeout') else 1.0)
        def ret():
            # dbg("\n============================================\n")

            # Flag test mode for the actor framework before anything runs.
            import spinoff.actor._actor
            spinoff.actor._actor.TESTING = True

            Actor.reset_flags(debug=True)

            # TODO: once the above TODO (fresh Node for each test fn) is complete, consider making Events non-global by
            # having each Node have its own Events instance.
            Events.reset()

            def check_memleaks():
                # CPython-only: PyPy's GC makes this garbage inspection moot.
                if '__pypy__' not in sys.builtin_module_names:
                    gc.collect()
                    for trash in gc.garbage[:]:
                        if isinstance(trash, DebugInfo):
                            # dbg("DEBUGINFO: __del__")
                            # An unhandled Deferred failure leaked through the
                            # test; surface it as an Unclean error.
                            if trash.failResult is not None:
                                exc = Unclean(repr(trash.failResult.value) + '\n' + str(trash._getDebugTracebacks()))
                                trash.__dict__.clear()
                                raise exc
                            gc.garbage.remove(trash)

                    assert not gc.garbage, "Memory leak detected: %r" % (gc.garbage,)

                    # if gc.garbage:
                    #     dbg("GARGABE: detected after %s:" % (fn.__name__,), len(gc.garbage))
                    #     import objgraph as ob
                    #     import os

                    #     def dump_chain(g_):
                    #         def calling_test(x):
                    #             if not isframework(x):
                    #                 return None
                    #         import spinoff
                    #         isframework = lambda x: type(x).__module__.startswith(spinoff.__name__)
                    #         ob.show_backrefs([g_], filename='backrefs.png', max_depth=100, highlight=isframework)

                    #     for gen in gc.garbage:
                    #         dump_chain(gen)
                    #         dbg("   TESTWRAP: mem-debuggin", gen)
                    #         import pdb; pdb.set_trace()
                    #         os.remove('backrefs.png')

            # Cleanup steps run regardless of test outcome; each addBoth
            # threads the original result value through unchanged.
            return (
                deferred_with(ErrorCollector(), fn)
                .addBoth(lambda result: Node.stop_all().addCallback(lambda _: result))
                .addBoth(lambda result: (_process_idle_calls(), result)[-1])
                .addBoth(lambda result: (check_memleaks(), result)[-1])
            )

        return ret
示例#9
0
    def testReturnNoValue(self):
        """Ensure a standard python return results in a None result."""
        def _noReturn():
            yield 5
            return
        decorated = inlineCallbacks(_noReturn)

        d = decorated()
        d.addCallback(self.assertEqual, None)
        return d
示例#10
0
def set_route(call, method, route, **kwargs):
    """
    Wrap *call* with ``inlineCallbacks`` and attach routing metadata:
    HTTP method, compiled route pattern, and optional template/last/redirect
    settings taken from keyword arguments.
    """
    handler = defer.inlineCallbacks(call)
    handler.method = method
    handler.route = re.compile(route)
    for attr, default in (('template', None), ('last', False), ('redirect', None)):
        setattr(handler, attr, kwargs.get(attr, default))
    return handler
示例#11
0
def webInlineCallbacks(function):
    """
    Wrap a generator render method with ``inlineCallbacks`` and return a
    callable that kicks it off and immediately reports ``NOT_DONE_YET`` to
    Twisted web (the Deferred finishes the request asynchronously).
    """
    wrapped = inlineCallbacks(function)

    def render(*args, **kwargs):
        wrapped(*args, **kwargs)
        return NOT_DONE_YET

    return render
示例#12
0
    def testReturnValue(self):
        """Ensure that returnValue works."""
        def _return():
            yield 5
            returnValue(6)
        decorated = inlineCallbacks(_return)

        d = decorated()
        d.addCallback(self.assertEqual, 6)
        return d
示例#13
0
def deferredPage(func):
    """
    Decorator for Twisted web render generators: wrap with
    ``inlineCallbacks``, report failures via ``request.processingFailed``
    (swallowing the failure afterwards), and return ``NOT_DONE_YET``.
    """
    wrapped = defer.inlineCallbacks(func)

    def render(self, request):
        d = wrapped(self, request)
        d.addErrback(request.processingFailed)
        d.addErrback(lambda _failure: None)
        return NOT_DONE_YET

    return render
示例#14
0
    def __init__(self, orig, num_args, inlineCallbacks, cache_context=False):
        """
        Set up a cache descriptor around *orig*.

        The first *num_args* explicit positional arguments (after ``self``)
        form the cache key.  Generator functions are adapted with
        ``inlineCallbacks`` when requested.  When *cache_context* is true the
        wrapped function must accept a ``cache_context`` argument, which is
        excluded from the key.
        """
        self.orig = orig

        if inlineCallbacks:
            self.function_to_call = defer.inlineCallbacks(orig)
        else:
            self.function_to_call = orig

        arg_spec = inspect.getargspec(orig)
        all_args = arg_spec.args

        # cache_context must be declared consistently in both the function
        # signature and the decorator arguments.
        if "cache_context" in all_args:
            if not cache_context:
                raise ValueError(
                    "Cannot have a 'cache_context' arg without setting"
                    " cache_context=True"
                )
        elif cache_context:
            raise ValueError(
                "Cannot have cache_context=True without having an arg"
                " named `cache_context`"
            )

        # Default: key on every explicit arg except self (and cache_context).
        if num_args is None:
            num_args = len(all_args) - 1
            if cache_context:
                num_args -= 1

        if len(all_args) < num_args + 1:
            raise Exception(
                "Not enough explicit positional arguments to key off for %r: "
                "got %i args, but wanted %i. (@cached cannot key off *args or "
                "**kwargs)"
                % (orig.__name__, len(all_args), num_args)
            )

        self.num_args = num_args

        # list of the names of the args used as the cache key
        self.arg_names = all_args[1:num_args + 1]

        # self.arg_defaults is a map of arg name to its default value for each
        # argument that has a default value
        if arg_spec.defaults:
            self.arg_defaults = dict(zip(
                all_args[-len(arg_spec.defaults):],
                arg_spec.defaults
            ))
        else:
            self.arg_defaults = {}

        if "cache_context" in self.arg_names:
            raise Exception(
                "cache_context arg cannot be included among the cache keys"
            )

        self.add_cache_context = cache_context
示例#15
0
def async_test(f, *args, **kwargs):
    "Starts an asynchronous test, waits for it to complete, and returns its result."
    # Once the deferred fires, result becomes [success_flag, value].
    result = []
    def cb(value, good):
        result.append(good)
        result.append(value)
    inlineCallbacks(f)(*args, **kwargs).addCallbacks(callback = cb, callbackArgs = [True],
                                    errback  = cb, errbackArgs  = [False])
    # Spin the reactor manually until one of the callbacks has run.
    while not result:
        reactor.iterate(0.02)
    if result[0]:
        # Uncomment the following line to check that all the tests
        # really are being run to completion.
        #raise(Exception("Success"))
        return result[1]
    else:
        # On failure, print the traceback and re-raise the original exception.
        result[1].printTraceback()
        result[1].raiseException()
示例#16
0
 def wrap(f):
     """Wrap *f* with schema validation and ``inlineCallbacks``."""
     func = inlineCallbacks(rpc_schema.schema(path, drop_args=drop_args)(f))
     def wrapped_f(*args, **kwargs):
         # NOTE(review): because `func` is inlineCallbacks-wrapped, an error
         # raised inside the generator normally surfaces on the returned
         # Deferred rather than synchronously here — confirm the schema
         # decorator actually raises ValidationError before the generator runs,
         # otherwise this except clause may never fire.
         try:
             result = func(*args, **kwargs)
             return result
         except ValidationError as e:
             error(e)
             raise WebserverException("exceptions/webserver/schema_exception", str(f.validator.schema), e.args)
     return wrapped_f
示例#17
0
 def test_get_block(self):
     """Fetch a known dash block from a local node and verify its hashes."""
     factory = p2p.ClientFactory(networks.nets['dash'])
     c = reactor.connectTCP('127.0.0.1', 9999, factory)
     try:
         h = 0x00000000000132b9afeca5e9a2fdf4477338df6dcff1342300240bc70397c4bb
         # retry()(...) re-runs the inlineCallbacks lambda until a connected
         # protocol successfully returns the requested block.
         block = yield deferral.retry()(defer.inlineCallbacks(lambda: defer.returnValue((yield (yield factory.getProtocol()).get_block(h)))))()
         # Merkle root must match the transactions, and the header must hash
         # back to the requested block hash.
         assert data.merkle_hash(map(data.hash256, map(data.tx_type.pack, block['txs']))) == block['header']['merkle_root']
         assert data.hash256(data.block_header_type.pack(block['header'])) == h
     finally:
         factory.stopTrying()
         c.disconnect()
示例#18
0
    def test_nonGeneratorReturnValue(self):
        """
        Ensure that C{TypeError} with a message about L{inlineCallbacks} is
        raised when a non-generator calls L{returnValue}.
        """
        def _noYield():
            returnValue(5)
        decorated = inlineCallbacks(_noYield)

        raised = self.assertRaises(TypeError, decorated)
        self.assertIn("inlineCallbacks", str(raised))
示例#19
0
 def test_get_block(self):
     """Fetch a known bitcoin block from a local node and verify its hashes."""
     factory = p2p.ClientFactory(networks.nets['bitcoin'])
     c = reactor.connectTCP('127.0.0.1', 8333, factory)
     try:
         h = 0x000000000000046acff93b0e76cd10490551bf871ce9ac9fad62e67a07ff1d1e
         # retry()(...) re-runs the inlineCallbacks lambda until a connected
         # protocol successfully returns the requested block.
         block = yield deferral.retry()(defer.inlineCallbacks(lambda: defer.returnValue((yield (yield factory.getProtocol()).get_block(h)))))()
         # Merkle root must match the transactions, and the header must hash
         # back to the requested block hash.
         assert data.merkle_hash(map(data.hash256, map(data.tx_type.pack, block['txs']))) == block['header']['merkle_root']
         assert data.hash256(data.block_header_type.pack(block['header'])) == h
     finally:
         factory.stopTrying()
         c.disconnect()
示例#20
0
文件: test_defgen.py 项目: 0004c/VTK
    def testYieldNonDeferrred(self):
        """
        Ensure that yielding a non-deferred passes it back as the
        result of the yield expression.
        """
        def _test():
            x = yield 5
            returnValue(5)
        decorated = inlineCallbacks(_test)

        d = decorated()
        d.addCallback(self.assertEqual, 5)
        return d
示例#21
0
def inline_callbacks(original, debug=False):
    """
    Decorate a function like ``inlineCallbacks`` would but in a more
    Eliot-friendly way.  Use it just like ``inlineCallbacks`` but where you
    want Eliot action contexts to Do The Right Thing inside the decorated
    function.
    """
    generator_fn = eliot_friendly_generator_function(original)
    if debug:
        generator_fn.debug = True
    return inlineCallbacks(generator_fn)
示例#22
0
def go_channel(f, *args, **kwargs):
    """
    Run *f* as an inlineCallbacks coroutine and deliver its final value on a
    one-slot Channel, which is closed afterwards (immediately if the value
    is CLOSED).
    """
    deferred = inlineCallbacks(f)(*args, **kwargs)
    channel = Channel(1)

    def _finish(value):
        if value == CLOSED:
            channel.close()
        else:
            put_then_callback(channel, value, lambda ok: channel.close())

    deferred.addBoth(_finish)
    return channel
示例#23
0
 def test_get_block(self):
     """Fetch a known regtest block from a local node and verify its hashes."""
     factory = p2p.ClientFactory(networks.nets['bitcoin_regtest'])
     c = reactor.connectTCP('127.0.0.1', 18444, factory)
     try:
         h = 0x0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206
         # retry()(...) re-runs the inlineCallbacks lambda until a connected
         # protocol successfully returns the requested block.
         block = yield deferral.retry()(defer.inlineCallbacks(lambda: defer.returnValue((yield (yield factory.getProtocol()).get_block(h)))))()
         # Merkle root must match the transactions, and the header must hash
         # back to the requested block hash.
         assert data.merkle_hash(map(data.hash256, map(data.tx_type.pack, block['txs']))) == block['header']['merkle_root']
         assert data.hash256(data.block_header_type.pack(block['header'])) == h
     finally:
         factory.stopTrying()
         c.disconnect()
示例#24
0
    def test_nonGeneratorReturn(self):
        """
        Ensure that C{TypeError} with a message about L{inlineCallbacks} is
        raised when a non-generator returns something other than a generator.
        """
        def _noYield():
            return 5
        decorated = inlineCallbacks(_noYield)

        raised = self.assertRaises(TypeError, decorated)
        self.assertIn("inlineCallbacks", str(raised))
示例#25
0
        def decorator(func):
            """Register *func* as a handler under an explicit or derived URI."""
            # An explicit uri wins; otherwise derive one from the prefix and
            # the function's own name.
            if not uri:
                assert(self._prefix is not None)
                _uri = "{0}.{1}".format(self._prefix, func.__name__)
            else:
                _uri = uri

            # Generator-based handlers are adapted to return Deferreds.
            if inspect.isgeneratorfunction(func):
                func = inlineCallbacks(func)

            self._handlers.append((_uri, func))
            return func
示例#26
0
文件: tx.py 项目: doncatnip/kanone
def validateDecorator( validator, method, include, exclude, onInvalid, inlineCallbacks ):
    """
    Build a wrapper around *method* that validates its (keyword-normalized)
    arguments with *validator* before the method runs.

    :param validator: validator whose ``context`` receives the call's kwargs
    :param method: the function to guard; wrapped with
        ``defer.inlineCallbacks`` when *inlineCallbacks* is true
    :param include: restrict validation to these parameter names
    :param exclude: skip validation for these parameter names
        (mutually exclusive with *include*)
    :param onInvalid: callback used when validation fails
    :returns: the wrapping function, which returns a Deferred
    :raises SyntaxError: if both *include* and *exclude* are given
    """
    if include and exclude:
        raise SyntaxError("'include' and 'exclude' cannot be used at the same time")

    spec = getArgSpec( method )
    hasVarargs = spec.varargs is not None
    varargs =  spec.varargs or '*varargs'
    keywords = spec.keywords or False

    methodParameterNames = getParameterNames( method, skipSelf=False )

    # Parameters excluded from validation: the explicit exclude list, or
    # everything not mentioned in the include list.
    skip = ()
    if exclude:
        skip = exclude
    if include:
        skip = set(methodParameterNames) - set(include)

    # (removed a no-op self-assignment of `varargs` that was here)

    hasVarargs = spec.varargs not in skip and hasVarargs

    keywords   = keywords not in skip and keywords

    if inlineCallbacks:
        method = defer.inlineCallbacks( method )

    def __wrap( *fargs, **fkwargs):

        d = defer.Deferred()

        # Normalize positional arguments into keyword form so everything can
        # be validated uniformly.
        (fargs, fkwargs, shifted ) = varargs2kwargs( method, fargs, fkwargs, skipSelf=False )
        origKwargs = dict(fkwargs)

        if keywords is not False:
            # Collect unknown keyword arguments under the **keywords name.
            restKwargs = dict(\
                ( key, fkwargs.pop(key))\
                    for key in list(fkwargs.keys()) if key not in methodParameterNames
                )
            fkwargs[ keywords ] = restKwargs

        if fargs or hasVarargs:
            fkwargs[ varargs ] = list(fargs)

        result = validator.context\
            ( dict( ( key, fkwargs[ key] ) for key in fkwargs if key not in skip )
            ).result

        result.addBoth( validateDecorator_gotValidationResult, d, fargs, origKwargs, method, varargs, keywords, shifted, onInvalid )

        return d

    return __wrap
示例#27
0
    def testYieldNonDeferred(self):
        """
        Ensure that yielding a non-deferred passes it back as the
        result of the yield expression.

        @return: A L{twisted.internet.defer.Deferred}
        @rtype: L{twisted.internet.defer.Deferred}
        """
        def _test():
            yield 5
            returnValue(5)
        decorated = inlineCallbacks(_test)

        d = decorated()
        d.addCallback(self.assertEqual, 5)
        return d
示例#28
0
    def _decorator(f):
        """Run the decorated coroutine *times* times, recording each call's
        duration in ``timeStore`` keyed by the function's name."""
        def _decorated(*args, **kwargs):
            for x in xrange(times):
                startTime=timer()
                result = yield f(*args, **kwargs)
                timeStore.setdefault(f.__name__, []).append(timer()-startTime)

                # Progress dot every ~10% of iterations.
                # NOTE(review): float modulo — this only fires when x is an
                # exact multiple of times*0.10; confirm this behaves as
                # intended for values of `times` not divisible by 10.
                if x%(times*.10) == 0.0:
                    progressDest.write('.')
                    progressDest.flush()
            progressDest.write('\n')

        _decorated.__name__ = f.__name__

        return inlineCallbacks(_decorated)
示例#29
0
        def wrapper(*args, **kwargs):
            """
            Wrap original function and run in inside reactor loop.
            """

            from twisted.internet import defer, reactor

            def report_failure(failure):
                print(failure)

            d = defer.inlineCallbacks(function)(*args, **kwargs)
            # Report any failure, then stop the reactor either way.
            d.addErrback(report_failure)
            d.addBoth(lambda unused: reactor.stop())

            reactor.run()
示例#30
0
 def __init__(self):
     """Scan the instance for device settings and signals and wire them up."""
     # Maps setting IDs to their (possibly Deferred-adapted) callables.
     self.device_settings = {}
     for attr_name in dir(self):
         attr = getattr(self,attr_name)
         if hasattr(attr,DEVICE_SETTING):
             # Generator-based settings are adapted to return Deferreds.
             if inspect.isgeneratorfunction(attr):
                 attr = inlineCallbacks(attr)
             self.device_settings[getattr(attr,DEVICE_SETTING_ID)]=attr
         elif isinstance(attr,DeviceSignal):
             # Replace class-level signal descriptors with instance-bound ones.
             setattr(
                 self,
                 attr_name,
                 _DeviceSignal(attr,self)
             )
     self.owning_context = None
示例#31
0
文件: web.py 项目: whortonda/p2pool
def get_web_root(wb,
                 datadir_path,
                 bitcoind_getinfo_var,
                 stop_event=variable.Event()):
    node = wb.node
    start_time = time.time()

    web_root = resource.Resource()

    def get_users():
        height, last = node.tracker.get_height_and_last(
            node.best_share_var.value)
        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(
            node.best_share_var.value, min(height, 720), 65535 * 2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_address(
                script, node.net.PARENT)] = weights[script] / total_weight
        return res

    def get_current_scaled_txouts(scale, trunc=0):
        txouts = node.get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value * scale // total)
                       for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice(
                    (script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(
                results.iteritems())] += int(scale) - sum(results.itervalues())
        return results

    def get_patron_sendmany(total=None, trunc='0.01'):
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total) * 1e8)
        trunc = int(float(trunc) * 1e8)
        return json.dumps(
            dict((bitcoin_data.script2_to_address(script, node.net.PARENT),
                  value / 1e8) for script, value in get_current_scaled_txouts(
                      total, trunc).iteritems()
                 if bitcoin_data.script2_to_address(script, node.net.PARENT)
                 is not None))

    def get_global_stats():
        # averaged over last hour
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value),
                         3600 // node.net.SHARE_PERIOD)

        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(
            node.tracker, node.best_share_var.value, lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(
            node.tracker, node.best_share_var.value, lookbehind)
        diff = bitcoin_data.target_to_difficulty(
            wb.current_work.value['bits'].target)

        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate / (1 - stale_prop),
            pool_stale_prop=stale_prop,
            min_difficulty=bitcoin_data.target_to_difficulty(
                node.tracker.items[node.best_share_var.value].max_target),
            network_block_difficulty=diff,
            network_hashrate=(diff * 2**32 // node.net.PARENT.BLOCK_PERIOD),
        )

    def get_local_stats():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value),
                         3600 // node.net.SHARE_PERIOD)

        global_stale_prop = p2pool_data.get_average_stale_prop(
            node.tracker, node.best_share_var.value, lookbehind)

        my_unstale_count = sum(1 for share in node.tracker.get_chain(
            node.best_share_var.value, lookbehind)
                               if share.hash in wb.my_share_hashes)
        my_orphan_count = sum(1 for share in node.tracker.get_chain(
            node.best_share_var.value, lookbehind)
                              if share.hash in wb.my_share_hashes
                              and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in node.tracker.get_chain(
            node.best_share_var.value, lookbehind)
                           if share.hash in wb.my_share_hashes
                           and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count

        my_stale_prop = my_stale_count / my_share_count if my_share_count != 0 else None

        my_work = sum(
            bitcoin_data.target_to_average_attempts(share.target) for share in
            node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
            if share.hash in wb.my_share_hashes)
        actual_time = (
            node.tracker.items[node.best_share_var.value].timestamp -
            node.tracker.items[node.tracker.get_nth_parent_hash(
                node.best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time

        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        (stale_orphan_shares,
         stale_doa_shares), shares, _ = wb.get_stale_counts()

        miner_last_difficulties = {}
        for addr in wb.last_work_shares.value:
            miner_last_difficulties[addr] = bitcoin_data.target_to_difficulty(
                wb.last_work_shares.value[addr].target)

        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s / (1 - global_stale_prop),
                actual=share_att_s /
                (1 - my_stale_prop) if my_stale_prop is not None else
                0,  # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count /
                my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count /
                my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            miner_last_difficulties=miner_last_difficulties,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares / shares) /
            (1 - global_stale_prop) if shares else
            None,  # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 -
                        (stale_orphan_shares + stale_doa_shares) / shares) /
            (1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            attempts_to_share=bitcoin_data.target_to_average_attempts(
                node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(
                node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy'] * 1e-8,
            warnings=p2pool_data.get_warnings(node.tracker,
                                              node.best_share_var.value,
                                              node.net,
                                              bitcoind_getinfo_var.value,
                                              node.bitcoind_work.value),
            donation_proportion=wb.donation_percentage / 100,
            version=p2pool.__version__,
            protocol_version=p2p.Protocol.VERSION,
            fee=wb.worker_fee,
        )

    class WebInterface(deferred_resource.DeferredResource):
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args

        def getChild(self, child, request):
            return WebInterface(self.func, self.mime_type,
                                self.args + (child, ))

        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = yield self.func(*self.args)
            defer.returnValue(
                json.dumps(res) if self.mime_type ==
                'application/json' else res)

    def decent_height():
        return min(node.tracker.get_height(node.best_share_var.value), 720)

    web_root.putChild(
        'rate',
        WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(
            node.tracker, node.best_share_var.value, decent_height(
            )) / (1 - p2pool_data.get_average_stale_prop(
                node.tracker, node.best_share_var.value, decent_height()))))
    web_root.putChild(
        'difficulty',
        WebInterface(lambda: bitcoin_data.target_to_difficulty(
            node.tracker.items[node.best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild(
        'user_stales',
        WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(
            ph, node.net.PARENT
        ), prop) for ph, prop in p2pool_data.get_user_stale_props(
            node.tracker, node.best_share_var.value,
            node.tracker.get_height(node.best_share_var.value)).iteritems())))
    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
    web_root.putChild(
        'current_payouts',
        WebInterface(lambda: dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), value /
             1e8) for script, value in node.get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany',
                      WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild(
        'peer_addresses',
        WebInterface(lambda: ' '.join('%s%s' % (
            peer.transport.getPeer().host, ':' + str(peer.transport.getPeer(
            ).port) if peer.transport.getPeer().port != node.net.P2P_PORT else
            '') for peer in node.p2p_node.peers.itervalues())))
    # Remembered-transaction pool size per connected peer ("host:port" keys).
    web_root.putChild(
        'peer_txpool_sizes',
        WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer(
        ).host, peer.transport.getPeer().port), peer.remembered_txs_size) for
                                  peer in node.p2p_node.peers.itervalues())))
    # Best of 3 pings (in ms, via the x / 0.001 callback) to every peer;
    # failed pings become None through the errback.
    # NOTE(review): the (yield ...) inside lambdas is Python 2-only syntax;
    # this whole endpoint cannot be ported to Python 3 as written.
    web_root.putChild(
        'pings',
        WebInterface(
            defer.inlineCallbacks(lambda: defer.returnValue(
                dict([(a, (yield b)) for a, b in [(
                    '%s:%i' % (peer.transport.getPeer().host,
                               peer.transport.getPeer().port),
                    defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                        min([(yield peer.do_ping().addCallback(
                            lambda x: x / 0.001).addErrback(lambda fail: None))
                             for i in xrange(3)])))()) for peer in list(
                                 node.p2p_node.peers.itervalues())]])))))
    web_root.putChild(
        'peer_versions',
        WebInterface(lambda: dict(
            ('%s:%i' % peer.addr, peer.other_sub_version)
            for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild(
        'payout_addr',
        WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(
            wb.my_pubkey_hash, node.net.PARENT)))
    web_root.putChild(
        'payout_addrs',
        WebInterface(lambda: list(('%s' % bitcoin_data.pubkey_hash_to_address(
            add, node.net.PARENT)) for add in wb.pubkeys.keys)))
    # Shares from the last 24h of sharechain whose pow_hash also met the
    # parent network's block target, i.e. blocks found by the pool.
    web_root.putChild(
        'recent_blocks',
        WebInterface(lambda: [
            dict(
                ts=s.timestamp,
                hash='%064x' % s.header_hash,
                number=p2pool_data.parse_bip0034(s.share_data['coinbase'])[0],
                share='%064x' % s.hash,
            ) for s in node.tracker.get_chain(
                node.best_share_var.value,
                min(node.tracker.get_height(node.best_share_var.value), 24 * 60
                    * 60 // node.net.SHARE_PERIOD))
            if s.pow_hash <= s.header['bits'].target
        ]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild(
        'stale_rates',
        WebInterface(
            lambda: p2pool_data.get_stale_counts(node.tracker,
                                                 node.best_share_var.value,
                                                 decent_height(),
                                                 rates=True)))

    # Child resource for the newer JSON endpoints, served under /web/*.
    new_root = resource.Resource()
    web_root.putChild('web', new_root)

    # Load the persisted 24h stats log, if present. A corrupt or unreadable
    # file is logged and ignored so startup is never aborted by it.
    stat_log = []
    stats_path = os.path.join(datadir_path, 'stats')
    if os.path.exists(stats_path):
        try:
            with open(stats_path, 'rb') as f:
                stat_log = json.loads(f.read())
        # was a bare `except:` — that would also swallow KeyboardInterrupt
        # and SystemExit; Exception still covers IOError/ValueError here
        except Exception:
            log.err(None, 'Error loading stats:')

    def update_stat_log():
        """Append a snapshot of pool/local stats to stat_log and persist it.

        Keeps a rolling 24-hour window; does nothing until the sharechain is
        at least one hour (lookbehind) tall.
        """
        # Drop entries older than 24 hours from the front of the window.
        while stat_log and stat_log[0]['time'] < time.time() - 24 * 60 * 60:
            stat_log.pop(0)

        # One hour's worth of shares; skip until enough history exists.
        lookbehind = 3600 // node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None

        global_stale_prop = p2pool_data.get_average_stale_prop(
            node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares,
         stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()

        # Sum pending payouts over all of our payout pubkeys (satoshi -> coin).
        my_current_payout = 0.0
        for add in wb.pubkeys.keys:
            my_current_payout += node.get_current_txouts().get(
                bitcoin_data.pubkey_hash_to_script2(add), 0) * 1e-8
        stat_log.append(
            dict(
                time=time.time(),
                # Raw attempts/sec corrected upward by the global stale rate.
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(
                    node.tracker, node.best_share_var.value, lookbehind) /
                (1 - global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares,
                                            doa=stale_doa_shares),
                current_payout=my_current_payout,
                peers=dict(
                    incoming=sum(1
                                 for peer in node.p2p_node.peers.itervalues()
                                 if peer.incoming),
                    outgoing=sum(1
                                 for peer in node.p2p_node.peers.itervalues()
                                 if not peer.incoming),
                ),
                attempts_to_share=bitcoin_data.target_to_average_attempts(
                    node.tracker.items[node.best_share_var.value].max_target),
                attempts_to_block=bitcoin_data.target_to_average_attempts(
                    node.bitcoind_work.value['bits'].target),
                block_value=node.bitcoind_work.value['subsidy'] * 1e-8,
            ))

        # Persist the whole window; read back on startup above.
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))

    # Snapshot stats every 5 minutes; stopped along with the node.
    x = deferral.RobustLoopingCall(update_stat_log)
    x.start(5 * 60)
    stop_event.watch(x.stop)
    new_root.putChild('log', WebInterface(lambda: stat_log))

    def get_share(share_hash_str):
        """Return a JSON-friendly dict describing the share with the given
        hex hash, or None if the share is not in the tracker."""
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]

        return dict(
            parent='%064x' % share.previous_hash,
            far_parent='%064x' % share.share_info['far_share_hash'],
            children=[
                '%064x' % x for x in sorted(
                    node.tracker.reverse.get(share.hash, set()),
                    key=lambda sh: -len(node.tracker.reverse.get(sh, set())))
            ],  # sorted from most children to least children
            type_name=type(share).__name__,
            local=dict(
                verified=share.hash in node.tracker.verified.items,
                # time_seen == 0 means the share predates this process;
                # report our start time instead
                time_first_seen=start_time
                if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(
                    share.new_script, node.net.PARENT),
                # donation is stored as a fraction of 65535
                donation=share.share_data['donation'] / 65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
                absheight=share.absheight,
                abswork=share.abswork,
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    # pad to at least 2 bytes so .encode('hex') is never empty
                    coinbase=share.share_data['coinbase'].ljust(
                        2, '\x00').encode('hex'),
                    value=share.share_data['subsidy'] * 1e-8,
                    last_txout_nonce='%016x' %
                    share.contents['last_txout_nonce'],
                ),
                other_transaction_hashes=[
                    '%064x' % x
                    for x in share.get_other_tx_hashes(node.tracker)
                ],
            ),
        )

    def get_share_address(share_hash_str):
        """Payout address of the share with the given hex hash, or None
        if the share is unknown to the tracker."""
        share_hash = int(share_hash_str, 16)
        if share_hash not in node.tracker.items:
            return None
        share = node.tracker.items[share_hash]
        return bitcoin_data.script2_to_address(share.new_script,
                                               node.net.PARENT)

    # get_share_address and get_share already have the single
    # (share_hash_str) signature these endpoints need, so register them
    # directly instead of wrapping them in pass-through lambdas.
    new_root.putChild('payout_address', WebInterface(get_share_address))
    new_root.putChild('share', WebInterface(get_share))
    # Sharechain topology endpoints (hashes rendered as 64-digit hex).
    new_root.putChild(
        'heads',
        WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
    new_root.putChild(
        'verified_heads',
        WebInterface(
            lambda: ['%064x' % x for x in node.tracker.verified.heads]))
    # Children of each tail, i.e. the oldest shares we actually hold.
    new_root.putChild(
        'tails',
        WebInterface(lambda: [
            '%064x' % x for t in node.tracker.tails
            for x in node.tracker.reverse.get(t, set())
        ]))
    new_root.putChild(
        'verified_tails',
        WebInterface(lambda: [
            '%064x' % x for t in node.tracker.verified.tails
            for x in node.tracker.verified.reverse.get(t, set())
        ]))
    new_root.putChild(
        'best_share_hash',
        WebInterface(lambda: '%064x' % node.best_share_var.value))
    new_root.putChild(
        'my_share_hashes',
        WebInterface(
            lambda:
            ['%064x' % my_share_hash for my_share_hash in wb.my_share_hashes]))

    def get_share_data(share_hash_str):
        """Packed binary form of the share with the given hex hash
        ('' if the share is unknown)."""
        share_hash = int(share_hash_str, 16)
        if share_hash not in node.tracker.items:
            return ''
        return p2pool_data.share_type.pack(
            node.tracker.items[share_hash].as_share())

    # Raw packed-share download; get_share_data already has the
    # (share_hash_str) signature, so no pass-through lambda is needed.
    new_root.putChild(
        'share_data',
        WebInterface(get_share_data, 'application/octet-stream'))
    # Static per-currency metadata the web UI uses to build explorer links.
    new_root.putChild(
        'currency_info',
        WebInterface(lambda: dict(
            symbol=node.net.PARENT.SYMBOL,
            block_explorer_url_prefix=node.net.PARENT.
            BLOCK_EXPLORER_URL_PREFIX,
            address_explorer_url_prefix=node.net.PARENT.
            ADDRESS_EXPLORER_URL_PREFIX,
            tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
        )))
    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))

    # --- Graph history database -------------------------------------------
    # Load the persisted graph DB; unreadable contents are logged and the
    # database starts empty.
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    # Resolution (number of bins) and span for each view of every stream.
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60 * 60),
        'last_day': graph.DataViewDescription(300, 60 * 60 * 24),
        'last_week': graph.DataViewDescription(300, 60 * 60 * 24 * 7),
        'last_month': graph.DataViewDescription(300, 60 * 60 * 24 * 30),
        'last_year': graph.DataViewDescription(300, 60 * 60 * 24 * 365.25),
    }
    hd = graph.HistoryDatabase.from_obj(
        {
            'local_hash_rate':
            graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
            'local_dead_hash_rate':
            graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
            # Migrates the three legacy scalar streams into one multivalue
            # stream; the post_func subtracts dead+orphan from 'good' so the
            # categories become disjoint.
            'local_share_hash_rates':
            graph.DataStreamDescription(
                dataview_descriptions,
                is_gauge=False,
                multivalues=True,
                multivalue_undefined_means_0=True,
                default_func=graph.make_multivalue_migrator(
                    dict(good='local_share_hash_rate',
                         dead='local_dead_share_hash_rate',
                         orphan='local_orphan_share_hash_rate'),
                    post_func=lambda bins: [
                        dict((k, (v[0] - (sum(
                            bin.get(rem_k, (0, 0))[0]
                            for rem_k in ['dead', 'orphan'])
                                          if k == 'good' else 0), v[1]))
                             for k, v in bin.iteritems()) for bin in bins
                    ])),
            'pool_rates':
            graph.DataStreamDescription(dataview_descriptions,
                                        multivalues=True,
                                        multivalue_undefined_means_0=True),
            'current_payout':
            graph.DataStreamDescription(dataview_descriptions),
            'current_payouts':
            graph.DataStreamDescription(dataview_descriptions,
                                        multivalues=True),
            'peers':
            graph.DataStreamDescription(
                dataview_descriptions,
                multivalues=True,
                default_func=graph.make_multivalue_migrator(
                    dict(incoming='incoming_peers',
                         outgoing='outgoing_peers'))),
            'miner_hash_rates':
            graph.DataStreamDescription(
                dataview_descriptions, is_gauge=False, multivalues=True),
            'miner_dead_hash_rates':
            graph.DataStreamDescription(
                dataview_descriptions, is_gauge=False, multivalues=True),
            'desired_version_rates':
            graph.DataStreamDescription(dataview_descriptions,
                                        multivalues=True,
                                        multivalue_undefined_means_0=True),
            'traffic_rate':
            graph.DataStreamDescription(
                dataview_descriptions, is_gauge=False, multivalues=True),
            'getwork_latency':
            graph.DataStreamDescription(dataview_descriptions),
            'memory_usage':
            graph.DataStreamDescription(dataview_descriptions),
        }, hd_obj)
    # Persist the graph DB atomically every 100 seconds.
    x = deferral.RobustLoopingCall(
        lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
    x.start(100)
    stop_event.watch(x.stop)

    # Feed each received pseudoshare's work into the local-rate graphs,
    # with a per-miner breakdown keyed by username.
    @wb.pseudoshare_received.watch
    def _(work, dead, user):
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(
                    t, {user: work})

    # Classify each of our shares as good/dead immediately, then re-check
    # 200 seconds later (once the sharechain has settled) and move the work
    # between categories if the initial call was wrong.
    @wb.share_received.watch
    def _(work, dead, share_hash):
        t = time.time()
        if not dead:
            hd.datastreams['local_share_hash_rates'].add_datum(
                t, dict(good=work))
        else:
            hd.datastreams['local_share_hash_rates'].add_datum(
                t, dict(dead=work))

        def later():
            res = node.tracker.is_child_of(share_hash,
                                           node.best_share_var.value)
            if res is None:
                res = False  # share isn't connected to sharechain? assume orphaned
            if res and dead:  # share was DOA, but is now in sharechain
                # move from dead to good
                hd.datastreams['local_share_hash_rates'].add_datum(
                    t, dict(dead=-work, good=work))
            elif not res and not dead:  # share wasn't DOA, and isn't in sharechain
                # move from good to orphan
                hd.datastreams['local_share_hash_rates'].add_datum(
                    t, dict(good=-work, orphan=work))

        reactor.callLater(200, later)

    # Record P2P traffic (bytes, keyed by message name) for the traffic graph.
    @node.p2p_node.traffic_happened.watch
    def _(name, bytes):
        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})

    def add_point():
        """Sample pool/local statistics into the graph database.

        Called every 5 seconds by a RobustLoopingCall; does nothing until
        the sharechain is at least 10 shares tall.
        """
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        # Up to one hour of shares, limited by chain length and height.
        lookbehind = min(node.net.CHAIN_LENGTH,
                         60 * 60 // node.net.SHARE_PERIOD,
                         node.tracker.get_height(node.best_share_var.value))
        t = time.time()

        pool_rates = p2pool_data.get_stale_counts(node.tracker,
                                                  node.best_share_var.value,
                                                  lookbehind,
                                                  rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)

        # Our pending payout summed over all payout pubkeys (satoshi -> coin).
        current_txouts = node.get_current_txouts()
        my_current_payouts = 0.0
        for add in wb.pubkeys.keys:
            my_current_payouts += current_txouts.get(
                bitcoin_data.pubkey_hash_to_script2(add), 0) * 1e-8
        hd.datastreams['current_payout'].add_datum(t, my_current_payouts)
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        current_txouts_by_address = dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), amount)
            for script, amount in current_txouts.iteritems())
        # Per-miner payouts, restricted to miners we currently see hashing.
        hd.datastreams['current_payouts'].add_datum(
            t,
            dict((user, current_txouts_by_address[user] * 1e-8)
                 for user in miner_hash_rates
                 if user in current_txouts_by_address))

        hd.datastreams['peers'].add_datum(
            t,
            dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if not peer.incoming),
            ))

        # Desired-version counts scaled to hash-rate shares of the pool.
        vs = p2pool_data.get_desired_version_counts(node.tracker,
                                                    node.best_share_var.value,
                                                    lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_version_rates'].add_datum(
            t,
            dict((str(k), v / vs_total * pool_total)
                 for k, v in vs.iteritems()))
        # Best-effort: memory.resident() is platform-dependent and may fail.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; the best-effort behavior is unchanged.
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except Exception:
            if p2pool.DEBUG:
                traceback.print_exc()

    # Sample graph data every 5 seconds; stopped along with the node.
    x = deferral.RobustLoopingCall(add_point)
    x.start(5)
    stop_event.watch(x.stop)

    # Track getwork latency as reported with each new piece of bitcoind work.
    @node.bitcoind_work.changed.watch
    def _(new_work):
        hd.datastreams['getwork_latency'].add_datum(time.time(),
                                                    new_work['latency'])

    # /web/graph_data/<source>/<view> -> binned data for that stream/view.
    new_root.putChild(
        'graph_data',
        WebInterface(lambda source, view: hd.datastreams[source].dataviews[
            view].get_data(time.time())))

    # Static assets served from web-static/ next to the entry-point script.
    web_root.putChild(
        'static',
        static.File(
            os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])),
                         'web-static')))

    return web_root
示例#32
0
    def decorated(f):
        """Turn f into a remotely-callable setting handler.

        Builds the accepted type-tag list (accepts_s/accepts_t) from the
        declared `params` and f's signature, attaches a handleRequest
        dispatcher that unpacks incoming data into positional args, and
        tags f with the registration metadata (ID, name, accepts, returns,
        description/notes) the manager needs.
        """
        args, varargs, varkw, defaults = inspect.getargspec(f)
        # Skip the leading non-data parameters (e.g. self, context).
        args = args[lr_num_params:]

        # handle generators as defer.inlineCallbacks
        if inspect.isgeneratorfunction(f):
            f = defer.inlineCallbacks(f)

        # make sure that defined params are actually accepted by the function.
        # having extra params would not affect the running, but it is
        # unnecessary and hence may indicate other problems with the code
        for p in params:
            if p not in args:
                raise Exception("'%s' is not a valid parameter." % p)
            # turn single string annotations into lists
            if isinstance(params[p], str):
                params[p] = [params[p]]

        Nparams = len(args)
        Noptional = 0 if defaults is None else len(defaults)
        Nrequired = Nparams - Noptional

        if Nparams == 0:
            accepts_s = ['']  # only accept notifier
            accepts_t = [T.parseTypeTag(s) for s in accepts_s]

            @functools.wraps(f)
            def handleRequest(self, c, data):
                # no data parameters: ignore the payload entirely
                return f(self, c)

        elif Nparams == 1:
            accepts_s = params.get(args[0], [])
            accepts_t = [T.parseTypeTag(s) for s in accepts_s]

            if Nrequired == 0:
                # if accepted types were specified, add '' to the list
                # we don't add '' if the list of accepted types is empty,
                # since this would make '' the ONLY accepted type
                if len(accepts_t) and T.TNone() not in accepts_t:
                    accepts_s.append(': defaults [%s=%r]' \
                                     % (args[0], defaults[0]))
                    accepts_t.append(T.TNone())

                @functools.wraps(f)
                def handleRequest(self, c, data):
                    # None means "use the default"; otherwise pass it through
                    if data is None:
                        return f(self, c)
                    return f(self, c, data)

            else:
                # nothing special to do here
                handleRequest = f

        else:
            # sanity checks to make sure that we'll be able to
            # correctly dispatch to the function when called
            if Nrequired <= 1:
                if args[0] not in params:
                    raise Exception('Must specify types for first argument '
                                    'when fewer than two args are required.')
                for s in params[args[0]]:
                    t = T.parseTypeTag(s)
                    if isinstance(t, (T.TAny, T.TCluster)):
                        raise Exception('Cannot accept cluster or ? in first '
                                        'arg when fewer than two args are '
                                        'required.')

            # '' is not allowed on first arg when Nrequired > 1
            types = [T.parseTypeTag(s) for s in params.get(args[0], [])]
            if Nrequired > 1 and T.TNone() in types:
                raise Exception("'' not allowed when more than "
                                "one arg is required.")

            # '' is never allowed on args after the first.
            for p in args[1:]:
                types = [T.parseTypeTag(s) for s in params.get(p, [])]
                if T.TNone() in types:
                    raise Exception("'' not allowed after first arg.")

            # allowed types are as follows:
            # one type for each parameter, with the number of
            # parameters ranging from the total number down to
            # and including the required number
            # we don't include any zero-length group
            groups = []
            for n in range(Nparams, Nrequired - 1, -1):
                lists = [params.get(a, ['?']) for a in args[:n]]
                if len(lists):
                    groups += _product(lists)
                for i, group in reversed(list(enumerate(groups))):
                    # if there are any TNones in the group, we remove it
                    ts = [T.parseTypeTag(t) for t in group]
                    if T.TNone() in ts:
                        groups.pop(i)

            accepts_t = []
            accepts_s = []
            for group in groups:
                if len(group) > 1:
                    # multiple args accepted together as a cluster type
                    t = T.TCluster(*[T.parseTypeTag(t) for t in group])
                    s = ', '.join('%s{%s}' % (sub_t, arg)
                                  for sub_t, arg in zip(t, args))
                    s = '(%s)' % s
                else:
                    t = T.parseTypeTag(group[0])
                    if isinstance(t, T.TCluster):
                        raise Exception("Can't accept cluster in first param.")
                    s = '%s{%s}' % (group[0], args[0])
                # add information about default values of unused params
                if len(group) < Nparams:
                    defstr = ', '.join('%s=%r' %
                                       (args[n], defaults[n - Nrequired])
                                       for n in range(len(group), Nparams))
                    s = s + ': defaults [%s]' % defstr
                accepts_t.append(t)
                accepts_s.append(s)

            if Nrequired == 0:
                # everything optional: also accept an empty payload
                if T.TNone() not in accepts_t:
                    defstr = ', '.join('%s=%r' % (a, d)
                                       for a, d in zip(args, defaults))
                    accepts_s.append(': defaults [%s]' % defstr)
                    accepts_t.append(T.TNone())

                @functools.wraps(f)
                def handleRequest(self, c, data):
                    # tuple -> spread as positional args; None -> all defaults
                    if isinstance(data, tuple):
                        return f(self, c, *data)
                    elif data is None:
                        return f(self, c)
                    else:
                        return f(self, c, data)
            else:

                @functools.wraps(f)
                def handleRequest(self, c, data):
                    if isinstance(data, tuple):
                        return f(self, c, *data)
                    else:
                        return f(self, c, data)

        # registration metadata consumed by the manager
        f.ID = lr_ID
        f.name = lr_name or f.__name__
        f.accepts = accepts_s
        f.returns = returns
        f.handleRequest = handleRequest

        # this is the data that will be sent to the manager to
        # register this setting to be remotely callable
        f.description, f.notes = util.parseSettingDoc(f.__doc__)

        def getRegistrationInfo():
            return (f.ID, f.name, f.description, f.accepts, f.returns, f.notes)

        f.getRegistrationInfo = getRegistrationInfo

        return f
示例#33
0
# Network definition: Bitcoin Cash testnet (tBCH) for p2pool.
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

#P2P_PREFIX = '0b110907'.decode('hex') # disk magic and old netmagic
P2P_PREFIX = 'f4e5f3f4'.decode('hex')  # new net magic
P2P_PORT = 18333
ADDRESS_VERSION = 111
RPC_PORT = 18332
# Verifies the daemon supports getreceivedbyaddress and reports testnet.
# NOTE(review): (yield ...) inside a lambda is Python 2-only syntax.
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    'getreceivedbyaddress' in (yield bitcoind.rpc_help()) and
    (yield bitcoind.rpc_getinfo())['testnet']))
# 50 coins (in satoshis), halving every 210000 blocks.
SUBSIDY_FUNC = lambda height: 50 * 100000000 >> (height + 1) // 210000
POW_FUNC = data.hash256
BLOCK_PERIOD = 600  # s
SYMBOL = 'tBCH'
# Platform-dependent default location of bitcoin.conf.
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'Bitcoin') if platform.system() ==
    'Windows' else os.path.expanduser('~/Library/Application Support/Bitcoin/')
    if platform.system() == 'Darwin' else os.path.expanduser('~/.bitcoin'
                                                             ), 'bitcoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'https://www.blocktrail.com/tBCC/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://www.blocktrail.com/tBCC/address/'
TX_EXPLORER_URL_PREFIX = 'https://www.blocktrail.com/tBCC/tx/'
SANE_TARGET_RANGE = (2**256 // 2**32 // 1000 - 1, 2**256 // 2**32 - 1)
DUMB_SCRYPT_DIFF = 1
DUST_THRESHOLD = 1e8
示例#34
0
# Network definition: Bitcoin (BTC) mainnet for p2pool.
# Fix: os and platform are used by CONF_FILE_FUNC below but were never
# imported in this module, causing a NameError when it is called.
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

P2P_PREFIX = 'f9beb4d9'.decode('hex')
P2P_PORT = 8333
ADDRESS_VERSION = 0
RPC_PORT = 8332
# Verifies the daemon follows the BTC chain (genesis + post-fork headers)
# and is not running on testnet.
# NOTE(review): (yield ...) inside a lambda is Python 2-only syntax.
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    (yield helper.check_block_header(
        bitcoind,
        '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')
     ) and  # genesis block
    (yield helper.check_block_header(
        bitcoind,
        '00000000000000000019f112ec0a9982926f1258cdcc558dd7c3b7e5dc7fa148')
     ) and  # 478559 -- Bitcoin Cash fork
    (yield helper.check_block_header(
        bitcoind,
        '0000000000000000002ac644c9ba8ac3be966276fb7fc8f3baa1a3b9bdc615f1')
     ) and  # 491408 -- Bitcoin Gold fork
    (yield bitcoind.rpc_getblockchaininfo())['chain'] != 'test'))
# 50 coins (in satoshis), halving every 210000 blocks.
SUBSIDY_FUNC = lambda height: 50 * 100000000 >> (height + 1) // 210000
POW_FUNC = data.hash256
BLOCK_PERIOD = 600  # s
SYMBOL = 'BTC'
# Platform-dependent default location of bitcoin.conf.
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'Bitcoin') if platform.system() ==
    'Windows' else os.path.expanduser('~/Library/Application Support/Bitcoin/')
    if platform.system() == 'Darwin' else os.path.expanduser('~/.bitcoin'
                                                             ), 'bitcoin.conf')
示例#35
0
# Network definition: Starwels (MAI) for p2pool.
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

P2P_PREFIX = 'f9beb4d9'.decode('hex')
P2P_PORT = 8343
ADDRESS_VERSION = 0
RPC_PORT = 8342
# Verifies the daemon's genesis block and that it is not on testnet.
# NOTE(review): (yield ...) inside a lambda is Python 2-only syntax.
RPC_CHECK = defer.inlineCallbacks(lambda starwelsd: defer.returnValue(
    (yield helper.check_genesis_block(
        starwelsd,
        '000000003d69a915e9da53348c5c272978bb743442e3a6341c11061c125811a2')
     ) and (yield starwelsd.rpc_getblockchaininfo())['chain'] != 'test'))
# 50 coins (in satoshis), halving every 210000 blocks.
SUBSIDY_FUNC = lambda height: 50 * 100000000 >> (height + 1) // 210000
POW_FUNC = data.hash256
BLOCK_PERIOD = 2  # s
SYMBOL = 'MAI'
# Platform-dependent default location of starwels.conf.
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'Starwels')
    if platform.system() == 'Windows' else os.path
    .expanduser('~/Library/Application Support/Starwels/') if platform.system(
    ) == 'Darwin' else os.path.expanduser('~/.starwels'), 'starwels.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://91.240.86.126:3001/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://91.240.86.126:3001/address/'
TX_EXPLORER_URL_PREFIX = 'http://91.240.86.126:3001/tx/'
SANE_TARGET_RANGE = (2**256 // 2**32 // 1000000 - 1, 2**256 // 2**32 - 1)
DUMB_SCRYPT_DIFF = 1
示例#36
0
# Network definition: Bitcoin regtest (rBTC) for p2pool.
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

P2P_PREFIX = 'fabfb5da'.decode('hex')
P2P_PORT = 18444
ADDRESS_VERSION = 111
RPC_PORT = 28332
# Only checks that the daemon exposes bitcoinaddress in its RPC help.
# NOTE(review): (yield ...) inside a lambda is Python 2-only syntax.
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    'bitcoinaddress' in (yield bitcoind.rpc_help())))
# 50 coins (in satoshis), halving every 150 blocks (regtest schedule).
SUBSIDY_FUNC = lambda height: 50 * 100000000 >> (height + 1) // 150
POW_FUNC = data.hash256
BLOCK_PERIOD = 600  # s
SYMBOL = 'rBTC'
# Platform-dependent default config location (non-standard Linux path here).
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'Bitcoin') if platform.system() ==
    'Windows' else os.path.expanduser('~/Library/Application Support/Bitcoin/')
    if platform.system() == 'Darwin' else os.path.expanduser(
        '/media/Datos/.bitcoinlambod'), 'bitcoinlambo.conf')
BLOCK_EXPLORER_URL_PREFIX = '#'
ADDRESS_EXPLORER_URL_PREFIX = '#'
TX_EXPLORER_URL_PREFIX = '#'
SANE_TARGET_RANGE = (2**256 // 2**32 // 1000 - 1, 2**256 // 2 - 1)
DUMB_SCRYPT_DIFF = 1
DUST_THRESHOLD = 1e8
示例#37
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack


P2P_PREFIX = 'fac3b6da'.decode('hex')
P2P_PORT = 12024
ADDRESS_VERSION = 30
RPC_PORT = 14022
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
            (yield helper.check_genesis_block(bitcoind, '7497ea1b465eb39f1c8f507bc877078fe016d6fcb6dfad3a64c98dcc6e1e8496')) and
            not (yield bitcoind.rpc_getinfo())['testnet']
        ))
SUBSIDY_FUNC = lambda height: __import__('digibyte_subsidy').GetBlockBaseValue(height)
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('qubit_hash').getPoWHash(data))
BLOCK_PERIOD = 150 # s
SYMBOL = 'DGB'
CONF_FILE_FUNC=lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'digibyte') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/digibyte/') if platform.system() == 'Darwin' else os.path.expanduser('~/.digibyte'), 'digibyte.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://digiexplorer.info/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://digiexplorer.info/address/'
TX_EXPLORER_URL_PREFIX = 'http://digiexplorer.info/tx/'
SANE_TARGET_RANGE=(2**256//2**32//1000 - 1, 2**256//2**26 - 1)
DUMB_SCRYPT_DIFF = 1
DUST_THRESHOLD = 0.001e8
示例#38
0
# Network definition: Dash (DASH, X11 via dash_hash) for p2pool.
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack


P2P_PREFIX = 'bf0c6bbd'.decode('hex')
P2P_PORT = 9999
ADDRESS_VERSION = 76
SCRIPT_ADDRESS_VERSION = 16
RPC_PORT = 9998
# Verifies the daemon is a Dash daemon running on mainnet.
# NOTE(review): (yield ...) inside a lambda is Python 2-only syntax.
RPC_CHECK = defer.inlineCallbacks(lambda dashd: defer.returnValue(
            'dash' in (yield dashd.rpc_help()) and
            (yield dashd.rpc_getblockchaininfo())['chain'] == 'main'
        ))
# __import__ inside the lambdas defers loading the optional dash_hash
# C extension until the function is actually called.
BLOCKHASH_FUNC = lambda data: pack.IntType(256).unpack(__import__('dash_hash').getPoWHash(data))
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('dash_hash').getPoWHash(data))
BLOCK_PERIOD = 150 # s
SYMBOL = 'DASH'
# Platform-dependent default location of dash.conf.
CONF_FILE_FUNC = lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'DashCore') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/DashCore/') if platform.system() == 'Darwin' else os.path.expanduser('~/.dashcore'), 'dash.conf')
BLOCK_EXPLORER_URL_PREFIX = 'https://explorer.dash.org/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://explorer.dash.org/address/'
TX_EXPLORER_URL_PREFIX = 'https://explorer.dash.org/tx/'
SANE_TARGET_RANGE = (2**256//2**32//1000000 - 1, 2**256//2**32 - 1)
DUST_THRESHOLD = 0.001e8
示例#39
0
import os
import platform

from twisted.internet import defer

from .. import helper
from p2pool.util import pack
from p2pool.decred.blake import BLAKE

# Decred network definition.
# Fix: os, platform and defer were used below but never imported,
# causing a NameError as soon as this module was loaded.
P2P_PREFIX = 'F900B4D9'.decode('hex')  # MsgVersion 1..5 #
P2P_PORT = 9108
# Ds... addresses
ADDRESS_VERSION_NET = 7
ADDRESS_VERSION_ALGO = 63
RPC_PORT = 9109
RPC_WALLET_PORT = 9110

# FIXME Genesis Block hash for Decred
RPC_CHECK = defer.inlineCallbacks(lambda dcrd: defer.returnValue(
    (yield helper.check_genesis_block(
        dcrd,
        '298e5cc3d985bfe7f81dc135f360abe089edd4396b86d2de66b0cef42b21d980')
     ) and not (yield dcrd.rpc_getinfo())['testnet']))
# 50-coin base reward, right-shifted (halved) every 210000 blocks.
SUBSIDY_FUNC = lambda height: 50 * 100000000 >> (height + 1) // 210000
###POW_FUNC = POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('blake_hash').getPoWHash(data))  <-- FIXME
POW_FUNC = lambda data: pack.IntType(256).unpack(BLAKE(256).digest(data))
BLOCK_PERIOD = 300  # s
SYMBOL = 'DCR'
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['LOCALAPPDATA'], 'dcrd')
    if platform.system() == 'Windows' else os.
    path.expanduser('~/Library/Application Support/dcrd/') if platform.system(
    ) == 'Darwin' else os.path.expanduser('~/.dcrd'), 'dcrd.conf')
BLOCK_EXPLORER_URL_PREFIX = 'https://mainnet.decred.org/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://mainnet.decred.org/address/'
TX_EXPLORER_URL_PREFIX = 'https://mainnet.decred.org/tx/'
示例#40
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack


# Mincoin network parameters.
P2P_PREFIX = '6342212c'.decode('hex')
P2P_PORT = 9334
ADDRESS_VERSION = 50
RPC_PORT = 9335

@defer.inlineCallbacks
def RPC_CHECK(bitcoind):
    # Identify the chain by its genesis block and refuse test chains.
    defer.returnValue(
        (yield helper.check_genesis_block(bitcoind, '12a765e31ffd4059bada1e25190f6e98c99d9714d334efa41a195a7e7e04bfe2')) and
        (yield bitcoind.rpc_getblockchaininfo())['chain'] != 'test')

def SUBSIDY_FUNC(height):
    # Constant 2-coin reward regardless of height.
    return 2 * 100000000

def POW_FUNC(data):
    return pack.IntType(256).unpack(__import__('ltc_scrypt').getPoWHash(data))

BLOCK_PERIOD = 60  # s
SYMBOL = 'MNC'

def CONF_FILE_FUNC():
    # Locate mincoin.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'Mincoin')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/Mincoin/')
    else:
        base = os.path.expanduser('~/.mincoin')
    return os.path.join(base, 'mincoin.conf')

BLOCK_EXPLORER_URL_PREFIX = 'https://www.mincoinexplorer.com/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://www.mincoinexplorer.com/address/'
TX_EXPLORER_URL_PREFIX = 'https://www.mincoinexplorer.com/tx/'
SANE_TARGET_RANGE = (2**256//1000000000000 - 1, 2**256//1000 - 1)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 0.00000546e8
示例#41
0
文件: kzc.py 项目: ilsawa/p2pool-ucom
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# UCOM (kzc) network parameters.
P2P_PREFIX = 'bd1b44ba'.decode('hex')
P2P_PORT = 8277
ADDRESS_VERSION = 46
SCRIPT_ADDRESS_VERSION = 16
RPC_PORT = 8276

@defer.inlineCallbacks
def RPC_CHECK(kzcd):
    # Daemon must expose the 'ucomaddress' RPC and not be running on testnet.
    defer.returnValue('ucomaddress' in (yield kzcd.rpc_help()) and
                      not (yield kzcd.rpc_getinfo())['testnet'])

def BLOCKHASH_FUNC(data):
    # Block hash via the dash_hash extension module.
    return pack.IntType(256).unpack(__import__('dash_hash').getPoWHash(data))

def POW_FUNC(data):
    return pack.IntType(256).unpack(__import__('dash_hash').getPoWHash(data))

BLOCK_PERIOD = 150
SYMBOL = 'UCOM'

def CONF_FILE_FUNC():
    # Locate ucom.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'ucom')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/ucom/')
    else:
        base = os.path.expanduser('~/.ucom')
    return os.path.join(base, 'ucom.conf')

BLOCK_EXPLORER_URL_PREFIX = 'http://chain.unitedcryptocommunity.com/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://chain.unitedcryptocommunity.com/address/'
TX_EXPLORER_URL_PREFIX = 'http://chain.unitedcryptocommunity.com/tx/'
SANE_TARGET_RANGE = (2**256 // 2**32 // 1000000 - 1, 2**256 // 2**32 - 1)
示例#42
0

@defer.inlineCallbacks
def get_subsidy(bitcoind, target):
    """Fetch the block record for *target* from the daemon via RPC."""
    block = yield bitcoind.rpc_getblock(target)
    defer.returnValue(block)


nets = dict(bitconnect=math.Object(
    P2P_PREFIX='325e6f86'.decode('hex'),
    P2P_PORT=9239,
    ADDRESS_VERSION=18,
    RPC_PORT=9240,
    RPC_CHECK=defer.inlineCallbacks(
        lambda bitcoind: defer.returnValue(0 == (yield bitcoind.rpc_getblock(
            'd3bd95c47fa17c47e1e2732d7072a6c4014a2fa93873124418a8fd9a300'))[
                'height'] and not (yield bitcoind.rpc_getinfo())['testnet'])),
    SUBSIDY_FUNC=lambda height: 10 * 100000000,
    POW_FUNC=lambda data: pack.IntType(256).unpack(
        __import__('ltc_scrypt').getPoWHash(data)),
    BLOCK_PERIOD=120,
    SYMBOL='BCC',
    CONF_FILE_FUNC=lambda: os.path.join(
        os.path.join(os.environ['APPDATA'], 'bitconnect')
        if platform.system() == 'Windows' else os.path.expanduser(
            '~/Library/Application Support/bitconnect/')
        if platform.system() == 'Darwin' else os.path.expanduser(
            '~/.bitconnect'), 'bitconnect.conf'),
    BLOCK_EXPLORER_URL_PREFIX='https://chainz.cryptoid.info/bcc/block.dws?',
    ADDRESS_EXPLORER_URL_PREFIX='https://chainz.cryptoid.info/bcc/address.dws?',
    TX_EXPLORER_URL_PREFIX='https://chainz.cryptoid.info/bcc/tx.dws?',
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# Argentum network parameters.
P2P_PREFIX = 'fbc1b8dc'.decode('hex')
P2P_PORT = 13580
ADDRESS_VERSION = 23
RPC_PORT = 13581

@defer.inlineCallbacks
def RPC_CHECK(bitcoind):
    # Identify the chain by its genesis block and refuse testnet daemons.
    defer.returnValue(
        (yield helper.check_genesis_block(
            bitcoind,
            '88c667bc63167685e4e4da058fffdfe8e007e5abffd6855de52ad59df7bb0bb2')
         ) and not (yield bitcoind.rpc_getinfo())['testnet'])

def SUBSIDY_FUNC(height):
    # Constant 3-coin reward regardless of height.
    return 3 * 100000000

def POW_FUNC(data):
    # Proof-of-work via the yescrypt_hash extension (80-byte header).
    return pack.IntType(256).unpack(__import__('yescrypt_hash').getHash(data, 80))

BLOCK_PERIOD = 270  # s
SYMBOL = 'ARG'

def CONF_FILE_FUNC():
    # Locate argentum.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'argentum')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/Argentum/')
    else:
        base = os.path.expanduser('~/.argentum')
    return os.path.join(base, 'argentum.conf')

BLOCK_EXPLORER_URL_PREFIX = 'https://chainz.cryptoid.info/arg/block.dws?'
ADDRESS_EXPLORER_URL_PREFIX = 'https://chainz.cryptoid.info/arg/address.dws?'
TX_EXPLORER_URL_PREFIX = 'https://chainz.cryptoid.info/arg/tx.dws?'
SANE_TARGET_RANGE = (2**256 // 1000000000 - 1, 2**256 // 10000 - 1)
示例#44
0
import os
import platform

from twisted.internet import defer

from .. import helper
from p2pool.util import math, pack, jsonrpc


# NovaCoin network definition.
# Fix: os, platform, defer and helper were used below but never imported,
# causing a NameError as soon as this module was loaded.
@defer.inlineCallbacks
def get_subsidy(bitcoind, target):
    """Fetch the block record for *target* via RPC.

    SUBSIDY_FUNC delegates to this lookup instead of computing the reward
    locally — presumably the reward is derived from block data; confirm
    against the caller.
    """
    res = yield bitcoind.rpc_getblock(target)
    defer.returnValue(res)


P2P_PREFIX = 'e4e8e9e5'.decode('hex')
P2P_PORT = 19946
ADDRESS_VERSION = 8
RPC_PORT = 9346
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    (yield helper.check_genesis_block(
        bitcoind,
        '00000a060336cbb72fe969666d337b87198b1add2abaa59cca226820b32933a4')
     ) and not (yield bitcoind.rpc_getinfo())['testnet']))
SUBSIDY_FUNC = lambda bitcoind, target: get_subsidy(bitcoind, target)
BLOCK_PERIOD = 600  # s
SYMBOL = 'NVC'
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'NovaCoin')
    if platform.system() == 'Windows' else os.path
    .expanduser('~/Library/Application Support/NovaCoin/') if platform.system(
    ) == 'Darwin' else os.path.expanduser('~/.novacoin'), 'novacoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://explorer.novaco.in/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://explorer.novaco.in/address/'
TX_EXPLORER_URL_PREFIX = 'http://explorer.novaco.in/tx/'
SANE_TARGET_RANGE = (2**256 // 1000000000 - 1, 2**256 // 1000 - 1)
DUMB_SCRYPT_DIFF = 2**16
示例#45
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# Bitcoin testnet parameters.
P2P_PREFIX = '0b110907'.decode('hex')
P2P_PORT = 18333
ADDRESS_VERSION = 111
RPC_PORT = 18332

@defer.inlineCallbacks
def RPC_CHECK(bitcoind):
    # Daemon must expose 'bitcoinaddress' in its RPC help and be on the
    # test chain.
    defer.returnValue(
        'bitcoinaddress' in (yield bitcoind.rpc_help()) and
        (yield bitcoind.rpc_getblockchaininfo())['chain'] == 'test')

def SUBSIDY_FUNC(height):
    # 50-coin base reward, right-shifted (halved) every 210000 blocks.
    return 50 * 100000000 >> (height + 1) // 210000

POW_FUNC = data.hash256
BLOCK_PERIOD = 600  # s
SYMBOL = 'tBTC'

def CONF_FILE_FUNC():
    # Locate bitcoin.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'Bitcoin')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/Bitcoin/')
    else:
        base = os.path.expanduser('~/.bitcoin')
    return os.path.join(base, 'bitcoin.conf')

BLOCK_EXPLORER_URL_PREFIX = 'http://blockexplorer.com/testnet/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://blockexplorer.com/testnet/address/'
TX_EXPLORER_URL_PREFIX = 'http://blockexplorer.com/testnet/tx/'
SANE_TARGET_RANGE = (2**256 // 2**32 // 1000 - 1, 2**256 // 2**32 - 1)
DUMB_SCRYPT_DIFF = 1
DUST_THRESHOLD = 1e8
示例#46
0
    def __init__(self, func, lr_ID, lr_name, returns, unflatten, **params):
        """Setting constructor
        Args:

            func (callable):   Function to implement the setting.
            lr_ID (int):       The ID number of the setting used in the labrad
                               protocol
            lr_name (str):     The setting name.  By default, this is derived
                               from the function name.
            returns:           The labrad type tag or list of tags the setting
                               returns
            unflatten (bool):  Request automatic unflattening of incoming data.
                               (default True)
            **params:          Additional keyword arguments indicate arguments
                               to the setting.  Each keyword should be a string
                               matching one of the formal parameters of func,
                               while the value is the type tag or list of tags
                               accepted by that parameter.

        If unflattening is requested, pylabrad will use the default unflattening
        for the data.  Otherwise, arguments will receive FlatData objects.  If
        there are multiple arguments, the top-level tuple will still be
        unpacked."""

        self.func = func
        self.ID = lr_ID
        self.name = lr_name or func.__name__
        # Normalize 'returns' so self.returns is always a list of type tags.
        self.returns = [returns] if isinstance(returns,
                                               basestring) else returns
        self.unflatten = unflatten
        self.description, self.notes = util.parseSettingDoc(func.__doc__)
        self.__doc__ = "Setting wrapper for {}\n\n{}".format(
            func.__name__, func.__doc__)

        ###
        # We need to validate the arguments.  Things we need to checks:
        #
        # 1) All parameters must match function arguments
        # 2) Function arguments with no specified parameter default to '?'
        # 3) The empty tag '' is only allowd on the first argument, and only
        #    if all other arguments are optional.
        # 4) If more than one argument are required, we expect to always receive
        #    a tuple and unpack it
        # 5) If only a single argument is allowed, we never unpack tuples
        # 6) If both =1 and >1 arguments are allowed, it is ambiguous whether to
        #    unpack tuples, so this case is not allowed:  The first argument
        #    cannot be a tuple or '?' tag if the second argument is optional.

        argspec = inspect.getargspec(self.func)
        args = argspec.args[2:]  # Skip 'self' and context data arguments.

        # Generator-based settings are wrapped so they run as twisted
        # inlineCallbacks coroutines.
        if inspect.isgeneratorfunction(func):
            self.func = defer.inlineCallbacks(func)

        # Check (2): arguments with no declared tag accept any type.
        for arg in args:
            if arg not in params:
                params[arg] = ['?']

        # Check (1): every declared parameter must name a real function
        # argument; single tags are normalized to one-element lists.
        for p in params.keys():
            if p not in args:
                raise ValueError(
                    "Setting parameter {} not accepted by function".format(p))
            if isinstance(params[p], basestring):
                params[p] = [params[p]]

        Nparams = len(args)
        Noptional = len(argspec.defaults) if argspec.defaults else 0
        Nrequired = Nparams - Noptional

        # Checks (4)-(6): decide the tuple-unpacking policy from arity.
        if Nrequired > 1:
            self.expand_cluster = "always"
        elif Nparams > 1:
            self.expand_cluster = "optional"
        else:  # one or fewer arguments
            self.expand_cluster = "never"

        self.allow_none = Nrequired == 0

        if Nparams:
            # Checks (3) and (6) on the first argument's accepted tags.
            for tag in params[args[0]]:
                tt = T.parseTypeTag(tag)
                if isinstance(tt, T.TNone) and Nrequired > 1:
                    raise ValueError(
                        "First argument {} cannot accept '' "
                        "unless other arguments are optional".format(args[0]))
                if isinstance(
                        tt,
                    (T.TAny,
                     T.TCluster)) and self.expand_cluster == "optional":
                    raise ValueError(
                        "First argument {} cannot accept type {} "
                        "because other arguments are optional".format(
                            args[0], tt))
            # Check (3): only the first argument may accept the empty tag.
            for arg in args[1:]:
                for tag in params[arg]:
                    if isinstance(T.parseTypeTag(tag), T.TNone):
                        raise ValueError(
                            "Argument {} cannot accept ''".format(arg))

        # Build list of accepted data types.
        # This is basically every combination of accepted types for each agrument,
        # including omitting any number of trailing optional arguments.

        accepted_types = []
        for i in range(Nrequired, Nparams + 1):
            if i == 0:
                accepted_types.append('_')
            else:
                accept_tuples = itertools.product(
                    *[params[arg] for arg in args[:i]])
                accepted_types.extend(
                    combine_type_tags(x) for x in accept_tuples)
        self.accepts = accepted_types
示例#47
0
import platform

from twisted.internet import defer

from . import data
from p2pool.util import math, pack

nets = dict(
    ###CJWinty: code clean
    dimecoin=math.Object(
        P2P_PREFIX='fea503dd'.decode('hex'),
        P2P_PORT=11931,
        ADDRESS_VERSION=58,
        RPC_PORT=11930,
        RPC_CHECK=defer.inlineCallbacks(lambda dimecoind: defer.returnValue(
            'dimecoinaddress' in (yield dimecoind.rpc_help()) and
            not (yield dimecoind.rpc_getinfo())['testnet']
        )),
        SUBSIDY_FUNC=lambda height: 2048*100000000 >> (height + 1)//60480,
        BLOCKHASH_FUNC=lambda data: pack.IntType(256).unpack(__import__('quark_hash').getPoWHash(data)),
        POW_FUNC=lambda data: pack.IntType(256).unpack(__import__('quark_hash').getPoWHash(data)),
        BLOCK_PERIOD=30, # s
        SYMBOL='DIM',
        CONF_FILE_FUNC=lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'Dimecoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/Dimecoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.dimecoin'), 'dimecoin.conf'),
        BLOCK_EXPLORER_URL_PREFIX='http://http://107.150.11.146:333//block/',
        ADDRESS_EXPLORER_URL_PREFIX='http://http://107.150.11.146:333//address/',
        ### CJWinty: Code clear 
        SANE_TARGET_RANGE=(2**256//2**32//1000 - 1, 2**256//2**20 - 1), 
        DUMB_SCRYPT_DIFF=1,
        DUST_THRESHOLD=0.001e8,
    ),
    dimecoin_testnet=math.Object(
示例#48
0
from twisted.internet import defer

from .. import data, helper

# Bitcoin Cash network definition.
# Fix: defer, helper and data were used below but never imported in this
# module, causing a NameError as soon as it was loaded.
# P2P_PREFIX = 'f9beb4d9'.decode('hex') # disk magic and old net magic
P2P_PREFIX = 'e3e1f3e8'.decode('hex')  # new net magic
P2P_PORT = 8333
ADDRESS_VERSION = 0
ADDRESS_P2SH_VERSION = 5
HUMAN_READABLE_PART = 'bitcoincash'
RPC_PORT = 8332
# The daemon must know the genesis header and the post-fork headers that
# distinguish this chain from BTC and BSV, and be on the main chain.
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    (yield helper.check_block_header(
        bitcoind,
        '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')
     ) and  # genesis block
    # 478559 -- Bitcoin Cash fork
    (yield helper.check_block_header(
        bitcoind,
        '000000000000000000651ef99cb9fcbe0dadde1d424bd9f15ff20136191a5eec')
     ) and
    # 556767 -- Bitcoin SV fork
    (yield helper.check_block_header(
        bitcoind,
        '000000000000000001d956714215d96ffc00e0afda4cd0a96c96f8d802b1662b')
     ) and (yield bitcoind.rpc_getblockchaininfo())['chain'] == 'main'))


def SUBSIDY_FUNC(height):
    # 50-coin base reward, right-shifted (halved) every 210000 blocks.
    return 50 * 100000000 >> (height + 1) // 210000


POW_FUNC = data.hash256
BLOCK_PERIOD = 600  # s
示例#49
0
 def decorator(func):
     # Generator callbacks are wrapped to run as inlineCallbacks coroutines;
     # the (possibly wrapped) callable is registered under `name`.
     wrapped = inlineCallbacks(func) if inspect.isgeneratorfunction(func) else func
     self._signals.setdefault(name, []).append(wrapped)
     return wrapped
示例#50
0
def inlineCallbacks(fun, *args, **kw):
    """Wrap *fun* with twisted's inlineCallbacks and invoke it immediately."""
    wrapped = defer.inlineCallbacks(fun)
    return wrapped(*args, **kw)
示例#51
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# Myriadcoin (yescrypt) network parameters.
P2P_PREFIX = 'af4576ee'.decode('hex')
P2P_PORT = 10888
ADDRESS_VERSION = 50
RPC_PORT = 10889

@defer.inlineCallbacks
def RPC_CHECK(bitcoind):
    # Identify the chain by its genesis block and refuse testnet daemons.
    defer.returnValue(
        (yield helper.check_genesis_block(
            bitcoind,
            '00000ffde4c020b5938441a0ea3d314bf619eff0b38f32f78f7583cffa1ea485')
         ) and not (yield bitcoind.rpc_getinfo())['testnet'])

def SUBSIDY_FUNC(height):
    # 1000-coin base reward, right-shifted (halved) every 967680 blocks.
    return 1000 * 100000000 >> (height + 1) // 967680

def POW_FUNC(data):
    # Proof-of-work via the yescrypt_hash extension (80-byte header).
    return pack.IntType(256).unpack(__import__('yescrypt_hash').getHash(data, 80))

BLOCK_PERIOD = 300  # s
SYMBOL = 'XMY'

def CONF_FILE_FUNC():
    # Locate myriadcoin.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'myriadcoin')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/myriadcoin/')
    else:
        base = os.path.expanduser('~/.myriadcoin')
    return os.path.join(base, 'myriadcoin.conf')

BLOCK_EXPLORER_URL_PREFIX = 'http://insight-myr.cryptap.us/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://insight-myr.cryptap.us/address/'
TX_EXPLORER_URL_PREFIX = 'http://insight-myr.cryptap.us/tx/'
SANE_TARGET_RANGE = (2**256 // 1000000000 - 1, 2**256 // 10000 - 1)
SANE_TARGET_RANGE = (2**256 // 1000000000 - 1, 2**256 // 10000 - 1)
示例#52
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# Phoenixcoin network parameters.
P2P_PREFIX = 'FED0D8C3'.decode('hex')
P2P_PORT = 9555
ADDRESS_VERSION = 56
RPC_PORT = 9554

@defer.inlineCallbacks
def RPC_CHECK(phoenixcoind):
    # Only the genesis block is verified for this network.
    defer.returnValue(
        (yield helper.check_genesis_block(
            phoenixcoind,
            'be2f30f9e8db8f430056869c43503a992d232b28508e83eda101161a18cf7c73')))

def SUBSIDY_FUNC(height):
    # 50-coin base reward, right-shifted (halved) every 1000000 blocks.
    return 50 * 100000000 >> (height + 1) // 1000000

def POW_FUNC(data):
    return pack.IntType(256).unpack(__import__('neoscrypt').getPoWHash(data))

BLOCK_PERIOD = 90
SYMBOL = 'PXC'

def CONF_FILE_FUNC():
    # Windows uses a 'data' directory under the current working directory;
    # this network defines no separate Darwin location.
    if platform.system() == 'Windows':
        base = os.path.join(os.getcwd(), 'data')
    else:
        base = os.path.expanduser('~/.phoenixcoin')
    return os.path.join(base, 'phoenixcoin.conf')

BLOCK_EXPLORER_URL_PREFIX = 'http://explorer.phoenixcoin.org/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://explorer.phoenixcoin.org/address/'
TX_EXPLORER_URL_PREFIX = 'http://explorer.phoenixcoin.org/tx/'
SANE_TARGET_RANGE = (2**256 - 1 >> 30, 2**256 - 1 >> 12)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 0.1e8
示例#53
0
 def _chainable(*args, **kwargs):
     # Run the wrapped callable through inlineCallbacks and hand back the
     # resulting Deferred.
     return defer.inlineCallbacks(f)(*args, **kwargs)
示例#54
0
import os
import platform

from twisted.internet import defer

from . import data
from p2pool.util import math, pack

nets = dict(
    bitcoin=math.Object(
        P2P_PREFIX='f9beb4d9'.decode('hex'),
        P2P_PORT=8333,
        ADDRESS_VERSION=0,
        RPC_PORT=8332,
        RPC_CHECK=defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
            'bitcoinaddress' in (yield bitcoind.rpc_help()) and not (
                yield bitcoind.rpc_getinfo())['testnet'])),
        SUBSIDY_FUNC=lambda height: 50 * 100000000 >> (height + 1) // 210000,
        POW_FUNC=data.hash256,
        BLOCK_PERIOD=600,  # s
        SYMBOL='BTC',
        CONF_FILE_FUNC=lambda: os.path.join(
            os.path.join(os.environ['APPDATA'], 'Bitcoin')
            if platform.system() == 'Windows' else os.path.expanduser(
                '~/Library/Application Support/Bitcoin/') if platform.system()
            == 'Darwin' else os.path.expanduser('~/.bitcoin'), 'bitcoin.conf'),
        BLOCK_EXPLORER_URL_PREFIX='http://blockexplorer.com/block/',
        ADDRESS_EXPLORER_URL_PREFIX='http://blockexplorer.com/address/',
        SANE_TARGET_RANGE=(2**256 // 2**32 // 1000 - 1, 2**256 // 2**32 - 1),
        DUMB_SCRYPT_DIFF=1,
    ),
示例#55
0
 def wrapper(self, *args, **kw):
     # Dispatch synchronously (flattened generator) or asynchronously
     # (inlineCallbacks) depending on the instance's sync flag.
     if getattr(self, sync_attr):
         runner = flatten_generator(func)
     else:
         runner = inlineCallbacks(func)
     return runner(self, *args, **kw)
示例#56
0
@defer.inlineCallbacks
def check_genesis_block(bitcoind, genesis_block_hash):
    """Return a Deferred firing True iff the daemon knows the given block hash.

    Fix: this generator used yield/defer.returnValue but was missing the
    @defer.inlineCallbacks decorator, so calling it returned a bare generator
    instead of a Deferred — the RPC_CHECK callers below yield its result and
    would never have received the boolean.

    jsonrpc error code -5 (in bitcoind-style RPC this typically means the
    block was not found — confirm) indicates the daemon is on a different
    chain, so the check fires False.
    """
    try:
        yield bitcoind.rpc_getblock(genesis_block_hash)
    except jsonrpc.Error_for_code(-5):
        defer.returnValue(False)
    else:
        defer.returnValue(True)

nets = dict(
    bitcoin=math.Object(
        P2P_PREFIX='f9beb4d9'.decode('hex'),
        P2P_PORT=8333,
        ADDRESS_VERSION=0,
        RPC_PORT=8332,
        RPC_CHECK=defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
            (yield check_genesis_block(bitcoind, '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')) and
            not (yield bitcoind.rpc_getinfo())['testnet']
        )),
        SUBSIDY_FUNC=lambda height: 50*100000000 >> (height + 1)//210000,
        POW_FUNC=data.hash256,
        BLOCK_PERIOD=600, # s
        SYMBOL='BTC',
        CONF_FILE_FUNC=lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'Bitcoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/Bitcoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.bitcoin'), 'bitcoin.conf'),
        BLOCK_EXPLORER_URL_PREFIX='https://blockchain.info/block/',
        ADDRESS_EXPLORER_URL_PREFIX='https://blockchain.info/address/',
        TX_EXPLORER_URL_PREFIX='https://blockchain.info/tx/',
        SANE_TARGET_RANGE=(2**256//2**32//1000 - 1, 2**256//2**32 - 1),
        DUMB_SCRYPT_DIFF=1,
        DUST_THRESHOLD=0.001e8,
    ),

    litecoin=math.Object(
示例#57
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# DigiByte (scrypt variant) network parameters.
P2P_PREFIX = 'fac3b6da'.decode('hex')  #pchmessagestart
P2P_PORT = 19922
ADDRESS_VERSION = 30  #pubkey_address
RPC_PORT = 14022

@defer.inlineCallbacks
def RPC_CHECK(bitcoind):
    # Daemon must expose 'digibyteaddress' RPC help and not be on testnet.
    defer.returnValue(
        'digibyteaddress' in (yield bitcoind.rpc_help()) and
        not (yield bitcoind.rpc_getinfo())['testnet'])

def SUBSIDY_FUNC(height):
    # Reward schedule delegated to the digibyte_subsidy extension
    # (previously: 8000*100000000 >> (height + 1)//1051200).
    return __import__('digibyte_subsidy').GetBlockBaseValue(height)

def POW_FUNC(data):
    return pack.IntType(256).unpack(__import__('ltc_scrypt').getPoWHash(data))

BLOCK_PERIOD = 60  # s
SYMBOL = 'DGB'

def CONF_FILE_FUNC():
    # Locate digibyte.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'digibyte')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/digibyte/')
    else:
        base = os.path.expanduser('~/.digibyte')
    return os.path.join(base, 'digibyte.conf')

BLOCK_EXPLORER_URL_PREFIX = 'http://altexplorer.net/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://altexplorer.net/address/'
TX_EXPLORER_URL_PREFIX = 'http://altexplorer.net/tx/'
SANE_TARGET_RANGE = (2**256 // 1000000000 - 1, 2**256 // 1000 - 1)
示例#58
0
import os
import platform

from twisted.internet import defer

from .. import helper
from p2pool.util import pack

# BSTK ("scchain") network definition.
# Fix: os, platform, defer, helper and pack were used below but never
# imported in this module, causing a NameError as soon as it was loaded.
# Superseded commented-out parameter sets ("rbtc args") have been removed.
P2P_PREFIX = 'f9beb402'.decode('hex')  #scchain args
P2P_PORT = 10333                       #scchain args
ADDRESS_VERSION = 0
RPC_PORT = 10665 #scchain args
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
            (yield helper.check_genesis_block(bitcoind, '00000006941e463cf1cb6c74024228810dc81545c854ba5153b117d3bf602204')) and
            (yield bitcoind.rpc_getblockchaininfo())['chain'] != 'test'
        )) #scchain args

# 50-coin base reward, right-shifted (halved) every 210000 blocks.
SUBSIDY_FUNC = lambda height: 50*100000000 >> (height + 1)//210000
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('Lyra2Z_scrypt').getPoWHash(data))
BLOCK_PERIOD = 600 # s
SYMBOL = 'BSTK'
CONF_FILE_FUNC = lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'Bitcoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/Bitcoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.bitcoin'), 'bitcoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'https://block.genyuanlian.com/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://block.genyuanlian.com/address/'
TX_EXPLORER_URL_PREFIX = 'https://block.genyuanlian.com/tx/'
SANE_TARGET_RANGE = (2**256//2**32//1000000 - 1, 2**256//2**28 - 1)   #scchain args
示例#59
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# Beyondcoin network parameters.
P2P_PREFIX = '9cd2c0a7'.decode('hex')
P2P_PORT = 10333
ADDRESS_VERSION = 25
RPC_PORT = 10332

@defer.inlineCallbacks
def RPC_CHECK(bitcoind):
    # Identify the chain by its genesis block and refuse test chains.
    defer.returnValue(
        (yield helper.check_genesis_block(
            bitcoind,
            '0a9e3b5fce3aee6e04f06dfd6ad380a6c0f9d8420f53a4ca97845756ee5d56e7')
         ) and (yield bitcoind.rpc_getblockchaininfo())['chain'] != 'test')

def SUBSIDY_FUNC(height):
    # 50-coin base reward, right-shifted (halved) every 840000 blocks.
    return 50 * 100000000 >> (height + 1) // 840000

def POW_FUNC(data):
    return pack.IntType(256).unpack(__import__('bynd_scrypt').getPoWHash(data))

BLOCK_PERIOD = 150  # s
SYMBOL = 'BYND'

def CONF_FILE_FUNC():
    # Locate beyondcoin.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'Beyondcoin')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/Beyondcoin/')
    else:
        base = os.path.expanduser('~/.beyondcoin')
    return os.path.join(base, 'beyondcoin.conf')

BLOCK_EXPLORER_URL_PREFIX = 'https://beyondcoinexplorer.com/#/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://beyondcoinexplorer.com/#/address/'
TX_EXPLORER_URL_PREFIX = 'https://beyondcoinexplorer.com/#/transaction/'
SANE_TARGET_RANGE = (2**256 // 1000000000 - 1, 2**256 // 1000 - 1)
示例#60
0
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack

# Stelo network parameters.
P2P_PREFIX = '7ab3c4d2'.decode('hex')
P2P_PORT = 12477
ADDRESS_VERSION = 63
SCRIPT_ADDRESS_VERSION = 20
RPC_PORT = 12478

@defer.inlineCallbacks
def RPC_CHECK(dashd):
    # Daemon must expose the 'steloaddress' RPC and not be running on testnet.
    defer.returnValue(
        'steloaddress' in (yield dashd.rpc_help()) and
        not (yield dashd.rpc_getinfo())['testnet'])

def BLOCKHASH_FUNC(data):
    # Block hash via the neoscrypt extension module.
    return pack.IntType(256).unpack(__import__('neoscrypt').getPoWHash(data))

def POW_FUNC(data):
    return pack.IntType(256).unpack(__import__('neoscrypt').getPoWHash(data))

BLOCK_PERIOD = 120  # s
SYMBOL = 'STL'

def CONF_FILE_FUNC():
    # Locate stelo.conf in the platform-specific data directory.
    system = platform.system()
    if system == 'Windows':
        base = os.path.join(os.environ['APPDATA'], 'stelocore')
    elif system == 'Darwin':
        base = os.path.expanduser('~/Library/Application Support/stelocore/')
    else:
        base = os.path.expanduser('~/.stelocore')
    return os.path.join(base, 'stelo.conf')

BLOCK_EXPLORER_URL_PREFIX = 'http://206.189.100.68:3001/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://206.189.100.68:3001/address/'
TX_EXPLORER_URL_PREFIX = 'http://206.189.100.68:3001/tx/'
SANE_TARGET_RANGE = (2**256//1000000000 - 1, 2**256//10000 - 1)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 0.03e8