Example #1
 def register_aggregator(aggregator):
     web_aggregator = server.Site(WebCookieAggregator(aggregator))
     reactor.listenTCP(settings.WEB_AGGREGATOR_PORT, web_aggregator)
Example #2
def main():
    """
    The main event loop that starts the server and configures it.
    """
    usage = "usage: %prog [options]"
    parser = OptionParser(usage)

    parser.add_option(
        "-p",
        "--port",
        help="Port the HTTP server listens to for the GitHub Service Hook" +
        " [default: %default]",
        default=4000,
        type=int,
        dest="port")

    parser.add_option(
        "-m",
        "--buildmaster",
        help="Buildbot Master host and port. ie: localhost:9989 [default:" +
        " %default]",
        default="localhost:9989",
        dest="buildmaster")

    parser.add_option(
        "-l",
        "--log",
        help="The absolute path, including filename, to save the log to" +
        " [default: %default]",
        default=tempfile.gettempdir() + "/github_buildbot.log",
        dest="log")

    parser.add_option(
        "-L",
        "--level",
        help="The logging level: debug, info, warn, error, fatal [default:" +
        " %default]",
        default='warn',
        dest="level")

    parser.add_option(
        "-g",
        "--github",
        help="The github server.  Changing this is useful if you've specified"
        + "  a specific HOST handle in ~/.ssh/config for github " +
        "[default: %default]",
        default='github.com',
        dest="github")

    parser.add_option(
        "--pidfile",
        help="Write the process identifier (PID) to this file on start." +
        " The file is removed on clean exit. [default: %default]",
        default=None,
        dest="pidfile")

    (options, _) = parser.parse_args()

    if options.pidfile:
        with open(options.pidfile, 'w') as f:
            f.write(str(os.getpid()))

    levels = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warn': logging.WARNING,
        'error': logging.ERROR,
        'fatal': logging.FATAL,
    }

    filename = options.log
    log_format = "%(asctime)s - %(levelname)s - %(message)s"
    logging.basicConfig(filename=filename,
                        format=log_format,
                        level=levels[options.level])

    github_bot = GitHubBuildBot()
    github_bot.github = options.github
    github_bot.master = options.buildmaster

    site = server.Site(github_bot)
    reactor.listenTCP(options.port, site)
    reactor.run()

    if options.pidfile and os.path.exists(options.pidfile):
        os.unlink(options.pidfile)
Example #3
    params = namedParameters()
    port = params.get('socket', 8080)

    if 'twisted' in params:
        from twisted.web import server
        from twisted.web.wsgi import WSGIResource
        from twisted.python.threadpool import ThreadPool
        from twisted.python import log
        from twisted.internet import reactor
        from twisted.application import service, strports

        # Create and start a thread pool,
        wsgiThreadPool = ThreadPool()
        wsgiThreadPool.start()

        # ensuring that it will be stopped when the reactor shuts down
        reactor.addSystemEventTrigger('after', 'shutdown', wsgiThreadPool.stop)

        reactor.listenTCP(
            port,
            server.Site(WSGIResource(reactor, wsgiThreadPool,
                                     wsgi_application)))
        log.startLogging(log.FileLogObserver(sys.stderr))
        reactor.run()

    else:
        import wsgiref, wsgiref.simple_server
        httpd = wsgiref.simple_server.make_server('', port, wsgi_application)
        httpd.serve_forever()
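Example #3 relies on a namedParameters() helper and a wsgi_application callable defined elsewhere in that project. A minimal stand-in for each, purely illustrative and not the original implementations, could look like this:

import sys

def namedParameters():
    # Illustrative only: collect "key=value" command-line arguments into a dict,
    # converting numeric values to int (e.g. "python serve.py twisted=1 socket=9090").
    params = {}
    for arg in sys.argv[1:]:
        key, _, value = arg.partition('=')
        params[key] = int(value) if value.isdigit() else value
    return params

def wsgi_application(environ, start_response):
    # Trivial WSGI app so the snippet has something to serve.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello from WSGI\n']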
Example #4
 def setUp(self):
     xmlrpc = Test()
     addIntrospection(xmlrpc)
     self.p = reactor.listenTCP(0, server.Site(xmlrpc), interface="127.0.0.1")
     self.port = self.p.getHost().port
     self.factories = []
Example #5
# See the License for the specific language governing permissions and
# limitations under the License.
"""
a very simple json echo server
"""

from twswebrpc import JSONResource

from twisted.internet import reactor
from twisted.web import server


def serve_echo(value):
    return value


def serve_add(a, b):
    return a + b


jsonServerResource = JSONResource()
jsonServerResource.add_method('echo', serve_echo)
jsonServerResource.add_method('add', serve_add)

serverSite = server.Site(jsonServerResource)

# listen on all available IPs on port 1080
reactor.listenTCP(1080, serverSite)
print 'launch simpleechoclient.py or simpleechoclient_tester.py for test '
reactor.run()
Example #6
    def __init__(self, reactor, top, note_dirty, read_only_dbs, writable_db,
                 http_endpoint, ws_endpoint, root_cap, title):
        self.__http_port = http_endpoint
        self.__ws_port = ws_endpoint

        self.__ws_protocol = txws.WebSocketFactory(
            OurStreamFactory(top, root_cap))

        # Roots of resource trees
        # - appRoot is everything stateful/authority-bearing
        # - serverRoot is the HTTP '/' and static resources are placed there
        serverRoot = _make_static(staticResourcePath)
        if root_cap is None:
            appRoot = serverRoot
            self.__visit_path = '/'
        else:
            serverRoot = _make_static(staticResourcePath)
            appRoot = _SlashedResource()
            serverRoot.putChild(root_cap, appRoot)
            self.__visit_path = '/' + urllib.quote(root_cap, safe='') + '/'

        # UI entry point
        appRoot.putChild('', _RadioIndexHtmlResource(title))

        # Exported radio control objects
        appRoot.putChild('radio', BlockResource(top, note_dirty, notDeletable))

        # Frequency DB
        appRoot.putChild('dbs', shinysdr.db.DatabasesResource(read_only_dbs))
        appRoot.putChild('wdb', shinysdr.db.DatabaseResource(writable_db))

        # Debug graph
        appRoot.putChild('flow-graph', FlowgraphVizResource(reactor, top))

        # Construct explicit resources for merge.
        test = _reify(serverRoot, 'test')
        jasmine = _reify(test, 'jasmine')
        for name in ['jasmine.css', 'jasmine.js', 'jasmine-html.js']:
            jasmine.putChild(
                name,
                static.File(
                    os.path.join(os.path.dirname(__file__),
                                 'deps/jasmine/lib/jasmine-core/', name)))

        client = _reify(serverRoot, 'client')
        client.putChild(
            'require.js',
            static.File(
                os.path.join(os.path.dirname(__file__), 'deps/require.js')))

        # Plugin resources
        load_list_css = []
        load_list_js = []
        plugin_resources = Resource()
        client.putChild('plugins', plugin_resources)
        for resource_def in getPlugins(IClientResourceDef, shinysdr.plugins):
            # Add the plugin's resource to static serving
            plugin_resources.putChild(resource_def.key, resource_def.resource)
            plugin_resource_url = '/client/plugins/' + urllib.quote(
                resource_def.key, safe='') + '/'
            # Tell the client to load the plugins
            # TODO constrain path values to be relative
            if resource_def.load_css_path is not None:
                load_list_css.append(plugin_resource_url +
                                     resource_def.load_css_path)
            if resource_def.load_js_path is not None:
                # TODO constrain value to be in the directory
                load_list_js.append(plugin_resource_url +
                                    resource_def.load_js_path)

        # Client plugin list
        client.putChild(
            'plugin-index.json',
            static.Data(
                _serialize({
                    u'css': load_list_css,
                    u'js': load_list_js,
                }).encode('utf-8'), 'application/json'))

        self.__site = server.Site(serverRoot)
        self.__ws_port_obj = None
        self.__http_port_obj = None
Example #7
 def createServer(self, resource):
     self.p = reactor.listenTCP(
         0, server.Site(resource), interface="127.0.0.1")
     self.addCleanup(self.p.stopListening)
     self.port = self.p.getHost().port
     self.proxy = xmlrpc.Proxy('http://127.0.0.1:%d' % self.port)
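With this helper in place, a test method can drive the server through self.proxy; a sketch (the 'echo' method and the Test resource are hypothetical stand-ins, not from the original suite) might read:

 def test_echo(self):
     self.createServer(Test())  # 'Test' stands in for whatever XML-RPC resource the suite defines
     d = self.proxy.callRemote('echo', 'hello')
     # trial waits for the returned Deferred before finishing the test
     d.addCallback(self.assertEqual, 'hello')
     return d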
Example #8
    def register_resources(self, args: Namespace) -> None:
        from hathor.conf import HathorSettings
        from hathor.mining.ws import MiningWebsocketFactory
        from hathor.p2p.resources import AddPeersResource, MiningInfoResource, MiningResource, StatusResource
        from hathor.prometheus import PrometheusMetricsExporter
        from hathor.resources import ProfilerResource
        from hathor.transaction.resources import (
            CreateTxResource,
            DashboardTransactionResource,
            DecodeTxResource,
            GetBlockTemplateResource,
            GraphvizFullResource,
            GraphvizNeighboursResource,
            PushTxResource,
            SubmitBlockResource,
            TipsHistogramResource,
            TipsResource,
            TransactionAccWeightResource,
            TransactionResource,
            TxParentsResource,
            ValidateAddressResource,
        )
        from hathor.version_resource import VersionResource
        from hathor.wallet.resources import (
            AddressResource,
            BalanceResource,
            HistoryResource,
            LockWalletResource,
            SendTokensResource,
            SignTxResource,
            StateWalletResource,
            UnlockWalletResource,
        )
        from hathor.wallet.resources.nano_contracts import (
            NanoContractDecodeResource,
            NanoContractExecuteResource,
            NanoContractMatchValueResource,
        )
        from hathor.wallet.resources.thin_wallet import (
            AddressBalanceResource,
            AddressHistoryResource,
            AddressSearchResource,
            SendTokensResource as SendTokensThinResource,
            TokenHistoryResource,
            TokenResource,
        )
        from hathor.websocket import HathorAdminWebsocketFactory, WebsocketStatsResource
        # WebSocketResource (used for the /ws resources below) comes from autobahn;
        # assumed import, not shown in the original excerpt.
        from autobahn.twisted.resource import WebSocketResource

        settings = HathorSettings()

        if args.prometheus:
            kwargs: Dict[str, Any] = {'metrics': self.manager.metrics}

            if args.data:
                kwargs['path'] = os.path.join(args.data, 'prometheus')
            else:
                raise ValueError(
                    'To run prometheus exporter you must have a data path')

            prometheus = PrometheusMetricsExporter(**kwargs)
            prometheus.start()

        if args.status:
            # TODO get this from a file. How should we do with the factory?
            root = Resource()
            wallet_resource = Resource()
            root.putChild(b'wallet', wallet_resource)
            thin_wallet_resource = Resource()
            root.putChild(b'thin_wallet', thin_wallet_resource)
            contracts_resource = Resource()
            wallet_resource.putChild(b'nano-contract', contracts_resource)
            p2p_resource = Resource()
            root.putChild(b'p2p', p2p_resource)
            graphviz = Resource()
            # XXX: reach the resource through /graphviz/ too, previously it was a leaf so this wasn't a problem
            graphviz.putChild(b'', graphviz)
            for fmt in ['dot', 'pdf', 'png', 'jpg']:
                bfmt = fmt.encode('ascii')
                graphviz.putChild(
                    b'full.' + bfmt,
                    GraphvizFullResource(self.manager, format=fmt))
                graphviz.putChild(
                    b'neighbours.' + bfmt,
                    GraphvizNeighboursResource(self.manager, format=fmt))

            resources = (
                (b'status', StatusResource(self.manager), root),
                (b'version', VersionResource(self.manager), root),
                (b'create_tx', CreateTxResource(self.manager), root),
                (b'decode_tx', DecodeTxResource(self.manager), root),
                (b'validate_address', ValidateAddressResource(self.manager),
                 root),
                (b'push_tx', PushTxResource(self.manager), root),
                (b'graphviz', graphviz, root),
                (b'tips-histogram', TipsHistogramResource(self.manager), root),
                (b'tips', TipsResource(self.manager), root),
                (b'transaction', TransactionResource(self.manager), root),
                (b'transaction_acc_weight',
                 TransactionAccWeightResource(self.manager), root),
                (b'dashboard_tx', DashboardTransactionResource(self.manager),
                 root),
                (b'profiler', ProfilerResource(self.manager), root),
                # mining
                (b'mining', MiningResource(self.manager), root),
                (b'getmininginfo', MiningInfoResource(self.manager), root),
                (b'get_block_template', GetBlockTemplateResource(self.manager),
                 root),
                (b'submit_block', SubmitBlockResource(self.manager), root),
                (b'tx_parents', TxParentsResource(self.manager), root),
                # /thin_wallet
                (b'address_history', AddressHistoryResource(self.manager),
                 thin_wallet_resource),
                (b'address_balance', AddressBalanceResource(self.manager),
                 thin_wallet_resource),
                (b'address_search', AddressSearchResource(self.manager),
                 thin_wallet_resource),
                (b'send_tokens', SendTokensThinResource(self.manager),
                 thin_wallet_resource),
                (b'token', TokenResource(self.manager), thin_wallet_resource),
                (b'token_history', TokenHistoryResource(self.manager),
                 thin_wallet_resource),
                # /wallet/nano-contract
                (b'match-value', NanoContractMatchValueResource(self.manager),
                 contracts_resource),
                (b'decode', NanoContractDecodeResource(self.manager),
                 contracts_resource),
                (b'execute', NanoContractExecuteResource(self.manager),
                 contracts_resource),
                # /p2p
                (b'peers', AddPeersResource(self.manager), p2p_resource),
            )
            for url_path, resource, parent in resources:
                parent.putChild(url_path, resource)

            if self.manager.stratum_factory is not None:
                from hathor.stratum.resources import MiningStatsResource
                root.putChild(b'miners', MiningStatsResource(self.manager))

            if self.wallet and args.wallet_enable_api:
                wallet_resources = (
                    # /wallet
                    (b'balance', BalanceResource(self.manager), wallet_resource
                     ),
                    (b'history', HistoryResource(self.manager),
                     wallet_resource),
                    (b'address', AddressResource(self.manager),
                     wallet_resource),
                    (b'send_tokens', SendTokensResource(self.manager),
                     wallet_resource),
                    (b'sign_tx', SignTxResource(self.manager),
                     wallet_resource),
                    (b'unlock', UnlockWalletResource(self.manager),
                     wallet_resource),
                    (b'lock', LockWalletResource(self.manager),
                     wallet_resource),
                    (b'state', StateWalletResource(self.manager),
                     wallet_resource),
                )
                for url_path, resource, parent in wallet_resources:
                    parent.putChild(url_path, resource)

            # Websocket resource
            ws_factory = HathorAdminWebsocketFactory(
                metrics=self.manager.metrics,
                wallet_index=self.manager.tx_storage.wallet_index)
            ws_factory.start()
            root.putChild(b'ws', WebSocketResource(ws_factory))

            # Mining websocket resource
            mining_ws_factory = MiningWebsocketFactory(self.manager)
            root.putChild(b'mining_ws', WebSocketResource(mining_ws_factory))

            ws_factory.subscribe(self.manager.pubsub)

            # Websocket stats resource
            root.putChild(b'websocket_stats',
                          WebsocketStatsResource(ws_factory))

            real_root = Resource()
            real_root.putChild(settings.API_VERSION_PREFIX.encode('ascii'),
                               root)
            status_server = server.Site(real_root)
            reactor.listenTCP(args.status, status_server)

            # Set websocket factory in metrics
            self.manager.metrics.websocket_factory = ws_factory
Example #9
        else:
            req.setResponseCode(400)
            return json.dumps({
                'success': False,
                'error': 'no data parameter'
            }).encode()


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='Launch an OpenDHT node with an HTTP control interface')
    parser.add_argument('-p',
                        '--port',
                        help='OpenDHT port to bind',
                        type=int,
                        default=4222)
    parser.add_argument('-hp',
                        '--http-port',
                        help='HTTP port to bind',
                        type=int,
                        default=8080)
    parser.add_argument('-b',
                        '--bootstrap',
                        help='bootstrap address',
                        default="bootstrap.ring.cx:4222")
    args = parser.parse_args()
    endpoints.serverFromString(reactor, "tcp:" + str(args.http_port)).listen(
        server.Site(DhtServer(args.port, args.bootstrap)))
    reactor.run()
Example #10
#!/usr/bin/env python

from __future__ import print_function
from twisted.internet import reactor
from twisted.internet.endpoints import serverFromString
from twisted.web import server, static

serverFromString(reactor, "onion:80").listen(
    server.Site(static.Data("Hello, world!", "text/plain"))
).addCallback(print)
reactor.run()
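As written, the response body is a str, which only renders correctly on Python 2; on Python 3, twisted.web.static.Data expects the payload as bytes (the content type stays a str), so the listen call would become:

serverFromString(reactor, "onion:80").listen(
    server.Site(static.Data(b"Hello, world!", "text/plain"))
).addCallback(print)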
Example #11
    def render_GET(self, request):
        return "redis: %s\n" % repr(self.db)


class XmlrpcHandler(BaseHandler, xmlrpc.XMLRPC):
    allowNone = True

    @defer.inlineCallbacks
    def xmlrpc_get(self, key):
        value = yield self.db.get(key)
        defer.returnValue(value)

    @defer.inlineCallbacks
    def xmlrpc_set(self, key, value):
        result = yield self.db.set(key, value)
        defer.returnValue(result)


# redis connection
_db = redis.lazyConnectionPool()

# http resources
root = Root()
root.putChild("", IndexHandler(_db))
root.putChild("info", InfoHandler(_db))
root.putChild("xmlrpc", XmlrpcHandler(_db))

application = service.Application("webredis")
srv = internet.TCPServer(8888, server.Site(root), interface="127.0.0.1")
srv.setServiceParent(application)
Example #12
        data = request.content.getvalue().decode('utf-8')

        msg = 'POST result'
        if (path == '/func' or path == '/func1/'):
            msg = myFunc1(2, 3)
        #print('POST...path=', path)    # path= b'/this/is'  if post to url 'http://127.0.0.1:12347/this/is?a=1&b=2'
        #print('arg=', args)      # arg= {'a': ['de中啊文fg'], 'd': ['20190722']}
        #print('post data:', data)
        res = {
            'method': 'POST',
            'path': path,
            'args': args,
            'data': data,
            'out': msg
        }
        res['dec_data'] = parse.unquote(data)
        retstr = json.dumps(res, ensure_ascii=False)
        return retstr.encode('utf-8')


if __name__ == '__main__':

    # port to listen on
    port = 9601

    site = server.Site(Server())
    endpoint = endpoints.TCP4ServerEndpoint(reactor, port)
    endpoint.listen(site)
    print('READY at port ', port)
    reactor.run()
Example #13
    # Start a django-compatible webserver.

    from twisted.python import threadpool
    from src.server.webserver import DjangoWebRoot, WSGIWebServer

    # start a thread pool and define the root url (/) as a wsgi resource
    # recognized by Django
    threads = threadpool.ThreadPool(minthreads=max(1, settings.WEBSERVER_THREADPOOL_LIMITS[0]),
                                    maxthreads=max(1, settings.WEBSERVER_THREADPOOL_LIMITS[1]))
    web_root = DjangoWebRoot(threads)
    # point our media resources to url /media
    web_root.putChild("media", static.File(settings.MEDIA_ROOT))
    # point our static resources to url /static
    web_root.putChild("static", static.File(settings.STATIC_ROOT))
    web_site = server.Site(web_root, logPath=settings.HTTP_LOG_FILE)

    for proxyport, serverport in WEBSERVER_PORTS:
        # create the webserver (we only need the port for this)
        webserver = WSGIWebServer(threads, serverport, web_site, interface='127.0.0.1')
        webserver.setName('EvenniaWebServer%s' % serverport)
        EVENNIA.services.addService(webserver)

        print "  webserver: %s" % serverport

ENABLED = []
if IRC_ENABLED:
    # IRC channel connections
    ENABLED.append('irc')

if IMC2_ENABLED:
Example #14
class IndexPage(resource.Resource):
    def __init__(self, wikiData):
        self.wikiData = wikiData
        resource.Resource.__init__(self)

    def render(self, request):
        pages = self.wikiData.listPages()
        pages.sort()
        links = ["<li><a href='%s'>%s</a></li>" % (p, p) for p in pages]
        return """
        <html>
        <head><title>Wiki Index</title></head>
        <body>
        <a href='/'>Home</a>
        <h1>Index</h1>
        <ul>
        %s
        </ul>
        </body>
        </html>
        """ % "".join(links)


if __name__ == '__main__':
    import sys
    from twisted.internet import reactor
    wikiData = WikiData(sys.argv[1])
    reactor.listenTCP(8082, server.Site(RootResource(wikiData)))
    reactor.run()
Example #15
# -*- coding: utf-8 -*-

from twisted.web import server, resource
from twisted.internet import reactor


class MainResource(resource.Resource):

    isLeaf = True

    # handles GET requests
    def render_GET(self, request):

        # the 'name' parameter from the query string
        name = 'World'
        if 'name' in request.args:
            name = request.args['name'][0]

        # set the response content type and encoding
        request.responseHeaders.addRawHeader("Content-Type",
                                             "text/html; charset=utf-8")

        # return the response body directly
        return "<html><body>Hello, " + name + "</body></html>"


site = server.Site(MainResource())
reactor.listenTCP(8080, site)
reactor.run()
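Once the server is running, the name parameter can be exercised with a plain GET request, for example (Python 2, to match the example above):

import urllib2
# prints "<html><body>Hello, Twisted</body></html>"
print urllib2.urlopen('http://127.0.0.1:8080/?name=Twisted').read()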
Example #16
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.


from twisted.internet import reactor
from twisted.web import proxy, server

site = server.Site(proxy.ReverseProxyResource('www.yahoo.com', 80, ''))
reactor.listenTCP(8080, site)
reactor.run()
Example #17
from twisted.internet import reactor
from twisted.web import resource, server
from twisted.web.server import NOT_DONE_YET

PORT = 8080


def delayed_render(request, name):
    request.write("Hello %s, sorry about the delay." % name)
    request.finish()


class HelloResource(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        # Check that the 'name' URL param was passed.
        name = request.args.get('name', None)
        if name is None:
            return resource.ErrorPage(400, "Bad Request",
                                      "Missing 'name' param.").render(request)
        name = name[0]

        delay_seconds = 2
        print "Delaying for %s seconds." % delay_seconds
        # Delay requires a callback
        reactor.callLater(delay_seconds, delayed_render, request, name)
        return NOT_DONE_YET


reactor.listenTCP(PORT, server.Site(HelloResource()))
print "Listening on http://localhost:%s" % PORT
reactor.run()
Example #18
#
# ep0:
#    launch a new Tor instance, configure a hidden service on some
#    port and publish a descriptor for port 80
# ep1:
#    connect to existing Tor via control-port 9051, configure a hidden
#    service listening locally on 8080, publish a descriptor for port
#    80 and use an explicit hiddenServiceDir (where "hostname" and
#    "private_key" files are put by Tor). We set SOCKS port
#    explicitly, too.
# ep2:
#    all the same as ep1, except we launch a new Tor (because no
#    "controlPort=9051")
#

ep0 = "onion:80"
ep1 = "onion:80:controlPort=9051:localPort=8080:socksPort=9089:hiddenServiceDir=/home/human/src/txtorcon/hidserv"
ep2 = "onion:80:localPort=8080:socksPort=9089:hiddenServiceDir=/home/human/src/txtorcon/hidserv"

hs_endpoint = serverFromString(reactor, ep0)
txtorcon.IProgressProvider(hs_endpoint).add_progress_listener(progress)

# create our Web server and listen on the endpoint; this does the
# actual launching of (or connecting to) tor.
site = server.Site(Simple())
d = hs_endpoint.listen(site)
d.addCallback(setup_complete)
d.addErrback(setup_failed)

reactor.run()
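The Simple resource and the progress/setup callbacks are defined elsewhere in the original script; a minimal hypothetical set, just enough to make the snippet runnable, might be:

from twisted.web import resource

class Simple(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        # on Python 3 this would need to be bytes
        return "<html>Hello from an onion service!</html>"

def progress(percent, tag, message):
    # txtorcon progress listeners receive a percentage, a tag and a message
    print("{}% {}".format(int(percent), message))

def setup_complete(port):
    print("Hidden service listening: {}".format(port.getHost()))

def setup_failed(failure):
    print("Failed to set up the hidden service: {}".format(failure))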
Example #19
def main():
    from twisted.internet import reactor
    from twisted.web import server
    r = Echoer()
    reactor.listenTCP(7080, server.Site(r))
    reactor.run()
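Echoer is not shown in this snippet; a bare-bones stand-in resource (hypothetical, just something for the Site to serve) would be enough to run it:

from twisted.web import resource

class Echoer(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        # echo the query arguments back to the caller
        return "You said: %r\n" % (request.args,)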
Example #20
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__, )
        print

        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (
                args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port,
                               factory)
            yield factory.getProtocol()  # waits until handshake is successful
            print '    ...success!'
            print
            defer.returnValue(factory)

        if args.testnet:  # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()

        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http',
                               args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (
            url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(
            url,
            dict(Authorization='Basic ' +
                 base64.b64encode(args.bitcoind_rpc_username + ':' +
                                  args.bitcoind_rpc_password)),
            timeout=30)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)

        bitcoind_warning_var = variable.Variable(None)

        @defer.inlineCallbacks
        def poll_warnings():
            errors = (yield
                      deferral.retry('Error while calling getmininginfo:')(
                          bitcoind.rpc_getmininginfo)())['errors']
            bitcoind_warning_var.set(errors if errors != '' else None)

        yield poll_warnings()
        task.LoopingCall(poll_warnings).start(20 * 60)

        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block'], )
        print '    Current block height: %i' % (temp_work['height'] - 1, )
        print

        if not args.testnet:
            factory = yield connect_p2p()

        print 'Determining payout address...'
        pubkey_path = os.path.join(datadir_path, 'cached_payout_pubkey')

        if os.path.exists(pubkey_path):
            with open(pubkey_path, 'rb') as f:
                pubkey = f.read().strip('\r\n')
            print '    Loaded cached pubkey, payout address: %s...' % (
                bitcoin_data.pubkey_to_address(pubkey.decode('hex'),
                                               net.PARENT), )
        else:
            pubkey = None

        if pubkey is not None:
            res = yield deferral.retry(
                'Error validating cached pubkey:',
                5)(lambda: bitcoind.rpc_validatepubkey(pubkey))()
            if not res['isvalid'] or not res['ismine']:
                print '    Cached pubkey is either invalid or not controlled by local bitcoind!'
                pubkey = None  # discard the invalid cached pubkey so a fresh one is fetched below

        if pubkey is None:
            print '    Getting payout pubkey from bitcoind...'
            pubkey = yield deferral.retry(
                'Error getting payout pubkey from bitcoind:',
                5)(lambda: bitcoind.rpc_getnewpubkey('p2pool'))()

            with open(pubkey_path, 'wb') as f:
                f.write(pubkey)

        my_pubkey = pubkey.decode('hex')

        address = bitcoin_data.pubkey_to_address(my_pubkey, net.PARENT)

        my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(
            address, net.PARENT)
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(
            my_pubkey_hash, net.PARENT)
        print

        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        shares = {}
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                contents.time_seen = 0
                shares[contents.hash] = contents
                if len(shares) % 1000 == 0 and shares:
                    print "    %i" % (len(shares), )
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...done loading %i shares (%i verified)!" % (
            len(shares), len(known_verified))
        print

        print 'Initializing work...'

        node = p2pool_node.Node(factory, bitcoind, shares.values(),
                                known_verified, net)
        yield node.start()

        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        del shares, known_verified
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(
            lambda share: ss.forget_verified_share(share.hash))

        def save_shares():
            for share in node.tracker.get_chain(
                    node.best_share_var.value,
                    min(node.tracker.get_height(node.best_share_var.value),
                        2 * net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)

        task.LoopingCall(save_shares).start(60)

        print '    ...success!'
        print

        print 'Joining p2pool network using port %i...' % (args.p2pool_port, )

        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))

        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(
                        dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >> sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()

        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()

        node.p2p_node = p2pool_node.P2PNode(
            node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
        )
        node.p2p_node.start()

        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))

        task.LoopingCall(save_addrs).start(60)

        print '    ...success!'
        print

        if args.upnp:

            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(
                                lan_ip, args.p2pool_port, args.p2pool_port,
                                'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1 / 120))

            upnp_thread()

        # start listening for workers with a JSON-RPC server

        print 'Listening for workers on %r port %i...' % (worker_endpoint[0],
                                                          worker_endpoint[1])

        wb = work.WorkerBridge(node, my_pubkey, args.donation_percentage,
                               merged_urls, args.worker_fee)
        web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(
            web_root, get_handler=lambda request: request.redirect('/static/'))
        web_serverfactory = server.Site(web_root)

        serverfactory = switchprotocol.FirstByteSwitchFactory(
            {'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
        deferral.retry('Error binding to worker port:', traceback=False)(
            reactor.listenTCP)(worker_endpoint[1],
                               serverfactory,
                               interface=worker_endpoint[0])

        with open(os.path.join(os.path.join(datadir_path, 'ready_flag')),
                  'wb') as f:
            pass

        print '    ...success!'
        print

        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (
            worker_endpoint[1], )
        if args.donation_percentage > 1.1:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (
                args.donation_percentage, )
        elif args.donation_percentage < .9:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (
                args.donation_percentage, )
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (
                args.donation_percentage, )
            print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print

        if hasattr(signal, 'SIGALRM'):
            signal.signal(
                signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                    sys.stderr.write, 'Watchdog timer went off at:\n' + ''.
                    join(traceback.format_stack())))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)

        if args.irc_announce:
            from twisted.words.protocols import irc

            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100), )
                channel = net.ANNOUNCE_CHANNEL

                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)

                def signedOn(self):
                    self.in_channel = False
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)

                    @defer.inlineCallbacks
                    def new_share(share):
                        if not self.in_channel:
                            return
                        if share.pow_hash <= share.header[
                                'bits'].target and abs(share.timestamp -
                                                       time.time()) < 10 * 60:
                            yield deferral.sleep(random.expovariate(1 / 60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (
                                net.NAME.upper(),
                                bitcoin_data.script2_to_address(
                                    share.new_script, net.PARENT),
                                net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                                share.header_hash)
                            if all('%x' %
                                   (share.header_hash, ) not in old_message
                                   for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)

                    self.watch_id = node.tracker.verified.added.watch(
                        new_share)
                    self.recent_messages = []

                def joined(self, channel):
                    self.in_channel = True

                def left(self, channel):
                    self.in_channel = False

                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)

                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)

                def connectionLost(self, reason):
                    node.tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()

            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient

            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())

        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues()
                            if peer.incoming),
                    ) + (' FDs: %i R/%i W' %
                         (len(reactor.getReaders()), len(reactor.getWriters()))
                         if p2pool.DEBUG else '')

                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work'] / dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(
                            sum(1 for datum in datums if datum['dead']),
                            len(datums), 0.95),
                        math.format_dt(2**256 / node.tracker.items[
                            node.best_share_var.value].max_target / my_att_s)
                        if my_att_s and node.best_share_var.value else '???',
                    )

                    if height > 2:
                        (stale_orphan_shares,
                         stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(
                            node.tracker, node.best_share_var.value,
                            min(60 * 60 // net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(
                            node.tracker, node.best_share_var.value,
                            min(height - 1, 60 * 60 //
                                net.SHARE_PERIOD)) / (1 - stale_prop)

                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares,
                            stale_orphan_shares,
                            stale_doa_shares,
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95),
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95, lambda x: (1 - x) / (1 - stale_prop)),
                            node.get_current_txouts().get(
                                bitcoin_data.pubkey_to_script2(my_pubkey), 0) *
                            1e-6,
                            net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100 * stale_prop,
                            math.format_dt(
                                2**256 /
                                node.bitcoind_work.value['bits'].target /
                                real_att_s),
                        )

                        for warning in p2pool_data.get_warnings(
                                node.tracker, node.best_share_var.value, net,
                                bitcoind_warning_var.value,
                                node.bitcoind_work.value):
                            print >> sys.stderr, '#' * 40
                            print >> sys.stderr, '>>> Warning: ' + warning
                            print >> sys.stderr, '#' * 40

                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (
                                len(gc.garbage), map(type, gc.garbage))

                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()

        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
Example #21
 def setUp(self):
     self.p = reactor.listenTCP(0, server.Site(TestAuthHeader()),
                                interface="127.0.0.1")
     self.port = self.p.getHost().port
     self.factories = []
Example #22
 def setUp(self):
     portal = MaayPortal(WebappConfiguration())
     portal.registerChecker(FakeChecker(portal.realm))
     rpcserver = server.Site(rpc.MaayRPCServer(portal))
     self.p = reactor.listenTCP(0, rpcserver, interface="127.0.0.1")
     self.port = self.p.getHost().port
Example #23
File: serve.py, project: bharatpatel/TS
        # don't provide 'enabled', which will implicitly disable the genome
        query_args = {
            'status': 'error',
            'verbose_error': json.dumps(val),
            'index_version': tmap_version
        }
        encoded_args = urllib.urlencode(query_args)
        url = 'http://localhost/configure/references/genome/status/' + id
        url_read = urllib2.urlopen(url, encoded_args).read()
        logger.debug(url_read)

        shutil.rmtree(temp_path)


if __name__ == '__main__':

    try:
        logger.info("ionJobServer Started Ver: %s" % __version__)

        execService = ProcessExecutionService()

        aq = AnalysisQueue(settings.ANALYSIS_ROOT)
        aq.loop()

        r = AnalysisServer(aq, execService)
        reactor.callLater(0, r.execService.startService)
        reactor.listenTCP(settings.JOBSERVER_PORT, server.Site(r))
        reactor.run()
    except Exception as err:
        logger.exception("Job Server run-time failure.")
Example #24
 def factory(self, *args, **kwargs):
     return server.Site(resource.Resource(), *args, **kwargs)
Example #25
def getAppService(port, interface):
    root = resource.Resource()
    root.putChild("liveness", Liveness())
    root.putChild("readiness", Readiness())
    site = server.Site(root)
    return internet.TCPServer(port, site, interface=interface)
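Liveness and Readiness are defined elsewhere; assuming they are plain health-check endpoints that always answer 200 OK, minimal sketches would be:

from twisted.web import resource

class Liveness(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        return b"ok"

class Readiness(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        return b"ok"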
Example #26
 def createServer(self, r):
     chan = DummyChannel()
     chan.site = server.Site(r)
     return chan
Example #27
def startXMLRPC(control_point, port):
    from twisted.web import server
    r = XMLRPC(control_point)
    print(f'XMLRPC-API on port {port:d} ready')
    reactor.listenTCP(port, server.Site(r))
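The XMLRPC class wrapping control_point comes from the surrounding project; a bare-bones hypothetical version, exposing a single method so the endpoint has something callable, could look like this:

from twisted.web import xmlrpc

class XMLRPC(xmlrpc.XMLRPC):
    def __init__(self, control_point):
        xmlrpc.XMLRPC.__init__(self, allowNone=True)
        self.control_point = control_point

    def xmlrpc_ping(self):
        # trivial method so there is something to call remotely
        return 'pong'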
Example #28
from debianpackage import DebianBuildManager
from slave import XMLRPCBuildDSlave

conffile = os.environ.get("SHUTTLE_SLAVE_CONFIG", "shuttle-slave-example.conf")

conf = ConfigParser()
conf.read(conffile)
slave = XMLRPCBuildDSlave(conf)

slave.registerBuilder(DebianBuildManager, "debian")

application = service.Application("BuildSlave")
builddslaveService = service.IServiceCollection(application)

root = resource.Resource()
root.putChild("rpc", slave)
root.putChild("filecache", static.File(conf.get('slave', 'filecache')))

is_twistd = False
if is_twistd:
    slavesite = server.Site(root)
    strports.service(slave.slave._config.get("slave", "bindport"),
                     slavesite).setServiceParent(builddslaveService)
else:
    from twisted.internet import reactor
    from twisted.web.server import Site
    factory = Site(root)
    reactor.listenTCP(slave.slave._config.getint("slave", "bindport"), factory)
    reactor.run()

Example #29
if __name__ == '__main__':
    # Set up logging
    logger = logging.getLogger('similarity')
    logger.setLevel(logging.DEBUG)
    handler = RotatingFileHandler(LOGFILE_INDEXING_SERVER,
                                  maxBytes=2 * 1024 * 1024,
                                  backupCount=5)
    handler.setLevel(logging.DEBUG)
    std_handler = logging.StreamHandler()
    std_handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    std_handler.setFormatter(formatter)
    logger.addHandler(std_handler)
    handler_graypy = graypy.GELFHandler('10.55.0.48', 12201)
    logger.addHandler(handler_graypy)

    # Start service
    logger.info('Configuring similarity INDEXING service...')
    root = resource.Resource()
    root.putChild("similarity", SimilarityServer())
    site = server.Site(root)
    reactor.listenTCP(INDEXING_SERVER_LISTEN_PORT, site)
    logger.info('Started similarity INDEXING service, listening to port ' +
                str(INDEXING_SERVER_LISTEN_PORT) + "...")
    reactor.run()
    logger.info('Service stopped.')
Example #30
from twisted.application import internet, service
from twisted.web import server

from scripts import resources

app = service.MultiService()
web_root = resources.Root('./web')
web_root.putChild('crypt', resources.Crypt())
web_factory = server.Site(web_root)
web_service = internet.TCPServer(8081, web_factory, interface='0.0.0.0')
web_service.setServiceParent(app)

application = service.Application("ansible-vault")

app.setServiceParent(application)
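Like Example #28, this file only builds an application object and never calls reactor.run(); it is meant to be launched under twistd (for example, twistd -noy <file>), which picks up the application variable and runs the reactor itself.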