Example #1
def main():
    arg = docopt("""
    Usage: environment.py [options]

    -v                    Verbose
    """)
    log.setLevel(logging.INFO)
    if arg['-v']:
        enableTwistedLog()
        log.setLevel(logging.DEBUG)
        defer.setDebugging(True)
        
    masterGraph = PatchableGraph()

    class Application(cyclone.web.Application):
        def __init__(self):
            handlers = [
                (r"/()",
                 cyclone.web.StaticFileHandler,
                 {"path": ".", "default_filename": "index.html"}),
                (r'/graph',
                 CycloneGraphHandler, {'masterGraph': masterGraph}),
                (r'/graph/events',
                 CycloneGraphEventsHandler, {'masterGraph': masterGraph}),
                (r'/doc', Doc), # to be shared
                (r'/stats/(.*)', StatsHandler, {'serverName': 'environment'}),
            ]
            cyclone.web.Application.__init__(self, handlers,
                                             masterGraph=masterGraph)
    task.LoopingCall(update, masterGraph).start(1)
    reactor.listenTCP(9075, Application())
    reactor.run()
Example #2
    def setUpClass(cls):
        if not os.environ.get('KAFKA_VERSION'):  # pragma: no cover
            log.warning("WARNING: KAFKA_VERSION not found in environment")
            return

        DEBUGGING = True
        setDebugging(DEBUGGING)
        DelayedCall.debug = DEBUGGING

        # Single zookeeper, 3 kafka brokers
        zk_chroot = random_string(10)
        replicas = 3
        partitions = 2

        cls.zk = ZookeeperFixture.instance()
        kk_args = [cls.zk.host, cls.zk.port, zk_chroot, replicas, partitions]
        cls.kafka_brokers = [
            KafkaFixture.instance(i, *kk_args) for i in range(replicas)]
        # server is used by our superclass when creating the client...
        cls.server = cls.kafka_brokers[0]

        # Start up the Twisted reactor in a thread. We need this before the
        # KafkaClient can work, since KafkaBrokerClient relies on the
        # reactor for its TCP connection
        cls.reactor, cls.thread = threaded_reactor()
Example #3
def install_exception_handlers(quitFunc=None):
  """this handles exceptions that would normally be caught by Python or Twisted and just silently ignored..."""
  def handle_exception(excType, value, tb):
    log_ex(value, "Unhandled exception in main loop:", reasonTraceback=tb, excType=excType)
  sys.excepthook = handle_exception
  #this handles exceptions that would normally be caught by Twisted:
  def handle_twisted_err(_stuff=None, _why=None, quitFunc=quitFunc, **kw):
    excType = None
    tb = None
    #get the exception from the system if necessary
    if not _stuff or issubclass(type(_stuff), Exception):
      (excType, _stuff, tb) = sys.exc_info()
    #check if this is a shutdown signal
    if quitFunc and _stuff and issubclass(type(_stuff), KeyboardInterrupt):
      log_msg("Shutting down from keyboard interrupt...", 0)
      quitFunc()
      return
    #otherwise, log the exception
    if excType and tb:
      log_ex(_stuff, "Unhandled exception from Twisted:", reasonTraceback=tb, excType=excType)
    else:
      log_ex(_stuff, "Unhandled failure from Twisted:")
  twisted.python.log.err = handle_twisted_err
  twisted.python.log.deferr = handle_twisted_err
  #for debugging deferreds--maintains the callstack so AlreadyCalled errors are easier to debug
  defer.setDebugging(True)
Example #4
def setup_logging(options):
    default = pkg_resources.resource_filename(tron.__name__, 'logging.conf')
    logfile = options.log_conf or default

    level = twist_level = None
    if options.verbose > 0:
        level = logging.INFO
        twist_level = logging.WARNING
    if options.verbose > 1:
        level = logging.DEBUG
        twist_level = logging.INFO
    if options.verbose > 2:
        twist_level = logging.DEBUG

    tron_logger = logging.getLogger('tron')
    twisted_logger = logging.getLogger('twisted')

    logging.config.fileConfig(logfile)
    if level is not None:
        tron_logger.setLevel(level)
    if twist_level is not None:
        twisted_logger.setLevel(twist_level)

    # Hookup twisted to standard logging
    twisted_log.PythonLoggingObserver().start()

    # Show stack traces for errors in twisted deferreds.
    if options.debug:
        defer.setDebugging(True)
Example #5
def main():
    """
    Test harness main()

    Usage:

    python SshClient.py hostname[:port] command [command]

    Each command must be enclosed in quotes (") to be interpreted
    properly as a complete unit.
    """
    from itertools import chain
    from twisted.python import log
    import pprint
    import sys, getpass

    if debug:
        log.startLogging(sys.stdout)
        defer.setDebugging(True)
    # logging.basicConfig()

    client = ClientCommandFactory(username="******", ip="127.0.0.1", port=22, keyPath="/home/user/.ssh/id_rsa")

    client.run()
    # client._commands.append('hostname')
    reactor.run()

    pprint.pprint(client.getResults())
Example #6
    def setUpClass(cls):
        if not os.environ.get('KAFKA_VERSION'):  # pragma: no cover
            return

        DEBUGGING = True
        setDebugging(DEBUGGING)
        DelayedCall.debug = DEBUGGING

        zk_chroot = random_string(10)
        replicas = 2
        partitions = 7

        # mini zookeeper, 2 kafka brokers
        cls.zk = ZookeeperFixture.instance()
        kk_args = [cls.zk.host, cls.zk.port, zk_chroot, replicas, partitions]
        cls.kafka_brokers = [
            KafkaFixture.instance(i, *kk_args) for i in range(replicas)]

        hosts = ['%s:%d' % (b.host, b.port) for b in cls.kafka_brokers]
        # We want a short timeout on message sending for this test, since
        # we are expecting failures when we take down the brokers
        cls.client = KafkaClient(hosts, timeout=1000, clientId=__name__)

        # Start up the Twisted reactor in a thread. We need this before the
        # KafkaClient can work, since KafkaBrokerClient relies on the
        # reactor for its TCP connection
        cls.reactor, cls.thread = threaded_reactor()
Example #7
	def connect(self):
		if self.clientdir == None:
			raise MissingAttributeError('Missing Directory')
		if self.enc_pwd == None:
			raise MissingAttributeError('Missing Password')
		
		if os.path.exists(self.clientdir + '/restore~'):
			self.process_restore_folder()
		if not os.path.exists(self.clientdir):
			os.makedirs(self.clientdir)
			
		self.key = hashlib.sha256(self.enc_pwd).digest()
		self.existing_file_dict = parse_existing_clientdir(self.enc_pwd, self.clientdir)
		
		self.gdc = get_dir_changes(self.clientdir, self.configfile)
		if self.gdc == 'new':
			self.new_file_dict = parse_new_dir(self.clientdir, self.enc_pwd, self.key)
		else:
			self.new_file_dict = parse_dir_changes(self.clientdir, self.gdc, self.enc_pwd, self.key)
		self.file_dict = dict(self.existing_file_dict.items() + self.new_file_dict.items())
		self.file_dict, self.get_list = process_file_list(self.previous_file_dict, self.existing_file_dict)
		self.tmp_files = parse_tmp_dir(self.clientdir)
		
		self.config.set('client', 'path', self.clientdir)
		self.config.set('client', 'password', self.enc_pwd)
		self.config.set('client', 'ip', self.med_ip)
		self.config.set('client', 'port', self.med_port)
		
		save_client_wallet(self.configfile, self.config, self.rsa_key, self.file_dict)
		
		defer.setDebugging(self.debug)
		reactor.connectTCP(self.med_ip, self.med_port, MediatorClientFactory(self.clientdir, self.rsa_key, self.tmp_files, 2, self.get_list, self.enc_pwd, self))
		reactor.run()
Example #8
    def debug(self, var):
        """Enable or Disable application model debugging.  You should extend
           this if you know how to enable application debugging in your custom
           'application model'.

           returns deferred - already called
        """
        # assume blaster, which is string based, sent the message
        var = str(var) #for safety
        a = var.lower()
        context = {'code' : 0}
        template = '[%(application)s] Debug '
        try:
            if a == 'true':
                self.model.debug = True
                defer.setDebugging(True)
                template += 'Enabled'
            elif a == 'false':
                self.model.debug = False
                defer.setDebugging(False)
                template += 'Disabled'
            else:
                raise TypeError('input must be a bool, True/False')
        except Exception, exc:
            template += str(exc)
            context['code'] = 1
Example #9
    def _init_local(self):
        from p2p import commands
        from lib import net_misc
        from lib import misc
        from system import tmpfile
        from system import run_upnpc
        from raid import eccmap
        from userid import my_id

        my_id.init()
        if settings.enableWebStream():
            from logs import weblog

            weblog.init(settings.getWebStreamPort())
        if settings.enableWebTraffic():
            from logs import webtraffic

            webtraffic.init(port=settings.getWebTrafficPort())
        misc.init()
        commands.init()
        tmpfile.init(settings.getTempDir())
        net_misc.init()
        settings.update_proxy_settings()
        run_upnpc.init()
        eccmap.init()
        if sys.argv.count("--twisted"):
            from twisted.python import log as twisted_log

            twisted_log.startLogging(MyTwistedOutputLog(), setStdout=0)
            # import twisted.python.failure as twisted_failure
            # twisted_failure.startDebugMode()
            # twisted_log.defaultObserver.stop()
        if settings.getDebugLevel() > 10:
            defer.setDebugging(True)
Example #10
 def opt_debug(self):
     """
     Run the application in the Python Debugger (implies nodaemon),
     sending SIGUSR2 will drop into debugger
     """
     defer.setDebugging(True)
     failure.startDebugMode()
     self['debug'] = True
Example #11
    def __init__(self, config):
        """ Set up the server with a INDX. """

        self.config = config

        from twisted.internet.defer import setDebugging
        setDebugging(True)

        # enable ssl (or not)
        self.ssl = config['server'].get('ssl') or False

        # generate the base URLs
        self.server_url = self.config['server']['hostname']
        if not self.ssl:
            self.server_url = "http://" + self.server_url
            if self.config['server']['port'] != 80:
                self.server_url = self.server_url + ":" + str(self.config['server']['port'])
        else:
            self.server_url = "https://" + self.server_url
            if self.config['server']['port'] != 443:
                self.server_url = self.server_url + ":" + str(self.config['server']['port'])

        # get values to pass to web server
        self.server_address = self.config['server']['address']
        if self.server_address == "":
            self.server_address = "0.0.0.0"

        database.HOST = config['db']['host']
        database.PORT = config['db']['port']

        def auth_cb(can_auth):
            logging.debug("WebServer auth_cb, can_auth: {0}".format(can_auth))
            if can_auth:

                def checked_db_ok(server_id):
                    self.server_id = server_id # Gets the Server ID from the database
                    self.check_users().addCallbacks(lambda checked: self.server_setup(), err_cb)

                self.database.check_indx_db().addCallbacks(checked_db_ok, err_cb) # check indx DB exists, otherwise create it - then setup the server
            else:
                print "Authentication failed, check username and password are correct."
                reactor.stop()

        def auth_err(failure):
            logging.debug("WebServer err_cb, failure: {0}".format(failure))
            failure.trap(Exception)

            print "Authentication failed, check username and password are correct."
            reactor.stop()

        def err_cb(failure):
            logging.debug("WebServer err_cb, failure: {0}".format(failure))
            failure.trap(Exception)

        user,password = self.get_indx_user_password()
        self.database = database.IndxDatabase(config['indx_db'], user, password)
        self.tokens = token.TokenKeeper(self.database)
        self.database.auth_indx(database = "postgres").addCallbacks(auth_cb, auth_err)
Example #12
 def testNoDebugging(self):
     defer.setDebugging(False)
     d = defer.Deferred()
     d.addCallbacks(self._callback, self._errback)
     self._call_1(d)
     try:
         self._call_2(d)
     except defer.AlreadyCalledError, e:
         self.failIf(e.args)
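
For contrast, here is a minimal self-contained sketch (not part of the test suite above) of the same double-fire with debugging switched on: the AlreadyCalledError then carries the tracebacks of where the Deferred was created and first fired.

from twisted.internet import defer

defer.setDebugging(True)
d = defer.Deferred()
d.callback(None)
try:
    d.callback(None)          # firing a second time is an error
except defer.AlreadyCalledError as e:
    # with debugging on, e.args is non-empty and records the creation
    # and first-invocation stacks of the Deferred
    print(e.args[0])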
Example #13
 def opt_debug(self):
     """
     run the application in the Python Debugger (implies nodaemon),
     sending SIGUSR2 will drop into debugger
     """
     from twisted.internet import defer
     defer.setDebugging(True)
     failure.startDebugMode()
     self['debug'] = True
Example #14
    def __init__(self, *args, **kwargs):
        super(AbstractServer, self).__init__(*args, **kwargs)
        twisted.internet.base.DelayedCall.debug = True

        self.watchdog = WatchDog()
        self.selected_socks5_ports = set()

        # Enable Deferred debugging
        from twisted.internet.defer import setDebugging
        setDebugging(True)
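
As an aside, a minimal standalone sketch (an illustration, not taken from any example here) of what Deferred debugging buys in test setups like the one above: when a failed Deferred is garbage collected without an errback, the "Unhandled error in Deferred" report also includes where the Deferred was created and fired.

import gc
import sys

from twisted.internet import defer
from twisted.python import log

log.startLogging(sys.stdout)
defer.setDebugging(True)

d = defer.Deferred()
d.errback(RuntimeError("boom"))  # failure with no errback attached
del d                            # dropping the last reference triggers the report
gc.collect()                     # only needed if a cycle kept the Deferred alive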
Example #15
def _twisted_debug():
    """
    When the ``AFKAK_TWISTED_DEBUG`` environment variable is set, enable
    debugging of deferreds and delayed calls.
    """
    if os.environ.get('AFKAK_TWISTED_DEBUG'):
        from twisted.internet import defer
        from twisted.internet.base import DelayedCall

        defer.setDebugging(True)
        DelayedCall.debug = True
Example #16
    def __init__(self, i, conf, port):
        self.cpu_blocking = conf[0]
        self.net_blocking = conf[1]
        self.io_blocking = conf[2]
        print "One client running on port %s" % port
        self.num = i
        self.url = "http://localhost:%s/zip" % port
        self.count = 0
        self.deferreds = []
        setDebugging(True)

        self.done_deferred = Deferred()
Example #17
def main(utility):
    args = _parse_args(utility)
    if args.debug:
        log.setLevel(level=logging.DEBUG)
        defer.setDebugging(True)
    if args.config:
        config = _parse_config_file(args.config, utility)
    else:
        config = _adapt_args_to_config(args, utility)
    reactor.callWhenRunning(utility.tx_main, args, config)
    reactor.run()
    sys.exit(_exit_status)
Example #18
 def __init__(self, config):
     self.config = config
     application = service.Application(
         self.config.get('bit', 'name').capitalize(), uid=1001, gid=1001)
     self.s = service.IServiceCollection(application)
     #self.s.setServiceParent(application)
     alsoProvides(application, IApplication)
     provideUtility(application, IApplication)
     provideUtility(Services(application), IServices)
     plugins = Plugins()
     provideUtility(plugins, IPlugins)
     plugins.loadPlugins()
     defer.setDebugging(True)
Example #19
    def parse_options(self):
        """Parses the command line options"""
        parser = self.make_option_parser()
        (options, args) = parser.parse_args()
        if options.logstderr and not options.foreground:
            parser.error('-s is only valid if running in foreground')
        if options.netbox and not options.onlyjob:
            parser.error('specifying a netbox requires the -J option')
        if options.multiprocess:
            options.pidlog = True
        if options.capture_vars:
            setDebugging(True)

        return options, args
Example #20
def main():
    args = parse_args()
    if args.debug:
        log.setLevel(level=logging.DEBUG)
        defer.setDebugging(True)
    if args.interactive:
        tx_main = interactive_main
    elif args.single_shot:
        tx_main = single_shot_main
    elif args.batch:
        tx_main = batch_main
    else:
        tx_main = long_running_main
    reactor.callWhenRunning(tx_main, args)
    reactor.run()
Example #21
    def setUp(self):
        defer.setDebugging(True)
        self.config = common.get_test_config()
        # get_test_config will load a new core.conf with the default values.
        # Must save to file so that torrent.py.TorrentOptions loads the default values
        self.config.core_config.save()
        test_component = TestComponent()
        self.torrent_handler = TorrentHandler(LOG)
        self.torrent_handler.download_torrent_file = test_component.download_torrent_file

        # Might be necessary for changes in master branch
        # yarss2.core.component = test_component

        self.core = Core("test")
        self.core.enable(config=self.config)
        self.core.torrent_handler = self.torrent_handler
Example #22
    def setUpClass(cls):
        if not os.environ.get('KAFKA_VERSION'):  # pragma: no cover
            log.warning("WARNING: KAFKA_VERSION not found in environment")
            return

        DEBUGGING = True
        setDebugging(DEBUGGING)
        DelayedCall.debug = DEBUGGING

        cls.zk = ZookeeperFixture.instance()
        cls.server = KafkaFixture.instance(0, cls.zk.host, cls.zk.port)

        # Start up the Twisted reactor in a thread. We need this before the
        # KafkaClient can work, since KafkaBrokerClient relies on the
        # reactor for its TCP connection
        cls.reactor, cls.thread = threaded_reactor()
Example #23
def main():
    args = parse_args()
    if args.debug:
        log.setLevel(level=logging.DEBUG)
        defer.setDebugging(True)
    if args.config:
        config = parse_config_file(args.config)
        do_summary = True
    elif args.remote and args.username and args.password and args.filter:
        config = adapt_args_to_config(args)
        do_summary = False
    else:
        print >>sys.stderr, "ERROR: You must specify a config file with -c " \
                            "or specify remote, username, password and filter"
        sys.exit(1)
    reactor.callWhenRunning(send_requests, config, do_summary)
    reactor.run()
    sys.exit(exit_status)
Example #24
 def testSwitchDebugging(self):
     defer.setDebugging(False)
     d = defer.Deferred()
     d.addBoth(lambda ign: None)
     defer.setDebugging(True)
     d.callback(None)
     defer.setDebugging(False)
     d = defer.Deferred()
     d.callback(None)
     defer.setDebugging(True)
     d.addBoth(lambda ign: None)
Example #25
def run():
    """Starts the child zeo process and then starts the twisted reactor running OMS"""

    parser = argparse.ArgumentParser(description='Start OMS')
    parser.add_argument('-d', action='store_true',
                        help='start in development mode with autorestart')
    parser.add_argument('--db', help='overrides db directory')
    parser.add_argument('--log', help='log file')
    parser.add_argument('-v', action='store_true', help='verbose logs')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--debug', help='waits for remote pdb attach using supplied password')
    group.add_argument('--winpdb', action='store_true', help='spawns winpdb and automatically attaches')

    args = parser.parse_args()

    conf = get_config_cmdline()
    if args.db:
        if not conf.has_section('db'):
            conf.add_section('db')
        conf.set('db', 'path', args.db)

    if args.log:
        if not conf.has_section('logging'):
            conf.add_section('logging')
        conf.set('logging', 'file', args.log)

    basedir = conf.get_base_dir()
    if basedir:
        os.chdir(basedir)

    defer.setDebugging(args.v)

    run_debugger(args)

    global _daemon_started
    _daemon_started = True

    if args and args.d:
        autoreload.main(run_app)
    else:
        run_app()
Example #26
    def testSwitchDebugging(self):
        # Make sure Deferreds can deal with debug state flipping
        # around randomly.  This is covering a particular fixed bug.
        defer.setDebugging(False)
        d = defer.Deferred()
        d.addBoth(lambda ign: None)
        defer.setDebugging(True)
        d.callback(None)

        defer.setDebugging(False)
        d = defer.Deferred()
        d.callback(None)
        defer.setDebugging(True)
        d.addBoth(lambda ign: None)
Example #27
def run():
    if not hasattr(tcp.Client, 'abortConnection'):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print 'Pausing for 3 seconds...'
        time.sleep(3)
    
    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: mogwai)',
        action='store', choices=sorted(realnets), default='mogwai', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from mogwaid>), or (dynamic)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('-i', '--numaddresses',
        help='number of mogwai auto-generated addresses to maintain for getwork dynamic address allocation',
        type=int, action='store', default=2, dest='numaddresses')
    parser.add_argument('-t', '--timeaddresses',
        help='seconds between acquisition of new address and removal of single old (default: 2 days or 172800s)',
        type=int, action='store', default=172800, dest='timeaddresses')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--web-static',
        help='use an alternative web frontend in this directory (otherwise use the built-in frontend)',
        type=str, action='store', default=None, dest='web_static')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work towards the development of p2pool (default: 1.0)',
        type=float, action='store', default=1.0, dest='donation_percentage')
    parser.add_argument('--iocp',
        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
        help='disable submitting caught exceptions to the author',
        action='store_true', default=False, dest='no_bugreport')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
        help='outgoing connections (default: 6)',
        type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    p2pool_group.add_argument('--external-ip', metavar='ADDR[:PORT]',
        help='specify your own public IP address instead of asking peers to discover it, useful for running dual WAN or asymmetric routing',
        type=str, action='store', default=None, dest='p2pool_external_ip')
    parser.add_argument('--disable-advertise',
        help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''',
        action='store_false', default=True, dest='advertise_ip')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own mogwai address (by setting their miner's username to a mogwai address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    mogwaid_group = parser.add_argument_group('mogwaid interface')
    mogwaid_group.add_argument('--mogwaid-config-path', metavar='MOGWAID_CONFIG_PATH',
        help='custom configuration file path (when mogwaid -conf option used)',
        type=str, action='store', default=None, dest='mogwaid_config_path')
    mogwaid_group.add_argument('--mogwaid-address', metavar='MOGWAID_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='mogwaid_address')
    mogwaid_group.add_argument('--mogwaid-rpc-port', metavar='MOGWAID_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from mogwai.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='mogwaid_rpc_port')
    mogwaid_group.add_argument('--mogwaid-rpc-ssl',
        help='connect to JSON-RPC interface using SSL',
        action='store_true', default=False, dest='mogwaid_rpc_ssl')
    mogwaid_group.add_argument('--mogwaid-p2p-port', metavar='MOGWAID_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from mogwai.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='mogwaid_p2p_port')
    
    mogwaid_group.add_argument(metavar='MOGWAID_RPCUSERPASS',
        help='mogwaid RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from mogwai.conf)',
        type=str, action='store', default=[], nargs='*', dest='mogwaid_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.mogwaid_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.mogwaid_rpc_username, args.mogwaid_rpc_password = ([None, None] + args.mogwaid_rpc_userpass)[-2:]
    
    if args.mogwaid_rpc_password is None:
        conf_path = args.mogwaid_config_path or net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''mogwai configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x\r\n'''
                '''\r\n'''
                '''Keep that password secret! After creating the file, restart mogwai.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'mogwaid_rpc_username', str),
            ('rpcpassword', 'mogwaid_rpc_password', str),
            ('rpcport', 'mogwaid_rpc_port', int),
            ('port', 'mogwaid_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if 'rpcssl' in contents and contents['rpcssl'] != '0':
                args.mogwaid_rpc_ssl = True
        if args.mogwaid_rpc_password is None:
            parser.error('''mogwai configuration file didn't contain an rpcpassword= line! Add one!''')
    
    if args.mogwaid_rpc_username is None:
        args.mogwaid_rpc_username = ''
    
    if args.mogwaid_rpc_port is None:
        args.mogwaid_rpc_port = net.PARENT.RPC_PORT
    
    if args.mogwaid_p2p_port is None:
        args.mogwaid_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')
    
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
    
    if args.address is not None and args.address != 'dynamic':
        try:
            args.pubkey_hash = mogwai_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception as e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    deferral.RobustLoopingCall(logfile.reopen).start(5)
    
    class ErrorReporter(object):
        def __init__(self):
            self.last_sent = None
        
        def emit(self, eventDict):
            if not eventDict["isError"]:
                return
            
            if self.last_sent is not None and time.time() < self.last_sent + 5:
                return
            self.last_sent = time.time()
            
            if 'failure' in eventDict:
                text = ((eventDict.get('why') or 'Unhandled Error')
                    + '\n' + eventDict['failure'].getTraceback())
            else:
                text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
            
            from twisted.web import client
            client.getPage(
                url='http://u.forre.st/p2pool_error.cgi',
                method='POST',
                postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
                timeout=15,
            ).addBoth(lambda x: None)
    if not args.no_bugreport:
        log.addObserver(ErrorReporter().emit)
    
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
    reactor.run()
Example #28
def _initialDebugSetup(config):
    # do this part of debug setup first for easy debugging of import failures
    if config['debug']:
        failure.startDebugMode()
    if config['debug'] or config['debug-stacktraces']:
        defer.setDebugging(True)
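
A hypothetical call site for the helper above; the only assumption is that config behaves like a mapping exposing the 'debug' and 'debug-stacktraces' keys the function reads.

# enable Deferred debugging only, leaving failure.startDebugMode() off
_initialDebugSetup({'debug': False, 'debug-stacktraces': True})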
Example #29
from twisted.internet import reactor, defer

import zope.interface as zi
from ipython1.kernel.controllerservice import IControllerBase
from ipython1.kernel import engineservice as es
from ipython1.test.util import DeferredTestCase
from ipython1.kernel.enginevanilla import \
    VanillaEngineServerFactoryFromControllerService, \
    VanillaEngineClientFactoryFromEngineService

from ipython1.test.engineservicetest import \
    IEngineCoreTestCase, \
    IEngineSerializedTestCase, \
    IEngineQueuedTestCase

defer.setDebugging(1)


class EngineVanillaTest(DeferredTestCase, IEngineCoreTestCase,
                        IEngineSerializedTestCase, IEngineQueuedTestCase):

    zi.implements(IControllerBase)

    def setUp(self):
        self.services = []
        self.clients = []
        self.servers = []

        # Start a server and append to self.servers
        self.sf = VanillaEngineServerFactoryFromControllerService(self)
        self.servers.append(reactor.listenTCP(10201, self.sf))
Example #30
    def __init__(self, config):
        """ Set up the server with a INDX. """

        self.config = config

        from twisted.internet.defer import setDebugging
        setDebugging(True)


        # enable ssl (or not)
        self.ssl = config['server'].get('ssl') or False

        # generate the base URLs
        self.server_url = self.config['server']['hostname']
        if not self.ssl:
            self.server_url = "http://" + self.server_url
            if self.config['server']['port'] != 80:
                self.server_url = self.server_url + ":" + str(self.config['server']['port'])
        else:
            self.server_url = "https://" + self.server_url
            if self.config['server']['port'] != 443:
                self.server_url = self.server_url + ":" + str(self.config['server']['port'])

        # get values to pass to web server
        self.server_address = self.config['server']['address']
        if self.server_address == "":
            self.server_address = "0.0.0.0"

        database.HOST = config['db']['host']
        database.PORT = config['db']['port']

        def auth_cb(can_auth):
            logging.debug("WebServer auth_cb, can_auth: {0}".format(can_auth))
            if can_auth:

                def checked_db_ok(server_id):
                    self.server_id = server_id # Gets the Server ID from the database
                    self.check_users().addCallbacks(lambda checked: self.server_setup(), err_cb)

                self.database.check_indx_db().addCallbacks(checked_db_ok, err_cb) # check indx DB exists, otherwise create it - then setup the server
            else:
                print "Authentication failed, check username and password are correct."
                reactor.stop()

        def auth_err(failure):
            logging.debug("WebServer err_cb, failure: {0}".format(failure))
            failure.trap(Exception)

            print "Authentication failed, check username and password are correct."
            reactor.stop()

        def err_cb(failure):
            logging.debug("WebServer err_cb, failure: {0}".format(failure))
            failure.trap(Exception)

        user,password = self.get_indx_user_password()
        self.database = database.IndxDatabase(config['indx_db'], user, password)
        self.tokens = token.TokenKeeper(self.database)
        self.indx_reactor = IndxReactor(self.tokens)
        self.database.set_reactor(self.indx_reactor) # ughh
        self.database.auth_indx(database = "postgres").addCallbacks(auth_cb, auth_err)
Example #31
    def __init__(self, player, threshold, options=None):
        """Initialize runtime.

        Initializes a runtime owned by the given player, with the given
        threshold and, optionally, a set of options. The runtime has no network
        connections and knows of no other players -- the
        :func:`create_runtime` function should be used instead to
        create a usable runtime.
        """
        assert threshold > 0, "Must use a positive threshold."
        #: ID of this player.
        self.id = player.id
        #: Shamir secret sharing threshold.
        self.threshold = threshold

        if options is None:
            parser = OptionParser()
            self.add_options(parser)
            self.options = parser.get_default_values()
        else:
            self.options = options

        if self.options.deferred_debug:
            from twisted.internet import defer
            defer.setDebugging(True)

        #: Pool of preprocessed data.
        self._pool = {}
        #: Description of needed preprocessed data.
        self._needed_data = {}

        #: Current program counter.
        __comp_id = self.options.computation_id
        if __comp_id is None:
            __comp_id = 0
        else:
            assert __comp_id > 0, "Non-positive ID: %d." % __comp_id
        self.program_counter = [__comp_id, 0]

        #: Connections to the other players.
        #:
        #: Mapping from from Player ID to :class:`ShareExchanger`
        #: objects.
        self.protocols = {}

        #: Number of known players.
        #:
        #: Equal to ``len(self.players)``, but storing it here is more
        #: direct.
        self.num_players = 0

        #: Information on players.
        #:
        #: Mapping from Player ID to :class:`Player` objects.
        self.players = {}
        # Add ourselves, but with no protocol since we won't be
        # communicating with ourselves.
        protocol = SelfShareExchanger(self.id, SelfShareExchangerFactory(self))
        protocol.transport = FakeTransport()
        self.add_player(player, protocol)

        #: Queue of deferreds and data.
        self.deferred_queue = deque()
        self.complex_deferred_queue = deque()
        #: Counter for calls of activate_reactor().
        self.activation_counter = 0
        #: Record the recursion depth.
        self.depth_counter = 0
        self.max_depth = 0
        #: Recursion depth limit by experiment, including security margin.
        self.depth_limit = int(sys.getrecursionlimit() / 50)
        #: Use deferred queues only if the ViffReactor is running.
        self.using_viff_reactor = isinstance(reactor, viff.reactor.ViffReactor)
Example #32
 def setUp(self):
     self.clock = task.Clock()
     ScheduledCallbackWrapper.clock = self.clock
     setDebugging(True)
Example #33
def run():
    if not hasattr(tcp.Client, 'abortConnection'):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print 'Pausing for 3 seconds...'
        time.sleep(3)
    
    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: dimecoin)',
        action='store', choices=sorted(realnets), default='dimecoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from dimecoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work towards the development of p2pool (default: 1.0)',
        type=float, action='store', default=1.0, dest='donation_percentage')
    parser.add_argument('--iocp',
        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
        help='disable submitting caught exceptions to the author',
        action='store_true', default=False, dest='no_bugreport')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
        help='outgoing connections (default: 6)',
        type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    dimecoind_group = parser.add_argument_group('dimecoind interface')
    dimecoind_group.add_argument('--dimecoind-address', metavar='dimecoind_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='dimecoind_address')
    dimecoind_group.add_argument('--dimecoind-rpc-port', metavar='dimecoind_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='dimecoind_rpc_port')
    dimecoind_group.add_argument('--dimecoind-rpc-ssl',
        help='connect to JSON-RPC interface using SSL',
        action='store_true', default=False, dest='dimecoind_rpc_ssl')
    dimecoind_group.add_argument('--dimecoind-p2p-port', metavar='dimecoind_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='dimecoind_p2p_port')
    
    dimecoind_group.add_argument(metavar='dimecoind_RPCUSERPASS',
        help='dimecoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='dimecoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.dimecoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.dimecoind_rpc_username, args.dimecoind_rpc_password = ([None, None] + args.dimecoind_rpc_userpass)[-2:]
    
    if args.dimecoind_rpc_password is None:
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x\r\n'''
                '''\r\n'''
                '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'dimecoind_rpc_username', str),
            ('rpcpassword', 'dimecoind_rpc_password', str),
            ('rpcport', 'dimecoind_rpc_port', int),
            ('port', 'dimecoind_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if args.dimecoind_rpc_password is None:
            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
    
    if args.dimecoind_rpc_username is None:
        args.dimecoind_rpc_username = ''
    
    if args.dimecoind_rpc_port is None:
        args.dimecoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.dimecoind_p2p_port is None:
        args.dimecoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')
    
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
Example #34
 def setUp(self):
     self._deferredWasDebugging = defer.getDebugging()
     defer.setDebugging(True)
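
The example only shows setUp; a matching tearDown (an assumption, not shown in the original) would presumably restore whatever debug state was saved.

def tearDown(self):
    # put the Deferred-debugging flag back the way this test found it
    defer.setDebugging(self._deferredWasDebugging)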
Example #35
def load_config():

    # Open config file
    try:
        with open(CONFIG_FILE, 'r') as config_file:
            conf = json.load(config_file)
    except IOError as e:
        print 'ERROR: Could not open %s file : ' % CONFIG_FILE, e
        exit(1)
    except ValueError as e:
        print 'ERROR: Config file is not valid JSON', e
        exit(1)

    # Set default noproxy setting if missing
    if "mongo-scrapy" in conf:
        if 'proxy_host' not in conf['mongo-scrapy']:
            conf['mongo-scrapy']['proxy_host'] = ''
        if 'proxy_port' not in conf['mongo-scrapy']:
            conf['mongo-scrapy']['proxy_port'] = 3128
        # Ensure retrocompat
        conf['mongo-scrapy']['proxy'] = {
            'host': conf['mongo-scrapy']['proxy_host'],
            'port': conf['mongo-scrapy']['proxy_port']
        }
        if 'store_crawled_html_content' not in conf['mongo-scrapy']:
            conf['mongo-scrapy']['store_crawled_html_content'] = True

    # Set default creation rules if missing
    if "defaultCreationRule" not in conf:
        conf["defaultCreationRule"] = "domain"
    if "creationRules" not in conf:
        conf["creationRules"] = {}

    # Auto unset phantomJs autoretry if missing
    if "phantom" in conf and "autoretry" not in conf["phantom"]:
        conf["phantom"]["autoretry"] = False

    # Check sanity
    try:
        check_conf_sanity(conf, GLOBAL_CONF_SCHEMA)
    except Exception as e:
        print e
        exit(1)

    # Test MongoDB server
    mongoconf = conf['mongo-scrapy']
    db = MongoClient(
        os.environ.get('HYPHE_MONGODB_HOST', mongoconf['host']),
        int(os.environ.get('HYPHE_MONGODB_PORT',
                           mongoconf['mongo_port'])))[mongoconf.get(
                               'db_name', mongoconf.get('project'))]
    try:
        test = list(db['%s.logs' % DEFAULT_CORPUS].find())
    except Exception as x:
        print "ERROR: Cannot connect to mongoDB, please check your server and the configuration in %s" % CONFIG_FILE
        if conf['DEBUG']:
            print x
        exit(1)

    # Turn on Twisted debugging
    if conf['DEBUG']:
        defer.setDebugging(True)

    return conf
Example #36
#!/usr/bin/env python

from twisted.internet.protocol import Protocol, Factory
from twisted.internet import stdio, reactor, defer
from twisted.protocols import basic
from twisted.web.server import Site
from os import linesep

defer.setDebugging(True)


class ConnCounter(Protocol):
    def connectionMade(self):
        self.factory.conn_count += 1
        self.transport.write('{}\n'.format(
            self.factory.conn_count).encode('utf-8'))


class PrintConnCountAndDie(basic.LineReceiver):
    delimiter = linesep.encode("ascii")

    def __init__(self, conn_factory):
        self.conn_factory = conn_factory

    # @defer.inlineCallbacks
    def lineReceived(self, line):
        if line in [b'end', b'die']:
            # yield self.sendLine('{}\n'.format(self.conn_factory.conn_count).encode('utf-8'))
            print(self.conn_factory.conn_count)
            reactor.stop()
        else:
Example #37
def main(executable_path=None):
    """
    THIS IS THE ENTRY POINT OF THE PROGRAM!
    """
    global AppDataDir

    import warnings
    warnings.filterwarnings("ignore", message="You do not have a working installation of the service_identity module")

    try:
        from logs import lg
    except:
        dirpath = os.path.dirname(os.path.abspath(sys.argv[0]))
        sys.path.insert(0, os.path.abspath(os.path.join(dirpath, '..')))
        # sys.path.insert(0, os.path.abspath(os.path.join(dirpath, '..', '..')))
        from distutils.sysconfig import get_python_lib
        sys.path.append(os.path.join(get_python_lib(), 'bitdust'))
        try:
            from logs import lg
        except:
            print 'ERROR! can not import working code.  Python Path:'
            print '\n'.join(sys.path)
            return 1

    # init IO module, update locale
    from system import bpio
    bpio.init()

    # sys.excepthook = lg.exception_hook

    if not bpio.isFrozen():
        try:
            from twisted.internet.defer import setDebugging
            setDebugging(True)
            # from twisted.python import log as twisted_log
            # twisted_log.startLogging(sys.stdout)
        except:
            lg.warn('python-twisted is not installed')

    pars = parser()
    (opts, args) = pars.parse_args()

    if opts.appdir:
        appdata = opts.appdir
        AppDataDir = appdata

    else:
        curdir = os.getcwd()  # os.path.dirname(os.path.abspath(sys.executable))
        appdatafile = os.path.join(curdir, 'appdata')
        defaultappdata = os.path.join(os.path.expanduser('~'), '.bitdust')
        appdata = defaultappdata
        if os.path.isfile(appdatafile):
            try:
                appdata = os.path.abspath(open(appdatafile, 'rb').read().strip())
            except:
                appdata = defaultappdata
            if not os.path.isdir(appdata):
                appdata = defaultappdata
        AppDataDir = appdata

    cmd = ''
    if len(args) > 0:
        cmd = args[0].lower()

    # ask to count time for each log line from that moment, not absolute time
    lg.life_begins()
    # try to read debug level value at the early stage - no problem if fail here
    try:
        if cmd == '' or cmd == 'start' or cmd == 'go' or cmd == 'show' or cmd == 'open':
            lg.set_debug_level(int(
                bpio._read_data(
                    os.path.abspath(
                        os.path.join(appdata, 'config', 'logs', 'debug-level')))))
    except:
        pass

    if opts.no_logs:
        lg.disable_logs()

    #---logpath---
    logpath = os.path.join(appdata, 'logs', 'start.log')
    if opts.output:
        logpath = opts.output

    need_redirecting = False

    if bpio.Windows() and not bpio.isConsoled():
        need_redirecting = True

    if logpath != '':
        lg.open_log_file(logpath)
        lg.out(2, 'bpmain.main log file opened ' + logpath)
        if bpio.Windows() and bpio.isFrozen():
            need_redirecting = True

    if need_redirecting:
        lg.stdout_start_redirecting()
        lg.out(2, 'bpmain.main redirecting started')

    # TODO: temporary solution to record run-time errors
    try:
        if os.path.isfile(os.path.join(appdata, 'logs', 'exception.log')):
            os.remove(os.path.join(appdata, 'logs', 'exception.log'))
    except:
        pass

    if opts.debug or str(opts.debug) == '0':
        lg.set_debug_level(opts.debug)

    # if opts.quite and not opts.verbose:
    #     lg.disable_output()

    if opts.verbose:
        copyright_text()

    lg.out(2, 'bpmain.main started ' + time.asctime())

    overDict = override_options(opts, args)

    lg.out(2, 'bpmain.main args=%s' % str(args))

    #---start---
    if cmd == '' or cmd == 'start' or cmd == 'go':
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if appList:
            lg.out(0, 'BitDust already started, found another process: %s\n' % str(appList))
            bpio.shutdown()
            return 0

        UI = ''
        # if cmd == 'show' or cmd == 'open':
        # UI = 'show'
        try:
            ret = run(UI, opts, args, overDict, executable_path)
        except:
            lg.exc()
            ret = 1
        bpio.shutdown()
        return ret

    #---daemon---
    elif cmd == 'detach' or cmd == 'daemon' or cmd == 'background':
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if len(appList) > 0:
            lg.out(0, 'main BitDust process already started: %s\n' % str(appList))
            bpio.shutdown()
            return 0
        from lib import misc
        lg.out(0, 'new BitDust process will be started in daemon mode, finishing current process\n')
        bpio.shutdown()
        result = misc.DoRestart(detach=True)
        if result is not None:
            try:
                result = int(result)
            except:
                try:
                    result = result.pid
                except:
                    pass
        return 0

    #---restart---
    elif cmd == 'restart' or cmd == 'reboot':
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        ui = False
        if len(appList) > 0:
            lg.out(0, 'found main BitDust process: %s, sending "restart" command ... ' % str(appList), '')

            def done(x):
                lg.out(0, 'DONE\n', '')
                from twisted.internet import reactor
                if reactor.running and not reactor._stopped:
                    reactor.stop()

            def failed(x):
                ok = str(x).count('Connection was closed cleanly') > 0
                from twisted.internet import reactor
                if ok and reactor.running and not reactor._stopped:
                    lg.out(0, 'DONE\n', '')
                    reactor.stop()
                    return
                lg.out(0, 'FAILED while killing previous process - do HARD restart\n', '')
                try:
                    kill()
                except:
                    lg.exc()
                from lib import misc
                reactor.addSystemEventTrigger('after', 'shutdown', misc.DoRestart, param='show' if ui else '', detach=True)
                reactor.stop()
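            # Note: the errback above treats a "Connection was closed cleanly"
            # failure as success, since the old process presumably drops the
            # JSON-RPC connection while it shuts down in response to "restart".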
            try:
                from twisted.internet import reactor
                # from interface.command_line import run_url_command
                # d = run_url_command('?action=restart', False)
                # from interface import cmd_line
                # d = cmd_line.call_xmlrpc_method('restart', ui)
                from interface import cmd_line_json
                d = cmd_line_json.call_jsonrpc_method('restart', ui)
                d.addCallback(done)
                d.addErrback(failed)
                reactor.run()
                bpio.shutdown()
                return 0
            except:
                lg.exc()
                bpio.shutdown()
                return 1
        else:
            ui = ''
            if cmd == 'restart':
                ui = 'show'
            try:
                ret = run(ui, opts, args, overDict, executable_path)
            except:
                lg.exc()
                ret = 1
            bpio.shutdown()
            return ret

    #---show---
    elif cmd == 'show' or cmd == 'open':
        from main import settings
        if not bpio.isGUIpossible() and not settings.NewWebGUI():
            lg.out(0, 'BitDust GUI is turned OFF\n')
            bpio.shutdown()
            return 0
        if bpio.Linux() and not bpio.X11_is_running():
            lg.out(0, 'this operating system does not support X11 interface\n')
            bpio.shutdown()
            return 0
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if len(appList) == 0:
            try:
                ret = run('show', opts, args, overDict, executable_path)
            except:
                lg.exc()
                ret = 1
            bpio.shutdown()
            return ret
        lg.out(0, 'found main BitDust process: %s, start the GUI\n' % str(appList))
        ret = show()
        bpio.shutdown()
        return ret

    #---stop---
    elif cmd == 'stop' or cmd == 'kill' or cmd == 'shutdown':
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if len(appList) > 0:
            lg.out(0, 'found main BitDust process: %s, sending command "exit" ... ' % str(appList), '')
            try:
                from twisted.internet import reactor
                # from interface.command_line import run_url_command
                # url = '?action=exit'
                # run_url_command(url, False).addBoth(wait_then_kill)
                # reactor.run()
                # bpio.shutdown()

                def _stopped(x):
                    lg.out(0, 'BitDust process finished correctly\n')
                    reactor.stop()
                    bpio.shutdown()
                # from interface import cmd_line
                # cmd_line.call_xmlrpc_method('stop').addBoth(_stopped)
                from interface import cmd_line_json
                cmd_line_json.call_jsonrpc_method('stop').addBoth(_stopped)
                reactor.run()
                return 0
            except:
                lg.exc()
                ret = kill()
                bpio.shutdown()
                return ret
        else:
            lg.out(0, 'BitDust is not running at the moment\n')
            bpio.shutdown()
            return 0

    #---uninstall---
    elif cmd == 'uninstall':
        def do_spawn(x=None):
            from main.settings import WindowsStarterFileName
            starter_filepath = os.path.join(bpio.getExecutableDir(), WindowsStarterFileName())
            lg.out(0, "bpmain.main bitstarter.exe path: %s " % starter_filepath)
            if not os.path.isfile(starter_filepath):
                lg.out(0, "ERROR: %s not found\n" % starter_filepath)
                bpio.shutdown()
                return 1
            cmdargs = [os.path.basename(starter_filepath), 'uninstall']
            lg.out(0, "bpmain.main os.spawnve cmdargs=" + str(cmdargs))
            ret = os.spawnve(os.P_DETACH, starter_filepath, cmdargs, os.environ)
            bpio.shutdown()
            return ret

        def do_reactor_stop_and_spawn(x=None):
            lg.out(0, 'BitDust process finished correctly\n')
            reactor.stop()
            ret = do_spawn()
            bpio.shutdown()
            return ret
        lg.out(0, 'bpmain.main UNINSTALL!')
        if not bpio.Windows():
            lg.out(0, 'This command can be used only under OS Windows.\n')
            bpio.shutdown()
            return 0
        if not bpio.isFrozen():
            lg.out(0, 'You are running BitDust from sources, uninstall command is available only for installable version.\n')
            bpio.shutdown()
            return 0
        appList = bpio.find_process(['bitdust.exe', ])
        if len(appList) > 0:
            lg.out(0, 'found main BitDust process...   ', '')
            try:
                # from twisted.internet import reactor
                # from interface.command_line import run_url_command
                # url = '?action=exit'
                # run_url_command(url).addBoth(do_reactor_stop_and_spawn)
                # reactor.run()
                # bpio.shutdown()
                # from interface import cmd_line
                # cmd_line.call_xmlrpc_method('stop').addBoth(do_reactor_stop_and_spawn)
                from interface import cmd_line_json
                cmd_line_json.call_jsonrpc_method('stop').addBoth(do_reactor_stop_and_spawn)
                reactor.run()
                return 0
            except:
                lg.exc()
        ret = do_spawn()
        bpio.shutdown()
        return ret

    #---command_line---
    # from interface import command_line as cmdln
    # from interface import cmd_line as cmdln
    from interface import cmd_line_json as cmdln
    ret = cmdln.run(opts, args, pars, overDict, executable_path)
    if ret == 2:
        print usage_text()
    bpio.shutdown()
    return ret
Example #38
0
def set_twisted_debug():
    DelayedCall.debug = True
    failure.startDebugMode()
    defer.setDebugging(True)
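
# This helper assumes the usual Twisted imports elsewhere in its module,
# roughly:
#     from twisted.internet import defer
#     from twisted.internet.base import DelayedCall
#     from twisted.python import failure
# Calling it before reactor.run() makes Deferreds, DelayedCalls and Failures
# keep their creation tracebacks, at a noticeable runtime cost, so it is
# normally gated behind a debug or verbose flag.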
Example #39
0
def set_up(self):
    defer.setDebugging(True)
    tests_common.set_tmp_config_dir()
    client.start_standalone()
    client.core.enable_plugin('Stats')
    return component.start()
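
# Enabling Deferred debugging in a test fixture like this means that
# AlreadyCalledError and unhandled-failure reports include the traceback of
# where each Deferred was created, which makes failing tests easier to trace.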
Example #40
0
    def theLoop(self):
        # Setup logging if not done already
        if not self.LoggingSetupDone:
            if not self.setup_logging():
                print("Unable to setup logging")
            # Set logger verbosity level
            if verboseLevel >= 3:
                log_level = logging.DEBUG
                setDebugging(True)
            elif verboseLevel == 2:
                log_level = logging.INFO
            elif verboseLevel == 1:
                log_level = logging.WARN
            else:
                log_level = logging.ERROR
            app.logger.setLevel(log_level)
            self.LoggingSetupDone = True
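            # Note: setDebugging(True) is only enabled at the highest verbosity
            # level, because it records a creation traceback for every Deferred,
            # which helps debugging but is noticeably slower.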

        # Stop loop if we got the signal to quit
        if self.stop:
            reactor.stop()
            return False

        self.logger.info("")
        self.logger.info("#####################################################")
        self.logger.info("")


        ########################################################################################################################################################################

        # Create a Graphite Event and exit
        if (app.locationTag):
            self.stop = True
            try:
                if self.create_graphite_event("Location changed", [ self.config['eventTag_LocationChanged'] ], 'Location: {}'.format(self.locationTag)):
                    self.logger.info("Successfully created the event.")
                else:
                    self.logger.error("Failed to create the event.")
                    return False
            except Exception as e:
                self.logger.error("Failed to create the event. Exception: {}".format(str(e)))
                return False
            return True

        ########################################################################################################################################################################

        # Reboot the Huawei LTE/5G modem and exit
        if (app.reboot):
            self.stop = True
            try:
                self.logger.info("# Connecting...")
                self.connection = AuthorizedConnection('http://{}:{}@{}/'.format(self.config['router_username'], self.config['router_password'], self.config['router_hostname'])) # Login required even for signal data
                client = Client(self.connection)
                self.logger.info("Done.")

                self.logger.warning("# Rebooting...")
                self.logger.info(client.device.reboot())
                self.logger.warning("# Done.")
            except Exception as e:
                self.logger.error("Unable to reboot the device. Exception: {}".format(str(e)))
                return False
            return True

        ########################################################################################################################################################################
        # Initialize Graphyte (for sending data)

        # Get current time
        ts = time.time()

        if (self.sendData2Graphite):
            try:
                # Init #       hostname,   port,    prefix, timeout, interval
                graphyte.init(self.config['graphite_hostname'], self.config['graphite_port']) # Skip prefix, timeout and interval (no data?)
            except Exception as e:
                self.logger.error("Unable to initialize Graphyte (for sending metrics to Graphite). Exception: {}".format(str(e)))

        ########################################################################################################################################################################
        # Create & send test data to Graphite and exit
        if self.sendTestData:
            for signalDataKey in self.config['signalDataKeys']:
                try:
                    if (self.testData[signalDataKey + '_value'] < self.testData[signalDataKey + '_max']):
                        self.testData[signalDataKey + '_value'] += 1
                    else:
                        self.testData[signalDataKey + '_value'] = self.testData[signalDataKey + '_min']
                except Exception as e:
                    self.logger.error("Exception: {}".format(str(e)))

            for metric, value in sorted(self.testData.items()):
                
                # Replace dots with underscores
                safe_metric = metric.replace('.', '_')
            
                # Build the metric path from general prefix, 'test' and key name (metric)
                metric_path = self.config['graphite_prefix'] + ".test." + safe_metric

                # Only act on _values, skip originals
                if bool(re.match('.*_value$', metric)):
                    if bool(re.match('.*lfreq.*', metric)):
                        value = value * 1000

                    if (self.sendData2Graphite):
                        sendDataText = "Sending *test* data"
                    else:
                        sendDataText = "Simulating (not sending) *test* data"

                    self.logger.info("%s for '%s'. ts=%d => %s" % (sendDataText, metric_path, ts, datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')))
                    self.logger.debug("Value: '{}'".format(value))

                    if (self.sendData2Graphite):
                        try:
                            # send data - metric_path, value, [timestamp, tags (dict)]
                            graphyte.send(metric_path, value, ts)
                        except Exception as e:
                            self.logger.error("Unable to send the test data to Graphite. Exception: {}".format(str(e)))
                            return False
                else:
                    # Skipping original values, as these can't be saved as strings.
                    pass
            self.logger.warning("Success. Loop done. Waiting max. {} seconds for the next run.".format(self.config['loopInterval']))
            return True

        ################################################################################
        # Get real data and send it to Graphite

        # Check if logged in or login timed out...
        try:
            if self.connection.logged_in == False or self.connection._is_login_timeout() == True:
                self.logger.info("Not already logged in OR login has timed out. We need to (re)connect.")
                raise Exception('not_logged_in')

            self.logger.debug("#self.connection.logged_in: {}".format(self.connection.logged_in))
            self.logger.debug("#self.connection._is_login_timeout(): {}".format(self.connection._is_login_timeout()))
        # ... if not, (re)connect
        except:
            try:
                self.logger.info("# Connecting...")
                self.connection = AuthorizedConnection('http://{}:{}@{}/'.format(self.config['router_username'], self.config['router_password'], self.config['router_hostname'])) # Login required even for signal data
                self.logger.info("# Done.")
            except Exception as e:
                self.logger.warning("Failed on 1st try to connect OR we got the already logged in exception.")
                self.logger.warning("Exception: {}".format(str(e)))
        # Try to get data from the API...
        try:
            client = Client(self.connection)
            APIsignalData = client.device.signal()
        except Exception as e1:
            # ... If that failed, reconnect first and then try again.
            try:
                self.logger.info("# Connecting...")
                self.connection = AuthorizedConnection('http://{}:{}@{}/'.format(self.config['router_username'], self.config['router_password'], self.config['router_hostname'])) # Login required even for signal data
                self.logger.info("# Done.")

                client = Client(self.connection)
                APIsignalData = client.device.signal()
            except Exception as e2:
                self.logger.warning("Failed on 2nd try to connect OR we got the already logged in exception.")
                self.logger.warning("Exception (e1): {}".format(str(e1)))
                self.logger.warning("Exception (e2): {}".format(str(e2)))
                return False

        # Get values for the signalDataKeys (variables) from the returned API data, convert the string values to floats (returns None aka. null if it fails), ...
        # ... strip specific extra characters (check self.signalData_strip_regexp) and save the original value as well.
        signalData2Graphite = dict()
        for signalDataKey in self.config['signalDataKeys']:
            try:
                signalData2Graphite[signalDataKey + '_original'] = APIsignalData[signalDataKey]
                signalData2Graphite[signalDataKey + '_value'] = floatOrNull(re.sub(r"{}".format(self.signalData_strip_regexp), "", APIsignalData[signalDataKey]))
            except:
                pass
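        # floatOrNull() is a helper defined elsewhere in this project; a
        # minimal equivalent (an assumption, not the project's exact code)
        # would be:
        #
        #     def floatOrNull(value):
        #         try:
        #             return float(value)
        #         except (TypeError, ValueError):
        #             return None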

        # Get current time
        ts = time.time()

        # Check if Cell ID changed
        cell_id_changed = False
        try:
            if (self.saveData['latest_cell_id']):
                pass
            else:
                cell_id_changed = True
        except:
            self.saveData['latest_cell_id'] = ''
            cell_id_changed = True
        try:
            if (APIsignalData['cell_id'] != self.saveData['latest_cell_id']):
                self.logger.info("*** CELL ID changed.")
                cell_id_changed = True
        except:
            pass

        # Create Cell ID changed Graphite Event
        failed_to_send_cell_id_changed = False
        if (cell_id_changed):
            if (self.sendData2Graphite):
                sendDataText = "Creating an Event"
            else:
                sendDataText = "Simulating an Event creation"

            self.logger.info("%s ('%s'). ts=%d => %s" % (sendDataText, self.config['eventTag_CellIDchanged'], ts, datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')))
            self.logger.debug("Cell ID: {}/{} (Old/New)".format(self.saveData['latest_cell_id'], APIsignalData['cell_id']))

            if (self.sendData2Graphite):
                try:
                    if self.create_graphite_event("Cell ID changed", [self.config['eventTag_CellIDchanged'], "cell_id_{}".format(APIsignalData['cell_id'])], 'Cell ID: {}/{} (Old/New)'.format(self.saveData['latest_cell_id'], APIsignalData['cell_id'])):
                        self.logger.warning("Successfully created the event.")

                        # Save the Cell ID change to a variable and the JSON file
                        self.saveData['latest_cell_id'] = APIsignalData['cell_id']
                        self.saveJSONfile(self.saveDataFilePath, self.saveData)
                    else:
                        self.logger.error("Failed to create the event in Graphite. Unknown error.")
                        failed_to_send_cell_id_changed = True
                except Exception as e:
                    self.logger.error("Failed to create the event in Graphite. Exception: {}".format(str(e)))
                    failed_to_send_cell_id_changed = True

        # Go through all the values and send them to Graphite
        for extra_prefix in {"all", 'by_cell_id.' + APIsignalData['cell_id']}: # Send the data with the extra prefixes 'all' and by the Cell ID (double data stored, but you can later on filter the data per Cell ID as well)
            for metric, value in sorted(signalData2Graphite.items()):
                
                # Replace dots with underscores
                safe_metric = metric.replace('.', '_')
            
                # Build the metric path from general prefix, extra prefix and key name (metric)
                metric_path = self.config['graphite_prefix'] + "." + extra_prefix + "." + safe_metric

                # Only act on _values, skip originals
                if bool(re.match('.*_value$', metric)):
                    haveOriginalValue = True
                    try:
                        # Get original value
                        original_value_key = re.sub(r"_value", "_original", metric)
                        original_value = signalData2Graphite[original_value_key]
                    except Exception as e:
                        haveOriginalValue = False
                        self.logger.warning("Unable to get the original value. Exception: {}".format(str(e)))

                    if self.sendData2Graphite:
                        sendDataText = "Sending data"
                    else:
                        sendDataText = "Simulating (not sending) data"

                    self.logger.info("%s for '%s'. ts=%d => %s" % (sendDataText, metric_path, ts, datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')))

                    if haveOriginalValue:
                        self.logger.debug("Value: '{}' (Original_value: '{}')".format(value, original_value))
                    else:
                        self.logger.debug("Value: '{}'".format(value))

                    if self.sendData2Graphite:
                        try:
                            # send data - metric_path, value, [timestamp, tags (dict)]
                            graphyte.send(metric_path, value, ts)
                        except Exception as e:
                            self.logger.error("Unable to send the data to Graphite. Exception: {}".format(str(e)))
                            return False
                else:
                    # Skipping original values, as these can't be saved as strings.
                    pass
        


        # Save latest cell ID only if we successfully sent the data to Graphite
        if (self.sendData2Graphite and not failed_to_send_cell_id_changed):
            self.saveData['latest_cell_id'] = APIsignalData['cell_id']
            self.saveJSONfile(self.saveDataFilePath, self.saveData)
        self.logger.warning("Success. Loop done. Waiting max. {} seconds for the next run.".format(self.config['loopInterval']))
        return True
Example #41
0
from twisted.internet.address import IPv4Address

from twisted.python.failure import Failure

from twisted.web.client import Agent, FileBodyProducer
from twisted.web.http_headers import Headers
from twisted.web.resource import Resource
from twisted.web.server import Site, NOT_DONE_YET

from wind import AbstractWind, AbstractTCPWind, TCPWindFactory

from datetime import datetime

from StringIO import StringIO

from twisted.internet.defer import setDebugging
setDebugging(True)

datamodel = '''-- HTTP-DTN Data model

PRAGMA foreign_keys = ON;

CREATE TABLE IF NOT EXISTS packages (
	id INTEGER PRIMARY KEY AUTOINCREMENT,
	name VARCHAR(160),
	md5 VARCHAR(32),
	size INTEGER,

	UNIQUE (name,md5)
);

CREATE UNIQUE INDEX IF NOT EXISTS packgesIDIndex ON packages(id);
Example #42
0
def run():
    if not hasattr(tcp.Client, "abortConnection"):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print "Pausing for 3 seconds..."
        time.sleep(3)

    realnets = dict((name, net) for name, net in networks.nets.iteritems() if "_testnet" not in name)

    parser = fixargparse.FixedArgumentParser(
        description="p2pool (version %s)" % (p2pool.__version__,), fromfile_prefix_chars="@"
    )
    parser.add_argument("--version", action="version", version=p2pool.__version__)
    parser.add_argument(
        "--net",
        help="use specified network (default: bitcoin)",
        action="store",
        choices=sorted(realnets),
        default="bitcoin",
        dest="net_name",
    )
    parser.add_argument(
        "--testnet",
        help="""use the network's testnet""",
        action="store_const",
        const=True,
        default=False,
        dest="testnet",
    )
    parser.add_argument(
        "--debug", help="enable debugging mode", action="store_const", const=True, default=False, dest="debug"
    )
    parser.add_argument(
        "-a",
        "--address",
        help="generate payouts to this address (default: <address requested from bitcoind>)",
        type=str,
        action="store",
        default=None,
        dest="address",
    )
    parser.add_argument(
        "--datadir",
        help="store data in this directory (default: <directory run_p2pool.py is in>/data)",
        type=str,
        action="store",
        default=None,
        dest="datadir",
    )
    parser.add_argument(
        "--logfile",
        help="""log to this file (default: data/<NET>/log)""",
        type=str,
        action="store",
        default=None,
        dest="logfile",
    )
    parser.add_argument(
        "--merged",
        help="call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)",
        type=str,
        action="append",
        default=[],
        dest="merged_urls",
    )
    parser.add_argument(
        "--give-author",
        metavar="DONATION_PERCENTAGE",
        help="donate this percentage of work towards the development of p2pool (default: 1.0)",
        type=float,
        action="store",
        default=1.0,
        dest="donation_percentage",
    )
    parser.add_argument(
        "--iocp",
        help="use Windows IOCP API in order to avoid errors due to large number of sockets being open",
        action="store_true",
        default=False,
        dest="iocp",
    )
    parser.add_argument(
        "--irc-announce",
        help="announce any blocks found on irc://irc.freenode.net/#p2pool",
        action="store_true",
        default=False,
        dest="irc_announce",
    )
    parser.add_argument(
        "--no-bugreport",
        help="disable submitting caught exceptions to the author",
        action="store_true",
        default=False,
        dest="no_bugreport",
    )

    p2pool_group = parser.add_argument_group("p2pool interface")
    p2pool_group.add_argument(
        "--p2pool-port",
        metavar="PORT",
        help="use port PORT to listen for connections (forward this port from your router!) (default: %s)"
        % ", ".join("%s:%i" % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int,
        action="store",
        default=None,
        dest="p2pool_port",
    )
    p2pool_group.add_argument(
        "-n",
        "--p2pool-node",
        metavar="ADDR[:PORT]",
        help="connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses",
        type=str,
        action="append",
        default=[],
        dest="p2pool_nodes",
    )
    parser.add_argument(
        "--disable-upnp",
        help="""don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer""",
        action="store_false",
        default=True,
        dest="upnp",
    )
    p2pool_group.add_argument(
        "--max-conns",
        metavar="CONNS",
        help="maximum incoming connections (default: 40)",
        type=int,
        action="store",
        default=40,
        dest="p2pool_conns",
    )
    p2pool_group.add_argument(
        "--outgoing-conns",
        metavar="CONNS",
        help="outgoing connections (default: 6)",
        type=int,
        action="store",
        default=6,
        dest="p2pool_outgoing_conns",
    )

    worker_group = parser.add_argument_group("worker interface")
    worker_group.add_argument(
        "-w",
        "--worker-port",
        metavar="PORT or ADDR:PORT",
        help="listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)"
        % ", ".join("%s:%i" % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str,
        action="store",
        default=None,
        dest="worker_endpoint",
    )
    worker_group.add_argument(
        "-f",
        "--fee",
        metavar="FEE_PERCENTAGE",
        help="""charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)""",
        type=float,
        action="store",
        default=0,
        dest="worker_fee",
    )

    bitcoind_group = parser.add_argument_group("bitcoind interface")
    bitcoind_group.add_argument(
        "--bitcoind-address",
        metavar="BITCOIND_ADDRESS",
        help="connect to this address (default: 127.0.0.1)",
        type=str,
        action="store",
        default="127.0.0.1",
        dest="bitcoind_address",
    )
    bitcoind_group.add_argument(
        "--bitcoind-rpc-port",
        metavar="BITCOIND_RPC_PORT",
        help="""connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)"""
        % ", ".join("%s:%i" % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int,
        action="store",
        default=None,
        dest="bitcoind_rpc_port",
    )
    bitcoind_group.add_argument(
        "--bitcoind-rpc-ssl",
        help="connect to JSON-RPC interface using SSL",
        action="store_true",
        default=False,
        dest="bitcoind_rpc_ssl",
    )
    bitcoind_group.add_argument(
        "--bitcoind-p2p-port",
        metavar="BITCOIND_P2P_PORT",
        help="""connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)"""
        % ", ".join("%s:%i" % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int,
        action="store",
        default=None,
        dest="bitcoind_p2p_port",
    )

    bitcoind_group.add_argument(
        metavar="BITCOIND_RPCUSERPASS",
        help="bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)",
        type=str,
        action="store",
        default=[],
        nargs="*",
        dest="bitcoind_rpc_userpass",
    )

    args = parser.parse_args()

    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False

    net_name = args.net_name + ("_testnet" if args.testnet else "")
    net = networks.nets[net_name]

    datadir_path = os.path.join(
        (os.path.join(os.path.dirname(sys.argv[0]), "data") if args.datadir is None else args.datadir), net_name
    )
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)

    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error("a maximum of two arguments are allowed")
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]

    if args.bitcoind_rpc_password is None:
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error(
                """Bitcoin configuration file not found. Manually enter your RPC password.\r\n"""
                """If you actually haven't created a configuration file, you should create one at %s with the text:\r\n"""
                """\r\n"""
                """server=1\r\n"""
                """rpcpassword=%x\r\n"""
                """\r\n"""
                """Keep that password secret! After creating the file, restart Bitcoin."""
                % (conf_path, random.randrange(2 ** 128))
            )
        conf = open(conf_path, "rb").read()
        contents = {}
        for line in conf.splitlines(True):
            if "#" in line:
                line = line[: line.index("#")]
            if "=" not in line:
                continue
            k, v = line.split("=", 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ("rpcuser", "bitcoind_rpc_username", str),
            ("rpcpassword", "bitcoind_rpc_password", str),
            ("rpcport", "bitcoind_rpc_port", int),
            ("port", "bitcoind_p2p_port", int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if args.bitcoind_rpc_password is None:
            parser.error("""Bitcoin configuration file didn't contain an rpcpassword= line! Add one!""")

    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ""

    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT

    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT

    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT

    if args.p2pool_outgoing_conns > 10:
        parser.error("""--outgoing-conns can't be more than 10""")

    if args.worker_endpoint is None:
        worker_endpoint = "", net.WORKER_PORT
    elif ":" not in args.worker_endpoint:
        worker_endpoint = "", int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(":", 1)
        worker_endpoint = addr, int(port)

    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error("error parsing address: " + repr(e))
Example #43
0
def main(executable_path=None, start_reactor=True):
    """
    THE ENTRY POINT
    """
    global AppDataDir

    pars = parser()
    (opts, args) = pars.parse_args()

    if opts.coverage:
        import coverage  # @UnresolvedImport
        cov = coverage.Coverage(config_file=opts.coverage_config)
        cov.start()

    overDict = override_options(opts, args)

    cmd = ''
    if len(args) > 0:
        cmd = args[0].lower()

    try:
        from system import deploy
    except:
        dirpath = os.path.dirname(os.path.abspath(sys.argv[0]))
        sys.path.insert(0, os.path.abspath(os.path.join(dirpath, '..')))
        from distutils.sysconfig import get_python_lib
        sys.path.append(os.path.join(get_python_lib(), 'bitdust'))
        try:
            from system import deploy
        except:
            print_text('ERROR! can not import working code.  Python Path:')
            print_text('\n'.join(sys.path))
            return 1

    #---install---
    if cmd in [
            'deploy',
            'install',
            'venv',
            'virtualenv',
    ]:
        from system import deploy
        return deploy.run(args)

    if opts.appdir:
        appdata = opts.appdir
        AppDataDir = appdata

    else:
        curdir = os.getcwd()
        appdatafile = os.path.join(curdir, 'appdata')
        defaultappdata = deploy.default_base_dir_portable()
        appdata = defaultappdata
        if os.path.isfile(appdatafile):
            try:
                appdata = os.path.abspath(
                    open(appdatafile, 'rb').read().strip())
            except:
                appdata = defaultappdata
            if not os.path.isdir(appdata):
                appdata = defaultappdata
        AppDataDir = appdata

    #---BitDust Home
    deploy.init_base_dir(base_dir=AppDataDir)

    from logs import lg

    #---init IO module
    from system import bpio
    bpio.init()

    appList = bpio.find_main_process(
        pid_file_path=os.path.join(appdata, 'metadata', 'processid'))

    if bpio.Android():
        lg.close_intercepted_log_file()
        lg.open_intercepted_log_file(
            '/storage/emulated/0/Android/data/org.bitdust_io.bitdust1/files/Documents/.bitdust/logs/android.log'
        )

    # sys.excepthook = lg.exception_hook

    #---init logging
    from twisted.internet.defer import setDebugging
    if _Debug:
        if bpio.isFrozen():
            setDebugging(False)
        else:
            setDebugging(True)
    else:
        setDebugging(False)
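    # Deferred debugging stays off in frozen (binary) builds and whenever
    # _Debug is disabled, since tracking the creation point of every Deferred
    # adds measurable overhead in production.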

    from twisted.logger import globalLogPublisher, LogLevel
    tw_log_observer = TwistedUnhandledErrorsObserver(level=LogLevel.critical)
    globalLogPublisher.addObserver(tw_log_observer)

    #---life begins!
    # ask logger to count time for each log line from that moment, not absolute time
    lg.life_begins()

    # try to read debug level value at the early stage - no problem if fail here
    try:
        if cmd == '' or cmd == 'start' or cmd == 'go' or cmd == 'show' or cmd == 'open':
            lg.set_debug_level(
                int(
                    bpio.ReadTextFile(
                        os.path.abspath(
                            os.path.join(appdata, 'config', 'logs',
                                         'debug-level')))))
    except:
        pass

    if opts.no_logs:
        lg.disable_logs()

    if opts.debug or str(opts.debug) == '0':
        lg.set_debug_level(int(opts.debug))

    #---logpath---
    logpath = None
    if opts.output:
        logpath = opts.output
    else:
        try:
            os.makedirs(os.path.join(appdata, 'logs'), exist_ok=True)
        except:
            pass
        logpath = os.path.join(appdata, 'logs', 'stdout.log')

    need_redirecting = False

    if bpio.Windows() and not bpio.isConsoled():
        need_redirecting = True

    if logpath:
        if not appList:
            if cmd not in [
                    'detach',
                    'daemon',
                    'stop',
                    'kill',
                    'shutdown',
                    'restart',
                    'reboot',
                    'reconnect',
                    'show',
                    'open',
            ]:
                lg.open_log_file(logpath)
        if bpio.Windows() and bpio.isFrozen():
            need_redirecting = True

    if bpio.Android():
        need_redirecting = True

    if opts.quite and not opts.verbose:
        lg.disable_output()
    else:
        if need_redirecting:
            lg.stdout_start_redirecting()
            lg.stderr_start_redirecting()

    #---start---
    if cmd == '' or cmd == 'start' or cmd == 'go':
        if appList:
            print_text('BitDust already started, found another process: %s\n' %
                       str(appList),
                       nl='')
            bpio.shutdown()
            return 0

        UI = ''
        try:
            ret = run(UI, opts, args, overDict, executable_path, start_reactor)
        except:
            lg.exc()
            ret = 1
        bpio.shutdown()

        if opts.coverage:
            cov.stop()
            cov.save()
            if opts.coverage_report:
                cov.report(file=open(opts.coverage_report, 'w'))

        return ret

    #---daemon---
    elif cmd == 'detach' or cmd == 'daemon':
        appList = bpio.find_main_process(
            pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if len(appList) > 0:
            print_text('main BitDust process already started: %s\n' %
                       str(appList),
                       nl='')
            bpio.shutdown()
            if opts.coverage:
                cov.stop()
                cov.save()
                if opts.coverage_report:
                    cov.report(file=open(opts.coverage_report, 'w'))
            return 0
        from lib import misc
        print_text('new BitDust process will be started in daemon mode\n',
                   nl='')
        result = misc.DoRestart(
            detach=True,
            # std_out=os.path.join(appdata, 'logs', 'stdout.log'),
            # std_err=os.path.join(appdata, 'logs', 'stderr.log'),
        )
        if result is not None:
            try:
                result = int(result)
            except:
                try:
                    result = result.pid
                except:
                    pass
        bpio.shutdown()
        if opts.coverage:
            cov.stop()
            cov.save()
            if opts.coverage_report:
                cov.report(file=open(opts.coverage_report, 'w'))
        return 0

    #---restart---
    elif cmd == 'restart' or cmd == 'reboot':
        appList = bpio.find_main_process(
            pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        ui = False
        if len(appList) > 0:
            print_text('found main BitDust process: %r ... ' % appList, nl='')

            def done(x):
                print_text('finished successfully\n', nl='')
                from twisted.internet import reactor  # @UnresolvedImport
                if reactor.running and not reactor._stopped:  # @UndefinedVariable
                    reactor.stop()  # @UndefinedVariable

            def failed(x):
                if isinstance(x, Failure):
                    print_text('finished with: %s\n' % x.getErrorMessage(),
                               nl='')
                else:
                    print_text('finished successfully\n', nl='')
                ok = str(x).count('Connection was closed cleanly') > 0
                from twisted.internet import reactor  # @UnresolvedImport
                if ok and reactor.running and not reactor._stopped:  # @UndefinedVariable
                    # print_text('DONE\n', '')
                    reactor.stop()  # @UndefinedVariable
                    return
                print_text('forcing previous process shutdown\n', nl='')
                try:
                    kill()
                except:
                    lg.exc()
                from lib import misc
                reactor.addSystemEventTrigger(  # @UndefinedVariable
                    'after',
                    'shutdown',
                    misc.DoRestart,
                    param='show' if ui else '',
                    detach=True,
                    # std_out=os.path.join(appdata, 'logs', 'stdout.log'),
                    # std_err=os.path.join(appdata, 'logs', 'stderr.log'),
                )
                reactor.stop()  # @UndefinedVariable

            try:
                from twisted.internet import reactor  # @UnresolvedImport
                # from interface.command_line import run_url_command
                # d = run_url_command('?action=restart', False)
                # from interface import cmd_line
                # d = cmd_line.call_xmlrpc_method('restart', ui)
                from interface import cmd_line_json
                d = cmd_line_json.call_websocket_method('process_restart',
                                                        websocket_timeout=5)
                d.addCallback(done)
                d.addErrback(failed)
                reactor.run()  # @UndefinedVariable
                bpio.shutdown()
                if opts.coverage:
                    cov.stop()
                    cov.save()
                    if opts.coverage_report:
                        cov.report(file=open(opts.coverage_report, 'w'))
                return 0
            except:
                lg.exc()
                bpio.shutdown()
                if opts.coverage:
                    cov.stop()
                    cov.save()
                    if opts.coverage_report:
                        cov.report(file=open(opts.coverage_report, 'w'))
                return 1
        else:
            ui = ''
            if cmd == 'restart':
                ui = 'show'
            try:
                ret = run(ui, opts, args, overDict, executable_path)
            except:
                lg.exc()
                ret = 1
            bpio.shutdown()
            if opts.coverage:
                cov.stop()
                cov.save()
                if opts.coverage_report:
                    cov.report(file=open(opts.coverage_report, 'w'))
            return ret

    #---show---
    elif cmd == 'show' or cmd == 'open':
        if not bpio.isGUIpossible():
            print_text('BitDust GUI is turned OFF\n', nl='')
            bpio.shutdown()
            return 0
        if bpio.Linux() and not bpio.X11_is_running():
            print_text('this operating system does not support X11 interface\n',
                       nl='')
            bpio.shutdown()
            return 0
        appList = bpio.find_main_process(
            pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if len(appList) == 0:
            try:
                ret = run('show', opts, args, overDict, executable_path)
            except:
                lg.exc()
                ret = 1
            bpio.shutdown()
            return ret
        # print_text('found main BitDust process: %s, start the GUI\n' % str(appList))
        # ret = show()
        bpio.shutdown()
        return 0

    #---stop---
    elif cmd == 'stop' or cmd == 'kill' or cmd == 'shutdown':
        if cmd == 'kill':
            ret = kill()
            bpio.shutdown()
            if opts.coverage:
                cov.stop()
                cov.save()
                if opts.coverage_report:
                    cov.report(file=open(opts.coverage_report, 'w'))
            return ret
        appList = bpio.find_main_process(pid_file_path=os.path.join(
            appdata, 'metadata', 'processid'), )
        if len(appList) > 0:
            if cmd == 'kill':
                print_text(
                    'found main BitDust process: %s, about to kill running process ... '
                    % appList,
                    nl='')
                ret = kill()
                bpio.shutdown()
                if opts.coverage:
                    cov.stop()
                    cov.save()
                    if opts.coverage_report:
                        cov.report(file=open(opts.coverage_report, 'w'))
                return ret
            try:
                from twisted.internet import reactor  # @UnresolvedImport
                from twisted.python.failure import Failure

                def _stopped(x):
                    if _Debug:
                        if isinstance(x, Failure):
                            print_text('finished with: %s\n' %
                                       x.getErrorMessage(),
                                       nl='')
                        else:
                            print_text('finished with: %s\n' % x, nl='')
                    else:
                        print_text('finished successfully\n', nl='')
                    reactor.stop()  # @UndefinedVariable
                    bpio.shutdown()

                print_text('found main BitDust process: %s ... ' % appList,
                           nl='')
                from interface import cmd_line_json
                cmd_line_json.call_websocket_method(
                    'process_stop', websocket_timeout=2).addBoth(_stopped)
                reactor.run()  # @UndefinedVariable
                if opts.coverage:
                    cov.stop()
                    cov.save()
                    if opts.coverage_report:
                        cov.report(file=open(opts.coverage_report, 'w'))
                return 0
            except:
                lg.exc()
                ret = kill()
                bpio.shutdown()
                if opts.coverage:
                    cov.stop()
                    cov.save()
                    if opts.coverage_report:
                        cov.report(file=open(opts.coverage_report, 'w'))
                return ret
        else:
            appListAllChilds = bpio.find_main_process(
                check_processid_file=False,
                extra_lookups=[],
            )
            if len(appListAllChilds) > 0:
                print_text(
                    'BitDust child processes found: %s, performing "kill process" action ...\n'
                    % appListAllChilds,
                    nl='')
                ret = kill()
                if opts.coverage:
                    cov.stop()
                    cov.save()
                    if opts.coverage_report:
                        cov.report(file=open(opts.coverage_report, 'w'))
                return ret

            print_text('BitDust is not running at the moment\n', nl='')
            bpio.shutdown()
            if opts.coverage:
                cov.stop()
                cov.save()
                if opts.coverage_report:
                    cov.report(file=open(opts.coverage_report, 'w'))
            return 0

    #---command_line---
    from interface import cmd_line_json as cmdln
    ret = cmdln.run(opts, args, pars, overDict, executable_path)
    if ret == 2:
        print_text(usage_text())
    bpio.shutdown()

    #---coverage report---
    if opts.coverage:
        cov.stop()
        cov.save()
        if opts.coverage_report:
            cov.report(file=open(opts.coverage_report, 'w'))

    return ret
Example #44
0
File: main.py Project: hef/sedbot
#!/usr/bin/env python
import sys
from twisted.words.protocols import irc
from twisted.internet import reactor, protocol
from twisted.python import log
import re
from twisted.internet.defer import setDebugging
setDebugging(True)


class SedBot(irc.IRCClient):
    nickname = "sedbot"

    def __init__(self):
        self.last = {}

    def signedOn(self):
        self.join("#pumpingstationone")

    def joined(self, channel):
        pass

    def privmsg(self, user, channel, msg):
        nick = user.split("!")[0]
        if nick == self.nickname:
            return
        elif channel == self.nickname:
            return
        m = re.search("s/(.*)/(.*)/[giI]*", msg)
        if (m):
            search_string = m.group(1)
Example #45
0
def run():
    if not hasattr(tcp.Client, 'abortConnection'):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print 'Pausing for 3 seconds...'
        time.sleep(3)

    realnets = dict((name, net) for name,
                    net in networks.nets.iteritems() if '_testnet' not in name)

    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (
        p2pool.__version__,), fromfile_prefix_chars='@')
    # Calculate version from git in /p2pool/__init.py__/get_version()
    parser.add_argument('--version', action='version',
                        version=p2pool.__version__)
    parser.add_argument('--net',
                        help='use specified network (default: bitcoin)',
                        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
                        help='''use the network's testnet''',
                        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
                        help='enable debugging mode',
                        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('--bench',
                        help='enable CPU performance profiling mode',
                        action='store_const', const=True, default=False, dest='bench')
    parser.add_argument('--rconsole',
                        help='enable rconsole debugging mode (requires rfoo)',
                        action='store_const', const=True, default=False, dest='rconsole')

    parser.add_argument('-a', '--address',
                        help='generate payouts to this address (default: <address requested from bitcoind>), or (dynamic)',
                        type=str, action='store', default=None, dest='address')
    parser.add_argument('-i', '--numaddresses',
                        help='number of bitcoin auto-generated addresses to maintain for getwork dynamic address allocation',
                        type=int, action='store', default=2, dest='numaddresses')
    parser.add_argument('-t', '--timeaddresses',
                        help='seconds between acquisition of new address and removal of single old (default: 2 days or 172800s)',
                        type=int, action='store', default=172800, dest='timeaddresses')
    parser.add_argument('--datadir',
                        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
                        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
                        help='''log to this file (default: data/<NET>/log)''',
                        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--web-static',
                        help='use an alternative web frontend in this directory (otherwise use the built-in frontend)',
                        type=str, action='store', default=None, dest='web_static')
    parser.add_argument('--merged',
                        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@ncaddress:10332/)',
                        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--coinbtext',
                        help='append this text to the coinbase',
                        type=str, action='append', default=[], dest='coinb_texts')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
                        help='donate this percentage of work towards the development of p2pool (default: 0.0)',
                        type=float, action='store', default=0.0, dest='donation_percentage')
    parser.add_argument('--iocp',
                        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
                        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
                        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
                        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
                        help='disable submitting caught exceptions to the author',
                        action='store_true', default=False, dest='no_bugreport')

    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
                              help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join(
                                  '%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
                              type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
                              help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
                              type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
                        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
                        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
                              help='maximum incoming connections (default: 40)',
                              type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
                              help='outgoing connections (default: 6)',
                              type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    p2pool_group.add_argument('--external-ip', metavar='ADDR[:PORT]',
                              help='specify your own public IP address instead of asking peers to discover it, useful for running dual WAN or asymmetric routing',
                              type=str, action='store', default=None, dest='p2pool_external_ip')
    parser.add_argument('--disable-advertise',
                        help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''',
                        action='store_false', default=True, dest='advertise_ip')

    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
                              help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join(
                                  '%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
                              type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
                              help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
                              type=float, action='store', default=0, dest='worker_fee')
    worker_group.add_argument('-s', '--share-rate', metavar='SECONDS_PER_SHARE',
                              help='Auto-adjust mining difficulty on each connection to target this many seconds per pseudoshare (default: %3.0f)' % 3.,
                              type=float, action='store', default=3., dest='share_rate')  # pseudoshare adjusted timing/difficulty

    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-config-path', metavar='BITCOIND_CONFIG_PATH',
                                help='custom configuration file path (when bitcoind -conf option used)',
                                type=str, action='store', default=None, dest='bitcoind_config_path')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
                                help='connect to this address (default: 127.0.0.1)',
                                type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
                                help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join(
                                    '%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
                                type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-rpc-ssl',
                                help='connect to JSON-RPC interface using SSL',
                                action='store_true', default=False, dest='bitcoind_rpc_ssl')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
                                help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join(
                                    '%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
                                type=int, action='store', default=None, dest='bitcoind_p2p_port')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
                                help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
                                type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    bitcoind_group.add_argument('--allow-obsolete-bitcoind',
                                help='allow the use of coin daemons (bitcoind) that do not support all of the required softforks for this network (e.g. Bitcoin Core and segwit2x)',
                                action='store_const', const=True, default=False, dest='allow_obsolete_bitcoind')

    args = parser.parse_args()

    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    p2pool.BENCH = args.bench

    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]

    datadir_path = os.path.join((os.path.join(os.path.dirname(
        sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)

    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = (
        [None, None] + args.bitcoind_rpc_userpass)[-2:]

    if args.bitcoind_rpc_password is None:
        conf_path = args.bitcoind_config_path or net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                         '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                         '''\r\n'''
                         '''server=1\r\n'''
                         '''rpcpassword=%x\r\n'''
                         '''\r\n'''
                         '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'bitcoind_rpc_username', str),
            ('rpcpassword', 'bitcoind_rpc_password', str),
            ('rpcport', 'bitcoind_rpc_port', int),
            ('port', 'bitcoind_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if 'rpcssl' in contents and contents['rpcssl'] != '0':
            args.bitcoind_rpc_ssl = True
        if args.bitcoind_rpc_password is None:
            parser.error(
                '''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')

    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''

    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT

    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT

    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT

    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')

    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)

    if args.address is not None and args.address != 'dynamic':
        try:
            _ = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
            args.pubkey_hash = True
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
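The endpoint handling above accepts --worker-port either as a bare port or as an ADDR:PORT pair. A small self-contained sketch of that parsing rule, assuming an illustrative helper name (parse_worker_endpoint is not a real p2pool function):

def parse_worker_endpoint(value, default_port):
    """Turn None, 'PORT' or 'ADDR:PORT' into an (addr, port) tuple.

    None              -> ('', default_port)   listen on all interfaces
    '9332'            -> ('', 9332)
    '127.0.0.1:9332'  -> ('127.0.0.1', 9332)
    """
    if value is None:
        return '', default_port
    if ':' not in value:
        return '', int(value)
    addr, port = value.rsplit(':', 1)
    return addr, int(port)


assert parse_worker_endpoint(None, 9332) == ('', 9332)
assert parse_worker_endpoint('8000', 9332) == ('', 8000)
assert parse_worker_endpoint('127.0.0.1:8000', 9332) == ('127.0.0.1', 8000)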
import logging
import os

from twisted.internet.base import DelayedCall
from twisted.internet.defer import setDebugging

from afkak.common import (
    OffsetCommitRequest,
    ProduceRequest,
    OffsetRequest,
    ConsumerCoordinatorNotAvailableError,
    NotCoordinatorForConsumerError,
)
from afkak.kafkacodec import (create_message)
from fixtures import ZookeeperFixture, KafkaFixture
from testutil import (
    kafka_versions,
    KafkaIntegrationTestCase,
    random_string,
)

DEBUGGING = True
setDebugging(DEBUGGING)
DelayedCall.debug = DEBUGGING

log = logging.getLogger(__name__)
#  logging.basicConfig(level=logging.DEBUG)


class TestAfkakClientIntegration(KafkaIntegrationTestCase):
    create_client = False

    @classmethod
    def setUpClass(cls):
        if not os.environ.get('KAFKA_VERSION'):  # pragma: no cover
            return

        # Single zookeeper, 3 kafka brokers
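The module-level lines above switch on both Deferred debugging and DelayedCall debugging before any test runs, so stuck Deferreds and overdue timers report where they were created. A minimal sketch of the same pattern in an isolated trial test module (the test class and its timing are illustrative, not part of the afkak suite):

import logging

from twisted.internet import defer, reactor, task
from twisted.internet.base import DelayedCall
from twisted.trial import unittest

# Enable verbose debugging for the whole module: failed or garbage-collected
# Deferreds and overdue DelayedCalls report the traceback of their creation.
DEBUGGING = True
defer.setDebugging(DEBUGGING)
DelayedCall.debug = DEBUGGING

log = logging.getLogger(__name__)


class DebugExampleTests(unittest.TestCase):
    def test_delayed_result(self):
        # deferLater creates a DelayedCall; with debugging on, any error in
        # the callback chain points back to where the Deferred was made.
        d = task.deferLater(reactor, 0.01, lambda: 42)
        d.addCallback(self.assertEqual, 42)
        return d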
Example #47
0
def main(executable_path=None):
    """
    THIS IS THE ENTRY POINT OF THE PROGRAM!
    """
    global AppDataDir

    pars = parser()
    (opts, args) = pars.parse_args()
    overDict = override_options(opts, args)

    cmd = ''
    if len(args) > 0:
        cmd = args[0].lower()
    #---install---
    if cmd in ['deploy', 'install', 'venv', 'virtualenv', ]:
        from system import deploy
        return deploy.run(args)

    try:
        from logs import lg
    except:
        dirpath = os.path.dirname(os.path.abspath(sys.argv[0]))
        sys.path.insert(0, os.path.abspath(os.path.join(dirpath, '..')))
        # sys.path.insert(0, os.path.abspath(os.path.join(dirpath, '..', '..')))
        from distutils.sysconfig import get_python_lib
        sys.path.append(os.path.join(get_python_lib(), 'bitdust'))
        try:
            from logs import lg
        except:
            print('ERROR! can not import working code.  Python Path:')
            print('\n'.join(sys.path))
            return 1

    # init IO module, update locale
    from system import bpio
    bpio.init()

    # sys.excepthook = lg.exception_hook

    if not bpio.isFrozen():
        try:
            from twisted.internet.defer import setDebugging
            setDebugging(True)
            # from twisted.python import log as twisted_log
            # twisted_log.startLogging(sys.stdout)
        except:
            lg.warn('python-twisted is not installed')

    if opts.appdir:
        appdata = opts.appdir
        AppDataDir = appdata

    else:
        curdir = os.getcwd()  # os.path.dirname(os.path.abspath(sys.executable))
        appdatafile = os.path.join(curdir, 'appdata')
        defaultappdata = os.path.join(os.path.expanduser('~'), '.bitdust')
        appdata = defaultappdata
        if os.path.isfile(appdatafile):
            try:
                appdata = os.path.abspath(open(appdatafile, 'rb').read().strip())
            except:
                appdata = defaultappdata
            if not os.path.isdir(appdata):
                appdata = defaultappdata
        AppDataDir = appdata

    # ask to count time for each log line from that moment, not absolute time
    lg.life_begins()
    # try to read debug level value at the early stage - no problem if fail here
    try:
        if cmd == '' or cmd == 'start' or cmd == 'go' or cmd == 'show' or cmd == 'open':
            lg.set_debug_level(int(
                bpio.ReadTextFile(
                    os.path.abspath(
                        os.path.join(appdata, 'config', 'logs', 'debug-level')))))
    except:
        pass

    if opts.no_logs:
        lg.disable_logs()

    #---logpath---
    logpath = os.path.join(appdata, 'logs', 'start.log')
    if opts.output:
        logpath = opts.output

    need_redirecting = False

    if bpio.Windows() and not bpio.isConsoled():
        need_redirecting = True

    if logpath != '':
        lg.open_log_file(logpath)
        lg.out(2, 'bpmain.main log file opened ' + logpath)
        if bpio.Windows() and bpio.isFrozen():
            need_redirecting = True

    if need_redirecting:
        lg.stdout_start_redirecting()
        lg.out(2, 'bpmain.main redirecting started')

    # very basic solution to record run-time errors
    try:
        if os.path.isfile(os.path.join(appdata, 'logs', 'exception.log')):
            os.remove(os.path.join(appdata, 'logs', 'exception.log'))
    except:
        pass

    if opts.debug or str(opts.debug) == '0':
        lg.set_debug_level(opts.debug)

    # if opts.quite and not opts.verbose:
    #     lg.disable_output()

    if opts.verbose:
        copyright_text()

    lg.out(2, 'bpmain.main started ' + time.asctime())
    lg.out(2, 'bpmain.main args=%s' % str(args))

    #---start---
    if cmd == '' or cmd == 'start' or cmd == 'go':
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if appList:
            lg.out(0, 'BitDust already started, found another process: %s\n' % str(appList))
            bpio.shutdown()
            return 0

        UI = ''
        # if cmd == 'show' or cmd == 'open':
        # UI = 'show'
        try:
            ret = run(UI, opts, args, overDict, executable_path)
        except:
            lg.exc()
            ret = 1
        bpio.shutdown()
        return ret

    #---daemon---
    elif cmd == 'detach' or cmd == 'daemon':
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if len(appList) > 0:
            lg.out(0, 'main BitDust process already started: %s\n' % str(appList))
            bpio.shutdown()
            return 0
        from lib import misc
        lg.out(0, 'new BitDust process will be started in daemon mode, finishing current process\n')
        bpio.shutdown()
        result = misc.DoRestart(
            detach=True,
            std_out=os.path.join(appdata, 'logs', 'stdout.log'),
            std_err=os.path.join(appdata, 'logs', 'stderr.log'),
        )
        if result is not None:
            try:
                result = int(result)
            except:
                try:
                    result = result.pid
                except:
                    pass
        return 0

    #---restart---
    elif cmd == 'restart' or cmd == 'reboot':
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        ui = False
        if len(appList) > 0:
            lg.out(0, 'found main BitDust process: %s, sending "restart" command ... ' % str(appList), '')

            def done(x):
                lg.out(0, 'DONE\n', '')
                from twisted.internet import reactor  # @UnresolvedImport
                if reactor.running and not reactor._stopped:  # @UndefinedVariable
                    reactor.stop()  # @UndefinedVariable

            def failed(x):
                ok = str(x).count('Connection was closed cleanly') > 0
                from twisted.internet import reactor  # @UnresolvedImport
                if ok and reactor.running and not reactor._stopped:  # @UndefinedVariable
                    lg.out(0, 'DONE\n', '')
                    reactor.stop()  # @UndefinedVariable
                    return
                lg.out(0, 'FAILED while killing previous process - do HARD restart\n', '')
                try:
                    kill()
                except:
                    lg.exc()
                from lib import misc
                reactor.addSystemEventTrigger(  # @UndefinedVariable
                    'after',
                    'shutdown',
                    misc.DoRestart,
                    param='show' if ui else '',
                    detach=True,
                    std_out=os.path.join(appdata, 'logs', 'stdout.log'),
                    std_err=os.path.join(appdata, 'logs', 'stderr.log'),
                )
                reactor.stop()  # @UndefinedVariable
            try:
                from twisted.internet import reactor  # @UnresolvedImport
                # from interface.command_line import run_url_command
                # d = run_url_command('?action=restart', False)
                # from interface import cmd_line
                # d = cmd_line.call_xmlrpc_method('restart', ui)
                from interface import cmd_line_json
                d = cmd_line_json.call_jsonrpc_method('restart', ui)
                d.addCallback(done)
                d.addErrback(failed)
                reactor.run()  # @UndefinedVariable
                bpio.shutdown()
                return 0
            except:
                lg.exc()
                bpio.shutdown()
                return 1
        else:
            ui = ''
            if cmd == 'restart':
                ui = 'show'
            try:
                ret = run(ui, opts, args, overDict, executable_path)
            except:
                lg.exc()
                ret = 1
            bpio.shutdown()
            return ret

    #---show---
    elif cmd == 'show' or cmd == 'open':
        if not bpio.isGUIpossible():
            lg.out(0, 'BitDust GUI is turned OFF\n')
            bpio.shutdown()
            return 0
        if bpio.Linux() and not bpio.X11_is_running():
            lg.out(0, 'this operating system does not support the X11 interface\n')
            bpio.shutdown()
            return 0
        appList = bpio.find_main_process(pid_file_path=os.path.join(appdata, 'metadata', 'processid'))
        if len(appList) == 0:
            try:
                ret = run('show', opts, args, overDict, executable_path)
            except:
                lg.exc()
                ret = 1
            bpio.shutdown()
            return ret
        lg.out(0, 'found main BitDust process: %s, start the GUI\n' % str(appList))
        ret = show()
        bpio.shutdown()
        return ret

    #---stop---
    elif cmd == 'stop' or cmd == 'kill' or cmd == 'shutdown':
        appList = bpio.find_main_process(
            pid_file_path=os.path.join(appdata, 'metadata', 'processid'),
        )
        if len(appList) > 0:
            lg.out(0, 'found main BitDust process: %r, sending command "exit" ... ' % appList, '')
            try:
                from twisted.internet import reactor  # @UnresolvedImport
                # from interface.command_line import run_url_command
                # url = '?action=exit'
                # run_url_command(url, False).addBoth(wait_then_kill)
                # reactor.run()
                # bpio.shutdown()

                def _stopped(x):
                    lg.out(0, 'BitDust process finished correctly\n')
                    reactor.stop()  # @UndefinedVariable
                    bpio.shutdown()
                # from interface import cmd_line
                # cmd_line.call_xmlrpc_method('stop').addBoth(_stopped)
                from interface import cmd_line_json
                cmd_line_json.call_jsonrpc_method('stop').addBoth(_stopped)
                reactor.run()  # @UndefinedVariable
                return 0
            except:
                lg.exc()
                ret = kill()
                bpio.shutdown()
                return ret
        else:
            appListAllChilds = bpio.find_main_process(
                check_processid_file=False,
                extra_lookups=['regexp:^.*python.*bitdust.py.*?$', ],
            )
            if len(appListAllChilds) > 0:
                lg.out(0, 'BitDust child processes found: %r, performing "kill process" actions ...\n' % appListAllChilds, '')
                ret = kill()
                return ret

            lg.out(0, 'BitDust is not running at the moment\n')
            bpio.shutdown()
            return 0

    #---command_line---
    from interface import cmd_line_json as cmdln
    ret = cmdln.run(opts, args, pars, overDict, executable_path)
    if ret == 2:
        print(usage_text())
    bpio.shutdown()
    return ret
def enable_twisted_debug(self):
    defer.setDebugging(True)
    import twisted.internet.base
    twisted.internet.base.DelayedCall.debug = True
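A helper like enable_twisted_debug above flips both debugging switches at once. A hedged sketch of a symmetric toggle and its typical use (set_twisted_debug is an illustrative name, not part of the original code):

from twisted.internet import defer
import twisted.internet.base


def set_twisted_debug(enabled):
    """Toggle Deferred and DelayedCall debugging together (illustrative helper)."""
    defer.setDebugging(enabled)
    twisted.internet.base.DelayedCall.debug = enabled


# Typical use: switch on while diagnosing a hang or a lost errback, switch off
# again afterwards, since both flags add per-Deferred / per-call overhead.
set_twisted_debug(True)
# ... reproduce the problem ...
set_twisted_debug(False)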
Example #49
0
def tearDown(self):
    defer.setDebugging(self._deferredWasDebugging)
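The tearDown above restores a flag that is presumably captured beforehand with defer.getDebugging(). A minimal sketch of the matching setUp/tearDown pair, assuming a trial-style test case (the class name is illustrative):

from twisted.internet import defer
from twisted.trial import unittest


class DeferredDebugTestCase(unittest.TestCase):
    def setUp(self):
        # Remember the global flag so the suite leaves it exactly as it found it.
        self._deferredWasDebugging = defer.getDebugging()
        defer.setDebugging(True)

    def tearDown(self):
        defer.setDebugging(self._deferredWasDebugging)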
Example #50
0
def run():
    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitbar)',
        action='store', choices=sorted(realnets), default='bitbar', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work towards the development of p2pool (default: 0.0)',
        type=float, action='store', default=0.0, dest='donation_percentage')
    parser.add_argument('--iocp',
        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
        help='disable submitting caught exceptions to the author',
        action='store_true', default=False, dest='no_bugreport')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
        help='outgoing connections (default: 6)',
        type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-rpc-ssl',
        help='connect to JSON-RPC interface using SSL',
        action='store_true', default=False, dest='bitcoind_rpc_ssl')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x\r\n'''
                '''\r\n'''
                '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'bitcoind_rpc_username', str),
            ('rpcpassword', 'bitcoind_rpc_password', str),
            ('rpcport', 'bitcoind_rpc_port', int),
            ('port', 'bitcoind_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if args.bitcoind_rpc_password is None:
            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')
    
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)

    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)
    
    class ErrorReporter(object):
        def __init__(self):
            self.last_sent = None
        
        def emit(self, eventDict):
            if not eventDict["isError"]:
                return
            
            if self.last_sent is not None and time.time() < self.last_sent + 5:
                return
            self.last_sent = time.time()
            
            if 'failure' in eventDict:
                text = ((eventDict.get('why') or 'Unhandled Error')
                    + '\n' + eventDict['failure'].getTraceback())
            else:
                text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
            
            from twisted.web import client
            client.getPage(
                url='http://u.forre.st/p2pool_error.cgi',
                method='POST',
                postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
                timeout=15,
            ).addBoth(lambda x: None)
    if not args.no_bugreport:
        log.addObserver(ErrorReporter().emit)
    
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
    reactor.run()
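Every p2pool variant in these examples wires the --debug flag to the same two actions: set a module-level DEBUG switch and call defer.setDebugging(True). A stripped-down sketch of that pattern (the module-level DEBUG variable and the script body are illustrative stand-ins for p2pool.DEBUG and main):

import argparse

from twisted.internet import defer

DEBUG = False  # stand-in for p2pool.DEBUG


def parse_args():
    parser = argparse.ArgumentParser(description='debug-flag sketch')
    parser.add_argument('--debug', help='enable debugging mode',
                        action='store_true', default=False, dest='debug')
    return parser.parse_args()


def main():
    global DEBUG
    args = parse_args()
    if args.debug:
        DEBUG = True
        # Unhandled Deferred errors now include the traceback of the point
        # where the Deferred was created, at the cost of extra overhead.
        defer.setDebugging(True)
    else:
        DEBUG = False


if __name__ == '__main__':
    main()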
Example #51
0
def load_config():

  # Open config file
    try:
        with open(CONFIG_FILE, 'r') as config_file:
            conf = json.load(config_file)
    except IOError as e:
        print 'ERROR: Could not open %s file : ' % CONFIG_FILE, e
        return False
    except ValueError as e:
        print 'ERROR: Config file is not valid JSON', e
        return False


  # Set Monocorpus if old conf type
    if "MULTICORPUS" not in conf or not conf["MULTICORPUS"]:
        conf["MULTICORPUS"] = False

  # Set Twisted port from old configs format
    if "twisted.port" not in conf and "twisted" in conf and "port" in conf["twisted"]:
        conf["twisted.port"] = conf["twisted"]["port"]

  # Set default corpus keepalive option for old configs
    if "memoryStructure" in conf:
        if "keepalive" not in conf["memoryStructure"]:
            conf["memoryStructure"]["keepalive"] = 1800

  # Set default noproxy setting if missing
    if "mongo-scrapy" in conf:
        if 'proxy_host' not in conf['mongo-scrapy']:
            conf['mongo-scrapy']['proxy_host'] = ''
        if 'proxy_port' not in conf['mongo-scrapy']:
            conf['mongo-scrapy']['proxy_port'] = 3128
        # Ensure retrocompat
        conf['mongo-scrapy']['proxy'] = {
          'host': conf['mongo-scrapy']['proxy_host'],
          'port': conf['mongo-scrapy']['proxy_port']
        }

  # Set default creation rules if missing
    if "defaultCreationRule" not in conf:
        conf["defaultCreationRule"] = "domain"
    if "creationRules" not in conf:
        conf["creationRules"] = {}

  # Auto unset phantomJs autoretry if missing
    if "phantom" in conf and "autoretry" not in conf["phantom"]:
        conf["phantom"]["autoretry"] = False

  # Check sanity
    try:
        check_conf_sanity(conf, GLOBAL_CONF_SCHEMA)
    except Exception as e:
        print e
        return False


  # Test MongoDB server
    mongoconf = conf['mongo-scrapy']
    db = Connection(mongoconf['host'], mongoconf['mongo_port'])[mongoconf.get('db_name', mongoconf.get('project'))]
    try:
        test = list(db['%s.logs' % DEFAULT_CORPUS].find())
    except Exception as x:
        print "ERROR: Cannot connect to mongoDB, please check your server and the configuration in %s" % CONFIG_FILE
        if conf['DEBUG']:
            print x
        return None


  # Handle old non multicorpus conf
    if not conf["MULTICORPUS"]:

      # Set default single port and ram
        if "thrift.portrange" not in conf["memoryStructure"]:
            conf["memoryStructure"]["thrift.portrange"] = [conf["memoryStructure"]["thrift.port"], conf["memoryStructure"]["thrift.port"]]
        if "thrift.max_ram" not in conf["memoryStructure"]:
            conf["memoryStructure"]["thrift.max_ram"] = 1024

      # Migrate old lucene corpus into default corpus if not existing yet
        oldpath = conf["memoryStructure"]["lucene.path"]
        if "lucene.rootpath" not in conf["memoryStructure"]:
            conf["memoryStructure"]["lucene.rootpath"] = oldpath
        newpath = os.path.join(oldpath, DEFAULT_CORPUS)
        if not os.path.isdir(newpath):
            print("Migrate old lucene corpus files from %s into dedicated dir %s as default corpus" % (oldpath, DEFAULT_CORPUS))
            old_lucene_files = os.listdir(oldpath)
            try:
                test_and_make_dir(newpath)
                for f in old_lucene_files:
                    os.rename(os.path.join(oldpath, f), os.path.join(newpath, f))
            except:
                print "ERROR migrating %s lucene files from old corpus into new directory" % len(old_lucene_files)
                return False

      # Migrate old corpus' mongodb collections into default corpus ones
        if "db_name" not in mongoconf:
            conf["mongo-scrapy"]["db_name"] = mongoconf["project"]
        migratedb = {
          "queue": "queue",
          "pages": "pageStore",
          "jobs": "jobList",
          "logs": "jobLogs"
        }
        try:
            for key, coll in migratedb.iteritems():
                oldname = mongoconf["%sCol" % coll]
                newname = "%s.%s" % (DEFAULT_CORPUS, key)
                if db[oldname].count():
                    print "INFO: migratingold corpus mongodb collection %s into default corpus %s" % (oldname, newname)
                    db[oldname].rename(newname)
        except Exception as e:
            print type(e), e
            print "ERROR migrating mongodb from old corpus into new collections"
            return False

  # Turn portrange into list of ports
    conf['memoryStructure']['thrift.portrange'] = range(conf['memoryStructure']['thrift.portrange'][0], conf['memoryStructure']['thrift.portrange'][1]+1)

  # Turn on Twisted debugging
    if conf['DEBUG']:
        defer.setDebugging(True)

    return conf
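load_config above only enables Deferred debugging when the JSON configuration carries a truthy DEBUG key. A small sketch of that config-driven toggle (the file name and keys are illustrative, not Hyphe's actual schema):

import json

from twisted.internet import defer


def load_config(path='config.json'):
    """Read a JSON config and enable Twisted debugging when DEBUG is truthy."""
    try:
        with open(path, 'r') as config_file:
            conf = json.load(config_file)
    except (IOError, ValueError) as e:
        print('ERROR: could not load %s: %s' % (path, e))
        return False

    if conf.get('DEBUG'):
        # Traceable Deferreds: failures report where each Deferred was created.
        defer.setDebugging(True)
    return conf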
Example #52
0
def run():
    if not hasattr(tcp.Client, 'abortConnection'):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print 'Pausing for 3 seconds...'
        time.sleep(3)
    
    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work towards the development of p2pool (default: 0.0)',
        type=float, action='store', default=0.0, dest='donation_percentage')
    parser.add_argument('--iocp',
        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
        help='disable submitting caught exceptions to the author',
        action='store_true', default=False, dest='no_bugreport')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
        help='outgoing connections (default: 6)',
        type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    parser.add_argument('--disable-advertise',
        help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''',
        action='store_false', default=True, dest='advertise_ip')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0.5)''',
        type=float, action='store', default=0.5, dest='worker_fee')
    worker_group.add_argument('--miner-share-rate', metavar='SHARES_PER_MINUTE',
        help='number of pseudoshares per minute for each miner',
        type=float, action='store', default=None, dest='miner_share_rate')
    worker_group.add_argument('--address-share-rate', metavar='SHARES_PER_MINUTE',
        help='number of pseudoshares per minute for each address',
        type=float, action='store', default=None, dest='address_share_rate')
    worker_group.add_argument('--min-difficulty', metavar='DIFFICULTY',
        help='minimum difficulty for miners',
        type=float, action='store', default=1.0, dest='min_difficulty')
    
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-config-path', metavar='BITCOIND_CONFIG_PATH',
        help='custom configuration file path (when bitcoind -conf option used)',
        type=str, action='store', default=None, dest='bitcoind_config_path')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-rpc-ssl',
        help='connect to JSON-RPC interface using SSL',
        action='store_true', default=False, dest='bitcoind_rpc_ssl')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    
    if args.bitcoind_rpc_password is None:
        conf_path = args.bitcoind_config_path or net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x\r\n'''
                '''\r\n'''
                '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'bitcoind_rpc_username', str),
            ('rpcpassword', 'bitcoind_rpc_password', str),
            ('rpcport', 'bitcoind_rpc_port', int),
            ('port', 'bitcoind_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if args.bitcoind_rpc_password is None:
            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
    
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')
    
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
    
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
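When no RPC password is supplied, each variant falls back to parsing bitcoin.conf with the same comment-stripping key=value loop. A self-contained sketch of that loop (read_bitcoin_conf is an illustrative name):

def read_bitcoin_conf(text):
    """Parse bitcoin.conf-style text into a dict, ignoring comments (illustrative)."""
    contents = {}
    for line in text.splitlines():
        if '#' in line:
            line = line[:line.index('#')]
        if '=' not in line:
            continue
        k, v = line.split('=', 1)
        contents[k.strip()] = v.strip()
    return contents


sample = """
# RPC settings
server=1
rpcuser=alice
rpcpassword=secret  # keep this private
rpcport=8332
"""
conf = read_bitcoin_conf(sample)
assert conf['rpcuser'] == 'alice'
assert conf['rpcport'] == '8332'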
Example #53
0
            yield (key, value)


ConfigManager = ConfigManager()  #hacktastic Singleton
import config  #provided by ConfigManager

###############################################################################
# Setup basic logging working as soon as possible
###############################################################################

###############################################################################
# Setup our configuration from ENV or use sane defaults
###############################################################################

#if true deferreds will be traceable
defer.setDebugging(drone.DEBUG)


###############################################################################
# Setup our application services
###############################################################################
class ServiceManager(Entity):
    """ServiceManager Provides an Interface to get to any
       methods a service may provide.
       
       After the ServiceManager has been instantiated you can
       access services from any python package in the DroneD
       framework simply by placing ```import services``` in
       your code.
    """
    parentService = property(lambda s: s._parent)