def collect_results_daemon(job_list,log_file,restart_file): #job_list is a list of tuples, with [0] as job_id and [1] as the target directory import daemonize global controller global password daemonize.daemonize('/dev/null',log_file,log_file) restart_pickle = job_save(restart_file) restart_pickle.update(job_list) print "Daemon started with pid %d" % os.getpid() print "Started %s" % time.ctime() while len(job_list) > 0: time.sleep(1800) print "################################" print "Tick %s" % time.ctime() #print job_list #use a working list to prevent problems associated with in loop editing of the job_list working_list = list(job_list) for job in working_list: job_id = job[0] target_directory = job[1] status = get_status(controller, password, job_id).status_report() print job_id, status, target_directory if status == "Finished": retrieve_command = "xgrid -job results -h " + controller + " -id " + str(job_id) + " -out " + target_directory + " -so " + target_directory + "/xgrid_log.txt -se " + target_directory + "/xgrid_err.txt -p " + password os.system(retrieve_command) job_list.pop(job_list.index(job)) elif status == "Failed": job_list.pop(job_list.index(job)) #print "Loop", job_list restart_pickle.update(job_list) print "Finished %s" % time.ctime() exit()
def main(): cmd = sys.argv[1] pidfile = '/var/run/getcode.pid' with open(pidfile, 'r') as fd: pid = fd.readlines() if cmd == 'start': if not pid: daemonize(stdout='/var/log/getcode.log', stderr='/var/log/getcode_error.log') ppid = str(os.getpid()) with open(pidfile, 'w') as fd: fd.write(ppid) parserLastVersion() else: print "getcode subprocess already Running..." sys.exit(1) elif cmd == 'stop': comm = ': > %s' % pidfile if pid: p = int(pid[0]) os.kill(p, signal.SIGTERM) check_call(comm, shell=True, stdout=PIPE, stderr=PIPE) print "getcode stopd." else: print "argv[1] command: start stop" sys.exit(1)
def start(self):
    """Detach into the background, run the server loop, then remove
    the pid file once the server returns."""
    #signal.signal(signal.SIGINT, self.on_signal)
    #signal.signal(signal.SIGTERM, self.on_signal)
    daemonize(pidfile=self.pidfile)
    #self.start_subprocess()
    start_server()
    self.try_remove_pidfile()
def start(self): print 'Starting scrapydd agent daemon.' signal.signal(signal.SIGINT, self.on_signal) signal.signal(signal.SIGTERM, self.on_signal) daemonize(pidfile=self.pidfile) start() self.try_remove_pidfile()
def collect_results_daemon(job_list, log_file, restart_file): #job_list is a list of tuples, with [0] as job_id and [1] as the target directory import daemonize global controller global password daemonize.daemonize('/dev/null', log_file, log_file) restart_pickle = job_save(restart_file) restart_pickle.update(job_list) print "Daemon started with pid %d" % os.getpid() print "Started %s" % time.ctime() while len(job_list) > 0: time.sleep(1800) print "################################" print "Tick %s" % time.ctime() #print job_list #use a working list to prevent problems associated with in loop editing of the job_list working_list = list(job_list) for job in working_list: job_id = job[0] target_directory = job[1] status = get_status(controller, password, job_id).status_report() print job_id, status, target_directory if status == "Finished": retrieve_command = "xgrid -job results -h " + controller + " -id " + str( job_id ) + " -out " + target_directory + " -so " + target_directory + "/xgrid_log.txt -se " + target_directory + "/xgrid_err.txt -p " + password os.system(retrieve_command) job_list.pop(job_list.index(job)) elif status == "Failed": job_list.pop(job_list.index(job)) #print "Loop", job_list restart_pickle.update(job_list) print "Finished %s" % time.ctime() exit()
def main(name, start): # Parse the command line: server = start() # Print copyright and license information print_blurb() # Create run directory tree and get port. try: GLOBAL_CFG.create_cylc_run_tree(server.suite) server.configure_pyro() except Exception as exc: if flags.debug: raise else: sys.exit(exc) # Daemonize the suite if not server.options.no_detach and not flags.debug: daemonize(server) try: server.configure() server.run() # For profiling (see Python docs for how to display the stats). # import cProfile # cProfile.runctx('server.run()', globals(), locals(), 'stats') except SchedulerStop, x: # deliberate stop print str(x) server.shutdown()
def main():
    """Entry point: optionally daemonize, configure logging and the
    notification channel, then poll forever."""
    args = parse_arguments()
    # Stay in the foreground only when explicitly requested.
    if not args.no_daemon:
        daemonize.daemonize()
    start_logging(args.logfile, args.loglevel)
    notifier = choose_notify_function(args.notify, args.recipients)
    check_periodically(args.interval, notifier)
def storaged(argv): """The NMS storage server. storaged [-h <serverhost>] [-p <serverport>] [-d <databasefile>] [-l <logfile>] [-n] [-?] where: -h is the server hostname to bind to. -p is the TCP port to use (other than the default) -d specifies the Durus file to use for the database. -l Log file name to use for logging. -n Do NOT become a daemon, stay in foreground (for debugging) -? This help screen. """ import daemonize import getopt import basicconfig cf = basicconfig.get_config("storage.conf") host = cf.get("host", DEFAULT_HOST) port = cf.get("port", DEFAULT_PORT) DBFILE = os.path.expandvars(cf.get("dbfile")) LOGFILE = os.path.expandvars(cf.get("dblog")) del cf do_daemon = True try: optlist, args = getopt.getopt(argv[1:], "d:l:h:p:n?") except getopt.GetoptError: print storaged.__doc__ sys.exit(2) for opt, optarg in optlist: if opt == "-d": DBFILE = optarg elif opt == "-l": LOGFILE = optarg elif opt == "-h": host = optarg elif opt == "-p": port = int(optarg) elif opt == "-n": do_daemon = False elif opt == "-?": print storaged.__doc__ return 2 if do_daemon: daemonize.daemonize() try: start_durus(host, port, LOGFILE, DBFILE) except KeyboardInterrupt: return
def run_server(argv): """pyserver [-nh?] Starts the Posix Client Operations Server. Where: -n = Do NOT run as a daemon, but stay in foreground. """ global _EXIT import daemonize, getopt do_daemon = True try: optlist, args = getopt.getopt(argv[1:], "nh?") except getopt.GetoptError: print run_server.__doc__ sys.exit(2) for opt, optarg in optlist: if opt in ("-h", "-?"): print run_server.__doc__ return elif opt == "-n": do_daemon = False if do_daemon: daemonize.daemonize() Pyro.core.initServer(banner=0, storageCheck=0) Log.msg("ClientServer", "initializing") ns=Pyro.naming.NameServerLocator().getNS() daemon=Pyro.core.Daemon() daemon.useNameServer(ns) uri=daemon.connectPersistent(ClientServer(), ":Client.%s" % (os.uname()[1].split(".")[0],)) while True: try: daemon.handleRequests(2.0) if _EXIT: return asyncio.poller.poll(0) except KeyboardInterrupt: break except: ex, val, tb = sys.exc_info() print >>sys.stderr, ex, val
def pytrapd(argv):
    """pytrapd [-d]

    Run a SNMP trap handler that emails on receipt of a trap.
    """
    from pycopia import asyncio
    # -d forks the handler into the background.
    if len(argv) > 1 and argv[1] == "-d":
        import daemonize
        daemonize.daemonize()
    cf = basicconfig.get_config("trapserver")
    handlers = [TrapMailer(cf)]
    dispatcher = traps.get_dispatcher(handlers)
    asyncio.poller.loop()
def pytrapd(argv):
    """pytrapd [-d]

    Run a SNMP trap handler that emails on receipt of a trap.
    """
    from pycopia import asyncio
    # -d forks the handler into the background.
    if len(argv) > 1 and argv[1] == "-d":
        import daemonize
        daemonize.daemonize()
    cf = basicconfig.get_config("./trapserver.conf")
    handlers = [TrapMailer(cf)]
    dispatcher = traps.get_dispatcher(handlers)
    asyncio.poller.loop()
def main():
    """Publish Avahi/mDNS CNAMEs forever, re-registering after daemon
    disconnects; optionally logs to a rotation-friendly file and forks
    into the background."""
    (ttl, force, verbose, daemon, log, cnames) = parse_args()

    # Since an eventual log file must support external log rotation, we
    # must do this the hard way...
    formatter = logging.Formatter("%(asctime)s: %(levelname)s [%(process)d]: %(message)s")
    if log:
        handler = logging.handlers.WatchedFileHandler(log)
    else:
        handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(formatter)
    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(logging.DEBUG if verbose else logging.INFO)

    # This must be done after initializing the logger, so that an eventual
    # log file gets created in the right place (the user will assume that
    # relative paths start from the current directory)...
    if daemon:
        daemonize()

    logging.info("Avahi/mDNS publisher starting...")
    if force:
        logging.info("Forcing CNAME publishing without collision checks")

    # The publisher needs to be initialized in the loop, to handle
    # disconnects...
    publisher = None
    while True:
        if publisher is None or not publisher.available():
            publisher = AvahiPublisher(ttl)
            # To make sure records disappear immediately on exit, clean up
            # properly...
            for signum in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):
                signal.signal(signum, functools.partial(handle_signals, publisher))
            for cname in cnames:
                if not publisher.publish_cname(cname, force):
                    logging.error("Failed to publish '%s'", cname)
                    continue
            if publisher.count() == len(cnames):
                logging.info("All CNAMEs published")
            else:
                logging.warning("%d out of %d CNAMEs published",
                                publisher.count(), len(cnames))
        # CNAMEs will exist while this service is kept alive,
        # but we don't actually need to do anything useful...
        sleep(1)
def pytrapd(argv):
    """pytrapd [-d]

    Run the Gtest trap daemon. Fork to background if -d is provided.
    """
    import asyncio
    if len(argv) > 1 and argv[1] == "-d":
        import daemonize
        daemonize.daemonize()
    cf = Storage.get_config()
    # Incoming traps are both recorded and mailed out.
    handlers = [TrapRecorder(cf), TrapMailer(cf)]
    dispatcher = traps.get_dispatcher(handlers)
    asyncio.poller.loop()
def main(): p = optparse.OptionParser() p.add_option("--nodaemon", "-n", action="store_true",help="no daemon mode", default=False) p.add_option("--verbose", "-v", action="store_true",help="enable debugging", default=False) p.add_option("--config", "-c", action="store",help="config file", type='string', default=configFile) p.add_option("--dump", "-d", action="store_true",help="dump settings", default=False) options, args = p.parse_args() if options.verbose: global verbose verbose = options.verbose print("logging output from '" + dfile + "' to log file: " + logFile + "\n") init(ReadConfig(options.config)) if options.dump: print("dumping settings:"); print(" verbose=" + str(options.verbose) + " config='" + str(options.config) + "' dump=" + str(options.dump)) print(" dfile='" + str(dfile) + "' logFile='" + str(logFile) + "' timeout=" + str(timeout)); print(" elogFile='" + str(elogFile) + "' baudRate='" + str(baudRate) + " nodaemon=" + str(options.nodaemon)) exit(0) if options.nodaemon == False: daemonize(stdout=elogFile) os.umask(022) line = serial.Serial(dfile, baudRate) line.open() line.flushInput() log = open(logFile, 'a', 0); while 1: a, b, c = select.select([line], [], [], timeout) if line in a: x = CheckInput(line) log.write(x + ":" + time.ctime(time.time()) + "\n") print("got '" + x + "'") else: line.write('t')
def storaged(argv): import getopt import basicconfig cf = basicconfig.get_config(os.path.join(os.environ["STRATATEST_HOME"], "etc", "storage.conf")) host = cf.get("serverhost", DEFAULT_HOST) port = cf.get("port", DEFAULT_PORT) DBFILE = os.path.expandvars(cf.get("dbfile")) LOGFILE = os.path.expandvars(cf.get("dblog")) del cf do_daemon = True try: optlist, args = getopt.getopt(argv[1:], "d:l:h:p:nk?") except getopt.GetoptError: print DOC sys.exit(2) for opt, optarg in optlist: if opt == "-d": DBFILE = optarg elif opt == "-k": import signal pid = int(file(PIDFILE).read()) os.kill(pid, signal.SIGINT) return 0 elif opt == "-l": LOGFILE = optarg elif opt == "-h": host = optarg elif opt == "-p": port = int(optarg) elif opt == "-n": do_daemon = False elif opt == "-?": print DOC return 2 if do_daemon: import daemonize daemonize.daemonize(pidfile=PIDFILE) try: start_durus(host, port, LOGFILE, DBFILE) except KeyboardInterrupt: return
def main(b1, b1op=Bunny1OptionParser()):
    """uses command line options and runs the server given an instance
    of the Bunny1 class"""
    # guess if this is running in CGI mode
    if os.environ.get("GATEWAY_INTERFACE", "").startswith("CGI"):
        main_cgi(b1)
        return
    (options, args) = b1op.parse_args()
    if options.test_command is not None:
        try:
            b1._server_mode = "COMMAND_LINE"
            print(b1.do_command(options.test_command))
        except HTTPRedirect as redir:
            # the escape sequences make the output show up yellow on
            # terminals in the case of a redirect to distinguish from
            # content output
            print("\033[33m%s:\033[0m %s" % (redir.__class__.__name__, redir))
        return
    port = int(options.port) if options.port else DEFAULT_PORT
    host = options.host if options.host else socket.gethostname()
    if options.base_url:
        b1.base_url = options.base_url
    else:
        protocol = "http"
        b1.base_url = "%s://%s:%s/" % (protocol, host, port)
    if options.daemonize:
        import daemonize
        daemonize.daemonize(options.pidfile)
    # start the server
    b1.start(port=port, host=options.host,
             errorlogfile=options.errorlogfile,
             accesslogfile=options.accesslogfile)
#!/usr/bin/env python import SimpleXMLRPCServer from daemonize import daemonize import os def ls(directory): try: return os.listdir(directory) except OSError: return [] def ls_boom(directory): return os.listdir(directory) def cb(obj): print "OBJECT::", obj print "OBJECT.__class__::", obj.__class__ return obj.cb() if __name__ == '__main__': daemonize(stdout='/tmp/stdout.log', stderr='/tmp/stderr.log') s = SimpleXMLRPCServer.SimpleXMLRPCServer(('127.0.0.1', 8765)) s.register_function(ls) s.register_function(ls_boom) s.register_function(cb) s.serve_forever()
def handleArgs(*args): """Handle standard command line arguments, return the rest as a list. Takes the commandline arguments, converts them to Unicode, processes all global parameters such as -lang or -log. Returns a list of all arguments that are not global. This makes sure that global arguments are applied first, regardless of the order in which the arguments were given. args may be passed as an argument, thereby overriding sys.argv """ # get commandline arguments if necessary if not args: args = sys.argv[1:] # get the name of the module calling this function. This is # required because the -help option loads the module's docstring and because # the module name will be used for the filename of the log. moduleName = calledModuleName() if not moduleName: moduleName = "terminal-interface" nonGlobalArgs = [] username = None do_help = False for arg in args: arg = _decodeArg(arg) if arg == '-help': do_help = True elif arg.startswith('-family:'): config.family = arg[len("-family:"):] elif arg.startswith('-lang:'): config.mylang = arg[len("-lang:"):] elif arg.startswith("-user:"******"-user:"******"-putthrottle:"):]) elif arg.startswith('-pt:'): config.put_throttle = int(arg[len("-pt:"):]) elif arg == '-log': if moduleName not in config.log: config.log.append(moduleName) elif arg.startswith('-log:'): if moduleName not in config.log: config.log.append(moduleName) config.logfilename = arg[len("-log:"):] elif arg == '-nolog': if moduleName in config.log: config.log.remove(moduleName) elif arg == '-simulate': config.simulate = True # # DEBUG control: # # The framework has four layers (by default, others can be added), # each designated by a string -- # # 1. "comm": the communication layer (http requests, etc.) # 2. "data": the raw data layer (API requests, XML dump parsing) # 3. "wiki": the wiki content representation layer (Page and Site # objects) # 4. 
"bot": the application layer (user scripts should always # send any debug() messages to this layer) # # The "-debug:layer" flag sets the logger for any specified # layer to the DEBUG level, causing it to output extensive debugging # information. Otherwise, the default logging setting is the INFO # level. "-debug" with no layer specified sets _all_ loggers to # DEBUG level. # # This method does not check the 'layer' part of the flag for # validity. # # If used, "-debug" turns on file logging, regardless of any # other settings. # elif arg == "-debug": if moduleName not in config.log: config.log.append(moduleName) if "" not in config.debug_log: config.debug_log.append("") elif arg.startswith("-debug:"): if moduleName not in config.log: config.log.append(moduleName) component = arg[len("-debug:"):] if component not in config.debug_log: config.debug_log.append(component) elif arg == '-verbose' or arg == "-v": config.verbose_output += 1 elif arg == '-daemonize': import daemonize daemonize.daemonize() elif arg.startswith('-daemonize:'): import daemonize daemonize.daemonize(redirect_std=arg[11:]) else: # the argument depends numerical config settings cmd = [] if ':' in arg: cmd = arg[1:].split(':') if len(cmd) == 2 and len(cmd[1]) > 0 and \ hasattr(config, cmd[0]) and \ type(getattr(config, cmd[0])) == int: setattr(config, cmd[0], cmd[1]) # the argument is not global. Let the specific bot script care # about it. else: nonGlobalArgs.append(arg) if username: config.usernames[config.family][config.mylang] = username init_handlers() if config.verbose_output: import re ver = pywikibot.__version__ # probably can be improved on m = re.search(r"\$Id: (\w+) " r"\$", ver) pywikibot.output(u'Pywikipediabot r%s' % m.group(1)) pywikibot.output(u'Python %s' % sys.version) if do_help: showHelp() sys.exit(0) pywikibot.debug(u"handleArgs() completed.", _logger) return nonGlobalArgs
import sys, os, time from commands import getoutput import daemonize def run_child(): while True: time.sleep(5) if __name__ == '__main__': if len(sys.argv) < 3: print 'Usage: <num processes per sec><max num processes>' sys.exit() if len(sys.argv) == 3: daemonize.daemonize() num_processes_per_sec = int(sys.argv[1]) max_num_processes = int(sys.argv[2]) while True: num_processes = int(getoutput('ps ax | wc -l')) print num_processes if num_processes < max_num_processes and os.fork() == 0: # Child must not spawn child run_child() time.sleep(1 / float(num_processes_per_sec))
try: if server.__class__.__name__ != 'restart': gcfg = get_global_cfg() gcfg.create_cylc_run_tree(server.suite, server.options.verbose) server.configure_pyro() except Exception, x: if server.options.debug: raise else: print >> sys.stderr, x sys.exit(1) # Daemonize the suite if not server.options.no_detach and not server.options.debug: daemonize(server.suite, server.port) try: server.configure() server.run() # For profiling: #import cProfile #cProfile.run( 'server.run()', 'fooprof' ) # and see Python docs "The Python Profilers" # for how to display the resulting stats. except Exception, x: import traceback traceback.print_exc(x) print >> sys.stderr, "ERROR CAUGHT: cleaning up before exit" try: server.shutdown('ERROR: ' + str(x))
#coding=utf-8
"""Ping one host every few seconds and send a mail alert when it stops
responding; runs as a daemon logging to /tmp."""
import subprocess
from time import sleep
from mail import send_mail
import sys
from daemonize import daemonize


def watcher():
    """Loop forever: ping the host, report liveness or mail an alert."""
    while True:
        ip = '192.168.88.245'
        rc = subprocess.call("ping -c 1 %s" % ip, shell=True,
                             stdout=open('/dev/null', 'w'),
                             stderr=subprocess.STDOUT)
        if rc == 0:
            sys.stderr.write("%s: is alive" % ip)
        else:
            send_mail('服务器停止相应:', ip)
        sleep(3)


if __name__ == '__main__':
    daemonize(stdout='/tmp/stdout.log', stderr='/tmp/stderr.log')
    watcher()
if args.verbose: args.daemonize = False if args.file: exfil_file = args.file else: exfil_file = os.getcwd() + "/" + default_file if args.dest_domain: dest_domain = args.dest_domain else: dest_domain = default_domain if args.daemonize: daemonize.daemonize('/tmp/dns_exfil_daemon.pid', stdin='/dev/null', stdout='/tmp/dns_exfil_daemon.log', stderr='/tmp/dns_exfil_daemon.log') if args.hec: e = splunk_hec_sender.EventPreamble() event_list = e.create_event_base(this_pid_str,this_script) e_notice = "starting DNS exfil script" event = [e_notice] event_list.extend(event) if args.verbose: print(event_list) splunk_hec_sender.create_json_data(event_list,this_script) if args.time: start_epoch = float(now_epoch) args.time = float(args.time)
port = DEFAULT_PORT if options.host: host = options.host else: host = socket.gethostname() if options.base_url: b1.base_url = options.base_url else: protocol = "http" b1.base_url = "%s://%s:%s/" % (protocol, host, port) if options.daemonize: import daemonize daemonize.daemonize(options.pidfile) # start the server b1.start(port=port, host=options.host, errorlogfile=options.errorlogfile, accesslogfile=options.accesslogfile) def main_cgi(b1): """for running bunny1 as a cgi""" # this mostly works, but it has problems serving images andother # static content try: form = cgi.FieldStorage() cmd = form.getvalue(COMMAND_QUERY_STRING_VAR) if not cmd:
def handleArgs(*args): """Handle standard command line arguments, return the rest as a list. Takes the command line arguments as Unicode strings, processes all global parameters such as -lang or -log. Returns a list of all arguments that are not global. This makes sure that global arguments are applied first, regardless of the order in which the arguments were given. args may be passed as an argument, thereby overriding sys.argv """ # get commandline arguments if necessary if not args: # it's the version in pywikibot.__init__ that is changed by scripts, # not the one in pywikibot.bot. args = pywikibot.argvu[1:] # get the name of the module calling this function. This is # required because the -help option loads the module's docstring and because # the module name will be used for the filename of the log. moduleName = calledModuleName() if not moduleName: moduleName = "terminal-interface" nonGlobalArgs = [] username = None do_help = False for arg in args: if arg == '-help': do_help = True elif arg.startswith('-family:'): config.family = arg[len("-family:"):] elif arg.startswith('-lang:'): config.mylang = arg[len("-lang:"):] elif arg.startswith("-user:"******"-user:"******"-putthrottle:"):]) elif arg.startswith('-pt:'): config.put_throttle = int(arg[len("-pt:"):]) elif arg == '-log': if moduleName not in config.log: config.log.append(moduleName) elif arg.startswith('-log:'): if moduleName not in config.log: config.log.append(moduleName) config.logfilename = arg[len("-log:"):] elif arg == '-nolog': if moduleName in config.log: config.log.remove(moduleName) elif arg in ('-cosmeticchanges', '-cc'): config.cosmetic_changes = not config.cosmetic_changes output(u'NOTE: option cosmetic_changes is %s\n' % config.cosmetic_changes) elif arg == '-simulate': config.simulate = True # # DEBUG control: # # The framework has four layers (by default, others can be added), # each designated by a string -- # # 1. "comm": the communication layer (http requests, etc.) # 2. 
"data": the raw data layer (API requests, XML dump parsing) # 3. "wiki": the wiki content representation layer (Page and Site # objects) # 4. "bot": the application layer (user scripts should always # send any debug() messages to this layer) # # The "-debug:layer" flag sets the logger for any specified # layer to the DEBUG level, causing it to output extensive debugging # information. Otherwise, the default logging setting is the INFO # level. "-debug" with no layer specified sets _all_ loggers to # DEBUG level. # # This method does not check the 'layer' part of the flag for # validity. # # If used, "-debug" turns on file logging, regardless of any # other settings. # elif arg == '-debug': if moduleName not in config.log: config.log.append(moduleName) if "" not in config.debug_log: config.debug_log.append("") elif arg.startswith("-debug:"): if moduleName not in config.log: config.log.append(moduleName) component = arg[len("-debug:"):] if component not in config.debug_log: config.debug_log.append(component) elif arg in ('-verbose', '-v'): config.verbose_output += 1 elif arg == '-daemonize': import daemonize daemonize.daemonize() elif arg.startswith('-daemonize:'): import daemonize daemonize.daemonize(redirect_std=arg[len('-daemonize:'):]) else: # the argument depends on numerical config settings # e.g. 
-maxlag: try: _arg, _val = arg[1:].split(':') # explicitly check for int (so bool doesn't match) if not isinstance(getattr(config, _arg), int): raise TypeError setattr(config, _arg, int(_val)) except (ValueError, TypeError, AttributeError): # argument not global -> specific bot script will take care nonGlobalArgs.append(arg) if username: config.usernames[config.family][config.mylang] = username init_handlers() if config.verbose_output: # Please don't change the regular expression here unless you really # have to - some git versions (like 1.7.0.4) seem to treat lines # containing just `$Id:` as if they were ident lines (see # gitattributes(5)) leading to unwanted behaviour like automatic # replacement with `$Id: 2be8a2f7674966d7b265e70fd7f4256dbba2f399 $` # or `$Id: 2be8a2f7674966d7b265e70fd7f4256dbba2f399 $`. m = re.search(r"\$Id" r": (\w+) \$", pywikibot.__version__) if m: pywikibot.output(u'Pywikibot r%s' % m.group(1)) else: # Version ID not availlable on SVN repository. # Maybe these informations should be imported from version.py pywikibot.output(u'Pywikibot SVN repository') pywikibot.output(u'Python %s' % sys.version) if do_help: showHelp() sys.exit(0) pywikibot.debug(u"handleArgs() completed.", _logger) return nonGlobalArgs
try: if server.__class__.__name__ != "restart": gcfg = get_global_cfg() gcfg.create_cylc_run_tree(server.suite, server.options.verbose) server.configure_pyro() except Exception, x: if server.options.debug: raise else: print >> sys.stderr, x sys.exit(1) # Daemonize the suite if not server.options.no_detach and not server.options.debug: daemonize(server.suite, server.port) try: server.configure() server.run() # For profiling: # import cProfile # cProfile.run( 'server.run()', 'fooprof' ) # and see Python docs "The Python Profilers" # for how to display the resulting stats. except Exception, x: import traceback traceback.print_exc(x) print >> sys.stderr, "ERROR CAUGHT: cleaning up before exit" try:
#!/usr/bin/env python
"""Minimal web.py application served as a daemon; GET /<name> replies
with a comma-joined greeting."""
from daemonize import daemonize
import web

urls = (
    "/(.*)", "index",
)


class index():
    def GET(self, name=None):
        # Fall back to a default name when the path is empty.
        if not name:
            name = "test"
        return ','.join(['hi', name])


if __name__ == "__main__":
    from daemonize import daemonize
    daemonize(stdout="/tmp/stdout.log", stderr="/tmp/stderr.log")
    app = web.application(urls, globals())
    app.run()
def handleArgs(*args): """Handle standard command line arguments, return the rest as a list. Takes the commandline arguments, converts them to Unicode, processes all global parameters such as -lang or -log. Returns a list of all arguments that are not global. This makes sure that global arguments are applied first, regardless of the order in which the arguments were given. args may be passed as an argument, thereby overriding sys.argv """ # get commandline arguments if necessary if not args: args = sys.argv[1:] # get the name of the module calling this function. This is # required because the -help option loads the module's docstring and because # the module name will be used for the filename of the log. moduleName = calledModuleName() if not moduleName: moduleName = "terminal-interface" nonGlobalArgs = [] username = None do_help = False for arg in args: arg = _decodeArg(arg) if arg == '-help': do_help = True elif arg.startswith('-family:'): config.family = arg[len("-family:") : ] elif arg.startswith('-lang:'): config.mylang = arg[len("-lang:") : ] elif arg.startswith("-user:"******"-user:"******"-putthrottle:") : ]) elif arg.startswith('-pt:'): config.put_throttle = int(arg[len("-pt:") : ]) elif arg == '-log': if moduleName not in config.log: config.log.append(moduleName) elif arg.startswith('-log:'): if moduleName not in config.log: config.log.append(moduleName) config.logfilename = arg[len("-log:") : ] elif arg == '-nolog': if moduleName in config.log: config.log.remove(moduleName) elif arg == '-simulate': config.simulate = True # # DEBUG control: # # The framework has four layers (by default, others can be added), # each designated by a string -- # # 1. "comm": the communication layer (http requests, etc.) # 2. "data": the raw data layer (API requests, XML dump parsing) # 3. "wiki": the wiki content representation layer (Page and Site # objects) # 4. 
"bot": the application layer (user scripts should always # send any debug() messages to this layer) # # The "-debug:layer" flag sets the logger for any specified # layer to the DEBUG level, causing it to output extensive debugging # information. Otherwise, the default logging setting is the INFO # level. "-debug" with no layer specified sets _all_ loggers to # DEBUG level. # # This method does not check the 'layer' part of the flag for # validity. # # If used, "-debug" turns on file logging, regardless of any # other settings. # elif arg == "-debug": if moduleName not in config.log: config.log.append(moduleName) if "" not in config.debug_log: config.debug_log.append("") elif arg.startswith("-debug:"): if moduleName not in config.log: config.log.append(moduleName) component = arg[len("-debug:") : ] if component not in config.debug_log: config.debug_log.append(component) elif arg == '-verbose' or arg == "-v": config.verbose_output += 1 elif arg == '-daemonize': import daemonize daemonize.daemonize() elif arg.startswith('-daemonize:'): import daemonize daemonize.daemonize(redirect_std = arg[11:]) else: # the argument depends numerical config settings cmd = [] if ':' in arg: cmd = arg[1:].split(':') if len(cmd) == 2 and len(cmd[1]) > 0 and \ hasattr(config, cmd[0]) and \ type(getattr(config, cmd[0])) == int: setattr(config, cmd[0], cmd[1]) # the argument is not global. Let the specific bot script care # about it. else: nonGlobalArgs.append(arg) if username: config.usernames[config.family][config.mylang] = username init_handlers() if config.verbose_output: import re ver = pywikibot.__version__ # probably can be improved on m = re.search(r"\$Id: .* (\d+ \d+-\d+-\d+ \d+:\d+:\d+Z) .*\$", ver) pywikibot.output(u'Pywikipediabot r%s' % m.group(1)) pywikibot.output(u'Python %s' % sys.version) if do_help: showHelp() sys.exit(0) pywikibot.debug(u"handleArgs() completed.", _logger) return nonGlobalArgs
q.sortby = 'date' q.desc = True ids = self.thrudex.search(q) tweets = [] if len(ids.elements) > 0: list_response = self.thrudoc.getList(self.create_doc_list(ids.elements)) for ele in list_response: if ele.element.value != '': tweet = cjson.decode(ele.element.value) tweet["profile_image_url"] = tweet["user"]["profile_image_url"].replace("\\", "") tweet["user_name"] = tweet["user"]["screen_name"] tweet["text"] = tweet["text"].replace("\\", "") tweets.append(tweet) return ids.total, tweets def create_doc_list(self, ids): docs = [] for pointer, ele in enumerate(ids): doc = ThrudocTypes.Element() doc.bucket = THRUDOC_BUCKET doc.key = ele.key docs.append(doc) return docs if __name__ == "__main__": import daemonize as dm dm.daemonize('/dev/null','/tmp/twitter.log','/tmp/twitter.log') tc = TweetManager() tc.grab_tweet()
self.wm.add_watch(config.get('FOLDER','watch'), pyinotify.IN_CLOSE_WRITE, rec=True,auto_add=True) self.notifier.start() gtk.gdk.threads_enter() gtk.main() gtk.gdk.threads_leave() def quit_cb(self, widget, data = None): print "Exiting cb" self.notifier.stop() gtk.main_quit() def refresh_cb(self, widget, data = None): pass def popup_menu_cb(self, widget, button, time, data = None): if button == 3: if data: data.show_all() data.popup(None, None, gtk.status_icon_position_menu, 3, time, self.statusIcon) if __name__=="__main__": daemonize(config.get('DAEMON','stdin'), config.get('DAEMON','stdout'), config.get('DAEMON','stderr')) fn = FolderNotify()
def commandPMdeal(self, character):
    """Deal a hand and send it back to *character* as a private message."""
    dealt = self.command__deal()
    payload = '{"message":"' + dealt + '","recipient":"' + character + '"}'
    self.FC.send_raw('PRI', payload)


def commandCRdeal(self, room, character):
    """Deal a hand into chat-room *room*, prefixed with the requester's name."""
    dealt = character + ': ' + self.command__deal()
    payload = '{"message":"' + dealt + '","channel":"' + room + '"}'
    self.FC.send_raw('MSG', payload)


def mainf():
    """Daemon entry point: set up file logging, build the bot, connect it."""
    log_options = dict(
        format="%(asctime)s [%(levelno)s] %(funcName)s - %(message)s",
        datefmt='%Y-%m-%dT%H:%M:%S%z',
        filename='Pokerbot.log',
        filemode='a',  # append so restarts keep the history
        level=logging.DEBUG,
    )
    logging.basicConfig(**log_options)
    logging.warning("Program Started")
    bot = Pokerbot()
    bot.loadConfigFile('Pokerbot.conf')
    bot.addHandlers()
    bot.FC.connect()


if __name__ == "__main__":
    daemonize.daemonize(mainf, 'pokerbot.pid')
# NOTE(review): fragment (starts mid-loop) of a Python-2 monitor master daemon;
# left byte-identical.  Notes:
#  * daemonize() runs *before* the set_exists_pid() liveness check, so the
#    "service is alive" error is only visible in the daemon's log — the sibling
#    spider variant later in this file checks first; TODO confirm intended order.
#  * bare `except:` around os.kill swallows everything, including KeyboardInterrupt.
#  * jobs.pop(pid) inside `for pid in jobs.keys()` relies on Python 2's keys()
#    returning a list snapshot; it raises RuntimeError on Python 3.
#  * The inline Chinese comment says: second approach — simply ignore SIGCHLD so
#    the kernel reaps exited children itself (effectively via waitpid).
for pid in jobs.keys(): if not check_status(pid): jobs.pop(pid) _c += 1 time.sleep(0.1) for pid in jobs: try: os.kill(pid,signal.SIGKILL) except: pass os.remove(pid_file) if __name__ == '__main__': if daemon_flag: daemonize() if not set_exists_pid(): logger.error("service is alive") exit(0) setproctitle("Monitor :Master") signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGTTIN, sig_add) signal.signal(signal.SIGTTOU, sig_reduce) #第二种方法,直接忽视子进程退出前发出的sigchld信号,交给内核,让内核来收拾,其实也是让内核用waitpid来解决。 signal.signal(signal.SIGCHLD, signal.SIG_IGN) logger.info('main process: %d start', os.getpid()) spawn_worker() logger.info('main: %d kill all jobs done', os.getpid())
# NOTE(review): tail of a Python-2 XML-RPC server module (print statements,
# `except getopt.GetoptError, err`), left byte-identical.  Notes:
#  * The server-construction lines here are presumably the body of a
#    MainServer()-style function whose `def` is above this chunk (MainServer()
#    is what the __main__ block calls) — TODO confirm; if they really are
#    module-level they would run at import time, before the guard.
#  * `server.startup()` is a project API (SecureDocXMLRPCServer), presumably
#    the serve loop — not a stdlib SimpleXMLRPCServer method.
#  * With -D, stderr is redirected to panic.log before MainServer() starts.
return False return True server_address = ('127.0.0.1', 9779) # (address, port) server = SecureDocXMLRPCServer(Rpc(), server_address, sslkey, sslcrt) sa = server.socket.getsockname() print "Ok in https://%s:%d" % (sa[0], sa[1]) server.startup() if __name__ == '__main__': import getopt,sys daemon = False try: opts, args = getopt.getopt(sys.argv[1:], 'D') except getopt.GetoptError, err: print str(err) sys.exit(2) for o,a in opts: if o == '-D': daemon = True else: assert False, 'unhandled option' if daemon: daemonize.daemonize(stderr=os.path.join(curr_dir, 'panic.log')) MainServer()
if __name__ == "__main__": try: opts, args = getopt.getopt( sys.argv[1:], "hd:t:b", ["help", "date", "time"], ) except getopt.error, msg: print msg print "for help use --help" sys.exit(2) for o, a in opts: if o in ("-h", "--help"): print __doc__ sys.exit(0) elif o in ("-b", "--background"): print "Running in backgroun" from daemonize import daemonize #TODO crate files if don't exist daemonize(config.get('DAEMON', 'stdin'), config.get('DAEMON', 'stdout'), config.get('DAEMON', 'stderr')) main() else: assert False, "Unhandled option" main()
#!/usr/bin/env python
"""Minimal web.py application that greets the requested path, run as a daemon."""
from daemonize import daemonize
import web

# Route every path to the `index` handler; the regex capture becomes `name`.
urls = (
    "/(.*)", "index",
)


class index():
    """Handler for all paths: returns "hi,<name>"."""

    def GET(self, name=None):
        # An empty capture (request for "/") falls back to the default.
        if not name:
            name = "test"
        return ','.join(['hi', name])


if __name__ == "__main__":
    # Fixed: `daemonize` was redundantly re-imported here even though the
    # top-of-file import already provides it.
    daemonize(stdout="/tmp/stdout.log", stderr="/tmp/stderr.log")
    app = web.application(urls, globals())
    app.run()
# NOTE(review): truncated main() of a Python-2 Django "import Xplore articles"
# job — the function continues past the end of this chunk, so the code is left
# byte-identical.  Issues spotted for follow-up:
#  * --alert_email / --alert_url have elif handlers but are commented out of the
#    getopt long-option list, so passing them raises GetoptError before the
#    handlers can run — those branches look like dead code; TODO confirm.
#  * `daemonize.daemonize(..., exclude_files=[logfile.fileno()])` raises
#    AttributeError when --daemon is used without --log (logfile is None).
#  * In the "Conferences" branch, `punumber = xhit.getElementsByTagName('punumber')`
#    executes against an lxml element (which has no such method) before being
#    overwritten by findall(); the equivalent line is commented out in the
#    "Standards" branch and presumably should be removed here too.
#  * `file = urllib2.urlopen(...)` shadows the `file` builtin.
#  * `open(logfilename, 'w', 0)` (unbuffered text mode) is Python-2-only.
def main(*args): logfilename = None pidfilename = None use_processcontrol = True use_daemon = True xplore_hc = 5 use_resume = False #print 'sys.argv[1:]: %r' % sys.argv[1:] opts, args = getopt.getopt( sys.argv[1:], '', [ 'log=', 'pid=', 'processcontrol=', 'daemon=', 'xplore_hc=', 'path=', 'resume=', #'alert_email=', #'alert_url=', ]) def get_bool_arg(value): if value.lower() == 'true' or value.lower() == 'yes' or value.lower( ) == '1': return True elif value.lower() == 'false' or value.lower() == 'no' or value.lower( ) == '0': return False else: return None for name, value in opts: if name == '--log': logfilename = os.path.abspath(value) elif name == '--pid': pidfilename = os.path.abspath(value) elif name == '--processcontrol': temp = get_bool_arg(value) if temp is not None: use_processcontrol = temp else: raise Exception( 'Unknown value for --processcontrol %r, must be "true", "false", "yes", "no", 1, or 0' % value) elif name == '--daemon': temp = get_bool_arg(value) if temp is not None: use_daemon = temp else: raise Exception( 'Unknown value for --daemon %r, must be "true", "false", "yes", "no", 1, or 0' % value) elif name == '--xplore_hc': try: xplore_hc = int(value) except ValueError: raise Exception( 'Unknown value for --xplore_hc %r, must be a positive integer.' % value) elif name == '--path': # NOTE: It looks like daemons don't inherit the environment of the spawning WSGI process? # Add all paths in the --path arg to the current sys.path. 
paths = value.split(':') for path in paths: print 'Got path %r' % path if path not in sys.path: sys.path.insert(0, path) print 'Inserting path %r' % path elif name == '--resume': temp = get_bool_arg(value) if temp is not None: use_resume = temp else: raise Exception( 'Unknown value for --resume %r, must be "true", "false", "yes", "no", 1, or 0' % value) elif name == '--alert_email': alert_email = value elif name == '--alert_url': alert_url = value else: raise Exception('Unknown argument %r' % name) if len(args) > 0: raise Exception('Unknown arguments %r' % args) if use_resume and not use_processcontrol: raise Exception('Cannot use --resume when --processcontrol is false.') # NOTE: setup django import here, since we may have added more paths to sys.path. import ieeetags.settings from django.core.management import setup_environ setup_environ(ieeetags.settings) # Now our django imports. from webapp.models.node import Node from webapp.models.society import NodeSocieties from webapp.models.types import NodeType, ResourceType from webapp.models.resource import Resource, ResourceNodes from webapp.models.system import Cache, ProcessControl, PROCESS_CONTROL_TYPES print 'logfilename: %r' % logfilename print 'use_processcontrol: %r' % use_processcontrol print 'pidfilename: %r' % pidfilename if logfilename is not None: # NOTE: Overwrites existing file. logfile = open(logfilename, 'w', 0) print >> logfile, '----------------------------------------' else: logfile = None #if pidfilename is not None: # from lockfile.pidlockfile import PIDLockFile # pidfile = PIDLockFile(pidfilename) #else: # pidfile = None log('logging started.') if use_daemon: log('Starting daemon.') daemonize.daemonize(stdout=logfile, stderr=logfile, pidfilename=pidfilename, exclude_files=[logfile.fileno()]) else: log('Running as non-daemon.') try: try: if use_processcontrol: # Update the log. 
process_control = ProcessControl.objects.get( type=PROCESS_CONTROL_TYPES.XPLORE_IMPORT) process_control.log += 'Started.\n' process_control.date_updated = datetime.datetime.now() process_control.save() XploreUpdateResultsSummary = { 'tags_processed': 0, 'xplore_connection_errors': 0, 'xplore_hits_without_id': 0, 'existing_relationship_count': 0, 'relationships_to_periodicals_created': 0, 'relationships_to_conferences_created': 0, 'relationships_to_standards_created': 0, 'society_relationships_created': 0, 'resources_not_found': 0 } now = datetime.datetime.now() resSum = XploreUpdateResultsSummary log('Import Xplore Articles into Resource') log('Started at %s' % now) resource_type = ResourceType.objects.getFromName('periodical') tag_type = NodeType.objects.getFromName('tag') tags = Node.objects.filter(node_type=tag_type).order_by('name') if use_resume: # Filter out all tags up to and including the last processed tag so we can resume where we left off. process_control = ProcessControl.objects.get( type=PROCESS_CONTROL_TYPES.XPLORE_IMPORT) last_processed_tag = process_control.last_processed_tag log('Resuming from tag %r.' % last_processed_tag.name) assert last_processed_tag is not None, 'Trying to resume, but last_processed_tag (%r) is None.' % last_processed_tag old_tags_count = tags.count() tags = tags.filter(name__gt=last_processed_tag.name) new_tags_count = tags.count() log(' Found %s tags (filtered out %s).' % (new_tags_count, old_tags_count - new_tags_count)) num_tags = tags.count() last_updated = None last_tag = None society_set = None for i, tag in enumerate(tags): if use_processcontrol: # Update the log. process_control = ProcessControl.objects.get( type=PROCESS_CONTROL_TYPES.XPLORE_IMPORT) # Update the 'Processing...' log every 1 seconds. 
if last_updated is None or datetime.datetime.now( ) - last_updated > datetime.timedelta(seconds=1): process_control.log = re.sub( r'(?m)^Processing tag .+\n', '', process_control.log) process_control.log += 'Processing tag %r (%s/%s).\n' % ( tag.name, i, num_tags) last_updated = datetime.datetime.now() if last_tag is not None: # Record the last-updated tag name, in case we want to resume. process_control.last_processed_tag = last_tag process_control.date_updated = datetime.datetime.now() process_control.save() if not process_control.is_alive: # The database has signalled for this to quit now. log('is_alive is false, quitting.') break resSum['tags_processed'] += 1 log('Querying Xplore for Tag: %s' % tag.name) xplore_query_url = ieeetags.settings.EXTERNAL_XPLORE_URL + urllib.urlencode( { # Number of results 'hc': xplore_hc, # NOTE: Must UTF8 encode here, otherwise urlencode() fails with non-ASCII names. 'md': tag.name.encode('utf-8') }) log('Calling %s' % xplore_query_url) try: file = urllib2.urlopen(xplore_query_url) except (urllib2.URLError, httplib.BadStatusLine): log('Could not connect to the IEEE Xplore site to perform search.' ) resSum['xplore_connection_errors'] += 1 continue else: from xml.dom.minidom import parseString errors = [] # Need to correctly handle UTF8 responses from urlopen() above, avoid UnicodeEncodeError. 
try: temp, encoding = file.headers['content-type'].split( 'charset=') except ValueError: encoding = 'utf-8' ucontents = file.read() file.close() ucontents = ucontents.decode(encoding, 'replace').encode('utf-8') from lxml import etree # dom1 = xml.dom.minidom.parseString(ucontents) dom1 = etree.fromstring(ucontents) #xhits = dom1.documentElement.getElementsByTagName('document') xhits = dom1.findall('document') distinct_issns = {} distinct_conference_punumbers = {} distinct_standard_punumbers = {} for i, xhit in enumerate(xhits): #xhit_title = xhit.getElementsByTagName('title')[0].firstChild.nodeValue xhit_title = xhit.findtext('title') #xhit_pubtype = xhit.getElementsByTagName('pubtype')[0].firstChild.nodeValue xhit_pubtype = xhit.findtext('pubtype') if xhit_pubtype == "Journals": #issn = xhit.getElementsByTagName('issn') issn = xhit.findall('issn') if not len(issn): try: log('No ISSN node found in Xplore result with title "%s"' % xhit_title) resSum['xplore_hits_without_id'] += 1 except UnicodeEncodeError, e: log('No ISSN node found in Xplore result with UNPRINTABLE TITLE. See error.' ) log(e) continue # elif not issn[0].firstChild.nodeValue in distinct_issns: # distinct_issns[issn[0].firstChild.nodeValue] = xhit_title elif not issn[0].text in distinct_issns: distinct_issns[issn[0].text] = xhit_title elif xhit_pubtype == "Conferences": punumber = xhit.getElementsByTagName('punumber') punumber = xhit.findall('punumber') if not len(punumber): try: log('No punumber node found in Xplore result with title "%s"' % xhit_title) resSum['xplore_hits_without_id'] += 1 except UnicodeEncodeError, e: log('No punumber node found in Xplore result with UNPRINTABLE TITLE. See error.' 
) log(e) continue # elif not punumber[0].firstChild.nodeValue in distinct_conference_punumbers: # distinct_conference_punumbers[punumber[0].firstChild.nodeValue] = xhit_title elif not punumber[ 0].text in distinct_conference_punumbers: distinct_conference_punumbers[ punumber[0].text] = xhit_title elif xhit_pubtype == "Standards": #punumber = xhit.getElementsByTagName('punumber') punumber = xhit.findall('punumber') if not len(punumber): try: log('No punumber node found in Xplore result with title "%s"' % xhit_title) resSum['xplore_hits_without_id'] += 1 except UnicodeEncodeError, e: log('No punumber node found in Xplore result with UNPRINTABLE TITLE. See error.' ) log(e) continue # elif not punumber[0].firstChild.nodeValue in distinct_standard_punumbers: # distinct_standard_punumbers[punumber[0].firstChild.nodeValue] = xhit_title elif not punumber[ 0].text in distinct_standard_punumbers: distinct_standard_punumbers[ punumber[0].text] = xhit_title
# NOTE(review): fragment (starts mid-loop) of a Python-2 spider master daemon, a
# near-duplicate of the monitor variant earlier in this file; left byte-identical.
#  * `raise ("service is alive")` raises a plain string — TypeError on any modern
#    Python ("exceptions must derive from BaseException"); should be e.g.
#    `raise RuntimeError("service is alive")`, or exit(0) as the sibling does.
#  * bare `except:` around os.kill swallows everything.
#  * jobs.pop(pid) inside `for pid in jobs.keys()` is only safe on Python 2,
#    where keys() returns a list snapshot.
#  * The inline Chinese comment says: second approach — simply ignore SIGCHLD so
#    the kernel reaps exited children itself (effectively via waitpid).
break for pid in jobs.keys(): if not check_status(pid): jobs.pop(pid) _c += 1 time.sleep(0.1) for pid in jobs: try: os.kill(pid, signal.SIGKILL) except: pass os.remove(pid_file) if __name__ == '__main__': if not set_exists_pid(): logger.error("service is alive") raise ("service is alive") if daemon_flag: daemonize() setproctitle("spider :Master") signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGTTIN, sig_add) signal.signal(signal.SIGTTOU, sig_reduce) #第二种方法,直接忽视子进程退出前发出的sigchld信号,交给内核,让内核来收拾,其实也是让内核用waitpid来解决。 signal.signal(signal.SIGCHLD, signal.SIG_IGN) logger.info('main process: %d start', os.getpid()) spawn_worker() logger.info('main: %d kill all jobs done', os.getpid())
# NOTE(review): fragment (starts mid-function) of a Python-2 log watcher that
# feeds per-minute counts to a graphite/carbon socket; left byte-identical.
#  * `elif i[-2] not in ret:` tests the raw line string `i` where every other
#    branch uses the split fields `ln` — almost certainly meant `ln[-2]`, and
#    with that fix the elif collapses to a plain `else`.
#  * main() loops forever, never closes the socket, and has no reconnect/error
#    handling; sock.send(s) on a str is Python-2-only.
#  * getMessageNum/getTime/getLastTime/checkMessage are defined above this chunk.
info+=50 s = True break else: x = getMessageNum(info) s = False break for i in x.strip().split('\n'): if datetime.timedelta(seconds=0) < tm - getTime(i) <= datetime.timedelta(seconds=60): ln = i.split() if ln[-2] in ret: ret[ln[-2]]+=1 elif i[-2] not in ret: ret[ln[-2]]=1 return ret def main(): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect(('127.0.0.1',2003)) while True: h = getLastTime() for k, v in checkMessage().items(): s = 'http.http_%s %d %s\n' % (k,v,h.strftime('%s')) sock.send(s) time.sleep(60) if __name__ == "__main__": daemonize(stdout='/var/log/httpd/monitor.log', stderr='/var/log/httpd/monitor_error.log') main()
# NOTE(review): interior of a Game-like class (the class header and its
# TIME_TO_SLEEP constant are outside this chunk) plus the __main__ block; left
# byte-identical.
#  * __init__ registers the game systems (Attack, MoodSwings, Reproduction,
#    Time, Weather) with a Manager and prepares the API server thread.
#  * run() ticks the manager by Game.DELTA every Game.TIME_TO_SLEEP seconds;
#    stop() only clears the flag, so shutdown takes effect at the next tick.
#  * `daemonize.daemonize(Game)` here returns an object with .execute() — a
#    project-specific wrapper API, unlike the stdout/stderr-argument daemonize
#    used by the other snippets in this file; TODO confirm.
DELTA = 1 def __init__(self): self.is_running = True self.manager = Manager() Attack(frequence=4, max_to_kill=2).register_to(self.manager) MoodSwings().register_to(self.manager) Reproduction().register_to(self.manager) Time().register_to(self.manager) Weather(constants.WEATHER_SUNNY, 10).register_to(self.manager) self.api_server_thread = api_handler.setup(self.manager) def run(self): self.api_server_thread.start() while self.is_running: time.sleep(Game.TIME_TO_SLEEP) self.manager.update(Game.DELTA) self.api_server_thread.stop() def stop(self): self.is_running = False if __name__ == '__main__': daemon_game = daemonize.daemonize(Game) daemon_game.execute()
# NOTE(review): pywikibot's handleArgs() (the newer revision; an older fragment
# of the same function appears near the top of this chunk).  The span around
# `arg.startswith("-user:"******` is corrupted — credential-looking text was
# scrubbed to asterisks when this file was assembled — so the -user/-putthrottle
# handling cannot be reconstructed safely by hand; restore it from the pywikibot
# repository.  Everything is therefore left byte-identical.  Otherwise this
# revision already fixes the older copy's issues: it uses int(_val) for numeric
# config settings, checks `if m:` before using the regex match, and computes the
# -daemonize: slice with len('-daemonize:').
def handleArgs(*args): """Handle standard command line arguments, return the rest as a list. Takes the commandline arguments as Unicode strings, processes all global parameters such as -lang or -log. Returns a list of all arguments that are not global. This makes sure that global arguments are applied first, regardless of the order in which the arguments were given. args may be passed as an argument, thereby overriding sys.argv """ # get commandline arguments if necessary if not args: # it's the version in pywikibot.__init__ that is changed by scripts, # not the one in pywikibot.bot. args = pywikibot.argvu[1:] # get the name of the module calling this function. This is # required because the -help option loads the module's docstring and because # the module name will be used for the filename of the log. moduleName = calledModuleName() if not moduleName: moduleName = "terminal-interface" nonGlobalArgs = [] username = None do_help = False for arg in args: if arg == '-help': do_help = True elif arg.startswith('-family:'): config.family = arg[len("-family:"):] elif arg.startswith('-lang:'): config.mylang = arg[len("-lang:"):] elif arg.startswith("-user:"******"-user:"******"-putthrottle:"):]) elif arg.startswith('-pt:'): config.put_throttle = int(arg[len("-pt:"):]) elif arg == '-log': if moduleName not in config.log: config.log.append(moduleName) elif arg.startswith('-log:'): if moduleName not in config.log: config.log.append(moduleName) config.logfilename = arg[len("-log:"):] elif arg == '-nolog': if moduleName in config.log: config.log.remove(moduleName) elif arg in ('-cosmeticchanges', '-cc'): config.cosmetic_changes = not config.cosmetic_changes output(u'NOTE: option cosmetic_changes is %s\n' % config.cosmetic_changes) elif arg == '-simulate': config.simulate = True # # DEBUG control: # # The framework has four layers (by default, others can be added), # each designated by a string -- # # 1. "comm": the communication layer (http requests, etc.) # 2. 
"data": the raw data layer (API requests, XML dump parsing) # 3. "wiki": the wiki content representation layer (Page and Site # objects) # 4. "bot": the application layer (user scripts should always # send any debug() messages to this layer) # # The "-debug:layer" flag sets the logger for any specified # layer to the DEBUG level, causing it to output extensive debugging # information. Otherwise, the default logging setting is the INFO # level. "-debug" with no layer specified sets _all_ loggers to # DEBUG level. # # This method does not check the 'layer' part of the flag for # validity. # # If used, "-debug" turns on file logging, regardless of any # other settings. # elif arg == '-debug': if moduleName not in config.log: config.log.append(moduleName) if "" not in config.debug_log: config.debug_log.append("") elif arg.startswith("-debug:"): if moduleName not in config.log: config.log.append(moduleName) component = arg[len("-debug:"):] if component not in config.debug_log: config.debug_log.append(component) elif arg in ('-verbose', '-v'): config.verbose_output += 1 elif arg == '-daemonize': import daemonize daemonize.daemonize() elif arg.startswith('-daemonize:'): import daemonize daemonize.daemonize(redirect_std=arg[len('-daemonize:'):]) else: # the argument depends on numerical config settings # e.g. 
-maxlag: try: _arg, _val = arg[1:].split(':') # explicitly check for int (so bool doesn't match) if type(getattr(config, _arg)) is not int: raise TypeError setattr(config, _arg, int(_val)) except (ValueError, TypeError, AttributeError): # argument not global -> specific bot script will take care nonGlobalArgs.append(arg) if username: config.usernames[config.family][config.mylang] = username init_handlers() if config.verbose_output: # Please don't change the regular expression here unless you really # have to - some git versions (like 1.7.0.4) seem to treat lines # containing just `$Id:` as if they were ident lines (see # gitattributes(5)) leading to unwanted behaviour like automatic # replacement with `$Id: e8487e274d1ff7be3b45b58b80b486d30db77e20 $` # or `$Id: e8487e274d1ff7be3b45b58b80b486d30db77e20 $`. m = re.search(r"\$Id" r": (\w+) \$", pywikibot.__version__) if m: pywikibot.output(u'Pywikibot r%s' % m.group(1)) else: # Version ID not availlable on SVN repository. # Maybe these informations should be imported from version.py pywikibot.output(u'Pywikibot SVN repository') pywikibot.output(u'Python %s' % sys.version) if do_help: showHelp() sys.exit(0) pywikibot.debug(u"handleArgs() completed.", _logger) return nonGlobalArgs
# NOTE(review): top of a Python-2 xgrid submission script (truncated below this
# chunk); left byte-identical.  Notes:
#  * sys.argv[1] is read without an argument-count check — running with no
#    arguments raises IndexError.
#  * With -d, the script sleeps 5s, daemonizes with stdout/stderr redirected to
#    submission.log, then chdirs back to the saved starting_point (daemonize
#    presumably changes the working directory — TODO confirm).
#  * `from xgrid_tools import *` is a wildcard import performed after
#    daemonizing, at module level.
#!/usr/bin/python import sys,os import HAConf from random import randint import time starting_point = os.getcwd() if "-d" in sys.argv: print "Daemonizing..." time.sleep(5) #To allow for very long gaps between submissions turn into a daemon to start with import daemonize daemonize.daemonize('/dev/null',starting_point + "/submission.log",starting_point + "/submission.log") print "Submission Daemon started with pid %d" % os.getpid() print "Started %s" % time.ctime() os.chdir(starting_point) from xgrid_tools import * input_file = sys.argv[1] number_of_repeats = 50 job_list =[] seed_packet = [randint(0,100000) for i in range(number_of_repeats)] #Tree structure is different here #Type/Sequence/Repeats