def __init__(self, pidfile, confile):
    '''CdmaAlarm class constructor.'''
    Daemon.__init__(self, pidfile)
    self.confile = confile
    self.config = ConfigParser.ConfigParser()
    if os.path.exists(self.confile):
        self.config.read(self.confile)
        if not self.config.has_section("omp"):
            print("No 'omp' section.")
            sys.exit()
        else:
            self.fwHost = self.config.get("omp", "fw_host")
            self.fwPort = self.config.getint("omp", "fw_port")
            self.fwUser = self.config.get("omp", "fw_user")
            self.fwPasswd = self.config.get("omp", "fw_passwd")
            self.ompUser = self.config.get("omp", "omp_user")
            self.ompPasswd = self.config.get("omp", "omp_passwd")
            self.ompOldPasswd = self.config.get("omp", "omp_passwd_old")
            self.logfile = self.config.get("omp", "omp_alarm_log")
            self.interval = self.config.getint("omp", "omp_alarm_interval")
        if not self.config.has_section("mysql"):
            print("No 'mysql' section.")
            sys.exit()
        else:
            self.myHost = self.config.get("mysql", "mysql_host")
            self.myPort = self.config.getint("mysql", "mysql_port")
            self.myUser = self.config.get("mysql", "mysql_user")
            self.myPasswd = self.config.get("mysql", "mysql_passwd")
            self.myDB = self.config.get("mysql", "mysql_db")
    else:
        print("CDMA configuration file does not exist.")
        sys.exit()
def __init__(self, useSSL=True):
    Daemon.__init__(self, useSSL)
    self.__MIN_ID = 100000
    self.__MAX_ID = 100000000
    self.__SESSION_TIMEOUT_HOURS = 48
    self.__loggedInUsers = {}
    self.__reverseLoggedInUsers = {}
    self.__userProtocols = {}
    self.__systemProtocol = Protocol(None)
    self.__controlProtocol = Protocol(None)
    try:
        self.__systemProtocol.open(DaemonLocations.systemDaemon[0],
                                   DaemonLocations.systemDaemon[1])
    except:
        print "warning, could not connect to system daemon"
    try:
        self.__controlProtocol.open(DaemonLocations.controlDaemon[0],
                                    DaemonLocations.controlDaemon[1])
    except:
        print "warning, could not connect to control daemon"
    self.registerMethodHandler("login", self.login)
    self.registerMethodHandler("logout", self.logout)
    self.registerMethodHandler("system", self.systemOperation)
    self.registerMethodHandler("control", self.controlOperation)
    self.__consoleOperationReply = None
    self.__controlOperationReply = None
    self.__systemOperationReply = None
    self.__ldapClient = LDAPClient("ldap://henldap:389")
def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
    Daemon.__init__(self, pidfile, stdin, stdout, stderr)
    self.systems = {
        "otc-gfxtest-bsw-01.local": {"switch": 1, "outlet": 1},
        "otc-gfxtest-bsw-02.local": {"switch": 1, "outlet": 2},
        "otc-gfxtest-bsw-03.local": {"switch": 1, "outlet": 3},
        "otc-gfxtest-bsw-04.local": {"switch": 1, "outlet": 4},
        "otc-gfxtest-bsw-05.local": {"switch": 1, "outlet": 5},
        "otc-gfxtest-bsw-06.local": {"switch": 1, "outlet": 6},
        # "otc-gfxtest-bsw-07.local": {"switch": 1, "outlet": 7},
        # "otc-gfxtest-sklgt2-01.local": {"switch": 1, "outlet": 8},
        # byt does not boot after power loss
        # "otc-gfxtest-byt-01.local": {"switch": 2, "outlet": 1},
        # "otc-gfxtest-byt-02.local": {"switch": 2, "outlet": 2},
        # "otc-gfxtest-byt-03.local": {"switch": 2, "outlet": 3},
        # "otc-gfxtest-byt-04.local": {"switch": 2, "outlet": 4},
        "otc-gfxtest-kbl-01.local": {"switch": 2, "outlet": 5},
        "otc-gfxtest-kbl-02.local": {"switch": 2, "outlet": 4},
        "otc-gfxtest-bxt-02.local": {"switch": 2, "outlet": 6},
        "otc-gfxtest-bxt-01.local": {"switch": 2, "outlet": 7},
        "otc-gfxtest-sklgt2-02.local": {"switch": 2, "outlet": 8},
    }
    self.switches = {
        1: PowerSwitch(hostname="192.168.1.2", userid="admin", password="******"),
        2: PowerSwitch(hostname="192.168.1.3", userid="admin", password="******"),
    }
    self.hangs = []
def __init__(self, pidfile, logfile_path=None, amqp_host='localhost',
             svm_save_filename=None, status_filename=None,
             X_filename=None, y_filename=None):
    if args:
        self.log = Logger(logfile_path, args.verbose)
    else:
        self.log = Logger(logfile_path)
    # For progress messages
    self.files_processed = 0
    # Make sure pickle supports compress pickle.HIGHEST_PROTOCOL
    self.svc = svm.SVC(kernel="linear")
    self.X_filename = os.path.abspath(X_filename) if X_filename else None
    self.y_filename = os.path.abspath(y_filename) if y_filename else None
    self.__X = None
    self.__y = None
    self.amqp_host = amqp_host
    self.amqp_queue = 'classifyd'
    self.svm_filename = os.path.abspath(svm_save_filename) if svm_save_filename else None
    self.status_filename = status_filename
    if self.status_filename and os.path.exists(self.status_filename):
        # if the file exists, try to load it from there
        f = open(self.status_filename, 'rb')
        self.status = pickle.load(f)
        f.close()
    else:
        # if not, then assume we are starting over
        self.status = Status()
    # call the parent's __init__ to initialize the daemon variables
    Daemon.__init__(self, pidfile)
def __init__(self, configFilePath='/usr/local/hen/etc/configs/config'):
    Daemon.__init__(self)
    self.__version = "Console Daemon v0.2 (dumb)"
    self.__terminalServerConnections = {}
    self.__terminalServerSockets = []
    self.__configFilePath = configFilePath
    self.__configFile = ConfigParser.ConfigParser()
    self.__configFile.read(configFilePath)
    self.__henPhysicalTopology = self.__configFile.get('MAIN', 'PHYSICAL_TOPOLOGY')
    self.__henLogPath = self.__configFile.get('MAIN', 'LOG_PATH')
    self.__parser = HenParser(self.__henPhysicalTopology,
                              None,
                              None,
                              self.__henLogPath,
                              None, None, None,
                              None)
    self.__controlProtocol = None
    # Create instances for all terminal servers in the testbed
    self.__terminalServerNodes = {}
    for terminalServerNode in self.__parser.getNodes("serial", "all").values():
        self.__terminalServerNodes[terminalServerNode.getNodeID()] = terminalServerNode.getInstance()
    self.__computerNodes = self.__parser.getNodes("computer", "all")
    self.__registerMethods()
def __init__(self, config_file):
    """CdmaSms class constructor."""
    config = configparser.ConfigParser()
    if os.path.exists(config_file):
        config.read(config_file)
        if not config.has_section("sms"):
            print("No 'sms' section.")
            sys.exit()
        else:
            self.sms_host = config.get("sms", "sms_host")
            self.sms_port = config.getint("sms", "sms_port")
            self.sms_user = config.get("sms", "sms_user")
            self.sms_passwd = config.get("sms", "sms_passwd")
            self.sms_send_uri = config.get("sms", "sms_send_uri")
            self.logfile = config.get("sms", "sms_log")
            self.sms_pid = config.get("sms", "sms_pid")
            self.interval = config.getint("sms", "sms_interval")
        if not config.has_section("mysql"):
            print("No 'mysql' section.")
            sys.exit()
        else:
            self.mysql_host = config.get("mysql", "mysql_host")
            self.mysql_port = config.getint("mysql", "mysql_port")
            self.mysql_user = config.get("mysql", "mysql_user")
            self.mysql_passwd = config.get("mysql", "mysql_passwd")
            self.mysql_db = config.get("mysql", "mysql_db")
    else:
        print("CDMA configuration file does not exist.")
        sys.exit()
    Daemon.__init__(self, self.sms_pid)
def __init__(self, pidfile, cfgfile):
    Daemon.__init__(self, pidfile)
    self.jobs = {}
    self.immediately = False
    self.scheduler = Scheduler(daemonic=False)
    self.logger = logging.getLogger(self.__class__.__name__)
    if os.path.exists(cfgfile):
        with open(cfgfile, 'rt') as f:
            config = yaml.load(f.read())
            for k1 in config.keys():
                if k1 == 'version':
                    pass
                if k1 == 'immediately':
                    self.immediately = config[k1]
                elif k1 == 'taobao':
                    self.jobs[k1] = config[k1]
                    self.jobs[k1]['id'] = None
                    if 'chktime' in self.jobs[k1].keys():
                        self.jobs[k1]['btime'] = time.strptime(self.jobs[k1]['chktime'].split('-')[0], '%H:%M')
                        self.jobs[k1]['etime'] = time.strptime(self.jobs[k1]['chktime'].split('-')[1], '%H:%M')
                        if self.jobs[k1]['btime'] >= self.jobs[k1]['etime']:
                            raise ValueError('"chktime" is illegal')
                    else:
                        raise ValueError('No "chktime" found in the configuration.')
                else:
                    pass
    else:
        self.logger.error('{0} not found'.format(cfgfile))
def __init__(self, pid_file, sent=None):
    Daemon.__init__(self, pid_file)
    c = config.Config()
    self.config = c.cfg
    # self.log = logging.getLogger('trends')
    self.stats_freq = 3600
    self.sid = SentimentIntensityAnalyzer()
def __init__(self):
    Daemon.__init__(self)
    self.__hen_manager = HenManager()
    self.__hen_manager.initLogging()
    self.__hardware_processor = LSHardwareProcessor(self.__hen_manager,
                                                    self.__parseMacTableURL())
    self.__registerMethods()
def __init__(self, debug):
    self.__debug = debug
    Daemon.__init__(self)
    self.__registerMethods()
    self.device_list = []
    self.__detector_daemon = DetectorDaemon(self.device_list, self.__debug)
    self.__detector_daemon.start()
def stop(self, *args, **kwargs):
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(False)
    GPIO.setup(self.rpin, GPIO.OUT)
    GPIO.setup(self.gpin, GPIO.OUT)
    GPIO.setup(self.bpin, GPIO.OUT)
    self.rled = GPIO.PWM(self.rpin, self.freq)
    self.gled = GPIO.PWM(self.gpin, self.freq)
    self.bled = GPIO.PWM(self.bpin, self.freq)
    self.rled.ChangeDutyCycle(0)
    self.gled.ChangeDutyCycle(0)
    self.bled.ChangeDutyCycle(50)
    time.sleep(1)
    self.rled.stop()
    self.gled.stop()
    self.bled.stop()
    GPIO.cleanup()
    if os.path.exists(self.cf):
        os.remove(self.cf)
    if os.path.exists(self.bf):
        os.remove(self.bf)
    Daemon.stop(self, *args, **kwargs)
def __init__(
    self,
    configLoc,
    pidfile="/tmp/httpJsonStats.pid",
    stdin="/dev/null",
    stdout="/tmp/httpJson.log",
    stderr="/tmp/httpJson.log",
):
    self.configLoc = configLoc
    self._jsonStr = ""
    self._CARBON_SERVER = "server.domain.com"
    self._CARBON_PORT = 2003
    self._delay = 20
    self._stdin = stdin
    self._stdout = stdout
    self._stderr = stderr
    self._pidfile = pidfile
    self._evaluateConfig()
    if self._jsonStr["global"]["GRAPHITE_SERVER"]:
        self._CARBON_SERVER = self._jsonStr["global"]["GRAPHITE_SERVER"]
    if self._jsonStr["global"]["GRAPHITE_PORT"]:
        self._CARBON_PORT = self._jsonStr["global"]["GRAPHITE_PORT"]
    if self._jsonStr["global"]["INTERVAL"]:
        self._delay = self._jsonStr["global"]["INTERVAL"]
    if self._jsonStr["global"]["LOG_FILE"]:
        self._stdout = self._jsonStr["global"]["LOG_FILE"]
    if self._jsonStr["global"]["ERR_LOG_FILE"]:
        self._stderr = self._jsonStr["global"]["ERR_LOG_FILE"]
    if self._jsonStr["global"]["PID_FILE"]:
        self._pidfile = self._jsonStr["global"]["PID_FILE"]
    del self._jsonStr["global"]
    Daemon.__init__(self, self._pidfile, self._stdin, self._stdout, self._stderr)
def __init__(self, pidfile, confile):
    '''ZhwhPanCity class constructor.'''
    Daemon.__init__(self, pidfile)
    self.confile = confile
    self.config = ConfigParser.ConfigParser()
    if os.path.exists(self.confile):
        self.config.read(self.confile)
        if not self.config.has_section("pancity"):
            print("No 'pancity' section.")
            sys.exit()
        else:
            self.mail_smtp_host = self.config.get("pancity", "mail_smtp_host")
            self.mail_smtp_port = self.config.getint("pancity", "mail_smtp_port")
            self.mail_from = self.config.get("pancity", "mail_from")
            self.mail_password = self.config.get("pancity", "mail_password")
            self.mail_to = self.config.get("pancity", "mail_to")
            self.xls_path = self.config.get("pancity", "pancity_xls_path")
            self.interval = self.config.getint("pancity", "pancity_interval")
            self.logfile = self.config.get("pancity", "pancity_log")
        if not self.config.has_section("mysql"):
            print("No 'mysql' section.")
            sys.exit()
        else:
            self.myHost = self.config.get("mysql", "mysql_host")
            self.myPort = self.config.getint("mysql", "mysql_port")
            self.myUser = self.config.get("mysql", "mysql_user")
            self.myPasswd = self.config.get("mysql", "mysql_passwd")
            self.myDB = self.config.get("mysql", "mysql_db")
    else:
        print("ZHWH configuration file does not exist.")
        sys.exit()
def __init__(self, pidfile, argv):
    Daemon.__init__(self, pidfile)
    reload(sys)
    sys.setdefaultencoding('utf-8')
    # acquire cookie
    cookie_jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
    urllib2.install_opener(opener)
    self.parser = optparse.OptionParser(self.MSG_USAGE)
    self.parser.add_option('-d', '--dbd', action='store_true', dest='dbd',
                           help="DBD", default=False)
    self.parser.add_option('-u', '--undbd', action='store_true', dest='undbd',
                           help="Un-DBD", default=False)
    self.parser.add_option('-i', '--input', action='store_true', dest='input',
                           help="Input name & password", default=False)
    self.parser.add_option('-t', '--time', action='store_true', dest='time',
                           help="Auto time card", default=False)
    self.parser.add_option('-g', '--debug', action='store_true', dest='debug',
                           help="for debug", default=False)
    self.parser.add_option('--sync', action='store_true', dest='sync',
                           help="Sync data with server", default=False)
    self.parser.add_option('--local', action='store_true', dest='local',
                           help="Not sync with server", default=False)
    self.parser.add_option('--nodaemon', action='store_true', dest='nodaemon',
                           help="Start with no-daemon", default=False)
    self.options, self.args = self.parser.parse_args(argv)
    hdlr = logging.FileHandler(self.log_file)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    self.logger.addHandler(hdlr)
    self.logger.setLevel(logging.INFO)
def __init__(self, pidfile=None):
    pidfile = pidfile or PidFile(PID_NAME, PID_DIR).get_path()
    Daemon.__init__(self, pidfile)
    self.basic_plugin = ['ServerPlugin']
    config = agentConfig.get_config()
    self.sender = Sender(port=config['recv_port'])
    self.check_frequency = config['check_freq']
def __init__(self):
    Daemon.__init__(self)
    # Initialise variables
    self.__henManager = HenManager()
    self.__nodes = None
    self.__vlans = {}
    # self.__vlan_info = {}  # switch_name -> [vlan_name, vlan_name, ...]
    self.__switch_instances = {}
    self.__test_mode = False
    self.__minimum_id = 200
    self.__maximum_id = 2000
    self.__vlanOwnerFilename = "/usr/local/hen/etc/switchd/vlan_owner.dat"
    self.__int_to_vlan = {}  # (computer_str, interface_str) -> vlan_name
    self.__switchdb = SwitchDB()
    log.debug("Switchdb " + str(self.__switchdb))
    # Register hen rpc methods and handlers
    log.debug("Registering methods")
    self.__registerMethods()
    # Load vlan info
    log.debug("Loading vlan info")
    self.__initaliseVlanInfo()
    # Setup mac polling
    log.debug("Loading henmanager")
    self.__switchdb.setHenManager(self.__henManager)
    log.debug("Loading links Db")
    self.__switchdb.loadLinksDb()
    log.debug("Initiating Nodes")
    self.initiateNodes()
    # vlan owners
    self.__vlan_owner_name = {}
    self.__vlan_name_owner = {}
    self.loadVlanOwnerFile()
def __init__(self):
    cur_path = os.path.dirname(os.path.abspath(__file__))
    self.app = webapp.create_app()
    self.app.debug = True
    self.port = get_config().getint("setup", "listen_port")
    self.pidfile = os.path.join(cur_path, "..", get_config().get("setup", "pidpath"))
    Daemon.__init__(self, self.pidfile)
def __init__(self, config_file):
    """CdmaAlarm class constructor."""
    self.config_file = config_file
    self.config = ConfigParser()
    if os.path.exists(self.config_file):
        self.config.read(self.config_file)
        if not self.config.has_section("omp"):
            print("No 'omp' section.")
            sys.exit()
        else:
            self.omp_host = self.config.get("omp", "omp_host")
            self.omp_port = self.config.getint("omp", "omp_port")
            self.omp_user = self.config.get("omp", "omp_user")
            self.omp_passwd = self.config.get("omp", "omp_passwd")
            self.omp_old_passwd = self.config.get("omp", "omp_passwd_old")
            self.logfile = self.config.get("omp", "omp_alarm_log")
            self.omp_alarm_pid = self.config.get("omp", "omp_alarm_pid")
            self.interval = self.config.getint("omp", "omp_alarm_interval")
        if not self.config.has_section("mysql"):
            print("No 'mysql' section.")
            sys.exit()
        else:
            self.mysql_host = self.config.get("mysql", "mysql_host")
            self.mysql_port = self.config.getint("mysql", "mysql_port")
            self.mysql_user = self.config.get("mysql", "mysql_user")
            self.mysql_passwd = self.config.get("mysql", "mysql_passwd")
            self.mysql_db = self.config.get("mysql", "mysql_db")
    else:
        print('CDMA configuration file does not exist.')
        sys.exit()
    Daemon.__init__(self, self.omp_alarm_pid)
def __init__(self, pidfile, confile):
    '''CdmaSms class constructor.'''
    Daemon.__init__(self, pidfile)
    config = ConfigParser.ConfigParser()
    if os.path.exists(confile):
        config.read(confile)
        if not config.has_section("sms"):
            print("No 'sms' section.")
            sys.exit()
        else:
            self.smsHost = config.get("sms", "sms_host")
            self.smsPort = config.getint("sms", "sms_port")
            self.smsUser = config.get("sms", "sms_user")
            self.smsPasswd = config.get("sms", "sms_passwd")
            self.smsSendUri = config.get("sms", "sms_send_uri")
            self.logfile = config.get("sms", "sms_log")
            self.interval = config.getint("sms", "sms_interval")
        if not config.has_section("mysql"):
            print("No 'mysql' section.")
            sys.exit()
        else:
            self.myHost = config.get("mysql", "mysql_host")
            self.myPort = config.getint("mysql", "mysql_port")
            self.myUser = config.get("mysql", "mysql_user")
            self.myPasswd = config.get("mysql", "mysql_passwd")
            self.myDB = config.get("mysql", "mysql_db")
    else:
        print("CDMA configuration file does not exist.")
        sys.exit()
def __init__(self, *args, **kwargs):
    self.sb = None
    self.params = None
    self.nodes = None
    self.processes = {}
    self.running = True
    Daemon.__init__(self, *args, **kwargs)
class TorrentClient(object):
    def __init__(self, hostname=None, port=9090):
        self.daemon = None
        if not hostname:
            self.daemon = Daemon(["transmission-daemon", "-f", "-p", str(port)])
            self.daemon.start()
            hostname = "localhost"
        self._connection = TransmissionConnection(hostname, port)
        self._session = self._connection.execute(Command("session-get"))

    def __del__(self):
        if self.daemon:
            self.daemon.stop()

    def add_torrent(self, auto=None, metadata=None, filename=None, file=None, url=None):
        if auto:
            if isinstance(auto, str):
                if "://" in auto:
                    data = urllib2.urlopen(auto).read()
                else:
                    data = open(auto, "r").read()
            elif hasattr(auto, "read"):
                data = auto.read()
            elif hasattr(auto, "content"):
                data = auto.content
            else:
                raise AttributeError()
        else:
            if metadata:
                data = metadata
            elif filename:
                data = open(filename, "r").read()
            elif url:
                data = urllib2.urlopen(url).read()
            elif file:
                data = file.read()
        data = base64.encodestring(data)
        command = Command("torrent-add")
        command["metainfo"] = data
        command["paused"] = True
        torrent = self._connection.execute(command)
        return self.get_torrent(torrent["torrent-added"]["id"])

    def get_torrent(self, id):
        command = TorrentGetCommand(Torrent.static_fields, id)
        torrent = self._connection.execute(command)
        return Torrent(self, torrent)

    def _get_torrents(self):
        command = TorrentGetListCommand(Torrent.static_fields)
        list = self._connection.execute(command)
        torrent_list = map(lambda t: Torrent(self, t), list)
        return torrent_list

    download_dir = session_property("download-dir")
    torrents = property(_get_torrents)
def __init__(self, config=CONFIG_FILE):
    """\brief Registers remote methods and starts update thread (timer)
    """
    Daemon.__init__(self)
    self.__config_path = config
    self.__parseConfig(self.__config_path)
    self.__registerMethods()
    self.__checker_lock = threading.Lock()
def __init__(self):
    self.readConfig(CONFIG_FILE)
    Daemon.__init__(self, self.PIDFILE)
    self.alerts = {}
    self.dict_modified = 0
    self.delay_window = 0
    self.last_notification = 0
    self.host_name = ''
def __init__(self, config=None, section=None):
    """ override parent __init__, then run it at end """
    self.conf = config
    self.sect = section
    prefix = self.conf.get(self.sect, 'log') + '/' + self.sect
    Daemon.__init__(self, prefix + '.pid', stdout=prefix + '.log', stderr=prefix + '.log')
def __init__(self, timeinterval=30,
             logfile="machine_%s.log" % MachineTools.getMachineName(),
             loglevel=logging.INFO):
    curfile = os.path.basename(sys._getframe().f_code.co_filename)
    curfile, ext = os.path.splitext(curfile)
    pidfile = os.path.abspath(curfile + "_" + MachineTools.getMachineName() + ".pid")
    Daemon.__init__(self, os.path.abspath(pidfile))
    self.timeInterval = timeinterval
    self.targetFile = logfile
    self.logLevel = loglevel
def __init__(self, pid_file):
    """ Constructor """
    logging.basicConfig(filename='log_tweets.txt', level=logging.DEBUG)
    Daemon.__init__(self, pid_file)
    self.db = None
    print "constructor"
def __init__(self, Config):
    self.Config = Config
    pidfile = Config.pid_filename
    stdin = '/dev/null'
    stdout = '/dev/null'
    stderr = '/dev/null'
    if Config.log_filename != "":
        stderr = Config.log_filename
    Daemon.__init__(self, pidfile, stdin, stdout, stderr)
def __init__(self):
    Daemon.__init__(self)
    self.__henManager = HenManager()
    # Allow specialized instance dictionary to be created in hm
    # self.__henManager.createNodeInstances()
    self.__registerMethods()
    self.__computerID = self.__getComputerID()
    self.__computerInfo = self.__henManager.getNodes("computer")[self.__computerID]
    self.__ifaceIDtoNameMappings = self.__getIfaceMappings()
def __init__(self, confFile, pidFile, logFile, handlefulllog, beforedays=0):
    Daemon.__init__(self, pidFile)
    self.confFile = confFile
    self.pidFile = pidFile
    self.logFile = logFile
    self.beforeDays = beforedays
    self.handleFullLog = handlefulllog
    # save data defined in the config file
    self.datas = []
def __init__(self, name):
    Daemon.__init__(self, name, "-1")
    # check that independent beams is off
    if self.cfg["INDEPENDENT_BEAMS"] == "1":
        raise Exception("ServerDaemons incompatible with INDEPENDENT_BEAMS")
    self.req_host = self.cfg["SERVER_HOST"]
    self.log_dir = self.cfg["SERVER_LOG_DIR"]
    self.control_dir = self.cfg["SERVER_CONTROL_DIR"]
def stop_exec(nicename, pidfile):
    Daemon.kill(pidfile)
    flog.debug("process " + nicename + " terminated.")
def __init__(self, pf):
    Daemon.__init__(self, pf)
def test_recover_trades(self, mock_broker):
    """
    Test: Daemon.recover_trades()

    Scenarios:
    - No trades in broker or db
        Assertions:
        - Strategies recover no trades
    - Trade in broker and db
        Assertions:
        - Trade gets distributed to correct strategy
    - Trade is in broker, not db
        Assert:
        - Trade deleted from db
        - Trade gets distributed to correct strategy
    - Trade in db, broker unsure
        Assert:
        - Trade deleted from db
        - Trade's strategy does NOT adopt it
    - Trade in db has wonky data (non-existent strategy name, etc.)
    - (All): Assert: trades are distributed to strategies.
    """

    """ Scenario: No trades in broker or db """
    mock_broker.get_trades = MagicMock(return_value=Trades())
    DB.execute.return_value = []
    Daemon.recover_trades()
    # Check that no trades were adopted
    for s in Daemon.strategies:
        self.assertEqual(len(s.open_trade_ids), 0)

    """ Scenario: Open trade in broker and db """
    def db_execute(query):
        if query == 'SELECT trade_id FROM open_trades_live':
            return [('id666', )]
        elif query == 'SELECT strategy, broker FROM open_trades_live WHERE trade_id="id666"':
            return [('Fifty', 'oanda')]
        elif query == 'SELECT oanda_name FROM instruments WHERE id=4':
            return [('USD_JPY', )]
        elif query == 'DELETE FROM open_trades_live WHERE trade_id="id666"':
            return
        else:
            print('unexpected query: {}'.format(query))
            raise Exception
    DB.execute = db_execute
    trades = Trades()
    trades.append(
        Trade(units=1, broker_name='oanda', instrument=Instrument(4),
              stop_loss=90, strategy=self._strat, take_profit=100,
              trade_id='id666'))
    mock_broker.get_open_trades = MagicMock(return_value=trades)
    mock_broker.is_trade_closed = MagicMock(return_value=(False, None))
    Daemon.recover_trades()
    # check Fifty adopted one trade
    for s in Daemon.strategies:
        if s.get_name() == 'Fifty':
            self.assertEqual(len(s.open_trade_ids), 1)
        else:
            self.assertEqual(len(s.open_trade_ids), 0)
    # check trade is the trade we think it is
    self.assertEqual(Fifty.open_trade_ids[0], 'id666')
    '''
    self.assertEqual(Fifty._open_trades[0].get_broker_name(), 'oanda')
    self.assertEqual(Fifty._open_trades[0].get_instrument().get_name(), 'USD_JPY')
    self.assertEqual(Fifty._open_trades[0].get_instrument().get_id(), 4)
    self.assertEqual(Fifty._open_trades[0].get_stop_loss(), 90)
    self.assertEqual(Fifty._open_trades[0].get_take_profit(), 100)
    self.assertEqual(Fifty._open_trades[0].get_trade_id(), 'id666')
    '''
    # Cleanup
    self._strat.cleanup()

    """ Scenario: Trade is in broker, not db """
    # Trade may have been opened manually.
    # Nothing should happen for these trades.
    def db_execute(query):
        if query == 'SELECT trade_id FROM open_trades_live':
            return []
        elif query == 'SELECT strategy, broker FROM open_trades_live WHERE trade_id="id666"':
            return []
        elif query == 'SELECT oanda_name FROM instruments WHERE id=4':
            return [('USD_JPY', )]
        else:
            raise Exception
    DB.execute = MagicMock(side_effect=db_execute)
    trades = Trades()
    trades.append(
        Trade(units=1, broker_name='oanda', instrument=Instrument(4),
              stop_loss=90, strategy=self._strat, take_profit=100,
              trade_id='id666'))
    mock_broker.get_open_trades = MagicMock(return_value=trades)
    Daemon.recover_trades()
    # db should stay the same (no inserts or deletions)
    # Broker trades should stay the same...
    calls = [
        call('SELECT trade_id FROM open_trades_live'),
        call('SELECT strategy, broker FROM open_trades_live WHERE trade_id="id666"')
    ]
    DB.execute.assert_has_calls(calls)
    # Check no trades adopted
    for s in Daemon.strategies:
        self.assertEqual(len(s.open_trade_ids), 0)

    """ Scenario: Trade in db, broker unsure """
    def db_execute(query):
        if query == 'SELECT trade_id FROM open_trades_live':
            return [('id666', )]
        elif query == 'SELECT strategy, broker FROM open_trades_live WHERE trade_id="id666"':
            return [('Fifty', 'oanda')]
        elif query == 'SELECT oanda_name FROM instruments WHERE id=4':
            return [('USD_JPY', )]
        elif query == 'DELETE FROM open_trades_live WHERE trade_id="id666"':
            return
        else:
            raise Exception
    DB.execute = MagicMock(side_effect=db_execute)
    mock_broker.get_open_trades = MagicMock(return_value=Trades())
    Daemon.recover_trades()
    # Check trade deleted from db
    calls = [
        call('SELECT trade_id FROM open_trades_live'),
        call('DELETE FROM open_trades_live WHERE trade_id="id666"')
    ]
    DB.execute.assert_has_calls(calls)
    # Check no trades adopted
    for s in Daemon.strategies:
        self.assertEqual(len(s.open_trade_ids), 0)

    """ module cleanup """
    Daemon.shutdown()
def __init__(self, title, config):
    self._config = config
    self._title = title
    pid_file = '/tmp/daemon-email-listener_' + title + '.pid'
    Daemon.__init__(self, pid_file)
def __init__(self, pidfile):
    Daemon.__init__(self, pidfile, stdout='/tmp/watch_stdout.log',
                    stderr='/tmp/watch_stderr.log')
def __init__(self, *args, **kwargs):
    Daemon.__init__(self, *args, **kwargs)
import threading

import ATRHandler
import utils
from atr_cmd import AtrCmd
from daemon import Daemon

if __name__ == '__main__':
    utils.get_client_id()  # creates necessary config before launch
    server = Daemon(('127.0.0.1', 1234), ATRHandler.ATRHandler)
    threading.Thread(target=server.serve_forever).start()
    AtrCmd().cmdloop_with_keyboard_interrupt()
def __init__(self, pidfile):
    Daemon.__init__(self, pidfile, stdout=logfile, stderr=logfile)
def __init__(self, pidfile):
    Daemon.__init__(self, pidfile, '/dev/null', '/dev/null', '/dev/null')
def stop(self):
    tornado.ioloop.IOLoop.instance().stop()
    self.relay.stop_and_clean()
    Daemon.stop(self)
def stop(self, *args, **kwargs):
    logging.info("XiaomiBTDaemon stopped!!!")
    Daemon.stop(self, *args, **kwargs)
                self.stopold()
                self._couples = newcouples
                i = 1
            else:
                i = i + 1
            time.sleep(self._option['sleep'])
        self.stopold()


if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "Usage : python couplemanager.py [start|stop|restart|run]"
        sys.exit(2)
    mode = sys.argv[1]
    runner = CoupleManager('../conf/cpmng.conf')
    adaemon = Daemon('cpmng', runner)
    if 'start' == mode:
        adaemon.start()
    elif 'stop' == mode:
        adaemon.stop()
    elif 'restart' == mode:
        adaemon.restart()
    elif 'run' == mode:
        adaemon.run()
    else:
        print "Unknown command"
        sys.exit(2)
    sys.exit(0)
def __init__(self, pid_file, server, reporter, autorestart):
    Daemon.__init__(self, pid_file, autorestart=autorestart)
    self.server = server
    self.reporter = reporter
def __init__(self, args):
    self.args = args
    Daemon.__init__(self, self.args.pidfile,
                    stderr=self.args.error_log,
                    stdout=self.args.access_log)
def __init__(self, pid_file):
    Daemon.__init__(self, pid_file)
    self.search_key = "sport"
def stop(self):
    # TODO: Stop the fswatcher and the web server
    self.exit()
    Daemon.stop(self)
try:
    import sys, os
    # mine
    from daemon import Daemon
    from configuration import Setup
    from rss_seeker import RSS_Daemon
except ImportError, e:
    print "ERROR!!! Missing module : ", format(e.message[16:])
    sys.exit(1)

if __name__ == "__main__":
    if len(sys.argv) == 2:
        if 'start' == sys.argv[1]:
            conf = Setup()
            daemon = RSS_Daemon(conf)
            daemon.start()
        elif 'stop' == sys.argv[1]:
            daemon = Daemon('daemon-rss.pid')
            daemon.stop()
        elif 'restart' == sys.argv[1]:
            conf = Setup()
            daemon = RSS_Daemon(conf)
            daemon.restart()
        else:
            print "Unknown command"
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart" % sys.argv[0]
        sys.exit(2)
def __init__(self, tvmaxepy):
    self.schedule = os.getenv("HOME") + '/.tvmaxe/schedule'
    self.tvmaxepy = tvmaxepy
    self.donejobs = []
    Daemon.__init__(self, "/tmp/tvmaxed.pid")
def __init__(self, pidfile):
    Daemon.__init__(self, pidfile)
    self.run_forever = True
def run(self):
    self.dashboard_ip = None
    if settings.DASHBOARD_IP_ENV is not None:
        self.dashboard_ip = os.environ.get(settings.DASHBOARD_IP_ENV)
    daemon = Daemon(settings.DASHBOARD_TOKEN, settings.OUTPUT_DIR,
                    settings.SCRIPTS_DIR, settings.SPEED, settings.MODELS)
    jobs = []
    max_jobs = settings.NUM_THREADS
    processes = []
    decoded_processes = {}
    while True:
        # If there is no process or we have less processes
        if len(processes) == 0 or \
                len(processes) < len(self.alive_jobs(jobs)):
            new_processes = \
                daemon.get_processes_to_decode(
                    self.get_url(settings.DECODE_URLS['PROCESS_LIST']))
            # Try to avoid race conditions
            processes = \
                processes + \
                [p for p in new_processes if p["id"] not in decoded_processes]
        if len(processes) == 0:
            daemon.delete_file_list(
                self.get_url(settings.DECODE_URLS['FILES_TO_DELETE']),
                settings.DECODE_URLS['UPDATE'])
            time.sleep(5)
            continue
        # We have nothing to do, kill zombies
        if len(self.alive_jobs(jobs)) == 0:
            while multiprocessing.active_children():
                time.sleep(1)
        if len(self.alive_jobs(jobs)) < max_jobs:
            process = processes[0]
            decoded_processes[process["id"]] = True
            processes = processes[1:]
            p = multiprocessing.Process(
                target=daemon.decode_process,
                args=(process,
                      self.get_url(settings.DECODE_URLS['DOWNLOAD']),
                      self.get_url(settings.DECODE_URLS['UPDATE_PROCESS']),
                      self.get_url(settings.DECODE_URLS['UPLOAD']),
                      self.get_url(settings.DECODE_URLS['GET_FILE'])))
            p.daemon = True
            jobs.append(p)
            p.start()
        else:
            # We are full, kill zombies (join terminated processes)
            while multiprocessing.active_children():
                time.sleep(1)
        daemon.delete_file_list(
            self.get_url(settings.DECODE_URLS['FILES_TO_DELETE']),
            settings.DECODE_URLS['UPDATE'])
def __init__(self, path, pid):
    self.path = path
    self.q = Queue()
    Daemon.__init__(self, pid)
def __init__(self):
    self.name = 'yisa1'
    Daemon.__init__(self, pidfile='/var/run/%s.pid' % (self.name.lower()),
                    stderr='/tmp/yisa_error.log')
    procname.setprocname(self.name)
def start(self):
    Daemon.start(self)
            'exclude': exc_dirs,
            'qtt': qtt,
            'mqi': mqi,
            'triggermod': triggers
        }
    return watch_sections, module_sections


mod_instances = {}  # instances of module object per section

daemon = Daemon(
    user="******",
    group="root",
    stdin="/dev/null",
    stdout="/var/log/insync/insync.log",
    stderr="/var/log/insync/insync.err",
    pidfile="/var/log/insync/insync.pid",
    conf="./insync.conf",
    workers=str(max_workers_default),
)

if __name__ == "__main__":
    signal.signal(signal.SIGABRT, clean_exit)
    signal.signal(signal.SIGTERM, clean_exit)
    signal.signal(signal.SIGQUIT, clean_exit)
    signal.signal(signal.SIGINT, clean_exit)
    watch_sections, module_sections = read_config(daemon.options.conf)
    if daemon.service():
        if not watch_sections:
            sys.exit()
def __init__(self, name, id):
    Daemon.__init__(self, name, id)
    (self.req_host, self.beam_id) = self.getConfig(id)
    self.subband_id = 1
def __init__(self, pidfile, options, args):
    self.options = options
    self.args = args
    Daemon.__init__(self, pidfile)
class Loader(object):

    do_restart = False

    def __init__(self):
        # Get options via arg
        from couchpotato.runner import getOptions
        self.options = getOptions(base_path, sys.argv[1:])

        # Load settings
        settings = Env.get('settings')
        settings.setFile(self.options.config_file)

        # Create data dir if needed
        if self.options.data_dir:
            self.data_dir = self.options.data_dir
        else:
            self.data_dir = os.path.expanduser(Env.setting('data_dir'))
        if self.data_dir == '':
            self.data_dir = getDataDir()
        if not os.path.isdir(self.data_dir):
            os.makedirs(self.data_dir)

        # Create logging dir
        self.log_dir = os.path.join(self.data_dir, 'logs')
        if not os.path.isdir(self.log_dir):
            os.mkdir(self.log_dir)

        # Logging
        from couchpotato.core.logger import CPLog
        self.log = CPLog(__name__)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%H:%M:%S')
        hdlr = handlers.RotatingFileHandler(os.path.join(self.log_dir, 'error.log'), 'a', 500000, 10)
        hdlr.setLevel(logging.CRITICAL)
        hdlr.setFormatter(formatter)
        self.log.logger.addHandler(hdlr)

    def addSignals(self):
        signal.signal(signal.SIGINT, self.onExit)
        signal.signal(signal.SIGTERM, lambda signum, stack_frame: sys.exit(1))
        from couchpotato.core.event import addEvent
        addEvent('app.after_shutdown', self.afterShutdown)

    def afterShutdown(self, restart):
        self.do_restart = restart

    def onExit(self, signal, frame):
        from couchpotato.core.event import fireEvent
        fireEvent('app.crappy_shutdown', single=True)

    def run(self):
        self.addSignals()
        from couchpotato.runner import runCouchPotato
        runCouchPotato(self.options, base_path, sys.argv[1:],
                       data_dir=self.data_dir, log_dir=self.log_dir, Env=Env)
        if self.do_restart:
            self.restart()

    def restart(self):
        try:
            # remove old pidfile first
            try:
                if self.runAsDaemon():
                    try:
                        self.daemon.stop()
                    except:
                        pass
            except:
                self.log.critical(traceback.format_exc())

            # Release log files and shutdown logger
            logging.shutdown()
            time.sleep(3)

            args = [sys.executable] + [os.path.join(base_path, os.path.basename(__file__))] + sys.argv[1:]
            subprocess.Popen(args)
        except:
            self.log.critical(traceback.format_exc())

    def daemonize(self):
        if self.runAsDaemon():
            try:
                from daemon import Daemon
                self.daemon = Daemon(self.options.pid_file)
                self.daemon.daemonize()
            except SystemExit:
                raise
            except:
                self.log.critical(traceback.format_exc())

    def runAsDaemon(self):
        return self.options.daemon and self.options.pid_file
def __init__(self, pidfile, autorestart, start_event=True):
    Daemon.__init__(self, pidfile, autorestart=autorestart)
    self.run_forever = True
    self.collector = None
    self.start_event = start_event
def __init__(self, pidf):
    Daemon.__init__(self, pidf)
    self.metric = Metric_Fling()
    config = self.read_config()
    self.sleeptime = config["nodeup"]["sleeptime"]
def __init__(self):
    Daemon.__init__(self, 'ubrs.pid')
    self.ip = config.HTTP_IP
    self.port = config.HTTP_PORT
    self.relay = Relay(os.path.join(os.getcwd(), config.UBR_PATH))
    self.app = UBRSApplication(self.relay)