def _listen(self, sid):
    """Run the backend scheduler server for one station until shutdown.

    Writes a pid file, initializes logging/DB/cache/zeromq, then blocks in
    the Tornado IOLoop serving /advance on loopback at backend_port + sid.
    """
    # Record our pid so external tooling can locate/signal this process.
    pid = os.getpid()
    pid_path = "%s/backend_%s.pid" % (
        config.get_directory("pid_dir"),
        config.station_id_friendly[sid].lower(),
    )
    # Context manager guarantees the pid file is closed even if write fails.
    with open(pid_path, "w") as pid_file:
        pid_file.write(str(pid))

    db.connect()
    cache.connect()
    zeromq.init_pub()
    log.init(
        "%s/rw_%s.log"
        % (
            config.get_directory("log_dir"),
            config.station_id_friendly[sid].lower(),
        ),
        config.get("log_level"),
    )
    memory_trace.setup(config.station_id_friendly[sid].lower())

    if config.test_mode:
        playlist.remove_all_locks(sid)

    # (r"/refresh/([0-9]+)", RefreshScheduleRequest)
    app = tornado.web.Application(
        [
            (r"/advance/([0-9]+)", AdvanceScheduleRequest),
        ],
        debug=(config.test_mode or config.get("developer_mode")),
    )

    # Each station gets its own port, offset from the configured base port.
    port = int(config.get("backend_port")) + sid
    server = tornado.httpserver.HTTPServer(app)
    # Loopback only: the backend is not meant to be reachable externally.
    server.listen(port, address="127.0.0.1")

    for station_id in config.station_ids:
        playlist.prepare_cooldown_algorithm(station_id)
    schedule.load()

    log.debug(
        "start",
        "Backend server started, station %s port %s, ready to go."
        % (config.station_id_friendly[sid], port),
    )

    ioloop = tornado.ioloop.IOLoop.instance()
    try:
        ioloop.start()
    finally:
        # Orderly teardown whether we exited cleanly or via exception.
        ioloop.stop()
        server.stop()
        db.close()
        log.info("stop", "Backend has been shutdown.")
        log.close()
def monitor():
    """Watch all configured song directories for file changes until interrupted.

    Spawns one watchdog Observer per entry in the "song_dirs" config mapping,
    then idles, flushing the album-art queue once a minute.  Observer threads
    are always stopped and joined on the way out, whatever the exit path.
    """
    _common_init()
    # Record our pid so external tooling can locate/signal this process;
    # the with-block guarantees the handle is closed.
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))

    observers = []
    for directory, sids in config.get("song_dirs").iteritems():
        observer = watchdog.observers.Observer()
        observer.schedule(FileEventHandler(directory, sids), directory, recursive=True)
        observer.start()
        log.info("scan", "Observing %s with sids %s" % (directory, repr(sids)))
        observers.append(observer)

    try:
        # Observers run on their own threads; this thread just idles and
        # periodically processes queued album art.
        while True:
            time.sleep(60)
            _process_album_art_queue()
    except Exception as e:
        log.exception("scan", "Exception leaked to top monitoring function.", e)
    finally:
        # Previously cleanup only ran on the exception path; run it on every
        # exit (including KeyboardInterrupt) so observer threads terminate.
        for observer in observers:
            observer.stop()
        for observer in observers:
            observer.join()
def monitor():
    """Watch all configured song directories for file changes until interrupted.

    Spawns one RWObserver per entry in the "song_dirs" config mapping, then
    idles; observers are always stopped and joined on the way out.
    """
    _common_init()
    # Record our pid so external tooling can locate/signal this process;
    # the with-block guarantees the handle is closed.
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))

    observers = []
    for directory, sids in config.get("song_dirs").iteritems():
        observer = RWObserver()
        observer.schedule(FileEventHandler(directory, sids), directory, recursive=True)
        observer.start()
        log.info("scan", "Observing %s with sids %s" % (directory, repr(sids)))
        observers.append(observer)

    try:
        # Observers run on their own threads; this thread just idles.
        while True:
            time.sleep(1)
    finally:
        # Stop all observer threads, then wait for each to terminate.
        for observer in observers:
            observer.stop()
        for observer in observers:
            observer.join()
def monitor():
    """Watch all configured song directories for file changes until interrupted.

    Spawns one watchdog Observer per entry in the "song_dirs" config mapping,
    then idles, flushing the album-art queue once a minute.  A Ctrl-C exits
    quietly; any other exception propagates after the observers are cleaned up
    (previously a bare ``except:`` swallowed every error without a trace).
    """
    _common_init()
    # Record our pid so external tooling can locate/signal this process;
    # the with-block guarantees the handle is closed.
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))

    observers = []
    for directory, sids in config.get("song_dirs").iteritems():
        observer = watchdog.observers.Observer()
        observer.schedule(FileEventHandler(directory, sids), directory, recursive=True)
        observer.start()
        observers.append(observer)

    try:
        # Observers run on their own threads; this thread just idles and
        # periodically processes queued album art.
        while True:
            time.sleep(60)
            _process_album_art_queue()
    except KeyboardInterrupt:
        # Normal shutdown path (Ctrl-C): exit quietly.
        pass
    finally:
        # Always stop the observer threads, then wait for each to terminate.
        for observer in observers:
            observer.stop()
        for observer in observers:
            observer.join()
def monitor():
    """Watch the configured monitor_dir with pyinotify until the loop exits.

    The watch is rebuilt from scratch whenever a NewDirectoryException is
    raised by the event handler; any other exception ends the monitor.
    """
    _common_init()
    # Record our pid so external tooling can locate/signal this process;
    # the with-block guarantees the handle is closed.
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))

    # Filesystem events we care about: creations, completed writes, deletes
    # and moves; IN_EXCL_UNLINK skips events on already-unlinked files.
    mask = (
        pyinotify.IN_CREATE
        | pyinotify.IN_CLOSE_WRITE
        | pyinotify.IN_DELETE
        | pyinotify.IN_MOVED_TO
        | pyinotify.IN_MOVED_FROM
        | pyinotify.IN_EXCL_UNLINK
    )

    try:
        go = True
        while go:
            # wm must exist before the inner finally runs; previously it
            # could be unbound if WatchManager() itself raised.
            wm = None
            try:
                log.info("scan", "File monitor started.")
                wm = pyinotify.WatchManager()
                wm.add_watch(str(config.get("monitor_dir")), mask, rec=True)
                pyinotify.Notifier(wm, FileEventHandler()).loop()
                go = False
            except NewDirectoryException:
                # A directory appeared; rebuild the watch to cover it.
                log.debug("scan", "New directory added, restarting watch.")
            finally:
                if wm is not None:
                    try:
                        wm.close()
                    except Exception:
                        # Best-effort close; the manager may already be gone.
                        pass
    finally:
        log.info("scan", "File monitor shutdown.")
def write_unmatched_art_log():
    """Write the filenames of collected-but-unmatched album art, one per line,
    to rw_unmatched_art.log in the configured log directory (overwriting any
    previous log)."""
    log_path = os.path.join(config.get_directory("log_dir"), "rw_unmatched_art.log")
    with open(log_path, "w") as unmatched_log:
        for art_entry in _found_album_art:
            unmatched_log.write(art_entry[0])
            unmatched_log.write("\n")
def monitor():
    """Watch the configured monitor_dir with pyinotify until the loop exits.

    The watch is rebuilt from scratch whenever the event handler signals that
    a directory was added or deleted; any other exception ends the monitor.
    """
    _common_init()
    # Record our pid so external tooling can locate/signal this process;
    # the with-block guarantees the handle is closed.
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))

    # Filesystem events we care about: attribute changes, creations,
    # completed writes, deletes and moves (including the watched dir itself);
    # IN_EXCL_UNLINK skips events on already-unlinked files.
    mask = (
        pyinotify.IN_ATTRIB
        | pyinotify.IN_CREATE
        | pyinotify.IN_CLOSE_WRITE
        | pyinotify.IN_DELETE
        | pyinotify.IN_MOVED_TO
        | pyinotify.IN_MOVED_FROM
        | pyinotify.IN_MOVE_SELF
        | pyinotify.IN_EXCL_UNLINK
    )

    try:
        go = True
        while go:
            # wm must exist before the inner finally runs; previously it
            # could be unbound if WatchManager() itself raised.
            wm = None
            try:
                log.info("scan", "File monitor started.")
                wm = pyinotify.WatchManager()
                wm.add_watch(str(config.get("monitor_dir")), mask, rec=True)
                pyinotify.Notifier(wm, FileEventHandler()).loop()
                go = False
            except NewDirectoryException:
                # A directory appeared; rebuild the watch to cover it.
                log.debug("scan", "New directory added, restarting watch.")
            except DeletedDirectoryException:
                # A watched directory vanished; rebuild the watch set.
                log.debug("scan", "Directory was deleted, restarting watch.")
            finally:
                if wm is not None:
                    try:
                        wm.close()
                    except Exception:
                        # Best-effort close; the manager may already be gone.
                        pass
    finally:
        log.info("scan", "File monitor shutdown.")
def _listen(self, sid):
    """Run the backend scheduler server for one station until shutdown.

    Writes a pid file, initializes logging/DB/cache/zeromq, then blocks in
    the Tornado IOLoop serving /advance on loopback at backend_port + sid.
    """
    # Record our pid so external tooling can locate/signal this process.
    pid = os.getpid()
    pid_path = "%s/backend_%s.pid" % (
        config.get_directory("pid_dir"),
        config.station_id_friendly[sid].lower(),
    )
    # Context manager guarantees the pid file is closed even if write fails.
    with open(pid_path, 'w') as pid_file:
        pid_file.write(str(pid))

    db.connect()
    cache.connect()
    zeromq.init_pub()
    log.init(
        "%s/rw_%s.log"
        % (
            config.get_directory("log_dir"),
            config.station_id_friendly[sid].lower(),
        ),
        config.get("log_level"),
    )
    memory_trace.setup(config.station_id_friendly[sid].lower())

    if config.test_mode:
        playlist.remove_all_locks(sid)

    # (r"/refresh/([0-9]+)", RefreshScheduleRequest)
    app = tornado.web.Application(
        [
            (r"/advance/([0-9]+)", AdvanceScheduleRequest),
        ],
        debug=(config.test_mode or config.get("developer_mode")),
    )

    # Each station gets its own port, offset from the configured base port.
    port = int(config.get("backend_port")) + sid
    server = tornado.httpserver.HTTPServer(app)
    # Loopback only: the backend is not meant to be reachable externally.
    server.listen(port, address='127.0.0.1')

    for station_id in config.station_ids:
        playlist.prepare_cooldown_algorithm(station_id)
    schedule.load()

    log.debug(
        "start",
        "Backend server started, station %s port %s, ready to go."
        % (config.station_id_friendly[sid], port),
    )

    ioloop = tornado.ioloop.IOLoop.instance()
    try:
        ioloop.start()
    finally:
        # Orderly teardown whether we exited cleanly or via exception.
        ioloop.stop()
        server.stop()
        db.close()
        log.info("stop", "Backend has been shutdown.")
        log.close()
#!/usr/bin/python
# Launcher script: parse command-line options, bring up logging and the
# DB/cache connections, then hand off to the icecast sync loop.
import argparse

from libs import config
from libs import db
from libs import cache
from libs import log
from backend import icecast_sync

if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(description="Rainwave API server.")
    arg_parser.add_argument("--config", default=None)
    cli_args = arg_parser.parse_args()

    config.load(cli_args.config)
    log.init(
        "%s/rw_icecast_sync.log" % (config.get_directory("log_dir"),),
        config.get("log_level"),
    )
    db.connect()
    cache.connect()
    icecast_sync.start_icecast_sync()
def _listen(self, task_id):
    """Boot one API server process and block in its IOLoop until shutdown.

    One process is spawned per task_id; each listens on
    api_base_port + task_id and records its pid under the pid directory.
    """
    zeromq.init_pub()
    zeromq.init_sub()
    import api_requests.sync
    api_requests.sync.init()

    # task_ids start at zero, so we gobble up ports starting at the base
    # port and work up.
    port_no = int(config.get("api_base_port")) + task_id

    # Record our pid so external tooling can locate/signal this process;
    # the with-block guarantees the handle is closed.
    with open("%s/api_%s.pid" % (config.get_directory("pid_dir"), port_no), 'w') as pid_file:
        pid_file.write(str(os.getpid()))

    # Log according to configured directory and port we're operating on.
    log_file = "%s/rw_api_%s.log" % (config.get_directory("log_dir"), port_no)
    if config.test_mode and os.path.exists(log_file):
        os.remove(log_file)
    log.init(log_file, config.get("log_level"))
    log.debug("start", "Server booting, port %s." % port_no)

    db.connect()
    cache.connect()
    memory_trace.setup(port_no)

    api.locale.load_translations()
    api.locale.compile_static_language_files()

    # NOTE(review): reconstructed nesting — the schedule/memcache warm-up is
    # assumed to sit inside the web_developer_mode guard; confirm against VCS.
    if config.get("web_developer_mode"):
        for station_id in config.station_ids:
            playlist.prepare_cooldown_algorithm(station_id)
        # automatically loads every station ID and fills things in if there's no data
        schedule.load()
        for station_id in config.station_ids:
            schedule.update_memcache(station_id)
            rainwave.request.update_line(station_id)
            rainwave.request.update_expire_times()
            cache.set_station(station_id, "backend_ok", True)
            cache.set_station(station_id, "backend_message", "OK")
            cache.set_station(station_id, "get_next_socket_timeout", False)

    for sid in config.station_ids:
        cache.update_local_cache_for_sid(sid)
        playlist.prepare_cooldown_algorithm(sid)
    playlist.update_num_songs()

    # If we're not in developer, remove development-related URLs.
    # In-place slice assignment (instead of the old index-juggling pop loop)
    # so any other references to the list stay valid.
    if not config.get("developer_mode"):
        request_classes[:] = [
            rc for rc in request_classes if rc[0].find("/test/") == -1
        ]

    # Make sure all other errors get handled in an API-friendly way
    request_classes.append((r"/api/.*", api.web.Error404Handler))
    request_classes.append((r"/api4/.*", api.web.Error404Handler))
    request_classes.append((r".*", api.web.HTMLError404Handler))

    # Initialize the help (rather than it scan all URL handlers every time someone hits it)
    api.help.sectionize_requests()

    # Fire ze missiles!
    global app
    app = tornado.web.Application(
        request_classes,
        debug=(config.test_mode or config.get("developer_mode")),
        template_path=os.path.join(os.path.dirname(__file__), "../templates"),
        static_path=os.path.join(os.path.dirname(__file__), "../static"),
        autoescape=None,
    )
    http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
    http_server.listen(port_no)

    # Drop privileges only after the port is bound.
    if config.get("api_user") and config.get("api_group"):
        chuser.change_user(config.get("api_user"), config.get("api_group"))

    for request in request_classes:
        log.debug("start", " Handler: %s" % str(request))
    log.info("start", "API server on port %s ready to go." % port_no)

    self.ioloop = tornado.ioloop.IOLoop.instance()
    try:
        self.ioloop.start()
    finally:
        # Orderly teardown whether we exited cleanly or via exception.
        self.ioloop.stop()
        http_server.stop()
        db.close()
        log.info("stop", "Server has been shutdown.")
        log.close()
# Launcher script: schedules today's "PvP Hour" election on the station
# chosen for each configured timezone.
import argparse  # Bugfix: argparse.ArgumentParser is used below but was never imported.
from datetime import datetime

from pytz import timezone

from libs import config
from libs import db
from libs import cache
from libs import log
from rainwave.events import pvpelection

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Rainwave PVP Hour generation script.")
    parser.add_argument("--config", default=None)
    args = parser.parse_args()

    config.load(args.config)
    log_file = "%s/rw_auto_pvp.log" % (config.get_directory("log_dir"),)
    log.init(log_file, config.get("log_level"))
    db.connect()
    cache.connect()

    # One (station, station) pair per weekday, Monday first (datetime.weekday()
    # returns 0 for Monday); the second tuple element picks the column below.
    dow_map = [(4, 5), (2, 1), (3, 4), (5, 2), (1, 3), (2, 4), (1, 3)]
    # dow_map = [ (1, 1), (1, 1), (1, 1), (1, 1), (1, 1), (1, 1), (1, 1) ]
    # (tz, local hour the event starts, dow_map column to use)
    timezones = [(timezone('Europe/London'), 13, 0), (timezone('US/Eastern'), 13, 1)]
    for tz in timezones:
        # Today at the configured local hour, in the configured timezone.
        start = datetime.now(tz[0]).replace(hour=tz[1], minute=0, second=0, microsecond=0)
        sid = dow_map[start.weekday()][tz[2]]
        # Convert the aware datetime to a Unix timestamp (seconds since epoch).
        start_e = (start - datetime.fromtimestamp(0, timezone('US/Eastern'))).total_seconds()
        log.debug(
            "auto_pvp",
            "%04d/%02d/%02d %02d:%02d PVP %s %s"
            % (start.year, start.month, start.day, start.hour, start.minute,
               config.station_id_friendly[sid], tz[0].__class__.__name__),
        )
        # One-hour election block starting at start_e.
        pvpelection.PVPElectionProducer.create(sid, start_e, start_e + 3600, name="PvP Hour")
def _listen(self, task_id):
    """Boot one API server process and block in its IOLoop until shutdown.

    One process is spawned per task_id; each listens on
    api_base_port + task_id and records its pid under the pid directory.
    Task 0 additionally bakes the static CSS/JS bundles.
    """
    import api_requests.sync
    api_requests.sync.init()

    # task_ids start at zero, so we gobble up ports starting at the base
    # port and work up.
    port_no = int(config.get("api_base_port")) + task_id

    # Record our pid so external tooling can locate/signal this process;
    # the with-block guarantees the handle is closed.
    with open("%s/api_%s.pid" % (config.get_directory("pid_dir"), port_no), 'w') as pid_file:
        pid_file.write(str(os.getpid()))

    # Log according to configured directory and port we're operating on.
    log_file = "%s/rw_api_%s.log" % (config.get_directory("log_dir"), port_no)
    if config.test_mode and os.path.exists(log_file):
        os.remove(log_file)
    log.init(log_file, config.get("log_level"))
    log.debug("start", "Server booting, port %s." % port_no)

    db.connect()
    cache.connect()
    memory_trace.setup(port_no)

    # NOTE(review): reconstructed nesting — the schedule/memcache warm-up is
    # assumed to sit inside the web_developer_mode guard; confirm against VCS.
    if config.get("web_developer_mode"):
        for station_id in config.station_ids:
            playlist.prepare_cooldown_algorithm(station_id)
        # automatically loads every station ID and fills things in if there's no data
        schedule.load()
        for station_id in config.station_ids:
            schedule.update_memcache(station_id)
            rainwave.request.update_line(station_id)
            rainwave.request.update_expire_times()
            cache.set_station(station_id, "backend_ok", True)
            cache.set_station(station_id, "backend_message", "OK")
            cache.set_station(station_id, "get_next_socket_timeout", False)

    for sid in config.station_ids:
        cache.update_local_cache_for_sid(sid)
        playlist.prepare_cooldown_algorithm(sid)
    playlist.update_num_songs()

    # If we're not in developer, remove development-related URLs.
    # In-place slice assignment (instead of the old index-juggling pop loop)
    # so any other references to the list stay valid.
    if not config.get("developer_mode"):
        request_classes[:] = [
            rc for rc in request_classes if rc[0].find("/test/") == -1
        ]

    # Make sure all other errors get handled in an API-friendly way
    request_classes.append((r"/api/.*", api.web.Error404Handler))
    request_classes.append((r"/api4/.*", api.web.Error404Handler))
    request_classes.append((r".*", api.web.HTMLError404Handler))

    # Initialize the help (rather than it scan all URL handlers every time someone hits it)
    api.help.sectionize_requests()

    # Fire ze missiles!
    app = tornado.web.Application(
        request_classes,
        debug=(config.test_mode or config.get("developer_mode")),
        template_path=os.path.join(os.path.dirname(__file__), "../templates"),
        static_path=os.path.join(os.path.dirname(__file__), "../static"),
        autoescape=None)
    http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
    http_server.listen(port_no)

    # Drop privileges only after the port is bound.
    if config.get("api_user") and config.get("api_group"):
        chuser.change_user(config.get("api_user"), config.get("api_group"))

    # Only one process needs to bake the static assets.
    if task_id == 0:
        buildtools.bake_css()
        buildtools.bake_js()
        buildtools.bake_beta_js()

    for request in request_classes:
        log.debug("start", " Handler: %s" % str(request))
    log.info("start", "API server on port %s ready to go." % port_no)

    self.ioloop = tornado.ioloop.IOLoop.instance()
    try:
        self.ioloop.start()
    finally:
        # Orderly teardown whether we exited cleanly or via exception.
        self.ioloop.stop()
        http_server.stop()
        db.close()
        log.info("stop", "Server has been shutdown.")
        log.close()
pass else: raise if __name__ == "__main__": parser = argparse.ArgumentParser( description= "Rainwave auto-song cleanup. WARNING: This script hardcoded for Rainwave's setup! Please edit the code before using!" ) parser.add_argument("--config", default=None, required=True) parser.add_argument("--moveto", default=None, required=True) parser.add_argument("--execute", required=False, action="store_true") args = parser.parse_args() config.load(args.config) log_file = "%s/rw_auto_clean.log" % (config.get_directory("log_dir"), ) log.init(log_file, "print") db.connect() cache.connect() REMOVE_THRESHOLD = 3.0 REQUIRED_RATING_COUNT = 20 REQONLY_THRESHOLD = 3.3 REQONLY_STATION = 2 remove_songs = db.c.fetch_all( "SELECT song_id, song_origin_sid, song_filename FROM r4_songs WHERE song_rating <= %s AND song_origin_sid != %s AND song_origin_sid != 0 AND song_verified = TRUE AND song_rating_count >= %s", (REMOVE_THRESHOLD, REQONLY_STATION, REQUIRED_RATING_COUNT)) reqonly_songs = db.c.fetch_all( "SELECT song_id, song_origin_sid, song_filename FROM r4_songs WHERE song_rating > %s AND song_rating <= %s AND song_origin_sid != 0 AND song_verified = TRUE AND song_rating_count >= %s", (REMOVE_THRESHOLD, REQONLY_THRESHOLD, REQUIRED_RATING_COUNT))
def _listen(self, task_id):
    """Boot one API server process and block in its IOLoop until shutdown.

    One process is spawned per task_id; each listens on
    api_base_port + task_id.  A periodic callback keeps the DB connection
    alive while the loop runs.
    """
    zeromq.init_pub()
    zeromq.init_sub()
    import api_requests.sync
    api_requests.sync.init()

    # task_ids start at zero, so we gobble up ports starting at the base
    # port and work up.
    port_no = int(config.get("api_base_port")) + task_id

    # Log according to configured directory and port we're operating on.
    log_file = "%s/rw_api_%s.log" % (config.get_directory("log_dir"), port_no)
    log.init(log_file, config.get("log_level"))
    log.debug("start", "Server booting, port %s." % port_no)

    # Fail fast on first connect; the keepalive callback below handles
    # reconnection once we're up.
    db.connect(auto_retry=False, retry_only_this_time=True)
    cache.connect()
    memory_trace.setup(port_no)

    api.locale.load_translations()
    api.locale.compile_static_language_files()

    # NOTE(review): reconstructed nesting — the schedule/memcache warm-up is
    # assumed to sit inside the developer_mode guard; confirm against VCS.
    if config.get("developer_mode"):
        for station_id in config.station_ids:
            playlist.prepare_cooldown_algorithm(station_id)
        # automatically loads every station ID and fills things in if there's no data
        schedule.load()
        for station_id in config.station_ids:
            schedule.update_memcache(station_id)
            rainwave.request.update_line(station_id)
            rainwave.request.update_expire_times()
            cache.set_station(station_id, "backend_ok", True)
            cache.set_station(station_id, "backend_message", "OK")
            cache.set_station(station_id, "get_next_socket_timeout", False)

    for sid in config.station_ids:
        cache.update_local_cache_for_sid(sid)
        playlist.prepare_cooldown_algorithm(sid)
    playlist.update_num_songs()

    # If we're not in developer, remove development-related URLs.
    # In-place slice assignment (instead of the old index-juggling pop loop)
    # so any other references to the list stay valid.
    if not config.get("developer_mode"):
        request_classes[:] = [
            rc for rc in request_classes if rc[0].find("/test/") == -1
        ]

    # Make sure all other errors get handled in an API-friendly way
    request_classes.append((r"/api/.*", api.web.Error404Handler))
    request_classes.append((r"/api4/.*", api.web.Error404Handler))
    request_classes.append((r".*", api.web.HTMLError404Handler))

    # Initialize the help (rather than it scan all URL handlers every time someone hits it)
    api.help.sectionize_requests()

    # Fire ze missiles!
    global app
    debug = config.get("developer_mode")
    app = tornado.web.Application(
        request_classes,
        debug=debug,
        template_path=os.path.join(os.path.dirname(__file__), "../templates"),
        static_path=os.path.join(os.path.dirname(__file__), "../static"),
        autoescape=None,
        autoreload=debug,
        serve_traceback=debug,
    )
    http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
    http_server.listen(port_no)

    for request in request_classes:
        log.debug("start", " Handler: %s" % str(request))
    log.info("start", "API server on port %s ready to go." % port_no)

    self.ioloop = tornado.ioloop.IOLoop.instance()
    # Ping the DB every 10s so idle connections aren't dropped.
    db_keepalive = tornado.ioloop.PeriodicCallback(db.connection_keepalive, 10000)
    db_keepalive.start()
    try:
        self.ioloop.start()
    finally:
        # Orderly teardown whether we exited cleanly or via exception.
        self.ioloop.stop()
        http_server.stop()
        db.close()
        log.info("stop", "Server has been shutdown.")
        log.close()