def process_album_art(filename):
    """Resize the album art at `filename` and save 320/240/120px JPEGs for
    every tag-based album whose songs live in the file's directory.

    Bug fixes vs. the previous revision:
    - "LIKE %s%" was malformed SQL; the trailing wildcard is now appended
      with || '%%' so psycopg parameter substitution works.
    - `len(album_id)` raised NameError (the variable is `album_ids`).
    - Image.thumbnail() resizes in place and returns None, so assigning its
      return value left every image as None; copy first, then thumbnail.
    """
    directory = filename[0:filename.rfind("/")]
    album_ids = db.c.fetch_list(
        "SELECT DISTINCT album_id FROM r4_songs JOIN r4_song_album USING (song_id) "
        "WHERE song_filename LIKE %s || '%%' AND r4_song_album.album_is_tag = TRUE",
        (directory,))
    if not album_ids:
        return
    im_original = Image.open(filename)

    def _sized(limit, size):
        # Only shrink when the source is meaningfully larger than the target;
        # otherwise reuse the original image object.
        if im_original.size[0] > limit or im_original.size[1] > limit:
            im = im_original.copy()
            im.thumbnail((size, size), Image.ANTIALIAS)
            return im
        return im_original

    im_320 = _sized(420, 320)
    im_240 = _sized(260, 240)
    im_120 = _sized(160, 120)
    for album_id in album_ids:
        im_120.save("%s/%s_120.jpg" % (config.get("album_art_directory"), album_id))
        im_240.save("%s/%s_240.jpg" % (config.get("album_art_directory"), album_id))
        im_320.save("%s/%s.jpg" % (config.get("album_art_directory"), album_id))
def process_album_art(filename):
    """Create resized album-art JPEGs (320/240/120px) for every album whose
    songs live in the same directory as `filename`.

    Returns True on success (or when album art is disabled), False when no
    matching albums exist or the art cannot be opened.
    """
    # There's an ugly bug here where psycopg isn't correctly escaping the path's \ on Windows
    # So we need to repr() in order to get the proper number of \ and then chop the leading and trailing single-quotes
    # Nasty bug. This workaround needs to be tested on a POSIX system.
    if not config.get("album_art_enabled"):
        return True
    directory = repr(os.path.dirname(filename))[2:-1]
    album_ids = db.c.fetch_list(
        "SELECT DISTINCT album_id FROM r4_songs JOIN r4_song_sid USING (song_id) WHERE song_filename LIKE %s || '%%'",
        (directory,))
    if not album_ids or len(album_ids) == 0:
        return False
    im_original = Image.open(filename)
    if not im_original:
        _add_scan_error(filename, "Could not open album art.")
        return False
    # target size -> image; fall back to the unresized original when the
    # source is already small enough for that slot.
    resized = {}
    for threshold, target in ((420, 320), (260, 240), (160, 120)):
        if im_original.size[0] > threshold or im_original.size[1] > threshold:
            im = im_original.copy()
            im.thumbnail((target, target), Image.ANTIALIAS)
            resized[target] = im
        else:
            resized[target] = im_original
    for album_id in album_ids:
        resized[120].save("%s/%s_120.jpg" % (config.get("album_art_file_path"), album_id))
        resized[240].save("%s/%s_240.jpg" % (config.get("album_art_file_path"), album_id))
        resized[320].save("%s/%s.jpg" % (config.get("album_art_file_path"), album_id))
    return True
def prepare(self):
    """Prepare the index request: refuse to serve before the schedule is
    cached, ensure an (anonymous) user with an API key exists, and collect
    the list of individual JS files when running in beta/developer modes."""
    super(MainIndex, self).prepare()
    if not cache.get_station(self.sid, "sched_current"):
        raise APIException(
            "server_just_started",
            "Rainwave is Rebooting, Please Try Again in a Few Minutes",
            http_code=500,
        )
    self.jsfiles = None
    if not self.user:
        self.user = User(1)
        self.user.ip_address = self.request.remote_ip
    self.user.ensure_api_key()
    dev_mode = (
        self.beta
        or config.get("web_developer_mode")
        or config.get("developer_mode")
        or config.get("test_mode")
    )
    if not dev_mode:
        return
    buildtools.bake_beta_css()
    buildtools.bake_beta_templates()
    static_marker = "static/%s" % self.js_dir
    self.jsfiles = []
    for root, _subdirs, files in os.walk(
        os.path.join(os.path.dirname(__file__), "../static/%s" % self.js_dir)
    ):
        self.jsfiles.extend(
            os.path.join(root[root.find(static_marker):], f)
            for f in files
            if f.endswith(".js")
        )
def start(self):
    """Bake static assets, then begin serving: a single process when
    api_num_processes == 1 or in web developer mode, otherwise forked
    child processes (one per core when the config value is None)."""
    for bake in (
        buildtools.bake_css,
        buildtools.bake_js,
        buildtools.bake_templates,
        buildtools.bake_beta_templates,
        buildtools.copy_woff,
    ):
        bake()
    # Bypass Tornado's forking for Windows machines / single-process setups.
    if config.get("api_num_processes") == 1 or config.get("web_developer_mode"):
        self._listen(0)
        return
    # fork_processes() hijacks the parent PID; everything from here on runs
    # in the children, each with a full copy of execution state up to this
    # point. Tornado handles child cleanup and zombification.
    tornado.process.fork_processes(config.get("api_num_processes"))
    task_id = tornado.process.task_id()
    if task_id is not None:
        self._listen(task_id)
def _listen(self, task_id):
    """Bind one API process to base port + task_id and run its IOLoop."""
    # task_ids start at zero, so ports are allocated upward from the base.
    port_no = int(config.get("api_base_port")) + task_id
    # Per-port log file in the configured directory.
    log_file = "%s/api%s.log" % (config.get("log_dir"), port_no)
    # Start each test run with a fresh log.
    if config.test_mode and os.path.exists(log_file):
        os.remove(log_file)
    log.init(log_file, config.get("log_level"))
    log.debug("start", "Server booting, port %s." % port_no)
    db.open()
    app = tornado.web.Application(request_classes)
    http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
    http_server.listen(port_no)
    for handler in request_classes:
        log.debug("start", " Handler: %s" % str(handler))
    for sid in config.station_ids:
        cache.update_local_cache_for_sid(sid)
    log.info("start", "Server bootstrapped and ready to go.")
    self.ioloop = tornado.ioloop.IOLoop.instance()
    self.ioloop.start()
    # start() blocks; reaching here means the server is shutting down.
    http_server.stop()
    log.info("stop", "Server has been shutdown.")
    log.close()
def _scan_all_directories(art_only=False):
    # Walk every configured song directory twice: a first pass just to count
    # files (for the progress display), a second pass to actually scan them.
    # Python 2 module (print statements, iteritems).
    total_files = 0
    file_counter = 0
    # Pass 1: count files so the inline progress line can show "n / total".
    for directory, sids in config.get("song_dirs").iteritems():
        for root, subdirs, files in os.walk(directory.encode("utf-8"), followlinks = True): #pylint: disable=W0612
            total_files += len(files)
    # Pass 2: scan each file, printing (but not aborting on) errors.
    for directory, sids in config.get("song_dirs").iteritems():
        for root, subdirs, files in os.walk(directory.encode("utf-8"), followlinks = True):
            for filename in files:
                filename = os.path.normpath(root + os.sep + filename)
                try:
                    # In art-only mode, skip anything that is not an image.
                    if art_only and not _is_image(filename):
                        pass
                    else:
                        _scan_file(filename, sids, raise_exceptions=True)
                except Exception as e:
                    type_, value_, traceback_ = sys.exc_info() #pylint: disable=W0612
                    # PassableScanError is expected noise: print the message
                    # only; anything else also gets its exception type.
                    if not isinstance(e, PassableScanError):
                        print "\n%s:\n\t %s: %s" % (filename.decode("utf-8", errors="ignore"), type_, value_)
                    else:
                        print "\n%s:\n\t %s" % (filename.decode("utf-8", errors="ignore"), value_)
                    sys.stdout.flush()
                # Count files even when scanning failed so progress is honest.
                file_counter += 1
                _print_to_screen_inline('%s %s / %s' % (directory, file_counter, total_files))
                sys.stdout.flush()
    print "\n"
    sys.stdout.flush()
def open():
    """Initialize the module-level cache client: a real pylibmc connection,
    or an in-memory TestModeCache when running tests without memcache."""
    global _memcache
    # In test mode (unless tests explicitly opt into memcache) use the stub.
    if config.test_mode and not config.get("test_use_memcache"):
        _memcache = TestModeCache()
        return
    _memcache = pylibmc.Client(config.get("memcache_servers"), binary=True)
    _memcache.behaviors = {
        "tcp_nodelay": True,
        "ketama": config.get("memcache_ketama"),
    }
def create_baked_directory():
    """Ensure ../static/baked/<build_number> exists.

    Returns True when the directory was created by this call, False when it
    already existed. When running as root on POSIX, chowns the new directory
    to the configured API user/group."""
    d = os.path.join(os.path.dirname(__file__), "../static/baked/", str(get_build_number()))
    if os.path.exists(d):
        return False
    os.makedirs(d)
    if os.name != "nt" and os.getuid() == 0: #pylint: disable=E1101
        subprocess.call([
            "chown",
            "-R",
            "%s:%s" % (config.get("api_user"), config.get("api_group")),
            d,
        ])
    return True
def sync_frontend_all(sid):
    """POST a sync_update_all for station `sid` to every local API process."""
    _sync_frontend_all_timed_stop(sid)
    http_client = AsyncHTTPClient()
    params = urllib.urlencode({ "sid": sid })
    base_port = config.get("api_base_port")
    # One API process per port, starting at the base port.
    for offset in range(0, config.get("api_num_processes")):
        port = base_port + offset
        http_client.fetch(
            "http://localhost:%s/api4/sync_update_all" % (port,),
            sync_result,
            method='POST',
            body=params)
        log.debug("sync_front", "Sent update_all to API port %s" % (port,))
def get_art_url(self, album_id, sid = None):
    """Return the URL prefix for an album's art, or "" when no art exists.

    Prefers station-specific art ("<sid>_<album_id>_320.jpg") when `sid` is
    given, falling back to the generic "a_<album_id>_320.jpg" file."""
    art_dir = config.get("album_art_file_path")
    if not art_dir:
        return ""
    if sid and os.path.isfile(os.path.join(art_dir, "%s_%s_320.jpg" % (sid, album_id))):
        return "%s/%s_%s" % (config.get("album_art_url_path"), sid, album_id)
    if os.path.isfile(os.path.join(art_dir, "a_%s_320.jpg" % album_id)):
        return "%s/a_%s" % (config.get("album_art_url_path"), album_id)
    return ""
def sync_frontend_all(sid):
    # Notify every local API process (base port upward, one per process)
    # that station `sid` needs a full client update. Python 2 module
    # (urllib2 and the `except X, e` syntax).
    try:
        params = urllib.urlencode({ "sid": sid })
        for i in range(0, config.get("api_num_processes")):
            urllib2.urlopen(urllib2.Request("http://localhost:%s/api/sync_update_all" % (config.get("api_base_port") + i,), params))
            log.debug("sync_front", "Sent update_all to API port %s" % (config.get("api_base_port") + i,))
    except urllib2.URLError, e:
        # A dead API process is logged but not fatal; remaining ports in the
        # loop are skipped once one fails.
        log.warn("sync_front", "Could not connect to an API port: %s" % repr(e.reason))
def prepare(self):
    # Per-request setup for API handlers. Order matters here: output buffers
    # must exist before arg parsing, the sid must be resolved before
    # sid_check()/auth, and auth must complete before permission_checks().
    # Python 2 module (uses `unicode`).

    # Local-only endpoints are restricted to trusted addresses.
    if self.local_only and not self.request.remote_ip in config.get("api_trusted_ip_addresses"):
        log.info("api", "Rejected %s request from %s, untrusted address." % (self.url, self.request.remote_ip))
        raise APIException("rejected", text="You are not coming from a trusted address.")
    if self.allow_cors:
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Max-Age", "600")
        self.set_header("Access-Control-Allow-Credentials", "false")
    # Fall back to the browser's locale when none was set on the handler.
    if not isinstance(self.locale, locale.RainwaveLocale):
        self.locale = self.get_browser_locale()
    self.setup_output()
    # "in_order" switches the response payload from a dict to an array.
    if 'in_order' in self.request.arguments:
        self._output = []
        self._output_array = True
    else:
        self._output = {}
    # Station ID resolution, lowest to highest priority:
    # cookie -> Host-header hostname mapping -> explicit ?sid= argument.
    if not self.sid:
        self.sid = fieldtypes.integer(self.get_cookie("r4_sid", None))
    hostname = self.request.headers.get('Host', None)
    if hostname:
        hostname = unicode(hostname).split(":")[0]
        if hostname in config.station_hostnames:
            self.sid = config.station_hostnames[hostname]
    sid_arg = fieldtypes.integer(self.get_argument("sid", None))
    if sid_arg is not None:
        self.sid = sid_arg
    if self.sid is None and self.sid_required:
        raise APIException("missing_station_id", http_code=400)
    self.arg_parse()
    self.sid_check()
    # Remember the resolved station for subsequent visits.
    if self.sid:
        self.set_cookie("r4_sid", str(self.sid), expires_days=365)
    # Authenticate via phpBB cookies or Rainwave's own user/key scheme.
    if self.phpbb_auth:
        self.do_phpbb_auth()
    else:
        self.rainwave_auth()
    if not self.user and self.auth_required:
        raise APIException("auth_required", http_code=403)
    elif not self.user and not self.auth_required:
        # Anonymous endpoints get the shared anonymous user (ID 1).
        self.user = User(1)
        self.user.ip_address = self.request.remote_ip
    self.user.refresh(self.sid)
    if self.user and config.get("store_prefs"):
        self.user.save_preferences(self.request.remote_ip, self.get_cookie("r4_prefs", None))
    self.permission_checks()
def _run_tests(self):
    # Hit every testable API endpoint on the local server and diff the JSON
    # response against a stored reference file in api_tests/. Returns True
    # when every endpoint matched. Python 2 module (print statements,
    # httplib). Prints a "." per request as a progress indicator.
    passed = True
    headers = ({"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain text/html text/javascript application/json application/javascript" })
    params = {}
    for request_pair in testable_requests:
        request = request_pair['class']
        sys.stdout.write(".")
        try:
            #print "*** ", request.url
            # Setup and get the data from the HTTP server
            params = {}
            if request.auth_required:
                params = { "user_id": 2, "key": "TESTKEY" }
            if request.login_required or request.admin_required or request.dj_required:
                # admin login, user ID 2 currently is though.
                pass
            else:
                # need an anon user/key added to params here
                pass
            params = urllib.urlencode(params)
            conn = httplib.HTTPConnection('localhost', config.get("api_base_port"))
            conn.request(request_pair['method'], "/api/1/%s" % request.url, params, headers)
            response = conn.getresponse()
            if response.status == 200:
                web_data = json.load(response)
                # api_info carries volatile data (e.g. timestamps) that would
                # never match the stored reference, so drop it before diffing.
                del(web_data['api_info'])
                ref_file = open("api_tests/%s.json" % request.url)
                ref_data = json.load(ref_file)
                ref_file.close()
                if not dict_compare.print_differences(ref_data, web_data):
                    passed = False
                    print "JSON from server:"
                    print json.dumps(web_data, indent=4, sort_keys=True)
                    print
            else:
                print
                print "*** ERROR:", request.url, ": Response status", response.status
                passed = False
        except:
            # A failed request shouldn't abort the whole suite; record the
            # failure, print the traceback, and move on.
            print
            traceback.print_exc(file=sys.stdout)
            print "*** ERROR:", request.url, ": ", sys.exc_info()[0]
            passed = False
    print
    # Ask the server under test to shut itself down, then give it a moment.
    conn = httplib.HTTPConnection('localhost', config.get("api_base_port"))
    conn.request("GET", "/api/1/shutdown", params, headers)
    conn.getresponse()
    time.sleep(1)
    print
    print "----------------------------------------------------------------------"
    print "Ran %s tests." % len(testable_requests)
    return passed
def get(self):
    """Render the main index page with all station info lists attached."""
    info.attach_info_to_request(self, all_lists=True)
    self.append("api_info", { "time": int(time.time()) })
    template_args = {
        "request": self,
        "site_description": self.locale.translate("station_description_id_%s" % self.sid),
        "revision_number": config.build_number,
        "api_url": config.get("api_external_url_prefix"),
        "cookie_domain": config.get("cookie_domain"),
        "locales": api.locale.locale_names_json,
    }
    self.render("index.html", **template_args)
def get(self):
    """Render the beta index page with playlist and artist info attached."""
    info.attach_info_to_request(self, playlist=True, artists=True)
    self.append("api_info", { "time": int(time.time()) })
    template_args = {
        "request": self,
        "site_description": self.locale.translate("station_description_id_%s" % self.sid),
        "jsfiles": jsfiles,
        "revision_number": config.get("revision_number"),
        "api_url": config.get("api_external_url_prefix"),
        "cookie_domain": config.get("cookie_domain"),
    }
    self.render("beta_index.html", **template_args)
def sync_frontend_user_id(user_id):
    """Tell every local API process to push fresh data to `user_id`'s sessions."""
    client = AsyncHTTPClient()
    body = urllib.urlencode({"sync_user_id": user_id, "sid": 1})
    for offset in range(config.get("api_num_processes")):
        port = config.get("api_base_port") + offset
        client.fetch(
            "http://127.0.0.1:%s/api4/sync_update_user" % (port,),
            sync_result,
            method="POST",
            body=body,
        )
def _listen(self, task_id):
    """Boot one API server process on base port + task_id and run it until
    shutdown: logging, DB/cache, per-station caches and cooldown state, URL
    table pruning, and the Tornado application itself.

    Bug fix: playlist.prepare_cooldown_algorithm(sid) previously sat outside
    the station loop and relied on the leaked loop variable, so only the
    *last* station ever had its cooldown algorithm prepared. It now runs for
    every station (matching the backend's start()).
    """
    # task_ids start at zero, so we gobble up ports starting at the base port and work up
    port_no = int(config.get("api_base_port")) + task_id
    # Log according to configured directory and port we're operating on
    log_file = "%s/rw_api_%s.log" % (config.get("api_log_dir"), port_no)
    if config.test_mode and os.path.exists(log_file):
        os.remove(log_file)
    log.init(log_file, config.get("log_level"))
    log.debug("start", "Server booting, port %s." % port_no)
    db.open()
    cache.open()
    for sid in config.station_ids:
        cache.update_local_cache_for_sid(sid)
        playlist.prepare_cooldown_algorithm(sid)
    # If we're not in developer mode, remove development-related URLs.
    # Slice-assign so the module-level list object is mutated in place.
    if not config.get("developer_mode"):
        request_classes[:] = [rc for rc in request_classes if rc[0].find("/test/") == -1]
    # Make sure all other errors get handled in an API-friendly way
    request_classes.append((r".*", api.web.Error404Handler))
    # Initialize the help (rather than it scan all URL handlers every time someone hits it)
    api.help.sectionize_requests()
    # Fire ze missiles!
    app = tornado.web.Application(
        request_classes,
        debug=(config.test_mode or config.get("developer_mode")),
        template_path=os.path.join(os.path.dirname(__file__), "../templates"),
        static_path=os.path.join(os.path.dirname(__file__), "../static"),
        autoescape=None,
    )
    http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
    http_server.listen(port_no)
    # Drop root privileges once the (possibly privileged) port is bound.
    if config.get("api_user") and config.get("api_group"):
        chuser.change_user(config.get("api_user"), config.get("api_group"))
    for request in request_classes:
        log.debug("start", " Handler: %s" % str(request))
    log.info("start", "API server bootstrapped and ready to go.")
    self.ioloop = tornado.ioloop.IOLoop.instance()
    try:
        self.ioloop.start()
    finally:
        self.ioloop.stop()
        http_server.stop()
        db.close()
        log.info("stop", "Server has been shutdown.")
        log.close()
def sync_frontend_ip(ip_address):
    """Tell every local API process to refresh clients at `ip_address`."""
    client = AsyncHTTPClient()
    # the sid here is for local testing purposes
    body = urllib.urlencode({"ip_address": ip_address, "sid": 1})
    for offset in range(config.get("api_num_processes")):
        port = config.get("api_base_port") + offset
        client.fetch(
            "http://localhost:%s/api4/sync_update_ip" % (port,),
            sync_result,
            method="POST",
            body=body,
        )
def init_proxy():
    """Start a ZeroMQ forwarder thread bridging zeromq_pub -> zeromq_sub."""
    forwarder = zmq.devices.ThreadDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
    # Inbound side: subscribe to every message published at zeromq_pub.
    forwarder.bind_in(config.get("zeromq_pub"))
    forwarder.setsockopt_in(zmq.IDENTITY, 'SUB')
    forwarder.setsockopt_in(zmq.SUBSCRIBE, "")
    # Outbound side: republish everything on zeromq_sub.
    forwarder.bind_out(config.get("zeromq_sub"))
    forwarder.setsockopt_out(zmq.IDENTITY, 'PUB')
    forwarder.start()
def sync_frontend_user_id(user_id):
    """Notify every local API process that `user_id`'s state changed.

    Bug fix: the request body previously referenced an undefined `sid`
    variable (NameError at call time) and never transmitted the user ID at
    all. It now sends sync_user_id, matching the newer AsyncHTTPClient
    implementation of this function.
    NOTE(review): confirm the /api/sync_update_user endpoint expects the
    parameter under the name "sync_user_id".
    """
    try:
        headers = ({"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain text/html text/javascript application/json application/javascript" })
        params = urllib.urlencode({ "sync_user_id": user_id })
        for i in range(0, config.get("api_num_processes")):
            conn = httplib.HTTPConnection('localhost', config.get("api_base_port") + i)
            conn.request("GET", "/api/sync_update_user", params, headers)
            conn.close()
    except socket.error:
        log.warn("sync_front", "Could not connect to an API port.")
    except socket.timeout:
        log.warn("sync_front", "Timeout connecting to an API port.")
def _get_pause_file(self):
    """Return the liquidsoap entry to play while this station is paused.

    Without annotation support this is the bare pause file path; otherwise
    it is wrapped in an annotate: prefix carrying crossfade settings and the
    station's pause title (default "Intermission")."""
    pause_file = config.get("pause_file")
    if not config.get("liquidsoap_annotations"):
        log.debug("backend", "Station is paused, using: %s" % pause_file)
        return pause_file
    title = cache.get_station(self.sid, "pause_title") or "Intermission"
    annotated = "annotate:crossfade=\"2\",use_suffix=\"1\",title=\"%s\":%s" % (title, pause_file)
    log.debug("backend", "Station is paused, using: %s" % annotated)
    return annotated
def prepare(self):
    """Prepare the R4 index request; a public beta waives the perks
    requirement, and developer/test modes rebuild CSS and enumerate the
    individual JS files instead of using the baked bundle."""
    if config.get("public_beta"):
        self.perks_required = False
    super(R4Index, self).prepare()
    self.json_payload = {}
    self.jsfiles = None
    dev_mode = (
        config.get("web_developer_mode")
        or config.get("developer_mode")
        or config.get("test_mode")
    )
    if dev_mode:
        buildtools.bake_css()
        self.jsfiles = [
            os.path.join(root[root.find("static/js4"):], f)
            for root, subdirs, files in os.walk(os.path.join(os.path.dirname(__file__), "../static/js4"))
            for f in files
        ]
def get(self):
    """Render the R4 index page with full station info and relay/stream data."""
    info.attach_info_to_request(self, extra_list=self.get_cookie("r4_active_list"))
    self.append("api_info", { "time": int(time.time()) })
    template_args = {
        "request": self,
        "site_description": self.locale.translate("station_description_id_%s" % self.sid),
        "revision_number": config.build_number,
        "jsfiles": self.jsfiles,
        "api_url": config.get("api_external_url_prefix"),
        "cookie_domain": config.get("cookie_domain"),
        "locales": api.locale.locale_names_json,
        "relays": config.public_relays_json[self.sid],
        "stream_filename": config.get_station(self.sid, "stream_filename"),
        "station_list": config.station_list_json,
    }
    self.render("r4_index.html", **template_args)
def start():
    # Boot the minimal backend server: logging, DB, and a single
    # /advance/<sid> endpoint, bound to localhost only.
    # NOTE(review): `log_file` and `port_no` are not defined in this
    # function — presumably module-level globals set elsewhere; confirm
    # against the enclosing module.
    log.init(log_file, config.get("log_level"))
    log.debug("start", "Server booting, port %s." % port_no)
    db.open()
    app = tornado.web.Application([(r"/advance/([0-9]+)", AdvanceScheduleRequest)])
    server = tornado.httpserver.HTTPServer(app)
    # Loopback-only: the backend is driven by local processes, not the public.
    server.listen(int(config.get("backend_port")), address="127.0.0.1")
    schedule.load()
    tornado.ioloop.IOLoop.instance().start()
def post(self, sid):
    """Development-only login: fabricate a phpBB user and session, set the
    phpBB auth cookies, and run the wrapped request as that user.

    Bug fixes:
    - The INSERT previously produced malformed SQL: the username string
      literal was never closed ("VALUES ('TestN, %s, 5)"). It is now
      properly quoted.
    - The "u" cookie was set twice; the duplicate is removed.
    """
    user_id = db.c.fetch_var("SELECT MAX(user_id) FROM phpbb_users")
    # NOTE(review): bumping only when the max ID is below 2 means IDs never
    # advance past 2 — confirm whether the increment was meant to be
    # unconditional.
    if user_id < 2:
        user_id = user_id + 1
    db.c.update("INSERT INTO phpbb_users (username, user_id, group_id) VALUES ('Test" + str(user_id) + "', %s, 5)", (user_id,))
    session_id = db.c.fetch_var("SELECT session_id FROM phpbb_sessions WHERE session_user_id = %s", (user_id,))
    if not session_id:
        # NOTE(review): under Python 3 hashlib.md5 requires bytes — this
        # call only works on Python 2; confirm the module's target version.
        session_id = hashlib.md5(repr(time.time())).hexdigest()
        db.c.update("INSERT INTO phpbb_sessions (session_id, session_user_id) VALUES (%s, %s)", (session_id, user_id))
    self.set_cookie(config.get("phpbb_cookie_name") + "u", user_id)
    self.set_cookie(config.get("phpbb_cookie_name") + "sid", session_id)
    self.execute(user_id, sid)
    self.append_standard("dev_login_ok", "You are now user ID %s session ID %s" % (user_id, session_id))
def get(self):
    """Emit the bootstrap JavaScript preamble: locale/config values, a
    window.MOBILE flag derived from the User-Agent, and the opening of the
    BOOTSTRAP assignment (the payload itself is written by the framework).

    Bug fix: a request without a User-Agent header previously crashed with
    AttributeError (.lower() on None); the header is now defaulted to "".
    """
    self.set_header("Content-Type", "text/javascript")
    self.append("locales", api.locale.locale_names)
    self.append("cookie_domain", config.get("cookie_domain"))
    self.append("on_init", [])
    self.append("on_measure", [])
    self.append("on_draw", [])
    self.append("websocket_host", config.get("websocket_host"))
    self.post()
    ua = (self.request.headers.get("User-Agent") or "").lower()
    if ua.find("mobile") != -1 or ua.find("android") != -1:
        self.write("window.MOBILE = true;")
    else:
        self.write("window.MOBILE = false;")
    self.write("var BOOTSTRAP=")
def start():
    """Boot the backend scheduler: DB/cache, advance+refresh HTTP endpoints
    on localhost, privilege drop, pid file, schedule load, and per-station
    cooldown preparation, then run the IOLoop until shutdown."""
    db.open()
    cache.open()
    if config.test_mode:
        playlist.remove_all_locks(1)
    routes = [
        (r"/advance/([0-9]+)", AdvanceScheduleRequest),
        (r"/refresh/([0-9]+)", RefreshScheduleRequest),
    ]
    app = tornado.web.Application(routes, debug=(config.test_mode or config.get("developer_mode")))
    server = tornado.httpserver.HTTPServer(app)
    backend_port = int(config.get("backend_port"))
    # Loopback only: the backend is not a public-facing service.
    server.listen(backend_port, address='127.0.0.1')
    # Drop privileges once the port is bound.
    if config.get("backend_user") or config.get("backend_group"):
        chuser.change_user(config.get("backend_user"), config.get("backend_group"))
    with open(config.get("backend_pid_file"), 'w') as pidfile:
        pidfile.write(str(os.getpid()))
    schedule.load()
    log.debug("start", "Backend server bootstrapped, port %s, ready to go." % backend_port)
    for sid in config.station_ids:
        playlist.prepare_cooldown_algorithm(sid)
    try:
        tornado.ioloop.IOLoop.instance().start()
    finally:
        db.close()
def _trim(sid):
    """Delete schedule and election rows older than the configured ages, and
    cap the song history table at trim_history_length rows for `sid`."""
    now = int(timestamp())
    db.c.update(
        "DELETE FROM r4_schedule WHERE sched_start_actual <= %s",
        (now - config.get("trim_event_age"),),
    )
    db.c.update(
        "DELETE FROM r4_elections WHERE elec_start_actual <= %s",
        (now - config.get("trim_election_age"),),
    )
    newest_id = db.c.fetch_var("SELECT MAX(songhist_id) FROM r4_song_history")
    db.c.update(
        "DELETE FROM r4_song_history WHERE songhist_id <= %s AND sid = %s",
        (newest_id - config.get("trim_history_length"), sid),
    )
def get_age_cooldown_multiplier(added_on):
    """Return the age-based cooldown multiplier for a song added at Unix
    time `added_on`.

    Songs older than cooldown_age_threshold weeks get 1.0. Younger songs
    ramp linearly from the stage-1 minimum up to the stage-2 minimum, then
    approach 1.0 logarithmically (stage 2)."""
    age_weeks = (int(time.time()) - added_on) / 604800.0
    threshold = config.get("cooldown_age_threshold")
    if age_weeks >= threshold:
        return 1.0
    s2_start = config.get("cooldown_age_stage2_start")
    s2_min_multiplier = config.get("cooldown_age_stage2_min_multiplier")
    s1_min_multiplier = config.get("cooldown_age_stage1_min_multiplier")
    # Age Cooldown Stage 1: linear ramp between the two minimum multipliers.
    if age_weeks <= s2_start:
        return (age_weeks / s2_start) * (s2_min_multiplier - s1_min_multiplier) + s1_min_multiplier
    # Age Cooldown Stage 2: logarithmic approach toward 1.0.
    s2_end = threshold
    return s2_min_multiplier + (
        (1.0 - s2_min_multiplier)
        * ((0.32436 - (s2_end / 288.0) + (math.pow(s2_end, 2.0) / 38170.0)) * math.log(2.0 * age_weeks + 1.0))
    )
def prepare(self):
    """Prepare the index request: ensure a user (anonymous fallback) with an
    API key, and in beta/developer modes rebuild CSS and enumerate the
    individual js4 files instead of the baked bundle."""
    super(MainIndex, self).prepare()
    self.json_payload = {}
    self.jsfiles = None
    if not self.user:
        self.user = User(1)
    self.user.ensure_api_key(self.request.remote_ip)
    dev_mode = (
        self.beta
        or config.get("web_developer_mode")
        or config.get("developer_mode")
        or config.get("test_mode")
    )
    if dev_mode:
        buildtools.bake_css()
        self.jsfiles = [
            os.path.join(root[root.find("static/js4"):], f)
            for root, subdirs, files in os.walk(os.path.join(os.path.dirname(__file__), "../static/js4"))
            for f in files
        ]
def _trim(sid):
    """Delete old schedule rows (except OneUpProducer events) and old
    elections per the configured ages, and cap the song history table at
    trim_history_length rows for `sid`."""
    now = int(timestamp())
    db.c.update(
        "DELETE FROM r4_schedule WHERE sched_start_actual <= %s AND sched_type != 'OneUpProducer'",
        (now - config.get("trim_event_age"),),
    )
    db.c.update(
        "DELETE FROM r4_elections WHERE elec_start_actual <= %s",
        (now - config.get("trim_election_age"),),
    )
    newest_id = db.c.fetch_var("SELECT MAX(songhist_id) FROM r4_song_history")
    db.c.update(
        "DELETE FROM r4_song_history WHERE songhist_id <= %s AND sid = %s",
        (newest_id - config.get("trim_history_length"), sid),
    )
def monitor():
    """Watch every configured song directory with a watchdog observer and
    process the album-art queue once a minute, until an exception escapes."""
    _common_init()
    # Record our PID so init scripts can manage this process.
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))
    observers = []
    for directory, sids in config.get("song_dirs").iteritems():
        obs = watchdog.observers.Observer()
        obs.schedule(FileEventHandler(directory, sids), directory, recursive=True)
        obs.start()
        log.info("scan", "Observing %s with sids %s" % (directory, repr(sids)))
        observers.append(obs)
    try:
        while True:
            time.sleep(60)
            _process_album_art_queue()
    except Exception as e:
        log.exception("scan", "Exception leaked to top monitoring function.", e)
    # Only reachable after the loop aborts: shut the observers down cleanly.
    for obs in observers:
        obs.stop()
    for obs in observers:
        obs.join()
def monitor():
    # Watch monitor_dir with pyinotify, restarting the watch whenever the
    # event handler signals that the directory tree changed shape
    # (NewDirectoryException / DeletedDirectoryException). Runs until the
    # notifier loop exits normally.
    _common_init()
    # Events of interest: metadata changes, creations, completed writes,
    # deletions and all move variants; IN_EXCL_UNLINK ignores events on
    # files that have already been unlinked.
    mask = (pyinotify.IN_ATTRIB | pyinotify.IN_CREATE | pyinotify.IN_CLOSE_WRITE | pyinotify.IN_DELETE | pyinotify.IN_MOVED_TO | pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVE_SELF | pyinotify.IN_EXCL_UNLINK)
    try:
        go = True
        while go:
            try:
                log.info("scan", "File monitor started.")
                wm = pyinotify.WatchManager()
                wm.add_watch(str(config.get("monitor_dir")), mask, rec=True, auto_add=True)
                # loop() blocks; a clean return ends the outer while loop.
                pyinotify.Notifier(wm, FileEventHandler()).loop()
                go = False
            except NewDirectoryException:
                log.debug("scan", "New directory added, restarting watch.")
            except DeletedDirectoryException:
                log.debug("scan", "Directory was deleted, restarting watch.")
            finally:
                # Best-effort close; wm may be in any state when we get here.
                try:
                    wm.close()
                except:
                    pass
    finally:
        log.info("scan", "File monitor shutdown.")
def _get_pause_file(self):
    """Return what liquidsoap should play while this station is paused:
    the bare pause file, or an annotate: entry carrying crossfade settings
    and the station's pause title when annotations are enabled."""
    if not config.get("liquidsoap_annotations"):
        # Annotations unsupported: hand back the plain file path.
        log.debug("backend", "Station is paused, using: %s" % config.get("pause_file"))
        return config.get("pause_file")
    pause_title = cache.get_station(self.sid, "pause_title")
    if not pause_title:
        pause_title = "Intermission"
    annotated = "annotate:crossfade=\"2\",use_suffix=\"1\",title=\"%s\":%s" % (pause_title, config.get("pause_file"))
    log.debug("backend", "Station is paused, using: %s" % annotated)
    return annotated
def sid_check(self):
    """Validate self.sid: fall back to the default station when allowed,
    accept sid 0 only for handlers that permit it, and reject any sid not
    in the configured station list."""
    if self.sid is None and not self.sid_required:
        self.sid = config.get("default_station")
    if self.sid == 0 and self.allow_sid_zero:
        return
    if self.sid not in config.station_ids:
        raise APIException("invalid_station_id", http_code=400)
def bake_js(source_dir="js5", dest_file="script5.js"):
    """Concatenate and minify every JS file under `source_dir` into
    baked/<build_number>/<dest_file>, appending a Sentry init snippet when a
    frontend DSN is configured. Idempotent per build number: does nothing
    when the output file already exists.

    Improvement: file handles are now managed with `with` blocks so they are
    closed even if a read, minify, or write fails (previously leaked on
    exception).
    """
    create_baked_directory()
    fn = os.path.join(
        os.path.dirname(__file__),
        "..",
        "static",
        "baked",
        str(get_build_number()),
        dest_file,
    )
    if os.path.exists(fn):
        return
    js_content = ""
    for sfn in get_js_file_list(source_dir):
        with open(os.path.join(os.path.dirname(__file__), "..", sfn)) as jsfile:
            js_content += jsfile.read() + "\n"
    with open(fn, "w") as o:
        # Pylint disabled for next line because pylint is buggy about the es5 function
        o.write(minify_print(es5(js_content)))  # pylint: disable=not-callable
        if config.has("sentry_frontend_dsn"):
            sentry_frontend_dsn = config.get("sentry_frontend_dsn")
            o.write(
                'if (window.Sentry) {'
                'window.Sentry.init({'
                f'dsn: "{ sentry_frontend_dsn }",'
                'tunnel: "/sentry_tunnel",'
                '});'
                '}'
            )
def update_rating(self, skip_album_update = False):
    """Calculate an updated rating from the database.

    Counts active users' ratings in four bands (dislike < 3, neutral
    [3, 3.5), neutral-plus [3.5, 4), like >= 4), and when enough ratings
    exist writes a weighted 1-5 score back to r4_songs.

    Refactor: the four copy-pasted COUNT queries are deduplicated into a
    helper, and the weighted-average denominator reuses the already-computed
    `rating_count` (it was the same sum spelled out twice).
    """
    def _count(condition):
        # Count of active users' ratings matching `condition`; the GROUP BY
        # makes fetch_var return None when there are no rows, hence `or 0`.
        return db.c.fetch_var(
            "SELECT COUNT(*) FROM r4_song_ratings JOIN phpbb_users USING (user_id) "
            "WHERE radio_inactive = FALSE AND song_id = %s AND " + condition + " GROUP BY song_id",
            (self.id,)) or 0

    dislikes = _count("song_rating_user < 3")
    neutrals = _count("song_rating_user >= 3 AND song_rating_user < 3.5")
    neutralplus = _count("song_rating_user >= 3.5 AND song_rating_user < 4")
    likes = _count("song_rating_user >= 4")
    rating_count = dislikes + neutrals + neutralplus + likes
    log.debug("song_rating", "%s ratings for %s" % (rating_count, self.filename))
    if rating_count > config.get("rating_threshold_for_calc"):
        # Weighted share of positive sentiment, scaled onto the 1-5 range.
        self.data['rating'] = round(((((likes + (neutrals * 0.5) + (neutralplus * 0.75)) / rating_count * 4.0)) + 1), 1)
        log.debug("song_rating", "rating update: %s for %s" % (self.data['rating'], self.filename))
        db.c.update("UPDATE r4_songs SET song_rating = %s, song_rating_count = %s WHERE song_id = %s", (self.data['rating'], rating_count, self.id))
    if not skip_album_update:
        for album in self.albums:
            album.update_rating()
def update_rating(self):
    # Recompute this album's rating independently for every station it
    # appears on, from the per-value rating histogram of active users.
    for sid in db.c.fetch_list(
            "SELECT sid FROM r4_album_sid WHERE album_id = %s", (self.id, )):
        # Histogram: one row per distinct rating value with its user count,
        # restricted to songs that still exist on this station and to users
        # who are not radio_inactive.
        ratings = db.c.fetch_all(
            "SELECT r4_song_ratings.song_rating_user AS rating, COUNT(r4_song_ratings.user_id) AS count "
            "FROM r4_songs "
            "JOIN r4_song_sid ON (r4_songs.song_id = r4_song_sid.song_id AND r4_song_sid.sid = %s AND r4_song_sid.song_exists = TRUE) "
            "JOIN r4_song_ratings ON (r4_song_sid.song_id = r4_song_ratings.song_id) "
            "JOIN phpbb_users ON (r4_song_ratings.user_id = phpbb_users.user_id AND phpbb_users.radio_inactive = FALSE) "
            "WHERE r4_songs.album_id = %s "
            "GROUP BY rating ", (sid, self.id))
        # rating_calculator folds the histogram into achieved vs. maximum
        # achievable points.
        (points, potential_points) = rating.rating_calculator(ratings)
        log.debug(
            "song_rating", "%s album ratings for %s (%s)" %
            (potential_points, self.data['name'],
             config.station_id_friendly[sid]))
        # Only publish a rating once enough rating mass has accumulated.
        if points > 0 and potential_points > config.get(
                "rating_threshold_for_calc"):
            # Map the points ratio onto the 1-5 scale; keep the precise
            # value for storage and a 1-decimal value for display.
            self.rating_precise = ((points / potential_points) * 4) + 1
            self.data['rating'] = round(self.rating_precise, 1)
            self.data['rating_count'] = potential_points
            log.debug(
                "album_rating", "%s new rating for %s" %
                (self.rating_precise, self.data['name']))
            db.c.update(
                "UPDATE r4_album_sid SET album_rating = %s, album_rating_count = %s WHERE album_id = %s AND sid = %s",
                (self.rating_precise, potential_points, self.id, sid))
def post(self):
    """Update the Rainwave display name of a Discord-linked user.

    Restricted to trusted IP addresses. Silently does nothing when no user
    is linked to the given Discord ID."""
    if self.request.remote_ip not in config.get("api_trusted_ip_addresses"):
        raise APIException("auth_failed", f"{self.request.remote_ip} is not allowed to access this endpoint.")
    discord_user_id = self.get_argument("discord_user_id")
    nickname = self.get_argument("nickname")
    possible_id = db.c.fetch_var(
        "SELECT user_id FROM phpbb_users WHERE discord_user_id = %s",
        (discord_user_id,),
    )
    if possible_id:
        db.c.update(
            (
                "UPDATE phpbb_users SET "
                " radio_username = %s "
                "WHERE user_id = %s"
            ),
            (nickname, possible_id),
        )
    self.append_standard("yes")
def get(self, station=None):
    """Render the station page: detect mobile clients from the User-Agent
    and build the page title (default station omits the station name)."""
    ua = (self.request.headers.get("User-Agent") or '').lower()
    self.mobile = "mobile" in ua or "android" in ua
    if self.sid == config.get("default_station"):
        page_title = self.locale.translate("page_title_on_google")
    else:
        page_title = "%s %s" % (
            self.locale.translate("page_title_on_google"),
            self.locale.translate("station_name_%s" % self.sid),
        )
    self.render(
        self.page_template,
        request=self,
        site_description=self.locale.translate(
            "station_description_id_%s" % self.sid
        ),
        revision_number=config.build_number,
        jsfiles=self.jsfiles,
        mobile=self.mobile,
        station_name=page_title,
        dj=self.user.is_dj(),
    )
def get(self):
    """Emit the bootstrap JavaScript preamble: locale/config values, a
    window.MOBILE flag from the User-Agent, and the opening of the
    BOOTSTRAP assignment.

    Bug fix: a request without a User-Agent header previously crashed with
    AttributeError (.lower() on None); the header is now defaulted to "".
    """
    self.set_header("Content-Type", "text/javascript")
    self.append("locales", api.locale.locale_names)
    self.append("cookie_domain", config.get("cookie_domain"))
    self.append("on_init", [])
    self.append("on_measure", [])
    self.append("on_draw", [])
    self.append("websocket_host", config.get("websocket_host"))
    self.post()
    ua = (self.request.headers.get("User-Agent") or "").lower()
    if ua.find("mobile") != -1 or ua.find("android") != -1:
        self.write("window.MOBILE = true;")
    else:
        self.write("window.MOBILE = false;")
    self.write("var BOOTSTRAP=")
def get(self):
    """Render the station page: detect mobile clients from the User-Agent
    and build the page title (default station omits the station name).

    Bug fix: a request without a User-Agent header previously crashed with
    AttributeError (.lower() on None); the header is now defaulted to "".
    Long-dead commented-out render arguments have been removed.
    """
    ua = (self.request.headers.get("User-Agent") or "").lower()
    self.mobile = ua.find("mobile") != -1 or ua.find("android") != -1
    if self.sid == config.get("default_station"):
        page_title = self.locale.translate("page_title_on_google")
    else:
        page_title = "%s %s" % (
            self.locale.translate("page_title_on_google"),
            self.locale.translate("station_name_%s" % self.sid))
    self.render(
        self.page_template,
        request=self,
        site_description=self.locale.translate(
            "station_description_id_%s" % self.sid),
        revision_number=config.build_number,
        jsfiles=self.jsfiles,
        mobile=self.mobile,
        station_name=page_title,
        dj=self.user.is_dj())
def _start(callback):
    """Kick off listener-count fetches for every relay/station/stream combo.

    Bug fix: the blocking ``tornado.httpclient.HTTPClient`` does not accept
    a ``callback=`` keyword — callback-style fetches require
    ``AsyncHTTPClient`` (as the other icecast-sync path already uses).
    The client is also constructed once instead of per-request.
    """
    global in_process
    if in_process:
        log.warn("icecast_sync", "Previous operation did not finish!")
    stream_names = {}
    for sid in config.station_ids:
        stream_names[sid] = config.get_station(sid, 'stream_filename')
    http_client = tornado.httpclient.AsyncHTTPClient()
    for relay, relay_info in config.get("relays").iteritems():
        relay_base_url = "%s%s:%s/admin/listclients?mount=/" % (
            relay_info['protocol'], relay_info['ip_address'],
            relay_info['port'])
        for sid in relay_info['sids']:
            for ftype in ('.mp3', '.ogg'):
                try:
                    handler = IcecastSyncCallback(relay, relay_info, ftype,
                                                  sid, callback)
                    in_process[handler] = False
                    http_client.fetch(
                        relay_base_url + stream_names[sid] + ftype,
                        auth_username=relay_info['admin_username'],
                        auth_password=relay_info['admin_password'],
                        callback=handler.process)
                except Exception as e:
                    log.exception(
                        "icecast_sync",
                        "Could not sync %s %s.%s" %
                        (relay, stream_names[sid], ftype), e)
    callback()
def valid_relay(str, request=None):
    """Return the name of the configured relay whose IP address matches, or None."""
    if not str:
        return None
    matches = (name for name, value in config.get("relays").iteritems()
               if value['ip_address'] == str)
    return next(matches, None)
def monitor():
    """Write a pid file and watch every configured song directory for changes.

    Blocks forever, pumping the album-art queue once a minute; on any
    interruption the watchdog observers are stopped and joined.
    Fix: the pid file is now opened with a context manager so the handle is
    closed even if the write fails.
    """
    _common_init()
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))
    observers = []
    for directory, sids in config.get("song_dirs").iteritems():
        observer = watchdog.observers.Observer()
        observer.schedule(FileEventHandler(directory, sids), directory, recursive=True)
        observer.start()
        observers.append(observer)
    try:
        while True:
            time.sleep(60)
            _process_album_art_queue()
    except:
        # Deliberate broad catch: any interruption (incl. Ctrl-C) shuts the
        # observers down cleanly instead of crashing out with a traceback.
        for observer in observers:
            observer.stop()
    for observer in observers:
        observer.join()
def do_phpbb_auth(self):
    """Try to authenticate the request against phpBB session cookies.

    First validates an existing phpBB session; failing that, tries the
    "remember me" autologin key.  Sets ``self.user`` and returns True on
    success, False otherwise.
    Idiom fix: replaced the ``if not user_id: pass / else:`` inversion with
    a positive guard — behavior is unchanged.
    """
    phpbb_cookie_name = config.get("phpbb_cookie_name") + "_"
    user_id = fieldtypes.integer(
        self.get_cookie(phpbb_cookie_name + "u", ""))
    if user_id:
        if self._verify_phpbb_session(user_id):
            # update_phpbb_session is done by verify_phpbb_session if successful
            self.user = User(user_id)
            self.user.ip_address = self.request.remote_ip
            self.user.authorize(self.sid, None, bypass=True)
            return True
        if not self.user and self.get_cookie(phpbb_cookie_name + "k"):
            # Autologin: the "k" cookie is an md5'd session key stored server-side.
            can_login = db.c.fetch_var(
                "SELECT 1 FROM phpbb_sessions_keys WHERE key_id = %s AND user_id = %s",
                (hashlib.md5(self.get_cookie(phpbb_cookie_name + "k")).hexdigest(),
                 user_id))
            if can_login == 1:
                self._update_phpbb_session(
                    self._get_phpbb_session(user_id))
                self.user = User(user_id)
                self.user.ip_address = self.request.remote_ip
                self.user.authorize(self.sid, None, bypass=True)
                return True
    return False
def post(self):
    """Store a Discord avatar URL for the phpBB user linked to a Discord id.

    Only trusted API IPs may call this; unknown Discord ids are silently
    ignored (no matching phpBB row means no update).
    """
    remote = self.request.remote_ip
    if remote not in config.get("api_trusted_ip_addresses"):
        raise APIException(
            "auth_failed",
            f"{remote} is not allowed to access this endpoint.",
        )
    discord_user_id = self.get_argument("discord_user_id")
    avatar = self.get_argument("avatar")
    avatar_url = (
        f"https://cdn.discordapp.com/avatars/{discord_user_id}/{avatar}.png?size=320"
    )
    possible_id = db.c.fetch_var(
        "SELECT user_id FROM phpbb_users WHERE discord_user_id = %s",
        (discord_user_id,),
    )
    if possible_id:
        db.c.update(
            (
                "UPDATE phpbb_users SET "
                " user_avatar_type = %s, "
                " user_avatar = %s "
                "WHERE user_id = %s"
            ),
            ("avatar.driver.remote", avatar_url, possible_id),
        )
    self.append_standard("yes")
def _fix_codepage_1252(filename, path=None):
    """Ensure a scanned filename decodes as UTF-8, optionally renaming the file on disk.

    Returns the decoded fully-qualified filename.  Raises (after recording a
    scan error where possible) when the name is not valid UTF-8 and cannot —
    or, by configuration, may not — be renamed.
    NOTE(review): this is Python 2 code — `filename` is assumed to be a byte
    string, and .decode() is what performs the validation.
    """
    fqfn = filename
    if path:
        fqfn = os.path.normpath(path + os.sep + filename)
    try:
        # Round-trips cleanly when the name is already valid UTF-8.
        fqfn = fqfn.decode("utf-8")
    except UnicodeDecodeError:
        if config.get("scanner_rename_files"):
            try:
                # Rename the on-disk file to a lossy-but-valid UTF-8 name,
                # then use that name from here on.
                os.rename(fqfn, fqfn.decode("utf-8", errors="ignore"))
                fqfn = fqfn.decode("utf-8", errors="ignore")
            except OSError as e:
                # Permission/filesystem failure: surface a human-actionable error.
                new_e = Exception(
                    "Permissions or file error renaming non-UTF-8 filename. Please rename or fix permissions."
                )
                _add_scan_error(fqfn.decode("utf-8", errors="ignore"), new_e)
                raise new_e
            except Exception as e:
                _add_scan_error(fqfn.decode("utf-8", errors="ignore"), e)
                raise
        else:
            # Renaming disabled by config: re-raise the UnicodeDecodeError
            # as-is (no scan error recorded here) and let the caller skip it.
            raise
    except Exception as e:
        # Any other decode failure: record it against the lossy name and propagate.
        _add_scan_error(fqfn.decode("utf-8", errors="ignore"), e)
        raise
    return fqfn
def prepare(self):
    """Set up the index request: beta gating, CSS baking, and the dev-mode JS file list."""
    if config.get("public_beta"):
        self.perks_required = False
    super(R4Index, self).prepare()
    self.json_payload = {}
    self.jsfiles = None
    dev_mode = (config.get("web_developer_mode")
                or config.get("developer_mode")
                or config.get("test_mode"))
    if dev_mode:
        buildtools.bake_css()
        js_root = os.path.join(os.path.dirname(__file__), "../static/js4")
        # Collect every JS file, rooted at the "static/js4" path fragment.
        self.jsfiles = [
            os.path.join(root[root.find("static/js4"):], f)
            for root, subdirs, files in os.walk(js_root)
            for f in files
        ]
async def get(self):
    """Discord OAuth flow: redirect out (step 1) or verify and log in (steps 2-4).

    Robustness fix: a missing/expired ``r4_oauth_secret`` cookie previously
    crashed with ``AttributeError`` on ``None.encode()``; it now rejects the
    attempt cleanly with ``OAuthRejectedError``.
    """
    if self.get_argument("code", False):
        # step 2 - we've come back from Discord with a state parameter
        # that needs to be verified against the user's cookie.
        oauth_secret = self.get_cookie("r4_oauth_secret")
        if not oauth_secret:
            raise OAuthRejectedError
        oauth_expected_state = bcrypt.hashpw(
            oauth_secret.encode(), OAUTH_STATE_SALT).decode("utf-8")
        self.set_cookie("r4_oauth_secret", "")
        destination, oauth_state = self.get_argument("state").split(
            "$", maxsplit=1)
        if oauth_expected_state != oauth_state:
            raise OAuthRejectedError
        # step 3 - we've come back from Discord with a unique auth code, get
        # token that we can use to act on behalf of user with discord
        token = await self.get_token(self.get_argument("code"))
        # step 4 - get user info from Discord and login to Rainwave
        await self.register_and_login(token, destination)
    else:
        # step 1 - redirect to Discord login page
        destination = self.get_destination()
        oauth_secret = secrets.token_hex()
        self.set_cookie("r4_oauth_secret", oauth_secret)
        oauth_state = destination + "$" + bcrypt.hashpw(
            oauth_secret.encode(), OAUTH_STATE_SALT).decode("utf-8")
        self.authorize_redirect(
            redirect_uri=REDIRECT_URI,
            client_id=config.get("discord_client_id"),
            scope=["identify"],
            response_type="code",
            extra_params={
                "prompt": "none",
                "state": oauth_state
            },
        )
def update_rating(self, skip_album_update=False):
    """Recompute and persist this song's site-wide rating from active users' ratings.

    Only writes a new rating once enough rating points have accumulated;
    cascades to the song's albums unless skip_album_update is set.
    """
    rating_rows = db.c.fetch_all(
        "SELECT song_rating_user AS rating, COUNT(user_id) AS count FROM r4_song_ratings JOIN phpbb_users USING (user_id) WHERE song_id = %s AND radio_inactive = FALSE AND song_rating_user IS NOT NULL GROUP BY song_rating_user",
        (self.id, ),
    )
    points, potential_points = rating.rating_calculator(rating_rows)
    log.debug("song_rating",
              "%s ratings for %s" % (potential_points, self.filename))
    threshold = config.get("rating_threshold_for_calc")
    if points > 0 and potential_points > threshold:
        # Map the points ratio onto the site's 1-5 rating scale.
        self.data["rating"] = ((points / potential_points) * 4) + 1
        self.data["rating_count"] = potential_points
        log.debug(
            "song_rating",
            "rating update: %s for %s" % (self.data["rating"], self.filename),
        )
        db.c.update(
            "UPDATE r4_songs SET song_rating = %s, song_rating_count = %s WHERE song_id = %s",
            (self.data["rating"], potential_points, self.id),
        )
        if not skip_album_update:
            for album in self.albums:
                album.update_rating()
def monitor():
    """Write a pid file and watch each configured song directory with an RWObserver.

    Blocks forever; the ``finally`` guarantees observers are stopped and
    joined on any exit.  Fix: the pid file is now opened with a context
    manager so the handle is closed promptly instead of being left to GC.
    """
    _common_init()
    with open("%s/scanner.pid" % config.get_directory("pid_dir"), 'w') as pid_file:
        pid_file.write(str(os.getpid()))
    observers = []
    for directory, sids in config.get("song_dirs").iteritems():
        observer = RWObserver()
        observer.schedule(FileEventHandler(directory, sids), directory, recursive=True)
        observer.start()
        log.info("scan", "Observing %s with sids %s" % (directory, repr(sids)))
        observers.append(observer)
    try:
        while True:
            time.sleep(1)
    finally:
        for observer in observers:
            observer.stop()
        for observer in observers:
            observer.join()
def _html_write_error(self, status_code, **kwargs):
    """Render a human-readable HTML error page for the given status code.

    Bug fixes: ``exc`` was only bound when ``exc_info`` was present in
    kwargs but was read unconditionally afterwards (NameError on
    exception-less errors); the traceback dump likewise assumed
    ``exc_info`` existed.  Also replaced Python-2-only ``dict.has_key``
    with the ``in`` operator.
    """
    exc = None
    if "exc_info" in kwargs:
        exc = kwargs['exc_info'][1]
        # Restart DB on a connection error if that's what we're handling
        if isinstance(exc, (psycopg2.OperationalError, psycopg2.InterfaceError)):
            try:
                db.close()
                db.connect()
                self.append("error", {
                    "code": 500,
                    "tl_key": "db_error_retry",
                    "text": self.locale.translate("db_error_retry")
                })
            except:
                self.append("error", {
                    "code": 500,
                    "tl_key": "db_error_permanent",
                    "text": self.locale.translate("db_error_permanent")
                })
        elif isinstance(exc, APIException):
            # Localize the API error message, falling back to en_CA.
            if not isinstance(self.locale, locale.RainwaveLocale):
                exc.localize(locale.RainwaveLocale.get("en_CA"))
            else:
                exc.localize(self.locale)
    if (isinstance(exc, APIException) or isinstance(exc, tornado.web.HTTPError)) and exc.reason:
        self.write(self.render_string("basic_header.html",
                                      title="%s - %s" % (status_code, exc.reason)))
    else:
        self.write(self.render_string("basic_header.html",
                                      title="HTTP %s - %s" % (status_code, tornado.httputil.responses.get(status_code, 'Unknown'))))
    if status_code == 500 or config.get("developer_mode"):
        self.write("<p>")
        self.write(self.locale.translate("unknown_error_message"))
        self.write("</p><p>")
        self.write(self.locale.translate("debug_information"))
        self.write("</p><div class='json'>")
        # Guard: only dump a traceback when we actually have exception info.
        if "exc_info" in kwargs:
            for line in traceback.format_exception(kwargs['exc_info'][0],
                                                   kwargs['exc_info'][1],
                                                   kwargs['exc_info'][2]):
                self.write(line)
        self.write("</div>")
    self.finish()
def sync_frontend_all(sid):
    """POST a sync_update_all request to every running API process for this station."""
    _sync_frontend_all_timed_stop(sid)
    client = AsyncHTTPClient()
    body = urllib.urlencode({"sid": sid})
    api_host = config.get("api_url")
    base_port = config.get("api_base_port")
    for offset in range(config.get("api_num_processes")):
        port = base_port + offset
        client.fetch(
            "http://%s:%s/api4/sync_update_all" % (api_host, port),
            sync_result,
            method='POST',
            body=body)
        log.debug("sync_front",
                  "Sent update_all to API port %s" % (port, ))
def get(self, sid):
    """Advance the station and write the next file/annotation for liquidsoap.

    On a database deadlock, re-opens the DB and retries once after 350ms.
    Bug fix: the retry scheduled the bare bound method ``self.get``, but
    ``get`` requires the ``sid`` argument — the timeout callback raised
    TypeError when it fired.  A lambda now re-passes ``sid``.
    """
    self.success = False
    self.sid = None
    if int(sid) in config.station_ids:
        self.sid = int(sid)
    else:
        return
    try:
        schedule.advance_station(self.sid)
    except psycopg2.extensions.TransactionRollbackError as e:
        if not self.retried:
            self.retried = True
            log.warn(
                "backend",
                "Database transaction deadlock. Re-opening database and setting retry timeout."
            )
            db.close()
            db.open()
            tornado.ioloop.IOLoop.instance().add_timeout(
                datetime.timedelta(milliseconds=350),
                lambda: self.get(sid))
        else:
            raise
    if not config.get("liquidsoap_annotations"):
        self.write(schedule.get_current_file(self.sid))
    else:
        self.write(
            self._get_annotated(schedule.get_current_event(self.sid)))
    self.success = True
def _process(self, event):
    """Route a filesystem event to a directory scan, file scan, or file disable."""
    # Ignore WinSCP events.
    if event.pathname.endswith('.filepart'):
        return
    matched_sids = []
    try:
        # Collect the station ids of every configured song dir this path falls under.
        for song_dirs_path, sids in config.get('song_dirs').iteritems():
            if event.pathname.startswith(song_dirs_path):
                matched_sids.extend(sids)
    except Exception as xception:
        _add_scan_error(event.pathname, xception)
    log.debug("scan",
              "%s %s %s" % (event.maskname, event.pathname, matched_sids))
    try:
        if event.dir:
            _scan_directory(event.pathname, matched_sids)
        elif not matched_sids or event.mask in DELETE_OPERATION:
            # No station claims the file, or it was deleted/moved away.
            _disable_file(event.pathname)
        else:
            _scan_file(event.pathname, matched_sids)
        _process_album_art_queue()
    except Exception as xception:
        _add_scan_error(event.pathname, xception)
def start_icecast_sync():
    """Fire async listener-list requests at every relay for each station's OGG stream.

    Populates the module-level ``all_returned`` bookkeeping dict; each
    IcecastSyncCallback marks its own key done as responses arrive.
    """
    global all_returned
    stream_names = {}
    for sid in config.station_ids:
        stream_names[sid] = config.get_station(sid)['stream_filename']
    if all_returned:
        log.warn("icecast_sync", "Previous operation did not finish!")
    all_returned = {}
    listener_ids = {}
    for relay, relay_info in config.get("relays").iteritems():
        listener_ids[relay] = []
        relay_base_url = "%s%s:%s/admin/listclients?mount=/" % (
            relay_info['protocol'], relay_info['ip_address'],
            relay_info['port'])
        for sid in relay_info['sids']:
            # MP3 sync is intentionally skipped: the beta version of the
            # site only serves OGG streams.
            key = "%s_%s_ogg" % (relay, sid)
            all_returned[key] = False
            ogg_handler = IcecastSyncCallback(relay, relay_info, key, sid)
            ogg_client = tornado.httpclient.AsyncHTTPClient()
            ogg_client.fetch(relay_base_url + stream_names[sid] + ".ogg",
                             ogg_handler.respond,
                             auth_username=relay_info['admin_username'],
                             auth_password=relay_info['admin_password'])
def test_scheduler(self):
    """Exercise schedule loading and advancement starting from a blank schedule."""
    # We need a completely blank slate for this
    db.c.update("DELETE FROM r4_schedule")
    db.c.update("DELETE FROM r4_elections")
    # A hundred fake songs to fill our range out if we're not running in test mode or running in sqlite
    if config.get("db_type") != "postgres" and not config.test_mode:
        db.c.update("DELETE FROM r4_songs")
        for _ in range(100):
            playlist.Song.create_fake(1)
    reset_schedule(1)
    # First test:
    # Create an event 5 minutes from now (the fake songs created above are all 60 seconds long)
    # Load the schedule, then watch the predicted start times of each election
    # The elections should work around the event properly
    schedule.load()
    # Second test:
    # Cycle through the elections and make sure the event gets played properly
    for _ in range(10):
        schedule.advance_station(1)
        schedule.post_process(1)
        schedule.current[1].to_dict()
    # Third test:
    # Reset the schedule, fill with elections, then create an event that is supposed to happen
    # between the elections already created in next.  Advance the schedule.
    # Observe the start times.
    # Fourth test:
    # Reset the schedule, fill with elections, then add a 1up.
    # The 1up should play as soon as you advance the schedule.
def init_sub():
    """Connect a ZeroMQ SUB socket to the configured endpoint and wrap it in a ZMQStream."""
    global _sub_stream
    ctx = zmq.Context()
    sub_socket = ctx.socket(zmq.SUB)
    sub_socket.connect(config.get("zeromq_sub").encode("utf-8"))
    # Empty subscription prefix: receive every published message.
    sub_socket.setsockopt(zmq.SUBSCRIBE, b"")
    _sub_stream = zmqstream.ZMQStream(sub_socket)
def load_tag_from_file(self, filename):
    """Read ID3 tags from an MP3 and populate this song's metadata.

    Sets title/artist/album (required), genre/link_text/url (optional),
    replay gain (running mp3gain if configured and no gain tag exists),
    and the track length.
    Raises PassableScanError for missing required tags, Exception if
    mp3gain reports an error.
    Refactor: six near-identical tag-extraction stanzas collapsed into one
    local helper; which values get .strip()'d is unchanged.
    """
    def _first_text(tags, frame):
        # First text value of an ID3 frame as unicode, or None if absent/empty.
        frames = tags.getall(frame)
        if len(frames) > 0 and len(unicode(frames[0])) > 0:
            return unicode(frames[0])
        return None

    f = MP3(filename)
    self.filename = filename
    if not f.tags:
        raise PassableScanError("Song filename \"%s\" has no tags." % filename)

    title = _first_text(f.tags, 'TIT2')
    if title is None:
        raise PassableScanError("Song filename \"%s\" has no title tag." % filename)
    self.data['title'] = title.strip()

    artist = _first_text(f.tags, 'TPE1')
    if artist is None:
        raise PassableScanError("Song filename \"%s\" has no artist tag." % filename)
    self.artist_tag = artist  # deliberately not stripped, matching prior behavior

    album = _first_text(f.tags, 'TALB')
    if album is None:
        raise PassableScanError("Song filename \"%s\" has no album tag." % filename)
    self.album_tag = album.strip()

    genre = _first_text(f.tags, 'TCON')
    if genre is not None:
        self.genre_tag = genre  # not stripped, matching prior behavior

    link_text = _first_text(f.tags, 'COMM')
    if link_text is not None:
        self.data['link_text'] = link_text.strip()

    url = _first_text(f.tags, 'WXXX')
    if url is not None:
        self.data['url'] = url.strip()

    self.replay_gain = self._get_replaygain(f)
    if not self.replay_gain and config.get("mp3gain_scan"):
        # Run mp3gain quietly, finding peak while not clipping, output DB friendly, and preserving original timestamp
        gain_std, gain_error = subprocess.Popen(
            [
                _mp3gain_path, "-o", "-q", "-s", "i", "-p", "-k", "-T",
                self.filename
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE).communicate()
        if len(gain_error) > 0:
            raise Exception("Error when replay gaining \"%s\": %s" %
                            (filename, gain_error))
        # Re-read the file so the freshly written gain tag is picked up.
        f = MP3(filename)
        self.replay_gain = self._get_replaygain(f)
    self.data['length'] = int(f.info.length)