Example No. 1
def _add_scan_error(filename, xception):
	scan_errors = cache.get("backend_scan_errors")
	if not scan_errors:
		scan_errors = []

	scan_errors.insert(0, { "time": int(timestamp()), "file": filename, "type": xception.__class__.__name__, "error": str(xception) })
	if len(scan_errors) > 100:
		scan_errors = scan_errors[0:100]
	cache.set("backend_scan_errors", scan_errors)
	log.exception("scan", "Error scanning %s" % filename, xception)
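The snippets on this page all assume a module-level cache object exposing at least get and set. Below is a minimal in-memory stand-in so the capped-list pattern from _add_scan_error can be exercised locally; the names cache_get/cache_set and the dict backend are illustrative assumptions, not the project's actual cache module (which presumably wraps memcached or similar).

import time

# Hypothetical in-memory stand-in for the cache backend the examples assume.
_store = {}

def cache_get(key):
	# Missing keys come back as None, which is why the snippets fall back
	# with "if not scan_errors: scan_errors = []".
	return _store.get(key)

def cache_set(key, value):
	_store[key] = value

# The read-modify-write pattern from _add_scan_error, capped at 100 entries:
errors = cache_get("backend_scan_errors") or []
errors.insert(0, {"time": int(time.time()), "file": "example.mp3", "type": "IOError", "error": "example"})
errors = errors[:100]
cache_set("backend_scan_errors", errors)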
Example No. 2
def start():
	global _directories
	global _scan_errors
	
	_scan_errors = cache.get("backend_scan_errors")
	if not _scan_errors:
		_scan_errors = []
	_directories = config.get("song_dirs", True)
	full_update()
	monitor()
Example No. 3
	def _auth_anon_user(self, api_key, bypass = False):
		if not bypass:
			cache_key = unicodedata.normalize('NFKD', u"api_key_listen_key_%s" % api_key).encode('ascii', 'ignore')
			listen_key = cache.get(cache_key)
			if not listen_key:
				listen_key = db.c.fetch_var("SELECT api_key_listen_key FROM r4_api_keys WHERE api_key = %s AND user_id = 1", (self.api_key,))
				if not listen_key:
					return
				else:
					self.data['listen_key'] = listen_key
			else:
				self.data['listen_key'] = listen_key
		self.authorized = True
Example No. 4
	def _auth_anon_user(self, api_key, bypass = False):
		if not bypass:
			auth_against = cache.get("ip_%s_api_key" % self.ip_address)
			if not auth_against:
				auth_against = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE api_ip = %s AND user_id = 1", (self.ip_address,))
				if not auth_against:
					# log.debug("user", "Anonymous user key %s not found." % api_key)
					return
				cache.set("ip_%s_api_key" % self.ip_address, auth_against)
			if auth_against != api_key:
				# log.debug("user", "Anonymous user key %s does not match key %s." % (api_key, auth_against))
				return
		self.authorized = True
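The _auth_anon_user variants above all follow the same cache-aside flow: check the cache, fall back to the database, and write the result back so the next request skips the query. A standalone sketch of that flow, with a plain dict and a callable standing in for the cache and the r4_api_keys query (illustrative assumptions, not the project's code):

def lookup_anon_api_key(ip_address, cache_store, db_lookup):
	# cache_store: any dict-like store; db_lookup: stand-in for the DB query.
	key = "ip_%s_api_key" % ip_address
	value = cache_store.get(key)
	if not value:
		value = db_lookup(ip_address)
		if not value:
			return None
		cache_store[key] = value  # repopulate so the next call is a cache hit
	return value

print(lookup_anon_api_key("10.0.0.1", {}, lambda ip: "abcd1234"))  # -> abcd1234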
Example No. 5
	def _auth_anon_user(self, api_key, bypass = False):
		if not bypass:
			cache_key = unicodedata.normalize('NFKD', u"api_key_ip_%s" % api_key).encode('ascii', 'ignore')
			auth_against = cache.get(cache_key)
			if not auth_against:
				auth_against = db.c.fetch_var("SELECT api_ip FROM r4_api_keys WHERE api_key = %s AND user_id = 1", (self.api_key,))
				if not auth_against or not auth_against == self.ip_address:
					log.debug("user", "Anonymous user key %s not found for IP %s: record in DB is %s." % (api_key, self.ip_address, auth_against))
					log.debug("user", "Re-generating API key %s." % api_key)
					self.ensure_api_key(reuse = api_key)
				cache.set(cache_key, auth_against)
			if auth_against != self.ip_address:
				log.debug("user", "Anonymous user key %s has IP %s which does not match IP %s." % (api_key, auth_against, self.ip_address))
				return
		self.authorized = True
Example No. 6
def _add_scan_error(filename, xception, full_exc=None):
	scan_errors = cache.get("backend_scan_errors")
	if not scan_errors:
		scan_errors = []

	eo = { "time": int(timestamp()), "file": filename, "type": xception.__class__.__name__, "error": str(xception), "traceback": "" }
	if not isinstance(xception, PassableScanError) and not isinstance(xception, IOError) and not isinstance(xception, OSError):
		if full_exc:
			eo['traceback'] = traceback.format_exception(*full_exc)			#pylint: disable=W0142
		else:
			eo['traceback'] = traceback.format_exception(*sys.exc_info())
	scan_errors.insert(0, eo)
	if len(scan_errors) > 100:
		scan_errors = scan_errors[0:100]
	cache.set("backend_scan_errors", scan_errors)
	log.exception("scan", "Error scanning %s" % filename, xception)
Example No. 7
def start(full_scan):
	global _directories
	global _scan_errors
	
	_scan_errors = cache.get("backend_scan_errors")
	if not _scan_errors:
		_scan_errors = []
	_directories = config.get("song_dirs")
	
	p = psutil.Process(os.getpid())
	p.set_nice(10)
	p.set_ionice(psutil.IOPRIO_CLASS_IDLE)
	
	if full_scan:
		full_update()
	else:
		monitor()
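set_nice and set_ionice are the pre-2.0 psutil spellings; current psutil exposes them as nice() and ionice(), and IOPRIO_CLASS_IDLE is Linux-only, which is presumably why the later variants wrap these calls in try/except. A hedged sketch of the same priority drop against the modern API:

import os
import psutil

p = psutil.Process(os.getpid())
try:
	p.nice(10)                          # lower CPU priority (POSIX niceness)
	p.ionice(psutil.IOPRIO_CLASS_IDLE)  # idle I/O priority class (Linux only)
except (psutil.Error, AttributeError, OSError):
	pass                                # best effort, as in the later examples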
Example No. 8
    def post(self):
        # limit size of submission
        for k, v in self.cleaned_args.iteritems():
            if isinstance(v, (str, unicode)):
                self.cleaned_args[k] = v[:2048]
        self.cleaned_args["user_id"] = self.user.id
        self.cleaned_args["username"] = self.user.data["name"]
        self.cleaned_args["time"] = time.time()

        reports = cache.get("error_reports")
        if not isinstance(reports, list):
            reports = []

        reports.insert(0, self.cleaned_args)
        cache.set("error_reports", reports)

        self.append_standard("report_submitted", "Error report submitted.")
Example No. 9
def _common_init():
	global _scan_errors
	_scan_errors = cache.get("backend_scan_errors")
	if not _scan_errors:
		_scan_errors = []

	try:
		p = psutil.Process(os.getpid())
		p.set_nice(10)
	except:
		pass

	try:
		p = psutil.Process(os.getpid())
		p.set_ionice(psutil.IOPRIO_CLASS_IDLE)
	except:
		pass
Example No. 10
def _common_init():
	if config.get("mp3gain_scan") and not mp3gain_path:
		raise Exception("mp3gain_scan flag in config is enabled, but could not find mp3gain executable.")

	global _scan_errors
	_scan_errors = cache.get("backend_scan_errors")
	if not _scan_errors:
		_scan_errors = []

	try:
		p = psutil.Process(os.getpid())
		p.set_nice(10)
	except:
		pass

	try:
		p = psutil.Process(os.getpid())
		p.set_ionice(psutil.IOPRIO_CLASS_IDLE)
	except:
		pass
Example No. 11
	def get(self):
		self.write(self.render_string("bare_header.html", title="Relay Status"))
		status = cache.get("relay_status")
		self.write("<div style='float: right'>")
		if status:
			for relay, count in status.iteritems():
				self.write("%s: %s listeners<br />" % (relay, count))
		else:
			self.write("No relay status available.")
		self.write("</div>")
		self.write("<div>")
		total = 0
		for row in db.c.fetch_all("SELECT sid, lc_guests AS c FROM r4_listener_counts ORDER BY lc_time DESC, sid LIMIT %s", (len(config.station_ids),)):
			total += row['c']
			self.write("%s: %s listeners<br />" % (config.station_id_friendly[row['sid']], row['c']))
		if total == 0:
			self.write("No listener stats available.")
		else:
			self.write("<br />")
			self.write("<b>Total: %s listeners</b>" % total)
		self.write("</div>")
		self.write(self.render_string("basic_footer.html"))
Example No. 12
def attach_info_to_request(request):
	# Front-load all non-animated content ahead of the schedule content
	# Since the schedule content is the most animated on R3, setting this content to load
	# first has a good impact on the perceived animation smoothness since table redrawing
	# doesn't have to take place during the first few frames.

	if request.user:
		request.append("user", request.user.to_public_dict())
		
	if 'playlist' in request.request.arguments:
		request.append("all_albums", playlist.get_all_albums_list(request.user))
	elif 'artist_list' in request.request.arguments:
		request.append("artist_list", playlist.get_all_artists_list(request.sid))
	elif 'init' not in request.request.arguments:
		request.append("album_diff", cache.get_station(request.sid, 'album_diff'))
	
	request.append("requests_all", cache.get_station(request.sid, "request_all"))
	request.append("calendar", cache.get("calendar"))
	request.append("listeners_current", cache.get_station(request.sid, "listeners_current"))
	
	sched_next = []
	sched_history = []
	if request.user:
		request.append("requests_user", request.user.get_requests())
		# TODO: Some mixing of pre-dictionaried items here might help speed
		request.append("sched_current", cache.get_station(request.sid, "sched_current").to_dict(request.user))
		for evt in cache.get_station(request.sid, "sched_next"):
			sched_next.append(evt.to_dict(request.user))
		for evt in cache.get_station(request.sid, "sched_history"):
			sched_history.append(evt.to_dict(request.user))
	else:
		request.append("sched_current", cache.get_station(request.sid, "sched_current_dict"))
		sched_next = cache.get_station(request.sid, "sched_next_dict")
		sched_history = cache.get_station(request.sid, "sched_history_dict")
	request.append("sched_next", sched_next)
	request.append("sched_history", sched_history)
Example No. 13
def get_weather(bust_cache=False):
    if bust_cache:
        weather = {}
    else:
        weather = cache.get('weather') or {}

    if not weather:
        weather = {
            'sun': {},
            'forecast': [],
            'forecast_hour': [],
        }

        response = requests.get(
            'https://www.yr.no/place/Sweden/Stockholm/Stockholm/forecast.xml',
            headers={
                'user-agent': 'johanli.com',
            },
        )

        forecast_xml = etree.fromstring(response.content)

        sun = forecast_xml.xpath('sun')[0]

        weather['sun'] = {
            'rise': sun.get('rise'),
            'set': sun.get('set'),
        }

        for time_element in forecast_xml.xpath('forecast/tabular/time')[:28]:
            weather['forecast'].append({
                'time': time_element.get('from'),
                'description': time_element.find('symbol').get('name'),
                'temperature': time_element.find('temperature').get('value'),
            })

        response = requests.get(
            'https://www.yr.no/place/Sweden/Stockholm/Stockholm/forecast_hour_by_hour.xml',
            headers={
                'user-agent': 'johanli.com',
            },
        )

        forecast_hour_xml = etree.fromstring(response.content)

        for time_element in forecast_hour_xml.xpath('forecast/tabular/time')[:24]:
            weather['forecast_hour'].append({
                'time': time_element.get('from'),
                'description': time_element.find('symbol').get('name'),
                'temperature': time_element.find('temperature').get('value'),
            })

        cache.set('weather', weather, 7200)

    return weather
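The part of get_weather worth noting is the cache-or-fetch decision wrapped around the two yr.no requests: everything inside the "if not weather:" block runs at most once per 7200-second TTL unless bust_cache=True is passed. A standalone sketch of just that decision, with a plain dict standing in for the cache backend (an illustrative assumption):

_cache = {}

def cached_weather(fetch, bust_cache=False):
    # fetch() stands in for the two requests.get calls plus XML parsing above.
    weather = {} if bust_cache else _cache.get('weather', {})
    if not weather:
        weather = fetch()
        _cache['weather'] = weather  # the real code also passes a 7200 s TTL
    return weather

print(cached_weather(lambda: {'sun': {'rise': '06:00'}}))                   # fetches
print(cached_weather(lambda: {'sun': {'rise': 'not called'}}))              # cache hit
print(cached_weather(lambda: {'sun': {'rise': '06:01'}}, bust_cache=True))  # refetches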
Example No. 14
	def post(self):
		errors = cache.get("backend_scan_errors") or [ { "time": time.time(), "backend_scan_errors": "No errors in memory." } ]
		self.append(self.return_name, errors)
Example No. 15
def js(content, location):
    initialContentLines = content.split("\n")
    finalContentLines = []
    firstLocation = location

    finalContentLines.append("var _assets = {};")

    while len(initialContentLines) > 0:
        location = firstLocation
        initialContentLines[0] = initialContentLines[0].strip()

        # "// @import!" (handled by the next branch) also matches this prefix,
        # so exclude it here to keep that branch reachable.
        if initialContentLines[0].startswith("// @import") and not initialContentLines[0].startswith("// @import!"):
            try:
                library = initialContentLines[0][11:]
                libraryName = ""

                if not ((location + "/" + library) in importedLibs):
                    importedLibs.append(location + "/" + library)

                    if location.startswith("http://") or location.startswith(
                            "https://"):
                        if not (library.startswith("http://")
                                or library.startswith("https://")):
                            library = location + "/" + library

                        libraryName = library.split("/")[-1].split(".")[0]

                        finalURL = urllib.parse.urljoin(
                            library, ".") + library.split("/")[-1]

                        if not finalURL.endswith(".js"):
                            finalURL += ".js"

                        output.action(
                            _("importLibrary", [libraryName, finalURL]))

                        location = urllib.parse.urljoin(
                            "/".join(
                                _replaceExceptFirst(library, "//",
                                                    "/").split("/")),
                            ".").rstrip("/")

                        try:
                            siteData = cache.get(finalURL)

                            if siteData != False:
                                finalContentLines.append(
                                    js(siteData.decode("utf-8"), location))
                            else:
                                output.warning(
                                    _("unknownImport",
                                      [initialContentLines[0][3:]]))
                        except:
                            output.warning(
                                _("unknownImport",
                                  [initialContentLines[0][3:]]))
                    elif library.startswith("http://") or library.startswith(
                            "https://"):
                        libraryName = library.split("/")[-1].split(".")[0]

                        output.action(
                            _("importLibrary", [libraryName, library]))

                        if not (location.startswith("http://")
                                or location.startswith("https://")):
                            location = urllib.parse.urljoin(
                                "/".join(
                                    _replaceExceptFirst(library, "//",
                                                        "/").split("/")),
                                ".").rstrip("/")

                        if not library.endswith(".js"):
                            library += ".js"

                        try:
                            siteData = cache.get(library)

                            if siteData != False:
                                finalContentLines.append(
                                    js(siteData.decode("utf-8"), location))
                            else:
                                output.warning(
                                    _("unknownImport",
                                      [initialContentLines[0][3:]]))
                        except:
                            output.warning(
                                _("unknownImport",
                                  [initialContentLines[0][3:]]))
                    else:
                        oldLibrary = library
                        library = location + "/" + library
                        libraryName = library.split("/")[-1]

                        if "/" in oldLibrary:
                            location += "/" + "/".join(
                                oldLibrary.split("/")[:-1])

                        output.action(
                            _("importLibrary", [libraryName, library + ".js"]))

                        try:
                            libPath = library.split("/")

                            libPath[-1] += ".js"

                            file = open(
                                os.path.join(*[os.sep, *libPath]).replace(
                                    ":", ":\\"), "r")
                            fileData = file.read()

                            file.close()

                            finalContentLines.append(js(fileData, location))
                        except:
                            output.warning(
                                _("unknownImport",
                                  [initialContentLines[0][3:]]))
                else:
                    output.action(_("circularImport", [library]))
            except:
                output.warning(_("illegalImport",
                                 [initialContentLines[0][3:]]))
        elif initialContentLines[0].startswith("// @import!"):
            try:
                library = initialContentLines[0][12:]
                libraryName = ""

                if location.startswith("http://") or location.startswith(
                        "https://"):
                    if not (library.startswith("http://")
                            or library.startswith("https://")):
                        library = location + "/" + library

                    libraryName = library.split("/")[-1].split(".")[0]

                    finalURL = urllib.parse.urljoin(
                        library, ".") + library.split("/")[-1]

                    if not finalURL.endswith(".js"):
                        finalURL += ".js"

                    output.action(_("importLibrary", [libraryName, finalURL]))

                    location = urllib.parse.urljoin(
                        "/".join(
                            _replaceExceptFirst(library, "//",
                                                "/").split("/")),
                        ".").rstrip("/")

                    try:
                        siteData = cache.get(finalURL)

                        if siteData != False:
                            finalContentLines.append(
                                js(siteData.decode("utf-8"), location))
                        else:
                            output.warning(
                                _("unknownImport",
                                  [initialContentLines[0][3:]]))
                    except:
                        output.warning(
                            _("unknownImport", [initialContentLines[0][3:]]))
                elif library.startswith("http://") or library.startswith(
                        "https://"):
                    libraryName = library.split("/")[-1].split(".")[0]

                    output.action(_("importLibrary", [libraryName, library]))

                    if not (location.startswith("http://")
                            or location.startswith("https://")):
                        location = urllib.parse.urljoin(
                            "/".join(
                                _replaceExceptFirst(library, "//",
                                                    "/").split("/")),
                            ".").rstrip("/")

                        if not library.endswith(".js"):
                            library += ".js"

                    try:
                        siteData = cache.get(library)

                        if siteData != False:
                            finalContentLines.append(
                                js(siteData.decode("utf-8"), location))
                        else:
                            output.warning(
                                _("unknownImport",
                                  [initialContentLines[0][3:]]))
                    except:
                        output.warning(
                            _("unknownImport", [initialContentLines[0][3:]]))
                else:
                    oldLibrary = library
                    library = location + "/" + library
                    libraryName = library.split("/")[-1]

                    if "/" in oldLibrary:
                        location += "/" + "/".join(oldLibrary.split("/")[:-1])

                    output.action(
                        _("importLibrary", [libraryName, library + ".js"]))

                    try:
                        libPath = library.split("/")

                        libPath[-1] += ".js"

                        file = open(
                            os.path.join(*["/", *libPath]).replace(":", ":\\"),
                            "r")
                        fileData = file.read()

                        file.close()

                        finalContentLines.append(js(fileData, location))
                    except:
                        output.warning(
                            _("unknownImport", [initialContentLines[0][3:]]))
            except:
                output.warning(_("illegalImport",
                                 [initialContentLines[0][3:]]))
        elif initialContentLines[0].startswith("// @asset"):
            try:
                asset = initialContentLines[0][10:]
                assetName = ""

                initPath = asset.split("/")[:-1]

                if os.name == "nt":
                    fullPath = os.path.join(location.replace("/", "\\"), asset)
                else:
                    fullPath = urllib.parse.urljoin(location + "/",
                                                    ".") + asset

                if not (fullPath in importedAssets):
                    importedAssets.append(fullPath)

                    if location.startswith("http://") or location.startswith(
                            "https://"):
                        originalAsset = asset

                        if not (asset.startswith("http://")
                                or asset.startswith("https://")):
                            asset = location + "/" + asset

                        assetName = asset.split("/")[-1].split(".")[0]

                        output.action(_("importAsset", [assetName, asset]))

                        # `library` is not defined in this branch; derive the
                        # new location from the asset path instead.
                        location = urllib.parse.urljoin(
                            "/".join(_replaceExceptFirst(asset, "//", "/").split("/")),
                            ".").rstrip("/")

                        try:
                            siteData = cache.get(asset)

                            if siteData != False:
                                finalContentLines.append(
                                    "_assets[\"" +
                                    originalAsset.replace("\"", "-") +
                                    "\"] = \"" + base64.b64encode(
                                        siteData).decode("utf-8") + "\";")
                            else:
                                output.warning(
                                    _("unknownAsset",
                                      [initialContentLines[0][3:]]))
                        except:
                            output.warning(
                                _("unknownAsset",
                                  [initialContentLines[0][3:]]))
                    elif asset.startswith("http://") or asset.startswith(
                            "https://"):
                        assetName = asset.split("/")[-1]

                        output.action(_("importAsset", [assetName, asset]))

                        try:
                            siteData = cache.get(asset)

                            if siteData != False:
                                finalContentLines.append(
                                    "_assets[\"" + asset.replace("\"", "-") +
                                    "\"] = \"" + base64.b64encode(
                                        siteData).decode("utf-8") + "\";")
                            else:
                                output.warning(
                                    _("unknownAsset",
                                      [initialContentLines[0][3:]]))
                        except:
                            output.warning(
                                _("unknownAsset",
                                  [initialContentLines[0][3:]]))
                    else:
                        oldAsset = asset
                        assetName = asset.split("/")[-1]

                        if "/" in asset:
                            location += "/" + "/".join(asset.split("/")[:-1])

                        output.action(_("importAsset", [assetName, asset]))

                        try:
                            asset = asset.split("/")[-1]

                            file = open(fullPath, "rb")
                            fileData = file.read()

                            file.close()

                            finalContentLines.append(
                                "_assets[\"" + assetName.replace("\"", "-") +
                                "\"] = \"" +
                                base64.b64encode(fileData).decode("utf-8") +
                                "\";")
                        except:
                            output.warning(
                                _("unknownAsset",
                                  [initialContentLines[0][3:]]))
            except:
                output.warning(_("illegalAsset", [initialContentLines[0][3:]]))
        else:
            finalContentLines.append(initialContentLines[0])

        initialContentLines.pop(0)

    return jsmin.jsmin("\n".join(finalContentLines))
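Both js above and html below call a _replaceExceptFirst helper that is not shown on this page. Judging from the name and the call sites (collapsing doubled slashes in a URL while keeping the "//" after the scheme), it presumably behaves like the standalone sketch below; this is an assumption, not the project's actual implementation:

def _replaceExceptFirst(text, old, new):
    # Replace every occurrence of `old` except the first one.
    first = text.find(old)
    if first == -1:
        return text
    head = text[:first + len(old)]
    return head + text[first + len(old):].replace(old, new)

print(_replaceExceptFirst("https://cdn.example.com//libs//x.js", "//", "/"))
# -> https://cdn.example.com/libs/x.js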
Example No. 16
def html(content, location):
    minified = htmlmin.minify(content)
    imports = re.findall(r"\{\{ @import (.*?) \}\}", minified)

    for i in range(0, len(imports)):
        importStatement = "{{ @import " + imports[i] + " }}"

        library = imports[i]
        libraryName = ""

        if location.startswith("http://") or location.startswith("https://"):
            library = location + "/" + library
            libraryName = library.split("/")[-1].split(".")[0]

            finalURL = urllib.parse.urljoin(library,
                                            ".") + library.split("/")[-1]

            if not finalURL.endswith(".html"):
                finalURL += ".html"

            output.action(_("importLibrary", [libraryName, finalURL]))

            location = urllib.parse.urljoin(
                "/".join(_replaceExceptFirst(library, "//", "/").split("/")),
                ".").rstrip("/")

            try:
                siteData = cache.get(finalURL)

                if siteData != False:
                    minified = minified.replace(
                        importStatement,
                        html(siteData.decode("utf-8"), location))
                else:
                    output.warning(
                        _("unknownImport", ["@import " + imports[i]]))
            except:
                output.warning(_("unknownImport", [imports[i]]))
        elif library.startswith("http://") or library.startswith("https://"):
            libraryName = library.split("/")[-1].split(".")[0]

            output.action(_("importLibrary", [libraryName, library]))

            if not (location.startswith("http://")
                    or location.startswith("https://")):
                location = urllib.parse.urljoin(
                    "/".join(
                        _replaceExceptFirst(library, "//", "/").split("/")),
                    ".").rstrip("/")

                if not library.endswith(".html"):
                    library += ".html"

            try:
                siteData = cache.get(library)

                if siteData != False:
                    minified = minified.replace(
                        importStatement,
                        html(siteData.decode("utf-8"), location))
                else:
                    output.warning(
                        _("unknownImport", ["@import " + imports[i]]))
            except:
                output.warning(_("unknownImport", [imports[i]]))
        else:
            library = location + "/" + library
            libraryName = library.split("/")[-1]

            output.action(_("importLibrary", [libraryName, library + ".html"]))

            try:
                libPath = library.split("/")

                libPath[-1] += ".html"

                file = open(
                    os.path.join(*["/", *libPath]).replace(":", ":\\"), "r")
                fileData = file.read()

                file.close()

                minified = minified.replace(importStatement,
                                            html(fileData, location))
            except:
                output.warning(_("unknownImport", [imports[i]]))

    return minified
Example No. 17
	def post(self):
		errors = cache.get("backend_scan_errors") or [ { "time": timestamp(), "backend_scan_errors": "No errors in memory." } ]
		self.append(self.return_name, errors)
Example No. 18
	def post(self):
		errors = cache.get("error_reports") or []
		self.append(self.return_name, errors)
Example No. 19
	def post(self):
		self.append("all_stations_info", cache.get("all_stations_info"))
Example No. 20
def attach_info_to_request(
    request, extra_list=None, all_lists=False, live_voting=False
):
    # Front-load all non-animated content ahead of the schedule content
    # Since the schedule content is the most animated on R3, setting this content to load
    # first has a good impact on the perceived animation smoothness since table redrawing
    # doesn't have to take place during the first few frames.

    if request.user:
        request.append("user", request.user.to_private_dict())
        if request.user.is_dj():
            attach_dj_info_to_request(request)

    if not request.mobile:
        if (
            all_lists
            or (extra_list == "all_albums")
            or (extra_list == "album")
            or "all_albums" in request.request.arguments
        ):
            request.append(
                "all_albums",
                api_requests.playlist.get_all_albums(request.sid, request.user),
            )
        else:
            request.append("album_diff", cache.get_station(request.sid, "album_diff"))

        if (
            all_lists
            or (extra_list == "all_artists")
            or (extra_list == "artist")
            or "all_artists" in request.request.arguments
        ):
            request.append(
                "all_artists", api_requests.playlist.get_all_artists(request.sid)
            )

        if (
            all_lists
            or (extra_list == "all_groups")
            or (extra_list == "group")
            or "all_groups" in request.request.arguments
        ):
            request.append(
                "all_groups", api_requests.playlist.get_all_groups(request.sid)
            )

        if (
            all_lists
            or (extra_list == "current_listeners")
            or "current_listeners" in request.request.arguments
            or request.get_cookie("r4_active_list") == "current_listeners"
        ):
            request.append(
                "current_listeners", cache.get_station(request.sid, "current_listeners")
            )

        request.append("request_line", cache.get_station(request.sid, "request_line"))

    sched_next = None
    sched_history = None
    sched_current = None
    if request.user and not request.user.is_anonymous():
        request.append("requests", request.user.get_requests(request.sid))
        sched_current = cache.get_station(request.sid, "sched_current")
        if not sched_current:
            raise APIException(
                "server_just_started",
                "Rainwave is Rebooting, Please Try Again in a Few Minutes",
                http_code=500,
            )
        if request.user.is_tunedin():
            sched_current.get_song().data["rating_allowed"] = True
        sched_current = sched_current.to_dict(request.user)
        sched_next = []
        sched_next_objects = cache.get_station(request.sid, "sched_next")
        for evt in sched_next_objects:
            sched_next.append(evt.to_dict(request.user))
        if (
            len(sched_next) > 0
            and request.user.is_tunedin()
            and sched_next_objects[0].is_election
            and len(sched_next_objects[0].songs) > 1
        ):
            sched_next[0]["voting_allowed"] = True
        if request.user.is_tunedin() and request.user.has_perks():
            for i in range(1, len(sched_next)):
                if (
                    sched_next_objects[i].is_election
                    and len(sched_next_objects[i].songs) > 1
                ):
                    sched_next[i]["voting_allowed"] = True
        sched_history = []
        for evt in cache.get_station(request.sid, "sched_history"):
            sched_history.append(evt.to_dict(request.user, check_rating_acl=True))
    elif request.user:
        sched_current = cache.get_station(request.sid, "sched_current_dict")
        if not sched_current:
            raise APIException(
                "server_just_started",
                "Rainwave is Rebooting, Please Try Again in a Few Minutes",
                http_code=500,
            )
        sched_next = cache.get_station(request.sid, "sched_next_dict")
        sched_history = cache.get_station(request.sid, "sched_history_dict")
        if (
            len(sched_next) > 0
            and request.user.is_tunedin()
            and sched_next[0]["type"] == "Election"
            and len(sched_next[0]["songs"]) > 1
        ):
            sched_next[0]["voting_allowed"] = True
    request.append("sched_current", sched_current)
    request.append("sched_next", sched_next)
    request.append("sched_history", sched_history)
    if request.user:
        if not request.user.is_anonymous():
            user_vote_cache = cache.get_user(request.user, "vote_history")
            if user_vote_cache:
                request.append("already_voted", user_vote_cache)
        else:
            if (
                len(sched_next) > 0
                and request.user.data.get("voted_entry")
                and request.user.data.get("voted_entry") > 0
                and request.user.data["lock_sid"] == request.sid
            ):
                request.append(
                    "already_voted",
                    [(sched_next[0]["id"], request.user.data["voted_entry"])],
                )

    request.append("all_stations_info", cache.get("all_stations_info"))

    if live_voting:
        request.append("live_voting", cache.get_station(request.sid, "live_voting"))
Example No. 21
def attach_info_to_request(request, extra_list=None, all_lists=False):
    # Front-load all non-animated content ahead of the schedule content
    # Since the schedule content is the most animated on R3, setting this content to load
    # first has a good impact on the perceived animation smoothness since table redrawing
    # doesn't have to take place during the first few frames.

    if request.user:
        request.append("user", request.user.to_private_dict())

    if not request.mobile:
        if all_lists or (extra_list == "all_albums"
                         ) or 'all_albums' in request.request.arguments:
            request.append(
                "all_albums",
                api_requests.playlist.get_all_albums(request.sid,
                                                     request.user))
        else:
            request.append("album_diff",
                           cache.get_station(request.sid, 'album_diff'))

        if all_lists or (extra_list == "all_artists"
                         ) or 'all_artists' in request.request.arguments:
            request.append("all_artists",
                           api_requests.playlist.get_all_artists(request.sid))

        if all_lists or (extra_list == "all_groups"
                         ) or 'all_groups' in request.request.arguments:
            request.append("all_groups",
                           api_requests.playlist.get_all_groups(request.sid))

        if (all_lists or (extra_list == "current_listeners")
                or 'current_listeners' in request.request.arguments
                or request.get_cookie("r4_active_list") == "current_listeners"):
            request.append("current_listeners",
                           cache.get_station(request.sid, "current_listeners"))

        request.append("request_line",
                       cache.get_station(request.sid, "request_line"))

    sched_next = None
    sched_history = None
    sched_current = None
    if request.user and not request.user.is_anonymous():
        request.append("requests", request.user.get_requests(request.sid))
        sched_current = cache.get_station(request.sid, "sched_current")
        if not sched_current:
            raise APIException(
                "server_just_started",
                "Rainwave is Rebooting, Please Try Again in a Few Minutes",
                http_code=500)
        if request.user.is_tunedin():
            sched_current.get_song().data['rating_allowed'] = True
        sched_current = sched_current.to_dict(request.user)
        sched_next = []
        sched_next_objects = cache.get_station(request.sid, "sched_next")
        for evt in sched_next_objects:
            sched_next.append(evt.to_dict(request.user))
        if len(sched_next) > 0 and request.user.is_tunedin() and sched_next_objects[0].is_election:
            sched_next[0]['voting_allowed'] = True
        if request.user.is_tunedin() and request.user.has_perks():
            for i in range(1, len(sched_next)):
                if sched_next_objects[i].is_election:
                    sched_next[i]['voting_allowed'] = True
        sched_history = []
        for evt in cache.get_station(request.sid, "sched_history"):
            sched_history.append(
                evt.to_dict(request.user, check_rating_acl=True))
    elif request.user:
        sched_current = cache.get_station(request.sid, "sched_current_dict")
        if not sched_current:
            raise APIException(
                "server_just_started",
                "Rainwave is Rebooting, Please Try Again in a Few Minutes",
                http_code=500)
        sched_next = cache.get_station(request.sid, "sched_next_dict")
        sched_history = cache.get_station(request.sid, "sched_history_dict")
        if len(sched_next) > 0 and request.user.is_tunedin() and sched_next[0]['type'] == "Election":
            sched_next[0]['voting_allowed'] = True
    request.append("sched_current", sched_current)
    request.append("sched_next", sched_next)
    request.append("sched_history", sched_history)
    if request.user:
        if not request.user.is_anonymous():
            user_vote_cache = cache.get_user(request.user, "vote_history")
            temp_current = list()
            temp_current.append(sched_current)
            if user_vote_cache:
                for history in user_vote_cache:
                    for event in (sched_history + sched_next + temp_current):
                        if history[0] == event['id']:
                            api_requests.vote.append_success_to_request(
                                request, event['id'], history[1])
        else:
            if len(sched_next) > 0 and request.user.data['voted_entry'] > 0 and request.user.data['lock_sid'] == request.sid:
                api_requests.vote.append_success_to_request(
                    request, sched_next[0]['id'],
                    request.user.data['voted_entry'])

    request.append("all_stations_info", cache.get("all_stations_info"))
Example No. 22
	def post(self):
		self.append("all_stations_info", cache.get("all_stations_info"))
Example No. 23
	def get_request_expiry(self):
		if self.id <= 1:
			return None
		if self.id in cache.get("request_expire_times"):
			return cache.get("request_expire_times")[self.id]
		return None
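If cache.get("request_expire_times") returns None before the backend has populated the key, "self.id in None" raises a TypeError instead of falling through to return None. A hedged standalone sketch of a defensive lookup (illustrative names, not the project's code):

def request_expiry(song_id, expire_times):
	if song_id <= 1:
		return None
	expire_times = expire_times or {}
	return expire_times.get(song_id)

print(request_expiry(42, None))              # None instead of a TypeError
print(request_expiry(42, {42: 1700000000}))  # -> 1700000000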
Example No. 24
def attach_info_to_request(request, extra_list = None, all_lists = False):
	# Front-load all non-animated content ahead of the schedule content
	# Since the schedule content is the most animated on R3, setting this content to load
	# first has a good impact on the perceived animation smoothness since table redrawing
	# doesn't have to take place during the first few frames.

	if request.user:
		request.append("user", request.user.to_private_dict())

	if not request.mobile:
		if all_lists or (extra_list == "all_albums") or 'all_albums' in request.request.arguments:
			request.append("all_albums", api_requests.playlist.get_all_albums(request.sid, request.user))
		else:
			request.append("album_diff", cache.get_station(request.sid, 'album_diff'))

		if all_lists or (extra_list == "all_artists") or 'all_artists' in request.request.arguments:
			request.append("all_artists", api_requests.playlist.get_all_artists(request.sid))

		if all_lists or (extra_list == "all_groups") or 'all_groups' in request.request.arguments:
			request.append("all_groups", api_requests.playlist.get_all_groups(request.sid))

		if all_lists or (extra_list == "current_listeners") or 'current_listeners' in request.request.arguments or request.get_cookie("r4_active_list") == "current_listeners":
			request.append("current_listeners", cache.get_station(request.sid, "current_listeners"))

		request.append("request_line", cache.get_station(request.sid, "request_line"))

	sched_next = None
	sched_history = None
	sched_current = None
	if request.user and not request.user.is_anonymous():
		request.append("requests", request.user.get_requests(request.sid))
		sched_current = cache.get_station(request.sid, "sched_current")
		if not sched_current:
			raise APIException("server_just_started", "Rainwave is Rebooting, Please Try Again in a Few Minutes", http_code=500)
		if request.user.is_tunedin():
			sched_current.get_song().data['rating_allowed'] = True
		sched_current = sched_current.to_dict(request.user)
		sched_next = []
		sched_next_objects = cache.get_station(request.sid, "sched_next")
		for evt in sched_next_objects:
			sched_next.append(evt.to_dict(request.user))
		if len(sched_next) > 0 and request.user.is_tunedin() and sched_next_objects[0].is_election and len(sched_next_objects[0].songs) > 1:
			sched_next[0]['voting_allowed'] = True
		if request.user.is_tunedin() and request.user.has_perks():
			for i in range(1, len(sched_next)):
				if sched_next_objects[i].is_election and len(sched_next_objects[i].songs) > 1:
					sched_next[i]['voting_allowed'] = True
		sched_history = []
		for evt in cache.get_station(request.sid, "sched_history"):
			sched_history.append(evt.to_dict(request.user, check_rating_acl=True))
	elif request.user:
		sched_current = cache.get_station(request.sid, "sched_current_dict")
		if not sched_current:
			raise APIException("server_just_started", "Rainwave is Rebooting, Please Try Again in a Few Minutes", http_code=500)
		sched_next = cache.get_station(request.sid, "sched_next_dict")
		sched_history = cache.get_station(request.sid, "sched_history_dict")
		if len(sched_next) > 0 and request.user.is_tunedin() and sched_next[0]['type'] == "Election" and len(sched_next[0]['songs']) > 1:
			sched_next[0]['voting_allowed'] = True
	request.append("sched_current", sched_current)
	request.append("sched_next", sched_next)
	request.append("sched_history", sched_history)
	if request.user:
		if not request.user.is_anonymous():
			user_vote_cache = cache.get_user(request.user, "vote_history")
			temp_current = list()
			temp_current.append(sched_current)
			if user_vote_cache:
				for history in user_vote_cache:
					for event in (sched_history + sched_next + temp_current):
						if history[0] == event['id']:
							api_requests.vote.append_success_to_request(request, event['id'], history[1])
		else:
			if len(sched_next) > 0 and request.user.data['voted_entry'] > 0 and request.user.data['lock_sid'] == request.sid:
				api_requests.vote.append_success_to_request(request, sched_next[0]['id'], request.user.data['voted_entry'])

	request.append("all_stations_info", cache.get("all_stations_info"))
Example No. 25
	def get_request_expiry(self):
		if self.id <= 1:
			return None
		if self.id in cache.get("request_expire_times"):
			return cache.get("request_expire_times")[self.id]
		return None
Example No. 26
	def post(self):
		errors = cache.get("error_reports") or []
		self.append(self.return_name, errors)