Example #1
0
def monitor():
    global _wm
    if not _wm:
        raise "Cannot monitor on Windows, or without pyinotify."

    class EventHandler(pyinotify.ProcessEvent):
        def __init__(self, sids):
            self.sids = sids

        def _rw_process(self, event):
            try:
                _scan_file(_fix_codepage_1252(event.pathname, self.sids))
            except Exception as e:
                _add_scan_error(filename, e)

        def process_IN_CREATE(self, event):
            self._rw_process(event)

        def process_IN_MODIFY(self, event):
            self._rw_process(event)

        def process_IN_DELETE(self, event):
            _disable_file(event.pathname)

    cache.set("backend_scan", "monitoring")
    mask = pyinotify.IN_DELETE | pyinotify.IN_CREATE | pyinotify.IN_MODIFY
    notifiers = []
    descriptors = []
    for dir, sids in _directories.iteritems():
        notifiers.append(pyinotify.AsyncNotifier(_wm, EventHandler(sids)))
        descriptors.append(_wm.add_watch(dir, mask, rec=True, auto_add=True))
    print "Monitoring"
    asyncore.loop()
    cache.set("backend_scan", "off")
Example #2
0
def monitor():
	global _wm
	if not _wm:
		raise "Cannot monitor on Windows, or without pyinotify."

	class EventHandler(pyinotify.ProcessEvent):
		def __init__(self, sids):
			self.sids = sids

		def _rw_process(self, event):
			try:
				_scan_file(_fix_codepage_1252(event.pathname, self.sids))
			except Exception as e:
				_add_scan_error(filename, e)

		def process_IN_CREATE(self, event):
			self._rw_process(event)

		def process_IN_MODIFY(self, event):
			self._rw_process(event)

		def process_IN_DELETE(self, event):
			_disable_file(event.pathname)

	cache.set("backend_scan", "monitoring")
	mask = pyinotify.IN_DELETE | pyinotify.IN_CREATE | pyinotify.IN_MODIFY
	notifiers = []
	descriptors = []
	for dir, sids in _directories.iteritems():
		notifiers.append(pyinotify.AsyncNotifier(_wm, EventHandler(sids)))
		descriptors.append(_wm.add_watch(dir, mask, rec=True, auto_add=True))
	print "Monitoring"
	asyncore.loop()
	cache.set("backend_scan", "off")
Example #3
0
def _add_scan_error(filename, xception, full_exc=None):
    """Record a scan error for *filename* in the cache (capped at 100 entries).

    Expected problems (PassableScanError, IOError, OSError) are logged as
    warnings without a traceback; anything else is logged with a full
    traceback.

    filename -- path of the file that failed to scan
    xception -- the exception instance that was raised
    full_exc -- optional sys.exc_info() tuple to format instead of the
                current exception context
    """
    scan_errors = []
    try:
        scan_errors = cache.get("backend_scan_errors")
    except Exception:
        # A cache read failure is non-fatal; fall back to an empty list.
        pass
    if not scan_errors:
        scan_errors = []

    eo = {
        "time": int(timestamp()),
        "file": filename,
        "type": xception.__class__.__name__,
        "error": str(xception),
        "traceback": ""
    }
    if not isinstance(xception, (PassableScanError, IOError, OSError)):
        if full_exc:
            eo['traceback'] = traceback.format_exception(*full_exc)  #pylint: disable=W0142
            log.exception("scan", "Error scanning %s" % filename, full_exc)
        else:
            eo['traceback'] = traceback.format_exception(*sys.exc_info())
            log.exception("scan", "Error scanning %s" % filename,
                          sys.exc_info())
    else:
        # NOTE(review): Exception.message is deprecated since Python 2.6 and
        # absent on some exception types — consider str(xception).
        log.warn("scan",
                 "Warning scanning %s: %s" % (filename, xception.message))
    scan_errors.insert(0, eo)
    # Keep only the 100 most recent errors.
    if len(scan_errors) > 100:
        scan_errors = scan_errors[0:100]
    cache.set("backend_scan_errors", scan_errors)
Example #4
0
def get_top_stories(bust_cache=False):
    """Return the current top 20 Hacker News stories.

    Results are cached for two hours; pass bust_cache=True to force a
    refresh from the Firebase API.
    """
    top_stories = [] if bust_cache else (cache.get('hackernews-topstories') or [])

    if not top_stories:
        base_url = 'https://hacker-news.firebaseio.com/v0/item/'
        for story_id in get_top_story_ids()[:20]:
            item = requests.get(
                base_url + str(story_id) + '.json',
                headers={
                    'user-agent': 'johanli.com',
                },
            ).json()

            top_stories.append({
                'title': html.unescape(item['title']),
                'score': item['score'],
                'number_comments': item.get('descendants'),
                'created': item['time'],
                'url': item.get('url'),
                'comments_url': 'https://news.ycombinator.com/item?id=' + str(item['id']),
            })

        cache.set('hackernews-topstories', top_stories, 7200)

    return top_stories
Example #5
0
def get_plants():
    """Return every plant annotated with its watering status.

    Seeds the cache from default_plants the first time it is called.
    """
    stored = cache.get('plants')

    if not stored:
        stored = default_plants
        cache.set('plants', stored)

    return list(map(needs_watering, stored))
Example #6
0
def _add_scan_error(filename, xception):
	"""Prepend *xception* for *filename* to the cached error list and log it.

	The stored history is capped at the 100 most recent errors.
	"""
	errors = cache.get("backend_scan_errors") or []

	entry = {
		"time": int(timestamp()),
		"file": filename,
		"type": xception.__class__.__name__,
		"error": str(xception),
	}
	errors.insert(0, entry)
	# Trim to the newest 100 entries before persisting.
	del errors[100:]
	cache.set("backend_scan_errors", errors)
	log.exception("scan", "Error scanning %s" % filename, xception)
Example #7
0
def full_update():
	"""Rescan every directory, then disable songs the scan never touched."""
	cache.set("backend_scan", "full scan")
	db.c.update("UPDATE r4_songs SET song_scanned = FALSE")
	_scan_directories()

	# Any verified song still unscanned has vanished from disk.
	missing = db.c.fetch_list("SELECT song_id FROM r4_songs WHERE song_scanned = FALSE AND song_verified = TRUE")
	for missing_id in missing:
		playlist.Song.load_from_id(missing_id).disable()

	cache.set("backend_scan", "off")
Example #8
0
	def _auth_anon_user(self, ip_address, api_key, bypass = False):
		"""Authorize the anonymous user when *api_key* matches the key on file for *ip_address*."""
		if bypass:
			self.authorized = True
			return
		cache_slot = "ip_%s_api_key" % ip_address
		known_key = cache.get(cache_slot)
		if not known_key:
			known_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE api_ip = %s AND user_id = 1", (ip_address,))
			if not known_key:
				# log.debug("user", "Anonymous user key %s not found." % api_key)
				return
			cache.set(cache_slot, known_key)
		if known_key != api_key:
			# log.debug("user", "Anonymous user key %s does not match key %s." % (api_key, known_key))
			return
		self.authorized = True
Example #9
0
def full_update():
    """Run a complete rescan and retire songs that disappeared from disk."""
    cache.set("backend_scan", "full scan")
    db.c.update("UPDATE r4_songs SET song_scanned = FALSE")
    _scan_directories()

    # Verified songs the scan never marked no longer exist on disk.
    vanished = db.c.fetch_list(
        "SELECT song_id FROM r4_songs WHERE song_scanned = FALSE AND song_verified = TRUE"
    )
    for vanished_id in vanished:
        playlist.Song.load_from_id(vanished_id).disable()

    cache.set("backend_scan", "off")
Example #10
0
def monitor():
	global _wm
	
	cache.set("backend_scan", "monitoring")
	mask = pyinotify.IN_DELETE | pyinotify.IN_CREATE | pyinotify.IN_MODIFY
	notifiers = []
	descriptors = []
	for dir, sids in _directories.iteritems():
		notifiers.append(pyinotify.AsyncNotifier(_wm, EventHandler(sids)))
		descriptors.append(_wm.add_watch(dir, mask, rec=True, auto_add=True))
	print "Monitoring"
	asyncore.loop()
	cache.set("backend_scan", "off")
Example #11
0
	def _auth_anon_user(self, api_key, bypass = False):
		"""Grant anonymous authorization when *api_key* matches the key stored for self.ip_address."""
		if bypass:
			self.authorized = True
			return
		slot = "ip_%s_api_key" % self.ip_address
		expected = cache.get(slot)
		if not expected:
			expected = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE api_ip = %s AND user_id = 1", (self.ip_address,))
			if not expected:
				# log.debug("user", "Anonymous user key %s not found." % api_key)
				return
			cache.set(slot, expected)
		if expected != api_key:
			# log.debug("user", "Anonymous user key %s does not match key %s." % (api_key, expected))
			return
		self.authorized = True
Example #12
0
	def ensure_api_key(self, ip_address = None):
		"""Return an API key for this user, generating and caching one if needed.

		For the anonymous user (id 1) the key is tied to *ip_address* and
		expires in 48 hours; registered users (id > 1) get a persistent key.
		"""
		# NOTE(review): if self.id == 1 with no ip_address, or self.id < 1,
		# neither branch assigns api_key and the assignment below raises
		# NameError — confirm callers never hit that combination.
		if self.id == 1 and ip_address:
			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = 1 AND api_ip = %s", (ip_address,))
			if not api_key:
				# 172800 seconds == 48 hours.
				api_key = self.generate_api_key(ip_address, int(time.time()) + 172800)
				cache.set("ip_%s_api_key" % ip_address, api_key)
		elif self.id > 1:
			if 'api_key' in self.data and self.data['api_key']:
				return self.data['api_key']
			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = %s", (self.id,))
			if not api_key:
				api_key = self.generate_api_key()
		self.data['api_key'] = api_key
		return api_key
Example #13
0
def _scan_directories():
	"""Walk every configured directory and scan each file.

	Publishes per-directory progress to the "backend_scan_size" and
	"backend_scan_counted" cache keys, then persists accumulated scan
	errors via _save_scan_errors().
	"""
	global _scan_errors
	global _directories

	for dir, sids in _directories.iteritems():
		for root, subdirs, files in os.walk(dir, followlinks = True):
			cache.set("backend_scan_size", len(files))
			file_counter = 0
			for filename in files:
				cache.set("backend_scan_counted", file_counter)
				fqfn = root + "/" + filename
				_scan_file(fqfn, sids, True)
				# BUG FIX: the progress counter was never incremented, so
				# "backend_scan_counted" was stuck at 0 for the whole scan.
				file_counter += 1
	_save_scan_errors()
Example #14
0
	def ensure_api_key(self):
		"""Return an API key for this user, generating and caching one if needed.

		The anonymous user (id 1) gets a key bound to self.ip_address that
		expires in 48 hours; registered users (id > 1) get a persistent key.
		"""
		# NOTE(review): if self.id < 1, neither branch assigns api_key and the
		# assignment below raises NameError — confirm that cannot happen.
		if self.id == 1:
			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = 1 AND api_ip = %s", (self.ip_address,))
			if not api_key:
				# 172800 seconds == 48 hours.
				api_key = self.generate_api_key(int(time.time()) + 172800)
				cache.set("ip_%s_api_key" % self.ip_address, api_key)
		elif self.id > 1:
			if 'api_key' in self.data and self.data['api_key']:
				return self.data['api_key']
			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = %s", (self.id,))
			if not api_key:
				api_key = self.generate_api_key()
		self.data['api_key'] = api_key
		return api_key
Example #15
0
def _cache_relay_status():
    """Tally in-flight listener counts per relay, log them, and cache the totals."""
    global in_process

    counts = dict((name, 0) for name in config.get("relays"))

    for handler, data in in_process.iteritems():
        if isinstance(data, list):
            counts[handler.relay_name] += len(data)

    for name, total in counts.iteritems():
        log.debug("icecast_sync", "%s total listeners: %s" % (name, total))

    cache.set("relay_status", counts)
Example #16
0
def update_expire_times():
	"""Recompute each requester's line-expiry time and store the map in the cache."""
	expiry_times = {}
	for row in db.c.fetch_all("SELECT * FROM r4_request_line"):
		tune_in = row['line_expiry_tune_in']
		election = row['line_expiry_election']
		if tune_in and election:
			# Both timers running: the earlier one wins (ties go to election).
			expiry = election if election <= tune_in else tune_in
		else:
			# At most one timer is set; fall back to None when neither is.
			expiry = tune_in or election or None
		expiry_times[row['user_id']] = expiry
	cache.set("request_expire_times", expiry_times, True)
Example #17
0
	def ensure_api_key(self, reuse = None):
		"""Return an API key for this user, creating one if necessary.

		For the anonymous user (id 1) the key is bound to self.ip_address and
		expires in 48 hours; *reuse* lets an existing key string be re-issued.
		"""
		# NOTE(review): if self.id < 1 no branch assigns api_key and the
		# final assignment raises NameError — confirm unreachable.
		if self.id == 1:
			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = 1 AND api_ip = %s", (self.ip_address,))
			if not api_key:
				api_key = self.generate_api_key(int(timestamp()) + 172800, reuse)
				# BUG FIX: cache_key was previously built from api_key *before*
				# generation — api_key was always falsy inside this branch, so
				# every entry collapsed to a constant "api_key_ip_None" key.
				# Build it from the freshly generated key, and store the IP it
				# is bound to, matching _auth_anon_user which compares the
				# cached value against the requesting IP.
				cache_key = unicodedata.normalize('NFKD', u"api_key_ip_%s" % api_key).encode('ascii', 'ignore')
				cache.set(cache_key, self.ip_address)
		elif self.id > 1:
			if 'api_key' in self.data and self.data['api_key']:
				return self.data['api_key']
			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = %s", (self.id,))
			if not api_key:
				api_key = self.generate_api_key()
		self.data['api_key'] = api_key
		return api_key
Example #18
0
	def _auth_anon_user(self, api_key, bypass = False):
		"""Authorize the anonymous user if *api_key* is bound to the requesting IP.

		The api_key -> IP binding is looked up in the cache first, then the
		database; a missing or mismatched binding triggers key regeneration.
		"""
		if not bypass:
			# Cache key is the ASCII-normalized form of "api_key_ip_<key>".
			cache_key = unicodedata.normalize('NFKD', u"api_key_ip_%s" % api_key).encode('ascii', 'ignore')
			auth_against = cache.get(cache_key)
			if not auth_against:
				# NOTE(review): the DB lookup uses self.api_key rather than the
				# api_key argument — confirm the two are always identical here.
				auth_against = db.c.fetch_var("SELECT api_ip FROM r4_api_keys WHERE api_key = %s AND user_id = 1", (self.api_key,))
				if not auth_against or not auth_against == self.ip_address:
					log.debug("user", "Anonymous user key %s not found for IP %s: record in DB is %s." % (api_key, self.ip_address, auth_against))
					log.debug("user", "Re-generating API key %s." % api_key)
					self.ensure_api_key(reuse = api_key)
				# NOTE(review): this caches auth_against even when it was just
				# found missing/mismatched above (possibly None), and the
				# comparison below will still fail after a regeneration —
				# verify that is the intended flow.
				cache.set(cache_key, auth_against)
			if auth_against != self.ip_address:
				log.debug("user", "Anonymous user key %s has IP %s which does not match IP %s." % (api_key, auth_against, self.ip_address))
				return
		self.authorized = True
Example #19
0
def update_expire_times():
    """Refresh the cached map of user_id -> request line expiry time."""
    def earliest_expiry(row):
        # Pick the sooner of the two expiry timers; either (or both) may be unset.
        tune_in = row['line_expiry_tune_in']
        election = row['line_expiry_election']
        if tune_in and election:
            return min(tune_in, election)
        return tune_in or election or None

    expiry_times = {}
    for row in db.c.fetch_all("SELECT * FROM r4_request_line"):
        expiry_times[row['user_id']] = earliest_expiry(row)
    cache.set("request_expire_times", expiry_times, True)
Example #20
0
def _cache_relay_status():
	"""Count listeners currently in process per relay and publish the totals to the cache."""
	global in_process

	totals = {}
	for relay in config.get("relays"):
		totals[relay] = 0

	for handler, data in in_process.iteritems():
		if isinstance(data, list):
			totals[handler.relay_name] += len(data)

	for relay, listener_count in totals.iteritems():
		log.debug("icecast_sync", "%s total listeners: %s" % (relay, listener_count))

	cache.set("relay_status", totals)
Example #21
0
def _add_scan_error(filename, xception, full_exc=None):
	"""Record a scan failure in the cached error list (max 100 entries) and log it.

	filename -- path of the file that failed to scan
	xception -- the exception instance that was raised
	full_exc -- optional sys.exc_info() tuple to format instead of the
	            current exception context
	"""
	errors = cache.get("backend_scan_errors") or []

	entry = {
		"time": int(timestamp()),
		"file": filename,
		"type": xception.__class__.__name__,
		"error": str(xception),
		"traceback": "",
	}
	# Expected problems (passable/IO/OS errors) are stored without a traceback.
	if not isinstance(xception, (PassableScanError, IOError, OSError)):
		exc_info = full_exc if full_exc else sys.exc_info()
		entry['traceback'] = traceback.format_exception(*exc_info)			#pylint: disable=W0142
	errors.insert(0, entry)
	del errors[100:]
	cache.set("backend_scan_errors", errors)
	log.exception("scan", "Error scanning %s" % filename, xception)
Example #22
0
	def ensure_api_key(self):
		"""Ensure this user has an API key and return it.

		The anonymous user (id 1) gets a 48-hour key whose listen_key is
		cached under an ASCII-normalized "api_key_listen_key_<key>" entry;
		registered users (id > 1) get a persistent key.
		"""
		if self.id == 1:
			if self.data.get('api_key') and self.data['listen_key']:
				# NOTE(review): bare return (None) here, while every other
				# path returns the key string — confirm callers handle that.
				return
			api_key = self.generate_api_key(int(timestamp()) + 172800, self.data.get('api_key'))
			cache_key = unicodedata.normalize('NFKD', u"api_key_listen_key_%s" % api_key).encode('ascii', 'ignore')
			cache.set(cache_key, self.data['listen_key'])
		elif self.id > 1:
			if 'api_key' in self.data and self.data['api_key']:
				return self.data['api_key']

			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = %s", (self.id,))
			if not api_key:
				api_key = self.generate_api_key()
		# NOTE(review): if self.id < 1, api_key is unbound here (NameError).
		self.data['api_key'] = api_key

		return api_key
Example #23
0
def get_subreddits(bust_cache=False):
    """Return posts for each monitored subreddit, cached for two hours.

    Pass bust_cache=True to force a refresh.
    """
    if bust_cache:
        subreddits = []
    else:
        subreddits = cache.get('reddit-subreddits') or []

    if not subreddits:
        subreddits = [
            {'name': name, 'posts': get_posts(name)}
            for name in monitored_subreddits
        ]
        cache.set('reddit-subreddits', subreddits, 7200)

    return subreddits
Example #24
0
	def ensure_api_key(self):
		"""Ensure this user has an API key and return it.

		The anonymous user (id 1) gets a 48-hour key whose listen_key is
		cached under an ASCII-normalized "api_key_listen_key_<key>" entry;
		registered users (id > 1) get a persistent key.
		"""
		if self.id == 1:
			if self.data.get('api_key') and self.data['listen_key']:
				# NOTE(review): bare return (None) here, while every other
				# path returns the key string — confirm callers handle that.
				return
			api_key = self.generate_api_key(int(timestamp()) + 172800, self.data.get('api_key'))
			cache_key = unicodedata.normalize('NFKD', u"api_key_listen_key_%s" % api_key).encode('ascii', 'ignore')
			cache.set(cache_key, self.data['listen_key'])
		elif self.id > 1:
			if 'api_key' in self.data and self.data['api_key']:
				return self.data['api_key']

			api_key = db.c.fetch_var("SELECT api_key FROM r4_api_keys WHERE user_id = %s", (self.id,))
			if not api_key:
				api_key = self.generate_api_key()
		# NOTE(review): if self.id < 1, api_key is unbound here (NameError).
		self.data['api_key'] = api_key

		return api_key
Example #25
0
    def post(self):
        """Accept an error report, truncate oversized string fields, and store it in the cache."""
        # Limit the size of each submitted string field to 2048 characters.
        for k, v in self.cleaned_args.iteritems():
            # BUG FIX: this previously tested the builtin `object` (always
            # False), so no field was ever truncated.
            if isinstance(v, (str, unicode)):
                self.cleaned_args[k] = v[:2048]
        self.cleaned_args["user_id"] = self.user.id
        self.cleaned_args["username"] = self.user.data["name"]
        self.cleaned_args["time"] = time.time()

        reports = cache.get("error_reports")
        if not isinstance(reports, list):
            reports = []

        reports.insert(0, self.cleaned_args)
        cache.set("error_reports", reports)

        self.append_standard("report_submitted", "Error report submitted.")
Example #26
0
    def post(self):
        """Accept an error report, truncate oversized string fields, and store it in the cache."""
        # Limit the size of each submitted string field to 2048 characters.
        for k, v in self.cleaned_args.iteritems():
            # BUG FIX: this previously tested the builtin `object` (always
            # False), so no field was ever truncated.
            if isinstance(v, (str, unicode)):
                self.cleaned_args[k] = v[:2048]
        self.cleaned_args['user_id'] = self.user.id
        self.cleaned_args['username'] = self.user.data['name']
        self.cleaned_args['time'] = time.time()

        reports = cache.get("error_reports")
        if not isinstance(reports, list):
            reports = []

        reports.insert(0, self.cleaned_args)
        cache.set("error_reports", reports)

        self.append_standard("report_submitted", "Error report submitted.")
Example #27
0
def access_token():
    """Return a Reddit OAuth access token, fetching a new one when the cache is empty.

    Fresh tokens are cached for 3000 seconds.
    """
    token = cache.get('reddit-access-token')

    if token:
        return token

    credentials = HTTPBasicAuth(
        os.environ.get('REDDIT_CLIENT_ID'),
        os.environ.get('REDDIT_CLIENT_SECRET'),
    )
    response = requests.post(
        'https://www.reddit.com/api/v1/access_token',
        auth=credentials,
        headers={'user-agent': 'johanli.com'},
        data={'grant_type': 'client_credentials'},
    )

    token = response.json()['access_token']
    cache.set('reddit-access-token', token, 3000)
    return token
Example #28
0
def get_weather(bust_cache=False):
    """Return sun times plus six-hourly and hourly forecasts for Stockholm.

    Data comes from yr.no and is cached for two hours; pass bust_cache=True
    to force a refresh.
    """
    weather = {} if bust_cache else (cache.get('weather') or {})

    if not weather:
        def fetch_xml(url):
            # yr.no asks clients to send an identifying user-agent.
            response = requests.get(url, headers={'user-agent': 'johanli.com'})
            return etree.fromstring(response.content)

        def parse_times(document, limit):
            # Each <time> entry carries its start time, a symbol name,
            # and a temperature value.
            entries = []
            for time_element in document.xpath('forecast/tabular/time')[:limit]:
                entries.append({
                    'time': time_element.get('from'),
                    'description': time_element.find('symbol').get('name'),
                    'temperature': time_element.find('temperature').get('value'),
                })
            return entries

        forecast_xml = fetch_xml(
            'https://www.yr.no/place/Sweden/Stockholm/Stockholm/forecast.xml')
        sun = forecast_xml.xpath('sun')[0]

        forecast_hour_xml = fetch_xml(
            'https://www.yr.no/place/Sweden/Stockholm/Stockholm/forecast_hour_by_hour.xml')

        weather = {
            'sun': {
                'rise': sun.get('rise'),
                'set': sun.get('set'),
            },
            'forecast': parse_times(forecast_xml, 28),
            'forecast_hour': parse_times(forecast_hour_xml, 24),
        }

        cache.set('weather', weather, 7200)

    return weather
Example #29
0
def _save_scan_errors():
    """Persist the global scan-error list to the cache, keeping only the newest 100."""
    global _scan_errors

    # Rebind (rather than mutate) so other holders of the old list are unaffected.
    if len(_scan_errors) > 100:
        _scan_errors = _scan_errors[:100]
    cache.set("backend_scan_errors", _scan_errors)
Example #30
0
def water(id):
    """Record that plant *id* was watered just now and persist the list."""
    watered = get_plants()
    watered[id]['last_watered'] = time()
    cache.set('plants', watered)
Example #31
0
def _save_scan_errors():
	"""Flush the global scan-error list to the cache, capped at 100 entries."""
	global _scan_errors

	# Rebind (rather than mutate) so other holders of the old list are unaffected.
	if len(_scan_errors) > 100:
		_scan_errors = _scan_errors[:100]
	cache.set("backend_scan_errors", _scan_errors)