def map_show_keys(data):
    """Normalise TV-show metadata keys in ``data`` and return the same dict.

    Keys are lower-cased and remapped through the module-level ``map_show``
    table; values are cleaned per key: artwork paths get the configured URL
    prefix, 'genre' is joined to a '|a|b|' string, 'firstaired' is
    normalised to YYYY-MM-DD (fuzzy-parsed), everything else goes through
    self._clean_data.

    NOTE(review): relies on ``self``, ``map_show`` and ``parse`` from the
    enclosing scope - this function is a closure inside a method.
    """
    # Bug fix: the original deleted/added keys while iterating
    # data.iteritems(), which raises "dictionary changed size during
    # iteration".  Renames are now collected and applied after the loop
    # (same pattern as the sibling map_show_keys variants).
    del_keys = []
    new_data = {}
    for k, v in data.iteritems():
        k_org = k
        k = k.lower()
        if None is not v:
            if k in ['banner', 'fanart', 'poster'] and v:
                v = self.config['url_artworkPrefix'] % v
            elif 'genre' == k:
                v = '|%s|' % '|'.join([self._clean_data(c) for c in v if isinstance(c, basestring)])
            elif 'firstaired' == k:
                if v:
                    try:
                        v = parse(v, fuzzy=True).strftime('%Y-%m-%d')
                    except (StandardError, Exception):
                        v = None
                else:
                    v = None
            else:
                v = self._clean_data(v)
        if k in map_show:
            k = map_show[k]
        if k_org is not k:
            del_keys.append(k_org)
            new_data[k] = v
        else:
            # same key object: value-only update is safe during iteration
            data[k] = v
    for d in del_keys:
        del data[d]
    data.update(new_data)
    return data
def date2iso(*args):
    """
    .. function:: date2iso(sec) -> ISO Datetime

    Converts an input date to ISO-8601 date format. It tries to autodetect,
    the input date format.

    Examples:

    >>> table1('''
    ... 2007-12-31
    ... 2010-01-01
    ... 2010W06
    ... "18/Jan/2011:11:13:00 +0100"
    ... ''')
    >>> sql("select date2iso(a) from table1")
    date2iso(a)
    -------------------------
    2007-12-31T00:00:00+00:00
    2010-01-01T00:00:00+00:00
    2010-02-05T00:00:00+00:00
    2011-01-18T11:13:00+01:00
    """
    # Strict ISO-8601 parsing first; fall back to fuzzy parsing for
    # free-form inputs (e.g. apache-log style timestamps).
    raw = args[0]
    try:
        parsed = iso8601.parse_date(raw)
    except iso8601.ParseError:
        parsed = parser.parse(raw, fuzzy=True)
    return parsed.isoformat()
def get_tvmaze_by_name(showname, premiere_date):
    """Search TVmaze for a show by name and return its external ids.

    A result is accepted only when its premiere date is within 2 days of
    ``premiere_date``.  Returns a dict of indexer-constant -> id, with
    empty/zero/None ids filtered out; empty dict on any failure
    (best-effort lookup).
    """
    ids = {}
    try:
        search_url = '%ssearch/shows?%s' % (
            sickbeard.indexerApi(INDEXER_TVMAZE).config['base_url'],
            urlencode({'q': clean_show_name(showname)}))
        res = get_tvmaze_data(url=search_url, json=True, raise_status_code=True, timeout=120)
        if res:
            for entry in res:
                # skip results without the fields we need
                if 'show' not in entry:
                    continue
                show = entry['show']
                if 'premiered' not in show or 'externals' not in show:
                    continue
                premiered = parse(show['premiered'], fuzzy=True)
                # keep scanning when the air date is too far off
                if not (abs(premiere_date - premiered.date()) < datetime.timedelta(days=2)):
                    continue
                externals = show['externals']
                ids[INDEXER_TVRAGE] = externals.get('tvrage', 0)
                ids[INDEXER_TVDB] = externals.get('thetvdb', 0)
                ids[INDEXER_IMDB] = tryInt(str(externals.get('imdb')).replace('tt', ''))
                ids[INDEXER_TVMAZE] = show.get('id', 0)
                break
    except (StandardError, Exception):
        pass
    return {k: v for k, v in ids.iteritems() if v not in (None, '', 0)}
def selectSeries(self, allSeries):
    """Pick shows from ``allSeries`` whose name or alias contains the
    configured search term, normalising each match's 'firstaired' date.

    :param allSeries: list of show dicts from the indexer
    :return: list of matching show dicts (possibly empty)
    """
    searchResults = []

    # get all available shows
    if allSeries:
        if 'searchterm' in self.config:
            searchterm = self.config['searchterm']
            # try to pick a show that's in my show list
            for curShow in allSeries:
                if curShow in searchResults:
                    continue
                # Bug fix: rebuild the candidate-name list per show.  The
                # original built it once outside the loop, so every later
                # show also matched against earlier shows' names/aliases.
                seriesnames = []
                if 'seriesname' in curShow:
                    seriesnames.append(curShow['seriesname'])
                if 'aliasnames' in curShow:
                    seriesnames.extend(curShow['aliasnames'].split('|'))
                for name in seriesnames:
                    if searchterm.lower() in name.lower():
                        # guarantee a parseable date: default to 0001-01-01,
                        # strip '-00' placeholder parts, then normalise
                        if 'firstaired' not in curShow:
                            curShow['firstaired'] = str(datetime.date.fromordinal(1))
                        curShow['firstaired'] = re.sub("([-]0{2}){1,}", "", curShow['firstaired'])
                        fixDate = parser.parse(curShow['firstaired'], fuzzy=True).date()
                        curShow['firstaired'] = fixDate.strftime("%Y-%m-%d")
                        if curShow not in searchResults:
                            searchResults += [curShow]
    return searchResults
def parse_date(string):
    """Return a datetime with a best guess of the supplied string, using dateutil.

    Returns None (after logging a warning) when the string cannot be parsed.
    """
    from lib.dateutil import parser
    try:
        dt = parser.parse(string)
    except ValueError as e:  # 'except E, e' is py2-only; 'as' works on both
        log.warning(e)
        dt = None
    # Bug fix: the original never returned dt, so the function always
    # returned None even on a successful parse.
    return dt
def map_show_keys(data):
    """Normalise show metadata keys/values and return the mapped dict.

    Lower-cases keys and remaps them via the module-level ``map_show``
    table.  Per-key value handling: artwork paths are URL-prefixed;
    genre/gueststars/writers lists are joined to '|a|b|' strings with the
    raw lists preserved under *_list keys; 'rating' is mirrored to
    'contentrating'; 'firstaired' is normalised to YYYY-MM-DD; 'imdbid'
    is validated against a (tt)?digits pattern; anything else is passed
    through clean_data.  Returns None when the show has no usable name.

    NOTE(review): closure - relies on self, map_show, clean_data, parse,
    iteritems and string_types from the enclosing scope.
    """
    keep_data = {}   # raw list values, merged back in after the loop
    del_keys = []    # original keys to drop once iteration is done
    new_data = {}    # renamed/derived keys, merged in after the loop
    for k, v in iteritems(data):
        k_org = k
        k = k.lower()
        if None is not v:
            if k in ['banner', 'fanart', 'poster'] and v:
                v = self.config['url_artworks'] % v
            elif 'genre' == k:
                keep_data['genre_list'] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, string_types)])
            elif 'gueststars' == k:
                keep_data['gueststars_list'] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, string_types)])
            elif 'writers' == k:
                keep_data[k] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, string_types)])
            elif 'rating' == k:
                # mirror the rating under the legacy 'contentrating' key
                new_data['contentrating'] = v
            elif 'firstaired' == k:
                if v:
                    try:
                        # fuzzy-parse free-form air dates to YYYY-MM-DD
                        v = parse(v, fuzzy=True).strftime('%Y-%m-%d')
                    except (BaseException, Exception):
                        v = None
                else:
                    v = None
            elif 'imdbid' == k:
                if v:
                    if re.search(r'^(tt)?\d{1,9}$', v, flags=re.I):
                        v = clean_data(v)
                    else:
                        # malformed imdb id: blank it rather than keep junk
                        v = ''
            else:
                v = clean_data(v)
        else:
            if 'seriesname' == k:
                # fall back to the first alias when the name is missing
                if isinstance(data.get('aliases'), list) and 0 < len(data.get('aliases')):
                    v = data['aliases'].pop(0)
                # this is a invalid show, it has no Name
                if None is v:
                    return None
        if k in map_show:
            k = map_show[k]
        if k_org is not k:
            # defer rename: resizing the dict during iteration would raise
            del_keys.append(k_org)
            new_data[k] = v
        else:
            data[k] = v
    for d in del_keys:
        del (data[d])
    if isinstance(data, dict):
        data.update(new_data)
        data.update(keep_data)
    return data
def map_show_keys(data):
    """Normalise show metadata keys/values and return the mapped dict.

    Lower-cases keys and remaps them via the module-level ``map_show``
    table.  Per-key value handling: artwork paths are URL-prefixed;
    genre/gueststars/writers lists are joined to '|a|b|' strings with the
    raw lists preserved under *_list keys; 'firstaired' is normalised to
    YYYY-MM-DD; 'imdbid' is validated against a (tt)?digits pattern;
    anything else is passed through clean_data.

    NOTE(review): closure - relies on self, map_show, clean_data and parse
    from the enclosing scope; py2-only (iteritems/basestring/StandardError).
    """
    keep_data = {}   # raw list values, merged back in after the loop
    del_keys = []    # original keys to drop once iteration is done
    new_data = {}    # renamed keys, merged in after the loop
    for k, v in data.iteritems():
        k_org = k
        k = k.lower()
        if None is not v:
            if k in ['banner', 'fanart', 'poster'] and v:
                v = self.config['url_artworkPrefix'] % v
            elif 'genre' == k:
                keep_data['genre_list'] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, basestring)])
            elif 'gueststars' == k:
                keep_data['gueststars_list'] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, basestring)])
            elif 'writers' == k:
                keep_data[k] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, basestring)])
            elif 'firstaired' == k:
                if v:
                    try:
                        # fuzzy-parse free-form air dates to YYYY-MM-DD
                        v = parse(v, fuzzy=True).strftime('%Y-%m-%d')
                    except (StandardError, Exception):
                        v = None
                else:
                    v = None
            elif 'imdbid' == k:
                if v:
                    if re.search(r'^(tt)?\d{1,7}$', v, flags=re.I):
                        v = clean_data(v)
                    else:
                        # malformed imdb id: blank it rather than keep junk
                        v = ''
            else:
                v = clean_data(v)
        if k in map_show:
            k = map_show[k]
        if k_org is not k:
            # defer rename: resizing the dict during iteration would raise
            del_keys.append(k_org)
            new_data[k] = v
        else:
            data[k] = v
    for d in del_keys:
        del (data[d])
    if isinstance(data, dict):
        data.update(new_data)
        data.update(keep_data)
    return data
def _to_base_type(self, value):
    """Parse ``value`` (a date string) into a naive datetime.

    Sometimes the string carries timezone information, but App Engine
    cannot handle aware datetimes, so the result is normalised to UTC
    and the tzinfo stripped; all stored dates are therefore assumed UTC.
    """
    parsed = parser.parse(value)
    if parsed.tzinfo:
        parsed = parsed.astimezone(tz.tzutc()).replace(tzinfo=None)
    return parsed
def map_show_keys(data):
    """Normalise show metadata keys/values and return the mapped dict.

    Lower-cases keys and remaps them via the module-level ``map_show``
    table.  Per-key value handling: artwork paths are URL-prefixed;
    genre/gueststars/writers lists are joined to '|a|b|' strings with the
    raw lists preserved under *_list keys; 'firstaired' is normalised to
    YYYY-MM-DD; 'imdbid' is validated against a (tt)?digits pattern;
    anything else is passed through clean_data.  Returns None when the
    show has no usable seriesname.

    NOTE(review): closure - relies on self, map_show, clean_data and parse
    from the enclosing scope; py2-only (iteritems/basestring/StandardError).
    """
    keep_data = {}   # raw list values, merged back in after the loop
    del_keys = []    # original keys to drop once iteration is done
    new_data = {}    # renamed keys, merged in after the loop
    for k, v in data.iteritems():
        k_org = k
        k = k.lower()
        if None is not v:
            if k in ['banner', 'fanart', 'poster'] and v:
                v = self.config['url_artworkPrefix'] % v
            elif 'genre' == k:
                keep_data['genre_list'] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, basestring)])
            elif 'gueststars' == k:
                keep_data['gueststars_list'] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, basestring)])
            elif 'writers' == k:
                keep_data[k] = v
                v = '|%s|' % '|'.join([clean_data(c) for c in v if isinstance(c, basestring)])
            elif 'firstaired' == k:
                if v:
                    try:
                        # fuzzy-parse free-form air dates to YYYY-MM-DD
                        v = parse(v, fuzzy=True).strftime('%Y-%m-%d')
                    except (StandardError, Exception):
                        v = None
                else:
                    v = None
            elif 'imdbid' == k:
                if v:
                    if re.search(r'^(tt)?\d{1,7}$', v, flags=re.I):
                        v = clean_data(v)
                    else:
                        # malformed imdb id: blank it rather than keep junk
                        v = ''
            else:
                v = clean_data(v)
        else:
            if 'seriesname' == k:
                # fall back to the first alias when the name is missing
                if isinstance(data.get('aliases'), list) and 0 < len(data.get('aliases')):
                    v = data['aliases'].pop(0)
                # this is a invalid show, it has no Name
                if None is v:
                    return None
        if k in map_show:
            k = map_show[k]
        if k_org is not k:
            # defer rename: resizing the dict during iteration would raise
            del_keys.append(k_org)
            new_data[k] = v
        else:
            data[k] = v
    for d in del_keys:
        del(data[d])
    if isinstance(data, dict):
        data.update(new_data)
        data.update(keep_data)
    return data
def updateWatchedData(self):
    """Pull the trakt watched-episode history and record it locally.

    For each history entry, resolves a show id (tvdb preferred, tvrage
    fallback), converts the watched timestamp to local epoch seconds and
    updates tv_episodes.last_watched when it is newer than what is stored.
    Logs a per-show summary and finally refreshes next-episode data.
    Only traktException is caught; other errors propagate.
    """
    try:
        response = self.trakt_api.traktRequest("users/me/history/episodes")
        changes = dict()  # show_name -> count of episodes newly marked watched
        myDB = db.DBConnection()
        for data in response:
            show_id = None
            if not data['show']['ids']["tvdb"] is None:
                show_id = data['show']['ids']["tvdb"]
            elif not data['show']['ids']["tvrage"] is None:
                show_id = data['show']['ids']["tvrage"]
            else:
                logger.log(u"Could not retrieve show_id from trakt history", logger.WARNING)
                continue
            show_name = data["show"]["title"]
            season = data["episode"]["season"]
            episode = data["episode"]["number"]
            # trakt timestamp -> epoch seconds (local time via mktime)
            watched = time.mktime(parser.parse(data["watched_at"]).timetuple())
            # only update when not yet watched or watched earlier than this
            cursor = myDB.action("UPDATE tv_episodes SET last_watched=? WHERE showid=? AND season=? AND episode=? AND (last_watched IS NULL OR last_watched < ?)",
                                 [watched, show_id, season, episode, watched])
            if cursor.rowcount > 0:
                changes[show_name] = changes.get(show_name, 0) + 1
                logger.log("Updated " + show_name + ", episode " + str(season) + "x" + str(episode) + ": Episode was watched at " + str(watched))
                # NOTE(review): Show.find may return None for an unknown
                # show_id, which would raise AttributeError here - confirm
                show = Show.find(sickbeard.showList, int(show_id))
                show.last_seen = max(show.last_seen, watched)
        message = "Watched episodes synchronization complete: ";
        if (len(changes) == 0):
            message += "No changes detected."
        else:
            message += "Marked as watched "
            first = True;
            for show_name in changes:
                if (not first):
                    message += ", "
                message += str(changes[show_name]) + " episodes of " + show_name
                first = False;
        logger.log(message)
        self._updateAllShowsNextEpisodeData()
    except traktException as e:
        logger.log(u"Could not connect to trakt service, cannot synch Watched Data: %s" % ex(e), logger.ERROR)
def _getNames(self):
    """Return the xem name map, refreshing the on-disk cache when it is
    missing or more than two days old."""
    cache_is_stale = (not self.hc.last_cache
                      or parser.parse(self.hc.last_cache) < datetime.now() - timedelta(days=2))
    if cache_is_stale:
        log("getting new names from xem")
        resp = requests.get("http://thexem.de/map/allNames?origin=tvdb&seasonNumbers=1")
        names = resp.json()["data"]
        cache_file_path = os.path.join(my_install_folder, 'cache.json')
        log("saving xem names to %s" % cache_file_path)
        with open(cache_file_path, "w") as fh:
            fh.write(json.dumps(names))
        self.hc.last_cache = str(datetime.now())
    # always serve from the cache file so disk and memory stay in sync
    with open(os.path.join(my_install_folder, 'cache.json'), "r") as fh:
        return json.loads(fh.read())
def _parse_pub_date(date_str):
    """Best-effort parse of a pub-date string to a naive local datetime.

    Fuzzy-parses ``date_str``, converts to sb_timezone when possible and
    strips tzinfo.  Returns None for empty input or any parse failure.
    """
    result = None
    try:
        if date_str:
            when = parser.parse(date_str, fuzzy=True)
            try:
                when = when.astimezone(sb_timezone)
            except (StandardError, Exception):
                # naive datetimes can't be converted - keep as parsed
                pass
            if isinstance(when, datetime.datetime):
                result = when.replace(tzinfo=None)
    except (StandardError, Exception):
        pass
    return result
def _parse_pub_date(date_str):
    """Best-effort parse of a pub-date string to a naive local datetime.

    Fuzzy-parses ``date_str``, converts to sb_timezone when possible and
    strips tzinfo.  Returns None for empty input or any parse failure.
    """
    result = None
    try:
        if date_str:
            when = parser.parse(date_str, fuzzy=True)
            try:
                when = when.astimezone(sb_timezone)
            except (BaseException, Exception):
                # naive datetimes can't be converted - keep as parsed
                pass
            if isinstance(when, datetime.datetime):
                result = when.replace(tzinfo=None)
    except (BaseException, Exception):
        pass
    return result
def updateWatchedData(self):
    """Sync the trakt watched-episode history into the local database.

    Fetches users/me/history/episodes, stamps tv_episodes.last_watched
    for each entry when it is newer than the stored value, then logs a
    per-show summary and refreshes next-episode data.
    """
    self.amActive = True
    method = "users/me/history/episodes"
    response = trakt.sendData(method)
    if response != False:
        changes = dict()  # show_name -> count of episodes newly marked watched
        myDB = db.DBConnection()
        for data in response:
            show_name = data["show"]["title"]
            show_id = data["show"]["ids"]["tvdb"]
            season = data["episode"]["season"]
            episode = data["episode"]["number"]
            # trakt timestamp -> epoch seconds (local time via mktime)
            watched = time.mktime(parser.parse(data["watched_at"]).timetuple())
            # only update when not yet watched or watched earlier than this
            cursor = myDB.action(
                "UPDATE tv_episodes SET last_watched=? WHERE showid=? AND season=? AND episode=? AND (last_watched IS NULL OR last_watched < ?)",
                [watched, show_id, season, episode, watched])
            if cursor.rowcount > 0:
                changes[show_name] = changes.get(show_name, 0) + 1
                logger.log("Updated " + show_name + ", episode " +
                           str(season) + "x" + str(episode) +
                           " watched @ " + str(watched))
        message = "Watched episodes synchronization complete: "
        if (len(changes) == 0):
            message += "No changes detected."
        else:
            message += "Marked as watched "
            first = True
            for show_name in changes:
                # Bug fix: the separator logic was inverted ('if first'),
                # which put ", " before the first show and omitted it
                # between subsequent shows.
                if not first:
                    message += ", "
                first = False
                message += str(changes[show_name]) + " episodes of " + show_name + ""
        logger.log(message)
    else:
        logger.log("Watched episodes synchronization failed.")
    self.updateNextEpisodeData()
def _parse_pub_date(item, default=None):
    """Extract an RSS item's <pubDate> and parse it best-effort.

    Fuzzy-parses the text, converts to sb_timezone when possible and
    strips tzinfo.  Returns ``default`` when the element is missing,
    empty, or unparseable.
    """
    result = default
    try:
        text = item.findtext('pubDate')
        if text:
            when = parser.parse(text, fuzzy=True)
            try:
                when = when.astimezone(sb_timezone)
            except (StandardError, Exception):
                # naive datetimes can't be converted - keep as parsed
                pass
            if isinstance(when, datetime.datetime):
                result = when.replace(tzinfo=None)
    except (StandardError, Exception):
        pass
    return result
def updateWatchedData(self):
    """Sync the trakt watched-episode history into the local database.

    Fetches users/me/history/episodes, stamps tv_episodes.last_watched
    for each entry when it is newer than the stored value, then logs a
    per-show summary and refreshes next-episode data.
    """
    self.amActive = True
    method = "users/me/history/episodes"
    response = trakt.sendData(method)
    if response != False:
        changes = dict()  # show_name -> count of episodes newly marked watched
        myDB = db.DBConnection()
        for data in response:
            show_name = data["show"]["title"]
            show_id = data["show"]["ids"]["tvdb"]
            season = data["episode"]["season"]
            episode = data["episode"]["number"]
            # trakt timestamp -> epoch seconds (local time via mktime)
            watched = time.mktime(parser.parse(data["watched_at"]).timetuple())
            # only update when not yet watched or watched earlier than this
            cursor = myDB.action("UPDATE tv_episodes SET last_watched=? WHERE showid=? AND season=? AND episode=? AND (last_watched IS NULL OR last_watched < ?)",
                                 [watched, show_id, season, episode, watched])
            if cursor.rowcount > 0:
                changes[show_name] = changes.get(show_name, 0) + 1
                logger.log("Updated " + show_name + ", episode " + str(season) + "x" + str(episode) + " watched @ " + str(watched))
        message = "Watched episodes synchronization complete: "
        if (len(changes) == 0):
            message += "No changes detected."
        else:
            message += "Marked as watched "
            first = True
            for show_name in changes:
                # Bug fix: the separator logic was inverted ('if first'),
                # which put ", " before the first show and omitted it
                # between subsequent shows.  Also dropped the stray
                # statement-terminating semicolons.
                if not first:
                    message += ", "
                first = False
                message += str(changes[show_name]) + " episodes of " + show_name + ""
        logger.log(message)
    else:
        logger.log("Watched episodes synchronization failed.")
    self.updateNextEpisodeData()
def selectSeries(self, allSeries):
    """Filter ``allSeries`` down to shows whose name or an alias exactly
    matches the configured search term (case-insensitive, with an
    unidecoded variant), normalising each match's 'firstaired' date."""
    matches = []
    # get all available shows
    if allSeries:
        term = self.config.get('searchterm', '').lower()
        if term:
            # try to pick a show that's in my show list
            for show in allSeries:
                if show in matches:
                    continue
                candidate_names = []
                if 'seriesname' in show:
                    name = show['seriesname'].lower()
                    candidate_names += [name, unidecode(name.encode('utf-8').decode('utf-8'))]
                if 'aliasnames' in show:
                    name = show['aliasnames'].lower()
                    candidate_names += name.split('|') + unidecode(
                        name.encode('utf-8').decode('utf-8')).split('|')
                if term in set(candidate_names):
                    # guarantee a parseable date before normalising it
                    if 'firstaired' not in show:
                        show['firstaired'] = str(datetime.date.fromordinal(1))
                    # drop '-00' placeholder month/day parts before parsing
                    show['firstaired'] = re.sub('([-]0{2})+', '', show['firstaired'])
                    fixed = parser.parse(show['firstaired'], fuzzy=True).date()
                    show['firstaired'] = fixed.strftime('%Y-%m-%d')
                    if show not in matches:
                        matches += [show]
    return matches
def get_tvmaze_by_name(showname, premiere_date):
    """
    :param showname: show name
    :type showname: AnyStr
    :param premiere_date: premiere date
    :type premiere_date: datetime.date
    :return: dict of TVInfo source constant -> id (empty/zero ids removed)
    :rtype: Dict
    """
    ids = {}
    try:
        search_url = '%ssearch/shows?%s' % (
            sickbeard.TVInfoAPI(TVINFO_TVMAZE).config['base_url'],
            urlencode({'q': clean_show_name(showname)}))
        res = get_tvmaze_data(url=search_url, parse_json=True, raise_status_code=True, timeout=120)
        if res:
            for entry in res:
                # skip results without the fields we need
                if 'show' not in entry:
                    continue
                show = entry['show']
                if 'premiered' not in show or 'externals' not in show:
                    continue
                premiered = parse(show['premiered'], fuzzy=True)
                # keep scanning when the air date is too far off
                if not (abs(premiere_date - premiered.date()) < datetime.timedelta(days=2)):
                    continue
                externals = show['externals']
                ids[TVINFO_TVRAGE] = externals.get('tvrage', 0)
                ids[TVINFO_TVDB] = externals.get('thetvdb', 0)
                ids[TVINFO_IMDB] = try_int(str(externals.get('imdb')).replace('tt', ''))
                ids[TVINFO_TVMAZE] = show.get('id', 0)
                break
    except (BaseException, Exception):
        pass
    return {k: v for k, v in iteritems(ids) if v not in (None, '', 0)}
def _parse_pub_date(item, default=None):
    # type: (etree.Element, Union[int, None]) -> Union[datetime.date, None]
    """Extract an RSS item's <pubDate> and parse it best-effort.

    Fuzzy-parses the text, converts to sb_timezone when possible and
    strips tzinfo.  Returns ``default`` when the element is missing,
    empty, or unparseable.
    """
    result = default
    try:
        text = item.findtext('pubDate')
        if text:
            when = parser.parse(text, fuzzy=True)
            try:
                when = when.astimezone(sb_timezone)
            except (BaseException, Exception):
                # naive datetimes can't be converted - keep as parsed
                pass
            if isinstance(when, datetime.datetime):
                result = when.replace(tzinfo=None)
    except (BaseException, Exception):
        pass
    return result
def VTiter(self, *parsedArgs, **envars):
    """Virtual-table iterator: run `query` and yield its rows grouped into
    sliding time windows of `timewindow` seconds, each row prefixed with a
    window id ('wid') column.

    Required named args: query, timewindow (seconds), timecolumn (index of
    the timestamp column in the result rows).
    NOTE(review): Python-2-only code (c.next(), dict.keys()[0] indexing).
    """
    largs, dictargs = self.full_parse(parsedArgs)
    if 'query' not in dictargs:
        raise functions.OperatorError(
            __name__.rsplit('.')[-1], "No query argument ")
    query = dictargs['query']
    if 'timewindow' not in dictargs:
        raise functions.OperatorError(
            __name__.rsplit('.')[-1], "No TimeWindow argument ")
    else:
        winlen = int(dictargs['timewindow'])
    if 'timecolumn' not in dictargs:
        raise functions.OperatorError(
            __name__.rsplit('.')[-1], "No timecolumn argument ")
    else:
        timecolumn = int(dictargs['timecolumn'])
    cur = envars['db'].cursor()
    c = cur.execute(query, parse=False)
    # first yield is the schema row; close the inner cursor if the
    # consumer stops the generator right here
    try:
        yield [('wid', 'integer')] + list(cur.getdescriptionsafe())
    except StopIteration:
        try:
            raise
        finally:
            try:
                c.close()
            except:
                pass
    wid = 0
    secs = 0
    # seed the window with the first data row; timestamps are fuzzy-parsed
    # and converted to epoch seconds
    row = c.next()
    firstTime = int(
        time.mktime(parser.parse(row[timecolumn], fuzzy=True).timetuple()))
    head = {firstTime: [row]}  # oldest {epoch: [rows]} bucket of the window
    window = deque([])         # newer {epoch: [rows]} buckets
    while row:
        prev = row
        try:
            row = c.next()
        except StopIteration:
            # input exhausted: flush the only window if none was emitted yet
            if wid == 0:
                for k in head.keys():
                    for t in head[k]:
                        yield (wid, ) + t
                for rl in window:
                    for k in rl.keys():
                        for t in rl[k]:
                            yield (wid, ) + t
            break
        secs = int(
            time.mktime(
                parser.parse(row[timecolumn], fuzzy=True).timetuple()))
        if secs <= firstTime + winlen:
            # row still inside the current window: append to it
            # NOTE(review): compares prev[0] with row[timecolumn] - looks
            # intentional only when timecolumn == 0; confirm
            if prev[0] == row[timecolumn] and window:
                old = window.pop()[secs]
                old.append(row)
                rowlist = {secs: old}
            else:
                rowlist = {secs: [row]}
            window.append(rowlist)
        else:
            # row is beyond the window: emit pending window 0, then slide
            if wid == 0:
                for k in head.keys():
                    for t in head[k]:
                        yield (wid, ) + t
                for rl in window:
                    for k in rl.keys():
                        for t in rl[k]:
                            yield (wid, ) + t
            # drop leading buckets until the new row fits the window span
            while secs > firstTime + winlen and window:
                try:
                    head = window.popleft()
                    firstTime = head.keys()[0]
                except IndexError:
                    break
            rowlist = {secs: [row]}
            window.append(rowlist)
            wid += 1
            # emit the newly formed window
            for k in head.keys():
                for t in head[k]:
                    yield (wid, ) + t
            for rl in window:
                for k in rl.keys():
                    for t in rl[k]:
                        yield (wid, ) + t
def VTiter(self, *parsedArgs, **envars):
    """Virtual-table iterator: run `query` and yield its rows grouped into
    sliding time windows of `timewindow` seconds, each row prefixed with a
    window id ('wid') column.

    Required named args: query, timewindow (seconds), timecolumn (index of
    the timestamp column in the result rows).
    NOTE(review): Python-2-only code (c.next(), dict.keys()[0] indexing).
    """
    largs, dictargs = self.full_parse(parsedArgs)
    if 'query' not in dictargs:
        raise functions.OperatorError(__name__.rsplit('.')[-1], "No query argument ")
    query = dictargs['query']
    if 'timewindow' not in dictargs:
        raise functions.OperatorError(__name__.rsplit('.')[-1], "No TimeWindow argument ")
    else:
        winlen = int(dictargs['timewindow'])
    if 'timecolumn' not in dictargs:
        raise functions.OperatorError(__name__.rsplit('.')[-1], "No timecolumn argument ")
    else:
        timecolumn = int(dictargs['timecolumn'])
    cur = envars['db'].cursor()
    c = cur.execute(query, parse=False)
    # first yield is the schema row; close the inner cursor if the
    # consumer stops the generator right here
    try:
        yield [('wid', 'integer')] + list(cur.getdescriptionsafe())
    except StopIteration:
        try:
            raise
        finally:
            try:
                c.close()
            except:
                pass
    wid = 0
    secs = 0
    # seed the window with the first data row; timestamps are fuzzy-parsed
    # and converted to epoch seconds
    row = c.next()
    firstTime = int(time.mktime(parser.parse(row[timecolumn], fuzzy=True).timetuple()))
    head = {firstTime: [row]}  # oldest {epoch: [rows]} bucket of the window
    window = deque([])         # newer {epoch: [rows]} buckets
    while row:
        prev = row
        try:
            row = c.next()
        except StopIteration:
            # input exhausted: flush the only window if none was emitted yet
            if wid == 0:
                for k in head.keys():
                    for t in head[k]:
                        yield (wid,) + t
                for rl in window:
                    for k in rl.keys():
                        for t in rl[k]:
                            yield (wid,) + t
            break
        secs = int(time.mktime(parser.parse(row[timecolumn], fuzzy=True).timetuple()))
        if secs <= firstTime + winlen:
            # row still inside the current window: append to it
            # NOTE(review): compares prev[0] with row[timecolumn] - looks
            # intentional only when timecolumn == 0; confirm
            if prev[0] == row[timecolumn] and window:
                old = window.pop()[secs]
                old.append(row)
                rowlist = {secs: old}
            else:
                rowlist = {secs: [row]}
            window.append(rowlist)
        else:
            # row is beyond the window: emit pending window 0, then slide
            if wid == 0:
                for k in head.keys():
                    for t in head[k]:
                        yield (wid,) + t
                for rl in window:
                    for k in rl.keys():
                        for t in rl[k]:
                            yield (wid,) + t
            # drop leading buckets until the new row fits the window span
            while secs > firstTime + winlen and window:
                try:
                    head = window.popleft()
                    firstTime = head.keys()[0]
                except IndexError:
                    break
            rowlist = {secs: [row]}
            window.append(rowlist)
            wid += 1
            # emit the newly formed window
            for k in head.keys():
                for t in head[k]:
                    yield (wid,) + t
            for rl in window:
                for k in rl.keys():
                    for t in rl[k]:
                        yield (wid,) + t
def get(self):
    """GAE request handler: render a cached RSS feed as HTML or as a
    document.write() JavaScript snippet.

    Query params: 'uri' (feed URL, used as datastore key) and 'format'
    ('html' for the HTML view, anything else for the JS view).  A new
    feed is fetched synchronously; a stale existing feed is refreshed
    asynchronously via the /fetch task queue.
    """
    #Find existing feed
    uri = self.request.get('uri')
    format = self.request.get('format')
    feed = Feed.get_by_key_name(uri)
    if not feed:
        #new feed
        feed = Feed()
        feed.uri = uri
        feed.fetch()
        if not feed.error:
            feed.put()
    else:
        #existing feed
        if feed.cache_expired():
            # refresh in the background; serve the cached copy now
            taskqueue.add(url = '/fetch', params = {'uri': uri}, method = 'GET')
    if not feed or feed.error:
        # NOTE(review): if feed is falsy, feed.error here would raise
        # AttributeError before anything is written - confirm Feed()
        # always carries an 'error' attribute
        self.response.out.write("document.write('<ul><li>Error: " + feed.error + "</li></ul>')")
        return
    option = Option(self.request)

    def get_updated_format(parsed_time):
        # map the 'tm' display option to a strftime format
        # ('n' = no timestamp; 's'/'m'/'l' presets; else custom format)
        if option.tm == 'n':
            return ''
        timef = ''
        if option.tm == 's':
            timef = '(%m/%d)'
        elif option.tm == 'm':
            timef = '(%Y/%m/%d)'
        elif option.tm == 'l':
            timef = '(%Y/%m/%d %H:%M)'
        else:
            timef = option.tm
        return parsed_time.strftime(timef)

    for entry in feed.entries:
        # fall back to "now" when the entry's date can't be parsed
        try:
            parsed_time = parse(entry.updated)
        except:
            parsed_time = datetime.datetime.now()
        entry.updated_time = parsed_time.strftime('%Y/%m/%d %H:%M:%s')
        entry.updated_format = get_updated_format(parsed_time)
        # titles are rendered on one line: strip embedded newlines
        entry.title = re.sub('[\r\n]', ' ', entry.title)
    if option.st == 's':
        feed.entries.sort(sorter)
    if option.mc > 0:
        # cap the number of entries to the configured max count
        feed.entries = feed.entries[0:int(option.mc)]
    protocol = 'https' if os.environ['HTTPS'] == 'on' else 'http'
    template_values = {
        'SITE_NAME': 'Tomato Feed',
        'APP_URI': protocol + '://' + os.environ['HTTP_HOST'],
        'rss_uri': uri,
        'option': option,
        'entries': feed.entries,
        'entries_count': len(feed.entries),
    }
    if format == 'html':
        path = os.path.join(os.path.dirname(__file__), 'views/list.html')
    else:
        self.response.headers['Content-Type'] = 'application/x-javascript;charset=utf-8;'
        path = os.path.join(os.path.dirname(__file__), 'views/list.js')
    self.response.out.write(template.render(path, template_values))