def _change_missing_episodes():
    """Flip overdue UNAIRED episodes to WANTED (or SKIPPED for paused shows).

    Selects all UNAIRED regular-season episodes whose air date has passed and,
    once an episode's end time (air time + runtime) is behind the current
    time, updates its status in the database in one batched transaction.
    """
    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # with network timezone data we can safely look one day ahead; without it,
    # stay two days behind to avoid changing episodes prematurely
    if network_timezones.network_dict:
        cur_date = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        cur_date = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()

    cur_time = datetime.datetime.now(network_timezones.sb_timezone)

    my_db = db.DBConnection()
    sql_results = my_db.select(
        'SELECT * FROM tv_episodes'
        ' WHERE status = ? AND season > 0 AND airdate <= ? AND airdate > 1'
        ' ORDER BY showid',
        [common.UNAIRED, cur_date])

    sql_l = []
    show = None
    wanted = False
    for sqlEp in sql_results:
        try:
            # rows are ordered by showid, so the show lookup is reused
            if not show or show.indexerid != int(sqlEp['showid']):
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp['showid']))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show:
                continue

        except exceptions.MultipleShowObjectsException:
            logger.log(u'ERROR: expected to find a single show matching %s' % sqlEp['showid'])
            continue

        try:
            end_time = (network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network)
                        + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)))

            # filter out any episodes that haven't aired yet
            if end_time > cur_time:
                continue
        except (StandardError, Exception):
            # if an error occurred assume the episode hasn't aired yet
            continue

        ep = show.getEpisode(int(sqlEp['season']), int(sqlEp['episode']))
        with ep.lock:
            # Now that it is time, change state of UNAIRED show into expected or skipped
            ep.status = (common.WANTED, common.SKIPPED)[ep.show.paused]
            result = ep.get_sql()
            if None is not result:
                sql_l.append(result)

            wanted |= (False, True)[common.WANTED == ep.status]

    # BUG FIX: this message previously sat in a `for ... else` clause; since
    # the loop contains no `break`, the else ALWAYS ran and the message was
    # logged even when episodes had just been marked wanted.
    if not wanted:
        logger.log(u'No unaired episodes marked wanted')

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    if wanted:
        logger.log(u'Found new episodes marked wanted')
def _change_missing_episodes():
    """Flip overdue UNAIRED episodes to WANTED (or SKIPPED for paused shows).

    Selects all UNAIRED regular-season episodes whose air date has passed and,
    once an episode's end time (air time + runtime) is behind the current
    time, updates its status in the database in one batched transaction.
    """
    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # with network timezone data we can safely look one day ahead; without it,
    # stay two days behind to avoid changing episodes prematurely
    if network_timezones.network_dict:
        cur_date = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        cur_date = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()

    cur_time = datetime.datetime.now(network_timezones.sb_timezone)

    my_db = db.DBConnection()
    sql_results = my_db.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?', [common.UNAIRED, cur_date])

    sql_l = []
    show = None
    wanted = False
    for sqlEp in sql_results:
        try:
            if not show or show.indexerid != int(sqlEp['showid']):
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp['showid']))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show:
                continue

        except exceptions.MultipleShowObjectsException:
            logger.log(u'ERROR: expected to find a single show matching %s' % sqlEp['showid'])
            continue

        try:
            end_time = (network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network)
                        + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)))

            # filter out any episodes that haven't aired yet
            if end_time > cur_time:
                continue
        # BUG FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception
        except Exception:
            # if an error occurred assume the episode hasn't aired yet
            continue

        ep = show.getEpisode(int(sqlEp['season']), int(sqlEp['episode']))
        with ep.lock:
            # Now that it is time, change state of UNAIRED show into expected or skipped
            ep.status = (common.WANTED, common.SKIPPED)[ep.show.paused]
            result = ep.get_sql()
            if None is not result:
                sql_l.append(result)

            wanted |= (False, True)[common.WANTED == ep.status]

    # BUG FIX: this message previously sat in a `for ... else` clause; since
    # the loop contains no `break`, the else ALWAYS ran and the message was
    # logged even when episodes had just been marked wanted.
    if not wanted:
        logger.log(u'No unaired episodes marked wanted')

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    if wanted:
        logger.log(u'Found new episodes marked wanted')
def test_timezone(self):
    """NBC 'Monday 9:00 PM' on 2018-09-02 must resolve to 03:00 next day in CET."""
    network_timezones.update_network_dict()
    network_timezones.sb_timezone = tz.gettz('CET', zoneinfo_priority=True)

    ordinal_day = datetime.date(2018, 9, 2).toordinal()
    air_time = 'Monday 9:00 PM'
    parsed = network_timezones.parse_date_time(ordinal_day, air_time, 'NBC')

    cet = tz.gettz('CET', zoneinfo_priority=True)
    expected = datetime.datetime(2018, 9, 3, 3, 0, 0).replace(tzinfo=cet)
    self.assertEqual(parsed, expected)
def test_timezone(self):
    """Parsing an NBC air time must honour the configured CET app timezone."""
    network_timezones.update_network_dict()
    cet_zone = tz.gettz('CET', zoneinfo_priority=True)
    network_timezones.sb_timezone = cet_zone

    day = datetime.date(2018, 9, 2).toordinal()
    result = network_timezones.parse_date_time(day, 'Monday 9:00 PM', 'NBC')

    expected = datetime.datetime(2018, 9, 3, 3, 0, 0).replace(tzinfo=cet_zone)
    self.assertEqual(result, expected)
def render_body(context, curListType, myShowList, **pageargs): __M_caller = context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(curListType=curListType, pageargs=pageargs, myShowList=myShowList) _import_ns = {} _mako_get_namespace(context, '__anon_0x7f617e93e310')._populate( _import_ns, [u'renderQualityPill']) show_stat = _import_ns.get('show_stat', context.get('show_stat', UNDEFINED)) renderQualityPill = _import_ns.get( 'renderQualityPill', context.get('renderQualityPill', UNDEFINED)) static_url = _import_ns.get('static_url', context.get('static_url', UNDEFINED)) bool = _import_ns.get('bool', context.get('bool', UNDEFINED)) srRoot = _import_ns.get('srRoot', context.get('srRoot', UNDEFINED)) str = _import_ns.get('str', context.get('str', UNDEFINED)) ValueError = _import_ns.get('ValueError', context.get('ValueError', UNDEFINED)) _ = _import_ns.get('_', context.get('_', UNDEFINED)) __M_writer = context.writer() __M_writer(u'\n') __M_writer(u'\n') __M_writer(u'\n\n') if sickbeard.HOME_LAYOUT == 'poster': __M_writer(u' <div id="') __M_writer( unicode( ('container', 'container-anime')[curListType == 'Anime'])) __M_writer( u'" class="show-grid clearfix">\n <div class="posterview">\n' ) for curLoadingShow in sickbeard.showQueueScheduler.action.loading_show_list: __M_writer(u' ') loading_show = curLoadingShow.info __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['loading_show'] if __M_key in __M_locals_builtin_stored ])) __M_writer( u'\n <div class="show-container" data-name="' ) __M_writer(unicode(loading_show.sort_name)) __M_writer( u'" data-date="1" data-network="0" data-progress="0">\n <div class="show-image">\n <img alt="" title="' ) __M_writer(unicode(loading_show.name)) __M_writer( u'" class="show-image" style="border-bottom: 1px solid #111;" src="" data-src="' ) __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') 
__M_writer(filters.url_escape(unicode(loading_show.id))) __M_writer( u'&which=poster_thumb" />\n </div>\n <div class="show-information">\n <div class="progressbar hidden-print" style="position:relative;" data-show-id="' ) __M_writer(filters.url_escape(unicode(loading_show.id))) __M_writer( u'" data-progress-percentage="0"></div>\n <div class="show-title">' ) __M_writer(unicode(_('Loading'))) __M_writer(u' (') __M_writer(unicode(loading_show.name)) __M_writer( u')</div>\n <div class="show-date"> </div>\n <div class="show-details">\n <table class="show-details" width="100%" cellspacing="1" border="0" cellpadding="0">\n <tr>\n <td class="show-table">\n <span class="show-dlstats" title="' ) __M_writer(unicode('Loading')) __M_writer(u'">') __M_writer(unicode('Loading')) __M_writer( u'</span>\n </td>\n <td class="show-table">\n <span title="' ) __M_writer(unicode(loading_show.network)) __M_writer( u'"><img class="show-network-image" src="" data-src="') __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') __M_writer(filters.url_escape(unicode(loading_show.id))) __M_writer(u'&which=network" alt="') __M_writer(unicode(loading_show.network)) __M_writer(u'" title="') __M_writer(unicode(loading_show.network)) __M_writer( u'" /></span>\n </td>\n <td class="show-table">\n ' ) __M_writer( unicode( renderQualityPill(loading_show.quality, showTitle=True, overrideClass="show-quality"))) __M_writer( u'\n </td>\n </tr>\n </table>\n </div>\n </div>\n </div>\n' ) for curShow in myShowList: __M_writer(u' ') if sickbeard.showQueueScheduler.action.is_in_remove_queue( curShow ) or sickbeard.showQueueScheduler.action.is_being_removed( curShow): continue cur_airs_next = '' cur_snatched = 0 cur_downloaded = 0 cur_total = 0 download_stat_tip = '' display_status = curShow.status if display_status: if re.search(r'(?i)(?:new|returning)\s*series', curShow.status): display_status = 'Continuing' elif re.search(r'(?i)(?:nded)', curShow.status): display_status = 'Ended' if curShow.indexerid in 
show_stat: cur_airs_next = show_stat[ curShow.indexerid]['ep_airs_next'] cur_snatched = show_stat[ curShow.indexerid]['ep_snatched'] or 0 cur_downloaded = show_stat[ curShow.indexerid]['ep_downloaded'] or 0 cur_total = show_stat[curShow.indexerid]['ep_total'] or 0 download_stat = str(cur_downloaded) download_stat_tip = _('Downloaded') + ": " + str( cur_downloaded) if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + "
" + _( 'Snatched') + ": " + str(cur_snatched) download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + "
" + _( 'Total') + ": " + str(cur_total) nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = _('Unaired') progressbar_percent = nom * 100 / den data_date = '6000000000.0' if cur_airs_next: data_date = calendar.timegm( sbdatetime.sbdatetime.convert_to_setting( network_timezones.parse_date_time( cur_airs_next, curShow.airs, curShow.network)).timetuple()) elif display_status: if display_status != 'Ended' and curShow.paused: data_date = '5000000500.0' elif display_status == 'Continuing': data_date = '5000000000.0' elif display_status == 'Ended': data_date = '5000000100.0' __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in [ 'nom', 'cur_downloaded', 'display_status', 'cur_total', 'cur_snatched', 'progressbar_percent', 'cur_airs_next', 'den', 'download_stat', 'data_date', 'download_stat_tip' ] if __M_key in __M_locals_builtin_stored ])) __M_writer( u'\n <div class="show-container" id="show') __M_writer(unicode(curShow.indexerid)) __M_writer(u'" data-name="') __M_writer(unicode(curShow.sort_name)) __M_writer(u'" data-date="') __M_writer(unicode(data_date)) __M_writer(u'" data-network="') __M_writer(unicode(curShow.network)) __M_writer(u'" data-progress="') __M_writer(unicode(progressbar_percent)) __M_writer( u'">\n <div class="show-image">\n <a href="' ) __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(unicode(curShow.indexerid)) __M_writer( u'"><img alt="" class="show-image" src="" data-src="') __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') __M_writer(unicode(curShow.indexerid)) __M_writer( u'&which=poster_thumb" /></a>\n </div>\n\n <div class="show-information">\n <div class="progressbar hidden-print" style="position:relative;" data-show-id="' ) __M_writer(unicode(curShow.indexerid)) __M_writer(u'" data-progress-percentage="') __M_writer(unicode(progressbar_percent)) __M_writer( u'"></div>\n\n <div 
class="show-title">\n ' ) __M_writer(unicode(curShow.name)) __M_writer( u'\n </div>\n\n <div class="show-date">\n' ) if cur_airs_next: __M_writer(u' ') ldatetime = sbdatetime.sbdatetime.convert_to_setting( network_timezones.parse_date_time( cur_airs_next, curShow.airs, curShow.network)) __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['ldatetime'] if __M_key in __M_locals_builtin_stored ])) __M_writer(u'\n ') try: out = str(sbdatetime.sbdatetime.sbfdate(ldatetime)) except ValueError: out = _('Invalid date') pass __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['out'] if __M_key in __M_locals_builtin_stored ])) __M_writer(u'\n ') __M_writer(unicode(out)) __M_writer(u'\n') else: __M_writer(u' ') output_html = '?' display_status = curShow.status if display_status: if display_status != 'Ended' and curShow.paused: output_html = 'Paused' elif display_status: output_html = display_status __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['display_status', 'output_html'] if __M_key in __M_locals_builtin_stored ])) __M_writer(u'\n ') __M_writer(unicode(_(output_html))) __M_writer(u'\n') __M_writer( u' </div>\n\n <div class="show-details">\n <table class="show-details" width="100%" cellspacing="1" border="0" cellpadding="0">\n <tr>\n <td class="show-table">\n <span class="show-dlstats" title="' ) __M_writer(unicode(download_stat_tip)) __M_writer(u'">') __M_writer(unicode(download_stat)) __M_writer( u'</span>\n </td>\n\n <td class="show-table">\n' ) if curShow.network: __M_writer( u' <span title="' ) __M_writer(unicode(curShow.network)) __M_writer( u'"><img class="show-network-image" src="" data-src="') __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') 
__M_writer(unicode(curShow.indexerid)) __M_writer(u'&which=network" alt="') __M_writer(unicode(curShow.network)) __M_writer(u'" title="') __M_writer(unicode(curShow.network)) __M_writer(u'" /></span>\n') else: __M_writer( u' <span title="' ) __M_writer(unicode(_('No Network'))) __M_writer( u'"><img class="show-network-image" src="" data-src="') __M_writer( unicode(static_url('images/network/nonetwork.png'))) __M_writer( u'" alt="No Network" title="No Network" /></span>\n') __M_writer( u' </td>\n <td class="show-table">\n ' ) __M_writer( unicode( renderQualityPill(curShow.quality, showTitle=True, overrideClass="show-quality"))) __M_writer( u'\n </td>\n </tr>\n </table>\n </div>\n </div>\n </div>\n' ) __M_writer(u' </div>\n </div>\n') else: __M_writer( u' <div class="horizontal-scroll">\n <table id="showListTable' ) __M_writer(unicode(curListType)) __M_writer( u'" class="tablesorter" cellspacing="1" border="0" cellpadding="0">\n <thead>\n <tr>\n <th class="nowrap">' ) __M_writer(unicode(_('Next Ep'))) __M_writer(u'</th>\n <th class="nowrap">') __M_writer(unicode(_('Prev Ep'))) __M_writer(u'</th>\n <th>') __M_writer(unicode(_('Show'))) __M_writer(u'</th>\n <th>') __M_writer(unicode(_('Network'))) __M_writer(u'</th>\n <th>') __M_writer(unicode(_('Quality'))) __M_writer(u'</th>\n <th>') __M_writer(unicode(_('Downloads'))) __M_writer(u'</th>\n <th>') __M_writer(unicode(_('Size'))) __M_writer(u'</th>\n <th>') __M_writer(unicode(_('Active'))) __M_writer(u'</th>\n <th>') __M_writer(unicode(_('Status'))) __M_writer( u'</th>\n </tr>\n </thead>\n <tfoot class="hidden-print">\n <tr>\n <th rowspan="1" colspan="1" align="center"><a href="' ) __M_writer(unicode(static_url("addShows/", include_version=False))) __M_writer(u'">') __M_writer(unicode(_('Add'))) __M_writer(u' ') __M_writer(unicode( (_('Show'), _('Anime'))[curListType == 'Anime'])) __M_writer( u'</a></th>\n <th> </th>\n <th> </th>\n <th> </th>\n <th> </th>\n <th> </th>\n <th> </th>\n <th> </th>\n <th> </th>\n </tr>\n 
</tfoot>\n <tbody>\n' ) for curLoadingShow in sickbeard.showQueueScheduler.action.loading_show_list: __M_writer(u' ') loading_show = curLoadingShow.info __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['loading_show'] if __M_key in __M_locals_builtin_stored ])) __M_writer( u'\n <tr>\n <td align="center">(' ) __M_writer(unicode(_('loading'))) __M_writer(u')</td><td align="center"></td>\n') if sickbeard.HOME_LAYOUT == 'small': __M_writer( u' <td class="tvShow">\n <div class="imgsmallposter ' ) __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer(u'">\n') if curLoadingShow.show: __M_writer( u' <a href="' ) __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(filters.url_escape(unicode( loading_show.id))) __M_writer(u'" title="') __M_writer(unicode(loading_show.name)) __M_writer(u'">\n') else: __M_writer( u' <span title="' ) __M_writer(unicode(loading_show.name)) __M_writer(u'">\n') __M_writer( u' <img src="" data-src="' ) __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') __M_writer(filters.url_escape(unicode(loading_show.id))) __M_writer(u'&which=poster_thumb" class="') __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer(u'" alt="') __M_writer(unicode(loading_show.name)) __M_writer(u'"/>\n') if curLoadingShow.show: __M_writer( u' </a>\n <a href="' ) __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(filters.url_escape(unicode( loading_show.id))) __M_writer(u'" style="vertical-align: middle;">') __M_writer(unicode(loading_show.name)) __M_writer(u'</a>\n') else: __M_writer( u' </span>\n <span style="vertical-align: middle;">' ) __M_writer(unicode(_('Loading...'))) __M_writer(u' (') __M_writer(unicode(loading_show.name)) __M_writer(u')</span>\n') __M_writer( u' </div>\n </td>\n' ) elif sickbeard.HOME_LAYOUT == 'banner': __M_writer( u' <td>\n <span style="display: none;">' ) 
__M_writer(unicode(_('Loading...'))) __M_writer(u' (') __M_writer(unicode(loading_show.name)) __M_writer( u')</span>\n <div class="imgbanner ' ) __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer(u'">\n') if curLoadingShow.show: __M_writer( u' <a href="' ) __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(filters.url_escape(unicode( loading_show.id))) __M_writer(u'">\n') __M_writer( u' <img src="" data-src="' ) __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') __M_writer(filters.url_escape(unicode(loading_show.id))) __M_writer(u'&which=banner" class="') __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer(u'" alt="') __M_writer(unicode(loading_show.name)) __M_writer(u'" title="') __M_writer(unicode(loading_show.name)) __M_writer(u'"/>\n') if curLoadingShow.show: __M_writer( u' </a>\n') __M_writer( u' </div>\n </td>\n' ) elif sickbeard.HOME_LAYOUT == 'simple': __M_writer( u' <td class="tvShow">\n') if curLoadingShow.show: __M_writer( u' <a href="') __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(filters.url_escape(unicode( loading_show.id))) __M_writer(u'">') __M_writer(unicode(loading_show.name)) __M_writer(u'</a>\n') else: __M_writer( u' <span title="">' ) __M_writer(unicode(_('Loading...'))) __M_writer(u' (') __M_writer(unicode(loading_show.name)) __M_writer(u')</span>\n') __M_writer(u' </td>\n') __M_writer( u' <td></td>\n <td></td>\n <td></td>\n <td></td>\n <td></td>\n <td></td>\n </tr>\n' ) for curShow in myShowList: __M_writer(u' ') if sickbeard.showQueueScheduler.action.is_in_remove_queue( curShow ) or sickbeard.showQueueScheduler.action.is_being_removed( curShow): continue cur_airs_next = '' cur_airs_prev = '' cur_snatched = 0 cur_downloaded = 0 cur_total = 0 show_size = 0 download_stat_tip = '' if curShow.indexerid in show_stat: cur_airs_next = show_stat[ curShow.indexerid]['ep_airs_next'] cur_airs_prev = show_stat[ curShow.indexerid]['ep_airs_prev'] cur_snatched = 
show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 cur_downloaded = show_stat[ curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 show_size = show_stat[curShow.indexerid]['show_size'] download_stat = str(cur_downloaded) download_stat_tip = _('Downloaded') + ": " + str( cur_downloaded) if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + "
" + _( 'Snatched') + ": " + str(cur_snatched) download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + "
" + _( 'Total') + ": " + str(cur_total) nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = _('Unaired') progressbar_percent = nom * 100 / den __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in [ 'nom', 'cur_airs_prev', 'cur_downloaded', 'cur_total', 'cur_snatched', 'show_size', 'progressbar_percent', 'cur_airs_next', 'den', 'download_stat', 'download_stat_tip' ] if __M_key in __M_locals_builtin_stored ])) __M_writer(u'\n <tr>\n') if cur_airs_next: __M_writer(u' ') airDate = sbdatetime.sbdatetime.convert_to_setting( network_timezones.parse_date_time( cur_airs_next, curShow.airs, curShow.network)) __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['airDate'] if __M_key in __M_locals_builtin_stored ])) __M_writer(u'\n') try: __M_writer( u' <td align="center" class="nowrap">\n <time datetime="' ) __M_writer(unicode(airDate.isoformat('T'))) __M_writer(u'" class="date">') __M_writer( unicode(sbdatetime.sbdatetime.sbfdate(airDate))) __M_writer( u'</time>\n </td>\n' ) except ValueError: __M_writer( u' <td align="center" class="nowrap"></td>\n' ) else: __M_writer( u' <td align="center" class="nowrap"></td>\n' ) __M_writer(u'\n') if cur_airs_prev: __M_writer(u' ') airDate = sbdatetime.sbdatetime.convert_to_setting( network_timezones.parse_date_time( cur_airs_prev, curShow.airs, curShow.network)) __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['airDate'] if __M_key in __M_locals_builtin_stored ])) __M_writer(u'\n') try: __M_writer( u' <td align="center" class="nowrap">\n <time datetime="' ) __M_writer(unicode(airDate.isoformat('T'))) __M_writer(u'" class="date">') __M_writer( unicode(sbdatetime.sbdatetime.sbfdate(airDate))) __M_writer( u'</time>\n 
</td>\n' ) except ValueError: __M_writer( u' <td align="center" class="nowrap"></td>\n' ) else: __M_writer( u' <td align="center" class="nowrap"></td>\n' ) __M_writer(u'\n') if sickbeard.HOME_LAYOUT == 'small': __M_writer( u' <td class="tvShow">\n <div class="imgsmallposter ' ) __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer( u'">\n <a href="') __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(unicode(curShow.indexerid)) __M_writer(u'" title="') __M_writer(unicode(curShow.name)) __M_writer( u'">\n <img src="" data-src="' ) __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') __M_writer(unicode(curShow.indexerid)) __M_writer(u'&which=poster_thumb" class="') __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer(u'" alt="') __M_writer(unicode(curShow.indexerid)) __M_writer( u'"/>\n </a>\n <a href="' ) __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(unicode(curShow.indexerid)) __M_writer(u'" style="vertical-align: middle;">') __M_writer(unicode(curShow.name)) __M_writer( u'</a>\n </div>\n </td>\n' ) elif sickbeard.HOME_LAYOUT == 'banner': __M_writer( u' <td>\n <span style="display: none;">' ) __M_writer(unicode(curShow.name)) __M_writer( u'</span>\n <div class="imgbanner ' ) __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer( u'">\n <a href="') __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') __M_writer(unicode(curShow.indexerid)) __M_writer( u'">\n <img src="" data-src="' ) __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') __M_writer(unicode(curShow.indexerid)) __M_writer(u'&which=banner" class="') __M_writer(unicode(sickbeard.HOME_LAYOUT)) __M_writer(u'" alt="') __M_writer(unicode(curShow.indexerid)) __M_writer(u'" title="') __M_writer(unicode(curShow.name)) __M_writer( u'"/>\n </a>\n </div>\n </td>\n' ) elif sickbeard.HOME_LAYOUT == 'simple': __M_writer( u' <td class="tvShow"><a href="' ) __M_writer(unicode(srRoot)) __M_writer(u'/home/displayShow?show=') 
__M_writer(unicode(curShow.indexerid)) __M_writer(u'">') __M_writer(unicode(curShow.name)) __M_writer(u'</a></td>\n') __M_writer(u'\n') if sickbeard.HOME_LAYOUT != 'simple': __M_writer( u' <td align="center">\n') if curShow.network: __M_writer( u' <span title="' ) __M_writer(unicode(curShow.network)) __M_writer( u'" class="hidden-print"><img id="network" width="54" height="27" src="" data-src="' ) __M_writer(unicode(srRoot)) __M_writer(u'/showPoster/?show=') __M_writer(unicode(curShow.indexerid)) __M_writer(u'&which=network" alt="') __M_writer(unicode(curShow.network)) __M_writer(u'" title="') __M_writer(unicode(curShow.network)) __M_writer( u'" /></span>\n <span class="visible-print-inline">' ) __M_writer(unicode(curShow.network)) __M_writer(u'</span>\n') else: __M_writer( u' <span title="No Network" class="hidden-print"><img id="network" width="54" height="27" src="" data-src="' ) __M_writer( unicode( static_url('images/network/nonetwork.png'))) __M_writer( u'" alt="No Network" title="No Network" /></span>\n <span class="visible-print-inline">' ) __M_writer(unicode(_('No Network'))) __M_writer(u'</span>\n') __M_writer(u' </td>\n') else: __M_writer( u' <td>\n <span title="' ) __M_writer(unicode(curShow.network)) __M_writer(u'">') __M_writer(unicode(curShow.network)) __M_writer(u'</span>\n </td>\n') __M_writer(u'\n <td align="center">') __M_writer( unicode(renderQualityPill(curShow.quality, showTitle=True))) __M_writer( u'</td>\n\n <td align="center">\n') __M_writer( u' <span style="display: none;">' ) __M_writer(unicode(download_stat)) __M_writer( u'</span>\n <div class="progressbar hidden-print" style="position:relative;" data-show-id="' ) __M_writer(unicode(curShow.indexerid)) __M_writer(u'" data-progress-percentage="') __M_writer(unicode(progressbar_percent)) __M_writer(u'" data-progress-text="') __M_writer(unicode(download_stat)) __M_writer(u'" data-progress-tip="') __M_writer(unicode(download_stat_tip)) __M_writer( u'"></div>\n <span 
class="visible-print-inline">' ) __M_writer(unicode(download_stat)) __M_writer( u'</span>\n </td>\n\n <td align="center" data-show-size="' ) __M_writer(unicode(show_size)) __M_writer(u'">') __M_writer(unicode(pretty_file_size(show_size))) __M_writer( u'</td>\n\n <td align="center">\n <span class="displayshow-icon-' ) __M_writer( unicode(("disable", "enable")[not bool(curShow.paused)])) __M_writer(u'" title="') __M_writer(unicode(('No', 'Yes')[not bool(curShow.paused)])) __M_writer( u'"></span>\n </td>\n\n <td align="center">\n ' ) display_status = curShow.status if display_status: if re.search(r'(?i)(?:new|returning)\s*series', curShow.status): display_status = 'Continuing' elif re.search('(?i)(?:nded)', curShow.status): display_status = 'Ended' __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update( __M_dict_builtin([ (__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['display_status'] if __M_key in __M_locals_builtin_stored ])) __M_writer(u'\n ') __M_writer(unicode(_(display_status))) __M_writer( u'\n </td>\n </tr>\n' ) __M_writer(u' </tbody>\n </table>\n </div>\n') return '' finally: context.caller_stack._pop_frame()
def run(self, force=False):
    """Search for newly released episodes and set their initial status.

    Episodes still marked UNAIRED whose end time (air time + runtime) has
    passed are switched to the show's default episode status (or SKIPPED for
    paused shows, specials, and trakt rolling-download mode), then a daily
    search is queued.

    :param force: accepted for scheduler compatibility; not used here
    """
    self.amActive = True

    logger.log(u"Searching for new released episodes ...")

    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # with network timezone data we can safely look one day ahead; without it,
    # pad an extra day so episodes aren't flipped prematurely
    if network_timezones.network_dict:
        curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()

    curTime = datetime.datetime.now(network_timezones.sb_timezone)

    myDB = db.DBConnection()
    sqlResults = myDB.select(
        "SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
        [common.UNAIRED, curDate])

    sql_l = []
    show = None

    for sqlEp in sqlResults:
        try:
            if not show or int(sqlEp["showid"]) != show.indexerid:
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show:
                continue

        except exceptions.MultipleShowObjectsException:
            logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
            continue

        try:
            end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) \
                       + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))

            # filter out any episodes that haven't aired yet
            if end_time > curTime:
                continue
        # BUG FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception
        except Exception:
            # if an error occurred assume the episode hasn't aired yet
            continue

        ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
        with ep.lock:
            if ep.show.paused:
                ep.status = common.SKIPPED
            elif ep.season == 0:
                logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED because is a special season")
                ep.status = common.SKIPPED
            elif sickbeard.TRAKT_USE_ROLLING_DOWNLOAD and sickbeard.USE_TRAKT:
                # trakt rolling download decides wanted episodes itself via
                # updateWantedList() below, so skip here
                ep.status = common.SKIPPED
            else:
                logger.log(u"New episode %s airs today, setting to default episode status for this show: %s" % (ep.prettyName(), common.statusStrings[ep.show.default_ep_status]))
                ep.status = ep.show.default_ep_status

            sql_l.append(ep.get_sql())

    # BUG FIX: this message previously sat in a `for ... else` clause; since
    # the loop contains no `break`, the else ALWAYS ran and the message was
    # logged even when new episodes had been found.
    if not sqlResults:
        logger.log(u"No new released episodes found ...")

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # NOTE(review): the original set an `UpdateWantedList` flag in the trakt
    # branch but never read it - the wanted list was refreshed unconditionally;
    # the dead flag has been removed and the unconditional refresh kept.
    sickbeard.traktRollingScheduler.action.updateWantedList()

    # queue episode for daily search
    dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
    sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

    self.amActive = False
def get_coming_episodes(categories, sort, group, paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
    """
    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    recently = (date.today() - timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()

    db = DBConnection(row_type='dict')
    fields_to_select = ', '.join([
        'airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
        'indexer_id', 'e.location', 'name', 'network', 'paused', 'quality', 'runtime', 'season',
        'show_name', 'showid', 'e.status as epstatus', 's.status'
    ])

    status_list = [WANTED, UNAIRED] + SNATCHED

    # one query per show: episodes between `recently` and the show's next
    # airing (or today) that are wanted, unaired or snatched
    sql_l = []
    for show_obj in sickbeard.showList:
        next_air_date = show_obj.nextEpisode()
        sql_l.append([
            'SELECT DISTINCT {0} '.format(fields_to_select) +
            'FROM tv_episodes e, tv_shows s '
            'WHERE showid = ? '
            'AND airdate <= ? '
            'AND airdate >= ? '
            'AND s.indexer_id = e.showid '
            'AND e.status IN (' + ','.join(['?'] * len(status_list)) + ')',
            [show_obj.indexerid, next_air_date or today, recently] + status_list
        ])

    # IDIOM FIX: previously `if results: results += ... else: results = ...`;
    # appending to an initially-empty list is equivalent and simpler
    results = []
    for sql_i in sql_l:
        results += db.select(*sql_i)

    for index, item in enumerate(results):
        results[index][b'localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item[b'airdate'], item[b'airs'], item[b'network']))
        # snatched episodes sort first (0) in the snatched-aware sorts
        results[index][b'snatchedsort'] = int(not results[index][b'epstatus'] in SNATCHED)

    results.sort(key=ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result[b'paused'] and not paused:
            continue

        # BUG FIX: the double space left after padding 'am'/'pm' was being
        # "removed" by a no-op single-space replace; collapse it properly
        result[b'airs'] = str(result[b'airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        result[b'airdate'] = result[b'localtime'].toordinal()

        if result[b'epstatus'] in SNATCHED:
            if result[b'location']:
                # snatched but already on disk - nothing coming, skip it
                continue
            else:
                category = 'snatched'
        elif result[b'airdate'] < today:
            category = 'missed'
        elif result[b'airdate'] >= next_week:
            category = 'later'
        elif result[b'airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result[b'network']:
            result[b'network'] = ''

        result[b'quality'] = get_quality_string(result[b'quality'])
        result[b'airs'] = sbdatetime.sbftime(result[b'localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result[b'weekday'] = 1 + result[b'localtime'].weekday()
        result[b'tvdbid'] = result[b'indexer_id']
        result[b'airdate'] = sbdatetime.sbfdate(result[b'localtime'], d_preset=dateFormat)
        result[b'localtime'] = result[b'localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def get_coming_episodes(categories, sort, group, paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
    """
    Collect coming episodes using three passes over tv_episodes.

    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()

    # Statuses meaning the episode is already handled and need not be listed.
    downloaded_or_snatched = Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
    qualities_list = downloaded_or_snatched + Quality.ARCHIVED + [IGNORED]

    db = DBConnection()
    fields_to_select = ', '.join([
        'airdate', 'airs', 'description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id',
        'name', 'network', 'paused', 'quality', 'runtime', 'season', 'show_name', 'showid',
        's.status'
    ])

    # Pass 1: episodes airing within the next seven days.
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, next_week] + qualities_list
    )

    done_shows_list = [int(result['showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(['?'] * len(downloaded_or_snatched))

    # Pass 2: for every show not already covered, its next airing after next week.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] + downloaded_or_snatched
    )

    # Pass 3: recently missed episodes still marked WANTED/UNAIRED.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, recently, WANTED, UNAIRED] + qualities_list
    )

    results = [dict(result) for result in results]

    for index, item in enumerate(results):
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))

    # NOTE: cmp-style sort function passed positionally (Python 2 semantics).
    results.sort(ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def get_coming_episodes(categories, sort, group, paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
    """
    Collect coming episodes using three passes over tv_episodes.

    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    # Accept either a list or a pipe-delimited string of category names.
    if not isinstance(categories, list):
        categories = categories.split("|")

    # Unknown sort names fall back to the date sort.
    if sort not in ComingEpisodes.sorts.keys():
        sort = "date"

    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()

    # Statuses meaning the episode is already handled and need not be listed.
    downloaded_or_snatched = Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
    qualities_list = downloaded_or_snatched + Quality.ARCHIVED + [IGNORED]

    db = DBConnection()
    fields_to_select = ", ".join([
        "airdate", "airs", "description", "episode", "imdb_id", "e.indexer", "indexer_id",
        "name", "network", "paused", "quality", "runtime", "season", "show_name", "showid",
        "s.status",
    ])

    # Pass 1: episodes airing within the next seven days.
    results = db.select(
        "SELECT %s " % fields_to_select + "FROM tv_episodes e, tv_shows s "
        "WHERE season != 0 "
        "AND airdate >= ? "
        "AND airdate < ? "
        "AND s.indexer_id = e.showid "
        "AND e.status NOT IN (" + ",".join(["?"] * len(qualities_list)) + ")",
        [today, next_week] + qualities_list,
    )

    done_shows_list = [int(result[b"showid"]) for result in results]
    placeholder = ",".join(["?"] * len(done_shows_list))
    placeholder2 = ",".join(["?"] * len(downloaded_or_snatched))

    # Pass 2: for every show not already covered, its next airing after next week.
    results += db.select(
        "SELECT %s " % fields_to_select + "FROM tv_episodes e, tv_shows s "
        "WHERE season != 0 "
        "AND showid NOT IN (" + placeholder + ") "
        "AND s.indexer_id = e.showid "
        "AND airdate = (SELECT airdate "
        "FROM tv_episodes inner_e "
        "WHERE inner_e.season != 0 "
        "AND inner_e.showid = e.showid "
        "AND inner_e.airdate >= ? "
        "ORDER BY inner_e.airdate ASC LIMIT 1) "
        "AND e.status NOT IN (" + placeholder2 + ")",
        done_shows_list + [next_week] + downloaded_or_snatched,
    )

    # Pass 3: recently missed episodes still marked WANTED/UNAIRED.
    results += db.select(
        "SELECT %s " % fields_to_select + "FROM tv_episodes e, tv_shows s "
        "WHERE season != 0 "
        "AND s.indexer_id = e.showid "
        "AND airdate < ? "
        "AND airdate >= ? "
        "AND e.status IN (?,?) "
        "AND e.status NOT IN (" + ",".join(["?"] * len(qualities_list)) + ")",
        [today, recently, WANTED, UNAIRED] + qualities_list,
    )

    results = [dict(result) for result in results]

    for index, item in enumerate(results):
        results[index][b"localtime"] = sbdatetime.convert_to_setting(
            parse_date_time(item[b"airdate"], item[b"airs"], item[b"network"]))

    # NOTE: cmp-style sort function passed positionally (Python 2 semantics).
    results.sort(ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = {category: [] for category in categories}

    for result in results:
        if result[b"paused"] and not paused:
            continue

        result[b"airs"] = str(result[b"airs"]).replace("am", " AM").replace("pm", " PM").replace(" ", " ")
        result[b"airdate"] = result[b"localtime"].toordinal()

        if result[b"airdate"] < today:
            category = "missed"
        elif result[b"airdate"] >= next_week:
            category = "later"
        elif result[b"airdate"] == today:
            category = "today"
        else:
            category = "soon"

        if len(categories) > 0 and category not in categories:
            continue

        if not result[b"network"]:
            result[b"network"] = ""

        result[b"quality"] = get_quality_string(result[b"quality"])
        result[b"airs"] = sbdatetime.sbftime(result[b"localtime"], t_preset=timeFormat).lstrip("0").replace(" 0", " ")
        result[b"weekday"] = 1 + date.fromordinal(result[b"airdate"]).weekday()
        result[b"tvdbid"] = result[b"indexer_id"]
        result[b"airdate"] = sbdatetime.sbfdate(result[b"localtime"], d_preset=dateFormat)
        result[b"localtime"] = result[b"localtime"].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def run(self, force=False):
    """
    Flip UNAIRED episodes whose airdate has passed to their new status and
    queue a daily search.

    Paused shows keep the show's default episode status; specials (season 0)
    are SKIPPED; with trakt rolling download enabled episodes are SKIPPED and
    ``updateWantedList()`` decides what becomes wanted.

    Fixes vs previous revision: the bare ``except:`` around the air-time
    parsing is narrowed, the unused ``UpdateWantedList`` local is removed,
    and comment typos are corrected. Behavior is otherwise unchanged.

    :param force: unused; kept for scheduler interface compatibility
    """
    if self.amActive:
        return
    self.amActive = True

    logger.log(u"Searching for new released episodes ...")

    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # With timezone data we can safely look one day ahead; without it, two.
    if network_timezones.network_dict:
        curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()

    curTime = datetime.datetime.now(network_timezones.sb_timezone)

    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
                             [common.UNAIRED, curDate])

    sql_l = []
    show = None

    for sqlEp in sqlResults:
        try:
            if not show or int(sqlEp["showid"]) != show.indexerid:
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show:
                continue

        except exceptions.MultipleShowObjectsException:
            logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
            continue

        try:
            end_time = (network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) +
                        datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)))

            # filter out any episodes that haven't aired yet
            if end_time > curTime:
                continue
        except (StandardError, Exception):
            # if an error occurred assume the episode hasn't aired yet
            continue

        ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
        with ep.lock:
            if ep.show.paused:
                ep.status = ep.show.default_ep_status
            elif ep.season == 0:
                logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED because is a special season")
                ep.status = common.SKIPPED
            elif sickbeard.TRAKT_USE_ROLLING_DOWNLOAD and sickbeard.USE_TRAKT:
                # trakt's wanted-list pass below decides whether this becomes wanted
                ep.status = common.SKIPPED
            else:
                logger.log(u"New episode %s airs today, setting to default episode status for this show: %s"
                           % (ep.prettyName(), common.statusStrings[ep.show.default_ep_status]))
                ep.status = ep.show.default_ep_status

            sql_l.append(ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)
    else:
        logger.log(u"No new released episodes found ...")

    sickbeard.traktRollingScheduler.action.updateWantedList()

    # queue episode for daily search
    dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
    sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

    self.amActive = False
def calendar(self):
    """Provide a subscribable iCal (RFC 5545) feed of upcoming episodes."""
    logger.log('Receiving iCal request from {ip}'.format(ip=self.request.remote_ip))

    # Accumulate the calendar as parts and join once at the end.
    parts = [
        'BEGIN:VCALENDAR\r\n',
        'VERSION:2.0\r\n',
        'X-WR-CALNAME:Medusa\r\n',
        'X-WR-CALDESC:Medusa\r\n',
        'PRODID://Medusa Upcoming Episodes//\r\n',
    ]

    future_weeks = try_int(self.get_argument('future', 52), 52)
    past_weeks = try_int(self.get_argument('past', 52), 52)

    # Limit dates
    past_date = (datetime.date.today() + datetime.timedelta(weeks=-past_weeks)).toordinal()
    future_date = (datetime.date.today() + datetime.timedelta(weeks=future_weeks)).toordinal()

    # Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
    main_db_con = db.DBConnection()
    calendar_shows = main_db_con.select(
        b'SELECT show_name, indexer_id, network, airs, runtime '
        b'FROM tv_shows '
        b'WHERE ( status = ? OR status = ? ) AND paused != 1',
        ('Continuing', 'Returning Series')
    )

    for show in calendar_shows:
        # Get all episodes of this show airing between today and next month
        episode_list = main_db_con.select(
            b'SELECT indexerid, name, season, episode, description, airdate '
            b'FROM tv_episodes '
            b'WHERE airdate >= ? AND airdate < ? AND showid = ?',
            (past_date, future_date, int(show[b'indexer_id']))
        )

        utc = tz.gettz('GMT')

        for episode in episode_list:
            air_date_time = network_timezones.parse_date_time(
                episode[b'airdate'], show[b'airs'], show[b'network']).astimezone(utc)
            air_date_time_end = air_date_time + datetime.timedelta(
                minutes=try_int(show[b'runtime'], 60))

            # One VEVENT per episode.
            parts.append('BEGIN:VEVENT\r\n')
            parts.append('DTSTART:{date}\r\n'.format(date=air_date_time.strftime('%Y%m%dT%H%M%SZ')))
            parts.append('DTEND:{date}\r\n'.format(date=air_date_time_end.strftime('%Y%m%dT%H%M%SZ')))
            if sickbeard.CALENDAR_ICONS:
                icon_url = 'https://cdn.pymedusa.com/images/ico/favicon-16.png'
                parts.append('X-GOOGLE-CALENDAR-CONTENT-ICON:{icon_url}\r\n'.format(icon_url=icon_url))
                parts.append('X-GOOGLE-CALENDAR-CONTENT-DISPLAY:CHIP\r\n')
            parts.append('SUMMARY: {show} - {season}x{episode} - {title}\r\n'.format(
                show=show[b'show_name'],
                season=episode[b'season'],
                episode=episode[b'episode'],
                title=episode[b'name'],
            ))
            parts.append('UID:Medusa-{date}-{show}-E{episode}S{season}\r\n'.format(
                date=datetime.date.today().isoformat(),
                show=show[b'show_name'].replace(' ', '-'),
                episode=episode[b'episode'],
                season=episode[b'season'],
            ))
            parts.append('DESCRIPTION: {date} on {network}'.format(
                date=show[b'airs'] or '(Unknown airs)',
                network=show[b'network'] or 'Unknown network',
            ))
            if episode[b'description']:
                parts.append(' \\n\\n {description}\r\n'.format(
                    description=episode[b'description'].splitlines()[0]))
            else:
                parts.append('\r\n')
            parts.append('END:VEVENT\r\n')

    # Ending the iCal
    parts.append('END:VCALENDAR')
    return ''.join(parts)
def get_coming_episodes(categories, sort, group, paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
    """
    Collect coming episodes, one query per show with a known next airdate.

    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    recently = (date.today() - timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()

    db = DBConnection(row_type='dict')
    fields_to_select = ', '.join([
        'airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
        'indexer_id', 'name', 'network', 'paused', 'quality', 'runtime', 'season', 'show_name',
        'showid', 's.status'
    ])

    # One WANTED/UNAIRED query per show that actually has a next airdate.
    sql_l = []
    for show_obj in sickbeard.showList:
        next_air_date = show_obj.nextEpisode()
        if not next_air_date:
            continue
        sql_l.append([
            'SELECT DISTINCT {0} '.format(fields_to_select) +
            'FROM tv_episodes e, tv_shows s '
            'WHERE showid = ? '
            'AND airdate <= ? '
            'AND airdate >= ? '
            'AND s.indexer_id = e.showid '
            'AND e.status IN (' + ','.join(['?'] * 2) + ')',
            [show_obj.indexerid, next_air_date, recently, WANTED, UNAIRED]
        ])

    results = []
    for query, params in sql_l:
        if results:
            results += db.select(query, params)
        else:
            results = db.select(query, params)

    for index, item in enumerate(results):
        results[index][b'localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item[b'airdate'], item[b'airs'], item[b'network']))

    # Pre-sort by air time, then apply the requested (cmp-style, Python 2) sort.
    results.sort(key=itemgetter(b'localtime'))
    results.sort(ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result[b'paused'] and not paused:
            continue

        result[b'airs'] = str(result[b'airs']).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
        result[b'airdate'] = result[b'localtime'].toordinal()

        if result[b'airdate'] < today:
            category = 'missed'
        elif result[b'airdate'] >= next_week:
            category = 'later'
        elif result[b'airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result[b'network']:
            result[b'network'] = ''

        result[b'quality'] = get_quality_string(result[b'quality'])
        result[b'airs'] = sbdatetime.sbftime(result[b'localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result[b'weekday'] = 1 + result[b'localtime'].weekday()
        result[b'tvdbid'] = result[b'indexer_id']
        result[b'airdate'] = sbdatetime.sbfdate(result[b'localtime'], d_preset=dateFormat)
        result[b'localtime'] = result[b'localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def run(self, force=False):
    """
    Flip UNAIRED episodes whose airdate has passed to SKIPPED/WANTED and
    queue a daily search.

    Fixes vs previous revision (behavior otherwise unchanged):
    - bare ``except:`` narrowed;
    - "No new released episodes found ..." was a ``for``/``else`` clause and
      (with no ``break`` in the loop) logged on every run — now logged only
      when nothing was found;
    - ``results[0]`` crashed with IndexError when the show had no prior
      SKIPPED episode in trakt rolling mode — an empty result now means
      "nothing earlier was skipped" and falls through to the season check;
    - comment typos corrected.

    :param force: unused; kept for scheduler interface compatibility
    """
    self.amActive = True

    logger.log(u"Searching for new released episodes ...")

    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # With timezone data we can safely look one day ahead; without it, two.
    if network_timezones.network_dict:
        curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()

    curTime = datetime.datetime.now(network_timezones.sb_timezone)

    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
                             [common.UNAIRED, curDate])

    sql_l = []
    show = None

    for sqlEp in sqlResults:
        try:
            if not show or int(sqlEp["showid"]) != show.indexerid:
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show:
                continue

        except exceptions.MultipleShowObjectsException:
            logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
            continue

        try:
            end_time = (network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) +
                        datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)))

            # filter out any episodes that haven't aired yet
            if end_time > curTime:
                continue
        except (StandardError, Exception):
            # if an error occurred assume the episode hasn't aired yet
            continue

        ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
        with ep.lock:
            if ep.show.paused:
                ep.status = common.SKIPPED
            else:
                # First still-SKIPPED episode of this show, if any (trakt rolling mode).
                myDB = db.DBConnection()
                sql_selection = "SELECT show_name, indexer_id, season, episode, paused FROM (SELECT * FROM tv_shows s,tv_episodes e WHERE s.indexer_id = e.showid) T1 WHERE T1.paused = 0 and T1.episode_id IN (SELECT T2.episode_id FROM tv_episodes T2 WHERE T2.showid = T1.indexer_id and T2.status in (?) ORDER BY T2.season,T2.episode LIMIT 1) and airdate is not null and indexer_id = ? ORDER BY T1.show_name,season,episode"
                results = myDB.select(sql_selection, [common.SKIPPED, sqlEp["showid"]])

                if not sickbeard.TRAKT_USE_ROLLING_DOWNLOAD:
                    if ep.season == 0:
                        logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED, due to trakt integration")
                        ep.status = common.SKIPPED
                    else:
                        logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
                        ep.status = common.WANTED
                else:
                    # Skip this episode only if an earlier episode of the show is still SKIPPED.
                    earlier_skipped = False
                    if results:
                        sn_sk = results[0]["season"]
                        ep_sk = results[0]["episode"]
                        earlier_skipped = (int(sn_sk) * 100 + int(ep_sk)) < (int(sqlEp["season"]) * 100 + int(sqlEp["episode"]))

                    if earlier_skipped:
                        logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED, due to trakt integration")
                        ep.status = common.SKIPPED
                    else:
                        if ep.season == 0:
                            logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED, due to trakt integration")
                            ep.status = common.SKIPPED
                        else:
                            logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
                            ep.status = common.WANTED

            sql_l.append(ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)
    else:
        logger.log(u"No new released episodes found ...")

    # queue episode for daily search
    dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
    sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

    self.amActive = False
def calendar(self):
    """Provide a subscribable iCal (RFC 5545) feed of upcoming episodes."""
    logger.log("Receiving iCal request from {0}".format(self.request.remote_ip))

    # Calendar header.
    ical = 'BEGIN:VCALENDAR\r\n'
    ical += 'VERSION:2.0\r\n'
    ical += 'X-WR-CALNAME:SickChill\r\n'
    ical += 'X-WR-CALDESC:SickChill\r\n'
    ical += 'PRODID://Sick-Beard Upcoming Episodes//\r\n'

    future_weeks = try_int(self.get_argument('future', 52), 52)
    past_weeks = try_int(self.get_argument('past', 52), 52)

    # Limit dates
    past_date = (datetime.date.today() + datetime.timedelta(weeks=-past_weeks)).toordinal()
    future_date = (datetime.date.today() + datetime.timedelta(weeks=future_weeks)).toordinal()

    # Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
    main_db_con = db.DBConnection()
    # noinspection PyPep8
    calendar_shows = main_db_con.select(
        "SELECT show_name, indexer_id, network, airs, runtime FROM tv_shows WHERE ( status = 'Continuing' OR status = 'Returning Series' ) AND paused != '1'"
    )

    for show in calendar_shows:
        # Get all episodes of this show airing between today and next month
        episode_list = main_db_con.select(
            "SELECT indexerid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?",
            (past_date, future_date, int(show[b"indexer_id"])))

        utc = tz.gettz('GMT')

        for episode in episode_list:
            air_date_time = network_timezones.parse_date_time(
                episode[b'airdate'], show[b"airs"], show[b'network']).astimezone(utc)
            air_date_time_end = air_date_time + datetime.timedelta(
                minutes=try_int(show[b"runtime"], 60))

            # One VEVENT per episode.
            ical += 'BEGIN:VEVENT\r\n'
            ical += 'DTSTART:{0}T{1}Z\r\n'.format(
                air_date_time.strftime("%Y%m%d"), air_date_time.strftime("%H%M%S"))
            ical += 'DTEND:{0}T{1}Z\r\n'.format(
                air_date_time_end.strftime("%Y%m%d"), air_date_time_end.strftime("%H%M%S"))
            if sickbeard.CALENDAR_ICONS:
                # noinspection PyPep8
                ical += 'X-GOOGLE-CALENDAR-CONTENT-ICON:https://lh3.googleusercontent.com/-Vp_3ZosvTgg/VjiFu5BzQqI/AAAAAAAA_TY/3ZL_1bC0Pgw/s16-Ic42/SickChill.png\r\n'
                ical += 'X-GOOGLE-CALENDAR-CONTENT-DISPLAY:CHIP\r\n'
            ical += 'SUMMARY: {0} - {1}x{2} - {3}\r\n'.format(
                show[b'show_name'], episode[b'season'], episode[b'episode'], episode[b'name'])
            ical += 'UID:SickChill-{0}-{1}-E{2}S{3}\r\n'.format(
                str(datetime.date.today().isoformat()),
                show[b'show_name'].replace(" ", "-"),
                str(episode[b'episode']),
                str(episode[b'season']))
            if episode[b'description']:
                ical += 'DESCRIPTION: {0} on {1} \\n\\n {2}\r\n'.format(
                    (show[b'airs'] or '(Unknown airs)'),
                    (show[b'network'] or 'Unknown network'),
                    episode[b'description'].splitlines()[0])
            else:
                ical += 'DESCRIPTION:{0} on {1}\r\n'.format(
                    (show[b'airs'] or '(Unknown airs)'),
                    (show[b'network'] or 'Unknown network'))
            ical += 'END:VEVENT\r\n'

    # Ending the iCal
    ical += 'END:VCALENDAR'

    return ical
def run(self, force=False):  # pylint:disable=too-many-branches
    """
    Runs the daily searcher, queuing selected episodes for search

    :param force: Force search
    """
    # Bail out if a daily or forced search is already underway.
    if self.amActive:
        logger.log('Daily search is still running, not starting it again', logger.DEBUG)
        return
    elif sickbeard.forcedSearchQueueScheduler.action.is_forced_search_in_progress() and not force:
        logger.log('Manual search is running. Can\'t start Daily search', logger.WARNING)
        return

    self.amActive = True
    logger.log('Searching for newly released episodes ...')

    if not network_dict:
        update_network_dict()

    cur_time = datetime.now(sb_timezone)
    # With timezone data we can safely look one day ahead; without it, two.
    cur_date = (date.today() + timedelta(days=1 if network_dict else 2)).toordinal()

    main_db_con = DBConnection()
    episodes_from_db = main_db_con.select(
        b'SELECT showid, airdate, season, episode '
        b'FROM tv_episodes '
        b'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cur_date])

    new_releases = []
    show = None

    for db_episode in episodes_from_db:
        try:
            show_id = int(db_episode[b'showid'])
            if not show or show_id != show.indexerid:
                show = Show.find(sickbeard.showList, show_id)

            # for when there is orphaned series in the database but not loaded into our show list
            if not show or show.paused:
                continue

        except MultipleShowObjectsException:
            logger.log('ERROR: expected to find a single show matching {id}'.format(id=show_id))
            continue

        if show.airs and show.network:
            # This is how you assure it is always converted to local time
            air_time = parse_date_time(db_episode[b'airdate'], show.airs, show.network)
            end_time = air_time.astimezone(sb_timezone) + timedelta(minutes=try_int(show.runtime, 60))

            # filter out any episodes that haven't finished airing yet,
            if end_time > cur_time:
                continue

        cur_ep = show.get_episode(db_episode[b'season'], db_episode[b'episode'])
        with cur_ep.lock:
            # Specials stay SKIPPED; everything else takes the show's default status.
            cur_ep.status = show.default_ep_status if cur_ep.season else common.SKIPPED
            logger.log('Setting status ({status}) for show airing today: {name} {special}'.format(
                name=cur_ep.pretty_name(),
                status=common.statusStrings[cur_ep.status],
                special='(specials are not supported)' if not cur_ep.season else '',
            ))
            new_releases.append(cur_ep.get_sql())

    if new_releases:
        main_db_con = DBConnection()
        main_db_con.mass_action(new_releases)
    else:
        logger.log('No newly released episodes found ...')

    # queue episode for daily search
    sickbeard.searchQueueScheduler.action.add_item(DailySearchQueueItem())

    self.amActive = False
def run(self, force=False):  # pylint:disable=too-many-branches
    """
    Runs the daily searcher, queuing selected episodes for search

    :param force: Force search
    """
    if self.amActive:
        return
    self.amActive = True
    _ = force  # scheduler passes it; this searcher ignores it

    logger.log(u"Searching for new released episodes ...")

    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # With timezone data we can safely look one day ahead; without it, two.
    if network_timezones.network_dict:
        curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()

    curTime = datetime.datetime.now(network_timezones.sb_timezone)

    main_db_con = db.DBConnection()
    sql_results = main_db_con.select(
        "SELECT showid, airdate, season, episode FROM tv_episodes WHERE status = ? AND (airdate <= ? and airdate > 1)",
        [common.UNAIRED, curDate])

    sql_l = []
    show = None

    for sqlEp in sql_results:
        try:
            if not show or int(sqlEp["showid"]) != show.indexerid:
                show = Show.find(sickbeard.showList, int(sqlEp["showid"]))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show or show.paused:
                continue

        except MultipleShowObjectsException:
            logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
            continue

        if show.airs and show.network:
            # This is how you assure it is always converted to local time
            air_time = network_timezones.parse_date_time(
                sqlEp['airdate'], show.airs, show.network).astimezone(network_timezones.sb_timezone)

            # filter out any episodes that haven't started airing yet,
            # but set them to the default status while they are airing
            # so they are snatched faster
            if air_time > curTime:
                continue

        ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
        with ep.lock:
            if ep.season == 0:
                logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED because is a special season")
                ep.status = common.SKIPPED
            else:
                logger.log(u"New episode %s airs today, setting to default episode status for this show: %s"
                           % (ep.prettyName(), common.statusStrings[ep.show.default_ep_status]))
                ep.status = ep.show.default_ep_status

            sql_l.append(ep.get_sql())

    if len(sql_l) > 0:
        main_db_con = db.DBConnection()
        main_db_con.mass_action(sql_l)
    else:
        logger.log(u"No new released episodes found ...")

    # queue episode for daily search
    dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
    sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

    self.amActive = False
def run(self, force=False):
    """
    Runs the daily searcher, queuing selected episodes for search

    :param force: Force search
    """
    if self.amActive:
        return
    self.amActive = True

    logger.log(u"Searching for new released episodes ...")

    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # With timezone data we can safely look one day ahead; without it, two.
    if network_timezones.network_dict:
        curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()

    curTime = datetime.datetime.now(network_timezones.sb_timezone)

    myDB = db.DBConnection()
    sqlResults = myDB.select(
        "SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND (airdate <= ? and airdate > 1)",
        [common.UNAIRED, curDate])

    sql_l = []
    show = None

    for sqlEp in sqlResults:
        try:
            if not show or int(sqlEp["showid"]) != show.indexerid:
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show or show.paused:
                continue

        except MultipleShowObjectsException:
            logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
            continue

        if show.airs and show.network:
            # This is how you assure it is always converted to local time
            air_time = network_timezones.parse_date_time(
                sqlEp['airdate'], show.airs, show.network).astimezone(network_timezones.sb_timezone)

            # filter out any episodes that haven't started airing yet,
            # but set them to the default status while they are airing
            # so they are snatched faster
            if air_time > curTime:
                continue

        ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
        with ep.lock:
            if ep.season == 0:
                logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED because is a special season")
                ep.status = common.SKIPPED
            else:
                logger.log(u"New episode %s airs today, setting to default episode status for this show: %s"
                           % (ep.prettyName(), common.statusStrings[ep.show.default_ep_status]))
                ep.status = ep.show.default_ep_status

            sql_l.append(ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)
    else:
        logger.log(u"No new released episodes found ...")

    # queue episode for daily search
    dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
    sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

    self.amActive = False
def run(self, force=False):
    """
    Flip UNAIRED episodes whose airdate has passed to SKIPPED/WANTED and
    queue a daily search.

    Fixes vs previous revision (behavior otherwise unchanged):
    - bare ``except:`` narrowed;
    - "No new released episodes found ..." was a ``for``/``else`` clause and
      (with no ``break`` in the loop) logged on every run — now logged only
      when nothing was found;
    - ``results[0]`` crashed with IndexError when the show had no prior
      SKIPPED episode in trakt rolling mode — an empty result now means
      "nothing earlier was skipped" and falls through to the season check;
    - comment typos corrected.

    :param force: unused; kept for scheduler interface compatibility
    """
    self.amActive = True

    logger.log(u"Searching for new released episodes ...")

    if not network_timezones.network_dict:
        network_timezones.update_network_dict()

    # With timezone data we can safely look one day ahead; without it, two.
    if network_timezones.network_dict:
        curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    else:
        curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()

    curTime = datetime.datetime.now(network_timezones.sb_timezone)

    myDB = db.DBConnection()
    sqlResults = myDB.select(
        "SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
        [common.UNAIRED, curDate])

    sql_l = []
    show = None

    for sqlEp in sqlResults:
        try:
            if not show or int(sqlEp["showid"]) != show.indexerid:
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))

            # for when there is orphaned series in the database but not loaded into our showlist
            if not show:
                continue

        except exceptions.MultipleShowObjectsException:
            logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
            continue

        try:
            end_time = (network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) +
                        datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)))

            # filter out any episodes that haven't aired yet
            if end_time > curTime:
                continue
        except (StandardError, Exception):
            # if an error occurred assume the episode hasn't aired yet
            continue

        ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
        with ep.lock:
            if ep.show.paused:
                ep.status = common.SKIPPED
            else:
                # First still-SKIPPED episode of this show, if any (trakt rolling mode).
                myDB = db.DBConnection()
                sql_selection = "SELECT show_name, indexer_id, season, episode, paused FROM (SELECT * FROM tv_shows s,tv_episodes e WHERE s.indexer_id = e.showid) T1 WHERE T1.paused = 0 and T1.episode_id IN (SELECT T2.episode_id FROM tv_episodes T2 WHERE T2.showid = T1.indexer_id and T2.status in (?) ORDER BY T2.season,T2.episode LIMIT 1) and airdate is not null and indexer_id = ? ORDER BY T1.show_name,season,episode"
                results = myDB.select(sql_selection, [common.SKIPPED, sqlEp["showid"]])

                if not sickbeard.TRAKT_USE_ROLLING_DOWNLOAD:
                    if ep.season == 0:
                        logger.log(
                            u"New episode " + ep.prettyName() +
                            " airs today, setting status to SKIPPED, due to trakt integration"
                        )
                        ep.status = common.SKIPPED
                    else:
                        logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
                        ep.status = common.WANTED
                else:
                    # Skip this episode only if an earlier episode of the show is still SKIPPED.
                    earlier_skipped = False
                    if results:
                        sn_sk = results[0]["season"]
                        ep_sk = results[0]["episode"]
                        earlier_skipped = (int(sn_sk) * 100 + int(ep_sk)) < (int(sqlEp["season"]) * 100 + int(sqlEp["episode"]))

                    if earlier_skipped:
                        logger.log(
                            u"New episode " + ep.prettyName() +
                            " airs today, setting status to SKIPPED, due to trakt integration"
                        )
                        ep.status = common.SKIPPED
                    else:
                        if ep.season == 0:
                            logger.log(
                                u"New episode " + ep.prettyName() +
                                " airs today, setting status to SKIPPED, due to trakt integration"
                            )
                            ep.status = common.SKIPPED
                        else:
                            logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
                            ep.status = common.WANTED

            sql_l.append(ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)
    else:
        logger.log(u"No new released episodes found ...")

    # queue episode for daily search
    dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
    sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

    self.amActive = False