def tz_fallback(t):
    # type: (...) -> datetime.tzinfo
    """Return *t* unchanged when it is already a tzinfo; otherwise fall back
    to the platform's local timezone (tzwinlocal on Windows, tzlocal elsewhere).
    """
    if isinstance(t, datetime.tzinfo):
        return t
    return tz.tzwinlocal() if is_win else tz.tzlocal()
def _update_zoneinfo():
    """Check the remote zoneinfo version file and, when a newer dateutil
    zoneinfo tarball is published, download and install it.

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, write the new one next to ``lib.dateutil.zoneinfo`` and
    reload that module.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None

    # version file format: "<tarball filename> <md5>".  Split from the right
    # exactly once so a stray extra space cannot raise on tuple unpacking.
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ', 1)

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return  # already up to date

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/' + new_zoneinfo
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp download, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
        return
from sickbeard import logger
from sickbeard import encodingKludge as ek
from os.path import basename, join, isfile
import os
import re
import time
import datetime

# regex to parse time (12/24 hour format)
time_regex = re.compile(r"(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b", flags=re.IGNORECASE)
am_regex = re.compile(r"(A[. ]? ?M)", flags=re.IGNORECASE)
pm_regex = re.compile(r"(P[. ]? ?M)", flags=re.IGNORECASE)

# cache of the network -> timezone mapping; populated elsewhere in this module
network_dict = None

# NOTE(review): `tz` is not among the imports visible here — presumably
# imported from dateutil elsewhere in the file; confirm.
sb_timezone = tz.tzlocal()


# helper to remove failed temp download
def _remove_zoneinfo_failed(filename):
    # Best-effort delete of a partially-downloaded zoneinfo temp file;
    # failure to remove it is deliberately ignored.
    try:
        ek.ek(os.remove, filename)
    except:
        pass


# helper to remove old unneeded zoneinfo files
def _remove_old_zoneinfo():
    # Resolve the filename of the currently-loaded zoneinfo tarball;
    # nothing to clean up when dateutil has no zoneinfo file at all.
    # NOTE(review): as visible here the function ends after this check —
    # the cleanup body may live beyond this chunk; confirm.
    if (lib.dateutil.zoneinfo.ZONEINFOFILE is not None):
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        return
def _update_zoneinfo():
    """Check the remote zoneinfo version file (github raw URL variant) and,
    when a newer dateutil zoneinfo tarball is published, download and
    install it.

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, write the new one next to ``lib.dateutil.zoneinfo`` and
    reload that module.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://github.com/Prinz23/sb_network_timezones/raw/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None

    # version file format: "<tarball filename> <md5>".  Split from the right
    # exactly once so a stray extra space cannot raise on tuple unpacking.
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ', 1)

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return  # already up to date

    # now load the new zoneinfo
    url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp download, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
        return
from sickbeard import helpers
from sickbeard import logger
from sickbeard import encodingKludge as ek
from os.path import basename, join, isfile
import os
import re
import datetime

# regex to parse time (12/24 hour format)
time_regex = re.compile(r"(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b", flags=re.IGNORECASE)
am_regex = re.compile(r"(A[. ]? ?M)", flags=re.IGNORECASE)
pm_regex = re.compile(r"(P[. ]? ?M)", flags=re.IGNORECASE)

# cache of the network -> timezone mapping; populated elsewhere in this module
network_dict = None

# NOTE(review): `tz` is not among the imports visible here — presumably
# imported from dateutil elsewhere in the file; confirm.
sb_timezone = tz.tzlocal()


# helper to remove failed temp download
def _remove_zoneinfo_failed(filename):
    # Best-effort delete of a partially-downloaded zoneinfo temp file;
    # failure to remove it is deliberately ignored.
    try:
        ek.ek(os.remove,filename)
    except:
        pass


# helper to remove old unneeded zoneinfo files
def _remove_old_zoneinfo():
    # Resolve the filename of the currently-loaded zoneinfo tarball;
    # nothing to clean up when dateutil has no zoneinfo file at all.
    # NOTE(review): as visible here the function ends after this check —
    # the cleanup body may live beyond this chunk; confirm.
    if (lib.dateutil.zoneinfo.ZONEINFOFILE is not None):
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        return
def _update_zoneinfo():
    """Check the remote zoneinfo version file and, when the published tz
    database version differs from the installed one, download and install
    the new dateutil zoneinfo tarball.

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, write the new one next to ``zoneinfo`` and clear dateutil's
    cached tz instances.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(
            u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
            logger.WARNING)
        return

    zonefilename = zoneinfo._ZONEFILENAME
    cur_zoneinfo = zonefilename
    if cur_zoneinfo is not None:
        cur_zoneinfo = ek.ek(basename, zonefilename)

    # version file format: "<tarball filename> <md5>".  Split from the right
    # exactly once so a stray extra space cannot raise on tuple unpacking.
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ', 1)

    # Install path: keep the current tarball name when one exists, otherwise
    # fall back to the new name.  (The original joined with None here and
    # crashed when no zoneinfo file was installed yet.)
    zonefile = helpers.real_path(ek.ek(
        join, ek.ek(os.path.dirname, zoneinfo.__file__),
        cur_zoneinfo if cur_zoneinfo is not None else new_zoneinfo))
    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None

    # extract the tz database version (year + release letter) from the filename
    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
    if not newtz_regex or len(newtz_regex.groups()) != 1:
        return
    newtzversion = newtz_regex.group(1)

    if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata \
            and zonemetadata['tzversion'] == newtzversion:
        return  # already on this tz database version

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # drop dateutil's cached tz instances so the new database takes effect
            from dateutil.zoneinfo import gettz
            if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
                gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp download, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(
            u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
def _update_zoneinfo():
    """Check the remote zoneinfo version file and, when a newer dateutil
    zoneinfo tarball is published, download and install it.

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, write the new one next to ``lib.dateutil.zoneinfo`` and
    reload that module.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    try:
        url_data = helpers.getURL(url_zv)
        if not url_data:
            # explicit exception instead of the original bare `raise`, which
            # has no active exception to re-raise at this point
            raise ValueError('no data returned for zoneinfo.txt')

        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        # version file format: "<tarball filename> <md5>".  Split from the
        # right exactly once so a stray extra space cannot raise on unpacking.
        (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ', 1)
    except Exception:
        # When urlData is None, trouble connecting to github
        logger.log(
            u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
            logger.WARNING)
        return

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return  # already up to date

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo
    zonefile = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp download, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(
            u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
def _update_zoneinfo():
    """Install a newer dateutil zoneinfo tarball shipped in the bundled
    ``lib/network_timezones`` subtree (local-file variant; no network).

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, copy the new one next to ``lib.dateutil.zoneinfo`` and
    reload that module.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # TODO `git subtree pull` commands on updates
    loc_zv = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/zoneinfo.txt'))

    # Read version file
    try:
        # renamed from `file`, which shadowed the Python 2 builtin
        with open(loc_zv, 'r') as zv_file:
            data = zv_file.read()
        if not data:
            # explicit exception instead of the original bare `raise`, which
            # has no active exception to re-raise at this point
            raise ValueError('zoneinfo.txt is empty')

        # Filename of existing zoneinfo
        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        # Filename and hash of new zoneinfo; format "<tarball filename> <md5>".
        # Split from the right exactly once so a stray extra space cannot
        # raise on tuple unpacking.
        (new_zoneinfo, zoneinfo_md5) = data.decode('utf-8').strip().rsplit(u' ', 1)
    except Exception as e:
        logger.log(u'Crazy problem with zoneinfo: %s' % ex(e), logger.ERROR)
        return

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return  # already up to date

    # now load the new zoneinfo
    loc_tar = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/%s' % new_zoneinfo))
    zonefile = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.copyFile(loc_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp copy, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(
            u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
def _update_zoneinfo():
    """Check the remote zoneinfo version file and, when a newer dateutil
    zoneinfo tarball is published, download and install it.

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, write the new one next to ``lib.dateutil.zoneinfo`` and
    reload that module.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
                   logger.WARNING)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None

    # version file format: "<tarball filename> <md5>".  Split from the right
    # exactly once so a stray extra space cannot raise on tuple unpacking.
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ', 1)

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return  # already up to date

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp download, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
def tz_fallback(t):
    """Return *t* when it is already a tzinfo instance; otherwise fall back
    to the system's local timezone."""
    if isinstance(t, datetime.tzinfo):
        return t
    return tz.tzlocal()
def _update_zoneinfo():
    """Install a newer dateutil zoneinfo tarball shipped in the bundled
    ``lib/network_timezones`` subtree (local-file variant; no network).

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, copy the new one next to ``lib.dateutil.zoneinfo`` and
    reload that module.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # TODO `git subtree pull` commands on updates
    loc_zv = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__),
                                     u'../lib/network_timezones/zoneinfo.txt'))

    # Read version file
    try:
        # renamed from `file`, which shadowed the Python 2 builtin
        with open(loc_zv, 'r') as zv_file:
            data = zv_file.read()
        if not data:
            # explicit exception instead of the original bare `raise`, which
            # has no active exception to re-raise at this point
            raise ValueError('zoneinfo.txt is empty')

        # Filename of existing zoneinfo
        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        # Filename and hash of new zoneinfo; format "<tarball filename> <md5>".
        # Split from the right exactly once so a stray extra space cannot
        # raise on tuple unpacking.
        (new_zoneinfo, zoneinfo_md5) = data.decode('utf-8').strip().rsplit(u' ', 1)
    except Exception as e:
        logger.log(u'Crazy problem with zoneinfo: %s' % ex(e), logger.ERROR)
        return

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return  # already up to date

    # now load the new zoneinfo
    loc_tar = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__),
                                      u'../lib/network_timezones/%s' % new_zoneinfo))
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.copyFile(loc_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp copy, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
def _update_zoneinfo():
    """Check the remote zoneinfo version file and, when the published tz
    database version differs from the installed one, download and install
    the new dateutil zoneinfo tarball.

    Side effects: refreshes the module-global ``sb_timezone``, may remove the
    old tarball, write the new one next to ``zoneinfo`` and clear dateutil's
    cached tz instances.  Returns None.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
                   logger.WARNING)
        return

    zonefilename = zoneinfo._ZONEFILENAME
    cur_zoneinfo = zonefilename
    if cur_zoneinfo is not None:
        cur_zoneinfo = ek.ek(basename, zonefilename)

    # version file format: "<tarball filename> <md5>".  Split from the right
    # exactly once so a stray extra space cannot raise on tuple unpacking.
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ', 1)

    # Install path: keep the current tarball name when one exists, otherwise
    # fall back to the new name.  (The original joined with None here and
    # crashed when no zoneinfo file was installed yet.)
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__),
                                       cur_zoneinfo if cur_zoneinfo is not None else new_zoneinfo))
    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None

    # extract the tz database version (year + release letter) from the filename
    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
    if not newtz_regex or len(newtz_regex.groups()) != 1:
        return
    newtzversion = newtz_regex.group(1)

    if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata \
            and zonemetadata['tzversion'] == newtzversion:
        return  # already on this tz database version

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # drop dateutil's cached tz instances so the new database takes effect
            from dateutil.zoneinfo import gettz
            if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
                gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())
            sb_timezone = tz.tzlocal()
        except Exception:
            # install failed part-way: discard the temp download, keep running
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return