def __init__(self, inifile=None, configs=None):
    if inifile is None:
        return None
    self.inifile = inifile
    self.cfg = ConfigParser()

    with FileLock(self.inifile):
        if exists(self.inifile):
            self.cfg.read(self.inifile)

        # initialize configurations
        default_configs = {} if configs is None else configs

        needupdate = False
        for sec, secdata in default_configs.items():
            if not self.cfg.has_section(sec):
                self.cfg.add_section(sec)
                needupdate = True
            for opt, val in secdata.items():
                if not self.cfg.has_option(sec, opt):
                    self.cfg.set(sec, opt, val)
                    needupdate = True

        # update ini file
        if needupdate:
            self.update(False)
def __init__(self, inifile='data/config.ini'):
    self.inifile = inifile
    self.cfg = ConfigParser()

    with FileLock(inifile):
        if os.path.exists(inifile):
            self.cfg.read(inifile)

        # initialize configurations
        # note: ConfigParser stores options as strings, so numeric defaults are quoted
        default_configs = {
            'server': {
                'ip': '*',
                'port': '8888',
                'forcehttps': 'off',  # force use https
                'lastcheckupdate': '0',
                'updateinfo': ''
            },
            'auth': {
                'username': '******',
                'password': '',  # empty password never validated
                'passwordcheck': 'on',
                'accesskey': '',  # empty access key never validated
                'accesskeyenable': 'off',
            },
            'runtime': {
                'mode': '',  # format: demo | dev | prod
                'loginlock': 'off',
                'loginfails': '0',
                'loginlockexpire': '0',
            },
            'file': {
                'lastdir': '/root',
                'lastfile': '',
            },
            'time': {
                'timezone': ''  # format: timezone = Asia/Shanghai
            },
            'ecs': {
                'accounts': ''
            },
            'inpanel': {
                'Instance Name': 'Access key'
            }
        }

        needupdate = False
        for sec, secdata in default_configs.items():
            if not self.cfg.has_section(sec):
                self.cfg.add_section(sec)
                needupdate = True
            for opt, val in secdata.items():
                if not self.cfg.has_option(sec, opt):
                    self.cfg.set(sec, opt, val)
                    needupdate = True

        # update ini file
        if needupdate:
            self.update(False)
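# Minimal usage sketch for the constructor above. It assumes the surrounding
# class is named Config and that ConfigParser/FileLock are imported at module
# level (e.g. from configparser and filelock); the real names may differ.
config = Config('data/config.ini')
port = config.cfg.get('server', 'port')                 # '8888' by default
https = config.cfg.getboolean('server', 'forcehttps')   # 'off' -> False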
def get_epg(config, location):
    epg_path = pathlib.Path(config['main']['cache_dir']).joinpath(str(location["DMA"]) + "_epg.xml")
    xml_lock = FileLock(str(epg_path) + '.lock')

    return_str = None
    with xml_lock:
        with open(epg_path, 'rb') as epg_file:
            return_str = epg_file.read().decode('utf-8')

    return return_str
def get_dma_stations_and_channels(config, location):
    dma_channels = None

    dma_channels_list_path = location["DMA"] + "_stations.json"
    dma_channels_list_file = pathlib.Path(config["main"]["cache_dir"]).joinpath("stations").joinpath(dma_channels_list_path)
    dma_channels_list_file_lock = FileLock(str(dma_channels_list_file) + ".lock")

    with dma_channels_list_file_lock:
        with open(dma_channels_list_file, "r") as dma_stations_file:
            dma_channels = json.load(dma_stations_file)

    return dma_channels
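# Sketch of the inputs the two cache readers above expect. The key names come
# from the code itself; the values here are placeholders only.
example_config = {'main': {'cache_dir': '/tmp/locast2plex_cache', 'epg_update_days': '7'}}
example_location = {'DMA': '501'}  # DMA code as a string

channels = get_dma_stations_and_channels(example_config, example_location)
for sid, info in channels.items():
    print(sid, info['channel'], info['friendlyName'])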
def update(self, lock=True):
    if lock:
        flock = FileLock(self.inifile)
        flock.acquire()
    try:
        with open(self.inifile, 'w') as inifp:
            self.cfg.write(inifp)
        if lock:
            flock.release()
        return True
    except Exception:
        if lock:
            flock.release()
        return False
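# Sketch: persisting a changed option with update(). Calling update() with the
# default lock=True takes the ini file lock itself; update(False) is what
# __init__ uses while it already holds the lock. The Config name is assumed,
# as in the earlier sketch.
cfg = Config('data/config.ini')
cfg.cfg.set('server', 'port', '8080')
cfg.update()  # returns True on success, False on any write error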
def generate_dma_stations_and_channels_file(config, locast, location, fcc_stations):
    station_list = locast.get_stations()
    final_channel_list = {}

    print("Found " + str(len(station_list)) + " stations for DMA " + str(location["DMA"]))

    fcc_market = get_dma_info(str(location["DMA"]))
    if not len(fcc_market):
        print("No DMA to FCC mapping found. Poke the developer to get it into locast2plex.")

    noneChannel = 1000

    for index, locast_station in enumerate(station_list):
        sid = locast_station['id']
        final_channel_list[sid] = {'callSign': locast_station['name']}

        if 'logo226Url' in locast_station.keys():
            final_channel_list[sid]['logoUrl'] = locast_station['logo226Url']
        elif 'logoUrl' in locast_station.keys():
            final_channel_list[sid]['logoUrl'] = locast_station['logoUrl']

        # check if this is a [channel] [station name] result in the callsign
        # whether the first char is a number (checking for result like "2.1 CBS")
        try:
            # if number, get the channel and name -- we're done!
            # Check if the callsign has a float (x.x) value. Save as a
            # string though, to preserve any trailing 0s as reported
            # on https://github.com/tgorgdotcom/locast2plex/issues/42
            assert (float(locast_station['callSign'].split()[0]))
            final_channel_list[sid]['channel'] = locast_station['callSign'].split()[0]
            final_channel_list[sid]['friendlyName'] = locast_station['callSign'].split()[1]

        except ValueError:
            # result like "WDPN" or "CBS" in the callsign field, or the callsign in the name field.
            # then we'll search the callsign in a few different lists to get the station channel.
            # note: callsign field usually has the most recent data if it contains an actual callsign
            skip_sub_id = False

            # callsign from "callsign" field
            callsign_result = detect_callsign(locast_station['callSign'])

            # callsign from "name" field - usually in "[callsign][TYPE][subchannel]" format
            # example: WABCDT2
            alt_callsign_result = detect_callsign(locast_station['name'])

            # check the known station json that we maintain whenever locast's
            # reported station is iffy
            with open("known_stations.json", "r") as known_stations_file_obj:
                known_stations = json.load(known_stations_file_obj)

            # first look via "callsign" value
            ks_result = find_known_station(locast_station, 'callSign', known_stations)
            if ks_result is not None:
                final_channel_list[sid]['channel'] = ks_result['channel']
                skip_sub_id = ks_result['skip_sub']

            # then check "name"
            if 'channel' not in final_channel_list[sid]:
                ks_result = find_known_station(locast_station, 'name', known_stations)
                if ks_result is not None:
                    final_channel_list[sid]['channel'] = ks_result['channel']
                    skip_sub_id = ks_result['skip_sub']

            # if we couldn't find anything, look through the fcc list for a match,
            # first by searching the callsign found in the "callsign" field
            if ('channel' not in final_channel_list[sid]) and callsign_result['verified']:
                for market_item in fcc_market:
                    result = find_fcc_station(callsign_result['callsign'], market_item["fcc_dma_str"], fcc_stations)
                    if result is not None:
                        final_channel_list[sid]['channel'] = result['channel']
                        skip_sub_id = result['analog']
                        break

            # if we still couldn't find it, see if there's a match via the "name" field
            if ('channel' not in final_channel_list[sid]) and alt_callsign_result['verified']:
                for market_item in fcc_market:
                    result = find_fcc_station(alt_callsign_result['callsign'], market_item["fcc_dma_str"], fcc_stations)
                    if result is not None:
                        final_channel_list[sid]['channel'] = result['channel']
                        skip_sub_id = result['analog']
                        break

            # locast usually adds a number in its callsign (in either field).
            # that number is the subchannel
            if (not skip_sub_id) and ('channel' in final_channel_list[sid]):
                if callsign_result['verified'] and (callsign_result['subchannel'] is not None):
                    final_channel_list[sid]['channel'] = final_channel_list[sid]['channel'] + '.' + callsign_result['subchannel']
                elif alt_callsign_result['verified'] and (alt_callsign_result['subchannel'] is not None):
                    final_channel_list[sid]['channel'] = final_channel_list[sid]['channel'] + '.' + alt_callsign_result['subchannel']
                else:
                    final_channel_list[sid]['channel'] = final_channel_list[sid]['channel'] + '.1'

            # mark stations that did not get a channel, but outside of the normal range.
            # the user will have to weed these out in Plex...
            if 'channel' not in final_channel_list[sid]:
                final_channel_list[sid]['channel'] = str(noneChannel)
                noneChannel = noneChannel + 1

            final_channel_list[sid]['friendlyName'] = locast_station['callSign']

    dma_channels_list_path = location["DMA"] + "_stations.json"
    dma_channels_list_file = pathlib.Path(config["main"]["cache_dir"]).joinpath("stations").joinpath(dma_channels_list_path)
    dma_channels_list_file_lock = FileLock(str(dma_channels_list_file) + ".lock")

    with dma_channels_list_file_lock:
        with open(dma_channels_list_file, "w") as dma_stations_file:
            json.dump(final_channel_list, dma_stations_file, indent=4)
def get_fcc_stations(config):
    fcc_cache_dir = pathlib.Path(config["main"]["cache_dir"]).joinpath("stations")
    facility_url = 'https://transition.fcc.gov/Bureaus/MB/Databases/cdbs/facility.zip'
    facility_zip_dl_path = pathlib.Path(fcc_cache_dir).joinpath("facility.zip")
    fcc_unzipped_dat = pathlib.Path(fcc_cache_dir).joinpath("facility.dat")
    fcc_cached_file = pathlib.Path(fcc_cache_dir).joinpath("tv_facilities.json")
    fcc_cached_file_lock = pathlib.Path(fcc_cache_dir).joinpath("tv_facilities.json.lock")

    why_download = None

    if not os.path.exists(facility_zip_dl_path):
        why_download = "FCC facilities database cache missing."
    else:
        print("Checking FCC facilities database for updates.")
        offline_file_time = get_offline_file_time(facility_zip_dl_path)
        online_file_time = get_online_file_time(facility_url)

        if not offline_file_time <= online_file_time:
            print("Cached facilities database is current.")
        else:
            why_download = "Online facilities database is newer."

    if why_download:
        print(why_download + ' Downloading the latest FCC facilities database...')

        # remove old copies of zip and dat
        if os.path.exists(facility_zip_dl_path):
            os.remove(facility_zip_dl_path)
        if os.path.exists(fcc_unzipped_dat):
            os.remove(fcc_unzipped_dat)

        if not os.path.exists(facility_zip_dl_path):
            with urllib.request.urlopen(facility_url, context=fcc_ssl_context) as fcc_facility_net:
                fcc_facility_data = fcc_facility_net.read()
            with open(facility_zip_dl_path, 'wb') as fcc_facility_file:
                fcc_facility_file.write(fcc_facility_data)

        if (not os.path.exists(fcc_unzipped_dat)) and os.path.exists(facility_zip_dl_path):
            print('Unzipping FCC facilities database...')
            with zipfile.ZipFile(facility_zip_dl_path, 'r') as zip_ref:
                zip_ref.extractall(fcc_cache_dir)

    # make sure the fcc data is not corrupted (if the file isn't as big as we expect)
    if os.path.exists(fcc_unzipped_dat) and os.path.getsize(fcc_unzipped_dat) > 7000000:
        print('Reading and formatting FCC database...')

        with open(fcc_unzipped_dat, "r") as fac_file:
            lines = fac_file.readlines()

        facility_list = []
        for fac_line in lines:
            formatteddict = fcc_db_format(fac_line)
            if formatteddict:
                facility_list.append(formatteddict)

        print('Found ' + str(len(facility_list)) + ' stations.')

        facility_json = {"fcc_station_list": facility_list}

        json_file_lock = FileLock(fcc_cached_file_lock)
        with json_file_lock:
            if os.path.exists(fcc_cached_file):
                os.remove(fcc_cached_file)
            with open(fcc_cached_file, "w") as write_file:
                json.dump(facility_json, write_file, indent=4, sort_keys=True)

        return facility_list

    else:
        json_file_lock = FileLock(fcc_cached_file_lock)
        with json_file_lock:
            with open(fcc_cached_file, "r") as fcc_station_file_obj:
                fcc_stations = json.load(fcc_station_file_obj)
        return fcc_stations["fcc_station_list"]
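# get_offline_file_time() and get_online_file_time() are defined elsewhere in
# the project; the sketch below only illustrates the kind of comparison
# get_fcc_stations() relies on (cached zip mtime vs. the remote Last-Modified
# header). The real implementations may differ.
import datetime
import email.utils


def get_offline_file_time_sketch(path):
    # mtime of the cached zip as a naive UTC datetime
    return datetime.datetime.utcfromtimestamp(os.path.getmtime(path))


def get_online_file_time_sketch(url):
    # Last-Modified header of the remote file; a missing header falls back to
    # "now" so the caller re-downloads
    req = urllib.request.Request(url, method='HEAD')
    with urllib.request.urlopen(req) as resp:
        last_modified = resp.headers.get('Last-Modified')
    if last_modified is None:
        return datetime.datetime.utcnow()
    return email.utils.parsedate_to_datetime(last_modified).replace(tzinfo=None)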
def generate_epg_file(config, location):
    base_cache_dir = config["main"]["cache_dir"]

    out_path = pathlib.Path(base_cache_dir).joinpath(str(location["DMA"]) + "_epg").with_suffix(".xml")
    out_lock_path = pathlib.Path(base_cache_dir).joinpath(str(location["DMA"]) + "_epg").with_suffix(".xml.lock")

    cache_dir = pathlib.Path(base_cache_dir).joinpath(str(location["DMA"]) + "_epg")
    if not cache_dir.is_dir():
        cache_dir.mkdir()

    dma_channels = stations.get_dma_stations_and_channels(config, location)

    # Make a date range to pull
    todaydate = datetime.datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)  # make sure we're dealing with UTC!
    dates_to_pull = [todaydate]
    days_to_pull = int(config["main"]["epg_update_days"])
    for x in range(1, days_to_pull - 1):
        xdate = todaydate + datetime.timedelta(days=x)
        dates_to_pull.append(xdate)

    remove_stale_cache(cache_dir, todaydate)

    out = ET.Element('tv')
    out.set('source-info-url', 'https://www.locast.org')
    out.set('source-info-name', 'locast.org')
    out.set('generator-info-name', 'locastepg')
    out.set('generator-info-url', 'github.com/tgorgdotcom/locast2plex')
    out.set('generator-special-thanks', 'deathbybandaid')

    done_channels = False

    for x_date in dates_to_pull:
        url = ('https://api.locastnet.org/api/watch/epg/' + str(location["DMA"]) + "?startTime=" + x_date.isoformat())

        result = get_cached(cache_dir, x_date.strftime("%m-%d-%Y"), url)
        channel_info = json.loads(result)

        # List Channels First
        if not done_channels:
            done_channels = True

            for channel_item in channel_info:
                sid = str(channel_item['id'])
                if sid in dma_channels.keys():
                    channel_number = str(dma_channels[sid]['channel'])
                    channel_realname = str(dma_channels[sid]['friendlyName'])
                    channel_callsign = str(dma_channels[sid]['callSign'])

                    channel_logo = None  # avoid NameError when a station has no logo fields
                    if 'logo226Url' in channel_item.keys():
                        channel_logo = channel_item['logo226Url']
                    elif 'logoUrl' in channel_item.keys():
                        channel_logo = channel_item['logoUrl']

                    c_out = sub_el(out, 'channel', id=sid)
                    sub_el(c_out, 'display-name', text='%s %s' % (channel_number, channel_callsign))
                    sub_el(c_out, 'display-name', text='%s %s %s' % (channel_number, channel_callsign, sid))
                    sub_el(c_out, 'display-name', text=channel_number)
                    sub_el(c_out, 'display-name', text='%s %s fcc' % (channel_number, channel_callsign))
                    sub_el(c_out, 'display-name', text=channel_callsign)
                    sub_el(c_out, 'display-name', text=channel_realname)

                    if channel_logo is not None:
                        sub_el(c_out, 'icon', src=channel_logo)

        # Now list program information
        for channel_item in channel_info:
            sid = str(channel_item['id'])
            if sid in dma_channels.keys():
                channel_number = str(dma_channels[sid]['channel'])
                channel_realname = str(dma_channels[sid]['friendlyName'])
                channel_callsign = str(dma_channels[sid]['callSign'])

                if 'logo226Url' in channel_item.keys():
                    channel_logo = channel_item['logo226Url']
                elif 'logoUrl' in channel_item.keys():
                    channel_logo = channel_item['logoUrl']

                for event in channel_item['listings']:

                    tm_start = tm_parse(event['startTime'])  # this is returned from locast in UTC
                    tm_duration = event['duration'] * 1000
                    tm_end = tm_parse(event['startTime'] + tm_duration)

                    event_genres = []
                    if 'genres' in event.keys():
                        event_genres = event['genres'].split(",")

                    # note we're returning everything as UTC, as the clients handle converting to the correct timezone
                    prog_out = sub_el(out, 'programme', start=tm_start, stop=tm_end, channel=sid)

                    if event['title']:
                        sub_el(prog_out, 'title', lang='en', text=event['title'])

                    if 'movie' in event_genres and event['releaseYear']:
                        sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + event['releaseYear'])
                    elif 'episodeTitle' in event.keys():
                        sub_el(prog_out, 'sub-title', lang='en', text=event['episodeTitle'])

                    if 'description' not in event.keys():
                        event['description'] = "Unavailable"
                    elif event['description'] is None:
                        event['description'] = "Unavailable"
                    sub_el(prog_out, 'desc', lang='en', text=event['description'])

                    sub_el(prog_out, 'length', units='minutes', text=str(event['duration']))

                    for f in event_genres:
                        sub_el(prog_out, 'category', lang='en', text=f.strip())
                        sub_el(prog_out, 'genre', lang='en', text=f.strip())

                    if event["preferredImage"] is not None:
                        sub_el(prog_out, 'icon', src=event["preferredImage"])

                    if 'rating' not in event.keys():
                        event['rating'] = "N/A"
                    r = ET.SubElement(prog_out, 'rating')
                    sub_el(r, 'value', text=event['rating'])

                    if 'seasonNumber' in event.keys() and 'episodeNumber' in event.keys():
                        s_ = int(str(event['seasonNumber']), 10)
                        e_ = int(str(event['episodeNumber']), 10)
                        sub_el(prog_out, 'episode-num', system='common', text='S%02dE%02d' % (s_, e_))
                        sub_el(prog_out, 'episode-num', system='xmltv_ns', text='%d.%d.0' % (int(s_) - 1, int(e_) - 1))
                        sub_el(prog_out, 'episode-num', system='SxxExx', text='S%02dE%02d' % (s_, e_))

                    if 'isNew' in event.keys():
                        if event['isNew']:
                            sub_el(prog_out, 'new')

    xml_lock = FileLock(out_lock_path)
    with xml_lock:
        with open(out_path, 'wb') as f:
            f.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
            f.write(ET.tostring(out, encoding='UTF-8'))
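# Rough end-to-end sketch of how these helpers appear to fit together. The
# locast_service object (anything exposing get_stations(), i.e. the
# authenticated Locast API wrapper) and the config/location shapes are
# assumptions inferred from the calls above.
fcc_stations = get_fcc_stations(example_config)
generate_dma_stations_and_channels_file(example_config, locast_service, example_location, fcc_stations)
generate_epg_file(example_config, example_location)
xmltv = get_epg(example_config, example_location)  # full XMLTV document as a UTF-8 string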