Example #1
 def set_config(self, _config):
     self.config = _config
     if self.db is None:
         self.db = DBConfigDefn(self.config)
         if self.config_defn:
             self.save_defn_to_db()
     self.logger = logging.getLogger(__name__)
Example #2
 def __init__(self,
              _script_dir=None,
              _opersystem=None,
              _args=None,
              _config=None):
     self.logger = None
     self.defn_json = None
     self.script_dir = str(_script_dir)
     self.defn_json = config_defn.load_default_config_defns()
     self.data = self.defn_json.get_default_config()
     if _script_dir is not None:
         config_file = TVHUserConfig.get_config_path(_script_dir, _args)
         self.import_config(config_file)
         self.defn_json.call_oninit(self)
         utils.logging_setup(self.data)
         # at this point, the config is set up
         self.db = DBConfigDefn(self.data)
         self.db.reinitialize_tables()
         self.defn_json.set_config(self.data)
         self.defn_json.save_defn_to_db()
     else:
         self.set_config(_config)
         self.defn_json.garbage_collect()
     self.db = DBConfigDefn(self.data)
     self.db.add_config(self.data)
Example #3
 def __init__(self, *args):
     os.chdir(os.path.dirname(os.path.abspath(__file__)))
     self.script_dir = pathlib.Path(
         os.path.dirname(os.path.abspath(__file__)))
     self.ffmpeg_proc = None  # process for running ffmpeg
     self.block_moving_avg = 0
     self.last_refresh = None
     self.block_prev_pts = 0
     self.block_prev_time = None
     self.buffer_prev_time = None
     self.block_max_pts = 0
     self.small_pkt_streaming = False
     self.real_namespace = None
     self.real_instance = None
     self.m3u8_redirect = M3U8Redirect(TunerHttpHandler.plugins,
                                       TunerHttpHandler.hdhr_queue)
     self.internal_proxy = InternalProxy(TunerHttpHandler.plugins,
                                         TunerHttpHandler.hdhr_queue)
     self.ffmpeg_proxy = FFMpegProxy(TunerHttpHandler.plugins,
                                     TunerHttpHandler.hdhr_queue)
     self.db_configdefn = DBConfigDefn(self.config)
     try:
         super().__init__(*args)
     except ConnectionResetError:
         self.logger.warning(
             '########## ConnectionResetError occurred, will try again')
         time.sleep(1)
         super().__init__(*args)
     except ValueError:
         self.logger.warning(
             'ValueError occurred, bad stream received. Could be HTTPS or the stream was disconnected early'
         )
Example #4
 def __init__(self, _plugins, _hdhr_queue):
     self.last_refresh = None
     self.channel_dict = None
     self.write_buffer = None
     self.file_filter = None
     self.pts_validation = None
     self.duration = 6
     super().__init__(_plugins, _hdhr_queue)
     self.config = self.plugins.config_obj.data
     self.db_configdefn = DBConfigDefn(self.config)
Example #5
def backup_list(_config_obj):
    """
    A dict of the settings that describe what is backed up, for use with restore.
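    Shape of the return value (inferred from the loop below; actual keys come
    from Restore.restore2func and the 'datamgmt' settings in the defn database):
        {<restore_key>: <setting definition dict>, ...}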
    """
    db_confdefn = DBConfigDefn(_config_obj.data)
    dm_section = db_confdefn.get_one_section_dict('general', 'datamgmt')
    bkup_defn = {}
    for key in Restore.restore2func.keys():
        bkup_defn[key] = dm_section['datamgmt']['settings'][key]
    return bkup_defn
Example #6
 def __init__(self, _plugins, _hdhr_queue):
     self.ffmpeg_proc = None
     self.last_refresh = None
     self.block_prev_time = None
     self.buffer_prev_time = None
     self.small_pkt_streaming = False
     self.block_max_pts = 0
     self.block_prev_pts = 0
     self.prev_last_pts = 0
     self.default_duration = 0
     self.block_moving_avg = 0
     self.channel_dict = None
     self.write_buffer = None
     self.stream_queue = None
     self.pts_validation = None
     super().__init__(_plugins, _hdhr_queue)
     self.config = self.plugins.config_obj.data
     self.db_configdefn = DBConfigDefn(self.config)
Example #7
    def __init__(self, _config_obj, _plugin_defn, _plugin_path):

        if Plugin.logger is None:
            Plugin.logger = logging.getLogger(__name__)
        self.enabled = True
        self.plugin_path = _plugin_path
        self.config_obj = _config_obj
        self.db_configdefn = DBConfigDefn(_config_obj.data)
        self.load_config_defn()

        # plugin is registered after this call, so grab reg data
        self.init_func = Plugin._plugin_func
        self.plugin_settings = {}
        self.plugin_db = DBPlugins(_config_obj.data)
        self.namespace = None
        self.instances = []
        self.load_plugin_manifest(_plugin_defn)
        self.load_instances()
        self.logger.info('Plugin created for {}'.format(self.name))
        self.plugin_obj = None
Example #8
def update_instance_label(_config_obj, _section, _key):
    value = _config_obj.data[_section][_key]
    db_confdefn = DBConfigDefn(_config_obj.data)
    areas = db_confdefn.get_area_by_section(_section)
    if len(areas) > 1:
        results = 'WARNING: There is more than one section named {}'.format(_section)
    elif len(areas) == 0:
        return
    else:
        results = None
    section_data = db_confdefn.get_one_section_dict(areas[0], _section)
    section_data[_section]['label'] = value
    db_confdefn.add_section(areas[0], _section, section_data[_section])
    return results
Example #9
class ConfigDefn:
    def __init__(self,
                 _defn_path=None,
                 _defn_file=None,
                 _config=None,
                 _is_instance=False):
        self.logger = None
        self.config_defn = {}
        self.config = None
        self.db = None
        self.is_instance_defn = _is_instance
        self.restricted_items = []
        if _config:
            self.set_config(_config)
        if _defn_file and _defn_path:
            self.merge_defn_file(_defn_path, _defn_file)

    def set_config(self, _config):
        self.config = _config
        if self.db is None:
            self.db = DBConfigDefn(self.config)
            if self.config_defn:
                self.save_defn_to_db()
        self.logger = logging.getLogger(__name__)

    def merge_defn_file(self, _defn_path, _defn_file):
        """ Merges a definition file into the current object
        """
        json_file = resources.read_text(_defn_path, _defn_file)
        defn = json.loads(json_file)
        self.call_ondefnload(defn)
        self.merge_defn_dict(defn)

    def merge_defn_dict(self, _defn_dict):
        """ Merges a definition dict into the current object
        """
        self.config_defn = utils.merge_dict(self.config_defn, _defn_dict)
        self.update_restricted_items(_defn_dict)
        if self.db is not None:
            self.save_defn_to_db(_defn_dict)

    def merge_defn_obj(self, _defn_obj):
        """ will merge in another defn object's definitions (the source object can then be terminated)
        """
        self.config_defn = utils.merge_dict(self.config_defn,
                                            _defn_obj.config_defn)
        self.update_restricted_items(_defn_obj.config_defn)

    def garbage_collect(self):
        self.logger.debug('garbage collecting for Thread:{}'.format(
            threading.get_ident()))
        self.config_defn = None

    def get_default_config(self):
        """
        JSON format: [module]['sections'][section]['settings'][setting][metadata]
        section is the section in the ini file
        setting is the name in the ini file
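        Illustrative layout of one area entry (inferred from the keys read in
        this class; 'general', 'paths' and 'ffmpeg_path' are example names only):
            {'general': {
                'icon': '...',
                'sections': {
                    'paths': {
                        'label': 'Paths',
                        'settings': {
                            'ffmpeg_path': {'type': 'path', 'level': 1, 'default': None}}}}}}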
        """
        config_defaults = {}
        if self.db is not None:
            areas = self.get_areas()
            for area in areas:
                area_dict = self.get_defn(area)
                defaults_dict = self.get_default_config_area(area, area_dict)
                config_defaults = utils.merge_dict(config_defaults,
                                                   defaults_dict)
        else:
            for area, area_dict in self.config_defn.items():
                defaults_dict = self.get_default_config_area(area, area_dict)
                config_defaults = utils.merge_dict(config_defaults,
                                                   defaults_dict)
        return config_defaults

    def get_default_config_area(self, _area, _area_dict=None):
        config_defaults = {}
        if _area_dict is None:
            area_dict = self.get_defn(_area)
        else:
            area_dict = _area_dict

        for section in list(area_dict['sections'].keys()):
            if section not in list(config_defaults.keys()):
                config_defaults[section] = {}
            for setting in list(
                    area_dict['sections'][section]['settings'].keys()):
                value = area_dict['sections'][section]['settings'][setting][
                    'default']
                config_defaults[section][setting] = value
        return config_defaults

    def get_defn(self, _area):
        area_dict = self.db.get_area_dict(_area)[0]
        sections = self.db.get_sections_dict(_area)
        area_dict['sections'] = sections
        return area_dict

    def get_areas(self):
        return self.db.get_areas()

    def call_oninit(self, _config_obj):
        for module in list(self.config_defn.keys()):
            for section in list(self.config_defn[module]['sections'].keys()):
                for key, settings in list(self.config_defn[module]['sections']
                                          [section]['settings'].items()):
                    if 'onInit' in settings:
                        config_callbacks.call_function(settings['onInit'],
                                                       section, key,
                                                       _config_obj)

    def call_onchange(self, _area, _updated_data, _config_obj):
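        # _updated_data is expected to look like
        # {section: {key: [new_value, changed_flag, ...]}}; index 1 is the change
        # flag set by TVHUserConfig.detect_change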
        results = ''
        area_data = self.get_defn(_area)
        for section, section_data in area_data['sections'].items():
            if section in _updated_data:
                for key, setting_data in section_data['settings'].items():
                    if key in _updated_data[section] and \
                            _updated_data[section][key][1] and \
                            'onChange' in setting_data:
                        status = config_callbacks.call_function(
                            setting_data['onChange'], section, key,
                            _config_obj)
                        if status is None:
                            results += '<li>[{}][{}] implemented</li>'.format(
                                section, key)
                        else:
                            results += '<li>[{}][{}] {}</li>'.format(
                                section, key, status)
        return results

    def call_ondefnload(self, _defn):
        for module in list(_defn.keys()):
            for section in list(_defn[module]['sections'].keys()):
                for key, settings in list(_defn[module]['sections'][section]
                                          ['settings'].items()):
                    if 'onDefnLoad' in settings:
                        config_callbacks.call_ondefnload_function(
                            settings['onDefnLoad'], section, key, self.config,
                            _defn)

    def save_defn_to_db(self, _delta_defn=None):
        if _delta_defn:
            delta_defn = _delta_defn
        else:
            delta_defn = self.config_defn
        for area, area_data in delta_defn.items():
            if 'icon' in area_data:
                self.db.add_area(area, area_data)
            for section, section_data in area_data['sections'].items():
                if self.is_instance_defn:
                    self.db.add_instance(area, section, section_data)
                else:
                    self.db.add_section(area, section, section_data)

    def save_instance_defn_to_db(self, _delta_defn=None):
        if _delta_defn:
            delta_defn = _delta_defn
        else:
            delta_defn = self.config_defn
        for area, area_data in delta_defn.items():
            if 'icon' in area_data:
                self.db.add_area(area, area_data)
            for section, section_data in area_data['sections'].items():
                self.db.add_instance(area, section, section_data)

    def get_type(self, _section, _key, _value):
        """ Returns the expected type of the setting
        """
        for module in list(self.config_defn.keys()):
            for section in list(self.config_defn[module]['sections'].keys()):
                if section == _section:
                    for setting in list(self.config_defn[module]['sections']
                                        [section]['settings'].keys()):
                        if setting == _key:
                            return self.config_defn[module]['sections'][
                                section]['settings'][setting]['type']
        return None

    def validate_list_item(self, _section, _key, _value):
        """ for list settings, will determine if the value
            is in the list
        """
        for module in list(self.config_defn.keys()):
            for section in list(self.config_defn[module]['sections'].keys()):
                if section == _section:
                    for setting in list(self.config_defn[module]['sections']
                                        [section]['settings'].keys()):
                        if setting == _key:
                            return _value in str(
                                self.config_defn[module]['sections'][section]
                                ['settings'][setting]['values'])
        return None

    def update_restricted_items(self, _defn_file):
        for area, area_data in _defn_file.items():
            self.update_restricted_items_area(area_data)

    def update_restricted_items_area(self, _defn_area):
        for section, section_data in _defn_area['sections'].items():
            for key, settings in section_data['settings'].items():
                if settings['level'] == 4:
                    self.restricted_items.append([section, key])
                elif 'hidden' in settings and settings['hidden']:
                    self.restricted_items.append([section, key])

    def get_restricted_items(self):
        if not self.restricted_items:
            area_list = self.db.get_areas()
            for area in area_list:
                area_dict = self.get_defn(area)
                self.update_restricted_items_area(area_dict)
        return self.restricted_items

    @property
    def defn_path(self):
        return CONFIG_DEFN_PATH

    def terminate(self):
        self.db.close()
        self.config_defn = None
        self.config = None
        self.db = None
        self.restricted_items = None
        self.logger.debug('Database terminated for thread:{}'.format(
            threading.get_ident()))
Example #10
class InternalProxy(Stream):
    def __init__(self, _plugins, _hdhr_queue):
        self.last_refresh = None
        self.channel_dict = None
        self.write_buffer = None
        self.file_filter = None
        self.pts_validation = None
        self.duration = 6
        super().__init__(_plugins, _hdhr_queue)
        self.config = self.plugins.config_obj.data
        self.db_configdefn = DBConfigDefn(self.config)

    def stream_direct(self, _channel_dict, _write_buffer):
        """
        Processes m3u8 interface without using ffmpeg
        """
        self.config = self.db_configdefn.get_config()
        self.channel_dict = _channel_dict
        self.write_buffer = _write_buffer
        duration = 6
        play_queue = OrderedDict()
        self.last_refresh = time.time()
        stream_uri = self.get_stream_uri(_channel_dict)
        if not stream_uri:
            self.logger.warning('Unknown Channel')
            return
        self.logger.debug('M3U8: {}'.format(stream_uri))
        self.file_filter = None
        if self.config[_channel_dict['namespace'].lower(
        )]['player-enable_url_filter']:
            stream_filter = self.config[
                _channel_dict['namespace'].lower()]['player-url_filter']
            if stream_filter is not None:
                self.file_filter = re.compile(stream_filter)
            else:
                self.logger.warning(
                    '[{}][player-enable_url_filter]'
                    ' enabled but [player-url_filter] not set'.format(
                        _channel_dict['namespace'].lower()))
        if self.config[_channel_dict['namespace'].lower(
        )]['player-enable_pts_filter']:
            self.pts_validation = PTSValidation(self.config, self.channel_dict)

        while True:
            try:
                added = 0
                removed = 0
                playlist = m3u8.load(stream_uri)
                removed += self.remove_from_stream_queue(playlist, play_queue)
                added += self.add_to_stream_queue(playlist, play_queue)
                if added == 0 and duration > 0:
                    time.sleep(duration * 0.3)
                elif self.plugins.plugins[_channel_dict['namespace']].plugin_obj \
                        .is_time_to_refresh_ext(self.last_refresh, _channel_dict['instance']):
                    stream_uri = self.get_stream_uri(_channel_dict)
                    self.logger.debug('M3U8: {}'.format(stream_uri))
                    self.last_refresh = time.time()
                self.play_queue(play_queue)
            except IOError as e:
                # Check whether we hit a broken pipe while writing back to the client
                if e.errno in [
                        errno.EPIPE, errno.ECONNABORTED, errno.ECONNRESET,
                        errno.ECONNREFUSED
                ]:
                    # Normal process.  Client requested end of stream
                    self.logger.info(
                        '2. Connection dropped by end device {}'.format(e))
                    break
                else:
                    self.logger.error('{}{}'.format('3 UNEXPECTED EXCEPTION=',
                                                    e))
                    raise

    def add_to_stream_queue(self, _playlist, _play_queue):
        total_added = 0
        for m3u8_segment in _playlist.segments:
            uri = m3u8_segment.absolute_uri
            if uri not in _play_queue:
                played = False
                if self.file_filter is not None:
                    m = self.file_filter.match(uri)
                    if m:
                        played = True
                _play_queue[uri] = {
                    'played': played,
                    'duration': m3u8_segment.duration
                }
                self.logger.debug(f"Added {uri} to play queue")
                total_added += 1
        return total_added

    def remove_from_stream_queue(self, _playlist, _play_queue):
        total_removed = 0
        for segment_key in list(_play_queue.keys()):
            is_found = False
            for segment_m3u8 in _playlist.segments:
                uri = segment_m3u8.absolute_uri
                if segment_key == uri:
                    is_found = True
                    break
            if not is_found:
                del _play_queue[segment_key]
                total_removed += 1
                self.logger.debug(f"Removed {segment_key} from play queue")
            else:
                break
        return total_removed

    def play_queue(self, _play_queue):
        for uri, data in _play_queue.items():
            if not data["played"]:
                start_download = datetime.datetime.utcnow()
                chunk = None
                count = 5
                while count > 0:
                    count -= 1
                    try:
                        req = urllib.request.Request(uri)
                        with urllib.request.urlopen(req) as resp:
                            chunk = resp.read()
                            break
                    except http.client.IncompleteRead as e:
                        self.logger.info(
                            'Provider gave partial stream ({} bytes), trying again. {}'
                            .format(len(e.partial), e))
                        chunk = e.partial
                        time.sleep(1)
                data['played'] = True
                if not chunk:
                    self.logger.warning(
                        f"Segment {uri} not available. Skipping..")
                    continue
                if not self.is_pts_valid(chunk):
                    continue

                atsc_msg = ATSCMsg()
                chunk_updated = atsc_msg.update_sdt_names(
                    chunk[:80], self.channel_dict['namespace'].encode(),
                    self.set_service_name(self.channel_dict).encode())
                chunk = chunk_updated + chunk[80:]
                self.duration = data['duration']
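                # pace the output: serving each segment takes at least 30% of its
                # duration, with download time counting toward that budget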
                runtime = (datetime.datetime.utcnow() -
                           start_download).total_seconds()
                target_diff = 0.3 * self.duration
                wait = target_diff - runtime
                self.logger.info(
                    f"Serving {uri} ({self.duration}s) ({len(chunk)}B)")
                self.write_buffer.write(chunk)
                if wait > 0:
                    time.sleep(wait)

    def is_pts_valid(self, video_data):
        if not self.config[self.channel_dict['namespace'].lower(
        )]['player-enable_pts_filter']:
            return True
        results = self.pts_validation.check_pts(video_data)
        if results['byteoffset'] != 0:
            return False
        if results['refresh_stream']:
            return False
        if results['reread_buffer']:
            return False
        return True
Example #11
class FFMpegProxy(Stream):
    def __init__(self, _plugins, _hdhr_queue):
        self.ffmpeg_proc = None
        self.last_refresh = None
        self.block_prev_time = None
        self.buffer_prev_time = None
        self.small_pkt_streaming = False
        self.block_max_pts = 0
        self.block_prev_pts = 0
        self.prev_last_pts = 0
        self.default_duration = 0
        self.block_moving_avg = 0
        self.channel_dict = None
        self.write_buffer = None
        self.stream_queue = None
        self.pts_validation = None
        super().__init__(_plugins, _hdhr_queue)
        self.config = self.plugins.config_obj.data
        self.db_configdefn = DBConfigDefn(self.config)

    def stream_ffmpeg(self, _channel_dict, _write_buffer):
        self.channel_dict = _channel_dict
        self.write_buffer = _write_buffer
        self.config = self.db_configdefn.get_config()
        self.pts_validation = PTSValidation(self.config, self.channel_dict)
        channel_uri = self.get_stream_uri(self.channel_dict)
        if not channel_uri:
            self.logger.warning('Unknown Channel')
            return
        self.ffmpeg_proc = self.open_ffmpeg_proc(channel_uri)
        time.sleep(0.01)
        self.last_refresh = time.time()
        self.block_prev_time = self.last_refresh
        self.buffer_prev_time = self.last_refresh
        video_data = self.read_buffer()
        while True:
            if not video_data:
                self.logger.debug('No Video Data, refreshing stream')
                self.ffmpeg_proc = self.refresh_stream()
            else:
                try:
                    video_data = self.validate_stream(video_data)
                    self.write_buffer.write(video_data)
                except IOError as e:
                    if e.errno in [
                            errno.EPIPE, errno.ECONNABORTED, errno.ECONNRESET,
                            errno.ECONNREFUSED
                    ]:
                        self.logger.info('1. Connection dropped by end device')
                        break
                    else:
                        self.logger.error('{}{}'.format(
                            '1 ################ UNEXPECTED EXCEPTION=', e))
                        raise
            try:
                video_data = self.read_buffer()
            except Exception as e:
                self.logger.error('{}{}'.format(
                    '2 ################ UNEXPECTED EXCEPTION=', e))
                raise
        self.logger.debug('Terminating ffmpeg stream')
        self.ffmpeg_proc.terminate()
        try:
            self.ffmpeg_proc.communicate()
        except ValueError:
            pass

    def validate_stream(self, video_data):
        if not self.config[self.channel_dict['namespace'].lower(
        )]['player-enable_pts_filter']:
            return video_data

        has_changed = True
        while has_changed:
            has_changed = False
            results = self.pts_validation.check_pts(video_data)
            if results['byteoffset'] != 0:
                if results['byteoffset'] < 0:
                    self.write_buffer.write(
                        video_data[-results['byteoffset']:len(video_data) - 1])
                else:
                    self.write_buffer.write(
                        video_data[0:results['byteoffset']])
                has_changed = True
            if results['refresh_stream']:
                self.ffmpeg_proc = self.refresh_stream()
                video_data = self.read_buffer()
                has_changed = True
            if results['reread_buffer']:
                video_data = self.read_buffer()
                has_changed = True
        return video_data

    def read_buffer(self):
        data_found = False
        video_data = None
        idle_timer = 2
        while not data_found:
            video_data = self.stream_queue.read()
            if video_data:
                data_found = True
            else:
                time.sleep(0.2)
                idle_timer -= 1
                if idle_timer == 0:
                    if self.plugins.plugins[self.channel_dict['namespace']].plugin_obj \
                            .is_time_to_refresh_ext(self.last_refresh, self.channel_dict['instance']):
                        self.ffmpeg_proc = self.refresh_stream()
        # with open("x.ts"+str(datetime.datetime.now().timestamp()), 'wb') as temp_file:
        # with open("x.ts", 'wb') as temp_file:
        #   temp_file.write(video_data)
        return video_data

    def refresh_stream(self):
        self.last_refresh = time.time()
        channel_uri = self.get_stream_uri(self.channel_dict)
        try:
            self.ffmpeg_proc.terminate()
            self.ffmpeg_proc.wait(timeout=0.1)
            self.logger.debug('Previous ffmpeg terminated')
        except ValueError:
            pass
        except subprocess.TimeoutExpired:
            self.ffmpeg_proc.terminate()
            time.sleep(0.01)

        self.logger.debug('{}{}'.format('Refresh Stream channelUri=',
                                        channel_uri))
        ffmpeg_process = self.open_ffmpeg_proc(channel_uri)
        # make sure the previous ffmpeg is terminated before exiting
        self.buffer_prev_time = time.time()
        return ffmpeg_process

    def open_ffmpeg_proc(self, _channel_uri):
        """
        ffmpeg drops the first 9 frame/video packets when the program starts.
        this means that every time a refresh occurs, 9 frames will be dropped.  This is
        visible by looking at the video packet count for a 6 second window being 171
        instead of 180.  Following the first read, the packets increase to 180.
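        The command below uses '-copyts' to preserve the source timestamps and
        '-f mpegts ... pipe:1' to write raw MPEG-TS to stdout for StreamQueue.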
        """
        ffmpeg_command = [
            self.config['paths']['ffmpeg_path'], '-i',
            str(_channel_uri), '-f', 'mpegts', '-nostats', '-hide_banner',
            '-loglevel', 'warning', '-copyts', 'pipe:1'
        ]
        ffmpeg_process = subprocess.Popen(ffmpeg_command,
                                          stdout=subprocess.PIPE,
                                          bufsize=-1)
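        # 188 bytes is the MPEG-TS packet size, presumably the read unit for StreamQueue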
        self.stream_queue = StreamQueue(188, ffmpeg_process,
                                        self.channel_dict['uid'])
        time.sleep(0.1)
        return ffmpeg_process
Example #12
class TVHUserConfig:

    config_handler = configparser.ConfigParser(interpolation=None)

    def __init__(self,
                 _script_dir=None,
                 _opersystem=None,
                 _args=None,
                 _config=None):
        self.logger = None
        self.defn_json = None
        self.script_dir = str(_script_dir)
        self.defn_json = config_defn.load_default_config_defns()
        self.data = self.defn_json.get_default_config()
        if _script_dir is not None:
            config_file = TVHUserConfig.get_config_path(_script_dir, _args)
            self.import_config(config_file)
            self.defn_json.call_oninit(self)
            utils.logging_setup(self.data)
            # at this point, the config is set up
            self.db = DBConfigDefn(self.data)
            self.db.reinitialize_tables()
            self.defn_json.set_config(self.data)
            self.defn_json.save_defn_to_db()
        else:
            self.set_config(_config)
            self.defn_json.garbage_collect()
        self.db = DBConfigDefn(self.data)
        self.db.add_config(self.data)

    def refresh_config_data(self):
        self.data = self.db.get_config()

    def set_config(self, _config):
        self.data = copy.deepcopy(_config)
        self.config_handler.read(self.data['paths']['config_file'])
        self.logger = logging.getLogger(__name__)

    def init_logger_config(self):
        log_sections = [
            'loggers', 'logger_root', 'handlers', 'formatters',
            'handler_filehandler', 'handler_loghandler', 'formatter_extend',
            'formatter_simple'
        ]
        for section in log_sections:
            try:
                self.config_handler.add_section(section)
            except configparser.DuplicateSectionError:
                pass
            for key, value in self.data[section].items():
                self.config_handler.set(section, key, str(value))
        with open(self.data['paths']['config_file'], 'w') as config_file:
            self.config_handler.write(config_file)
        utils.logging_setup(self.data)

    def import_config(self, config_file):
        self.config_handler.read(config_file)
        self.data['paths']['config_file'] = str(config_file)
        try:
            utils.logging_setup(self.data)
        except KeyError:
            self.init_logger_config()
        self.logger = logging.getLogger(__name__)
        self.logger.info("Loading Configuration File: " + str(config_file))

        for each_section in self.config_handler.sections():
            lower_section = each_section.lower()
            if lower_section not in self.data.keys():
                self.data.update({lower_section: {}})
            for (each_key,
                 each_val) in self.config_handler.items(each_section):
                lower_key = each_key.lower()
                self.data[lower_section][lower_key] = \
                    self.fix_value_type(lower_section, lower_key, each_val)

    @staticmethod
    def get_config_path(_script_dir, args=None):
        config_file = None
        if args is not None and args.cfg:
            config_file = pathlib.Path(str(args.cfg))
        else:
            for x in [CONFIG_FILENAME, 'data/' + CONFIG_FILENAME]:
                poss_config = pathlib.Path(_script_dir).joinpath(x)
                if os.path.exists(poss_config):
                    config_file = poss_config
                    break
        if config_file and os.path.exists(config_file):
            return config_file
        else:
            print('ERROR: Config file missing {}, Exiting...'.format(
                config_file if config_file else poss_config))
            clean_exit(1)

    def fix_value_type(self, _section, _key, _value):
        try:
            val_type = self.defn_json.get_type(_section, _key, _value)
            if val_type == 'boolean':
                return self.config_handler.getboolean(_section, _key)
            elif val_type == 'list':
                if isinstance(_value, str) and _value.isdigit():
                    _value = int(_value)
                if not self.defn_json.validate_list_item(
                        _section, _key, _value):
                    logging.info(
                        'INVALID VALUE ({}) FOR CONFIG ITEM [{}][{}]'.format(
                            _value, _section, _key))
                return _value
            elif val_type == 'integer':
                return int(_value)
            elif val_type == 'float':
                return float(_value)
            elif val_type is None:
                return _value
            else:
                return _value
        except (configparser.NoOptionError, configparser.NoSectionError,
                TypeError):
            return _value
        except ValueError:
            return None

    # removes sensitive data from config and returns a copy
    def filter_config_data(self):
        restricted_list = self.defn_json.get_restricted_items()
        filtered_config = copy.deepcopy(self.data)
        for item in restricted_list:
            del filtered_config[item[0]][item[1]]
        return filtered_config

    def detect_change(self, _section, _key, _updated_data):
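        # _updated_data layout (inferred from usage here and in update_config):
        # {section: {key: [new_value]}} on entry; this method sets or appends a
        # boolean change flag at index 1, and hidden settings may carry a third
        # element (see update_config / save_config_section)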
        current_value = self.data[_section][_key]
        if type(current_value) is int:
            if _updated_data[_section][_key][0] is not None:
                _updated_data[_section][_key][0] = int(
                    _updated_data[_section][_key][0])
        elif type(current_value) is bool:
            _updated_data[_section][_key][0] = bool(
                int(_updated_data[_section][_key][0]))
        elif type(current_value) is str:
            pass
        elif current_value is None:
            pass
        else:
            self.logger.debug(
                'unknown value type for [{}][{}]  type is {}'.format(
                    _section, _key, type(self.data[_section][_key])))

        if self.data[_section][_key] != _updated_data[_section][_key][0]:
            if len(_updated_data[_section][_key]) > 1:
                _updated_data[_section][_key][1] = True
            else:
                _updated_data[_section][_key].append(True)
        else:
            if len(_updated_data[_section][_key]) > 1:
                _updated_data[_section][_key][1] = False
            else:
                _updated_data[_section][_key].append(False)

    def merge_config(self, _delta_config_dict):
        self.data = utils.merge_dict(self.data,
                                     _delta_config_dict,
                                     ignore_conflicts=True)

    def update_config(self, _area, _updated_data):
        # make sure the config_handler has all the data from the file
        self.config_handler.read(self.data['paths']['config_file'])

        area_data = self.defn_json.get_defn(_area)
        for section, section_data in area_data['sections'].items():
            if section in _updated_data:
                for setting, setting_data in section_data['settings'].items():
                    if setting in _updated_data[section]:
                        if setting_data['level'] == 4:
                            pass
                        elif 'writable' in setting_data and not setting_data[
                                'writable']:
                            if setting in _updated_data[section]:
                                _updated_data[section][setting].append(False)
                        elif 'hidden' in setting_data and setting_data[
                                'hidden']:
                            if _updated_data[section][setting][0] is None:
                                _updated_data[section][setting].append(False)
                            else:
                                _updated_data[section][setting].append(True)
                                _updated_data[section][setting].append(True)
                        else:
                            self.detect_change(section, setting, _updated_data)

        # save the changes to config.ini and self.data
        results = '<hr><h3>Status Results</h3><ul>'

        config_defaults = self.defn_json.get_default_config_area(_area)
        for key in _updated_data.keys():
            results += self.save_config_section(key, _updated_data,
                                                config_defaults)
        with open(self.data['paths']['config_file'], 'w') as config_file:
            self.config_handler.write(config_file)

        # need to inform things that changes occurred...
        restart = False
        results += self.defn_json.call_onchange(_area, _updated_data, self)
        self.db.add_config(self.data)
        if restart:
            results += '</ul><b>Service may need to be restarted if not all changes were implemented</b><hr><br>'
        else:
            results += '</ul><hr><br>'
        return results

    def save_config_section(self, _section, _updated_data, _config_defaults):
        results = ''
        for (key, value) in _updated_data[_section].items():
            if value[1]:
                if value[0] is None:
                    # use default and remove item from config.ini
                    try:
                        self.config_handler.remove_option(_section, key)
                    except configparser.NoSectionError:
                        pass
                    self.data[_section][key] \
                        = _config_defaults[_section][key]
                    self.logger.debug('Config Update: Removed [{}][{}]'.format(
                        _section, key))
                    results += \
                        '<li>Removed [{}][{}] from {}, using default value</li>' \
                        .format(_section, key, CONFIG_FILENAME)
                else:
                    # set new value
                    if len(_updated_data[_section][key]) == 3:
                        self.logger.debug(
                            'Config Update: Changed [{}][{}]'.format(
                                _section, key))
                    else:
                        self.logger.debug(
                            'Config Update: Changed [{}][{}] to {}'.format(
                                _section, key,
                                _updated_data[_section][key][0]))

                    try:
                        self.config_handler.set(
                            _section, key,
                            str(_updated_data[_section][key][0]))
                    except configparser.NoSectionError:
                        self.config_handler.add_section(_section)
                        self.config_handler.set(
                            _section, key,
                            str(_updated_data[_section][key][0]))
                    self.data[_section][key] = _updated_data[_section][key][0]
                    if len(_updated_data[_section][key]) == 3:
                        results += '<li>Updated [{}][{}]</li>' \
                            .format(_section, key)
                    else:
                        results += '<li>Updated [{}][{}] to {}</li>' \
                            .format(_section, key, _updated_data[_section][key][0])
        return results

    def write(self, _section, _key, _value):
        self.data[_section][_key] = _value
        try:
            self.config_handler.set(_section, _key, _value)
        except configparser.NoSectionError:
            self.config_handler.add_section(_section)
            self.config_handler.set(_section, _key, _value)
        with open(self.data['paths']['config_file'], 'w') as config_file:
            self.config_handler.write(config_file)
Example #13
class TunerHttpHandler(WebHTTPHandler):
    def __init__(self, *args):
        os.chdir(os.path.dirname(os.path.abspath(__file__)))
        self.script_dir = pathlib.Path(
            os.path.dirname(os.path.abspath(__file__)))
        self.ffmpeg_proc = None  # process for running ffmpeg
        self.block_moving_avg = 0
        self.last_refresh = None
        self.block_prev_pts = 0
        self.block_prev_time = None
        self.buffer_prev_time = None
        self.block_max_pts = 0
        self.small_pkt_streaming = False
        self.real_namespace = None
        self.real_instance = None
        self.m3u8_redirect = M3U8Redirect(TunerHttpHandler.plugins,
                                          TunerHttpHandler.hdhr_queue)
        self.internal_proxy = InternalProxy(TunerHttpHandler.plugins,
                                            TunerHttpHandler.hdhr_queue)
        self.ffmpeg_proxy = FFMpegProxy(TunerHttpHandler.plugins,
                                        TunerHttpHandler.hdhr_queue)
        self.db_configdefn = DBConfigDefn(self.config)
        try:
            super().__init__(*args)
        except ConnectionResetError:
            self.logger.warning(
                '########## ConnectionResetError occurred, will try again')
            time.sleep(1)
            super().__init__(*args)
        except ValueError:
            self.logger.warning(
                'ValueError occurred, bad stream received. Could be HTTPS or the stream was disconnected early'
            )

    def do_GET(self):
        content_path, query_data = self.get_query_data()
        if content_path.startswith('/auto/v'):
            channel = content_path.replace('/auto/v', '')
            station_list = TunerHttpHandler.channels_db.get_channels(
                query_data['name'], query_data['instance'])
            if channel not in station_list.keys():
                # check channel number
                for station in station_list.keys():
                    if station_list[station]['number'] == channel:
                        self.do_tuning(station, query_data['name'],
                                       query_data['instance'])
                        return
            else:
                self.do_tuning(channel, query_data['name'],
                               query_data['instance'])
                return
            self.do_mime_response(
                501, 'text/html',
                web_templates['htmlError'].format('501 - Unknown channel'))

        elif content_path.startswith('/logreset'):
            logging.config.fileConfig(
                fname=self.config['paths']['config_file'],
                disable_existing_loggers=False)
            self.do_mime_response(200, 'text/html')

        elif content_path.startswith('/watch'):
            sid = content_path.replace('/watch/', '')
            self.do_tuning(sid, query_data['name'], query_data['instance'])
        else:
            self.logger.warning("Unknown request to " + content_path)
            self.do_mime_response(
                501, 'text/html',
                web_templates['htmlError'].format('501 - Not Implemented'))
        return

    def do_POST(self):
        content_path = self.path
        query_data = {}
        self.logger.debug('Receiving a POST form {}'.format(content_path))
        # get POST data
        if self.headers.get('Content-Length') != '0':
            post_data = self.rfile.read(int(
                self.headers.get('Content-Length'))).decode('utf-8')
            # if an input is empty, it is dropped when the dict is generated
            query_data = urllib.parse.parse_qs(post_data)

        # get QUERYSTRING
        if self.path.find('?') != -1:
            get_data = self.path[(self.path.find('?') + 1):]
            get_data_elements = get_data.split('&')
            for get_data_item in get_data_elements:
                get_data_item_split = get_data_item.split('=')
                if len(get_data_item_split) > 1:
                    query_data[get_data_item_split[0]] = get_data_item_split[1]

        self.do_mime_response(
            501, 'text/html',
            web_templates['htmlError'].format('501 - Badly Formatted Message'))
        return

    def do_tuning(self, sid, _namespace, _instance):

        # refresh the config data in case it changed in the web_admin process
        self.plugins.config_obj.refresh_config_data()
        self.config = self.plugins.config_obj.data
        self.config = self.db_configdefn.get_config()
        self.plugins.config_obj.data = self.config
        station_list = TunerHttpHandler.channels_db.get_channels(
            _namespace, _instance)
        try:
            self.real_namespace, self.real_instance, station_data = self.get_ns_inst_station(
                station_list[sid])
        except KeyError:
            self.logger.warning(
                'Unknown channel ID, not found in database {} {} {}'.format(
                    _namespace, _instance, sid))
            self.do_mime_response(
                501, 'text/html',
                web_templates['htmlError'].format('501 - Unknown channel'))
            return
        if self.config[self.real_namespace.lower(
        )]['player-stream_type'] == 'm3u8redirect':
            self.do_dict_response(
                self.m3u8_redirect.gen_m3u8_response(station_data))
            return
        elif self.config[self.real_namespace.lower(
        )]['player-stream_type'] == 'internalproxy':
            resp = self.internal_proxy.gen_response(self.real_namespace,
                                                    self.real_instance,
                                                    station_data['number'],
                                                    TunerHttpHandler)
            self.do_dict_response(resp)
            if resp['tuner'] < 0:
                return
            else:
                self.internal_proxy.stream_direct(station_data, self.wfile)
        elif self.config[self.real_namespace.lower(
        )]['player-stream_type'] == 'ffmpegproxy':
            resp = self.ffmpeg_proxy.gen_response(self.real_namespace,
                                                  self.real_instance,
                                                  station_data['number'],
                                                  TunerHttpHandler)
            self.do_dict_response(resp)
            if resp['tuner'] < 0:
                return
            else:
                self.ffmpeg_proxy.stream_ffmpeg(station_data, self.wfile)
        else:
            self.do_mime_response(
                501, 'text/html',
                web_templates['htmlError'].format('501 - Unknown streamtype'))
            self.logger.error('Unknown [player-stream_type] {}'.format(
                self.config[
                    self.real_namespace.lower()]['player-stream_type']))
            return
        self.logger.info('1 Provider Connection Closed')
        WebHTTPHandler.rmg_station_scans[self.real_namespace][
            resp['tuner']] = 'Idle'

    def get_ns_inst_station(self, _station_data):
        ns = []
        inst = []
        counter = {}
        for one_station in _station_data:
            ns.append(one_station['namespace'])
            inst.append(one_station['instance'])
            counter[one_station['instance']] = 0
        for namespace, status_list in WebHTTPHandler.rmg_station_scans.items():
            for status in status_list:
                if type(status) is dict:
                    if status['instance'] not in counter:
                        counter[status['instance']] = 1
                    else:
                        counter[status['instance']] += 1

        # pick the instance with the lowest counter
        lowest_value = 100
        lowest_instance = None
        for instance, value in counter.items():
            if value < lowest_value:
                lowest_value = value
                lowest_instance = instance

        lowest_namespace = None
        for i in range(len(inst)):
            if inst[i] == lowest_instance:
                lowest_namespace = ns[i]

        # find the station data associated with the pick
        station = None
        for one_station in _station_data:
            if one_station['namespace'] == lowest_namespace and \
                    one_station['instance'] == lowest_instance:
                station = one_station
                break
        return lowest_namespace, lowest_instance, station

    @classmethod
    def init_class_var(cls, _plugins, _hdhr_queue, _sched_queue):
        WebHTTPHandler.logger = logging.getLogger(__name__)
        tuner_count = 0
        for plugin_name in _plugins.plugins.keys():
            if 'player-tuner_count' in _plugins.config_obj.data[
                    plugin_name.lower()]:
                WebHTTPHandler.logger.debug(
                    '{} Implementing {} tuners for {}'.format(
                        cls.__name__, _plugins.config_obj.data[
                            plugin_name.lower()]['player-tuner_count'],
                        plugin_name))
                tuner_count += _plugins.config_obj.data[
                    plugin_name.lower()]['player-tuner_count']
        WebHTTPHandler.total_instances = tuner_count
        super(TunerHttpHandler, cls).init_class_var(_plugins, _hdhr_queue)
Example #14
class Plugin:

    # Temporarily used to register the plugin setup() function
    _plugin_func = None
    logger = None

    def __init__(self, _config_obj, _plugin_defn, _plugin_path):

        if Plugin.logger is None:
            Plugin.logger = logging.getLogger(__name__)
        self.enabled = True
        self.plugin_path = _plugin_path
        self.config_obj = _config_obj
        self.db_configdefn = DBConfigDefn(_config_obj.data)
        self.load_config_defn()

        # plugin is registered after this call, so grab reg data
        self.init_func = Plugin._plugin_func
        self.plugin_settings = {}
        self.plugin_db = DBPlugins(_config_obj.data)
        self.namespace = None
        self.instances = []
        self.load_plugin_manifest(_plugin_defn)
        self.load_instances()
        self.logger.info('Plugin created for {}'.format(self.name))
        self.plugin_obj = None

    def load_config_defn(self):
        try:
            self.logger.debug(
                'Plugin Config Defn file loaded at {}'.format(self.plugin_path))
            defn_obj = ConfigDefn(self.plugin_path, PLUGIN_CONFIG_DEFN_FILE, self.config_obj.data)
            
            default_config = defn_obj.get_default_config()
            self.config_obj.merge_config(default_config)
            defn_obj.call_oninit(self.config_obj)
            self.config_obj.defn_json.merge_defn_obj(defn_obj)
            for area, area_data in defn_obj.config_defn.items():
                for section, section_data in area_data['sections'].items():
                    for setting in section_data['settings'].keys():
                        new_value = self.config_obj.fix_value_type(
                            section, setting, self.config_obj.data[section][setting])
                        self.config_obj.data[section][setting] = new_value
            self.db_configdefn.add_config(self.config_obj.data)
            defn_obj.terminate()
        except FileNotFoundError:
            self.logger.warning(
                'PLUGIN CONFIG DEFN FILE NOT FOUND AT {}'.format(self.plugin_path))

    def load_instances(self):
        inst_defn_obj = ConfigDefn(self.plugin_path, PLUGIN_INSTANCE_DEFN_FILE, self.config_obj.data, True)
        # determine from the config data whether an instance of this name exists; it would have a section named 'namespace_instance'
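        # e.g. (hypothetical names) a namespace 'PluginX' with instance 'east'
        # maps to config section 'pluginx_east'; see find_instances()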
        self.instances = self.find_instances()
        for inst in self.instances:
            self.plugin_db.save_instance(self.namespace, inst, '')
            # create a defn with the instance name appended to the section name, then process it
            inst_defn_obj.is_instance_defn = False
            for area, area_data in inst_defn_obj.config_defn.items():
                if len(area_data['sections']) != 1:
                    self.logger.error('INSTANCE MUST HAVE ONE AND ONLY ONE SECTION')
                    raise exceptions.CabernetException('plugin defn must have one and only one instance section')
                section = list(area_data['sections'].keys())[0]
                base_section = section.split('_', 1)[0]
                area_data['sections'][base_section + '_' + inst] = area_data['sections'].pop(section)
                if 'label' in self.config_obj.data[base_section + '_' + inst] \
                    and self.config_obj.data[base_section + '_' + inst]['label'] is not None:
                    area_data['sections'][base_section + '_' + inst]['label'] = self.config_obj.data[base_section + '_' + inst]['label']
                inst_defn_obj.save_defn_to_db()
                
                default_config = inst_defn_obj.get_default_config()
                self.config_obj.merge_config(default_config)
                inst_defn_obj.call_oninit(self.config_obj)
                self.config_obj.defn_json.merge_defn_obj(inst_defn_obj)
                for area, area_data in inst_defn_obj.config_defn.items():
                    for section, section_data in area_data['sections'].items():
                        for setting in section_data['settings'].keys():
                            new_value = self.config_obj.fix_value_type(
                                section, setting, self.config_obj.data[section][setting])
                            self.config_obj.data[section][setting] = new_value
        self.db_configdefn.add_config(self.config_obj.data)

    def find_instances(self):
        instances = []
        inst_sec = self.namespace.lower() + '_'
        for section in self.config_obj.data.keys():
            if section.startswith(inst_sec):
                instances.append(section.split(inst_sec, 1)[1])
        return instances

    def load_plugin_manifest(self, _plugin_defn):
        self.load_default_settings(_plugin_defn)
        self.import_manifest()

    def load_default_settings(self, _plugin_defn):
        for name, attr in _plugin_defn.items():
            self.plugin_settings[name] = attr['default']

    def import_manifest(self):
        try:
            json_settings = importlib.resources.read_text(self.plugin_path, PLUGIN_MANIFEST_FILE)
            settings = json.loads(json_settings)
            self.namespace = settings['name']
            self.plugin_db.save_plugin(settings)
            self.logger.debug(
                'Plugin Manifest file loaded at {}'.format(self.plugin_path))
            self.plugin_settings = utils.merge_dict(self.plugin_settings, settings, True)
        except FileNotFoundError:
            self.logger.warning(
                'PLUGIN MANIFEST FILE NOT FOUND AT {}'.format(self.plugin_path))

    @property
    def name(self):
        return self.plugin_settings['name']