Example #1
0
    def __init__(self, cfg_parser, engine_classes, max_containers_per_machine, max_scans_per_container, id_overwrite = None, enable_startup_logic = True):
        """Initialize the MultiAV docker scanner.

        :param cfg_parser: config parser used to read MULTIAV settings
        :param engine_classes: iterable of engine classes; one image lock is
            created per engine
        :param max_containers_per_machine: container limit per machine
        :param max_scans_per_container: concurrent scan limit per container
        :param id_overwrite: optional explicit id; otherwise a uuid1-based
            "multiav-..." id is generated
        :param enable_startup_logic: when True, pull all images, set up the
            docker networks and remove leftover containers on construction
        """
        if id_overwrite:
            self.id = str(id_overwrite)
        else:
            self.id = "multiav-{0}".format(uuid.uuid1()).lower()
        
        self._event_subscribers = dict()
        self.cfg_parser = cfg_parser

        self.engine_classes = engine_classes

        self.max_containers_per_machine = max_containers_per_machine
        self.max_scans_per_container = max_scans_per_container

        self._container_lock = RWLock()
        
        # one RWLock per engine name; engines are instantiated here only to
        # obtain their names
        self._images_lock = dict(map(lambda engine: (engine.name, RWLock()), list(map(lambda engine_class: engine_class(self.cfg_parser), engine_classes))))

        self.containers = []
        self.DOCKER_NETWORK_NO_INTERNET_NAME = self.cfg_parser.gets("MULTIAV", "DOCKER_NETWORK_NO_INTERNET_NAME", DOCKER_NETWORK_NO_INTERNET_NAME_DEFAULT)
        self.DOCKER_NETWORK_INTERNET_NAME = self.cfg_parser.gets("MULTIAV", "DOCKER_NETWORK_INTERNET_NAME", DOCKER_NETWORK_INTERNET_NAME_DEFAULT)

        # startup logic
        if enable_startup_logic:
            self.pull_all_containers()
            self.setup_networks()
            self.remove_running_containers()
Example #2
0
class TweetWorker(object):
    """Spawns worker threads that process usernames and one consumer
    thread that forwards results to a callback."""

    def __init__(self, n, callback):
        """Start *n* daemon worker threads plus one consumer thread.

        :param n: number of worker threads to spawn
        :param callback: callable the consumer invokes for each result
        """
        self.processing_users = []
        self.lock = RWLock()
        # NOTE(review): self.jobs is never used -- workers read from the
        # module-level job_queue instead; confirm whether this is intended.
        self.jobs = ThreadQueue.Queue()
        self.results = ThreadQueue.Queue()
        self.processes = []
        for _ in range(0, n):
            s = Thread(target=worker, args=(job_queue, self.results))
            self.processes.append(s)
            s.daemon = True
            s.start()
        print('Started {} worker processes'.format(len(self.processes)))

        self.consumer = Thread(target=consumer_process,
                               args=(self.results, callback))
        self.consumer.daemon = True
        self.consumer.start()
        print('Started consumer process')

    def get(self, username):
        """Queue *username* for processing unless it is already queued.

        Bug fixed: the membership check and the append now happen under a
        single write lock. The original checked under a read lock, released
        it, then re-acquired a write lock to append -- two threads could
        both pass the check and enqueue the same user (check-then-act race).
        """
        if username is None:
            return

        self.lock.acquire_write()
        try:
            if username in self.processing_users:
                return
            self.processing_users.append(username)
        finally:
            self.lock.release()
        job_queue.put(username)
        return
Example #3
0
 def __init__(self,
              model,
              master='127.0.0.1:5000',
              min_updates=0,
              max_updates=4096):
     """DeepDist - Distributed deep learning.

     Stores the model together with the serving/update bookkeeping; the
     actual training happens on the workers.

     :param model: provide a model that can be trained in parallel on the workers
     :param master: "host:port" address of the master
     :param min_updates: lower bound on updates per step
     :param max_updates: upper bound on updates per step
     """
     # core state
     self.model = model
     self.master = master
     self.min_updates = min_updates
     self.max_updates = max_updates

     # synchronization and serving counters
     self.lock = RWLock()
     self.state = 'serving'
     self.served = 0
     self.received = 0
     self.pmodel = None

     # identity descent by default; replaced by the caller
     self.descent = lambda model, gradient: model

     print("THIS IS THE MASTER")
     print(self.master)
     print("\n")
     print("THIS IS THE MODEL 1")
     print(self.model)
     print("\n")
Example #4
0
class TweetWorker(object):
    """Spawns worker threads that process usernames and one consumer
    thread that forwards results to a callback."""

    def __init__(self, n, callback):
        """Start *n* daemon worker threads plus one consumer thread.

        :param n: number of worker threads to spawn
        :param callback: callable the consumer invokes for each result
        """
        self.processing_users = []
        self.lock = RWLock()
        # NOTE(review): self.jobs is never used -- workers read from the
        # module-level job_queue instead; confirm whether this is intended.
        self.jobs = ThreadQueue.Queue()
        self.results = ThreadQueue.Queue()
        self.processes = []
        for _ in range(0, n):
            s = Thread(target=worker, args=(job_queue, self.results))
            self.processes.append(s)
            s.daemon = True
            s.start()
        print('Started {} worker processes'.format(len(self.processes)))

        self.consumer = Thread(target=consumer_process, args=(self.results, callback))
        self.consumer.daemon = True
        self.consumer.start()
        print('Started consumer process')

    def get(self, username):
        """Queue *username* for processing unless it is already queued.

        Bug fixed: the membership check and the append now happen under a
        single write lock. The original checked under a read lock, released
        it, then re-acquired a write lock to append -- two threads could
        both pass the check and enqueue the same user (check-then-act race).
        """
        if username is None:
            return

        self.lock.acquire_write()
        try:
            if username in self.processing_users:
                return
            self.processing_users.append(username)
        finally:
            self.lock.release()
        job_queue.put(username)
        return
Example #5
0
    def __init__(self, key_length=SHORT_KEY_LENGTH):
        """
        Create an instance of the class.

        :param key_length: length of dictionary key
        """
        self.key_length = key_length
        self.read_write_lock = RWLock()
        # redis connection parameters come from module-level constants
        self.key_to_url = redis.Redis(
            host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
Example #6
0
    def __init__(self, num_threads, workers_maxsize = 0):
        """Create the pool and start *num_threads* workers immediately.

        :param num_threads: number of workers started now (also the
            minimum kept alive)
        :param workers_maxsize: upper bound on workers (0 = unbounded)
        """
        self.workers_maxsize = workers_maxsize
        self.min_threads = num_threads
        self.tasks = Queue()
        self.workers = []

        self._tasks_lock = RWLock()
        self._worker_lock = RWLock()

        # populate the worker list under the write lock
        with self._worker_lock.writer_lock:
            self.workers.extend(
                Worker(self.tasks, self._stop_worker_callback)
                for _ in range(num_threads))
Example #7
0
    def __init__(self, logger, host, port):
        """Master node: serves RPC methods over XML-RPC and replicates
        its counters/volume state across the cluster via SyncObj (raft).

        :param logger: logger used by the master
        :param host: address the XML-RPC server binds to
        :param port: port the XML-RPC server binds to
        """
        cfg = SyncObjConf()
        cfg.fullDumpFile = 'raft.bin'
        cfg.logCompactionMinTime = 10
        cfg.useFork = True

        self.serv = ThreadXMLRPCServer(
            (host, port),
            logRequests=True)

        # expose every method listed in _rpc_methods over XML-RPC
        for name in self._rpc_methods:
            self.serv.register_function(getattr(self, name))

        self.logger = logger
        self.host = host
        self.port = port

        self.lock = RWLock()

        self.act_vol_serv = dict()
        self.writable_vid = ReplList() # writable vids

        self.vid = ReplCounter()
        self.fkey = ReplCounter()
        self.db = ReplDict()

        # replicated objects are passed as consumers; addresses come from
        # the module-level config object
        super(Master, self).__init__(config.addr, config.clusters, cfg, consumers=[self.vid, self.fkey, self.db, self.writable_vid])
	def __init__(self, server_address, RequestHandlerClass, clients, debug, security):
		"""Threading TCP server that tracks its connected clients.

		:param server_address: (host, port) tuple to bind
		:param RequestHandlerClass: handler class used per connection
		:param clients: shared dict of connected clients
		:param debug: debug flag made available to handlers
		:param security: security flag made available to handlers
		"""
		# allow fast rebinds after a restart
		self.allow_reuse_address = True
		SocketServer.ThreadingTCPServer.__init__(self,server_address,RequestHandlerClass)
		self.clients = clients
		#self.arg2 = arg2
		self.rwlock = RWLock()
		self.debug = debug
		self.security = security
Example #9
0
 def __init__(self, max_containers_per_machine, max_scans_per_container):
     """Represents a scan machine with a random uuid1-based id.

     :param max_containers_per_machine: container limit on this machine
     :param max_scans_per_container: concurrent scan limit per container
     """
     self.id = str(uuid.uuid1())
     self._lock = RWLock()
     self.max_containers_per_machine = max_containers_per_machine
     self.max_scans_per_container = max_scans_per_container
     # runtime state, filled in later
     self.ip = []
     self.networks = []
     self.containers = []
Example #10
0
 def __init__(self, ip, port, engine, max_scans_per_container, machine):
     """Represents one scan container running a single engine.

     :param ip: address the container is reachable at
     :param port: port the container listens on
     :param engine: engine instance running inside the container
     :param max_scans_per_container: concurrent scan limit
     :param machine: machine object hosting this container
     """
     self.id = str(uuid.uuid1())
     self._lock = RWLock()
     self._machine = machine
     self.max_scans_per_container = max_scans_per_container
     # endpoint and engine
     self.ip = ip
     self.port = port
     self.engine = engine
     # currently running scans
     self.scans = []
Example #11
0
 def __init__(self, tasks, stop_callback=None):
     """Worker thread that pulls from *tasks*; starts itself immediately.

     :param tasks: queue of tasks to execute
     :param stop_callback: optional callable invoked when the worker stops
     """
     threading.Thread.__init__(self)
     self.tasks = tasks
     self.daemon = True
     self._lock = RWLock()
     self.working = False
     self.marked_for_removal = False
     self._stop_callback = stop_callback
     # start() must stay last: the thread may read any attribute above
     # as soon as it begins running
     self.start()
Example #12
0
    def __init__(self, n, callback):
        """Start *n* daemon worker threads plus one consumer thread.

        :param n: number of worker threads to spawn
        :param callback: callable the consumer invokes for each result
        """
        self.processing_users = []
        self.lock = RWLock()
        # NOTE(review): self.jobs is never used here -- workers read from
        # the module-level job_queue; confirm whether this is intended.
        self.jobs = ThreadQueue.Queue()
        self.results = ThreadQueue.Queue()
        self.processes = []
        for _ in range(0, n):
            s = Thread(target=worker, args=(job_queue, self.results))
            self.processes.append(s)
            s.daemon = True
            s.start()
        print('Started {} worker processes'.format(len(self.processes)))

        self.consumer = Thread(target=consumer_process,
                               args=(self.results, callback))
        self.consumer.daemon = True
        self.consumer.start()
        print('Started consumer process')
Example #13
0
    def __init__(self, config_name, initial_scan_time_average=20):
        """Base scanner strategy: tracks a running scan-time average and
        reads its configuration.

        :param config_name: name of the configuration to read
        :param initial_scan_time_average: seed value in seconds for the
            running scan-time average
        """
        self._config_name = config_name
        self._event_subscribers = dict()

        # statistics in seconds; tuple is presumably (sample_count, average)
        # -- confirm against _get_average_scan_time
        self.scan_time_average = (1, initial_scan_time_average)
        self._scan_time_lock = RWLock()
        self._get_average_scan_time()

        self._read_config()
Example #14
0
 def __init__(self, model, host='127.0.0.1:5000'):
     """DeepDist - Distributed deep learning.

     :param model: provide a model that can be trained in parallel on the workers
     :param host: "host:port" address served by this instance
     """
     # core state
     self.model = model
     self.host = host

     # synchronization and serving counters
     self.lock = RWLock()
     self.state = 'serving'
     self.served = 0
     self.received = 0

     # identity descent by default; replaced by the caller
     self.descent = lambda model, gradient: model
Example #15
0
    def __init__(self,
                 config_name,
                 max_machines,
                 max_containers_per_machine,
                 max_scans_per_container,
                 min_machines=1,
                 initial_scan_time_average=20,
                 expected_machine_startup_time=130,
                 minimal_machine_run_time=480):
        """Auto-scaling docker scan strategy.

        :param config_name: forwarded to ScannerStrategy
        :param max_machines: upper bound of machines to scale to
        :param max_containers_per_machine: container limit per machine
        :param max_scans_per_container: concurrent scan limit per container
        :param min_machines: machines kept running at all times
        :param initial_scan_time_average: seed in seconds for the scan-time
            average (forwarded to ScannerStrategy)
        :param expected_machine_startup_time: seconds a new machine is
            expected to need before it can serve scans
        :param minimal_machine_run_time: minimum seconds a machine is kept
            alive before being scaled down
        """
        ScannerStrategy.__init__(self, config_name, initial_scan_time_average)
        # variables
        self.expected_machine_startup_time = expected_machine_startup_time
        self.minimal_machine_run_time = minimal_machine_run_time
        self.min_machines = min_machines

        # sample copying to worker nodes
        self._scanning_samples = dict()
        self._workers_mounted_storage_lock = RWLock()

        # locks
        self._machine_lock = RWLock()
        self._worker_lock = RWLock()
        self._machines_starting = dict()  # Event = amount of workers waiting

        # use thread pool to handle overload when maxed out scaling => tasks will stay in queue
        self._min_workers = min_machines * max_containers_per_machine * max_scans_per_container
        self._max_workers = max_machines * max_containers_per_machine * max_scans_per_container
        self.pool = PromiseExecutorPool(self._min_workers, self._max_workers)
        print(
            "AutoScaleDockerStrategy: initialized thread pool using {0} threads (max: {1})"
            .format(self._min_workers, self._max_workers))

        # machine vars
        self._machines = []
        self.max_machines = max_machines
        self.max_containers_per_machine = max_containers_per_machine
        self.max_scans_per_container = max_scans_per_container
        print(
            "AutoScaleDockerStrategy: initialized using min_machines: {0} max_machines: {1} max_containers_per_machine: {2} max_scans_per_container: {3}"
            .format(min_machines, max_machines, max_containers_per_machine,
                    max_scans_per_container))
Example #16
0
    def __init__(self, logger, host, port):
        """Volume node: loads its persisted vdb/fdb databases and exposes
        the methods listed in _rpc_methods over XML-RPC.

        :param logger: logger used by this node
        :param host: address the XML-RPC server binds to
        :param port: port the XML-RPC server binds to
        """
        self.logger = logger
        self.host = host
        self.port = port

        self.lock = RWLock()

        self.vdb = dict()
        self.fdb = dict()
        # load persisted databases; use context managers so the file
        # handles are closed (the original leaked them via bare open())
        if os.path.isfile('vdb'):
            with open('vdb', 'rb') as vdb_file:
                self.vdb = pickle.load(vdb_file)
        if os.path.isfile('fdb'):
            with open('fdb', 'rb') as fdb_file:
                self.fdb = pickle.load(fdb_file)

        self.act_mst_serv = list()

        self.serv = ThreadXMLRPCServer((self.host, self.port),
                                       logRequests=True)

        # expose every method listed in _rpc_methods over XML-RPC
        for name in self._rpc_methods:
            self.serv.register_function(getattr(self, name))
Example #17
0
class Solution(object):
    """Thread-safe store of action samples and their average times."""

    def __init__(self):
        self.database = {}
        self.lock = RWLock()

    def add_action(self, action):
        """
        Adds a new action to the database.

        :param action: string formatted json object. Ex. '{"action":"jump", "time":100}'
        :return: None
        """
        parsed = json.loads(action)
        name = parsed.get('action', '')
        duration = parsed.get('time', 0)

        # mutate the database under the write lock
        with self.lock.w_locked():
            try:
                self.database[name].add(duration)
            except KeyError:
                self.database[name] = Action(name, duration)

    def get_stats(self):
        """
        Creates a string formatted json object of all actions in self.database, along with the average time.

        :return: string formatted json object
        """
        # snapshot the averages under the read lock, serialize outside it
        with self.lock.r_locked():
            stats = [
                {"action": name, "avg": entry.get_average()}
                for name, entry in self.database.items()
            ]

        return json.dumps(stats)
Example #18
0
 def __init__(self, model, master='127.0.0.1:5000', min_updates=0, max_updates=4096):
     """DeepDist - Distributed deep learning.

     :param model: provide a model that can be trained in parallel on the workers
     :param master: "host:port" address of the master
     :param min_updates: lower bound on updates per step
     :param max_updates: upper bound on updates per step
     """
     # core state
     self.model = model
     self.master = master
     self.min_updates = min_updates
     self.max_updates = max_updates

     # synchronization and serving counters
     self.lock = RWLock()
     self.state = 'serving'
     self.served = 0
     self.received = 0
     self.pmodel = None

     # identity descent by default; replaced by the caller
     self.descent = lambda model, gradient: model
Example #19
0
class ChatMessageStore:
    """Thread-safe message store assigning monotonically increasing ids."""

    def __init__(self):
        self.__id = 1
        self.__storage = []
        self.__rwlock = RWLock()

    def add_message(self, message):
        """Append *message* under the next id (write-locked).

        Robustness fix: release the lock in a finally block so it cannot
        be left held if the append or increment raises.
        """
        self.__rwlock.acquire_write()
        try:
            self.__storage.append((self.__id, message))
            self.__id += 1
        finally:
            self.__rwlock.release()

    def get_messages(self, start_id=0):
        """Return ([messages with id >= start_id], next_id) (read-locked).

        Robustness fix: release the lock in a finally block so an
        exception while building the result cannot leak the read lock.
        """
        self.__rwlock.acquire_read()
        try:
            result = ([message for id_, message in self.__storage
                       if id_ >= start_id], self.__id)
        finally:
            self.__rwlock.release()

        return result
Example #20
0
class Beast(SmartersProvider):
    """Beast provider implementation."""

    __slots__ = []

    _do_reduce_hls_stream_delay = True
    _do_reduce_hls_stream_delay_lock = RWLock()
    _provider_name = BeastConstants.PROVIDER_NAME.lower()

    @classmethod
    def _generate_playlist_m3u8_static_track_url(cls, track_information, **kwargs):
        """Append the static track URL for the requested channel/protocol
        to *track_information*."""
        channel_number = kwargs['channel_number']
        playlist_protocol = kwargs['playlist_protocol']

        username = Configuration.get_configuration_parameter('BEAST_USERNAME')
        password = SecurityManager.decrypt_password(
            Configuration.get_configuration_parameter('BEAST_PASSWORD')).decode()

        provider_map_class = ProvidersController.get_provider_map_class(
            cls._provider_name)
        base_url = provider_map_class.constants_class().BASE_URL

        # hls streams live under live/ and use the .m3u8 extension
        if playlist_protocol == 'hls':
            path_prefix = 'live/'
            url_suffix = '.m3u8'
        else:
            path_prefix = ''
            url_suffix = ''

        track_information.append(
            '{0}{1}{2}/{3}/{4}{5}\n'.format(
                base_url, path_prefix, username, password, channel_number,
                url_suffix))

    @classmethod
    def _initialize(cls, **kwargs):
        """No provider-specific initialization needed."""
        pass

    @classmethod
    def _initialize_class_variables(cls):
        """Read the optional reduce-delay setting; a missing key keeps the default."""
        try:
            cls.set_do_reduce_hls_stream_delay(
                OptionalSettings.get_optional_settings_parameter('reduce_beast_delay'))
        except KeyError:
            pass

    @classmethod
    def _retrieve_fresh_authorization_token(cls):
        """Beast does not use authorization tokens."""
        return None

    @classmethod
    def _terminate(cls, **kwargs):
        """No provider-specific teardown needed."""
        pass
def main():
    """Start one writer and four reader threads and wait for all of them.

    Bug fixed: the reader loop appended the loop index ``i`` to *hilos*
    instead of the thread object, so the final loop crashed calling
    ``.join()`` on an int.
    """
    partido = []
    hilos = []
    lock = RWLock()

    escritor = Escritor(partido, lock)
    hilos.append(escritor)
    escritor.start()

    for _ in range(4):
        lector = Lector(partido, lock)
        hilos.append(lector)  # was: hilos.append(i) -- joined an int
        lector.start()

    for hilo in hilos:
        hilo.join()
Example #22
0
    def __init__(self, n, callback):
        """Start *n* daemon worker threads plus one consumer thread.

        :param n: number of worker threads to spawn
        :param callback: callable the consumer invokes for each result
        """
        self.processing_users = []
        self.lock = RWLock()
        # NOTE(review): self.jobs is never used here -- workers read from
        # the module-level job_queue; confirm whether this is intended.
        self.jobs = ThreadQueue.Queue()
        self.results = ThreadQueue.Queue()
        self.processes = []
        for _ in range(0, n):
            s = Thread(target=worker, args=(job_queue, self.results))
            self.processes.append(s)
            s.daemon = True
            s.start()
        print('Started {} worker processes'.format(len(self.processes)))

        self.consumer = Thread(target=consumer_process, args=(self.results, callback))
        self.consumer.daemon = True
        self.consumer.start()
        print('Started consumer process')
Example #23
0
class DarkMediaDatabase(ProviderDatabase):
    """Database bindings for the DarkMedia provider."""

    __slots__ = []

    _access_lock = RWLock()
    _database_file_path = None
    _engine = None
    _provider_name = DarkMediaConstants.PROVIDER_NAME.lower()
    _session_factory = None
    _temporary_database_file_path = None
    _temporary_engine = None
    _temporary_session_factory = None
    _write_lock = RLock()

    @classmethod
    def _migrate(cls, old_db_session, new_db_session):
        """DarkMedia keeps no rows that need migrating."""
        pass

    @classmethod
    def create_session(cls):
        """Return a new session from the configured session factory."""
        return cls._session_factory()

    @classmethod
    def initialize(cls):
        """Resolve the database file path, then create the schema."""
        database_directory = os.path.dirname(Database.get_database_file_path())
        cls._database_file_path = os.path.join(
            database_directory, DarkMediaConstants.DB_FILE_NAME)

        super().initialize()

        Base.metadata.create_all(cls._engine)

    @classmethod
    def initialize_temporary(cls):
        """Resolve the temporary database file path, then create the schema."""
        database_directory = os.path.dirname(Database.get_database_file_path())
        cls._temporary_database_file_path = os.path.join(
            database_directory, DarkMediaConstants.TEMPORARY_DB_FILE_NAME)

        super().initialize_temporary()

        Base.metadata.create_all(cls._temporary_engine)
Example #24
0
class SmoothStreamsDatabase(ProviderDatabase):
    """Database bindings for the SmoothStreams provider."""

    __slots__ = []

    _access_lock = RWLock()
    _database_file_path = None
    _engine = None
    _provider_name = SmoothStreamsConstants.PROVIDER_NAME.lower()
    _session_factory = None
    _temporary_database_file_path = None
    _temporary_engine = None
    _temporary_session_factory = None
    _write_lock = RLock()

    @classmethod
    def _migrate(cls, old_db_session, new_db_session):
        """Carry the persisted 'session' setting over to the new database."""
        setting_row = SmoothStreamsDatabaseAccess.query_setting(
            old_db_session, 'session')

        if setting_row is None:
            return

        new_db_session.merge(setting_row)

    @classmethod
    def initialize(cls):
        """Resolve the database file path, then create the schema."""
        database_directory = os.path.dirname(Database.get_database_file_path())
        cls._database_file_path = os.path.join(
            database_directory, SmoothStreamsConstants.DB_FILE_NAME)

        super().initialize()

        Base.metadata.create_all(cls._engine)

    @classmethod
    def initialize_temporary(cls):
        """Resolve the temporary database file path, then create the schema."""
        database_directory = os.path.dirname(Database.get_database_file_path())
        cls._temporary_database_file_path = os.path.join(
            database_directory, SmoothStreamsConstants.TEMPORARY_DB_FILE_NAME)

        super().initialize_temporary()

        Base.metadata.create_all(cls._temporary_engine)
Example #25
0
def lector(partido, name, lock=None):
    """Repeatedly read and log a random match result from *partido*.

    :param partido: shared list of match tuples (team, goals, team, goals)
    :param name: reader name used in log output
    :param lock: optional shared RWLock. The original created a private
        RWLock per reader, which synchronized nothing; pass the writer's
        lock to get real reader/writer exclusion. Defaults to a private
        lock, preserving the old behavior for existing callers.
    """
    llave_Lector = lock if lock is not None else RWLock()

    while True:
        llave_Lector.r_acquire()
        try:
            logging.info(
                '%s', name +
                ':  El resultado fue: {0[0]} {0[1]} - {0[2]} {0[3]} '.format(
                    random.choice(partido)))
        finally:
            llave_Lector.r_release()
            time.sleep(random.randint(1, 2))
Example #26
0
class Node_Db_Context(object):
    """Per-node shelve-backed block store guarded by a reader/writer lock.

    NOTE: Python 2 code (print statements, dict.has_key).
    """
    def __init__(self, node_info):
        """
        :param node_info: node descriptor; its key names the db file
        """
        self.sync = RWLock()
        self.db = "db/" + str(node_info.key) + ".db"
        self.node_info = node_info
        self.own_start = None
        self.own_end = None

        #self.back_thread = threading.Thread(target=lambda:  self.backup_loop(self))
        #self.back_thread.daemon = True
        #self.back_thread.start()

    def write(self, hash_name, content):
        """Store *content* under *hash_name* (writer-locked)."""
        self.sync.writer_acquire()
        try:
            records = shelve.open(self.db)
            records[hash_name] = content
            records.close()
            print str(self.node_info) + " write block: " + hash_name
        finally:
            self.sync.writer_release()


    def read(self, hash_name):
        """Return the content stored under *hash_name*, or the string
        "404 Error" when the block is missing (reader-locked)."""
        content = None
        self.sync.reader_acquire()
        try:
            records = shelve.open(self.db)
            if not records.has_key(hash_name):
                print str(self.node_info) + " read block: " + hash_name + " FAILED!!"
                content = "404 Error"
            else:
                print str(self.node_info) + " read block: " + hash_name
                content = records[hash_name]    #this retrieved COPY OF CONTENT
            records.close()
        finally:
            self.sync.reader_release()
        return content
Example #27
0
class CrystalClearEPG(XStreamCodesProviderEPG):
    """EPG state for the CrystalClear provider.

    All state is class-level; each mutable map is paired with its own
    RWLock.
    """

    __slots__ = []

    # channel groups keyed by 'name' and 'number'
    _channel_group_map = OrderedDict([('name', OrderedDict()), ('number', OrderedDict())])
    _channel_group_map_lock = RWLock()
    _channel_name_map = OrderedDict()
    _channel_name_map_lock = RWLock()
    _do_use_provider_icons = False
    _do_use_provider_icons_lock = RWLock()
    # channels excluded from output, keyed by 'name' and 'number'
    _ignored_channels = OrderedDict([('name', OrderedDict()), ('number', OrderedDict())])
    _ignored_channels_lock = RWLock()
    _ignored_m3u8_groups = []
    _ignored_m3u8_groups_lock = RWLock()
    _lock = RLock()
    _m3u8_group_map = OrderedDict()
    _m3u8_group_map_lock = RWLock()
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    _refresh_epg_timer = None
    # daily EPG refresh times (HH:MM:SS)
    _update_times = ['06:00:00']
    _update_times_lock = RWLock()
	def __init__(self, configDic):
		"""
		Init Controler class.

		Starts the TCP comm server (killing any process already bound to
		the configured port before a second bind attempt) and a keepalive
		timer firing every 60 seconds.

		:param configDic: dict with "log", "debug", "security",
			"listen_ip" and "listen_port" entries
		"""
		self.running = True
		self.config = configDic
		self.log_file = self.config["log"]
		self.debug = self.config["debug"]
		self.security = self.config["security"]
		self.clients = {}
		self.lock = RWLock()
		## Start TCP comm server ##
		listen_ip = self.config["listen_ip"]
		listen_port = self.config["listen_port"]
		try:
			self.server = tcpServer((listen_ip,listen_port), handleConnection, self.clients, self.debug, self.security )
		except Exception:
			self.log_error("Unable to bind TCP socket %s:%s !" % (listen_ip,listen_port))
			proc = subprocess.Popen(["ss", "-pant"], stdout=subprocess.PIPE)
			code = proc.wait()
			for aline in proc.stdout:
				if (str(listen_ip)+':'+str(listen_port)) in aline and "LISTEN" in aline:
					# Fixed: parentheses must be escaped in the regex (the
					# original '.*(' / ').*' patterns were invalid), and
					# re.sub() needs an explicit replacement argument (the
					# original pid line was missing it entirely).
					tmpstr1 = re.sub(r'\).*', '', re.sub(r'.*\(', '', aline))
					pid = re.sub(',.*', '', re.sub('.*pid=', '', tmpstr1))
					prog = re.sub('.*"', '', re.sub('",.*', '', aline))
					self.log_warning("Process %s, PID %s, is binding port %s. It will be killed." % (prog, pid, listen_port))
					os.system("kill -9 %s" % pid)
		
			time.sleep(10)
			self.log_info("Trying again to bind %s on %s." % (listen_port, listen_ip))
			self.server = tcpServer((listen_ip,listen_port), handleConnection, self.clients, self.debug, self.security )

		self.comm_thread = threading.Thread(target=self.server.serve_forever)
		self.comm_thread.daemon = True
		self.comm_thread.start()
		##### Send a keepalive message every minutes (60 sec) ##
		self.keepalive = KeepAliveTimer(60, self.send_keepalive, ["KeepAliveTimer"])
		self.keepalive.start()
Example #29
0
def escritor(partido, name, lock=None):
    """Repeatedly append a random match result to *partido* and log it.

    :param partido: shared list the result tuples are appended to
    :param name: writer name used in log output
    :param lock: optional shared RWLock. The original created a private
        RWLock per writer, which synchronized nothing; pass the same lock
        to readers and writers for real exclusion. Defaults to a private
        lock, preserving the old behavior for existing callers.
    """
    llave_Escritor = lock if lock is not None else RWLock()

    while True:
        llave_Escritor.w_acquire()
        try:
            # pick two distinct teams from the module-level equipos list
            equipo_1 = random.choice(equipos)
            equipo_2 = random.choice(equipos)
            while equipo_1 == equipo_2:
                equipo_2 = random.choice(equipos)
            partido.append(
                (equipo_1, random.randint(0,
                                          3), equipo_2, random.randint(0, 3)))
            logging.info("Partido actualizado por: %s", name)
        finally:
            llave_Escritor.w_release()
            time.sleep(random.randint(1, 2))
Example #30
0
class ProvidersController:
    """Coordinates initialization and termination of IPTV providers.

    All state is class-level; the _run loop drains the pending/executing
    dictionaries in the background.
    """

    __slots__ = []

    # sorted list of active provider names; guarded by _active_providers_lock
    _active_providers = []
    _active_providers_lock = RWLock()
    # provider_name -> {'api_class_event', 'epg_class_event'} while init/termination runs
    _providers_executing_initialization = {}
    _providers_executing_termination = {}
    _providers_initialization_termination_lock = RLock()
    # provider_name -> provider map class, filled by _initialize_providers_map_class
    _providers_map_class = {}
    # provider_name -> kwargs of requests not yet started by _run
    _providers_pending_initialization = {}
    _providers_pending_termination = {}
    _wait_event = Event()
    _stop_event = Event()

    @classmethod
    def _initialize_providers_map_class(cls):
        """Discover provider packages and register their map classes.

        Walks the sibling packages of this module, imports each package's
        ``map`` module, initializes every class defined there and indexes
        it in _providers_map_class under the lowercased api_class name.
        """
        for module_info in pkgutil.walk_packages(
                path=sys.modules[__name__].__path__, onerror=lambda x: None):
            if module_info.name != 'iptv_provider' and module_info.ispkg:
                map_module_path = '{0}.{1}.{2}'.format(__name__,
                                                       module_info.name, 'map')

                importlib.import_module(map_module_path)

                # only consider classes actually defined in the map module
                for (_, class_) in inspect.getmembers(
                        sys.modules[map_module_path], inspect.isclass):
                    if map_module_path in '{0}'.format(class_):
                        class_.initialize()

                        for (method_name, method) in inspect.getmembers(
                                class_, inspect.ismethod):
                            if method_name == 'api_class':
                                cls._providers_map_class[
                                    method().__name__.lower()] = class_

    @classmethod
    def _run(cls):
        """Background loop driving provider initialization/termination.

        Moves providers through pending -> executing -> done for both
        initialization and termination, keeping _active_providers in sync,
        until _stop_event is set (then terminates everything and exits).
        """
        while True:
            with cls._providers_initialization_termination_lock:
                # drop terminations whose api and epg threads both finished
                providers_executed_termination = []

                for provider_name in cls._providers_executing_termination:
                    if (cls._providers_executing_termination[provider_name]
                        ['api_class_event'].is_set() and
                            cls._providers_executing_termination[provider_name]
                        ['epg_class_event'].is_set()):
                        providers_executed_termination.append(provider_name)

                for provider_name in providers_executed_termination:
                    del cls._providers_executing_termination[provider_name]

                # drop initializations whose api and epg threads both finished
                providers_executed_initialization = []

                for provider_name in cls._providers_executing_initialization:
                    if (cls._providers_executing_initialization[provider_name]
                        ['api_class_event'].is_set()
                            and cls._providers_executing_initialization[
                                provider_name]['epg_class_event'].is_set()):
                        providers_executed_initialization.append(provider_name)

                for provider_name in providers_executed_initialization:
                    del cls._providers_executing_initialization[provider_name]

                # start pending terminations (unless still initializing)
                providers_executing_termination = []

                for provider_name in cls._providers_pending_termination:
                    if provider_name not in cls._providers_executing_initialization:
                        logger.debug(
                            'Terminating %s',
                            cls._providers_map_class[provider_name].api_class(
                            ).__name__,
                        )

                        with cls._active_providers_lock.writer_lock:
                            cls._active_providers.remove(provider_name)

                        cls._providers_executing_termination[provider_name] = {
                            'api_class_event': Event(),
                            'epg_class_event': Event(),
                        }

                        # each thread signals its event when terminate() is done
                        api_class_thread = Thread(
                            target=cls._providers_map_class[provider_name].
                            api_class().terminate,
                            kwargs={
                                **cls._providers_pending_termination[provider_name],
                                'event':
                                cls._providers_executing_termination[
                                    provider_name]['api_class_event'],
                            },
                        )
                        api_class_thread.daemon = True
                        api_class_thread.start()

                        epg_class_thread = Thread(
                            target=cls._providers_map_class[provider_name].
                            epg_class().terminate,
                            kwargs={
                                **cls._providers_pending_termination[provider_name],
                                'event':
                                cls._providers_executing_termination[
                                    provider_name]['epg_class_event'],
                            },
                        )
                        epg_class_thread.daemon = True
                        epg_class_thread.start()

                        providers_executing_termination.append(provider_name)

                for provider_name in providers_executing_termination:
                    del cls._providers_pending_termination[provider_name]

                # start pending initializations (unless already init/terminating)
                providers_executing_initialization = []

                for provider_name in cls._providers_pending_initialization:
                    if (provider_name
                            not in cls._providers_executing_initialization
                            and provider_name
                            not in cls._providers_executing_termination):
                        logger.debug(
                            'Reinitializing %s',
                            cls._providers_map_class[provider_name].api_class(
                            ).__name__,
                        )

                        with cls._active_providers_lock.writer_lock:
                            bisect.insort(cls._active_providers, provider_name)

                        cls._providers_executing_initialization[
                            provider_name] = {
                                'api_class_event': Event(),
                                'epg_class_event': Event(),
                            }

                        # each thread signals its event when initialize() is done
                        api_class_thread = Thread(
                            target=cls._providers_map_class[provider_name].
                            api_class().initialize,
                            kwargs={
                                **cls._providers_pending_initialization[provider_name],
                                'event':
                                cls._providers_executing_initialization[
                                    provider_name]['api_class_event'],
                            },
                        )
                        api_class_thread.daemon = True
                        api_class_thread.start()

                        epg_class_thread = Thread(
                            target=cls._providers_map_class[provider_name].
                            epg_class().initialize,
                            kwargs={
                                **cls._providers_pending_initialization[provider_name],
                                'event':
                                cls._providers_executing_initialization[
                                    provider_name]['epg_class_event'],
                            },
                        )
                        epg_class_thread.daemon = True
                        epg_class_thread.start()

                        providers_executing_initialization.append(
                            provider_name)

                for provider_name in providers_executing_initialization:
                    del cls._providers_pending_initialization[provider_name]

                cls._wait_event.clear()

            # poll while work is in flight; otherwise block until woken
            if any((
                    cls._providers_executing_initialization,
                    cls._providers_executing_termination,
                    cls._providers_pending_initialization,
                    cls._providers_pending_termination,
            )):
                time.sleep(1)
            else:
                cls._wait_event.wait()

            if cls._stop_event.is_set():
                # synchronous shutdown of everything still active
                with cls._active_providers_lock.reader_lock:
                    for provider_name in cls._active_providers[:]:
                        logger.debug(
                            'Terminating %s',
                            cls._providers_map_class[provider_name].api_class(
                            ).__name__,
                        )

                        cls._providers_map_class[provider_name].api_class(
                        ).terminate(Event())
                        cls._providers_map_class[provider_name].epg_class(
                        ).terminate(Event())

                        cls._active_providers.remove(provider_name)

                break

    @classmethod
    def get_active_providers(cls):
        """Return an immutable snapshot of the active provider names."""
        with cls._active_providers_lock.reader_lock:
            snapshot = tuple(cls._active_providers)

        return snapshot

    @classmethod
    def get_active_providers_map_class(cls):
        """Return a dict mapping each active provider name to its map class."""
        with cls._active_providers_lock.reader_lock:
            return {
                provider_name: cls._providers_map_class[provider_name]
                for provider_name in cls._active_providers
            }

    @classmethod
    def get_active_provider_map_class(cls, provider_name):
        """Return the map class for provider_name if it is currently active.

        Raises ProviderNotFoundError (after logging) when the provider is
        not in the active set.
        """
        with cls._active_providers_lock.reader_lock:
            is_active = provider_name in cls._active_providers

            if is_active:
                return cls._providers_map_class[provider_name]

        logger.error('Provider %s is inactive', provider_name)

        raise ProviderNotFoundError

    @classmethod
    def get_providers_map_class(cls):
        """Return a shallow copy of the provider-name -> map-class mapping."""
        providers_map_class_snapshot = copy.copy(cls._providers_map_class)

        return providers_map_class_snapshot

    @classmethod
    def get_provider_map_class(cls, provider_name):
        """Look up the map class registered under provider_name.

        Raises KeyError for an unknown provider name.
        """
        provider_map_class = cls._providers_map_class[provider_name]

        return provider_map_class

    @classmethod
    def initialize(cls):
        """Build the provider map, log the supported providers, initialize
        each provider's database, and start the background controller thread.
        """
        cls._initialize_providers_map_class()

        message_to_log = ['Supported IPTV providers']

        for (index, provider_name) in enumerate(
                sorted(cls._providers_map_class), start=1):
            provider_map_class = cls._providers_map_class[provider_name]

            message_to_log.append('{0:02} - {1}'.format(
                index,
                provider_map_class.constants_class().PROVIDER_NAME,
            ))

            provider_map_class.database_class().initialize()

        logger.info('\n'.join(message_to_log))

        # The controller loop (_run) services pending provider
        # initializations/terminations until the stop event is set.
        providers_controller_thread = Thread(target=cls._run)
        providers_controller_thread.daemon = True
        providers_controller_thread.start()

    @classmethod
    def initialize_provider(cls, provider_name, **kwargs):
        """Queue provider_name for (re)initialization and wake the controller.

        kwargs are forwarded to the provider's initialize routines.
        """
        with cls._providers_initialization_termination_lock:
            # A pending termination is superseded by this reinitialization.
            cls._providers_pending_termination.pop(provider_name, None)

            cls._providers_pending_initialization[provider_name] = kwargs

            cls._wait_event.set()

    @classmethod
    def initialize_providers(cls, active_providers):
        """Initialize the API and EPG singletons of every provider in
        active_providers, recording each successful one as active.

        Failures are logged with a full traceback and the provider is
        skipped; nothing is raised to the caller.
        """
        for provider_name in active_providers:
            logger.debug(
                'Initializing %s',
                cls._providers_map_class[provider_name].api_class().__name__,
            )

            try:
                cls._providers_map_class[provider_name].api_class().initialize(
                )
                cls._providers_map_class[provider_name].epg_class().initialize(
                )

                # NOTE(review): this append does not take
                # _active_providers_lock.writer_lock, unlike
                # set_active_providers -- confirm this only runs before any
                # concurrent readers exist.
                cls._active_providers.append(provider_name)
            except Exception:
                logger.error(
                    'Failed to initialize %s',
                    cls._providers_map_class[provider_name].api_class().
                    __name__,
                )

                # NOTE: 'status' actually receives the exception *type* from
                # sys.exc_info().
                (status, value_, traceback_) = sys.exc_info()

                logger.error('\n'.join(
                    traceback.format_exception(status, value_, traceback_)))

    @classmethod
    def set_active_providers(cls, active_providers):
        """Replace the active-provider list with a sorted copy of
        active_providers."""
        sorted_provider_names = sorted(active_providers)

        with cls._active_providers_lock.writer_lock:
            cls._active_providers = sorted_provider_names

    @classmethod
    def terminate(cls):
        """Signal the _run controller loop to terminate all active providers
        and exit.

        NOTE(review): if _run is currently blocked in _wait_event.wait(),
        this flag is not noticed until something also sets _wait_event --
        confirm that is intended.
        """
        cls._stop_event.set()

    @classmethod
    def terminate_provider(cls, provider_name, **kwargs):
        """Queue provider_name for termination and wake the controller.

        kwargs are forwarded to the provider's terminate routines.
        """
        with cls._providers_initialization_termination_lock:
            # A pending initialization is superseded by this termination.
            cls._providers_pending_initialization.pop(provider_name, None)

            # Skip queueing when the provider is already being terminated.
            if provider_name not in cls._providers_executing_termination:
                cls._providers_pending_termination[provider_name] = kwargs

                cls._wait_event.set()
Example #31
0
class CacheManager():
    """Process-wide in-memory cache of downloaded stream segments.

    The cache is a two-level dict: channel number -> {segment file name ->
    CacheEntry}.  Entries expire CACHE_TIME_TO_LIVE seconds after their last
    access and are swept by a self-rescheduling background Timer.
    """

    __slots__ = []

    # {channel_number: {segment_file_name: CacheEntry}}
    _cache = {}
    # Background sweep timer; None whenever no entries remain
    _cleanup_cache_timer = None
    # Mirrors the 'cache_downloaded_segments' optional setting
    _do_cache_downloaded_segments = True
    # Reader/writer lock guarding the class-level state above
    _lock = RWLock()

    @classmethod
    def _cleanup_cache(cls):
        """Delete expired cache entries and empty buckets, then reschedule
        this sweep while any entries remain."""
        current_date_time_in_utc = datetime.now(pytz.utc)

        logger.trace(
            'Cache cleanup started\n'
            'Cutoff date & time => {0}'.format(current_date_time_in_utc))

        with cls._lock.writer_lock:
            for channel_number in list(cls._cache.keys()):
                cache_bucket = cls._cache[channel_number]

                for segment_file_name in list(cache_bucket.keys()):
                    cache_entry = cache_bucket[segment_file_name]

                    # An entry expires either past its explicit expiry time
                    # or, if its content was never primed, CACHE_TIME_TO_LIVE
                    # seconds after creation.
                    if (cache_entry.expiry_date_time_in_utc and
                        current_date_time_in_utc > cache_entry.expiry_date_time_in_utc) or \
                            (cache_entry.segment_file_content is None and
                             current_date_time_in_utc > cache_entry.creation_date_time_in_utc + timedelta(
                                        seconds=CACHE_TIME_TO_LIVE)):
                        del cache_bucket[segment_file_name]

                        # NOTE(review): the last two format arguments appear
                        # swapped relative to their labels -- {2} is labelled
                        # "Creation" but receives the expiry timestamp.
                        logger.trace(
                            'Deleted expired cache entry\n'
                            'Channel number       => {0}\n'
                            'Segment file name    => {1}\n'
                            'Creation date & time => {2}\n'
                            'Expiry date & time   => {3}'.format(
                                channel_number, segment_file_name,
                                cache_entry.expiry_date_time_in_utc,
                                cache_entry.creation_date_time_in_utc))

                if not cache_bucket:
                    del cls._cache[channel_number]

                    logger.trace(
                        'Deleted expired cache bucket\n'
                        'Channel number => {0}'.format(channel_number))

            if len(cls._cache):
                # Entries remain: run another sweep in CACHE_TIME_TO_LIVE
                # seconds.
                cls._cleanup_cache_timer = Timer(CACHE_TIME_TO_LIVE,
                                                 cls._cleanup_cache)
                cls._cleanup_cache_timer.daemon = True
                cls._cleanup_cache_timer.start()
            else:
                cls._cleanup_cache_timer = None

                logger.debug('Deleted all cache buckets')

    @classmethod
    def _initialize_class_variables(cls):
        """Seed class state from the optional settings, ignoring a missing
        'cache_downloaded_segments' parameter."""
        try:
            cls.set_do_cache_downloaded_segments(
                OptionalSettings.get_optional_settings_parameter(
                    'cache_downloaded_segments'))
        except KeyError:
            pass

    @classmethod
    def _query_cache(cls, channel_number, segment_file_name):
        """Look up (channel_number, segment_file_name) and return a
        CacheResponse, creating the bucket and/or entry on a miss and
        refreshing the entry's expiry on a hit.

        NOTE(review): this mutates cls._cache yet is reached from
        query_cache() while only the reader lock is held -- confirm the
        RWLock implementation tolerates concurrent readers mutating here.
        """
        if channel_number in cls._cache:
            cache_bucket = cls._cache[channel_number]

            if segment_file_name in cache_bucket:
                cache_entry = cache_bucket[segment_file_name]

                # Expiry date for a cache entry is set to CACHE_TIME_TO_LIVE seconds following the last time the
                # entry was accessed
                cache_entry.expiry_date_time_in_utc = datetime.now(
                    pytz.utc) + timedelta(seconds=CACHE_TIME_TO_LIVE)

                if cache_entry.segment_file_content:
                    # HARD_HIT: the content is present and can be returned.
                    cache_response_type = CacheResponseType.HARD_HIT

                    logger.trace('Hard hit cache entry\n'
                                 'Channel number    => {0}\n'
                                 'Segment file name => {1}'.format(
                                     channel_number, segment_file_name))
                else:
                    # SOFT_HIT: an entry exists but its content has not been
                    # primed by a downloader yet.
                    cache_response_type = CacheResponseType.SOFT_HIT

                    logger.trace('Soft hit cache entry\n'
                                 'Channel number    => {0}\n'
                                 'Segment file name => {1}'.format(
                                     channel_number, segment_file_name))
            else:
                cache_entry = None
                cache_response_type = CacheResponseType.MISS

                cache_bucket[segment_file_name] = CacheEntry()

                logger.trace('Created cache entry\n'
                             'Channel number    => {0}\n'
                             'Segment file name => {1}'.format(
                                 channel_number, segment_file_name))
        else:
            cache_entry = None
            cache_response_type = CacheResponseType.MISS

            cls._cache[channel_number] = {}
            cls._cache[channel_number][segment_file_name] = CacheEntry()

            logger.trace('Created cache bucket & entry\n'
                         'Channel number    => {0}\n'
                         'Segment file name => {1}'.format(
                             channel_number, segment_file_name))

        logger.trace('Query cache\n'
                     'Channel number    => {0}\n'
                     'Segment file name => {1}\n'
                     'Result            => {2}'.format(
                         channel_number, segment_file_name,
                         cache_response_type.value))

        if cache_response_type == CacheResponseType.MISS:
            # First entry after the cache was emptied: (re)start the sweep
            # timer.
            if cls._cleanup_cache_timer is None:
                cls._cleanup_cache_timer = Timer(CACHE_TIME_TO_LIVE,
                                                 cls._cleanup_cache)
                cls._cleanup_cache_timer.daemon = True
                cls._cleanup_cache_timer.start()

        return CacheResponse(cache_entry, cache_response_type)

    @classmethod
    def cancel_cleanup_cache_timer(cls):
        """Stop the background sweep timer if one is running."""
        if cls._cleanup_cache_timer:
            cls._cleanup_cache_timer.cancel()

    @classmethod
    def initialize(cls):
        """Initialize class state from the optional settings file."""
        cls._initialize_class_variables()

    @classmethod
    def query_cache(cls, channel_number, segment_file_name):
        """Return the cached content for the segment, or None.

        On a soft hit, waits up to CACHE_WAIT_TIME for another thread to
        prime the entry and retries once.  Returns None on a miss or when
        caching is disabled.
        """
        segment_file_content = None

        with cls._lock.reader_lock:
            if cls._do_cache_downloaded_segments:
                cache_response = cls._query_cache(channel_number,
                                                  segment_file_name)

                if cache_response.response_type == CacheResponseType.HARD_HIT:
                    segment_file_content = cache_response.entry.segment_file_content
                elif cache_response.response_type == CacheResponseType.SOFT_HIT:
                    cache_response.entry.primed_event.wait(CACHE_WAIT_TIME)

                    cache_response = cls._query_cache(channel_number,
                                                      segment_file_name)

                    if cache_response.response_type == CacheResponseType.HARD_HIT:
                        segment_file_content = cache_response.entry.segment_file_content

        return segment_file_content

    @classmethod
    def set_do_cache_downloaded_segments(cls, do_cache_downloaded_segments):
        """Enable or disable segment caching (writer lock held)."""
        with cls._lock.writer_lock:
            cls._do_cache_downloaded_segments = do_cache_downloaded_segments

    @classmethod
    def update_cache(cls, channel_number, segment_file_name,
                     segment_file_content):
        """Store segment_file_content for the segment, signal any waiters
        via primed_event, refresh the expiry, and ensure the sweep timer is
        running.  No-op when caching is disabled."""
        with cls._lock.writer_lock:
            if cls._do_cache_downloaded_segments:
                # EAFP: create the bucket and/or entry if absent.
                try:
                    cache_bucket = cls._cache[channel_number]

                    try:
                        cache_entry = cache_bucket[segment_file_name]
                    except KeyError:
                        cache_entry = CacheEntry()

                        cache_bucket[segment_file_name] = cache_entry
                except KeyError:
                    cache_entry = CacheEntry()

                    cls._cache[channel_number] = {}
                    cls._cache[channel_number][segment_file_name] = cache_entry

                cache_entry.segment_file_content = segment_file_content

                cache_entry.expiry_date_time_in_utc = datetime.now(
                    pytz.utc) + timedelta(seconds=CACHE_TIME_TO_LIVE)
                # Wake any query_cache() callers blocked on a soft hit.
                cache_entry.primed_event.set()

                if cls._cleanup_cache_timer is None:
                    cls._cleanup_cache_timer = Timer(CACHE_TIME_TO_LIVE,
                                                     cls._cleanup_cache)
                    cls._cleanup_cache_timer.daemon = True
                    cls._cleanup_cache_timer.start()

                logger.trace('Updated cache entry\n'
                             'Channel number     => {0}\n'
                             'Segment file name  => {1}\n'
                             'Expiry date & time => {2}'.format(
                                 channel_number, segment_file_name,
                                 cache_entry.expiry_date_time_in_utc))
Example #32
0
class DeepDist:
    """DeepDist - Distributed deep learning.

    Serves the current model over HTTP from the Spark driver and applies
    gradient updates pushed back by the workers.
    """

    def __init__(self, model, master='127.0.0.1:5000', min_updates=0, max_updates=4096):
        """
        :param model: provide a model that can be trained in parallel on the workers
        :param master: host:port the model server listens on
        :param min_updates: minimum updates received before serving resumes
        :param max_updates: maximum models served before waiting for updates
        """
        self.model  = model
        self.lock   = RWLock()
        self.descent  = lambda model, gradient: model
        self.master   = master
        self.state    = 'serving'
        self.served   = 0
        self.received = 0
        # Lazily pickled copy of self.model, invalidated on every update round
        self.pmodel   = None
        self.min_updates = min_updates
        self.max_updates = max_updates

    def __enter__(self):
        # Serve the model from a background thread for the duration of the
        # context.
        Thread(target=self.start).start()
        return self

    def __exit__(self, type, value, traceback):
        pass  # TODO: shut the Flask server down cleanly here

    def start(self):
        """Run the Flask model/gradient server (blocking)."""
        from flask import Flask, request

        app = Flask(__name__)

        @app.route('/')
        def index():
            return 'DeepDist'

        @app.route('/model', methods=['GET', 'POST', 'PUT'])
        def model_flask():
            # Wait (up to ~1000 s) until the server is serving and has not
            # exhausted its serving budget.
            i = 0
            while (self.state != 'serving' or self.served >= self.max_updates) and (i < 1000):
                time.sleep(1)
                i += 1

            # Pickle the model lazily on first read (double-checked locking).
            pmodel = None
            self.lock.acquire_read()
            if not self.pmodel:
                self.lock.release()
                self.lock.acquire_write()
                if not self.pmodel:
                    self.pmodel = pickle.dumps(self.model, -1)
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
            else:
                # NOTE(review): self.served is mutated while holding only the
                # read lock -- counts may race; confirm RWLock semantics.
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
            return pmodel

        @app.route('/update', methods=['GET', 'POST', 'PUT'])
        def update_flask():
            gradient = pickle.loads(request.data)

            self.lock.acquire_write()
            # BUG FIX: the original assigned to a *local* variable `state`
            # instead of self.state, so the server never left 'serving'.
            if self.min_updates <= self.served:
                self.state = 'receiving'
            self.received += 1

            self.descent(self.model, gradient)

            if self.received >= self.served and self.min_updates <= self.received:
                # All outstanding updates are in: reset counters and serve
                # the (re-pickled on demand) new model.
                self.received = 0
                self.served   = 0
                self.state    = 'serving'
                self.pmodel = None

            self.lock.release()
            return 'OK'

        print('Listening to 0.0.0.0:5000...')
        app.run(host='0.0.0.0', debug=True, threaded=True, use_reloader=False)

    def train(self, rdd, gradient, descent):
        """Run one training pass over rdd.

        :param rdd: Spark RDD whose partitions are fed to `gradient`
        :param gradient: callable(model, data) -> gradient for one partition
        :param descent: callable(model, gradient) applying an update in place
        """
        master = self.master   # will be pickled into the closure below
        if master is None:
            master = rdd.ctx._conf.get('spark.master')
        if master.startswith('local['):
            master = 'localhost:5000'
        elif master.startswith('spark://'):
            master = '%s:5000' % urlparse.urlparse(master).netloc.split(':')[0]
        else:
            master = '%s:5000' % master.split(':')[0]
        print('\n*** Master: %s\n' % master)

        self.descent = descent

        def mapPartitions(data):
            # Fetch the current model once, compute this partition's gradient
            # and ship it back to the master.  (The original fetched the
            # model and computed the gradient twice; the first result was
            # discarded.)
            return [send_gradient(gradient(fetch_model(master=master), data), master=master)]

        return rdd.mapPartitions(mapPartitions).collect()
Example #33
0
class OptionalSettings(object):
    """Loader and change-detector for the JSON optional settings file.

    Class-level singleton: keeps the current and previous settings
    (OrderedDict) so process_optional_settings_file_updates() can diff them
    and push each change to the interested component.  A watchdog observer
    re-reads the file when it changes on disk.
    """

    __slots__ = []

    # Guards _optional_settings and _previous_optional_settings
    _lock = RWLock()
    _optional_settings = OrderedDict()
    _optional_settings_file_path = None
    _optional_settings_file_watchdog_observer = None
    _previous_optional_settings = OrderedDict()

    @classmethod
    def _backup_optional_settings(cls):
        """Deep-copy the current settings into _previous_optional_settings.

        NOTE(review): acquires writer_lock although the only visible caller
        (read_optional_settings_file) already holds it -- deadlocks unless
        RWLock is re-entrant; confirm.
        """
        with cls._lock.writer_lock:
            cls._previous_optional_settings = copy.deepcopy(
                cls._optional_settings)

    @classmethod
    def _set_optional_settings(cls, optional_settings):
        """Replace the current settings dict (see re-entrancy note on
        _backup_optional_settings)."""
        with cls._lock.writer_lock:
            cls._optional_settings = optional_settings

    @classmethod
    def get_optional_settings_file_path(cls):
        """Return the configured path of the optional settings file."""
        return cls._optional_settings_file_path

    @classmethod
    def get_optional_settings_parameter(cls, parameter_name):
        """Return the current value of parameter_name.

        Raises KeyError when the parameter is not present.
        """
        with cls._lock.reader_lock:
            return cls._optional_settings[parameter_name]

    @classmethod
    def join_optional_settings_file_watchdog_observer(cls):
        """Block until the watchdog observer thread finishes."""
        cls._optional_settings_file_watchdog_observer.join()

    @classmethod
    def process_optional_settings_file_updates(cls):
        """Diff the freshly-read settings against the previous ones, apply
        each changed core setting to its component, log the changes, and
        finally let every provider process its own settings.

        Missing keys are backfilled with their defaults on both sides before
        comparison.
        """
        with cls._lock.writer_lock:
            message_to_log = []

            # <editor-fold desc="Detect and handle cache_downloaded_segments change">
            if 'cache_downloaded_segments' not in cls._optional_settings:
                cls._optional_settings['cache_downloaded_segments'] = True

            if 'cache_downloaded_segments' not in cls._previous_optional_settings:
                cls._previous_optional_settings[
                    'cache_downloaded_segments'] = True

            if cls._optional_settings['cache_downloaded_segments'] != \
                    cls._previous_optional_settings['cache_downloaded_segments']:
                from iptv_proxy.cache import CacheManager

                message_to_log.append(
                    'Detected a change in the cache_downloaded_segments setting\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        json.dumps(cls._previous_optional_settings[
                            'cache_downloaded_segments']),
                        json.dumps(
                            cls._optional_settings['cache_downloaded_segments']
                        )))

                CacheManager.set_do_cache_downloaded_segments(
                    cls._optional_settings['cache_downloaded_segments'])
            # </editor-fold>

            # <editor-fold desc="Detect and handle allow_insecure_lan_connections change">
            if 'allow_insecure_lan_connections' not in cls._optional_settings:
                cls._optional_settings['allow_insecure_lan_connections'] = True

            if 'allow_insecure_lan_connections' not in cls._previous_optional_settings:
                cls._previous_optional_settings[
                    'allow_insecure_lan_connections'] = True

            if cls._optional_settings['allow_insecure_lan_connections'] != \
                    cls._previous_optional_settings['allow_insecure_lan_connections']:
                from iptv_proxy.http_server import HTTPRequestHandler

                message_to_log.append(
                    'Detected a change in the allow_insecure_lan_connections setting\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        json.dumps(cls._previous_optional_settings[
                            'allow_insecure_lan_connections']),
                        json.dumps(cls._optional_settings[
                            'allow_insecure_lan_connections'])))

                HTTPRequestHandler.set_allow_insecure_lan_connections(
                    cls._optional_settings['allow_insecure_lan_connections'])
            # </editor-fold>

            # <editor-fold desc="Detect and handle allow_insecure_wan_connections change">
            if 'allow_insecure_wan_connections' not in cls._optional_settings:
                cls._optional_settings[
                    'allow_insecure_wan_connections'] = False

            if 'allow_insecure_wan_connections' not in cls._previous_optional_settings:
                cls._previous_optional_settings[
                    'allow_insecure_wan_connections'] = False

            if cls._optional_settings['allow_insecure_wan_connections'] != \
                    cls._previous_optional_settings['allow_insecure_wan_connections']:
                from iptv_proxy.http_server import HTTPRequestHandler

                message_to_log.append(
                    'Detected a change in the allow_insecure_wan_connections setting\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        json.dumps(cls._previous_optional_settings[
                            'allow_insecure_wan_connections']),
                        json.dumps(cls._optional_settings[
                            'allow_insecure_wan_connections'])))

                HTTPRequestHandler.set_allow_insecure_wan_connections(
                    cls._optional_settings['allow_insecure_wan_connections'])
            # </editor-fold>

            # <editor-fold desc="Detect and handle lan_connections_require_credentials change">
            if 'lan_connections_require_credentials' not in cls._optional_settings:
                cls._optional_settings[
                    'lan_connections_require_credentials'] = False

            if 'lan_connections_require_credentials' not in cls._previous_optional_settings:
                cls._previous_optional_settings[
                    'lan_connections_require_credentials'] = False

            if cls._optional_settings['lan_connections_require_credentials'] != \
                    cls._previous_optional_settings['lan_connections_require_credentials']:
                from iptv_proxy.http_server import HTTPRequestHandler

                message_to_log.append(
                    'Detected a change in the lan_connections_require_credentials setting\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        json.dumps(cls._previous_optional_settings[
                            'lan_connections_require_credentials']),
                        json.dumps(cls._optional_settings[
                            'lan_connections_require_credentials'])))

                HTTPRequestHandler.set_lan_connections_require_credentials(
                    cls.
                    _optional_settings['lan_connections_require_credentials'])
            # </editor-fold>

            # <editor-fold desc="Detect and handle wan_connections_require_credentials change">
            if 'wan_connections_require_credentials' not in cls._optional_settings:
                cls._optional_settings[
                    'wan_connections_require_credentials'] = True

            if 'wan_connections_require_credentials' not in cls._previous_optional_settings:
                cls._previous_optional_settings[
                    'wan_connections_require_credentials'] = True

            if cls._optional_settings['wan_connections_require_credentials'] != \
                    cls._previous_optional_settings['wan_connections_require_credentials']:
                from iptv_proxy.http_server import HTTPRequestHandler

                message_to_log.append(
                    'Detected a change in the wan_connections_require_credentials setting\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        json.dumps(cls._previous_optional_settings[
                            'wan_connections_require_credentials']),
                        json.dumps(cls._optional_settings[
                            'wan_connections_require_credentials'])))

                HTTPRequestHandler.set_wan_connections_require_credentials(
                    cls.
                    _optional_settings['wan_connections_require_credentials'])
            # </editor-fold>

            if message_to_log:
                message_to_log.append('Action => N/A')

                logger.debug('\n'.join(message_to_log))

            # Providers handle their own optional settings individually.
            for provider_name in sorted(
                    ProvidersController.get_providers_map_class()):
                ProvidersController.get_provider_map_class(
                    provider_name).optional_settings_class(
                    ).process_optional_settings_file_updates(
                        cls._optional_settings,
                        cls._previous_optional_settings)

    @classmethod
    def read_optional_settings_file(cls):
        """Back up the current settings and re-read the settings file,
        logging (but not raising) read errors and JSON syntax errors.

        NOTE(review): both helpers called below re-acquire writer_lock while
        it is already held here -- deadlocks unless RWLock is re-entrant;
        confirm.
        """
        with cls._lock.writer_lock:
            cls._backup_optional_settings()

            try:
                optional_settings_file_content = Utility.read_file(
                    cls._optional_settings_file_path)
                cls._set_optional_settings(
                    json.loads(optional_settings_file_content,
                               object_pairs_hook=OrderedDict))
            except OSError:
                logger.error('Failed to read optional settings file\n'
                             'Optional settings file path => {0}'.format(
                                 cls._optional_settings_file_path))
            except JSONDecodeError:
                logger.error('Invalid optional settings file syntax\n'
                             'Optional settings file path => {0}'.format(
                                 cls._optional_settings_file_path))

    @classmethod
    def set_optional_settings_file_path(cls, optional_settings_file_path):
        """Record the path of the optional settings file."""
        cls._optional_settings_file_path = optional_settings_file_path

    @classmethod
    def set_optional_settings_parameter(cls, parameter_name, parameter_value):
        """Set a single settings parameter under the writer lock."""
        with cls._lock.writer_lock:
            cls._optional_settings[parameter_name] = parameter_value

    @classmethod
    def start_optional_settings_file_watchdog_observer(cls):
        """Start a watchdog observer on the settings file's directory so
        on-disk edits are picked up."""
        iptv_proxy_optional_settings_event_handler = OptionalSettingsEventHandler(
            cls._optional_settings_file_path)

        cls._optional_settings_file_watchdog_observer = Observer()
        cls._optional_settings_file_watchdog_observer.schedule(
            iptv_proxy_optional_settings_event_handler,
            os.path.dirname(cls._optional_settings_file_path),
            recursive=False)
        cls._optional_settings_file_watchdog_observer.start()

    @classmethod
    def stop_optional_settings_file_watchdog_observer(cls):
        """Stop the watchdog observer started above."""
        cls._optional_settings_file_watchdog_observer.stop()
class tcpServer(SocketServer.ThreadingTCPServer):
	"""Threading TCP server that broadcasts JSON messages to clients.

	clients maps client id -> {'connection', 'lock', 'timestamp',
	'messages'} and is shared with the request handlers; self.rwlock guards
	iteration over it.
	"""

	def __init__(self, server_address, RequestHandlerClass, clients, debug, security):
		self.allow_reuse_address = True
		SocketServer.ThreadingTCPServer.__init__(self,server_address,RequestHandlerClass)
		self.clients = clients
		#self.arg2 = arg2
		self.rwlock = RWLock()
		self.debug = debug
		self.security = security

	def print_debug(self, msg):
		"""Print msg only when the server runs in debug mode."""
		if self.debug == True :
			print msg

	def writeToAll(self, line):
		"""Broadcast line (wrapped in a JSON 'messages' envelope) to every
		connected client; always returns 0.

		An unreachable client is first timestamped and has the message
		queued; once unreachable for more than 20 seconds it is dropped
		and, unless security is "low", deauthenticated via the local HTTPS
		endpoint.
		"""
		lines = []
		lines.append(line)
		msg = {}
		msg['messages'] = lines
		try:
			# Snapshot the client ids under the read lock; the dict may
			# change while we iterate below.
			self.rwlock.acquire_read()
			keys = self.clients.keys()
			self.rwlock.release()
		except:
			# NOTE(review): bare except hides every failure here, including
			# programming errors -- consider narrowing.
			err = str(sys.exc_info()[1]).strip("'")
			return 0

		for an_id in keys:
			self.clients[an_id]['lock'].acquire_write()
			conn = self.clients[an_id]['connection']
			try :
				#self.print_debug("trying to say %s to %s" % (line.strip(), an_id))
				conn.wfile.write(json.dumps(msg)+"\n")
				self.clients[an_id]['lock'].release()

			except:
				self.print_debug("Not able to speak to %s" %  an_id)
				self.clients[an_id]['lock'].release()
				timestamp = self.clients[an_id]["timestamp"]
				if timestamp != 0:
					# Already marked unreachable: drop the client after a
					# 20 second grace period.
					timeout = datetime.datetime.now() - timestamp
					if timeout > datetime.timedelta(seconds = 20):
						self.print_debug("connection to %s timed out !" % an_id)
						try:
							conn.finish()
						except:
							self.print_debug("connection to %s is already finished" % an_id)
						del self.clients[an_id]
						if self.security != "low":
							#curl -k https://localhost/_auth/unset/?id=12345"
							url = 'https://localhost/_auth/unset/?id='+an_id
							proc = subprocess.Popen(['curl','-k',url], stdout=subprocess.PIPE)
							code = proc.wait()
				else:
					# First failure: remember when it happened and queue the
					# message for redelivery.
					self.clients[an_id]['lock'].acquire_write()
					self.clients[an_id]['timestamp'] = datetime.datetime.now()
					self.clients[an_id]['messages'].append(line)
					self.clients[an_id]['lock'].release()

		return 0
Example #35
0
from random import randint, choice
from time import sleep

from config import config
from dictionary import get_random_quote
from mapping import Mapping
from disk import Raid
from threading import Thread, Lock

from rwlock import RWLock

# Synchronization primitives shared by the reader/writer demo threads below.
mutex = Lock()  # serializes each reader's get_value/print critical section
rwlock = RWLock()  # reader/writer lock exposing r_locked()/w_locked() context managers


def reader_loop(mapping, reader_no):
    """Endlessly read random keys from mapping under the shared read lock.

    :param mapping: object exposing get_value(key); may raise KeyError
    :param reader_no: identifier used in the printed output
    """
    while True:
        with rwlock.r_locked():
            # BUG FIX: the original called mutex.acquire() without ever
            # releasing it, so every reader deadlocked on its second
            # iteration.  A `with` block guarantees the release.
            with mutex:
                key = randint(1, 10)
                try:
                    val = mapping.get_value(key)
                    print(f'Reader {reader_no}: {val}')
                    sleep(1)
                except KeyError:
                    print(f'Reader {reader_no}: Key {key} not found')


def writer_loop(mapping, writer_no):
    while True:
        with rwlock.w_locked():
class MSGControler:
	"""TCP message controller.

	Binds a TCP server for client connections, broadcasts events to every
	connected client, emits a keep-alive message once a minute, and writes
	a size-rotated log file.
	"""

	def __init__(self, configDic):
		"""Initialize from a config dict with keys: log, debug, security,
		listen_ip, listen_port. Starts the TCP server thread and the
		keep-alive timer."""
		self.running = True
		self.config = configDic
		self.log_file = self.config["log"]
		self.debug = self.config["debug"]
		self.security = self.config["security"]
		self.clients = {}
		self.lock = RWLock()
		## Start TCP comm server ##
		listen_ip = self.config["listen_ip"]
		listen_port = self.config["listen_port"]
		try:
			self.server = tcpServer((listen_ip,listen_port), handleConnection, self.clients, self.debug, self.security )
		except Exception:
			self.log_error("Unable to bind TCP socket %s:%s !" % (listen_ip,listen_port))
			# The port is taken: find the listening process with ss and kill it.
			proc = subprocess.Popen(["ss", "-pant"], stdout=subprocess.PIPE)
			code = proc.wait()
			for aline in proc.stdout:
				if (str(listen_ip)+':'+str(listen_port)) in aline and "LISTEN" in aline:
					# ss output ends like: users:(("prog",pid=1234,fd=5))
					# BUG FIX: the original patterns '.*(' / ').*' contain
					# unescaped parentheses (re.error), and the inner re.sub
					# call was missing its input-string argument entirely.
					tmpstr1 = re.sub(r'\).*', '', re.sub(r'.*\(', '', aline))
					pid = re.sub(r',.*', '', re.sub(r'.*pid=', '', tmpstr1))
					prog = re.sub(r'.*"', '', re.sub(r'",.*', '', aline))
					self.log_warning("Process %s, PID %s, is binding port %s. It will be killed." % (prog, pid, listen_port))
					os.system("kill -9 %s" % pid)

			time.sleep(10)
			self.log_info("Trying again to bind %s on %s." % (listen_port, listen_ip))
			self.server = tcpServer((listen_ip,listen_port), handleConnection, self.clients, self.debug, self.security )

		self.comm_thread = threading.Thread(target=self.server.serve_forever)
		self.comm_thread.daemon = True
		self.comm_thread.start()
		##### Send a keepalive message every minutes (60 sec) ##
		self.keepalive = KeepAliveTimer(60, self.send_keepalive, ["KeepAliveTimer"])
		self.keepalive.start()

	def log_error(self, newline):
		self.log(newline, "ERROR")

	def log_warning(self, newline):
		self.log(newline, "WARNING")

	def log_info(self, newline):
		self.log(newline, "INFO")

	def log_event(self, newline):
		self.log(newline, "EVENT")

	def log_debug(self, newline):
		# Only logged when the controller was configured with debug=True.
		if self.debug == True :
			self.log(newline, "DEBUG")

	def log(self, newline, level="INFO"):
		"""Append a timestamped line to the log file, rotating at ~1 MB.

		Rotation keeps four generations: .3 -> .4, .2 -> .3, .1 -> .2,
		current -> .1 (oldest .4 is deleted first).
		"""
		LOG_SIZE = os.path.getsize(self.log_file)
		# if > 1M create a new file
		if LOG_SIZE > 1000000:
			if os.path.exists(self.log_file+".4"):
				os.remove(self.log_file+".4")
			# BUG FIX: the original renamed .3 -> .4 a second time inside the
			# ".4 exists" branch, which raised when .3 was absent.
			if os.path.exists(self.log_file+".3"):
				os.rename(self.log_file+".3", self.log_file+".4")
			if os.path.exists(self.log_file+".2"):
				os.rename(self.log_file+".2", self.log_file+".3")
			if os.path.exists(self.log_file+".1"):
				os.rename(self.log_file+".1", self.log_file+".2")

			os.rename(self.log_file, self.log_file+".1")
			if os.path.exists('/opt/virtualisation/openkvi/debug'):
				os.remove('/opt/virtualisation/openkvi/debug')
			logs = open(self.log_file,'w')
		else:
			logs = open(self.log_file,'a')

		timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
		logs.write(timestamp+"::["+level+"]::"+newline+"\n")
		logs.close()

	def print_debug(self, msg):
		# Mirror debug messages to stdout as well as the log file.
		# (Parenthesized print: identical output under Python 2 and 3.)
		if self.debug == True :
			self.log_debug(msg)
			print(msg)

	def tell_all(self, event, data):
		"""Broadcast *event* with JSON-encoded *data* to every client."""
		self.keepalive.stop()
		self.print_debug("telling all %s %s"% (event, data))
		line = event+";"+json.dumps(data)
		## Acquire lock so that no messages are sent
		## simultanously
		self.lock.acquire_write()
		try:
			res = self.server.writeToAll(line)
			## Wait 500 ms between two message to prevent
			## clients being overwhelmed
			time.sleep(0.5)
		finally:
			# BUG FIX: release the write lock even if writeToAll raises,
			# otherwise every later broadcast deadlocks.
			self.lock.release()
		self.keepalive.start()

	def stop(self):
		"""Stop the keep-alive timer and close the server socket."""
		self.print_debug("stop tcp server")
		self.keepalive.stop()
		self.server.socket.close()

	def send_keepalive(self):
		# Invoked by the KeepAliveTimer every 60 seconds.
		res = self.server.writeToAll("keep alive")
Example #37
0
 def __init__(self, node_info):
     """Set up per-node storage state for *node_info*."""
     self.node_info = node_info
     self.sync = RWLock()  # guards concurrent access to this node's data
     self.db = "db/{0}.db".format(node_info.key)
     # Bounds of the key range owned by this node; unset until assigned.
     self.own_start = None
     self.own_end = None
Example #38
0
 def __init__(self):
     """Create an empty storage protected by a read-write lock."""
     self.__rwlock = RWLock()  # protects __storage and __id
     self.__storage = []
     self.__id = 1  # next id to hand out
Example #39
0
class DeepDist:
    """Distributed deep learning: serves a model over HTTP and folds
    gradient updates from workers back into it."""

    def __init__(self, model, host='127.0.0.1:5000'):
        """DeepDist - Distributed deep learning.
        :param model: provide a model that can be trained in parallel on the workers
        """
        self.model  = model
        self.lock   = RWLock()
        # descent(model, gradient) applies one gradient; identity by default.
        self.descent  = lambda model, gradient: model
        self.host     = host
        self.state    = 'serving'
        self.served   = 0
        self.received = 0

    def __enter__(self):
        # Serve the model in a background thread for the duration of the block.
        Thread(target=self.start).start()
        return self

    def __exit__(self, type, value, traceback):
        pass # need to shut down server here

    def start(self):
        """Run the Flask server that hands out the model and accepts updates."""
        from flask import Flask, request

        app = Flask(__name__)

        @app.route('/')
        def index():
            return 'DeepDist'

        @app.route('/model', methods=['GET', 'POST', 'PUT'])
        def model_flask():
            # Wait (up to ~20s) until the server is back in 'serving' state.
            i = 0
            while (self.state != 'serving') and (i < 20):
                time.sleep(1)
                i += 1

            self.lock.acquire_read()
            self.served += 1
            model = copy.deepcopy(self.model)
            self.lock.release()

            return pickle.dumps(model, -1)


        @app.route('/update', methods=['GET', 'POST', 'PUT'])
        def update_flask():
            # SECURITY: pickle.loads on request data allows arbitrary code
            # execution -- only deploy on a trusted network.
            gradient = pickle.loads(request.data)

            self.lock.acquire_write()
            # BUG FIX: the original assigned a local variable 'state' here,
            # so the instance never actually left the 'serving' state.
            self.state = 'receiving'
            self.received += 1

            self.descent(self.model, gradient)

            if self.received >= self.served:
                self.received = 0
                self.served   = 0
                self.state    = 'serving'

            self.lock.release()
            return 'OK'

        # Parenthesized print: identical output under Python 2 and 3.
        print('Listening to 0.0.0.0:5000...')
        app.run(host='0.0.0.0', debug=True, threaded=True, use_reloader=False)

    def train(self, rdd, gradient, descent):
        """Run one pass over *rdd*: each partition fetches the model,
        computes gradients and sends them back to this server."""
        self.descent = descent

        host = self.host   # will be pickled by rdd.mapPartitions

        def mapPartitions(data):
            return (send_gradient(gradient(fetch_model(host=host), data), host=host))

        return rdd.mapPartitions(mapPartitions).collect()
Example #40
0
class DeepDist:
    def __init__(self, model, batch=None, master='127.0.0.1:5000'):
        """DeepDist - Distributed deep learning.
        :param model: provide a model that can be trained in parallel on the workers
        """
        self.model  = model
        self.lock   = RWLock()
        self.descent  = lambda model, gradient: model
        self.master   = master
        self.state    = 'serving'
        self.served   = 0
        self.received = 0
        self.batch    = batch
        self.server   = None

    def __enter__(self):
        Thread(target=self.start).start()
        # self.server = Process(target=self.start)
        # self.server.start()
        return self
    
    def __exit__(self, type, value, traceback):
        # self.server.terminate()
        pass # need to shut down server here
        
    def start(self):
        from flask import Flask, request

        app = Flask(__name__)

        @app.route('/')
        def index():
            return 'DeepDist'

        @app.route('/model', methods=['GET', 'POST', 'PUT'])
        def model_flask():
            i = 0
            while (self.state != 'serving') and (i < 1000):
                time.sleep(1)
                i += 1

            self.lock.acquire_read()
            self.served += 1
            model = copy.deepcopy(self.model)
            self.lock.release()
            
            return pickle.dumps(model, -1)
    

        @app.route('/update', methods=['GET', 'POST', 'PUT'])
        def update_flask():
            gradient = pickle.loads(request.data)

            self.lock.acquire_write()
            state = 'receiving'
            self.received += 1
            
            self.descent(self.model, gradient)
            
            if self.received >= self.served:
                self.received = 0
                self.served   = 0
                self.state    = 'serving'
            
            self.lock.release()
            return 'OK'
        
        print 'Listening to 0.0.0.0:5000...'
        app.run(host='0.0.0.0', debug=True, threaded=True, use_reloader=False)

    def train(self, rdd, gradient, descent):
        master = self.master   # will be pickled
        print 'master0: ', master
        if master == None:
            master = rdd.ctx._conf.get('spark.master')
        print 'master1: ', master
        if master.startswith('local['):
            master = 'localhost:5000'
        else:
            if master.startswith('spark://'):
                master = '%s:5000' % urlparse.urlparse(master).netloc.split(':')[0]
            else:
                master = '%s:5000' % master.split(':')[0]
        print '\n*** master: %s\n' % master

        self.descent = descent
        
        batch = self.batch
        
        def mapPartitions(data):
            last = 'dummy'
            class Iter:
              def __iter__(self):
                self.i = 0
                return self
              def next(self):
                if (batch == None) or (self.i < batch):
                  self.i += 1
                  last = data.next()
                  return last
                else:
                  return None
            res = []
            while last != None:
              res.append(send_gradient(gradient(fetch_model(master=master), Iter()), master=master))
            return res
        
        return rdd.mapPartitions(mapPartitions).collect()
Example #41
0
class DeepDist:
    """Distributed deep learning with min/max update throttling: serves a
    (cached, pickled) model over HTTP and folds gradient updates back in."""

    def __init__(self, model, master="127.0.0.1:5000", min_updates=0, max_updates=4096):
        """DeepDist - Distributed deep learning.
        :param model: provide a model that can be trained in parallel on the workers
        """
        self.model = model
        self.lock = RWLock()
        # descent(model, gradient) applies one gradient; identity by default.
        self.descent = lambda model, gradient: model
        self.master = master
        self.state = "serving"
        self.served = 0
        self.received = 0
        # self.server   = None
        self.pmodel = None  # cached pickle of self.model; invalidated on update
        self.min_updates = min_updates
        self.max_updates = max_updates

    def __enter__(self):
        Thread(target=self.start).start()
        # self.server = Process(target=self.start)
        # self.server.start()
        return self

    def __exit__(self, type, value, traceback):
        # self.server.terminate()
        pass  # need to shut down server here

    def start(self):
        """Run the Flask server that hands out the model and accepts updates."""
        from flask import Flask, request

        app = Flask(__name__)

        @app.route("/")
        def index():
            return "DeepDist"

        @app.route("/model", methods=["GET", "POST", "PUT"])
        def model_flask():
            # Wait until serving and under the max_updates budget.
            i = 0
            while (self.state != "serving" or self.served >= self.max_updates) and (i < 1000):
                time.sleep(1)
                i += 1

            # pickle on first read
            # NOTE(review): the else-branch mutates self.served under the
            # read lock; concurrent readers could lose increments -- confirm.
            pmodel = None
            self.lock.acquire_read()
            if not self.pmodel:
                self.lock.release()
                self.lock.acquire_write()
                if not self.pmodel:
                    self.pmodel = pickle.dumps(self.model, -1)
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
            else:
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
            return pmodel

        @app.route("/update", methods=["GET", "POST", "PUT"])
        def update_flask():
            # SECURITY: pickle.loads on request data allows arbitrary code
            # execution -- only deploy on a trusted network.
            gradient = pickle.loads(request.data)

            self.lock.acquire_write()
            if self.min_updates <= self.served:
                # BUG FIX: the original assigned a local variable 'state',
                # so the instance never actually left the 'serving' state.
                self.state = "receiving"
            self.received += 1

            self.descent(self.model, gradient)

            if self.received >= self.served and self.min_updates <= self.received:
                self.received = 0
                self.served = 0
                self.state = "serving"
                self.pmodel = None  # model changed: drop the pickle cache

            self.lock.release()
            return "OK"

        # Parenthesized print: identical output under Python 2 and 3.
        print("Listening to 0.0.0.0:5000...")
        app.run(host="0.0.0.0", debug=True, threaded=True, use_reloader=False)

    def train(self, rdd, gradient, descent):
        """Run one pass over *rdd*: each partition fetches the model,
        computes gradients and sends them back to the master."""
        master = self.master  # will be pickled
        if master == None:
            master = rdd.ctx._conf.get("spark.master")
        if master.startswith("local["):
            master = "localhost:5000"
        else:
            if master.startswith("spark://"):
                master = "%s:5000" % urlparse.urlparse(master).netloc.split(":")[0]
            else:
                master = "%s:5000" % master.split(":")[0]
        # Parenthesized print: identical output under Python 2 and 3.
        print("\n*** Master: %s\n" % master)

        self.descent = descent

        def mapPartitions(data):
            return [send_gradient(gradient(fetch_model(master=master), data), master=master)]

        return rdd.mapPartitions(mapPartitions).collect()
Example #42
0
  def __init__(self):
    """Open the multiav sqlite database and ensure the schema exists."""
    database = web.database(dbn='sqlite', db='multiav.db')
    database.printing = False  # silence query echoing
    self.db = database
    self.create_schema()
    self.reports_lock = RWLock()  # guards concurrent report access
Example #43
0
class SmoothStreams(Provider):
    """Provider implementation for the SmoothStreams IPTV service."""
    __slots__ = []

    # Maps an expired ("hijacked") nimble session id to the fresh session id
    # that replaced it; guarded by _nimble_session_id_map_lock.
    _nimble_session_id_map = {}
    _nimble_session_id_map_lock = RWLock()
    # Timer driving periodic session refresh; None when not scheduled.
    _refresh_session_timer = None
    _provider_name = SmoothStreamsConstants.PROVIDER_NAME.lower()
    # Session state (authorization token, expiry, ...); guarded by _session_lock.
    _session = {}
    _session_lock = RWLock()

    @classmethod
    def _cancel_refresh_session_timer(cls):
        """Stop and drop the pending refresh-session timer, if one is set."""
        timer = cls._refresh_session_timer
        if timer:
            timer.cancel()
            cls._refresh_session_timer = None

    @classmethod
    def _clear_nimble_session_id_map(cls):
        # Drop every hijacked->target session id mapping under the write lock.
        with cls._nimble_session_id_map_lock.writer_lock:
            cls._nimble_session_id_map = {}

    @classmethod
    def _do_refresh_session(cls):
        """Return True when the session must be (re)retrieved.

        True when no session was ever retrieved ('expires_on' missing) or
        when it expires within the next 30 minutes; False otherwise.
        """
        try:
            refresh_deadline = (cls._get_session_parameter('expires_on') -
                                timedelta(minutes=30))
        except KeyError:
            # 'expires_on' absent: the session was never retrieved.
            logger.error('SmoothStreams session\n'
                         'Status => Never retrieved\n'
                         'Action => Retrieve it')

            return True

        if datetime.now(pytz.utc) < refresh_deadline:
            return False

        logger.debug('SmoothStreams session\n'
                     'Status => Expired\n'
                     'Action => Retrieve it')

        return True

    @classmethod
    def _generate_playlist_m3u8_static_track_url(cls, track_information,
                                                 **kwargs):
        """Append one channel's static stream URL to *track_information*.

        Requires ``channel_number``, ``playlist_protocol`` and
        ``authorization_token`` in *kwargs*.
        """
        channel_number = kwargs['channel_number']
        playlist_protocol = kwargs['playlist_protocol']
        authorization_token = kwargs['authorization_token']

        # hls/mpegts are served over https:443, anything else over rtmp:3635;
        # mpegts additionally gets the /mpeg.2ts suffix.
        track_information.append(
            '{0}://{1}.smoothstreams.tv:{2}/{3}/ch{4:02}q1.stream{5}?wmsAuthSign={6}\n'
            .format(
                'https' if playlist_protocol in ['hls', 'mpegts'] else 'rtmp',
                Configuration.get_configuration_parameter(
                    'SMOOTHSTREAMS_SERVER'),
                '443' if playlist_protocol in ['hls', 'mpegts'] else '3635',
                Configuration.get_configuration_parameter(
                    'SMOOTHSTREAMS_SERVICE'), int(channel_number),
                '/mpeg.2ts' if playlist_protocol == 'mpegts' else '',
                authorization_token))

    @classmethod
    def _get_session_parameter(cls, parameter_name):
        # Read one session value under the shared lock; raises KeyError
        # when the parameter was never set.
        with cls._session_lock.reader_lock:
            return cls._session[parameter_name]

    @classmethod
    def _get_target_nimble_session_id(cls, hijacked_nimble_session_id):
        # Returns the replacement session id, or None when not hijacked yet.
        with cls._nimble_session_id_map_lock.reader_lock:
            return cls._nimble_session_id_map.get(hijacked_nimble_session_id)

    @classmethod
    def _hijack_nimble_session_id(cls, hijacked_nimble_session_id,
                                  hijacking_nimble_session_id):
        # Record that the expired session id should be answered with the
        # fresh one from now on.
        with cls._nimble_session_id_map_lock.writer_lock:
            cls._nimble_session_id_map[
                hijacked_nimble_session_id] = hijacking_nimble_session_id

    @classmethod
    def _initialize(cls, **kwargs):
        """Load the persisted session from the database and refresh it when
        missing or expired (or when kwargs forces the decision).

        ``do_refresh_session`` in *kwargs*, when present, overrides the
        database check entirely.
        """
        do_refresh_session = False

        if 'do_refresh_session' in kwargs:
            do_refresh_session = kwargs['do_refresh_session']
        else:
            with SmoothStreamsDatabase.get_access_lock().shared_lock:
                db_session = SmoothStreamsDatabase.create_session()

                try:
                    setting_row = SmoothStreamsDatabaseAccess.query_setting(
                        db_session, 'session')

                    if setting_row is not None:
                        # Persisted session found: decode and validate expiry.
                        cls._session = jsonpickle.decode(setting_row.value)

                        current_date_time_in_utc = datetime.now(pytz.utc)

                        if current_date_time_in_utc < cls._session[
                                'expires_on']:
                            logger.debug(
                                'Loaded SmoothStreams session\n'
                                'Authorization token => {0}\n'
                                'Expires on          => {1}'.format(
                                    cls._session['authorization_token'],
                                    cls._session['expires_on'].astimezone(
                                        tzlocal.get_localzone()).strftime(
                                            '%Y-%m-%d %H:%M:%S%z')))
                        else:
                            # Session in the database has already expired.
                            do_refresh_session = True
                    else:
                        # No session was ever persisted.
                        do_refresh_session = True
                finally:
                    db_session.close()

        if do_refresh_session:
            cls.refresh_session(force_refresh=True)

    @classmethod
    def _map_nimble_session_id(cls, client_ip_address, channel_number,
                               client_uuid, nimble_session_id,
                               authorization_token):
        """Return the nimble session id to use for this request.

        When the request carries an outdated authorization token, a fresh
        playlist is fetched to obtain (and cache) a replacement session id;
        otherwise the incoming id is returned unchanged.
        """
        if authorization_token != cls._get_session_parameter(
                'authorization_token'):
            # Token mismatch: the client's session is stale. Re-use a cached
            # replacement id when one exists.
            target_nimble_session_id = cls._get_target_nimble_session_id(
                nimble_session_id)

            if not target_nimble_session_id:
                logger.debug(
                    'SmoothStreams authorization token {0} in request from {1}/{2} expired'
                    .format(authorization_token, client_ip_address,
                            client_uuid))

                try:
                    # Request a fresh playlist and pull the new session id
                    # out of its first variant's query string.
                    response_text = cls.download_playlist_m3u8(
                        client_ip_address, client_uuid, '/playlist.m3u8',
                        dict(channel_number=channel_number, protocol='hls'))

                    m3u8_object = m3u8.loads(response_text)

                    requested_path_with_query_string = '/{0}'.format(
                        m3u8_object.data['playlists'][0]['uri'])
                    requested_url_components = urllib.parse.urlparse(
                        requested_path_with_query_string)
                    requested_query_string_parameters = dict(
                        urllib.parse.parse_qsl(requested_url_components.query))

                    target_nimble_session_id = requested_query_string_parameters.get(
                        'nimblesessionid', nimble_session_id)

                    logger.debug('Hijacking SmoothStreams session\n'
                                 'Expired nimble session ID => {0}\n'
                                 'Target nimble session ID  => {1}'.format(
                                     nimble_session_id,
                                     target_nimble_session_id))
                    cls._hijack_nimble_session_id(nimble_session_id,
                                                  target_nimble_session_id)
                except requests.exceptions.HTTPError:
                    # Playlist fetch failed: fall back to the original id.
                    target_nimble_session_id = nimble_session_id

                    (type_, value_, traceback_) = sys.exc_info()
                    logger.error('\n'.join(
                        traceback.format_exception(type_, value_, traceback_)))
        else:
            target_nimble_session_id = nimble_session_id

        return target_nimble_session_id

    @classmethod
    def _refresh_session(cls):
        """Authenticate against SmoothStreams and return a new session dict.

        Returns a dict with 'authorization_token', 'expires_on' and
        'requests_session' on success; an empty dict when the service
        reports an authentication failure. Raises
        requests.exceptions.HTTPError on non-OK HTTP statuses.
        """
        requests_session = requests.Session()

        # viewmmasr accounts authenticate against a different endpoint.
        if Configuration.get_configuration_parameter(
                'SMOOTHSTREAMS_SERVICE') == 'viewmmasr':
            url = 'https://www.mma-tv.net/loginForm.php'
        else:
            url = 'https://auth.smoothstreams.tv/hash_api.php'

        username = Configuration.get_configuration_parameter(
            'SMOOTHSTREAMS_USERNAME')
        password = SecurityManager.decrypt_password(
            Configuration.get_configuration_parameter(
                'SMOOTHSTREAMS_PASSWORD')).decode()
        site = Configuration.get_configuration_parameter(
            'SMOOTHSTREAMS_SERVICE')

        # BUG FIX: the original format string reused {0} for both URL and
        # username, shifting every following field (the username was printed
        # in the password slot and the site was never printed at all).
        logger.debug('Retrieving SmoothStreams authorization token\n'
                     'URL => {0}\n'
                     '  Parameters\n'
                     '    username => {1}\n'
                     '    password => {2}\n'
                     '    site     => {3}'.format(url, username,
                                                  '\u2022' * len(password),
                                                  site))

        response = Utility.make_http_request(
            requests_session.get,
            url,
            params={
                'username': username,
                'password': password,
                'site': site
            },
            headers=requests_session.headers,
            cookies=requests_session.cookies.get_dict())

        response_status_code = response.status_code
        # NOT_FOUND is tolerated here because the body may still contain a
        # usable JSON payload; it is re-raised at the end of the method.
        if response_status_code != requests.codes.OK and response_status_code != requests.codes.NOT_FOUND:
            logger.error(Utility.assemble_response_from_log_message(response))

            response.raise_for_status()

        logger.trace(
            Utility.assemble_response_from_log_message(response,
                                                       is_content_json=True,
                                                       do_print_content=True))

        authorization_token_response = response.json()
        session = {}

        if 'code' in authorization_token_response:
            if authorization_token_response['code'] == '0':
                # code '0' signals an authentication failure.
                logger.error(
                    'Failed to retrieve SmoothStreams authorization token\n'
                    'Error => {0}'.format(
                        authorization_token_response['error']))
            elif authorization_token_response['code'] == '1':
                session['authorization_token'] = authorization_token_response[
                    'hash']
                # 'valid' is given in minutes.
                session['expires_on'] = datetime.now(pytz.utc) + timedelta(
                    seconds=(authorization_token_response['valid'] * 60))
                session['requests_session'] = requests_session

                logger.info('Retrieved SmoothStreams authorization token\n'
                            'Hash       => {0}\n'
                            'Expires On => {1}'.format(
                                session['authorization_token'],
                                session['expires_on'].astimezone(
                                    tzlocal.get_localzone()).strftime(
                                        '%Y-%m-%d %H:%M:%S%z')))
        else:
            logger.error(
                'Failed to retrieve SmoothStreams authorization token\n'
                'Error => JSON response contains no [\'code\'] field')

        if response_status_code != requests.codes.OK:
            response.raise_for_status()

        return session

    @classmethod
    def _retrieve_fresh_authorization_token(cls):
        """Refresh the session and return its authorization token.

        Returns None (implicitly) when the refresh fails or the refreshed
        session carries no token.
        """
        try:
            fresh_session = cls._refresh_session()
            return fresh_session['authorization_token']
        except (KeyError, requests.exceptions.HTTPError):
            logger.error(
                'Failed to retrieve a fresh SmoothStreams authorization token')

    @classmethod
    def _set_session_parameter(cls, parameter_name, parameter_value):
        # Write one session value under the exclusive lock.
        with cls._session_lock.writer_lock:
            cls._session[parameter_name] = parameter_value

    @classmethod
    def _terminate(cls, **kwargs):
        # Provider shutdown hook; this provider needs no teardown.
        pass

    @classmethod
    def _timed_refresh_session(cls):
        # Callback for the refresh-session timer: force a session refresh.
        logger.debug('SmoothStreams refresh session timer triggered')

        cls.refresh_session(force_refresh=True)

    @classmethod
    def download_chunks_m3u8(cls, client_ip_address, client_uuid,
                             requested_path,
                             requested_query_string_parameters):
        """Proxy a chunks.m3u8 request to SmoothStreams for one client.

        Rewrites the returned playlist so that each .ts URI carries the
        channel number, client uuid and http token back through this proxy.
        Raises requests.exceptions.HTTPError on upstream failure.
        """
        authorization_token = requested_query_string_parameters.get(
            'wmsAuthSign')
        channel_number = requested_query_string_parameters.get(
            'channel_number')
        http_token = requested_query_string_parameters.get('http_token')
        nimble_session_id = requested_query_string_parameters.get(
            'nimblesessionid')

        # Swap in a replacement session id when the client's token is stale.
        nimble_session_id = cls._map_nimble_session_id(client_ip_address,
                                                       channel_number,
                                                       client_uuid,
                                                       nimble_session_id,
                                                       authorization_token)

        # Book-keeping for the proxy's client tracking.
        IPTVProxy.refresh_serviceable_clients(client_uuid, client_ip_address)
        IPTVProxy.set_serviceable_client_parameter(
            client_uuid, 'last_request_date_time_in_utc',
            datetime.now(pytz.utc))
        IPTVProxy.set_serviceable_client_parameter(
            client_uuid, 'last_requested_channel_number', channel_number)

        authorization_token = cls._get_session_parameter('authorization_token')
        requests_session = cls._get_session_parameter('requests_session')

        # Keep only the final /..m3u8 segment of the requested path.
        target_url = 'https://{0}.smoothstreams.tv/{1}/ch{2}q1.stream{3}'.format(
            Configuration.get_configuration_parameter('SMOOTHSTREAMS_SERVER'),
            Configuration.get_configuration_parameter('SMOOTHSTREAMS_SERVICE'),
            channel_number, re.sub(r'(/.*)?(/.*\.m3u8)', r'\2',
                                   requested_path))

        logger.debug('Proxying request\n'
                     'Source IP      => {0}\n'
                     'Requested path => {1}\n'
                     '  Parameters\n'
                     '    channel_number  => {2}\n'
                     '    client_uuid     => {3}\n'
                     'Target path    => {4}\n'
                     '  Parameters\n'
                     '    nimblesessionid => {5}\n'
                     '    wmsAuthSign     => {6}'.format(
                         client_ip_address, requested_path, channel_number,
                         client_uuid, target_url, nimble_session_id,
                         authorization_token))

        response = Utility.make_http_request(
            requests_session.get,
            target_url,
            params={
                'nimblesessionid': nimble_session_id,
                'wmsAuthSign': authorization_token
            },
            headers=requests_session.headers,
            cookies=requests_session.cookies.get_dict())

        if response.status_code == requests.codes.OK:
            logger.trace(
                Utility.assemble_response_from_log_message(
                    response, is_content_text=True, do_print_content=True))

            # Route the segment URLs back through this proxy.
            return response.text.replace(
                '.ts?',
                '.ts?channel_number={0}&client_uuid={1}&http_token={2}&'.
                format(channel_number, client_uuid,
                       urllib.parse.quote(http_token) if http_token else ''))
        else:
            logger.error(Utility.assemble_response_from_log_message(response))

            response.raise_for_status()

    @classmethod
    def download_playlist_m3u8(cls, client_ip_address, client_uuid,
                               requested_path,
                               requested_query_string_parameters):
        """Return a playlist for one channel in the requested protocol.

        For 'hls' the upstream playlist is proxied and rewritten; for
        'mpegts' and 'rtmp' a one-entry M3U playlist pointing directly at
        the stream is generated. Raises requests.exceptions.HTTPError on
        upstream failure in the 'hls' path.
        """
        channel_number = requested_query_string_parameters.get(
            'channel_number')
        http_token = requested_query_string_parameters.get('http_token')
        protocol = requested_query_string_parameters.get('protocol')

        # Book-keeping for the proxy's client tracking.
        IPTVProxy.refresh_serviceable_clients(client_uuid, client_ip_address)
        IPTVProxy.set_serviceable_client_parameter(
            client_uuid, 'last_request_date_time_in_utc',
            datetime.now(pytz.utc))
        IPTVProxy.set_serviceable_client_parameter(
            client_uuid, 'last_requested_channel_number', channel_number)

        # Make sure the authorization token is current before building URLs.
        cls.refresh_session()

        if protocol == 'hls':
            authorization_token = cls._get_session_parameter(
                'authorization_token')
            requests_session = cls._get_session_parameter('requests_session')

            # Keep only the final /..m3u8 segment of the requested path.
            target_url = 'https://{0}.smoothstreams.tv/{1}/ch{2}q1.stream{3}'.format(
                Configuration.get_configuration_parameter(
                    'SMOOTHSTREAMS_SERVER'),
                Configuration.get_configuration_parameter(
                    'SMOOTHSTREAMS_SERVICE'), channel_number,
                re.sub(r'(/.*)?(/.*\.m3u8)', r'\2', requested_path))

            logger.debug('Proxying request\n'
                         'Source IP      => {0}\n'
                         'Requested path => {1}\n'
                         '  Parameters\n'
                         '    channel_number => {2}\n'
                         '    client_uuid    => {3}\n'
                         '    protocol       => {4}\n'
                         'Target path    => {5}\n'
                         '  Parameters\n'
                         '    wmsAuthSign    => {6}'.format(
                             client_ip_address, requested_path, channel_number,
                             client_uuid, protocol, target_url,
                             authorization_token))

            response = Utility.make_http_request(
                requests_session.get,
                target_url,
                params={'wmsAuthSign': authorization_token},
                headers=requests_session.headers,
                cookies=requests_session.cookies.get_dict())

            if response.status_code == requests.codes.OK:
                logger.trace(
                    Utility.assemble_response_from_log_message(
                        response, is_content_text=True, do_print_content=True))

                # Route the chunk playlist URLs back through this proxy.
                return response.text.replace(
                    'chunks.m3u8?',
                    'chunks.m3u8?channel_number={0}&client_uuid={1}&http_token={2}&'
                    .format(
                        channel_number, client_uuid,
                        urllib.parse.quote(http_token) if http_token else ''))
            else:
                logger.error(
                    Utility.assemble_response_from_log_message(response))

                response.raise_for_status()
        elif protocol == 'mpegts':
            authorization_token = cls._get_session_parameter(
                'authorization_token')

            # Direct https mpegts URL; no proxying needed.
            return '#EXTM3U\n' \
                   '#EXTINF:-1 ,{0}\n' \
                   'https://{1}.smoothstreams.tv:443/{2}/ch{3}q1.stream/mpeg.2ts?' \
                   'wmsAuthSign={4}'.format(SmoothStreamsEPG.get_channel_name(int(channel_number)),
                                            Configuration.get_configuration_parameter(
                                                'SMOOTHSTREAMS_SERVER'),
                                            Configuration.get_configuration_parameter(
                                                'SMOOTHSTREAMS_SERVICE'),
                                            channel_number,
                                            authorization_token)
        elif protocol == 'rtmp':
            authorization_token = cls._get_session_parameter(
                'authorization_token')

            # Direct rtmp URL; no proxying needed.
            return '#EXTM3U\n' \
                   '#EXTINF:-1 ,{0}\n' \
                   'rtmp://{1}.smoothstreams.tv:3635/{2}/ch{3}q1.stream?' \
                   'wmsAuthSign={4}'.format(SmoothStreamsEPG.get_channel_name(int(channel_number)),
                                            Configuration.get_configuration_parameter(
                                                'SMOOTHSTREAMS_SERVER'),
                                            Configuration.get_configuration_parameter(
                                                'SMOOTHSTREAMS_SERVICE'),
                                            channel_number,
                                            authorization_token)

    @classmethod
    def download_ts_file(cls, client_ip_address, client_uuid, requested_path,
                         requested_query_string_parameters):
        """Proxy a single MPEG-TS segment request to SmoothStreams.

        Records the client's activity, forwards the .ts request (carrying
        the nimble session id and wmsAuthSign token) to the provider, and
        returns the raw segment bytes on HTTP 200.  A 4xx/5xx provider
        response is logged and re-raised via ``raise_for_status``.
        """
        query = requested_query_string_parameters
        wms_auth_sign = query.get('wmsAuthSign')
        channel_number = query.get('channel_number')
        nimble_session_id = query.get('nimblesessionid')

        # Track the requesting client and what it last asked for
        IPTVProxy.refresh_serviceable_clients(client_uuid, client_ip_address)
        IPTVProxy.set_serviceable_client_parameter(
            client_uuid, 'last_request_date_time_in_utc',
            datetime.now(pytz.utc))
        IPTVProxy.set_serviceable_client_parameter(
            client_uuid, 'last_requested_channel_number', channel_number)

        session = cls._get_session_parameter('requests_session')

        server = Configuration.get_configuration_parameter(
            'SMOOTHSTREAMS_SERVER')
        service = Configuration.get_configuration_parameter(
            'SMOOTHSTREAMS_SERVICE')
        # Strip any leading path components so only "/<segment>.ts" remains
        segment_path = re.sub(r'(/.*)?(/.*\.ts)', r'\2', requested_path)
        target_url = 'https://{0}.smoothstreams.tv/{1}/ch{2}q1.stream{3}'.format(
            server, service, channel_number, segment_path)

        logger.debug('Proxying request\n'
                     'Source IP      => {0}\n'
                     'Requested path => {1}\n'
                     '  Parameters\n'
                     '    channel_number  => {2}\n'
                     '    client_uuid     => {3}\n'
                     'Target path    => {4}\n'
                     '  Parameters\n'
                     '    nimblesessionid => {5}\n'
                     '    wmsAuthSign     => {6}'.format(
                         client_ip_address, requested_path, channel_number,
                         client_uuid, target_url, nimble_session_id,
                         wms_auth_sign))

        response = Utility.make_http_request(
            session.get,
            target_url,
            params={
                'nimblesessionid': nimble_session_id,
                'wmsAuthSign': wms_auth_sign
            },
            headers=session.headers,
            cookies=session.cookies.get_dict())

        if response.status_code == requests.codes.OK:
            logger.trace(
                Utility.assemble_response_from_log_message(
                    response, is_content_binary=True))

            return response.content

        logger.error(Utility.assemble_response_from_log_message(response))

        response.raise_for_status()

    @classmethod
    def generate_playlist_m3u8_tracks(
            cls,
            generate_playlist_m3u8_tracks_mapping,
            sort_by=M3U8PlaylistSortOrder.CHANNEL_NUMBER.value):
        """Generate the playlist's M3U8 track entries.

        Thin wrapper that delegates to the parent provider implementation.

        :param generate_playlist_m3u8_tracks_mapping: provider mapping used
            by the parent class to build the track list
        :param sort_by: playlist sort order; defaults to sorting by
            channel number
        :return: whatever the parent implementation returns
        """
        # Fix: the default previously referenced the undefined name
        # "M388PlaylistSortOrder" (a corruption of M3U8PlaylistSortOrder),
        # which would raise NameError when the class body is evaluated.
        return super().generate_playlist_m3u8_tracks(
            generate_playlist_m3u8_tracks_mapping, sort_by=sort_by)

    @classmethod
    def refresh_session(cls, force_refresh=False):
        """Refresh the SmoothStreams authorization session if needed.

        When the session is stale (or ``force_refresh`` is True) a new
        session is fetched, the nimble session id map is cleared, the new
        session is persisted to the database, and the refresh timer is
        rescheduled to fire 1800 seconds before the session expires.

        :param force_refresh: refresh even if the current session would
            not otherwise be considered stale
        """
        # Writer lock: only one thread may replace the session at a time
        with cls._session_lock.writer_lock:
            do_start_timer = False

            if force_refresh or cls._do_refresh_session():
                do_start_timer = True

                cls._clear_nimble_session_id_map()

                session = cls._refresh_session()

                if session:
                    cls._session = session

                    # Persist the refreshed session so it survives restarts
                    with SmoothStreamsDatabase.get_write_lock(
                    ), SmoothStreamsDatabase.get_access_lock().shared_lock:
                        db_session = SmoothStreamsDatabase.create_session()

                        try:
                            db_session.merge(
                                SmoothStreamsSetting(
                                    'session',
                                    jsonpickle.encode(cls._session)))
                            db_session.commit()
                        except Exception:
                            # Log the full traceback, then undo the partial write
                            (type_, value_, traceback_) = sys.exc_info()
                            logger.error('\n'.join(
                                traceback.format_exception(
                                    type_, value_, traceback_)))

                            db_session.rollback()
                        finally:
                            db_session.close()

                # Cancel any previously scheduled refresh; a new timer is
                # started below
                if cls._refresh_session_timer:
                    cls._refresh_session_timer.cancel()
            elif not cls._refresh_session_timer:
                # Session is still valid but no timer exists yet (e.g. first
                # call after startup) — schedule one
                do_start_timer = True

            if do_start_timer:
                # Fire 1800 seconds (30 minutes) before the session expires
                interval = (cls._get_session_parameter('expires_on') -
                            datetime.now(pytz.utc)).total_seconds() - 1800
                cls._refresh_session_timer = Timer(interval,
                                                   cls._timed_refresh_session)
                cls._refresh_session_timer.daemon = True
                cls._refresh_session_timer.start()

                logger.debug('Started SmoothStreams session refresh timer\n'
                             'Interval => {0} seconds'.format(interval))
Example #44
0
class SmoothStreamsEPG(ProviderEPG):
    """Electronic program guide (EPG) handling for the SmoothStreams provider.

    Parses channel and program data from the Fog JSON/XML feeds and the
    SmoothStreams JSON EPG into the local database.  Class-level state is
    guarded by the RWLock instances declared alongside each attribute.
    """

    __slots__ = []

    # Channel-name remapping applied during parsing; guarded by the lock below
    _channel_name_map = OrderedDict()
    _channel_name_map_lock = RWLock()
    # Whether to keep the provider-supplied channel icons; guarded by its lock
    _do_use_provider_icons = False
    _do_use_provider_icons_lock = RWLock()
    _lock = RLock()
    _provider_name = SmoothStreamsConstants.PROVIDER_NAME.lower()
    # Timer driving periodic EPG refreshes (None until first scheduled)
    _refresh_epg_timer = None
    # Daily refresh times as 'HH:MM:SS' strings; guarded by the lock below
    _update_times = ['06:00:00']
    _update_times_lock = RWLock()

    @classmethod
    def _parse_fog_channels_json(
        cls,
        db_session,
        channel_name_map,
        do_use_provider_icons,
        parsed_channel_xmltv_id_to_channel,
    ):
        """Stream-parse the Fog channels JSON feed into the database.

        Each top-level JSON object (keyed by a numeric string) describes one
        channel.  For every completed channel object an XMLTVChannel is
        built, transformed, recorded in ``parsed_channel_xmltv_id_to_channel``
        (for later program parsing), and added to ``db_session``.

        :param db_session: database session channels are added to (flushed,
            not committed, here)
        :param channel_name_map: name remapping passed to the channel
            transformation step
        :param do_use_provider_icons: when False, provider icons are replaced
            during transformation
        :param parsed_channel_xmltv_id_to_channel: out-parameter dict mapping
            xmltv id -> parsed XMLTVChannel
        :raises Exception: re-raises any parsing/database error after logging
        """
        epg_json_stream = cls._request_fog_channels_json()

        logger.debug(
            'Processing Fog JSON channels\nFile name => %s',
            SmoothStreamsConstants.FOG_CHANNELS_JSON_FILE_NAME,
        )

        # Key of the JSON object currently being parsed (numeric string)
        key = None

        # Fields accumulated for the current channel object
        channel_number = None
        channel_name = None
        channel_xmltv_id = None
        channel_icon_source = None

        try:
            ijson_parser = ijson.parse(epg_json_stream)

            # ijson emits (prefix, event, value) triples as the stream is read
            for (prefix, event, value) in ijson_parser:
                if prefix.isdigit() and (event, value) == ('start_map', None):
                    # New channel object begins — reset the accumulators
                    key = prefix

                    channel_number = None
                    channel_name = None
                    channel_xmltv_id = None
                    channel_icon_source = None
                elif (prefix, event) == ('{0}.channum'.format(key), 'string'):
                    channel_number = int(value.strip())
                elif (prefix, event) == ('{0}.channame'.format(key), 'string'):
                    # Values may contain HTML entities (e.g. &amp;)
                    channel_name = html.unescape(value.strip())
                elif (prefix, event) == ('{0}.xmltvid'.format(key), 'string'):
                    channel_xmltv_id = html.unescape(value.strip())
                elif (prefix, event) == ('{0}.icon'.format(key), 'string'):
                    channel_icon_source = value.strip()
                elif (prefix, event) == (key, 'end_map'):
                    # Channel object complete — materialize and persist it
                    channel = XMLTVChannel(
                        provider='SmoothStreams',
                        m3u8_group='SmoothStreams',
                        xmltv_id=channel_xmltv_id,
                        number=channel_number,
                        display_names=[
                            XMLTVDisplayName(language=None, text=channel_name)
                        ],
                        icons=[
                            XMLTVIcon(source=channel_icon_source,
                                      width=None,
                                      height=None)
                        ],
                        urls=[],
                    )
                    cls._apply_channel_transformations(
                        channel, channel_name_map, not do_use_provider_icons)

                    parsed_channel_xmltv_id_to_channel[
                        channel_xmltv_id] = channel

                    db_session.add(
                        SmoothStreamsChannel(
                            id_=channel.xmltv_id,
                            m3u8_group='SmoothStreams',
                            number=channel.number,
                            name=channel.display_names[0].text,
                            pickle=pickle.dumps(
                                channel, protocol=pickle.HIGHEST_PROTOCOL),
                            complete_xmltv=channel.format(minimal_xmltv=False),
                            minimal_xmltv=channel.format(),
                        ))

            db_session.flush()

            logger.debug(
                'Processed Fog JSON channels\nFile name => %s',
                SmoothStreamsConstants.FOG_CHANNELS_JSON_FILE_NAME,
            )
        except Exception:
            logger.error(
                'Failed to process Fog JSON channels\nFile name => %s',
                SmoothStreamsConstants.FOG_CHANNELS_JSON_FILE_NAME,
            )

            raise

    @classmethod
    def _parse_fog_epg_xml(cls, db_session,
                           parsed_channel_xmltv_id_to_channel):
        """Stream-parse the Fog XMLTV EPG feed into the database.

        Iterates the XML incrementally (iterparse) so the full document is
        never held in memory: every fully-read <programme> element is
        converted into an XMLTVProgram and added to ``db_session``, then the
        element (and the accumulating <tv> root) is cleared to release
        memory.  <channel> elements are skipped — channels come from the
        Fog JSON feed via ``parsed_channel_xmltv_id_to_channel``.

        :param db_session: database session programs are added to (flushed
            periodically and at the end, not committed here)
        :param parsed_channel_xmltv_id_to_channel: xmltv id -> XMLTVChannel
            mapping produced by the channel parsing step
        :raises Exception: re-raises any parsing/database error after logging
        """
        epg_xml_stream = cls._request_fog_epg_xml()

        logger.debug(
            'Processing Fog XML EPG\nFile name => %s',
            SmoothStreamsConstants.FOG_EPG_XML_FILE_NAME,
        )

        # Counts programs added since the last flush (see modulo check below)
        number_of_objects_added_to_db_session = 0

        tv_element = None
        tv_date = None

        try:
            for (event, element) in etree.iterparse(
                    epg_xml_stream,
                    events=('start', 'end'),
                    tag=('channel', 'programme', 'tv'),
            ):
                if event == 'end':
                    if element.tag == 'channel':
                        # Channels are sourced from the JSON feed; just free
                        # the memory held by the parsed element tree
                        element.clear()
                        tv_element.clear()
                    elif element.tag == 'programme':
                        # XMLTV timestamps: 'YYYYMMDDHHMMSS +ZZZZ',
                        # normalized to UTC
                        program_start = datetime.strptime(
                            element.get('start'),
                            '%Y%m%d%H%M%S %z').astimezone(pytz.utc)
                        program_stop = datetime.strptime(
                            element.get('stop'),
                            '%Y%m%d%H%M%S %z').astimezone(pytz.utc)
                        program_pdc_start = element.get('pdc-start')
                        program_vps_start = element.get('vps-start')
                        program_show_view = element.get('showview')
                        program_video_plus = element.get('videoplus')
                        program_channel_xmltv_id = element.get('channel')
                        program_clump_index = element.get('clumpidx')
                        # Accumulators for every optional XMLTV sub-element
                        program_titles = []
                        program_sub_titles = []
                        program_descriptions = []
                        program_credits = None
                        program_date = None
                        program_categories = []
                        program_keywords = []
                        program_language = None
                        program_original_language = None
                        program_length = None
                        program_icons = []
                        program_urls = []
                        program_countries = []
                        program_episode_numbers = []
                        program_video = None
                        program_audio = None
                        program_previously_shown = None
                        program_premiere = None
                        program_last_chance = None
                        program_new = None
                        program_subtitles = []
                        program_ratings = []
                        program_star_ratings = []
                        program_reviews = []

                        # One pass over the programme's children, dispatching
                        # on tag name per the XMLTV DTD
                        for sub_element in list(element):
                            if sub_element.tag == 'title':
                                program_titles.append(
                                    XMLTVTitle(
                                        language=sub_element.get('lang'),
                                        text=sub_element.text,
                                    ))
                            elif sub_element.tag == 'sub-title':
                                program_sub_titles.append(
                                    XMLTVSubTitle(
                                        language=sub_element.get('lang'),
                                        text=sub_element.text,
                                    ))
                            elif sub_element.tag == 'desc':
                                program_descriptions.append(
                                    XMLTVDescription(
                                        language=sub_element.get('lang'),
                                        text=sub_element.text,
                                    ))
                            elif sub_element.tag == 'credits':
                                credits_actors = []
                                credits_adapters = []
                                credits_commentators = []
                                credits_composers = []
                                credits_directors = []
                                credits_editors = []
                                credits_guests = []
                                credits_presenters = []
                                credits_producers = []
                                credits_writers = []

                                for sub_sub_element in list(sub_element):
                                    if sub_sub_element.tag == 'actor':
                                        credits_actors.append(
                                            XMLTVActor(
                                                sub_sub_element.get('role'),
                                                sub_sub_element.text,
                                            ))
                                    elif sub_sub_element.tag == 'adapter':
                                        credits_adapters.append(
                                            XMLTVAdapter(sub_sub_element.text))
                                    elif sub_sub_element.tag == 'commentator':
                                        credits_commentators.append(
                                            XMLTVCommentator(
                                                sub_sub_element.text))
                                    elif sub_sub_element.tag == 'composer':
                                        credits_composers.append(
                                            XMLTVComposer(
                                                sub_sub_element.text))
                                    elif sub_sub_element.tag == 'director':
                                        credits_directors.append(
                                            XMLTVDirector(
                                                sub_sub_element.text))
                                    elif sub_sub_element.tag == 'editor':
                                        credits_editors.append(
                                            XMLTVEditor(sub_sub_element.text))
                                    elif sub_sub_element.tag == 'guest':
                                        credits_guests.append(
                                            XMLTVGuest(sub_sub_element.text))
                                    elif sub_sub_element.tag == 'presenter':
                                        credits_presenters.append(
                                            XMLTVPresenter(
                                                sub_sub_element.text))
                                    elif sub_sub_element.tag == 'producer':
                                        credits_producers.append(
                                            XMLTVProducer(
                                                sub_sub_element.text))
                                    elif sub_sub_element.tag == 'writer':
                                        credits_writers.append(
                                            XMLTVWriter(sub_sub_element.text))

                                program_credits = XMLTVCredits(
                                    actors=credits_actors,
                                    adapters=credits_adapters,
                                    commentators=credits_commentators,
                                    composers=credits_composers,
                                    directors=credits_directors,
                                    editors=credits_editors,
                                    guests=credits_guests,
                                    presenters=credits_presenters,
                                    producers=credits_producers,
                                    writers=credits_writers,
                                )
                            elif sub_element.tag == 'date':
                                program_date = XMLTVDate(text=sub_element.text)
                            elif sub_element.tag == 'category':
                                program_categories.append(
                                    XMLTVCategory(
                                        language=sub_element.get('lang'),
                                        text=sub_element.text,
                                    ))
                            elif sub_element.tag == 'keyword':
                                program_keywords.append(
                                    XMLTVKeyword(
                                        language=sub_element.get('lang'),
                                        text=sub_element.text,
                                    ))
                            elif sub_element.tag == 'language':
                                program_language = XMLTVLanguage(
                                    language=sub_element.get('lang'),
                                    text=sub_element.text,
                                )
                            elif sub_element.tag == 'orig-language':
                                program_original_language = XMLTVOriginalLanguage(
                                    language=sub_element.get('lang'),
                                    text=sub_element.text,
                                )
                            elif sub_element.tag == 'length':
                                program_length = XMLTVLength(
                                    units=sub_element.get('units'),
                                    text=sub_element.text,
                                )
                            elif sub_element.tag == 'icon':
                                program_icons.append(
                                    XMLTVIcon(
                                        source=sub_element.get('src'),
                                        width=sub_element.get('width'),
                                        height=sub_element.get('height'),
                                    ))
                            elif sub_element.tag == 'url':
                                program_urls.append(
                                    XMLTVURL(text=sub_element.text))
                            elif sub_element.tag == 'country':
                                program_countries.append(
                                    XMLTVCountry(
                                        language=sub_element.get('lang'),
                                        text=sub_element.text,
                                    ))
                            elif sub_element.tag == 'episode-num':
                                program_episode_numbers.append(
                                    XMLTVEpisodeNumber(
                                        system=sub_element.get('system'),
                                        text=sub_element.text,
                                    ))
                            elif sub_element.tag == 'video':
                                video_present = None
                                video_colour = None
                                video_aspect = None
                                video_quality = None

                                for sub_sub_element in list(sub_element):
                                    if sub_sub_element.tag == 'present':
                                        video_present = XMLTVPresent(
                                            sub_sub_element.text)
                                    elif sub_sub_element.tag == 'colour':
                                        video_colour = XMLTVColour(
                                            sub_sub_element.text)
                                    elif sub_sub_element.tag == 'aspect':
                                        video_aspect = XMLTVAspect(
                                            sub_sub_element.text)
                                    elif sub_sub_element.tag == 'quality':
                                        video_quality = XMLTVQuality(
                                            sub_sub_element.text)

                                # Only build the container when at least one
                                # child was present
                                if (video_present is not None
                                        or video_colour is not None
                                        or video_aspect is not None
                                        or video_quality is not None):
                                    program_video = XMLTVVideo(
                                        present=video_present,
                                        colour=video_colour,
                                        aspect=video_aspect,
                                        quality=video_quality,
                                    )
                            elif sub_element.tag == 'audio':
                                audio_present = None
                                audio_stereo = None

                                for sub_sub_element in list(sub_element):
                                    if sub_sub_element.tag == 'present':
                                        audio_present = XMLTVPresent(
                                            sub_sub_element.text)
                                    elif sub_sub_element.tag == 'stereo':
                                        audio_stereo = XMLTVStereo(
                                            sub_sub_element.text)

                                if (audio_present is not None
                                        or audio_stereo is not None):
                                    program_audio = XMLTVAudio(
                                        present=audio_present,
                                        stereo=audio_stereo)
                            elif sub_element.tag == 'previously-shown':
                                program_previously_shown = XMLTVPreviouslyShown(
                                    start=sub_element.get('start'),
                                    channel=sub_element.get('channel'),
                                )
                            elif sub_element.tag == 'premiere':
                                program_premiere = XMLTVPremiere(
                                    language=sub_element.get('lang'),
                                    text=sub_element.text,
                                )
                            elif sub_element.tag == 'last-chance':
                                program_last_chance = XMLTVLastChance(
                                    language=sub_element.get('lang'),
                                    text=sub_element.text,
                                )
                            elif sub_element.tag == 'new':
                                program_new = XMLTVNew()
                            elif sub_element.tag == 'subtitles':
                                subtitles_type = sub_element.get('type')
                                subtitles_language = None

                                for sub_sub_element in sub_element:
                                    if sub_sub_element.tag == 'language':
                                        subtitles_language = XMLTVLanguage(
                                            language=sub_sub_element.get(
                                                'lang'),
                                            text=sub_sub_element.text,
                                        )

                                program_subtitles.append(
                                    XMLTVSubtitles(
                                        type_=subtitles_type,
                                        language=subtitles_language,
                                    ))
                            elif sub_element.tag == 'rating':
                                rating_system = sub_element.get('system')
                                rating_value = None
                                rating_icons = []

                                for sub_sub_element in sub_element:
                                    if sub_sub_element.tag == 'value':
                                        rating_value = XMLTVValue(
                                            text=sub_sub_element.text)
                                    elif sub_sub_element.tag == 'icon':
                                        rating_icons.append(
                                            XMLTVIcon(
                                                source=sub_sub_element.get(
                                                    'src'),
                                                width=sub_sub_element.get(
                                                    'width'),
                                                height=sub_sub_element.get(
                                                    'height'),
                                            ))

                                program_ratings.append(
                                    XMLTVRating(
                                        system=rating_system,
                                        value=rating_value,
                                        icons=rating_icons,
                                    ))
                            elif sub_element.tag == 'star-rating':
                                star_rating_system = sub_element.get('system')
                                star_rating_value = None
                                star_rating_icons = []

                                for sub_sub_element in sub_element:
                                    if sub_sub_element.tag == 'value':
                                        star_rating_value = XMLTVValue(
                                            text=sub_sub_element.text)
                                    elif sub_sub_element.tag == 'icon':
                                        star_rating_icons.append(
                                            XMLTVIcon(
                                                source=sub_sub_element.get(
                                                    'src'),
                                                width=sub_sub_element.get(
                                                    'width'),
                                                height=sub_sub_element.get(
                                                    'height'),
                                            ))

                                program_star_ratings.append(
                                    XMLTVStarRating(
                                        system=star_rating_system,
                                        value=star_rating_value,
                                        icons=star_rating_icons,
                                    ))
                            elif sub_element.tag == 'review':
                                program_reviews.append(
                                    XMLTVReview(
                                        type_=sub_element.get('type'),
                                        source=sub_element.get('source'),
                                        reviewer=sub_element.get('reviewer'),
                                        language=sub_element.get('lang'),
                                        text=sub_element.text,
                                    ))

                        # NOTE(review): raises KeyError if the programme
                        # references a channel not present in the JSON feed —
                        # caught by the except below
                        channel = parsed_channel_xmltv_id_to_channel[
                            program_channel_xmltv_id]
                        program = XMLTVProgram(
                            provider='SmoothStreams',
                            start=program_start,
                            stop=program_stop,
                            pdc_start=program_pdc_start,
                            vps_start=program_vps_start,
                            show_view=program_show_view,
                            video_plus=program_video_plus,
                            channel_xmltv_id=channel.xmltv_id,
                            clump_index=program_clump_index,
                            titles=program_titles,
                            sub_titles=program_sub_titles,
                            descriptions=program_descriptions,
                            credits_=program_credits,
                            date=program_date,
                            categories=program_categories,
                            keywords=program_keywords,
                            language=program_language,
                            original_language=program_original_language,
                            length=program_length,
                            icons=program_icons,
                            urls=program_urls,
                            countries=program_countries,
                            episode_numbers=program_episode_numbers,
                            video=program_video,
                            audio=program_audio,
                            previously_shown=program_previously_shown,
                            premiere=program_premiere,
                            last_chance=program_last_chance,
                            new=program_new,
                            subtitles=program_subtitles,
                            ratings=program_ratings,
                            star_ratings=program_star_ratings,
                            reviews=program_reviews,
                        )

                        db_session.add(
                            SmoothStreamsProgram(
                                id_='{0}'.format(uuid.uuid4()),
                                start=program.start,
                                stop=program.stop,
                                channel_xmltv_id=channel.xmltv_id,
                                channel_number=channel.number,
                                pickle=pickle.dumps(
                                    program, protocol=pickle.HIGHEST_PROTOCOL),
                                complete_xmltv=program.format(
                                    minimal_xmltv=False),
                                minimal_xmltv=program.format(),
                            ))
                        number_of_objects_added_to_db_session += 1

                        # Release parsed XML memory (incremental parsing)
                        element.clear()
                        tv_element.clear()
                elif event == 'start':
                    if element.tag == 'tv':
                        # Keep a handle on the root so it can be cleared as
                        # children are consumed
                        tv_element = element

                        tv_date = datetime.strptime(
                            element.get('date'),
                            '%Y%m%d%H%M%S %z').replace(tzinfo=pytz.utc)

                # Flush in batches of 1000 to bound session memory
                if (number_of_objects_added_to_db_session
                        and number_of_objects_added_to_db_session % 1000 == 0):
                    db_session.flush()

            db_session.flush()

            logger.debug(
                'Processed Fog XML EPG\nFile name    => %s\nGenerated on => %s',
                SmoothStreamsConstants.FOG_EPG_XML_FILE_NAME,
                tv_date,
            )
        except Exception:
            logger.error(
                'Failed to process Fog XML EPG\nFile name    => %s',
                SmoothStreamsConstants.FOG_EPG_XML_FILE_NAME,
            )

            raise

    @classmethod
    def _parse_smoothstreams_epg_json(cls, db_session, channel_name_map,
                                      do_use_provider_icons):
        """Stream-parse the provider's JSON EPG and persist channels/programs.

        The document is parsed incrementally with ijson, so it is never
        fully loaded into memory.  Program events are buffered per channel
        in `programs` and only written once the channel's map closes,
        because the channel's XMLTV id is only assigned at that point (by
        _apply_channel_transformations).  Pending objects are flushed to the
        database every 1000 additions to bound session memory.

        :param db_session: database session objects are added to/flushed on
        :param channel_name_map: passed through to the channel transformations
        :param do_use_provider_icons: when True, provider icons are kept
            (inverted before being passed to _apply_channel_transformations)
        :raises Exception: any parse/database error is logged and re-raised
        """
        epg_json_stream = cls._request_smoothstreams_epg_json()

        logger.debug(
            'Processing SmoothStreams JSON EPG\nFile name => %s',
            SmoothStreamsConstants.EPG_FILE_NAME,
        )

        number_of_objects_added_to_db_session = 0

        # Keys of the currently open 'data' (channel) and 'events' (program)
        # maps; used to build the ijson prefixes matched below.
        data_id = None
        events_id = None

        generated_on = None

        # State for the channel currently being parsed.
        channel_number = None
        channel_name = None
        channel_icon_source = None

        # Programs buffered until the enclosing channel map ends.
        programs = []

        # State for the program event currently being parsed.
        program_title = None
        program_description = None
        program_start = None
        program_runtime = None

        try:
            ijson_parser = ijson.parse(epg_json_stream)

            for (prefix, event, value) in ijson_parser:
                if (prefix, event) == ('generated_on', 'string'):
                    # Top-level epoch timestamp of when the EPG was generated.
                    generated_on = datetime.fromtimestamp(int(value), pytz.utc)
                elif (prefix, event) == ('data', 'map_key'):
                    data_id = value
                elif (prefix, event) == ('data.{0}.events'.format(data_id),
                                         'map_key'):
                    events_id = value
                elif (prefix, event) == ('data.{0}.number'.format(data_id),
                                         'string'):
                    channel_number = int(value.strip())
                elif (prefix, event) == ('data.{0}.name'.format(data_id),
                                         'string'):
                    channel_name = html.unescape(value.strip())
                elif (prefix, event) == ('data.{0}.img'.format(data_id),
                                         'string'):
                    channel_icon_source = value.strip()
                elif (prefix, event) == (
                        'data.{0}.events.{1}'.format(data_id, events_id),
                        'end_map',
                ):
                    # One program event is complete: derive its stop time
                    # from start + runtime (minutes) and buffer it.
                    program_stop = program_start + timedelta(
                        minutes=program_runtime)

                    programs.append(
                        XMLTVProgram(
                            provider='SmoothStreams',
                            start=program_start,
                            stop=program_stop,
                            pdc_start=None,
                            vps_start=None,
                            show_view=None,
                            video_plus=None,
                            channel_xmltv_id=None,
                            clump_index=None,
                            titles=[
                                XMLTVTitle(language=None, text=program_title)
                            ],
                            sub_titles=[],
                            descriptions=[
                                XMLTVDescription(language=None,
                                                 text=program_description)
                            ],
                            credits_=None,
                            date=None,
                            categories=[],
                            keywords=[],
                            language=None,
                            original_language=None,
                            length=None,
                            icons=[],
                            urls=[],
                            countries=[],
                            episode_numbers=[],
                            video=None,
                            audio=None,
                            previously_shown=None,
                            premiere=None,
                            last_chance=None,
                            new=None,
                            subtitles=[],
                            ratings=[],
                            star_ratings=[],
                            reviews=[],
                        ))

                    # Reset per-program state for the next event.
                    program_title = None
                    program_description = None
                    program_start = None
                    program_runtime = None
                elif (prefix, event) == (
                        'data.{0}.events.{1}.description'.format(
                            data_id, events_id),
                        'string',
                ):
                    program_description = html.unescape(value)
                elif (prefix, event) == (
                        'data.{0}.events.{1}.name'.format(data_id, events_id),
                        'string',
                ):
                    program_title = html.unescape(value)
                elif (prefix, event) == (
                        'data.{0}.events.{1}.runtime'.format(
                            data_id, events_id),
                        'number',
                ):
                    # The feed emits runtime either as a JSON number...
                    program_runtime = value
                elif (prefix, event) == (
                        'data.{0}.events.{1}.runtime'.format(
                            data_id, events_id),
                        'string',
                ):
                    # ...or as a string; normalize the string form to int.
                    program_runtime = int(value)
                elif (prefix, event) == (
                        'data.{0}.events.{1}.time'.format(data_id, events_id),
                        'string',
                ):
                    program_start = datetime.fromtimestamp(
                        int(value), pytz.utc)
                elif (prefix, event) == ('data.{0}'.format(data_id),
                                         'end_map'):
                    # The channel map is complete: build the channel, apply
                    # the transformations (which set its XMLTV id), then
                    # persist it together with its buffered programs.
                    channel = XMLTVChannel(
                        provider='SmoothStreams',
                        m3u8_group='SmoothStreams',
                        xmltv_id=None,
                        number=channel_number,
                        display_names=[
                            XMLTVDisplayName(language=None, text=channel_name)
                        ],
                        icons=[
                            XMLTVIcon(source=channel_icon_source,
                                      width=None,
                                      height=None)
                        ],
                        urls=[],
                    )
                    cls._apply_channel_transformations(
                        channel, channel_name_map, not do_use_provider_icons)

                    db_session.add(
                        SmoothStreamsChannel(
                            id_=channel.xmltv_id,
                            m3u8_group='SmoothStreams',
                            number=channel.number,
                            name=channel.display_names[0].text,
                            pickle=pickle.dumps(
                                channel, protocol=pickle.HIGHEST_PROTOCOL),
                            complete_xmltv=channel.format(minimal_xmltv=False),
                            minimal_xmltv=channel.format(),
                        ))
                    number_of_objects_added_to_db_session += 1

                    if (number_of_objects_added_to_db_session and
                            number_of_objects_added_to_db_session % 1000 == 0):
                        db_session.flush()

                    for program in programs:
                        # Programs inherit the id assigned to their channel.
                        program.channel_xmltv_id = channel.xmltv_id

                        db_session.add(
                            SmoothStreamsProgram(
                                id_='{0}'.format(uuid.uuid4()),
                                start=program.start,
                                stop=program.stop,
                                channel_xmltv_id=channel.xmltv_id,
                                channel_number=channel.number,
                                pickle=pickle.dumps(
                                    program, protocol=pickle.HIGHEST_PROTOCOL),
                                complete_xmltv=program.format(
                                    minimal_xmltv=False),
                                minimal_xmltv=program.format(),
                            ))
                        number_of_objects_added_to_db_session += 1

                        if (number_of_objects_added_to_db_session
                                and number_of_objects_added_to_db_session %
                                1000 == 0):
                            db_session.flush()

                    # Reset per-channel state for the next channel.
                    channel_number = None
                    channel_name = None
                    channel_icon_source = None

                    programs = []

            db_session.flush()

            logger.debug(
                'Processed SmoothStreams JSON EPG\n'
                'File name    => %s\n'
                'Generated on => %s',
                SmoothStreamsConstants.EPG_FILE_NAME,
                generated_on,
            )
        except Exception:
            logger.error(
                'Failed to process SmoothStreams JSON EPG\nFile name    => %s',
                SmoothStreamsConstants.EPG_FILE_NAME,
            )

            raise

    @classmethod
    def _request_fog_channels_json(cls):
        """Download Fog's channels JSON file.

        Returns the raw (urllib3) response stream on success; raises
        requests.HTTPError for 4xx/5xx status codes.
        """
        url = '{0}{1}'.format(
            SmoothStreamsConstants.FOG_EPG_BASE_URL,
            SmoothStreamsConstants.FOG_CHANNELS_JSON_FILE_NAME,
        )

        logger.debug(
            'Downloading %s\nURL => %s',
            SmoothStreamsConstants.FOG_CHANNELS_JSON_FILE_NAME,
            url,
        )

        session = requests.Session()
        response = Utility.make_http_request(session.get,
                                             url,
                                             headers=session.headers,
                                             stream=True)

        if response.status_code != requests.codes.OK:
            logger.error(Utility.assemble_response_from_log_message(response))

            response.raise_for_status()

            # Non-OK statuses that raise_for_status does not treat as errors
            # (e.g. redirects) fall through without a stream, as before.
            return None

        # Have urllib3 transparently decode gzip/deflate content.
        response.raw.decode_content = True

        logger.trace(Utility.assemble_response_from_log_message(response))

        return response.raw

    @classmethod
    def _request_fog_epg_xml(cls):
        """Download Fog's XML EPG file.

        Returns the raw (urllib3) response stream on success; raises
        requests.HTTPError for 4xx/5xx status codes.
        """
        url = '{0}{1}'.format(
            SmoothStreamsConstants.FOG_EPG_BASE_URL,
            SmoothStreamsConstants.FOG_EPG_XML_FILE_NAME,
        )

        logger.debug(
            'Downloading %s\nURL => %s',
            SmoothStreamsConstants.FOG_EPG_XML_FILE_NAME,
            url,
        )

        session = requests.Session()
        response = Utility.make_http_request(session.get,
                                             url,
                                             headers=session.headers,
                                             stream=True)

        if response.status_code != requests.codes.OK:
            logger.error(Utility.assemble_response_from_log_message(response))

            response.raise_for_status()

            # Non-OK statuses that raise_for_status does not treat as errors
            # (e.g. redirects) fall through without a stream, as before.
            return None

        # Have urllib3 transparently decode gzip/deflate content.
        response.raw.decode_content = True

        logger.trace(Utility.assemble_response_from_log_message(response))

        return response.raw

    @classmethod
    def _request_smoothstreams_epg_json(cls):
        """Download the provider's JSON EPG file.

        Returns the raw (urllib3) response stream on success; raises
        requests.HTTPError for 4xx/5xx status codes.
        """
        url = '{0}{1}'.format(SmoothStreamsConstants.EPG_BASE_URL,
                              SmoothStreamsConstants.EPG_FILE_NAME)

        logger.debug('Downloading %s\nURL => %s',
                     SmoothStreamsConstants.EPG_FILE_NAME, url)

        session = requests.Session()
        response = Utility.make_http_request(session.get,
                                             url,
                                             headers=session.headers,
                                             stream=True)

        if response.status_code != requests.codes.OK:
            logger.error(Utility.assemble_response_from_log_message(response))

            response.raise_for_status()

            # Non-OK statuses that raise_for_status does not treat as errors
            # (e.g. redirects) fall through without a stream, as before.
            return None

        # Have urllib3 transparently decode gzip/deflate content.
        response.raw.decode_content = True

        logger.trace(Utility.assemble_response_from_log_message(response))

        return response.raw

    @classmethod
    def _update_epg(cls, **kwargs):
        """Refresh the EPG into a temporary database, then migrate it live.

        Expects 'channel_name_map' and 'do_use_provider_icons' in kwargs.
        The configured SMOOTHSTREAMS_EPG_SOURCE selects which parser fills
        the temporary database.  The refresh timer is always re-armed: on
        failure it is armed for a retry instead of the regular interval.

        :raises Exception: any parse/commit/migrate error is re-raised after
            the timer has been re-armed for retry
        """
        with cls._lock:
            super()._update_epg()

            channel_name_map = kwargs['channel_name_map']
            do_use_provider_icons = kwargs['do_use_provider_icons']

            was_exception_raised = False

            SmoothStreamsDatabase.initialize_temporary()

            db_session = SmoothStreamsDatabase.create_temporary_session()

            try:
                # Read the EPG source once instead of re-querying the
                # configuration (and taking its reader lock) in every elif
                # branch; this also guarantees the whole chain compares one
                # consistent value.
                epg_source = Configuration.get_configuration_parameter(
                    'SMOOTHSTREAMS_EPG_SOURCE')

                if epg_source == SmoothStreamsEPGSource.FOG.value:
                    parsed_channel_xmltv_id_to_channel = {}

                    cls._parse_fog_channels_json(
                        db_session,
                        channel_name_map,
                        do_use_provider_icons,
                        parsed_channel_xmltv_id_to_channel,
                    )
                    cls._parse_fog_epg_xml(db_session,
                                           parsed_channel_xmltv_id_to_channel)
                elif epg_source == SmoothStreamsEPGSource.OTHER.value:
                    cls._parse_external_epg_xml(
                        db_session,
                        channel_name_map=channel_name_map,
                        do_use_provider_icons=do_use_provider_icons,
                    )
                elif epg_source == SmoothStreamsEPGSource.PROVIDER.value:
                    cls._parse_smoothstreams_epg_json(db_session,
                                                      channel_name_map,
                                                      do_use_provider_icons)

                # Record the settings fingerprint and refresh timestamp so a
                # later run can decide whether another refresh is needed.
                db_session.add(
                    SmoothStreamsSetting(
                        'epg_settings_md5',
                        cls._calculate_epg_settings_md5(**kwargs)))
                db_session.add(
                    SmoothStreamsSetting(
                        'last_epg_refresh_date_time_in_utc',
                        datetime.strftime(datetime.now(pytz.utc),
                                          '%Y-%m-%d %H:%M:%S%z'),
                    ))

                db_session.commit()
            except Exception:
                was_exception_raised = True

                db_session.rollback()

                raise
            finally:
                cls._initialize_refresh_epg_timer(
                    do_set_timer_for_retry=was_exception_raised)

                db_session.close()

                if not was_exception_raised:
                    try:
                        # Promote the temporary database to be the live one.
                        SmoothStreamsDatabase.migrate()
                    except Exception:
                        cls._initialize_refresh_epg_timer(
                            do_set_timer_for_retry=True)

                        raise

    @classmethod
    def _terminate(cls, **kwargs):
        # Intentionally a no-op: this provider has no termination work to do.
        # kwargs are accepted for signature compatibility and ignored.
        pass
Example #45
0
class Configuration(object):
    """Process-wide configuration store, read from a ConfigObj file and
    guarded by a reader/writer lock."""

    __slots__ = []

    # Current configuration as a flat {PARAMETER_NAME: value} dictionary.
    _configuration = {}
    # Filesystem path of the configuration file.
    _configuration_file_path = None
    # Observer watching the configuration file; stopped/started around
    # writes (see _update_configuration_file).
    _configuration_file_watchdog_observer = None
    # Reader/writer lock guarding _configuration and
    # _previous_configuration.
    _lock = RWLock()
    # Snapshot of the configuration taken before a re-read; diffed against
    # the new configuration in process_configuration_file_updates().
    _previous_configuration = {}

    @classmethod
    def _backup_configuration(cls):
        """Snapshot the current configuration into _previous_configuration.

        The snapshot is what process_configuration_file_updates() later
        diffs against.

        NOTE(review): read_configuration_file() calls this while already
        holding _lock.writer_lock, so this re-acquisition assumes the writer
        lock is reentrant — confirm against the RWLock implementation.
        """
        with cls._lock.writer_lock:
            cls._previous_configuration = copy.deepcopy(cls._configuration)

    @classmethod
    def _set_configuration(cls, configuration):
        # Replace the in-memory configuration dictionary.
        # NOTE(review): no locking here — presumably callers hold the writer
        # lock; verify at the call sites.
        cls._configuration = configuration

    @classmethod
    def _update_configuration_file(cls, configuration_object):
        """Write the given ConfigObj back to the configuration file.

        If the file watchdog observer is running it is stopped for the
        duration of the write (presumably so our own write does not trigger
        a re-read — confirm) and restarted afterwards, even when the write
        fails.  An OSError is logged rather than propagated.
        """
        if cls._configuration_file_watchdog_observer is not None:
            cls.stop_configuration_file_watchdog_observer()

        try:
            configuration_object.write()

            logger.debug('Updated configuration file\n'
                         'Configuration file path => {0}'.format(
                             cls._configuration_file_path))
        except OSError:
            logger.error(
                'Could not open the specified configuration file for writing\n'
                'Configuration file path => {0}'.format(
                    cls._configuration_file_path))
        finally:
            if cls._configuration_file_watchdog_observer is not None:
                cls.start_configuration_file_watchdog_observer()

    @classmethod
    def get_configuration_copy(cls):
        """Return a deep copy of the current configuration dictionary.

        The copy is made under the reader lock so the snapshot is
        internally consistent; callers may mutate it freely.
        """
        with cls._lock.reader_lock:
            snapshot = copy.deepcopy(cls._configuration)

        return snapshot

    @classmethod
    def get_configuration_file_path(cls):
        """Return the filesystem path of the configuration file."""
        return cls._configuration_file_path

    @classmethod
    def get_configuration_parameter(cls, parameter_name):
        """Return the value of a single configuration parameter.

        Looked up under the reader lock; raises KeyError for an unknown
        parameter name.
        """
        with cls._lock.reader_lock:
            parameter_value = cls._configuration[parameter_name]

        return parameter_value

    @classmethod
    def join_configuration_file_watchdog_observer(cls):
        """Block until the configuration file watchdog observer terminates.

        Raises AttributeError if the observer was never started (it is None).
        """
        cls._configuration_file_watchdog_observer.join()

    @classmethod
    def process_configuration_file_updates(cls):
        """Diff the current configuration against the previous snapshot and
        apply the side effect each detected change requires.

        Changes handled:
          * SERVER_PASSWORD          -> purge all user HTTP/S sessions
          * SERVER_HOSTNAME_* (any)  -> restart the HTTPS server
          * SERVER_HTTP_PORT         -> restart the HTTP server
          * SERVER_HTTPS_PORT        -> restart the HTTPS server

        All detected changes and actions are accumulated into a single debug
        log message.  Finally, both configurations are handed to each
        provider's configuration class so providers can react to their own
        options.
        """
        with cls._lock.writer_lock:
            message_to_log = []

            purge_http_sessions = False
            restart_http_server = False
            restart_https_server = False

            # <editor-fold desc="Detect and handle SERVER_PASSWORD change">
            if cls._configuration[
                    'SERVER_PASSWORD'] != cls._previous_configuration[
                        'SERVER_PASSWORD']:
                purge_http_sessions = True

                message_to_log.append(
                    'Detected a change in the password option in the [Server] section\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        cls._previous_configuration['SERVER_PASSWORD'],
                        cls._configuration['SERVER_PASSWORD']))
            # </editor-fold>

            # <editor-fold desc="Detect and handle SERVER_HOSTNAME_<LOOPBACK,PRIVATE,PUBLIC> change">
            loopback_hostname_updated = False
            private_hostname_updated = False
            public_hostname_updated = False

            if cls._configuration['SERVER_HOSTNAME_LOOPBACK'] != \
                    cls._previous_configuration['SERVER_HOSTNAME_LOOPBACK']:
                loopback_hostname_updated = True

                message_to_log.append(
                    'Detected a change in the loopback option in the [Hostnames] section\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        cls.
                        _previous_configuration['SERVER_HOSTNAME_LOOPBACK'],
                        cls._configuration['SERVER_HOSTNAME_LOOPBACK']))

            if cls._configuration[
                    'SERVER_HOSTNAME_PRIVATE'] != cls._previous_configuration[
                        'SERVER_HOSTNAME_PRIVATE']:
                private_hostname_updated = True

                message_to_log.append(
                    'Detected a change in the private option in the [Hostnames] section\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        cls._previous_configuration['SERVER_HOSTNAME_PRIVATE'],
                        cls._configuration['SERVER_HOSTNAME_PRIVATE']))

            if cls._configuration[
                    'SERVER_HOSTNAME_PUBLIC'] != cls._previous_configuration[
                        'SERVER_HOSTNAME_PUBLIC']:
                public_hostname_updated = True

                message_to_log.append(
                    'Detected a change in the public option in the [Hostnames] section\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        cls._previous_configuration['SERVER_HOSTNAME_PUBLIC'],
                        cls._configuration['SERVER_HOSTNAME_PUBLIC']))

            # A hostname change invalidates the HTTPS certificate's subject,
            # so the HTTPS server must be restarted.
            if loopback_hostname_updated or private_hostname_updated or public_hostname_updated:
                restart_https_server = True
            # </editor-fold>

            # <editor-fold desc="Detect and handle SERVER_HTTP_PORT change">
            if cls._configuration[
                    'SERVER_HTTP_PORT'] != cls._previous_configuration[
                        'SERVER_HTTP_PORT']:
                restart_http_server = True

                message_to_log.append(
                    'Detected a change in the http option in the [Ports] section\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        cls._previous_configuration['SERVER_HTTP_PORT'],
                        cls._configuration['SERVER_HTTP_PORT']))
            # </editor-fold>

            # <editor-fold desc="Detect and handle SERVER_HTTPS_PORT change">
            if cls._configuration[
                    'SERVER_HTTPS_PORT'] != cls._previous_configuration[
                        'SERVER_HTTPS_PORT']:
                restart_https_server = True

                message_to_log.append(
                    'Detected a change in the https option in the [Ports] section\n'
                    'Old value => {0}\n'
                    'New value => {1}\n'.format(
                        cls._previous_configuration['SERVER_HTTPS_PORT'],
                        cls._configuration['SERVER_HTTPS_PORT']))
            # </editor-fold>

            if purge_http_sessions:
                from iptv_proxy.http_server import HTTPRequestHandler

                message_to_log.append(
                    'Action => Purge all user HTTP/S sessions')

                with Database.get_write_lock():
                    db_session = Database.create_session()

                    try:
                        HTTPRequestHandler.purge_http_sessions(db_session)
                        db_session.commit()
                    except Exception:
                        # Best effort: log the failure and keep processing
                        # the remaining actions.
                        (type_, value_, traceback_) = sys.exc_info()
                        logger.error('\n'.join(
                            traceback.format_exception(type_, value_,
                                                       traceback_)))

                        db_session.rollback()
                    finally:
                        db_session.close()

            if restart_http_server:
                from iptv_proxy.controller import Controller

                message_to_log.append('Action => Restart HTTP server')

                Controller.shutdown_http_server()
                Controller.start_http_server()

            if restart_https_server:
                from iptv_proxy.controller import Controller
                from iptv_proxy.security import SecurityManager

                message_to_log.append('Action => Restart HTTPS server')

                # Regenerate the certificate first (when it is managed by
                # this application) so the restarted server serves the new
                # hostnames.
                if SecurityManager.get_auto_generate_self_signed_certificate():
                    SecurityManager.generate_self_signed_certificate()

                # BUGFIX: restart unconditionally.  Previously the restart
                # was nested inside the auto-generate-certificate branch, so
                # a port/hostname change with an externally managed
                # certificate logged the action but never restarted the
                # server (contrast the HTTP branch above).
                Controller.shutdown_https_server()
                Controller.start_https_server()

            if message_to_log:
                logger.debug('\n'.join(message_to_log))

            # Let every provider process its own configuration options.
            for provider_name in sorted(
                    ProvidersController.get_providers_map_class()):
                ProvidersController.get_provider_map_class(
                    provider_name).configuration_class(
                    ).process_configuration_file_updates(
                        cls._configuration, cls._previous_configuration)

    @classmethod
    def read_configuration_file(cls, initial_read=True):
        with cls._lock.writer_lock:
            cls._backup_configuration()

            try:
                configuration_object = ConfigObj(cls._configuration_file_path,
                                                 file_error=True,
                                                 indent_type='',
                                                 interpolation=False,
                                                 raise_errors=True,
                                                 write_empty_values=True)

                configuration_object_md5 = hashlib.md5(
                    '{0}'.format(configuration_object).encode()).hexdigest()

                configuration = {}
                providers = []

                non_defaultable_error = False
                error_message_to_log = []
                message_to_log = []

                password = None
                hostname_loopback = DEFAULT_HOSTNAME_LOOPBACK
                hostname_private = None
                hostname_public = None
                http_port = None
                https_port = None

                # <editor-fold desc="Read Server section">
                try:
                    server_section = configuration_object['Server']

                    try:
                        password = server_section['password']
                    except KeyError:
                        non_defaultable_error = True

                        error_message_to_log.append(
                            'Could not find a password option in the [Server] section\n'
                        )

                    try:
                        server_hostnames_section = server_section['Hostnames']

                        # <editor-fold desc="Read loopback option">
                        try:
                            hostname_loopback = server_hostnames_section[
                                'loopback']

                            if not Utility.is_valid_loopback_hostname(
                                    hostname_loopback):
                                error_message_to_log.append(
                                    'The loopback option in the [Hostnames] section has an invalid loopback hostname\n'
                                    'Defaulting to {0}\n'.format(
                                        DEFAULT_HOSTNAME_LOOPBACK))
                        except KeyError:
                            hostname_loopback = DEFAULT_HOSTNAME_LOOPBACK

                            error_message_to_log.append(
                                'The loopback option in the [Hostnames] section is missing\n'
                                'Defaulting to {0}\n'.format(
                                    DEFAULT_HOSTNAME_LOOPBACK))
                        # </editor-fold>

                        # <editor-fold desc="Read private option">
                        do_determine_private_ip_address = False

                        try:
                            hostname_private = server_hostnames_section[
                                'private']

                            if not Utility.is_valid_private_hostname(
                                    hostname_private):
                                if Utility.is_valid_public_hostname(
                                        hostname_private):
                                    error_message_to_log.append(
                                        'The private option in the [Hostnames] section has a public IP address\n'
                                    )
                                else:
                                    do_determine_private_ip_address = True
                        except KeyError:
                            do_determine_private_ip_address = True
                        # </editor-fold>

                        # <editor-fold desc="Read public option">
                        do_determine_public_ip_address = False

                        try:
                            hostname_public = server_hostnames_section[
                                'public']
                            if not Utility.is_valid_public_hostname(
                                    hostname_public):
                                do_determine_public_ip_address = True
                        except KeyError:
                            do_determine_public_ip_address = True
                        # </editor-fold>
                    except KeyError:
                        error_message_to_log.append(
                            'Could not find a [Hostnames] section in the [Server] section\n'
                        )

                        hostname_loopback = DEFAULT_HOSTNAME_LOOPBACK

                        do_determine_private_ip_address = True
                        do_determine_public_ip_address = True

                    if do_determine_private_ip_address:
                        hostname_private = Utility.determine_private_ip_address(
                        )

                        if hostname_private:
                            error_message_to_log.append(
                                'The private option in the [Hostnames] section has an invalid private IP address\n'
                                'Reverting to {0}\n'.format(hostname_private))

                    if do_determine_public_ip_address:
                        hostname_public = Utility.determine_public_ip_address()

                        if hostname_public:
                            error_message_to_log.append(
                                'The public option in the [Hostnames] section has an invalid public IP address\n'
                                'Reverting to {0}\n'.format(hostname_public))

                    try:
                        server_ports_section = server_section['Ports']

                        # <editor-fold desc="Read http option">
                        try:
                            http_port = server_ports_section['http']
                            if not Utility.is_valid_port_number(http_port):
                                non_defaultable_error = True

                                error_message_to_log.append(
                                    'The http option in the [Ports] section must be a number between 0 and 65535\n'
                                )
                        except KeyError:
                            non_defaultable_error = True

                            error_message_to_log.append(
                                'Could not find an http option in the [Ports] section\n'
                                'The http option in the [Ports] section must be a number between 0 and 65535\n'
                            )
                        # </editor-fold>

                        # <editor-fold desc="Read https option">
                        try:
                            https_port = server_ports_section['https']
                            if not Utility.is_valid_port_number(https_port):
                                non_defaultable_error = True

                                error_message_to_log.append(
                                    'The https option in the [Ports] section must be a number between 0 and 65535\n'
                                )
                        except KeyError:
                            non_defaultable_error = True

                            error_message_to_log.append(
                                'Could not find an https option in the [Ports] section\n'
                                'The https option in the [Ports] section must be a number between 0 and 65535\n'
                            )
                        # </editor-fold>
                    except KeyError:
                        non_defaultable_error = True

                        error_message_to_log.append(
                            'Could not find a [Ports] section in the [Server] section\n'
                        )
                except KeyError:
                    non_defaultable_error = True

                    error_message_to_log.append(
                        'Could not find a [Server] section\n')
                # </editor-fold>

                if not non_defaultable_error:
                    configuration = {
                        'SERVER_PASSWORD': password,
                        'SERVER_HOSTNAME_LOOPBACK': hostname_loopback,
                        'SERVER_HOSTNAME_PRIVATE': hostname_private,
                        'SERVER_HOSTNAME_PUBLIC': hostname_public,
                        'SERVER_HTTP_PORT': http_port,
                        'SERVER_HTTPS_PORT': https_port
                    }

                    message_to_log = [
                        '{0}ead configuration file\n'
                        'Configuration file path          => {1}\n\n'
                        'SERVER_PASSWORD                  => {2}\n'
                        'SERVER_HOSTNAME_LOOPBACK         => {3}\n'
                        'SERVER_HOSTNAME_PRIVATE          => {4}\n'
                        'SERVER_HOSTNAME_PUBLIC           => {5}\n'
                        'SERVER_HTTP_PORT                 => {6}\n'
                        'SERVER_HTTPS_PORT                => {7}'.format(
                            'R' if initial_read else 'Rer',
                            cls._configuration_file_path, password,
                            hostname_loopback, hostname_private,
                            hostname_public, http_port, https_port)
                    ]

                for provider_name in sorted(
                        ProvidersController.get_providers_map_class()):
                    ProvidersController.get_provider_map_class(
                        provider_name).configuration_class(
                        ).read_configuration_file(configuration_object,
                                                  configuration, providers,
                                                  message_to_log,
                                                  error_message_to_log)

                if not non_defaultable_error:
                    logger.info('\n'.join(message_to_log))

                    cls._set_configuration(configuration)

                    if configuration_object_md5 != hashlib.md5('{0}'.format(
                            configuration_object).encode()).hexdigest():
                        cls._update_configuration_file(configuration_object)

                    if initial_read:
                        ProvidersController.initialize_providers(providers)

                if error_message_to_log:
                    error_message_to_log.insert(
                        0, '{0} configuration file values\n'
                        'Configuration file path => {1}\n'.format(
                            'Invalid'
                            if non_defaultable_error else 'Warnings regarding',
                            cls._configuration_file_path))

                    if initial_read and non_defaultable_error:
                        error_message_to_log.append('Exiting...')
                    elif non_defaultable_error:
                        error_message_to_log.append(
                            'Configuration file skipped')
                    else:
                        error_message_to_log.append(
                            'Configuration file processed')

                    logger.error('\n'.join(error_message_to_log))

                    if initial_read and non_defaultable_error:
                        sys.exit()
            except OSError:
                logger.error(
                    'Could not open the specified configuration file for reading\n'
                    'Configuration file path => {0}'
                    '{1}'.format(cls._configuration_file_path,
                                 '\n\nExiting...' if initial_read else ''))

                if initial_read:
                    sys.exit()
            except SyntaxError as e:
                logger.error('Invalid configuration file syntax\n'
                             'Configuration file path => {0}\n'
                             '{1}'
                             '{2}'.format(
                                 cls._configuration_file_path, '{0}'.format(e),
                                 '\n\nExiting...' if initial_read else ''))

                if initial_read:
                    sys.exit()

    @classmethod
    def set_configuration_file_path(cls, configuration_file_path):
        """Record the path of the configuration file this class reads/writes."""
        setattr(cls, '_configuration_file_path', configuration_file_path)

    @classmethod
    def set_configuration_parameter(cls, parameter_name, parameter_value):
        """Set one in-memory configuration value while holding the writer lock."""
        with cls._lock.writer_lock:
            cls._configuration.update({parameter_name: parameter_value})

    @classmethod
    def start_configuration_file_watchdog_observer(cls):
        """Start watching the configuration file's directory for changes.

        The started observer is kept on the class so it can later be shut
        down by stop_configuration_file_watchdog_observer().
        """
        event_handler = ConfigurationEventHandler(cls._configuration_file_path)
        watched_directory = os.path.dirname(cls._configuration_file_path)

        observer = Observer()
        cls._configuration_file_watchdog_observer = observer
        # Non-recursive: only the configuration file's own directory matters.
        observer.schedule(event_handler, watched_directory, recursive=False)
        observer.start()

    @classmethod
    def stop_configuration_file_watchdog_observer(cls):
        """Stop the watchdog observer started by start_configuration_file_watchdog_observer()."""
        cls._configuration_file_watchdog_observer.stop()

    @classmethod
    def validate_update_configuration_request(cls, configuration):
        """Validate a proposed configuration update.

        Checks the server-level values (password, hostnames, ports) and then
        delegates provider-specific validation to each provider's
        configuration class.

        :param configuration: dict of configuration parameter name -> value
        :return: dict mapping camelCase field names to error-message strings;
                 empty when the configuration is valid
        """
        errors = {}

        # <editor-fold desc="Validate Server options">
        if not Utility.is_valid_server_password(
                configuration['SERVER_PASSWORD']):
            # NOTE(review): this message appears to have been redacted to
            # asterisks — restore a meaningful validation message here.
            errors['serverPassword'] = '******'

        if not Utility.is_valid_loopback_hostname(
                configuration['SERVER_HOSTNAME_LOOPBACK']):
            errors['serverHostnameLoopback'] = 'Must be a valid loopback IP address or hostname\n' \
                                               'Recommended value => {0}'.format(DEFAULT_HOSTNAME_LOOPBACK)

        if not Utility.is_valid_private_hostname(
                configuration['SERVER_HOSTNAME_PRIVATE']):
            # A public address is also acceptable for the "private" hostname.
            if not Utility.is_valid_public_hostname(
                    configuration['SERVER_HOSTNAME_PRIVATE']):
                private_ip_address = Utility.determine_private_ip_address()

                errors[
                    'serverHostnamePrivate'] = 'Must be a valid private IP address, public IP address, or hostname'

                if private_ip_address:
                    errors[
                        'serverHostnamePrivate'] += '\nRecommended value => {0}'.format(
                            private_ip_address)

        if not Utility.is_valid_public_hostname(
                configuration['SERVER_HOSTNAME_PUBLIC']):
            public_ip_address = Utility.determine_public_ip_address()

            errors[
                'serverHostnamePublic'] = 'Must be a valid public IP address or hostname'

            if public_ip_address:
                errors[
                    'serverHostnamePublic'] += '\nRecommended value => {0}'.format(
                        public_ip_address)

        if not Utility.is_valid_port_number(configuration['SERVER_HTTP_PORT']):
            errors['serverPort'] = 'Must be a number between 0 and 65535'

        # BUG FIX: the HTTPS port was read and written elsewhere but never
        # validated here; validate it the same way as the HTTP port.
        if not Utility.is_valid_port_number(
                configuration['SERVER_HTTPS_PORT']):
            errors['serverHttpsPort'] = 'Must be a number between 0 and 65535'
        # </editor-fold>

        for provider_name in sorted(
                ProvidersController.get_providers_map_class()):
            ProvidersController.get_provider_map_class(
                provider_name).configuration_class(
                ).validate_update_configuration_request(configuration, errors)

        return errors

    @classmethod
    def write_configuration_file(cls, configuration):
        """Persist the supplied configuration values to the configuration file.

        Rebuilds the [Server] section and one section per provider, then
        writes the file. Re-raises OSError after logging when the file cannot
        be opened for writing.
        """
        configuration_file_path = cls._configuration_file_path

        try:
            configuration_object = ConfigObj(configuration_file_path,
                                             file_error=True,
                                             interpolation=False,
                                             write_empty_values=True)

            # Build and install the [Server] section.
            configuration_object['Server'] = {
                'password': configuration['SERVER_PASSWORD'],
                'Ports': {
                    'http': configuration['SERVER_HTTP_PORT'],
                    'https': configuration['SERVER_HTTPS_PORT']
                },
                'Hostnames': {
                    'loopback': configuration['SERVER_HOSTNAME_LOOPBACK'],
                    'private': configuration['SERVER_HOSTNAME_PRIVATE'],
                    'public': configuration['SERVER_HOSTNAME_PUBLIC']
                }
            }

            # One section per provider, keyed by the provider's API class name.
            for provider_name in sorted(
                    ProvidersController.get_providers_map_class()):
                provider_map_class = ProvidersController.get_provider_map_class(
                    provider_name)
                provider_section = provider_map_class.configuration_class(
                ).create_section(configuration)

                if provider_section:
                    section_name = provider_map_class.api_class().__name__
                    configuration_object[section_name] = provider_section

            configuration_object.write()

            logger.debug('Updated configuration file\n'
                         'Configuration file path => {0}'.format(
                             configuration_file_path))
        except OSError:
            logger.error(
                'Could not open the specified configuration file for writing\n'
                'Configuration file path => {0}'.format(
                    configuration_file_path))

            raise
Example #46
0
class DeepDist:
    """DeepDist - Distributed deep learning.

    Serves a pickled model over HTTP from a master and applies gradient
    updates posted back by Spark workers via a user-supplied descent function.
    """

    def __init__(self,
                 model,
                 master='127.0.0.1:5000',
                 min_updates=0,
                 max_updates=4096):
        """DeepDist - Distributed deep learning.
        :param model: provide a model that can be trained in parallel on the workers
        :param master: host:port of the parameter server
        :param min_updates: minimum update count before counters may reset
        :param max_updates: maximum serves before /model blocks waiting for updates
        """
        self.model = model
        self.lock = RWLock()
        # Identity descent by default; replaced by the caller via train().
        self.descent = lambda model, gradient: model
        self.master = master
        self.state = 'serving'
        self.served = 0  # number of times the model has been served
        self.received = 0  # number of gradient updates received
        #self.server   = None
        self.pmodel = None  # cached pickled model; invalidated after updates
        self.min_updates = min_updates
        self.max_updates = max_updates
        print("THIS IS THE MASTER")
        print(self.master)
        print("\n")
        print("THIS IS THE MODEL 1")
        print(self.model)
        print("\n")

    def __enter__(self):
        # Run the Flask parameter server on a background thread.
        Thread(target=self.start).start()
        # self.server = Process(target=self.start)
        # self.server.start()
        return self

    def __exit__(self, type, value, traceback):
        # self.server.terminate()
        pass  # need to shut down server here

    def start(self):
        """Run the Flask HTTP server that serves the model and accepts updates."""
        from flask import Flask, request

        app = Flask(__name__)

        @app.route('/')
        def index():
            return 'DeepDist'

        @app.route('/model', methods=['GET', 'POST', 'PUT'])
        def model_flask():
            # Block (up to ~1000 s) until the server is serving and below the
            # serve cap.
            i = 0
            while (self.state != 'serving'
                   or self.served >= self.max_updates) and (i < 1000):
                time.sleep(1)
                i += 1

            # Pickle the model lazily on first read; the cached bytes are
            # reused until an update invalidates them.
            pmodel = None
            self.lock.acquire_read()
            if not self.pmodel:
                # Upgrade to a write lock and re-check before populating the
                # cache (double-checked after re-acquisition).
                self.lock.release()
                self.lock.acquire_write()
                if not self.pmodel:
                    # NOTE(review): `pickleDumper` is defined elsewhere; the
                    # sibling implementation uses pickle.dumps directly —
                    # confirm which is intended.
                    self.pmodel = pickleDumper.dumps(self.model, -1)
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
            else:
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
            return pmodel

        @app.route('/update', methods=['GET', 'POST', 'PUT'])
        def update_flask():
            # NOTE(review): unpickling request data executes arbitrary code;
            # only expose this endpoint on a trusted network.
            gradient = pickle.loads(request.data)

            self.lock.acquire_write()
            if self.min_updates <= self.served:
                # BUG FIX: was `state = 'receiving'` (a dead local variable);
                # the instance attribute must change so /model stops serving.
                self.state = 'receiving'
            self.received += 1

            self.descent(self.model, gradient)

            if self.received >= self.served and self.min_updates <= self.received:
                # All outstanding replicas reported back: reset counters and
                # invalidate the pickled-model cache.
                self.received = 0
                self.served = 0
                self.state = 'serving'
                self.pmodel = None

            self.lock.release()
            return 'OK'

        # BUG FIX: Python 2 print statement converted to a print() call so the
        # module parses under Python 3 (the rest of the file uses print()).
        print('Listening to 0.0.0.0:5000...')
        app.run(host='0.0.0.0', debug=True, threaded=True, use_reloader=False)

    def train(self, rdd, gradient, descent):
        """Train the model on the RDD's partitions.

        :param rdd: Spark RDD of training data
        :param gradient: function(model, data) -> gradient
        :param descent: function(model, gradient) applying the gradient
        :return: list of per-partition results from send_gradient
        """
        master = self.master  # will be pickled
        print("MASTER ROUND 2")
        print(master)
        if master is None:  # BUG FIX: was `== None`
            master = rdd.ctx._conf.get('spark.master')
        if master.startswith('local['):
            master = 'localhost:5000'
        else:
            if master.startswith('spark://'):
                master = '%s:5000' % urlparse.urlparse(master).netloc.split(
                    ':')[0]
            else:
                master = '%s:5000' % master.split(':')[0]
        # BUG FIX: Python 2 print statement converted to a print() call.
        print('\n*** Master: %s\n' % master)

        self.descent = descent

        def mapPartitions(data):
            # Fetch the current model, compute this partition's gradient, and
            # ship the gradient back to the master.
            return [
                send_gradient(gradient(fetch_model(master=master), data),
                              master=master)
            ]

        return rdd.mapPartitions(mapPartitions).collect()
class DeepDist:
    def __init__(self, model, master='127.0.0.1:5000', min_updates=0, max_updates=4096):
        """DeepDist - Distributed deep learning.
        :param model: provide a model that can be trained in parallel on the workers
        """
        # Model and synchronization primitives.
        self.model = model
        self.lock = RWLock()
        # Identity descent by default; replaced by the caller via train().
        self.descent = lambda model, gradient: model
        self.master = master
        # Parameter-server bookkeeping: serve/receive counters and the cached
        # pickled model (invalidated after updates).
        self.state = 'serving'
        self.served = 0
        self.received = 0
        #self.server   = None
        self.pmodel = None
        self.min_updates = min_updates
        self.max_updates = max_updates

    def start_server(self):
        """Launch the Flask parameter server (self.start) on a background thread."""
        Thread(target=self.start).start()

    def start(self):
        """Run the Flask HTTP server that serves the model and accepts updates."""
        from flask import Flask, request

        app = Flask(__name__)

        @app.route('/')
        def index():
            return 'DeepDist'

        @app.route('/model', methods=['GET', 'POST', 'PUT'])
        def model_flask():
            # Block (up to ~1000 s) until the server is serving and below the
            # serve cap.
            i = 0
            while (self.state != 'serving' or self.served >= self.max_updates) and (i < 1000):
                time.sleep(1)
                i += 1

            # pickle on first read; the cached bytes are reused until an
            # update invalidates them.
            pmodel = None
            self.lock.acquire_read()
            if not self.pmodel:
                # Upgrade to a write lock and re-check before populating the
                # cache (double-checked after re-acquisition).
                self.lock.release()
                self.lock.acquire_write()
                if not self.pmodel:
                    self.pmodel = pickle.dumps(self.model, -1)
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
            else:
                self.served += 1
                pmodel = self.pmodel
                self.lock.release()
                # BUG FIX: Python 2 print statement -> print() call.
                print("model replica weights were updated via /model")
            return pmodel

        @app.route('/update', methods=['GET', 'POST', 'PUT'])
        def update_flask():
            # NOTE(review): unpickling request data executes arbitrary code;
            # only expose this endpoint on a trusted network.
            gradient = pickle.loads(request.data)

            self.lock.acquire_write()
            if self.min_updates <= self.served:
                # BUG FIX: was `state = 'receiving'` (a dead local variable);
                # the instance attribute must change so /model stops serving.
                self.state = 'receiving'
            self.received += 1

            # Snapshot weights before the update for the debug output below.
            # assumes self.model has numpy-like syn0/syn1 (word2vec) — TODO confirm
            old_syn0, old_syn1 = self.model.syn0.copy(), self.model.syn1.copy()
            print("received gradient: " + str(gradient))

            self.descent(self.model, gradient)

            if self.received >= self.served and self.min_updates <= self.received:
                # All outstanding replicas reported back: reset counters and
                # invalidate the pickled-model cache.
                self.received = 0
                self.served = 0
                self.state = 'serving'
                self.pmodel = None

            self.lock.release()
            # BUG FIX: Python 2 print statements -> print() calls.
            print("server weights were updated by model replica")
            print("old weights: ")
            print(old_syn0[0:3, 0:3], old_syn1[0:3, 0:3])  # printing just the first few weights
            print("new weights: ")
            print(self.model.syn0[0:3, 0:3], self.model.syn1[0:3, 0:3])
            return 'OK'

        # BUG FIX: Python 2 print statement -> print() call.
        print('Listening to 0.0.0.0:5000...')
        app.run(host='0.0.0.0', debug=True, threaded=True, use_reloader=False)

    def train(self, rdd, gradient, descent):
        """Train the model on the RDD's partitions.

        :param rdd: Spark RDD of training data
        :param gradient: function(model, data) -> gradient
        :param descent: function(model, gradient) applying the gradient
        :return: list of per-partition results from send_gradient
        """
        master = self.master  # captured so only the string is pickled to workers
        # BUG FIX: Python 2 print statement converted to a print() call.
        print('\n*** Master: %s\n' % master)

        self.descent = descent

        def mapPartitions(data):
            # Fetch the current model, compute this partition's gradient, and
            # ship the gradient back to the master.
            return [send_gradient(gradient(fetch_model(master=master), data), master=master)]

        return rdd.mapPartitions(mapPartitions).collect()