Example no. 1
0
    def __init__(self, stream, server):
        """Build the GStreamer pipeline for an already-encoded stream.

        The pipeline is simply ``vorbisparse -> tee`` (no re-encoding is
        done), after which ``self.scale`` attaches the initial server sink.

        Args:
            stream: mapping with at least "name", "description" and
                "genres" keys — presumably a stream document from the
                database (TODO confirm against caller).
            server: passed straight through to ``self.scale()`` for the
                initial sink setup.
        """
        super(EncodedStreamer, self).__init__()

        # Serialises pipeline mutations (see other streamers in this file,
        # which use the same one-permit semaphore convention).
        self.lock = BoundedSemaphore(value=1)

        # Dedicated thread running the GLib main loop for bus callbacks.
        self.mainloop_thread = MainLoop()
        self.mainloop_thread.start()

        self.stream = stream
        self.servers = {}
        self.tailbins = []
        self.playing = False

        self.streamname = stream["name"]
        self.description = stream["description"]
        self.genre = ", ".join(stream["genres"])
        # self.default_quality = default_quality

        logging.debug("Streamer::__init__(): Constructing pipeline")
        pipe = gst.Pipeline()

        # threads
        queue = gst.element_factory_make("queue")

        # tee

        # self.oggdemux = gst.element_factory_make('oggdemux')
        self.vorbisparse = gst.element_factory_make('vorbisparse')
        # self.oggdemux.connect("pad-added", partial(self.__on_dynamic_pad, link = self.vorbisparse))

        # The tee fans the parsed Vorbis stream out to any number of sinks.
        tee = gst.element_factory_make('tee')
        # self.typefind = gst.element_factory_make('typefind')
        pipe.add(self.vorbisparse, tee)
        gst.element_link_many(self.vorbisparse, tee)

        # fakesink = gst.element_factory_make("fakesink")
        # fakesink.set_property("sync", 1)
        # pipe.add(fakesink)
        # gst.element_link_many(tee, fakesink)

        self.tee = tee
        self.pipe = pipe

        # only one quality, because this is encoded (no reencoding is done)
        # self.tees = {default_quality:tee}

        logging.debug("Streamer::__init__(): Running distribute")

        # Attach the first output branch for `server` (sync=1, init mode).
        self.scale(server, sync=1, init=True)

        # NOTE(review): `queue` is created above but never added to the
        # pipeline here; it is only stashed on the instance — confirm a
        # later method links it.
        self.queue = queue

        logging.debug("Streamer::__init__(): Running EncodedChannel")

        # Watch the pipeline bus for state changes and end-of-stream.
        self.bus = pipe.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect('message::state-changed',
                         self.on_message_state_changed)
        self.bus.connect('message::eos', self.on_eos)
Example no. 2
0
    def __init__(self, medias, observers):
        """Index the given media items and shuffle the playlist.

        Args:
            medias: iterable of media items, each registered via
                ``self.__add()``.
            observers: accepted for interface compatibility; not stored
                by this constructor.
        """
        self.history = None
        self.artists = OrderedDict({})
        self.lock = BoundedSemaphore(value=1)

        # Register every media item before the initial shuffle.
        for item in medias:
            self.__add(item)

        self.shuffle()
Example no. 3
0
 def __init__(self, maxsize = 0):
     """Set up the shared state of a multiprocessing queue.

     Args:
         maxsize: upper bound on queued items; values <= 0 mean "as large
             as the platform semaphore allows" (SEM_VALUE_MAX).
     """
     if maxsize <= 0:
         maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
     self._maxsize = maxsize
     # One-way pipe carries the serialized items between processes.
     self._reader, self._writer = Pipe(duplex=False)
     # Serialises concurrent readers of the pipe.
     self._rlock = Lock()
     # PID of the creating process, used to detect forks later.
     self._opid = os.getpid()
     if sys.platform == 'win32':
         # No writer lock on Windows — presumably pipe writes are atomic
         # there (as in CPython's own Queue); TODO confirm.
         self._wlock = None
     else:
         self._wlock = Lock()
     # Cross-process semaphore that enforces the maxsize bound.
     self._sem = BoundedSemaphore(maxsize)
     self._after_fork()
     if sys.platform != 'win32':
         # Re-initialise per-process state in forked children.
         register_after_fork(self, Queue._after_fork)
     return
Example no. 4
0
    def __init__(self, stream_id, history, playlist, observers):
        """Create the buffer and pre-fill it from the playlist.

        Args:
            stream_id: identifier of the stream this buffer feeds.
            history: playback history shared with the scheduler.
            playlist: track source, consumed via ``get_track()``.
            observers: update observers for buffer changes.
        """
        super(Buffer, self).__init__()

        self.lock = BoundedSemaphore(value=1)
        self.queue = OrderedDict({})
        self.stream_id = stream_id
        self.playlist = playlist
        self.history = history
        self.observers = observers

        # Pull tracks until the buffer reports itself full; each insertion
        # happens under the lock, while the fullness check does not.
        while not self.is_full():
            self.lock.acquire()
            self.__add_track(self.playlist.get_track())
            self.lock.release()
Example no. 5
0
def BoundedSemaphore(value=1):
    """
    Returns a bounded semaphore object

    Imports from the top-level ``multiprocessing`` package rather than
    ``multiprocessing.synchronize``: the synchronize-level class takes a
    keyword-only ``ctx`` argument on Python 3, so calling it with only
    ``value`` raises TypeError there.  The package-level factory supplies
    the default context and behaves identically on Python 2.
    """
    # Local import shadows this function's own name only inside the body.
    from multiprocessing import BoundedSemaphore
    return BoundedSemaphore(value)
Example no. 6
0
def copy_s3_bucket(SOURCE_BUCKET, DEST_BUCKET, prefix=None, threads=10):
    """
	Example usage: copy_s3_bucket(SOURCE_BUCKET='my-source-bucket', DEST_BUCKET='my-destination-bucket', prefix='parent/child/dir/', threads=20)
	"""
    # Init s3
    conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(SOURCE_BUCKET)
    dest_bucket = conn.get_bucket(DEST_BUCKET)

    # Filter by prefix
    rs = bucket.list()
    if prefix: rs = bucket.list(prefix)

    class CopyKey(Thread):
        def __init__(self, key_name):
            Thread.__init__(self)
            self.key_name = key_name
            self.status = False

        def run(self):
            # We must create new bucket instances for each thread, passing the key is not threadsafe
            thread_conn = S3Connection(AWS_ACCESS_KEY_ID,
                                       AWS_SECRET_ACCESS_KEY)
            thread_bucket = conn.get_bucket(SOURCE_BUCKET)
            thread_dest_bucket = conn.get_bucket(DEST_BUCKET)
            thread_key = thread_bucket.get_key(self.key_name)

            # Only copy if not exists on dest bucket
            if not thread_dest_bucket.get_key(self.key_name):
                pool_sema.acquire()
                self.status = "%s : Sempahore Acquired, Copy Next" % datetime.datetime.now(
                )
                try:
                    thread_key.copy(DEST_BUCKET, self.key_name, None, False,
                                    True)
                    self.status = "%s : Copy Success : %s" % (
                        datetime.datetime.now(), self.key_name)
                except:
                    self.status = "%s : Copy Error : %s" % (
                        datetime.datetime.now(), sys.exc_info())
                finally:
                    pool_sema.release()
            else:
                self.status = "%s : Key Already Exists, will not overwrite." % datetime.datetime.now(
                )

    key_copy_thread_list = []
    pool_sema = BoundedSemaphore(value=threads)
    total_keys = 0

    # Request threads
    for key in rs:
        total_keys += 1
        print "%s : Requesting copy thread for key %s" % (
            datetime.datetime.now(), key.name)
        current = CopyKey(key.name)
        key_copy_thread_list.append(current)
        current.start()

        # Pause if max threads reached - note that enumerate returns all threads, including this parent thread
        if len(threading.enumerate()) >= threads:
            print "%s : Max Threads (%s) Reached: Pausing until threadcount reduces." % (
                datetime.datetime.now(), threads)
            while 1:
                if len(threading.enumerate()) < threads:
                    print "%s : Continuing thread creation." % datetime.datetime.now(
                    )
                    break
                time.sleep(1)

    for key_copy_thread in key_copy_thread_list:
        key_copy_thread.join(
            30
        )  # Bring this particular thread to this current "parent" thread, blocks parent until joined or 30s timeout
        if key_copy_thread.isAlive():
            print "%s : TIMEOUT on key %s" % (datetime.datetime.now(),
                                              key_copy_thread.key_name)
            continue
        print "%s : Status Output: %s" % (datetime.datetime.now(),
                                          key_copy_thread.status)

    print "%s : Complete : %s Total Keys Requested" % (datetime.datetime.now(),
                                                       total_keys)
Example no. 7
0
    def __init__(self, stream):
        """Build the live-encoding GStreamer pipeline for a stream.

        Topology: ``adder -> quality_tee``, then one ``EncodeBin -> tee``
        branch per configured quality.  Each branch gets an initial output
        via ``self.scale``.

        Args:
            stream: mapping with "name", "description", "genres" and
                "quality" keys — presumably a stream document from the
                database (TODO confirm against caller).
        """
        super(Streamer, self).__init__()

        logging.debug("Streamer::__init__(): Constructing pipeline")

        pipe = gst.Pipeline("pipeline")

        # One-permit semaphore guarding pipeline mutations.
        self.lock = BoundedSemaphore(value=1)

        # Dedicated thread running the GLib main loop for bus callbacks.
        self.mainloop_thread = MainLoop()
        self.mainloop_thread.start()

        self.stream = stream
        self.servers = {}
        self.tees = {}
        self.encodebins = {}
        self.quality_tee_srcs = {}
        self.tailbins = []
        self.playing = False
        self.streamname = stream["name"]
        self.description = stream["description"]
        self.genre = ", ".join(stream["genres"])

        # mix audio
        adder = gst.element_factory_make("adder")
        self.quality_tee = gst.element_factory_make("tee")
        # queue = gst.element_factory_make("queue")

        pipe.add(adder, self.quality_tee)
        gst.element_link_many(adder, self.quality_tee)

        self.pipe = pipe
        # Watch the bus for state changes, EOS, errors and tags.
        self.bus = pipe.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect('message::state-changed',
                         self.on_message_state_changed)
        self.bus.connect('message::eos', self.on_eos)
        self.bus.connect('message::error', self.on_error)
        self.bus.connect("message::tag", self.on_tag)
        # self.bus.connect('message', self.on_message)
        self.adder = adder

        # Keep a silent source connected so the adder always has input.
        quietbin = QuietBin(self)
        sink = self.adder.get_request_pad("sink%d")
        quietbin.get_pad("src").link(sink)

        # One encode branch per configured quality level.
        for quality in stream["quality"]:
            # this has to be to ensure double from database work as expected and key is found
            quality = float(quality)

            # if quality == min(stream["quality"]):
            eb = EncodeBin(self, quality)

            # Request a fresh src pad from the quality tee and feed it
            # into this quality's encoder.
            quality_tee_src = self.quality_tee.get_request_pad("src%d")
            quality_tee_src.link(eb.get_pad("sink"))

            self.quality_tee_srcs[quality] = quality_tee_src

            # Per-quality output tee downstream of the encoder.
            tee = gst.element_factory_make("tee")
            pipe.add(tee)

            sink = tee.get_pad("sink")
            src = eb.get_pad("src")
            src.link(sink)

            self.tees[quality] = tee
            self.encodebins[quality] = eb

            # The lowest quality doubles as the default `self.tee`.
            if quality == min(stream["quality"]):
                self.tee = tee

            # Attach the initial output for this quality.
            self.scale(None, quality, sync=1, init=True)
Example no. 8
0
    def __init__(self, port):
        """Register this pipeline server in the database and wire up the
        command handlers.

        Args:
            port: TCP port this pipeline server listens on; stored in the
                server document and used as part of its identity.
        """
        super(StreamManagement, self).__init__()
        self.streams = {}

        # one command at the time
        self.lock = BoundedSemaphore(value=1)

        local_ip = Helpers.ip.local_ip()
        public_ip = Helpers.ip.public_ip()

        # Upsert this server's record keyed by (type, local_ip, port);
        # resets its load level to 0 and marks it up.
        self.db.servers.update(
            {
                "type": "pipeline",
                "local_ip": local_ip,
                "port": port
            }, {
                "level": float(0),
                "type": "pipeline",
                "local_ip": local_ip,
                "public_ip": public_ip,
                "port": port,
                "down": False
            },
            upsert=True)
        # Read back the upserted document to learn its _id.
        server = self.db.servers.find_one(
            {
                "type": "pipeline",
                "local_ip": local_ip,
                "port": port
            }, {
                "_id": 1,
            })
        # db = Db()
        # server = db.conn.Server.find_one({"type": "pipeline", "local_ip": local_ip, "port": port})
        # if server == None:
        #     server = db.conn.Server()
        #
        # server["level"] = float(0)
        # server["type"] = "pipeline"
        # server["local_ip"] = local_ip
        # server["public_ip"] = public_ip
        # server["port"] = port
        # server["down"] = False
        # server.save()

        self.pipeline_server_id = unicode(server["_id"])

        PipelineLoadBalancer(self.pipeline_server_id).start()

        # self.db = db

        Helpers.globals.set_id_pipeline(server["_id"])

        # Map every supported command signal to its handler method.
        self.connect(handler=self.dump_dot_file, signal="dump_dot_file")
        self.connect(handler=self.playlist_update, signal="playlist_update")
        self.connect(handler=self.change_selection, signal="change_selection")
        self.connect(handler=self.next, signal="next")
        self.connect(handler=self.is_alive, signal="is_alive")
        self.connect(handler=self.scale_streaming, signal="scale")
        self.connect(handler=self.start_streaming, signal="start")
        self.connect(handler=self.print_playlist, signal="print_playlist")
        self.connect(handler=self.register_updates_observer,
                     signal="register_updates_observer")
        self.connect(handler=self.notify_current_track,
                     signal="notify_current_track")
        self.connect(handler=self.unregister_updates_observer,
                     signal="unregister_updates_observer")
        self.connect(handler=self.update_buffer, signal="update_buffer")
        self.connect(handler=self.start_live, signal="start_live")
        self.connect(handler=self.stop_streaming, signal="stop")
        self.connect(handler=self.rescale_streaming, signal="rescale")
        self.connect(handler=self.__streamer_initialized,
                     signal="streamer_initialized")
def BoundedSemaphore(value=1):
    """Return a bounded semaphore object.

    Imports from the top-level ``multiprocessing`` package rather than
    ``multiprocessing.synchronize``: the synchronize-level class takes a
    keyword-only ``ctx`` argument on Python 3, so calling it with only
    ``value`` raises TypeError there.  The package-level factory supplies
    the default context and behaves identically on Python 2.
    """
    # Local import shadows this function's own name only inside the body.
    from multiprocessing import BoundedSemaphore
    return BoundedSemaphore(value)
Example no. 10
0
    def init(self, player, respond=None):
        """Asynchronously initialise the scheduler: load program settings,
        build the playlist and buffer, and start the first track.

        This is a generator-based async method — each ``yield task(...)``
        suspends until the called RPC/task completes (presumably driven by
        the same framework that provides ``self.call``; TODO confirm).

        Args:
            player: streamer owning this scheduler; its ``stream`` document
                supplies "_id" and "default_program_id".
            respond: optional callback invoked with an acknowledgement
                string once initialisation completes.
        """
        super(BaseScheduler, self).__init__()

        self.playing = True
        self.player = player
        self.last_artist = None
        self.last_stop_unused = time.time()

        self.observers = UpdateObservers()

        # db = Db()
        # self.db = db

        self.tracks_selector = TracksSelector(self.player.stream["_id"])
        self.history = History(observers=self.observers)
        self.program_id = unicode(self.player.stream["default_program_id"])

        # Fetch the program's default group ids and fade length.
        self.groups = yield task(self.call,
                                 self.get_default_group_ids,
                                 program_id=self.program_id)

        self.fade = yield task(self.call,
                               self.get_fade_length,
                               program_id=self.program_id)

        # Hold the playlist lock across playlist + buffer construction.
        self.playlist_lock = BoundedSemaphore(value=1)
        self.playlist_lock.acquire()
        selection = yield task(self.call,
                               self.get_selection,
                               program_id=self.program_id)

        # "shuffle" programs get an unsorted selection wrapped in a
        # RandomPlaylist; anything else gets a sorted, ordered Playlist.
        if selection == "shuffle":
            tracks = yield task(self.call,
                                self.tracks_selector.select_by_groups,
                                groups=self.groups,
                                sorted=False)
            print tracks
            self.playlist = RandomPlaylist(tracks, observers=self.observers)
            self.playlist.apply_history(self.history)
        else:
            tracks = yield task(self.call,
                                self.tracks_selector.select_by_groups,
                                groups=self.groups,
                                sorted=True)
            self.playlist = Playlist(tracks, observers=self.observers)

        self.buffer = Buffer(self.player.stream["_id"],
                             self.history,
                             self.playlist,
                             observers=self.observers)

        self.playlist_lock.release()

        # Pull the first buffered track and build its source bin.
        track = self.buffer.get_track()
        file_id = track["file_id"]

        filesrc = GridFSSource(file_id)
        # Encoded schedulers need the pass-through bin variant.
        if self.__class__.__name__ == "EncodedScheduler":
            self.stream_bin = EncodedStreamBin(self.player, filesrc, self)
        else:
            self.stream_bin = StreamBin(self.player, filesrc, self)

        self.stream_bin.link_and_unblock()

        respond("bitch, yo!")
Example no. 11
0
        os.path.join(os.path.dirname(__file__), "static"),
        "ui_methods":
        ui_methods,
        "cookie_secret":
        str("__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__"),
        "login_url":
        "/admin/login.html",
        "debug":
        True,
        "db":
        motor.MotorClient(
            "mongodb://*****:*****@127.0.0.1:27017/pipeline"
        ).pipeline,
        "upload_processor": {
            "connection": parent_conn,
            "lock": BoundedSemaphore(value=1)
        },
        "facebook_api_key":
        "..",
        "facebook_secret":
        "..",
        "executor":
        ThreadPoolExecutor(max_workers=4),
        # websockets connection cache for receiving metadata and live
        "ws_cache": {}
    }

    application = tornado.web.Application([
        (r"/image.json", JSONImageHandler),
        (r"/image.jpg", JPGImageHandler),
        (r"/", MainHandler),