def transcodeVideos(self):
   """Worker loop: pull videos off the transcode queue and process them.

   Polls self.transcode_queue every 2 seconds until self.must_die_event is
   set.  Each successfully transcoded video is added to
   self.transcoded_files and handed to its Pipeline's podcaster queue.
   """
   logging.info("Starting VideoTranscoder thread")
   while not self.must_die_event.wait(2):
     # Python <2.7 bug: threading.Event.wait() always returns None, so we
     # test the flag explicitly to know whether we should exit.
     if self.must_die_event.isSet():
       return
     try:
       video = self.transcode_queue.get_nowait()
       try:
         # Make sure the downloaded file still exists on disk before
         # spending time transcoding it.
         os.stat(video.local_filename)
       except OSError, e:
         logging.warning("%s n'existe pas !" % video.local_filename)
         continue
       if self.transcodeOneVideo(video):
         self.transcoded_files.add(video)
         # The video carries the unique name of the Pipeline it came from.
         # Looking up its Pipeline gives us the matching podcast queue.
         pipeline = PipelineFactory.getPipelineByName(video.pipeline_name)
         if pipeline is None:
           logging.warning("Video sans Pipeline valide !")
           continue
         pipeline.podcaster_queue.put(video)
       elif self.must_die_event.isSet():
         # NOTE(review): presumably transcodeOneVideo returns False when
         # aborted by the kill flag -- confirm in its implementation.
         return
     except Queue.Empty, e:
       # Nothing queued right now; exit if we were asked to die.
       if self.must_die_event.isSet():
         return
  def status(self):
    """Return a multi-line string describing the pipelines' status.

    Called by the Jabber pipeline to answer the "status" command.
    """
    parts = ["VideoTranscoder", self.transcode_thread.status(), "\n"]
    for (name, pipeline) in PipelineFactory.pipelines():
      parts.extend([name, pipeline.status()])
    return "\n".join(parts)
 def startPipelines(self):
   """Demarre tous les pipelines.
   """
   self.rss_feedserver_thread = self.rss_feedserver.start()
   self.threads.append(self.rss_feedserver_thread)
   for pipeline_name in self.pipeline_specs:
     PipelineFactory.getPipelineByName(pipeline_name).start()
   # On ne peut regarder les videos transcodees qu'apres car il faut les
   # orienter vers le bon pipeline
   for video in self.transcoded_files:
     pipeline = PipelineFactory.getPipelineByName(video.pipeline_name)
     if pipeline is not None:
       pipeline.podcaster_queue.put(video)
     else:
       logging.warning("Impossible de trouver le pipeline (%s) de %s"
                       % (video.pipeline_name, str(video)))
   # Si il y a un Pipeline Jabber, on lui passe le controlleur pour qu'il
   # puisse repondre a la commande "status".
   try:
     pipeline = PipelineFactory.getPipelineByName("gtalk_youtube")
     pipeline.jabber_bot_thread.controller = self
   except ValueError, e:
     logging.info("Pas de Pipeline Jabber")
 def createPipelines(self):
   self.threads = []
   logging.info("Demarrage des pipelines...")
   # Recupere les infos sur ce qui a deja ete fait depuis le disque dur
   self.downloaded_files = SynchronizedSet('downloaded_videos')
   self.transcoded_files = SynchronizedSet('transcoded_videos')
   if self.options.verbose:
     # Affichage de ce qu'on a deja fait
     print "Downloaded Videos:"
     for video in self.downloaded_files:
       print video
       print "Transcoded Videos:"
     for video in self.transcoded_files:
       print video
   # Serveur HTTP - un seul pour tous les pipelines
   self.http_server = PodcastHttpServer(findSettings()["podcast_port"])
   self.httpd_thread = self.http_server.start()
   self.threads.append(self.httpd_thread)
   # Transcodeur - un seul pour tous les pipelines
   transcode_queue = Queue.Queue()
   for video in self.downloaded_files:
     if not video in self.transcoded_files:
       transcode_queue.put(video)
   self.transcode_thread = VideoTranscoder(transcode_queue,
                                           self.transcoded_files)
   self.transcode_thread.start()
   self.threads.append(self.transcode_thread)
   self.rss_feedserver = RssFeedHttpServer(findSettings()["podcast_port"] + 1)
   # Tous les pipelines
   for pipeline_name in self.pipeline_specs:
     logging.info("Pipeline " + pipeline_name)
     pipeline = PipelineFactory.createPipeline(
       pipeline_name, transcode_queue,
       self.pipeline_specs[pipeline_name]["input_spec"],
       self.pipeline_specs[pipeline_name]["podcast_spec"],
       self.downloaded_files, self.transcoded_files)
     pipeline_threads = pipeline.createThreads()
     self.threads += pipeline_threads
   # On cree enfin le thread sentinelle, qui va permettre de quitter
   # proprement en cas d'erreur.
   self.sentinel_thread = SentinelThread(self.threads)
 def do_GET(self):
   """Resolve the Pipeline named by the request path; 404 if unknown."""
   # The "Host" header, with any ":port" suffix stripped.
   host = self.headers["Host"]
   if ":" in host:
     host = host[:host.rindex(':')]
   # Clean up the request path: drop query string and fragment, then
   # URL-decode it.
   path = self.path
   path = path.split('?',1)[0]
   path = path.split('#',1)[0]
   path = urllib.unquote(path)
   # The last path component must be a Pipeline name (minus the ".xml").
   # NOTE(review): rindex() raises ValueError when the component has no
   # "." at all; that is not caught below, so such a request would yield
   # an unhandled exception instead of the 404 -- consider guarding it.
   pipeline_name = path.split('/')[-1]
   pipeline_name = pipeline_name[:pipeline_name.rindex(".")]
   pipeline = None
   # Imported here rather than at module top -- presumably to avoid a
   # circular import with user_config; confirm before moving.
   from user_config import PipelineFactory
   try:
     pipeline = PipelineFactory.getPipelineByName(pipeline_name)
     logging.info(pipeline_name)
   except ValueError, e:
     logging.warning("Pipeline invalide : " + pipeline_name)
     self.send_error(404, "File Not Found")
     return