class Controller(object):
  """Contient le coeur de la logique du programme.

  Ne doit en principe etre instancie qu'une fois.
  """
  def __init__(self, options=None):
    """
    """
    self.options = options
    self.threads = []
    self.http_server = None
    self.rss_feedserver = None
    self.httpd_thread = None
    self.rss_feedserver_thread = None
    self.transcode_thread = None
    self.sentinel_thread = None
    self.config_parser = None
    self.pipeline_specs = None
    self.downloaded_files = None
    self.transcoded_files = None
    self.readConfig()

  def readConfig(self):
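    """Parses the configuration file into the pipeline specifications."""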
    self.config_parser = ConfigParser(self.options.config_file)
    self.pipeline_specs = self.config_parser.parse()

  def writeConfig(self):
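    """Writes the pipeline specs back to the config file as a pretty-printed Python literal."""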
    with open(self.options.config_file, "w") as config_file:
      config_file.write(pprint.pformat(self.pipeline_specs))

  def createPipelines(self):
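    """Creates the threads for the shared servers, the transcoder, each configured pipeline, and the sentinel."""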
    self.threads = []
    logging.info("Demarrage des pipelines...")
    # Recupere les infos sur ce qui a deja ete fait depuis le disque dur
    self.downloaded_files = SynchronizedSet('downloaded_videos')
    self.transcoded_files = SynchronizedSet('transcoded_videos')
    if self.options.verbose:
      # Display what has already been done.
      print "Downloaded Videos:"
      for video in self.downloaded_files:
        print video
      print "Transcoded Videos:"
      for video in self.transcoded_files:
        print video
    # HTTP server - a single one shared by all pipelines.
    self.http_server = PodcastHttpServer(findSettings()["podcast_port"])
    self.httpd_thread = self.http_server.start()
    self.threads.append(self.httpd_thread)
    # Transcoder - a single one shared by all pipelines.
    transcode_queue = Queue.Queue()
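    # Re-queue anything that was downloaded in a previous run but never
    # transcoded, so it gets picked up again.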
    for video in self.downloaded_files:
      if video not in self.transcoded_files:
        transcode_queue.put(video)
    self.transcode_thread = VideoTranscoder(transcode_queue,
                                            self.transcoded_files)
    self.transcode_thread.start()
    self.threads.append(self.transcode_thread)
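    # RSS feed server - created now, but only started in startPipelines().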
    self.rss_feedserver = RssFeedHttpServer(findSettings()["podcast_port"] + 1)
    # All the pipelines.
    for pipeline_name in self.pipeline_specs:
      logging.info("Pipeline " + pipeline_name)
      pipeline = PipelineFactory.createPipeline(
        pipeline_name, transcode_queue,
        self.pipeline_specs[pipeline_name]["input_spec"],
        self.pipeline_specs[pipeline_name]["podcast_spec"],
        self.downloaded_files, self.transcoded_files)
      pipeline_threads = pipeline.createThreads()
      self.threads += pipeline_threads
    # Finally, create the sentinel thread, which makes it possible to exit
    # cleanly in case of error.
    self.sentinel_thread = SentinelThread(self.threads)

  def startPipelines(self):
    """Demarre tous les pipelines.
    """
    self.rss_feedserver_thread = self.rss_feedserver.start()
    self.threads.append(self.rss_feedserver_thread)
    for pipeline_name in self.pipeline_specs:
      PipelineFactory.getPipelineByName(pipeline_name).start()
    # Transcoded videos can only be examined after this point, because they
    # must be routed to the right pipeline.
    for video in self.transcoded_files:
      pipeline = PipelineFactory.getPipelineByName(video.pipeline_name)
      if pipeline is not None:
        pipeline.podcaster_queue.put(video)
      else:
        logging.warning("Could not find the pipeline (%s) for %s"
                        % (video.pipeline_name, str(video)))
    # If there is a Jabber pipeline, hand it the controller so that it can
    # answer the "status" command.
    try:
      pipeline = PipelineFactory.getPipelineByName("gtalk_youtube")
      pipeline.jabber_bot_thread.controller = self
    except ValueError:
      logging.info("No Jabber pipeline")