Code Example #1
File: main.py  Project: BenoitLBen/PodcastCanalPlus
# Standard-library imports needed by this excerpt; project helpers such as
# NiceOptionParser, findSettings, SynchronizedSet and deleteOldVideos are
# provided by the project's own modules.
import logging
import os
import platform
import subprocess
import sys

def main():
  option_parser = NiceOptionParser()
  option_parser.add_option("--config", help="Fichier de configuration",
                           action="store", type="string", dest="config_file",
                           default=findSettings()["config_file"])
  option_parser.add_option("--verbose", help="Mode verbeux.",
                           action="store_true", dest="verbose", default=False)
  (options, args) = option_parser.parse_args()

  if options.config_file is None or options.config_file == "":
    option_parser.print_usage()
    sys.exit(1)
  # These modules cannot be imported earlier, because they import modules
  # that depend on MyThread, whose definition depends on the value of
  # with_gui, which is set by initThreading().
  from user_config import PipelineFactory, ConfigParser, iTunesConfig
  config_parser = ConfigParser(options.config_file)
  pipeline_specs = config_parser.parse()
  # The chdir is done only after the settings have been read
  logging.info("chdir " + findSettings()["output_dir"])
  os.chdir(findSettings()["output_dir"])

  # Clean up old files
  downloaded_files = SynchronizedSet('downloaded_videos')
  transcoded_files = SynchronizedSet('transcoded_videos')
  deleteOldVideos(downloaded_files, transcoded_files)

  # On Darwin (OS X), politely say hello
  if platform.system() == "Darwin":
    subprocess.call([findSettings()["growlnotify"],
                     "-n", "PodcastCanalPlus",
                     "-t", "PodcastCanalPlus",
                     "-m", "Bonjour"])
  from cli import CliInterface
  cli_interface = CliInterface(options)
  os._exit(cli_interface.run())
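NiceOptionParser is a project-specific class; judging from the add_option/parse_args calls above it appears to wrap the standard library's optparse.OptionParser, but that is an assumption. A minimal, self-contained sketch of the same flag handling using optparse directly:

import sys
from optparse import OptionParser

# Sketch only: optparse stands in for the project's NiceOptionParser.
parser = OptionParser()
parser.add_option("--config", help="Configuration file",
                  action="store", type="string", dest="config_file",
                  default=None)
parser.add_option("--verbose", help="Verbose mode.",
                  action="store_true", dest="verbose", default=False)
(options, args) = parser.parse_args()

if not options.config_file:
  parser.print_usage()
  sys.exit(1)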
Code Example #2
def readConfig(self):
  self.config_parser = ConfigParser(self.options.config_file)
  self.pipeline_specs = self.config_parser.parse()
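The ConfigParser imported from user_config is project code, not the standard-library module of the same name. Judging from Controller.writeConfig in Example #3 below, which saves the pipeline specs with pprint.pformat, the configuration file appears to be a Python-literal dict; under that assumption, the round trip can be sketched with nothing but the standard library (the file name and spec contents below are made up):

import ast
import pprint

# Hypothetical specs using the input_spec/podcast_spec keys seen in Example #3.
specs = {"example_pipeline": {"input_spec": {"url": "http://example.com/feed"},
                              "podcast_spec": {"title": "Example"}}}

# Write the specs the same way Controller.writeConfig does.
with open("example.conf", "w") as config_file:
  config_file.write(pprint.pformat(specs))

# Read them back; ast.literal_eval safely evaluates the Python literal.
with open("example.conf") as config_file:
  loaded = ast.literal_eval(config_file.read())

assert loaded == specs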
Code Example #3
class Controller(object):
  """Contient le coeur de la logique du programme.

  Ne doit en principe etre instancie qu'une fois.
  """
  def __init__(self, options=None):
    """
    """
    self.options = options
    self.threads = []
    self.http_server = None
    self.rss_feedserver = None
    self.httpd_thread = None
    self.rss_feedserver_thread = None
    self.transcode_thread = None
    self.sentinel_thread = None
    self.config_parser = None
    self.pipeline_specs = None
    self.downloaded_files = None
    self.transcoded_files = None
    self.readConfig()

  def readConfig(self):
    self.config_parser = ConfigParser(self.options.config_file)
    self.pipeline_specs = self.config_parser.parse()

  def writeConfig(self):
    config_file = open(self.options.config_file, "w")
    config_file.write(pprint.pformat(self.pipeline_specs))
    config_file.close()

  def createPipelines(self):
    self.threads = []
    logging.info("Demarrage des pipelines...")
    # Retrieve from disk the record of what has already been done
    self.downloaded_files = SynchronizedSet('downloaded_videos')
    self.transcoded_files = SynchronizedSet('transcoded_videos')
    if self.options.verbose:
      # Print what has already been done
      print "Downloaded Videos:"
      for video in self.downloaded_files:
        print video
      print "Transcoded Videos:"
      for video in self.transcoded_files:
        print video
    # HTTP server - a single one shared by all pipelines
    self.http_server = PodcastHttpServer(findSettings()["podcast_port"])
    self.httpd_thread = self.http_server.start()
    self.threads.append(self.httpd_thread)
    # Transcoder - a single one shared by all pipelines
    transcode_queue = Queue.Queue()
    for video in self.downloaded_files:
      if video not in self.transcoded_files:
        transcode_queue.put(video)
    self.transcode_thread = VideoTranscoder(transcode_queue,
                                            self.transcoded_files)
    self.transcode_thread.start()
    self.threads.append(self.transcode_thread)
    self.rss_feedserver = RssFeedHttpServer(findSettings()["podcast_port"] + 1)
    # All the pipelines
    for pipeline_name in self.pipeline_specs:
      logging.info("Pipeline " + pipeline_name)
      pipeline = PipelineFactory.createPipeline(
        pipeline_name, transcode_queue,
        self.pipeline_specs[pipeline_name]["input_spec"],
        self.pipeline_specs[pipeline_name]["podcast_spec"],
        self.downloaded_files, self.transcoded_files)
      pipeline_threads = pipeline.createThreads()
      self.threads += pipeline_threads
    # Finally, create the sentinel thread, which makes it possible to shut
    # down cleanly in case of error.
    self.sentinel_thread = SentinelThread(self.threads)

  def startPipelines(self):
    """Demarre tous les pipelines.
    """
    self.rss_feedserver_thread = self.rss_feedserver.start()
    self.threads.append(self.rss_feedserver_thread)
    for pipeline_name in self.pipeline_specs:
      PipelineFactory.getPipelineByName(pipeline_name).start()
    # The transcoded videos can only be handled afterwards, because each of
    # them has to be routed to the right pipeline
    for video in self.transcoded_files:
      pipeline = PipelineFactory.getPipelineByName(video.pipeline_name)
      if pipeline is not None:
        pipeline.podcaster_queue.put(video)
      else:
        logging.warning("Impossible de trouver le pipeline (%s) de %s"
                        % (video.pipeline_name, str(video)))
    # If there is a Jabber pipeline, pass it the controller so that it can
    # answer the "status" command.
    try:
      pipeline = PipelineFactory.getPipelineByName("gtalk_youtube")
      pipeline.jabber_bot_thread.controller = self
    except ValueError:
      logging.info("Pas de Pipeline Jabber")