def createPipelines(self):
    self.threads = []
    logging.info("Starting pipelines...")

    # Load what has already been done from disk
    self.downloaded_files = SynchronizedSet('downloaded_videos')
    self.transcoded_files = SynchronizedSet('transcoded_videos')

    if self.options.verbose:
        # Show what has already been processed
        print "Downloaded Videos:"
        for video in self.downloaded_files:
            print video
        print "Transcoded Videos:"
        for video in self.transcoded_files:
            print video

    # HTTP server - a single one shared by all pipelines
    self.http_server = PodcastHttpServer(findSettings()["podcast_port"])
    self.httpd_thread = self.http_server.start()
    self.threads.append(self.httpd_thread)

    # Transcoder - a single one shared by all pipelines
    transcode_queue = Queue.Queue()
    for video in self.downloaded_files:
        if video not in self.transcoded_files:
            transcode_queue.put(video)
    self.transcode_thread = VideoTranscoder(transcode_queue, self.transcoded_files)
    self.transcode_thread.start()
    self.threads.append(self.transcode_thread)

    self.rss_feedserver = RssFeedHttpServer(findSettings()["podcast_port"] + 1)

    # All the pipelines
    for pipeline_name in self.pipeline_specs:
        logging.info("Pipeline " + pipeline_name)
        pipeline = PipelineFactory.createPipeline(
            pipeline_name,
            transcode_queue,
            self.pipeline_specs[pipeline_name]["input_spec"],
            self.pipeline_specs[pipeline_name]["podcast_spec"],
            self.downloaded_files,
            self.transcoded_files)
        pipeline_threads = pipeline.createThreads()
        self.threads += pipeline_threads

    # Finally, create the sentinel thread, which lets us shut down
    # cleanly if an error occurs.
    self.sentinel_thread = SentinelThread(self.threads)
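The SentinelThread class itself is not part of this listing. As a rough illustration of the role described in the final comment, a minimal sketch might look like the following; it assumes the sentinel simply polls the worker threads and asks the survivors to stop when one of them dies, and the stop() method on the workers is an assumption, not something shown in the original code.

import logging
import threading
import time

class SentinelThread(threading.Thread):
    """Hypothetical sketch: watch the pipeline threads and trigger a clean
    shutdown as soon as one of them terminates unexpectedly."""

    def __init__(self, threads):
        threading.Thread.__init__(self)
        self.daemon = True
        self.watched_threads = threads

    def run(self):
        while True:
            for thread in self.watched_threads:
                if not thread.is_alive():
                    logging.error("Thread %s died, shutting down", thread.name)
                    # Ask the remaining threads to stop; assumes the worker
                    # classes expose a stop() method (not shown above).
                    for other in self.watched_threads:
                        if hasattr(other, 'stop'):
                            other.stop()
                    return
            time.sleep(1)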