def generate_stages(self):
    """Build and return the list of pipeline stages for this import.

    Order matters: early plugin stages run first, then regular plugin
    stages, and file manipulation comes last.
    """
    # Early plugin stages come first in the pipeline.
    stages = [
        plugin_stage(self, stage_func)
        for stage_func in plugins.early_import_stages()
    ]
    # Then the regular plugin-provided stages.
    stages.extend(
        plugin_stage(self, stage_func)
        for stage_func in plugins.import_stages()
    )
    # File manipulation is the final stage.
    stages.append(manipulate_files(self))
    return stages
def run(self):
    """Run the import task.

    Assembles the stage pipeline based on the global configuration
    (singleton vs. album mode, autotagging on/off) and executes it,
    either threaded or sequentially.
    """
    self._amend_config()

    # First stage: produce tasks either from the filesystem or from a
    # library query.
    if self.query is None:
        stages = [read_tasks(self)]
    else:
        stages = [query_tasks(self)]

    if config['import']['singletons']:
        # Singleton (track-at-a-time) importer.
        if config['import']['autotag']:
            stages.extend([item_lookup(self), item_query(self)])
        else:
            stages.append(item_progress(self))
    else:
        # Whole-album importer.
        if config['import']['group_albums']:
            # Split directory tasks into one task per album.
            stages.append(group_albums(self))
        if config['import']['autotag']:
            # Metadata lookup and user interaction only make sense when
            # autotagging.
            stages.extend([initial_lookup(self), user_query(self)])
        else:
            # Without autotagging, just show progress.
            stages.append(show_progress(self))

    # Common tail: apply decisions, plugin stages, file moves, cleanup.
    stages.append(apply_choices(self))
    for stage_func in plugins.import_stages():
        stages.append(plugin_stage(self, stage_func))
    stages.append(manipulate_files(self))
    stages.append(finalize(self))

    pl = pipeline.Pipeline(stages)

    # Run the pipeline.
    try:
        if config['threaded']:
            pl.run_parallel(QUEUE_SIZE)
        else:
            pl.run_sequential()
    except ImportAbort:
        # User aborted operation. Silently stop.
        pass
def run_import(**kwargs):
    """Run an import. The keyword arguments are the same as those to
    ImportConfig.
    """
    config = ImportConfig(**kwargs)

    # First stage: generate tasks from a query or from the filesystem.
    if config.query is None:
        stages = [read_tasks(config)]
    else:
        stages = [query_tasks(config)]

    if config.singletons:
        # Singleton (per-track) importer.
        if config.autot:
            stages.extend([item_lookup(config), item_query(config)])
        else:
            stages.append(item_progress(config))
    else:
        # Whole-album importer.
        if config.autot:
            # Metadata lookup and user interaction happen only when
            # autotagging.
            stages.extend([initial_lookup(config), user_query(config)])
        else:
            # Without autotagging, just display progress.
            stages.append(show_progress(config))

    # Common tail stages.
    stages.append(apply_choices(config))
    for stage_func in plugins.import_stages():
        stages.append(plugin_stage(config, stage_func))
    stages.append(manipulate_files(config))
    if config.art:
        # Album-art fetching is optional.
        stages.append(fetch_art(config))
    stages.append(finalize(config))

    pl = pipeline.Pipeline(stages)

    # Run the pipeline.
    try:
        if config.threaded:
            pl.run_parallel(QUEUE_SIZE)
        else:
            pl.run_sequential()
    except ImportAbort:
        # User aborted operation. Silently stop.
        pass
def run(self):
    """Run the import task.

    Builds a stage pipeline from the ``import`` configuration section
    and executes it. In ``pretend`` mode only the files are logged and
    the pipeline ends early.
    """
    self.set_config(config['import'])

    # First stage: generate tasks from a query or from the filesystem.
    if self.query is None:
        stages = [read_tasks(self)]
    else:
        stages = [query_tasks(self)]

    if self.config['pretend']:
        # Only log the imported files and end the pipeline.
        stages.append(log_files(self))
    else:
        if self.config['group_albums'] and \
           not self.config['singletons']:
            # Split directory tasks into one task per album.
            stages.append(group_albums(self))

        if self.config['autotag']:
            # FIXME We should also resolve duplicates when not
            # autotagging. This is currently handled in `user_query`
            stages.extend([lookup_candidates(self), user_query(self)])
        else:
            stages.append(import_asis(self))

        # Common tail: apply decisions, plugin stages, file moves.
        stages.append(apply_choices(self))
        for stage_func in plugins.import_stages():
            stages.append(plugin_stage(self, stage_func))
        stages.append(manipulate_files(self))

    pl = pipeline.Pipeline(stages)

    # Run the pipeline.
    plugins.send('import_begin', session=self)
    try:
        if config['threaded']:
            pl.run_parallel(QUEUE_SIZE)
        else:
            pl.run_sequential()
    except ImportAbort:
        # User aborted operation. Silently stop.
        pass
def run(self):
    """Run the import task.

    Builds a stage pipeline from the ``import`` configuration section
    and executes it, threaded or sequentially depending on the global
    ``threaded`` option.
    """
    self.set_config(config["import"])

    # First stage: generate tasks from a query or from the filesystem.
    if self.query is None:
        stages = [read_tasks(self)]
    else:
        stages = [query_tasks(self)]

    if self.config["group_albums"] and not self.config["singletons"]:
        # Split directory tasks into one task per album.
        stages.append(group_albums(self))

    if self.config["autotag"]:
        # Metadata lookup and user interaction happen only when
        # autotagging.
        # FIXME We should also resolve duplicates when not
        # autotagging.
        stages.extend([lookup_candidates(self), user_query(self)])
    else:
        stages.append(import_asis(self))

    # Common tail: apply decisions, plugin stages, file moves.
    stages.append(apply_choices(self))
    for stage_func in plugins.import_stages():
        stages.append(plugin_stage(self, stage_func))
    stages.append(manipulate_files(self))

    pl = pipeline.Pipeline(stages)

    # Run the pipeline.
    try:
        if config["threaded"]:
            pl.run_parallel(QUEUE_SIZE)
        else:
            pl.run_sequential()
    except ImportAbort:
        # User aborted operation. Silently stop.
        pass