def exportSiteStructure(context):
    """GenericSetup export step: run the transmogrifier export pipeline.

    ``context`` is a GenericSetup export context.  The site is adapted to
    ITransmogrifier and the pipeline from the persistent (or default)
    export configuration is executed; any writer section that would export
    to its own tarball is redirected to write into ``context`` instead.
    """
    transmogrifier = ITransmogrifier(context.getSite())
    # We don't use transmogrifier's __call__ method, because we need to do
    # some modification in pipeline sections before running them.
    config_name = registerPersistentConfig(context.getSite(), "export")
    if config_name is None:
        transmogrifier._raw = _load_config(EXPORT_CONFIG)
    else:
        transmogrifier._raw = _load_config(config_name)
    global CONFIGFILE
    CONFIGFILE = None
    transmogrifier._data = {}

    options = transmogrifier._raw["transmogrifier"]
    sections = options["pipeline"].splitlines()
    pipeline = constructPipeline(transmogrifier, sections)

    # Walk the chain of pipeline sections backwards.  Each section generator
    # holds its section instance in its frame locals as 'self'.  If the
    # 'quintagroup.transmogrifier.writer' section's export context is a
    # tarball, replace it with the given function argument.
    last_section = pipeline.gi_frame.f_locals["self"]
    while hasattr(last_section, "previous"):
        if isinstance(last_section, WriterSection) and isinstance(
            last_section.export_context, TarballExportContext
        ):
            last_section.export_context = context
        last_section = last_section.previous
        # End the cycle if we reach the (empty) starter section, which is a
        # plain tuple iterator rather than a section generator.
        if isinstance(last_section, type(iter(()))):
            break
        last_section = last_section.gi_frame.f_locals["self"]

    # Pipeline execution: drain the generator; items are discarded once
    # processed by the sections.
    for item in pipeline:
        pass
def importSiteStructure(context):
    """GenericSetup import step: run the transmogrifier import pipeline.

    Runs only when the profile contains the flag file (or when an archive
    is being imported).  When there is no transmogrifier structure to
    import, it falls back to the standard GenericSetup filesystem importer
    (this step is also invoked during plain Plone site creation).
    """
    # Only run step if a flag file is present.
    if context.readDataFile('quintagroup.transmogrifier-import.txt') is None:
        if getattr(context, '_archive', None) is None:
            return

    transmogrifier = ITransmogrifier(context.getSite())
    # We don't use transmogrifier's __call__ method, because we need to do
    # some modification in pipeline sections before running them.
    config_name = registerPersistentConfig(context.getSite(), 'import')
    if config_name is None:
        transmogrifier._raw = _load_config(IMPORT_CONFIG)
    else:
        transmogrifier._raw = _load_config(config_name)
    global CONFIGFILE
    CONFIGFILE = None
    transmogrifier._data = {}

    # This function is also called when adding a Plone site, so call the
    # standard handler when the profile has no transmogrifier structure.
    path = ''
    prefix = 'structure'
    if 'reader' in transmogrifier._raw:
        path = transmogrifier._raw['reader'].get('path', '')
        prefix = transmogrifier._raw['reader'].get('prefix', 'structure')
    if not context.readDataFile('.objects.xml', subdir=os.path.join(path, prefix)):
        try:
            from Products.GenericSetup.interfaces import IFilesystemImporter
            IFilesystemImporter(context.getSite()).import_(context, 'structure', True)
        except ImportError:
            # Best effort: older GenericSetup without IFilesystemImporter.
            pass
        return

    options = transmogrifier._raw['transmogrifier']
    sections = options['pipeline'].splitlines()
    pipeline = constructPipeline(transmogrifier, sections)

    # Walk the chain of pipeline sections backwards.  Each section generator
    # holds its section instance in its frame locals as 'self'.  If the
    # reader section's import context is a tarball, replace it with the
    # given function argument.
    last_section = pipeline.gi_frame.f_locals['self']
    while hasattr(last_section, 'previous'):
        if isinstance(last_section, ReaderSection) and \
                isinstance(last_section.import_context, TarballImportContext):
            last_section.import_context = context
        last_section = last_section.previous
        # End the cycle if we reach the (empty) starter section, which is a
        # plain tuple iterator rather than a section generator.
        if isinstance(last_section, type(iter(()))):
            break
        last_section = last_section.gi_frame.f_locals['self']

    # Pipeline execution: drain the generator; items are discarded once
    # processed by the sections.
    for item in pipeline:
        pass
def run_pipeline(self, pipeline):
    """Adapt the portal to ITransmogrifier and execute *pipeline*.

    Logs before and after the run; the transmogrifier itself performs
    all the work via its __call__.
    """
    mogrifier = ITransmogrifier(self.context)
    logger.info('Running transmogrifier pipeline %s' % pipeline)
    mogrifier(pipeline)
    logger.info('Transmogrifier pipeline %s complete' % pipeline)
def importSiteStructure(context):
    """GenericSetup import step: run the transmogrifier import pipeline.

    This variant records the persistent configuration id on the
    transmogrifier and stashes the import context in the transmogrifier's
    annotations so pipeline sections can reach it.  When there is no
    transmogrifier structure to import, it falls back to the standard
    GenericSetup filesystem importer.
    """
    # Only run step if a flag file is present.
    if context.readDataFile("quintagroup.transmogrifier-import.txt") is None:
        if getattr(context, "_archive", None) is None:
            return

    transmogrifier = ITransmogrifier(context.getSite())
    # We don't use transmogrifier's __call__ method, because we need to do
    # some modification in pipeline sections before running them.
    config_name = registerPersistentConfig(context.getSite(), "import")
    if config_name is None:
        transmogrifier._raw = _load_config(IMPORT_CONFIG)
    else:
        transmogrifier.configuration_id = config_name
        transmogrifier._raw = _load_config(config_name)
    global CONFIGFILE
    CONFIGFILE = None
    transmogrifier._data = {}

    # This function is also called when adding a Plone site, so call the
    # standard handler when the profile has no transmogrifier structure.
    path = ""
    prefix = "structure"
    if "reader" in transmogrifier._raw:
        path = transmogrifier._raw["reader"].get("path", "")
        prefix = transmogrifier._raw["reader"].get("prefix", "structure")
    if not context.readDataFile(".objects.xml", subdir=os.path.join(path, prefix)):
        try:
            from Products.GenericSetup.interfaces import IFilesystemImporter
            IFilesystemImporter(context.getSite()).import_(context, "structure", True)
        except ImportError:
            # Best effort: older GenericSetup without IFilesystemImporter.
            pass
        return

    # Make the import context available to pipeline sections via annotations.
    IAnnotations(transmogrifier)[IMPORT_CONTEXT] = context

    options = transmogrifier._raw["transmogrifier"]
    sections = options["pipeline"].splitlines()
    pipeline = constructPipeline(transmogrifier, sections)

    # Pipeline execution: drain the generator; items are discarded once
    # processed.  (A dead assignment binding the first section from the
    # generator frame — never used in this variant — has been removed.)
    for item in pipeline:
        pass
def importSiteStructure(context):
    """GenericSetup import step: run the transmogrifier import pipeline.

    Stashes the import context in the transmogrifier's annotations so
    pipeline sections can reach it.  When there is no transmogrifier
    structure to import, it falls back to the standard GenericSetup
    filesystem importer (this step is also invoked during plain Plone
    site creation).
    """
    # Only run step if a flag file is present.
    if context.readDataFile('quintagroup.transmogrifier-import.txt') is None:
        if getattr(context, '_archive', None) is None:
            return

    transmogrifier = ITransmogrifier(context.getSite())
    # We don't use transmogrifier's __call__ method, because we need to do
    # some modification in pipeline sections before running them.
    config_name = registerPersistentConfig(context.getSite(), 'import')
    if config_name is None:
        transmogrifier._raw = _load_config(IMPORT_CONFIG)
    else:
        transmogrifier._raw = _load_config(config_name)
    global CONFIGFILE
    CONFIGFILE = None
    transmogrifier._data = {}

    # This function is also called when adding a Plone site, so call the
    # standard handler when the profile has no transmogrifier structure.
    path = ''
    prefix = 'structure'
    if 'reader' in transmogrifier._raw:
        path = transmogrifier._raw['reader'].get('path', '')
        prefix = transmogrifier._raw['reader'].get('prefix', 'structure')
    if not context.readDataFile('.objects.xml', subdir=os.path.join(path, prefix)):
        try:
            from Products.GenericSetup.interfaces import IFilesystemImporter
            IFilesystemImporter(context.getSite()).import_(context, 'structure', True)
        except ImportError:
            # Best effort: older GenericSetup without IFilesystemImporter.
            pass
        return

    # Make the import context available to pipeline sections via annotations.
    IAnnotations(transmogrifier)[IMPORT_CONTEXT] = context

    options = transmogrifier._raw['transmogrifier']
    sections = options['pipeline'].splitlines()
    pipeline = constructPipeline(transmogrifier, sections)

    # Pipeline execution: drain the generator; items are discarded once
    # processed.  (A dead assignment binding the first section from the
    # generator frame — never used in this variant — has been removed.)
    for item in pipeline:
        pass
def importSiteStructure(context):
    """GenericSetup import step: run the transmogrifier import pipeline.

    Falls back to the standard GenericSetup filesystem importer when the
    profile has no transmogrifier structure (e.g. during plain Plone site
    creation).  Any reader section that imports from its own tarball is
    redirected to read from the given ``context`` instead.
    """
    # This function is also called when adding a Plone site, so call the
    # standard handler when there is no structure to import.
    if not context.readDataFile('.objects.xml', subdir='structure'):
        IFilesystemImporter(context.getSite()).import_(context, 'structure', True)
        return

    transmogrifier = ITransmogrifier(context.getSite())
    # We don't use transmogrifier's __call__ method, because we need to do
    # some modification in pipeline sections before running them.
    config_name = registerPersistentConfig(context.getSite(), 'import')
    if config_name is None:
        transmogrifier._raw = _load_config(IMPORT_CONFIG)
    else:
        transmogrifier._raw = _load_config(config_name)
    global CONFIGFILE
    CONFIGFILE = None
    transmogrifier._data = {}

    options = transmogrifier._raw['transmogrifier']
    sections = options['pipeline'].splitlines()
    pipeline = constructPipeline(transmogrifier, sections)

    # Walk the chain of pipeline sections backwards.  Each section generator
    # holds its section instance in its frame locals as 'self'.  If the
    # reader section's import context is a tarball, replace it with the
    # given function argument.
    last_section = pipeline.gi_frame.f_locals['self']
    while hasattr(last_section, 'previous'):
        if isinstance(last_section, ReaderSection) and \
                isinstance(last_section.import_context, TarballImportContext):
            last_section.import_context = context
        last_section = last_section.previous
        # End the cycle if we reach the (empty) starter section, which is a
        # plain tuple iterator rather than a section generator.
        if isinstance(last_section, type(iter(()))):
            break
        last_section = last_section.gi_frame.f_locals['self']

    # Pipeline execution: drain the generator; items are discarded once
    # processed by the sections.
    for item in pipeline:
        pass
def exportSiteStructure(context):
    """GenericSetup export step: run the transmogrifier export pipeline.

    ``context`` is a GenericSetup export context.  The site is adapted to
    ITransmogrifier and the pipeline from the persistent (or default)
    export configuration is executed; any writer section that would export
    to its own tarball is redirected to write into ``context`` instead.
    """
    transmogrifier = ITransmogrifier(context.getSite())
    # We don't use transmogrifier's __call__ method, because we need to do
    # some modification in pipeline sections before running them.
    config_name = registerPersistentConfig(context.getSite(), 'export')
    if config_name is None:
        transmogrifier._raw = _load_config(EXPORT_CONFIG)
    else:
        transmogrifier._raw = _load_config(config_name)
    global CONFIGFILE
    CONFIGFILE = None
    transmogrifier._data = {}

    options = transmogrifier._raw['transmogrifier']
    sections = options['pipeline'].splitlines()
    pipeline = constructPipeline(transmogrifier, sections)

    # Walk the chain of pipeline sections backwards.  Each section generator
    # holds its section instance in its frame locals as 'self'.  If the
    # 'quintagroup.transmogrifier.writer' section's export context is a
    # tarball, replace it with the given function argument.
    last_section = pipeline.gi_frame.f_locals['self']
    while hasattr(last_section, 'previous'):
        if isinstance(last_section, WriterSection) and \
                isinstance(last_section.export_context, TarballExportContext):
            last_section.export_context = context
        last_section = last_section.previous
        # End the cycle if we reach the (empty) starter section, which is a
        # plain tuple iterator rather than a section generator.
        if isinstance(last_section, type(iter(()))):
            break
        last_section = last_section.gi_frame.f_locals['self']

    # Pipeline execution: drain the generator; items are discarded once
    # processed by the sections.
    for item in pipeline:
        pass