def testGSContentCompatible(self):
    """A File object must be adaptable to the GenericSetup
    filesystem export/import interfaces."""
    new_id = self.folder.invokeFactory(
        'File', 'foo', title='foo', file=getData('plone.pdf'))
    foo = self.folder[new_id]
    self.assertTrue(IFilesystemExporter(foo))
    self.assertTrue(IFilesystemImporter(foo))
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Reads the '.objects' CSV manifest for *subdir*, deletes prior
    content that is neither listed nor preserved, then creates and
    recursively imports each listed object.

    :param import_context: setup-tool import context providing
        ``readDataFile`` and ``getLogger``.
    :param subdir: profile-relative directory for this container.
    :param root: when False, the container's own id is appended to
        *subdir* before reading files.
    """
    context = self.context
    if not root:
        subdir = '%s/%s' % (subdir, context.getId())

    objects = import_context.readDataFile('.objects', subdir)
    if objects is None:
        # No manifest for this container: nothing to import.
        return

    dialect = 'excel'
    stream = StringIO(objects)
    rowiter = reader(stream, dialect)
    # Skip blank rows so a trailing newline in '.objects' does not
    # raise ValueError when unpacking (object_id, portal_type) below.
    # (Matches the empty-row filtering done by the workflow-aware
    # importer variant elsewhere in this file.)
    ours = tuple(row for row in rowiter if row)
    our_ids = set([item[0] for item in ours])

    prior = set(context.contentIds())

    preserve = import_context.readDataFile('.preserve', subdir)
    if not preserve:
        preserve = set()
    else:
        # Only ids that already exist AND are in the manifest can be
        # preserved; '.preserve' holds glob patterns.
        preservable = prior.intersection(our_ids)
        preserve = set(_globtest(preserve, preservable))

    delete = import_context.readDataFile('.delete', subdir)
    if not delete:
        delete = set()
    else:
        # '.delete' globs apply only to existing ids NOT in the manifest.
        deletable = prior.difference(our_ids)
        delete = set(_globtest(delete, deletable))

    # if it's in our_ids and NOT in preserve, or if it's not in
    # our_ids but IS in delete, we're gonna delete it
    delete = our_ids.difference(preserve).union(delete)

    for id in prior.intersection(delete):
        context._delObject(id)

    existing = context.objectIds()

    for object_id, portal_type in ours:
        if object_id not in existing:
            object = self._makeInstance(object_id, portal_type,
                                        subdir, import_context)
            if object is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        # Recurse into the (possibly pre-existing) child object.
        wrapped = context._getOb(object_id)
        IFilesystemImporter(wrapped).import_(import_context, subdir)
def handleImport(self, action):
    """Form action: import a structure tarball into the current context.

    Validates the form, runs the GenericSetup filesystem importer over
    the uploaded tarball, then flashes a status message and redirects
    back to the context.
    """
    data, errors = self.extractData()
    if errors:
        self.status = self.formErrorsMessage
        return

    import_ctx = TarballImportContext(self.context, data['upload'])
    IFilesystemImporter(self.context).import_(import_ctx, 'structure', True)

    self.status = _(u'Form imported.')
    IStatusMessage(self.request).addStatusMessage(self.status, type='info')

    absolute_url = getMultiAdapter(
        (self.context, self.request), name='absolute_url')
    self.request.response.redirect(absolute_url())
def importSiteStructure(context):
    """Import-step handler: run the transmogrifier 'import' pipeline.

    Falls back to the plain GenericSetup filesystem importer when the
    transmogrifier flag file / data are absent.
    NOTE(review): this mutates module-global CONFIGFILE and pokes at
    transmogrifier internals (_raw, _data, gi_frame) rather than using
    its public __call__ — statement order here is load-bearing.
    """
    # Only run step if a flag file is present
    if context.readDataFile('quintagroup.transmogrifier-import.txt') is None:
        # No flag file and no in-memory archive: nothing to do.
        if getattr(context, '_archive', None) is None:
            return
    transmogrifier = ITransmogrifier(context.getSite())
    # we don't use transmogrifer's __call__ method, because we need to do
    # some modification in pipeline sections
    config_name = registerPersistentConfig(context.getSite(), 'import')
    if config_name is None:
        # No site-specific persistent config: use the packaged default.
        transmogrifier._raw = _load_config(IMPORT_CONFIG)
    else:
        transmogrifier.configuration_id = config_name
        transmogrifier._raw = _load_config(config_name)
    # Reset the module-level config cache used by _load_config.
    global CONFIGFILE
    CONFIGFILE = None
    transmogrifier._data = {}
    # this function is also called when adding Plone site, so call standard handler
    path = ''
    prefix = 'structure'
    if 'reader' in transmogrifier._raw:
        path = transmogrifier._raw['reader'].get('path', '')
        prefix = transmogrifier._raw['reader'].get('prefix', 'structure')
    if not context.readDataFile('.objects.xml', subdir=os.path.join(path, prefix)):
        # No transmogrifier manifest: delegate to the stock GenericSetup
        # structure importer (best-effort; skip if GS is unavailable).
        try:
            from Products.GenericSetup.interfaces import IFilesystemImporter
            IFilesystemImporter(context.getSite()).import_(
                context, 'structure', True)
        except ImportError:
            pass
        return
    # Make the setup-tool context reachable from pipeline sections.
    IAnnotations(transmogrifier)[IMPORT_CONTEXT] = context
    options = transmogrifier._raw['transmogrifier']
    sections = options['pipeline'].splitlines()
    pipeline = constructPipeline(transmogrifier, sections)
    # Reach into the generator frame to grab the final section object.
    # NOTE(review): last_section appears unused here — presumably kept
    # for side effects or debugging; confirm before removing.
    last_section = pipeline.gi_frame.f_locals['self']
    # Pipeline execution
    for item in pipeline:
        pass  # discard once processed
def action_import(self, action, data):
    """Form action: import a structure tarball into the current context.

    When 'purge' is checked, all existing child objects are deleted
    first; the tarball is then imported via the GenericSetup
    filesystem importer, a status message is flashed, and the browser
    is redirected back to the context.
    """
    # Idiom fix: test truthiness instead of comparing `== True`.
    if data.get('purge', False):
        # user has requested removal of existing fields
        self.context.manage_delObjects(ids=self.context.objectIds())

    ctx = TarballImportContext(self.context, data['upload'])
    IFilesystemImporter(self.context).import_(ctx, 'structure', True)

    message = _(u'Form imported.')
    IStatusMessage(self.request).addStatusMessage(message, type='info')
    url = getMultiAdapter((self.context, self.request),
                          name='absolute_url')()
    self.request.response.redirect(url)
    return ''
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Deletes existing content not matched by the '.preserve' glob
    list, then (re)creates and recursively imports every object
    listed in the '.objects' CSV manifest for *subdir*.

    :param import_context: setup-tool import context providing
        ``readDataFile`` and ``getLogger``.
    :param subdir: profile-relative directory for this container.
    :param root: when False, the container's own id is appended to
        *subdir* before reading files.
    """
    context = self.context
    if not root:
        subdir = '%s/%s' % (subdir, context.getId())

    preserve = import_context.readDataFile('.preserve', subdir)
    prior = context.contentIds()

    if not preserve:
        preserve = []
    else:
        # '.preserve' holds glob patterns matched against existing ids.
        preserve = _globtest(preserve, prior)

    # Renamed loop variable so the builtin `id` is not shadowed.
    for existing_id in prior:
        if existing_id not in preserve:
            context._delObject(existing_id)

    objects = import_context.readDataFile('.objects', subdir)
    if objects is None:
        # No manifest for this container: nothing to import.
        return

    dialect = 'excel'
    stream = StringIO(objects)
    rowiter = reader(stream, dialect)

    existing = context.objectIds()

    # Skip blank rows so a trailing newline in '.objects' does not
    # raise ValueError when unpacking (object_id, portal_type).
    # (Matches the empty-row filtering done by the workflow-aware
    # importer variant elsewhere in this file.)
    for object_id, portal_type in (row for row in rowiter if row):
        if object_id not in existing:
            object = self._makeInstance(object_id, portal_type,
                                        subdir, import_context)
            if object is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        # Recurse into the (possibly pre-existing) child object.
        wrapped = context._getOb(object_id)
        IFilesystemImporter(wrapped).import_(import_context, subdir)
def importPAS(context):
    """Import step: run the filesystem importer over the 'PAS' subtree."""
    site = context.getSite()
    importer = IFilesystemImporter(site)
    importer.import_(context, 'PAS', True)
def importSiteStructure(context):
    """Import step: run the filesystem importer over the 'structure' subtree."""
    site = context.getSite()
    importer = IFilesystemImporter(site)
    importer.import_(context, 'structure', True)
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Workflow-aware variant: after the usual manifest-driven
    delete/preserve/create pass, replays per-object workflow states
    from the '.workflow_states' CSV (rows of object_id, workflow_id,
    state_id, grouped by object_id).
    """
    context = self.context
    if not root:
        subdir = '%s/%s' % (subdir, context.getId())

    objects = self.read_data_file(import_context, '.objects', subdir)
    workflow_states = self.read_data_file(import_context,
                                          '.workflow_states', subdir)
    if objects is None:
        # No manifest for this container: nothing to import.
        return

    dialect = 'excel'
    object_stream = StringIO(objects)
    # NOTE: StringIO(None) yields an empty stream, so this is safe even
    # when '.workflow_states' is absent; the wf pass is guarded below.
    wf_stream = StringIO(workflow_states)
    object_rowiter = reader(object_stream, dialect)
    # Drop empty rows (e.g. from a trailing newline in the CSV).
    ours = [_f for _f in tuple(object_rowiter) if _f]
    our_ids = set([item[0] for item in ours])
    prior = set(context.contentIds())

    preserve = self.read_data_file(import_context, '.preserve', subdir)
    if not preserve:
        preserve = set()
    else:
        # Only ids that exist AND are in the manifest can be preserved;
        # '.preserve' holds glob patterns.
        preservable = prior.intersection(our_ids)
        preserve = set(_globtest(preserve, preservable))

    delete = self.read_data_file(import_context, '.delete', subdir)
    if not delete:
        delete = set()
    else:
        # '.delete' globs apply only to existing ids NOT in the manifest.
        deletable = prior.difference(our_ids)
        delete = set(_globtest(delete, deletable))

    # if it's in our_ids and NOT in preserve, or if it's not in
    # our_ids but IS in delete, we're gonna delete it
    delete = our_ids.difference(preserve).union(delete)

    for id in prior.intersection(delete):
        context._delObject(id)

    existing = context.objectIds()

    for object_id, portal_type in ours:
        if object_id not in existing:
            object = self._makeInstance(object_id, portal_type,
                                        subdir, import_context)
            if object is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        # Recurse into the (possibly pre-existing) child object.
        wrapped = context._getOb(object_id)
        IFilesystemImporter(wrapped).import_(import_context, subdir)

    if workflow_states is not None:
        existing = context.objectIds()
        wft = context.portal_workflow
        wf_rowiter = reader(wf_stream, dialect)
        # Rows are grouped by their first column (the object id); each
        # group must be consumed before advancing the groupby iterator.
        wf_by_objectid = itertools.groupby(wf_rowiter,
                                           operator.itemgetter(0))
        for object_id, states in wf_by_objectid:
            if object_id not in existing:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't set workflow for object %s/%s, it"
                               " doesn't exist" % (context.id, object_id))
                continue
            object = context[object_id]
            # NOTE(review): the inner loop rebinds `object_id` — harmless
            # here since the group shares one id, but worth renaming.
            for object_id, workflow_id, state_id in states:
                workflow = wft.getWorkflowById(workflow_id)
                state_variable = workflow.state_var
                wf_state = {
                    'action': None,
                    'actor': None,
                    'comments': 'Setting state to %s' % state_id,
                    state_variable: state_id,
                    'time': DateTime()
                }
                wft.setStatusOf(workflow_id, object, wf_state)
                workflow.updateRoleMappingsFor(object)
                object.reindexObject()
def testGSContentCompatible(self):
    """An Image object must be adaptable to the GenericSetup
    filesystem export/import interfaces."""
    new_id = self.folder.invokeFactory('Image', 'foo')
    foo = self.folder[new_id]
    self.assertTrue(IFilesystemExporter(foo))
    self.assertTrue(IFilesystemImporter(foo))