def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Read the '.objects' CSV manifest for this folder, delete prior
    content according to the '.preserve' / '.delete' glob files, then
    (re)create and recursively import each listed object.
    """
    context = self.context
    if not root:
        subdir = '%s/%s' % (subdir, context.getId())

    objects = import_context.readDataFile('.objects', subdir)
    if objects is None:
        return

    dialect = 'excel'
    stream = StringIO(objects)
    rowiter = reader(stream, dialect)
    # BUG FIX: the original used filter(None, ...), which under Python 3
    # returns a one-shot iterator.  'ours' is iterated twice below (once
    # to build 'our_ids', once in the creation loop), so the second pass
    # saw nothing.  Materialize as a list; blank CSV rows are still
    # skipped, exactly as filter(None, ...) did.
    ours = [row for row in rowiter if row]
    our_ids = set([item[0] for item in ours])

    prior = set(context.contentIds())

    preserve = import_context.readDataFile('.preserve', subdir)
    if not preserve:
        preserve = set()
    else:
        # Only ids that both exist and appear in the manifest are
        # candidates for preservation.
        preservable = prior.intersection(our_ids)
        preserve = set(_globtest(preserve, preservable))

    delete = import_context.readDataFile('.delete', subdir)
    if not delete:
        delete = set()
    else:
        # Only ids that exist but are NOT in the manifest are candidates
        # for explicit deletion.
        deletable = prior.difference(our_ids)
        delete = set(_globtest(delete, deletable))

    # if it's in our_ids and NOT in preserve, or if it's not in
    # our_ids but IS in delete, we're gonna delete it
    delete = our_ids.difference(preserve).union(delete)

    for id in prior.intersection(delete):
        context._delObject(id)

    existing = context.objectIds()

    for object_id, portal_type in ours:
        if object_id not in existing:
            object = self._makeInstance(object_id, portal_type,
                                        subdir, import_context)
            if object is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        # Recurse into the (possibly pre-existing) child object.
        wrapped = context._getOb(object_id)
        IFilesystemImporter(wrapped).import_(import_context, subdir)
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Synchronize this folder with its '.objects' manifest: remove prior
    content not covered by '.preserve' (or matched by '.delete'), then
    create and recursively import every manifest entry.
    """
    folder = self.context
    if not root:
        subdir = '%s/%s' % (subdir, folder.getId())

    manifest = import_context.readDataFile('.objects', subdir)
    if manifest is None:
        return

    rows = tuple(reader(StringIO(manifest), 'excel'))
    listed_ids = set(row[0] for row in rows)
    prior = set(folder.contentIds())

    keep_spec = import_context.readDataFile('.preserve', subdir)
    if keep_spec:
        # Only ids that exist AND appear in the manifest may be preserved.
        keep = set(_globtest(keep_spec, prior.intersection(listed_ids)))
    else:
        keep = set()

    drop_spec = import_context.readDataFile('.delete', subdir)
    if drop_spec:
        # Only ids that exist but are absent from the manifest may be
        # explicitly deleted.
        drop = set(_globtest(drop_spec, prior.difference(listed_ids)))
    else:
        drop = set()

    # Anything listed-but-not-preserved goes, plus anything matched by
    # the explicit '.delete' globs.
    doomed = listed_ids.difference(keep).union(drop)
    for doomed_id in prior.intersection(doomed):
        folder._delObject(doomed_id)

    present = folder.objectIds()
    for object_id, portal_type in rows:
        if object_id not in present:
            made = self._makeInstance(object_id, portal_type,
                                      subdir, import_context)
            if made is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        child = folder._getOb(object_id)
        IFilesystemImporter(child).import_(import_context, subdir)
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Restore this object's body from '.data' via its DAV PUT handler,
    prune children not covered by '.preserve' or self._mustPreserve(),
    then create and recursively import the '.objects' manifest entries.
    """
    obj = self.context
    if not root:
        subdir = '%s/%s' % (subdir, obj.getId())

    body = import_context.readDataFile('.data', subdir)
    if body is not None:
        # Replay the exported raw body through the object's own PUT.
        request = FauxDAVRequest(BODY=body, BODYFILE=StringIO(body))
        response = FauxDAVResponse()
        obj.PUT(request, response)

    keep_spec = import_context.readDataFile('.preserve', subdir)
    required = self._mustPreserve()
    prior = obj.objectIds()

    keep = _globtest(keep_spec, prior) if keep_spec else []
    keep.extend([entry[0] for entry in required])

    for existing_id in prior:
        if existing_id not in keep:
            obj._delObject(existing_id)

    manifest = import_context.readDataFile('.objects', subdir)
    if manifest is None:
        return

    # Skip blank rows from the manifest CSV.
    entries = [row for row in reader(StringIO(manifest), 'excel') if row]

    present = obj.objectIds()
    for object_id, type_name in entries:
        if object_id not in present:
            made = self._makeInstance(object_id, type_name,
                                      subdir, import_context)
            if made is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        child = obj._getOb(object_id)
        # Children without an importer adapter are simply skipped.
        adapted = queryAdapter(child, IFilesystemImporter)
        if adapted is not None:
            adapted.import_(import_context, subdir)
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Delete prior content not matched by the '.preserve' globs, then
    create and recursively import each entry of the '.objects' CSV
    manifest.
    """
    context = self.context
    if not root:
        subdir = '%s/%s' % (subdir, context.getId())

    preserve = import_context.readDataFile('.preserve', subdir)
    prior = context.contentIds()

    if not preserve:
        preserve = []
    else:
        preserve = _globtest(preserve, prior)

    for id in prior:
        if id not in preserve:
            context._delObject(id)

    objects = import_context.readDataFile('.objects', subdir)
    if objects is None:
        return

    dialect = 'excel'
    stream = StringIO(objects)
    rowiter = reader(stream, dialect)

    existing = context.objectIds()

    # BUG FIX: skip blank rows (e.g. a trailing newline in the CSV),
    # which previously raised ValueError when unpacked below.  The
    # sibling importers in this module filter empty rows the same way.
    for object_id, portal_type in (row for row in rowiter if row):
        if object_id not in existing:
            object = self._makeInstance(object_id, portal_type,
                                        subdir, import_context)
            if object is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        wrapped = context._getOb(object_id)
        IFilesystemImporter(wrapped).import_(import_context, subdir)
def _checkResults(self, globpattern, namelist, expected):
    """Assert that _globtest(globpattern, namelist) yields 'expected'.

    Checks length first, then compares the matches pairwise so a
    failure pinpoints the first differing element.
    """
    from Products.GenericSetup.content import _globtest

    matches = _globtest(globpattern, namelist)

    self.assertEqual(len(matches), len(expected))
    for got, want in zip(matches, expected):
        self.assertEqual(got, want)
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Import the folder contents listed in the '.objects' manifest
    (honoring '.preserve' / '.delete' globs) and, when a
    '.workflow_states' file is present, restore each object's workflow
    state afterwards and reindex it.
    """
    folder = self.context
    if not root:
        subdir = '%s/%s' % (subdir, folder.getId())

    objects = self.read_data_file(import_context, '.objects', subdir)
    workflow_states = self.read_data_file(import_context,
                                          '.workflow_states', subdir)
    if objects is None:
        return

    dialect = 'excel'
    object_stream = StringIO(objects)
    # StringIO(None) is an empty stream, so this is safe even when no
    # workflow data was exported.
    wf_stream = StringIO(workflow_states)

    listed = [row for row in tuple(reader(object_stream, dialect)) if row]
    listed_ids = set(entry[0] for entry in listed)
    prior = set(folder.contentIds())

    keep_spec = self.read_data_file(import_context, '.preserve', subdir)
    if keep_spec:
        keep = set(_globtest(keep_spec, prior.intersection(listed_ids)))
    else:
        keep = set()

    drop_spec = self.read_data_file(import_context, '.delete', subdir)
    if drop_spec:
        drop = set(_globtest(drop_spec, prior.difference(listed_ids)))
    else:
        drop = set()

    # if it's in listed_ids and NOT preserved, or if it's not listed but
    # IS matched by '.delete', we're gonna delete it
    drop = listed_ids.difference(keep).union(drop)
    for doomed in prior.intersection(drop):
        folder._delObject(doomed)

    present = folder.objectIds()
    for object_id, portal_type in listed:
        if object_id not in present:
            made = self._makeInstance(object_id, portal_type,
                                      subdir, import_context)
            if made is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        child = folder._getOb(object_id)
        IFilesystemImporter(child).import_(import_context, subdir)

    if workflow_states is not None:
        present = folder.objectIds()
        wft = folder.portal_workflow
        # Rows are grouped by object id; each row is
        # (object_id, workflow_id, state_id).
        grouped = itertools.groupby(reader(wf_stream, dialect),
                                    operator.itemgetter(0))
        for object_id, state_rows in grouped:
            if object_id not in present:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't set workflow for object %s/%s, it"
                               " doesn't exist" % (folder.id, object_id))
                continue

            target = folder[object_id]
            for object_id, workflow_id, state_id in state_rows:
                workflow = wft.getWorkflowById(workflow_id)
                state_variable = workflow.state_var
                wf_state = {
                    'action': None,
                    'actor': None,
                    'comments': 'Setting state to %s' % state_id,
                    state_variable: state_id,
                    'time': DateTime(),
                }
                wft.setStatusOf(workflow_id, target, wf_state)
                # Re-apply security and catalog data for the new state.
                workflow.updateRoleMappingsFor(target)
                target.reindexObject()
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Import the folder contents listed in the '.objects' manifest
    (honoring '.preserve' / '.delete' globs) and, when a
    '.workflow_states' file is present, restore each object's workflow
    state afterwards and reindex it.
    """
    context = self.context
    if not root:
        subdir = '%s/%s' % (subdir, context.getId())

    objects = import_context.readDataFile('.objects', subdir)
    workflow_states = import_context.readDataFile('.workflow_states',
                                                  subdir)
    if objects is None:
        return

    dialect = 'excel'
    object_stream = StringIO(objects)
    # StringIO(None) is an empty stream, so this is safe even when no
    # workflow data was exported.
    wf_stream = StringIO(workflow_states)

    object_rowiter = reader(object_stream, dialect)
    # BUG FIX: the original used filter(None, ...), a one-shot iterator
    # under Python 3; 'ours' is iterated twice below (to build 'our_ids'
    # and again in the creation loop), so the second pass saw nothing.
    # Materialize as a list; blank CSV rows are still skipped.
    ours = [row for row in object_rowiter if row]
    our_ids = set([item[0] for item in ours])

    prior = set(context.contentIds())

    preserve = import_context.readDataFile('.preserve', subdir)
    if not preserve:
        preserve = set()
    else:
        preservable = prior.intersection(our_ids)
        preserve = set(_globtest(preserve, preservable))

    delete = import_context.readDataFile('.delete', subdir)
    if not delete:
        delete = set()
    else:
        deletable = prior.difference(our_ids)
        delete = set(_globtest(delete, deletable))

    # if it's in our_ids and NOT in preserve, or if it's not in
    # our_ids but IS in delete, we're gonna delete it
    delete = our_ids.difference(preserve).union(delete)

    for id in prior.intersection(delete):
        context._delObject(id)

    existing = context.objectIds()

    for object_id, portal_type in ours:
        if object_id not in existing:
            object = self._makeInstance(object_id, portal_type,
                                        subdir, import_context)
            if object is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue

        wrapped = context._getOb(object_id)
        IFilesystemImporter(wrapped).import_(import_context, subdir)

    if workflow_states is not None:
        existing = context.objectIds()
        wft = context.portal_workflow
        # Rows are grouped by object id; each row is
        # (object_id, workflow_id, state_id).
        wf_rowiter = reader(wf_stream, dialect)
        wf_by_objectid = itertools.groupby(wf_rowiter,
                                           operator.itemgetter(0))
        for object_id, states in wf_by_objectid:
            if object_id not in existing:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't set workflow for object %s/%s as"
                               " it doesn't exist" % (context.id,
                                                      object_id))
                continue

            object = context[object_id]
            for object_id, workflow_id, state_id in states:
                workflow = wft.getWorkflowById(workflow_id)
                state_variable = workflow.state_var
                wf_state = {
                    'action': None,
                    'actor': None,
                    'comments': "Setting state to %s" % state_id,
                    state_variable: state_id,
                    'time': DateTime(),
                }
                wft.setStatusOf(workflow_id, object, wf_state)
                # Re-apply security and catalog data for the new state.
                workflow.updateRoleMappingsFor(object)
                object.reindexObject()