def __call__(self, **kwargs):
    """Archive the context object, honouring the form options.

    POST only.  Reads the archive options from the request form:
    ``workflow_archive_recurse`` also archives children,
    ``workflow_archive_previous_versions`` also archives earlier
    versions.  When the context is a folder's default page, the
    parent folder is archived instead.  Returns "OK".
    """
    PostOnly(self.request)
    form = self.request.form
    recurse = form.get('workflow_archive_recurse', False)
    prev_versions = form.get('workflow_archive_previous_versions', False)
    options = {
        'initiator': form.get('workflow_archive_initiator', ''),
        'custom_message': form.get('workflow_other_reason', '').strip(),
        'reason': form.get('workflow_reasons_radio', 'other'),
    }

    # Archive the parent folder when the context is its default page.
    target = self.context
    ploneview = getMultiAdapter((target, self.request), name='plone')
    if ploneview.isDefaultPageInFolder():
        target = self.context.getParentNode()

    # Guard-style dispatch: pick the main archive call first, then
    # optionally sweep up previous versions.
    if recurse:
        archive_obj_and_children(target, **options)
        if prev_versions:
            archive_previous_versions(target, also_children=True, **options)
    else:
        archive_object(target, **options)
        if prev_versions:
            archive_previous_versions(target, **options)
    return "OK"
def bulkArchive(self, brains=None, paths=None, initiator=None, reason=None,
                custom_message=None, archive_date=None, batchnr=20):
    """Archive many objects in batches (multi transactions).

    Objects come either from catalog ``brains`` or, failing that, from
    ``paths`` resolved with ``unrestrictedTraverse``.  Each object is
    archived together with its children and its translations.  The
    transaction is committed every ``batchnr`` objects so a huge run
    does not accumulate into one giant transaction.

    Returns the list of absolute URLs of every archived object.
    """
    info('INFO: starting bulk archive')
    result = []

    # Decide data source (brains take precedence over paths).
    if brains:
        datasource = brains
    elif paths:
        datasource = paths
    else:
        datasource = []
    # Always defined, even for an empty datasource (the original left
    # totobs unbound in that case).
    totobs = len(datasource)

    # Archive loop.  Note: distinct loop variables — the original
    # reused `k` for both the outer and the inner loop.
    trans_count = 0
    for item in datasource:
        trans_count += 1
        if brains:
            obj = item.getObject()
        else:
            obj = self.unrestrictedTraverse(item, None)
        if obj is None:
            # Stale path: log and keep going instead of crashing mid-run.
            info('INFO: skipping unresolvable item | %s', item)
            continue
        affected_objects = archive_obj_and_children(
            obj, initiator=initiator, reason=reason,
            custom_message=custom_message, archive_date=archive_date)
        affected_objects.extend(
            archive_translations(obj, also_children=True, also_versions=True,
                                 initiator=initiator, reason=reason,
                                 custom_message=custom_message,
                                 archive_date=archive_date))
        for archived in affected_objects:
            obj_url = archived.absolute_url()
            info('INFO: object archived | %s', obj_url)
            result.append(obj_url)
        if trans_count % batchnr == 0:
            info('INFO: processing %s/%s objects', trans_count, totobs)
            transaction.commit()
    info('INFO: Done archive!')
    return result
def bulkArchive(self, brains=None, paths=None, initiator=None, reason=None,
                custom_message=None, archive_date=None, batchnr=20):
    """Archive many objects in batches (multi transactions).

    Objects come either from catalog ``brains`` or, failing that, from
    ``paths`` resolved with ``unrestrictedTraverse``.  Each object is
    archived together with its children and its translations.  The
    transaction is committed every ``batchnr`` objects so a huge run
    does not accumulate into one giant transaction.

    Returns the list of absolute URLs of every archived object.
    """
    info('INFO: starting bulk archive')
    result = []

    # Decide data source (brains take precedence over paths).
    if brains:
        datasource = brains
    elif paths:
        datasource = paths
    else:
        datasource = []
    # Always defined, even for an empty datasource (the original left
    # totobs unbound in that case).
    totobs = len(datasource)

    # Archive loop.  Note: distinct loop variables — the original
    # reused `k` for both the outer and the inner loop.
    trans_count = 0
    for item in datasource:
        trans_count += 1
        if brains:
            obj = item.getObject()
        else:
            obj = self.unrestrictedTraverse(item, None)
        if obj is None:
            # Stale path: log and keep going instead of crashing mid-run.
            info('INFO: skipping unresolvable item | %s', item)
            continue
        affected_objects = archive_obj_and_children(
            obj, initiator=initiator, reason=reason,
            custom_message=custom_message, archive_date=archive_date)
        affected_objects.extend(
            archive_translations(obj, also_children=True, also_versions=True,
                                 initiator=initiator, reason=reason,
                                 custom_message=custom_message,
                                 archive_date=archive_date))
        for archived in affected_objects:
            obj_url = archived.absolute_url()
            info('INFO: object archived | %s', obj_url)
            result.append(obj_url)
        if trans_count % batchnr == 0:
            info('INFO: processing %s/%s objects', trans_count, totobs)
            transaction.commit()
    info('INFO: Done archive!')
    return result
def test_archive_obj_and_children(self):
    """Archiving a folder marks both the folder and its child doc.

    NOTE(review): the original docstring said "Test history version",
    which describes a different test — corrected to match the body.
    """
    archive_obj_and_children(self.folder)
    assert IObjectArchived.providedBy(self.folder)
    assert IObjectArchived.providedBy(self.doc)
def test_archive_obj_and_children(self):
    """ Test the archival of the object and children """
    archive_obj_and_children(self.folder)
    # Both the folder and its contained document must be marked.
    for archived_obj in (self.folder, self.doc):
        assert IObjectArchived.providedBy(archived_obj)