def test_ignoring_fields_of_field_data_adapter_plone_4(self):
    """Fields configured as ignored for a portal type must no longer
    appear in the extracted field data."""
    self.grant('Manager')
    folder = create(Builder('folder').titled(u'Foo'))

    def extract():
        # Run a fresh "push" extraction and decode it back to a dict.
        return encode_after_json(json.loads(Extractor()(folder, 'push', {})))

    # Without any configuration, the description field is extracted.
    before = extract()
    self.assertIn('field_data_adapter', before)
    self.assertIn('description', before['field_data_adapter'])

    # After ignoring "description" for folders, it must disappear.
    IConfig(self.portal).set_ignored_fields({'Folder': ['description']})
    after = extract()
    self.assertNotIn('description', after['field_data_adapter'])
def test_delete_job_get_metadata_plone_5(self):
    """A "delete" extraction must contain only the metadata block
    identifying the object to be removed on the target."""
    self.grant('Manager')
    with freeze(datetime(2030, 1, 2, 4, 5)) as clock:
        folder = create(
            Builder('folder').titled(u'The Folder').within(
                create(Builder('folder').titled(u'Foo'))))
        # Advance the clock so extraction time differs from creation time.
        clock.forward(hours=1)
        data = encode_after_json(
            json.loads(Extractor()(folder, 'delete', {})))
        self.maxDiff = None
        # Fix: use assertEqual; assertEquals is a deprecated unittest alias.
        self.assertEqual(
            {
                'metadata': {
                    'UID': 'testdeletejobgetmetadatapl000002',
                    'action': 'delete',
                    'id': 'the-folder',
                    'modified': '2030/01/02 04:05:00 GMT+1',
                    'physicalPath': '/foo/the-folder',
                    'portal_type': 'Folder',
                    'review_state': '',
                    'sibling_positions': {
                        'the-folder': 0
                    }
                }
            },
            data)
def __call__(self):
    """Perform a deferred publisher extraction for a queued job.

    Reads the job parameters from the request form, looks up the target
    object by path and, once the object exists and carries the expected
    annotation token, writes the extraction result to ``filepath``.
    When the object is missing or its token is stale, the job is
    re-enqueued (up to ``MAX_ATTEMPTS`` times, sleeping
    ``TIMEOUT_BETWEEN_ATTEMPTS`` between tries) to tolerate workers
    running before the triggering transaction is committed.
    """
    logger = getLogger()
    action = self.request.form['action']
    filepath = self.request.form['filepath']
    path = self.request.form['path']
    additional_data = encode_after_json(
        json.loads(self.request.form['additional_data']))
    # ``None`` default: a missing object triggers the retry logic below
    # instead of raising from the traversal.
    obj = api.portal.get().unrestrictedTraverse(path, None)
    require_token = self.request.form['token']
    # NOTE(review): Zope form values are typically strings; the
    # ``attempt < MAX_ATTEMPTS`` comparison and ``attempt + 1``
    # arithmetic below assume ``attempt`` arrives as an int — confirm
    # the enqueuing side marshals it accordingly.
    attempt = self.request.form['attempt']
    if obj is None:
        if attempt < MAX_ATTEMPTS:
            # Object not found (yet): the creating transaction may not
            # be committed — wait and requeue with an incremented
            # attempt counter.
            sleep(TIMEOUT_BETWEEN_ATTEMPTS)
            return enqueue_deferred_extraction(None, action, filepath,
                                               additional_data,
                                               attempt=attempt + 1,
                                               token=require_token,
                                               path=path)
        else:
            # Destination is gone for good: drop the pending JSON file.
            os.remove(filepath)
            logger.warning(
                'Removed "{0}", since the destination {1} no longer '
                'exists.'.format(filepath, path))
            return 'JSON File "{0}" removed'.format(filepath)
    current_token = IAnnotations(obj).get(TOKEN_ANNOTATION_KEY, None)
    if current_token != require_token:
        # The current version of the object is not the version we have
        # planned to extract.
        if attempt < MAX_ATTEMPTS:
            # Lets retry for solving the problem that the worker is too
            # early and the transaction which triggered the action was not
            # yet commited to the database.
            sleep(TIMEOUT_BETWEEN_ATTEMPTS)
            return enqueue_deferred_extraction(obj, action, filepath,
                                               additional_data,
                                               attempt=attempt + 1,
                                               token=require_token)
        else:
            raise Exception(
                'Unexpected object version'
                + ' after {!r} attempts.'.format(attempt)
                + ' Required token: {!r},'.format(require_token)
                + ' got token: {!r}'.format(current_token))
    extractor = Extractor()
    data = extractor(obj, action, additional_data)
    # Persist the extraction result where the queue processor expects it.
    with open(filepath, 'w') as target:
        target.write(data)
    return 'OK'
def test_dexterity_image_extractor(self):
    """A Dexterity image extraction must match the recorded asset."""
    self.grant('Manager')
    self.maxDiff = None
    with freeze(datetime(2030, 1, 2, 4, 5)):
        container = create(Builder('folder').titled(u'Foo'))
        image = create(
            Builder('image')
            .titled(u'An image')
            .with_dummy_content()
            .having(description=u'Description of the image')
            .within(container))
        expected = encode_after_json(
            json.loads(self.asset('image_dx.json').text()))
        actual = encode_after_json(
            json.loads(Extractor()(image, 'push', {})))
        self.assertDictEqual(expected, actual)
def test_archetypes_folder_extractor(self):
    """An Archetypes folder extraction must match the recorded asset."""
    self.grant('Manager')
    self.maxDiff = None
    with freeze(datetime(2030, 1, 2, 4, 5)):
        container = create(Builder('folder').titled(u'Foo'))
        folder = create(
            Builder('folder')
            .titled(u'A folder')
            .having(description=u'Description of the folder')
            .within(container))
        expected = encode_after_json(
            json.loads(self.asset('folder_at.json').text()))
        actual = encode_after_json(
            json.loads(Extractor()(folder, 'push', {})))
        self.assertDictEqual(expected, actual)