def test_overridden_realms_config(self):
    """A realm registered through the ``publisher:override-realm`` ZCML
    directive is listed, but the realm configuration becomes read-only.
    """
    self.layer.load_zcml_string(
        '\n'.join((
            '<configure xmlns:publisher="http://namespaces.' + \
            'zope.org/ftw.publisher">',
            ' <publisher:override-realm',
            ' url="http://localhost:9090/site"',
            ' username="******"',
            ' password="******" />',
            '</configure>'
            )))

    portal = self.providing_stub([IPloneSiteRoot, IAttributeAnnotatable])
    self.expect(portal.portal_url.getPortalObject()).result(portal)
    self.replay()

    config = IConfig(portal)
    self.assertTrue(config)
    # ZCML-defined realms cannot be changed through the web.
    self.assertFalse(config.is_update_realms_possible())
    self.assertEquals(len(config.getRealms()), 1)

    # attempting to modify an overridden realm config must fail
    with self.assertRaises(AttributeError):
        config.appendRealm(Realm(1, 'http://site', 'foo', 'pw'))
def test_adapter_not_called_when_publishing_disabled(self):
    """The notifier adapter must not fire while publishing is disabled."""
    # expect zero calls to the notifier
    self.expect(self.notifier()).count(0)
    self.replay()

    config = IConfig(self.portal)
    config.set_publishing_enabled(False)
    self.portal.unrestrictedTraverse('@@publisher.executeQueue')()
def handleSave(self, action):
    """Save changes to an existing realm.

    Validates the form data, ensures the URL / username combination
    stays unique across all realms, updates the matching realm in
    place and redirects back to the publisher configuration view.

    @param action: ActionInfo object provided by z3c.form
    @return: None (form is shown again) or Response-redirect
    """
    data, errors = self.extractData()
    config = IConfig(self.context)
    if not errors:
        # Realms may be frozen by a ZCML override; only assert when we
        # actually attempt to modify them (consistent with handleAdd).
        assert config.is_update_realms_possible()
        # get realm
        currentRealm = self.getRealmById(data['id'])
        if not currentRealm:
            raise Exception('Could not find realm')
        # no other realm should have same url+username
        for realm in config.getRealms():
            if realm != currentRealm:
                if realm.username == data['username'] and \
                        realm.url == data['url']:
                    self.statusMessage(
                        'This URL / Username combination already exists!',
                        'error')
                    return
        # update realm
        currentRealm.active = 1 if data['active'] else 0
        currentRealm.url = data['url']
        currentRealm.username = data['username']
        # only overwrite the password when a new one was entered
        if data['password']:
            currentRealm.password = data['password']
        self.statusMessage('Updated realm successfully')
        return self.request.RESPONSE.redirect('./@@publisher-config')
def handleAdd(self, action):
    """This handler handles a click on the "Add Realm"-Button.
    If no errors occured, it adds a new Realm to the Config.

    @param action: ActionInfo object provided by z3c.form
    @return: None (form is shown) or Response-redirect
    """
    data, errors = self.extractData()
    config = IConfig(self.context)
    if not errors:
        # Realms may be frozen by a ZCML override; adding is then a bug.
        assert config.is_update_realms_possible()
        # url + username has to be unique
        for realm in config.getRealms():
            if realm.url == data['url'] and \
                    realm.username == data['username']:
                self.statusMessage(
                    'This URL / Username combination already exists!',
                    'error')
                return
        realm = Realm(
            active=1 if data['active'] else 0,
            url=data['url'],
            username=data['username'],
            password=data['password'])
        config.appendRealm(realm)
        self.statusMessage('Added realm successfully')
        return self.request.RESPONSE.redirect('./@@publisher-config')
def test_sender(self, browser):
    """Saving the global status message config publishes the settings
    as JSON to the configured realm's receiver view."""
    intercepted_data = {}

    class MockedReceiverView(BrowserView):
        # captures the payload the sender posts to the receiver endpoint
        def __call__(self):
            intercepted_data['jsondata'] = self.request.form.get('jsondata')
            return createResponse(SuccessState())

    # register the local site itself as the target realm
    config = IConfig(self.portal)
    config.appendRealm(Realm(1, self.portal.absolute_url(),
                             SITE_OWNER_NAME, SITE_OWNER_PASSWORD))
    transaction.commit()

    with view_registered(MockedReceiverView,
                         'global_statusmessage_config_receiver',
                         required=(IPloneSiteRoot, Interface)):
        browser.login(SITE_OWNER_NAME).open(view='@@global_statusmessage_config')
        browser.fill(self.form_data)
        browser.click_on('Save and publish')

        self.assertDictEqual(
            self.expected_settings,
            encode_after_json(json.loads(intercepted_data['jsondata']))
            )
        self.assertEqual(
            ['Changes saved and published.'],
            info_messages()
            )
def test_sender(self, browser):
    """Saving the global status message config publishes the settings
    as JSON to the configured realm's receiver view."""
    intercepted_data = {}

    class MockedReceiverView(BrowserView):
        # captures the payload the sender posts to the receiver endpoint
        def __call__(self):
            intercepted_data['jsondata'] = self.request.form.get(
                'jsondata')
            return createResponse(SuccessState())

    # register the local site itself as the target realm
    config = IConfig(self.portal)
    config.appendRealm(
        Realm(1, self.portal.absolute_url(),
              SITE_OWNER_NAME, SITE_OWNER_PASSWORD))
    transaction.commit()

    with view_registered(MockedReceiverView,
                         'global_statusmessage_config_receiver',
                         required=(IPloneSiteRoot, Interface)):
        browser.login(SITE_OWNER_NAME).open(
            view='@@global_statusmessage_config')
        browser.fill(self.form_data)
        browser.click_on('Save and publish')

        self.assertDictEqual(
            self.expected_settings,
            encode_after_json(json.loads(intercepted_data['jsondata'])))
        self.assertEqual(['Changes saved and published.'], info_messages())
def handleSave(self, action):
    """Save changes to an existing realm.

    Validates the form data, ensures the URL / username combination
    stays unique across all realms, updates the matching realm in
    place and redirects back to the publisher configuration view.

    @param action: ActionInfo object provided by z3c.form
    @return: None (form is shown again) or Response-redirect
    """
    data, errors = self.extractData()
    config = IConfig(self.context)
    if not errors:
        # Realms may be frozen by a ZCML override; only assert when we
        # actually attempt to modify them (consistent with handleAdd).
        assert config.is_update_realms_possible()
        # get realm
        currentRealm = self.getRealmById(data['id'])
        if not currentRealm:
            raise Exception('Could not find realm')
        # no other realm should have same url+username
        for realm in config.getRealms():
            if realm != currentRealm:
                if realm.username == data['username'] and \
                        realm.url == data['url']:
                    self.statusMessage(
                        'This URL / Username combination already exists!',
                        'error')
                    return
        # update realm
        currentRealm.active = 1 if data['active'] else 0
        currentRealm.url = data['url']
        currentRealm.username = data['username']
        # only overwrite the password when a new one was entered
        if data['password']:
            currentRealm.password = data['password']
        self.statusMessage('Updated realm successfully')
        return self.request.RESPONSE.redirect('./@@publisher-config')
def test_blacklist_does_not_affect_parents(self):
    """Blacklisting a deep path must leave its ancestors publishable."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    config.appendPathToBlacklist('/foo/bar/one/two/three')

    blacklist = IPathBlacklist(self.portal)
    # ancestors of the blacklisted path stay publishable
    for ancestor in ('/foo/bar', '/foo/bar/one/two'):
        self.assertFalse(blacklist.is_blacklisted(ancestor))
    # only the exact path itself is blocked
    self.assertTrue(blacklist.is_blacklisted('/foo/bar/one/two/three'))
def test_blacklisting_intermediate_wildcards_does_not_affect_parents(self):
    """A `*` in the middle of an entry is not treated as a wildcard."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    config.appendPathToBlacklist('/demo/one*two')

    blacklist = IPathBlacklist(self.portal)
    # neither the parent, a same-segment sibling nor a child matches
    for candidate in ('/demo/one', '/demo/oneXtwo', '/demo/one/two'):
        self.assertFalse(blacklist.is_blacklisted(candidate))
def __call__(self): """ Handles logging purposes and calls execute() method. """ # get config and queue self.config = IConfig(self.context) portal = self.context.portal_url.getPortalObject() self.queue = IQueue(portal) event.notify(BeforeQueueExecutionEvent(portal, self.queue)) # prepare logger self.logger = getLogger() self.error_logger = getErrorLogger() # is it allowed to publish? if not self.config.publishing_enabled(): self.logger.warning('PUBLISHING IS DISABLED') return 'PUBLISHING IS DISABLED' if self.config.locking_enabled(): self.logger.info('LOCKING IS ENABLED') else: self.logger.info('LOCKING IS DISABLED') # lock - check for locking flag if self.config.locking_enabled( ) and not self.get_lock_object().acquire(0): self.logger.warning('Already publishing') return 'Already publishing' # register our own logging handler for returning logs afterwards logStream = StringIO() logHandler = logging.StreamHandler(logStream) self.logger.addHandler(logHandler) # be sure to remove the handler! try: # execute queue self.execute() except Exception: self.logger.removeHandler(logHandler) if self.config.locking_enabled(): self.get_lock_object().release() # re-raise exception raise # get logs self.logger.removeHandler(logHandler) logStream.seek(0) log = logStream.read() del logStream del logHandler # unlock if self.config.locking_enabled(): self.get_lock_object().release() event.notify(QueueExecutedEvent(portal, log)) return log
def add_saved_input_to_ignored_fields(site):
    """Exclude the 'SavedFormInput' field of FormSaveDataAdapter
    objects from publishing.

    The stored form input should stay local to each site, so it is
    added (once) to the publisher's ignored-fields mapping.

    @param site: the Plone site adaptable to IConfig
    """
    config = IConfig(site)
    ignored_fields = config.get_ignored_fields()
    # create the entry on demand, then append the field only once
    fields = ignored_fields.setdefault('FormSaveDataAdapter', [])
    if 'SavedFormInput' not in fields:
        fields.append('SavedFormInput')
    config.set_ignored_fields(ignored_fields)
def test_adapter_is_called_even_when_publishing_disabled(self):
    """The threshold notifier fires even while publishing is disabled."""
    self.config.set_threshold(2)
    self.stub_current_queue_length(3)

    config = IConfig(self.portal)
    config.set_publishing_enabled(False)

    # the notifier is expected to be called (with any arguments)
    self.expect(self.notifier(ARGS, KWARGS))
    self.replay()

    self.portal.unrestrictedTraverse('@@publisher.executeQueue')()
def test_portal_can_be_blacklisted(self):
    """The portal object itself can be put on the blacklist."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    blacklist = IPathBlacklist(self.portal)

    # The portal is not blacklisted by default.
    self.assertFalse(blacklist.is_blacklisted(self.portal))

    # Blacklisting the portal's own path blocks the portal object.
    portal_path = '/'.join(self.portal.getPhysicalPath())
    config.appendPathToBlacklist(portal_path)
    self.assertTrue(blacklist.is_blacklisted(self.portal))
def __call__(self, object, action):
    """
    Extracts the required data (action dependent) from a object for
    creating a Job.

    @param object: Plone Object to export data from
    @param action: Action to perform [push|delete]
    @type action: string
    @return: data (json "encoded")
    @rtype: string
    """
    self.object = object
    self.is_root = IPloneSiteRoot.providedBy(self.object)
    data = {}

    if action not in ["delete", "move"]:
        # collect data from all registered IDataCollector adapters,
        # sorted by adapter name for a stable order
        adapters = sorted(getAdapters((self.object,), IDataCollector))
        for name, adapter in adapters:
            data[name] = adapter.getData()

    # gets the metadata, we dont use an adapter in this case,
    # cause metdata is the most important data-set we need
    data["metadata"] = self.getMetadata(action)

    # remove ignored fields
    portal = self.object.portal_url.getPortalObject()
    config = IConfig(portal)
    ignore = config.get_ignored_fields()
    for ptype, fields in ignore.items():
        if data["metadata"]["portal_type"] == ptype:
            for field in fields:
                if "field_data_adapter" in data and field in data["field_data_adapter"]:
                    del data["field_data_adapter"][field]

    if action == "move":
        # read out data from event_information attr
        # NOTE(review): getattr defaults to None; a missing
        # event_information attribute would crash below — confirm the
        # move event handler always sets it.
        move_data = getattr(self.object, "event_information", None)
        # make data convertable and shrink amount of data
        # (replace objects by path)
        del move_data["object"]
        portal_path = "/".join(
            self.object.portal_url.getPortalObject().getPhysicalPath())

        # store parents as portal-relative paths instead of objects
        new_parent_path = "/".join(move_data["newParent"].getPhysicalPath())
        new_parent_rpath = new_parent_path[len(portal_path):]
        move_data["newParent"] = new_parent_rpath

        old_parent_path = "/".join(move_data["oldParent"].getPhysicalPath())
        old_parent_rpath = old_parent_path[len(portal_path):]
        move_data["oldParent"] = old_parent_rpath

        move_data["newTitle"] = self.object.Title().decode("utf-8")
        data["move"] = move_data

        # finally remove event_information from object
        delattr(self.object, "event_information")

    # convert to json
    jsondata = self.convertToJson(data)
    return jsondata
def get_realm_options(self):
    """Yield one option dict (id, label, selected) per active realm."""
    portal = self.context.portal_url.getPortalObject()
    config = IConfig(portal)
    controller = IStatisticsCacheController(portal)
    current_realm = controller.get_current_realm()

    for realm in config.getRealms():
        # inactive realms are not offered for selection
        if not realm.active:
            continue
        yield {
            'id': self.make_realm_id(realm),
            'label': '%s : %s' % (realm.url, realm.username),
            'selected': realm == current_realm,
            }
def add_saved_input_to_ignored_fields(site):
    """Exclude the 'SavedFormInput' field of FormSaveDataAdapter
    objects from publishing.

    The stored form input should stay local to each site, so it is
    added (once) to the publisher's ignored-fields mapping.

    @param site: the Plone site adaptable to IConfig
    """
    config = IConfig(site)
    ignored_fields = config.get_ignored_fields()
    # create the entry on demand, then append the field only once
    fields = ignored_fields.setdefault('FormSaveDataAdapter', [])
    if 'SavedFormInput' not in fields:
        fields.append('SavedFormInput')
    config.set_ignored_fields(ignored_fields)
def handleAdd(self, action):
    """Add a path to the publisher path blacklist.

    Validates the form data, rejects non-absolute paths and redirects
    back to the blacklist configuration view on success.

    @param action: ActionInfo object provided by z3c.form
    """
    portal = self.context.portal_url.getPortalObject()
    config = IConfig(portal)
    data, errors = self.extractData()
    if not errors:
        path = data.get('path').strip()
        # only absolute (portal-relative) paths make sense here
        if not path.startswith('/'):
            raise Exception('Path does not start with /')
        config.appendPathToBlacklist(path)
        message = _(u'info_path_added', default=u'Path added')
        IStatusMessage(self.request).addStatusMessage(message, type='info')
        return self.request.RESPONSE.redirect(
            './@@publisher-config-blacklist')
def handleAdd(self, action):
    """Add a path to the publisher path blacklist.

    Validates the form data, rejects non-absolute paths and redirects
    back to the blacklist configuration view on success.

    @param action: ActionInfo object provided by z3c.form
    """
    portal = self.context.portal_url.getPortalObject()
    config = IConfig(portal)
    data, errors = self.extractData()
    if not errors:
        path = data.get('path').strip()
        # only absolute (portal-relative) paths make sense here
        if not path.startswith('/'):
            raise Exception('Path does not start with /')
        config.appendPathToBlacklist(path)
        message = _(u'info_path_added', default=u'Path added')
        IStatusMessage(self.request).addStatusMessage(
            message, type='info')
        return self.request.RESPONSE.redirect('./@@publisher-config-blacklist')
def test_item_can_be_added_to_blacklist(self):
    """Appending a path stores exactly one new blacklist entry."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    # The blacklist is empty by default.
    self.assertEqual(0, len(config.getPathBlacklist()))

    # Items can be added to the blacklist.
    config.appendPathToBlacklist('hans')
    entries = config.getPathBlacklist()
    self.assertIn('hans', entries)
    self.assertEqual(1, len(entries))
def test_ignoring_fields_of_field_data_adapter_plone_4(self):
    """Fields listed in the ignored-fields config are stripped from the
    AT field data adapter payload."""
    self.grant('Manager')
    folder = create(Builder('folder').titled(u'Foo'))

    data = encode_after_json(json.loads(Extractor()(folder, 'push', {})))
    self.assertIn('field_data_adapter', data)
    self.assertIn('description', data['field_data_adapter'])

    # ignore the folder's description field and extract again
    IConfig(self.portal).set_ignored_fields({'Folder': ['description']})
    data = encode_after_json(json.loads(Extractor()(folder, 'push', {})))
    self.assertNotIn('description', data['field_data_adapter'])
def execute_single_job(self, job):
    """Execute a single job without going through the __call__ view.

    The job is removed from whichever queue list currently holds it,
    executed, its json file is moved to the executed folder and the
    job is appended to the executed-jobs list.

    @param job: the publisher job to execute
    @return: result of appending the job to the executed list
    """
    self.logger = getLogger()
    self.error_logger = getErrorLogger()

    portal = self.context.portal_url.getPortalObject()
    self.config = IConfig(portal)
    self.queue = IQueue(portal)

    # remove job from queue (it may be pending or already executed)
    if job in self.queue.getJobs():
        self.queue.removeJob(job)
    elif job in self.queue.get_executed_jobs():
        self.queue.remove_executed_job(job)

    # execute it
    self.executeJob(job)

    # move json file
    job.move_jsonfile_to(self.config.get_executed_folder())

    # add to executed list
    return self.queue.append_executed_job(job)
def download(self, REQUEST=None, RESPONSE=None):
    """Download the saved data.

    When exactly one realm is configured and the parent is published,
    the saved form input of the remote site is fetched and merged with
    the local input; otherwise the plain local download is served.
    """
    url_tool = getToolByName(self, 'portal_url')
    config = IConfig(url_tool.getPortalObject())
    pub_state = getMultiAdapter((self, REQUEST), IPublisherContextState)
    realms = config.getRealms()
    download_format = getattr(self, 'DownloadFormat', 'csv')

    if len(realms) == 0 or not pub_state.is_parent_published():
        # no remote site involved: serve the local data directly
        if download_format == 'tsv':
            return self.download_tsv(REQUEST, RESPONSE)
        else:
            assert download_format == 'csv', 'Unknown download format'
            return self.download_csv(REQUEST, RESPONSE)

    elif len(realms) == 1:
        # fetch the remote saved input and concatenate the local input
        data = {'uid': self.UID(), 'download_format': download_format}
        return_data_realm = sendRequestToRealm(data, realms[0],
                                               'formgen_get_saved_data')
        return_data_this = self.getSavedFormInputForEdit()
        return_data = '{}{}'.format(return_data_realm, return_data_this)

        filename = self.id
        if filename.find('.') < 0:
            filename = '%s.%s' % (filename, download_format)
        header_value = contentDispositionHeader(
            'attachment', self.getCharset(), filename=filename)
        RESPONSE.setHeader("Content-Disposition", header_value)
        sep_type = download_format == 'csv' and 'comma' or 'tab'
        RESPONSE.setHeader("Content-Type", 'text/%s-separated-values;'
                           'charset=%s' % (sep_type, self.getCharset()))
        return return_data

    else:
        # more than one realm: we cannot know which one to fetch from
        messages = IStatusMessage(self.request)
        # NOTE(review): `self.request` / `self.context` look like
        # browser-view attributes on what appears to be a content class
        # (the request is passed in as REQUEST) — confirm these resolve
        # correctly via acquisition.
        messages.add(_(u"couldn't determine correct realm to fetch from."),
                     type=u"error")
        return RESPONSE.redirect(self.context.absolute_url())
def setUp(self):
    """Create a form folder with a save data adapter holding two rows
    of saved input, and configure a single publisher realm."""
    super(TestFormGenIntegration, self).setUp()
    self.portal = self.layer['portal']
    setRoles(self.portal, TEST_USER_ID, ['Manager'])

    self.wftool = getToolByName(self.portal, 'portal_workflow')
    self.wftool.setChainForPortalTypes(['FormFolder', 'Form Folder'],
                                       'publisher-example-workflow')

    self.formfolder = create(Builder('form folder'))
    self.save_data_adapter = create(
        Builder('save data adapter').within(self.formfolder))

    # two rows of pseudo saved form input
    self.data_string = "[email protected], Test, only a Test \n \
        [email protected], another Test, Still a Test"
    self.save_data_adapter.setSavedFormInput(self.data_string)

    config = IConfig(self.portal)
    config.appendRealm(Realm(1, 'http://site', 'foo', 'pw'))
def setUp(self):
    """Create a form folder with a save data adapter holding two rows
    of saved input, and configure a single publisher realm."""
    super(TestFormGenIntegration, self).setUp()
    self.portal = self.layer['portal']
    setRoles(self.portal, TEST_USER_ID, ['Manager'])

    self.wftool = getToolByName(self.portal, 'portal_workflow')
    self.wftool.setChainForPortalTypes(['FormFolder', 'Form Folder'],
                                       'publisher-example-workflow')

    self.formfolder = create(Builder('form folder'))
    self.save_data_adapter = create(Builder(
        'save data adapter').within(self.formfolder))

    # two rows of pseudo saved form input
    self.data_string = "[email protected], Test, only a Test \n \
        [email protected], another Test, Still a Test"
    self.save_data_adapter.setSavedFormInput(self.data_string)

    config = IConfig(self.portal)
    config.appendRealm(Realm(1, 'http://site', 'foo', 'pw'))
def __call__(self, object, action, additional_data):
    """
    Extracts the required data (action dependent) from a object for
    creating a Job.

    @param object: Plone Object to export data from
    @param action: Action to perform [push|delete]
    @type action: string
    @param additional_data: Additional infos.
    @type additional_data: dict
    @return: data (json "encoded")
    @rtype: string
    """
    self.object = object
    self.is_root = IPloneSiteRoot.providedBy(self.object)
    data = {}

    if action not in ['delete', 'move']:
        # collect data from all registered IDataCollector adapters,
        # sorted by adapter name for a stable order
        adapters = sorted(getAdapters((self.object, ), IDataCollector))
        for name, adapter in adapters:
            data[name] = adapter.getData()

    # gets the metadata, we dont use an adapter in this case,
    # cause metdata is the most important data-set we need
    data['metadata'] = self.getMetadata(action)

    # remove ignored fields of this portal_type from the payload
    portal = self.object.portal_url.getPortalObject()
    config = IConfig(portal)
    ignore = config.get_ignored_fields()
    for field_to_ignore in ignore.get(data['metadata']['portal_type'], ()):
        # AT:
        data.get('field_data_adapter', {}).pop(field_to_ignore, None)
        # DX:
        for schemata in data.get('dx_field_data_adapter', {}).values():
            schemata.pop(field_to_ignore, None)

    if action == 'move':
        data['move'] = additional_data['move_data']

    # convert to json
    jsondata = self.convertToJson(data)
    return jsondata
def __call__(self, object, action, additional_data):
    """
    Extracts the required data (action dependent) from a object for
    creating a Job.

    @param object: Plone Object to export data from
    @param action: Action to perform [push|delete]
    @type action: string
    @param additional_data: Additional infos.
    @type additional_data: dict
    @return: data (json "encoded")
    @rtype: string
    """
    self.object = object
    self.is_root = IPloneSiteRoot.providedBy(self.object)
    data = {}

    if action not in ['delete', 'move']:
        # collect data from all registered IDataCollector adapters,
        # sorted by adapter name for a stable order
        adapters = sorted(getAdapters((self.object, ), IDataCollector))
        for name, adapter in adapters:
            data[name] = adapter.getData()

    # gets the metadata, we dont use an adapter in this case,
    # cause metdata is the most important data-set we need
    data['metadata'] = self.getMetadata(action)

    # remove ignored fields of this portal_type from the payload
    portal = self.object.portal_url.getPortalObject()
    config = IConfig(portal)
    ignore = config.get_ignored_fields()
    for field_to_ignore in ignore.get(data['metadata']['portal_type'], ()):
        # AT:
        data.get('field_data_adapter', {}).pop(field_to_ignore, None)
        # DX:
        for schemata in data.get('dx_field_data_adapter', {}).values():
            schemata.pop(field_to_ignore, None)

    if action == 'move':
        data['move'] = additional_data['move_data']

    # convert to json
    jsondata = self.convertToJson(data)
    return jsondata
def __call__(self):
    """Execute the publishing queue.

    Handles logging setup, honours the publishing / locking
    configuration flags, fires the queue events and delegates the
    real work to execute().

    @return: the captured log output as a string, or a short status
        message when publishing is disabled or already running.
    """
    # get config and queue
    self.config = IConfig(self.context)
    portal = self.context.portal_url.getPortalObject()
    self.queue = IQueue(portal)
    event.notify(BeforeQueueExecutionEvent(portal, self.queue))

    # prepare logger
    self.logger = getLogger()
    self.error_logger = getErrorLogger()

    # is it allowed to publish?
    if not self.config.publishing_enabled():
        self.logger.warning('PUBLISHING IS DISABLED')
        return 'PUBLISHING IS DISABLED'

    if self.config.locking_enabled():
        self.logger.info('LOCKING IS ENABLED')
    else:
        self.logger.info('LOCKING IS DISABLED')

    # lock - check for locking flag; acquire(0) does not block
    if self.config.locking_enabled() and not self.get_lock_object().acquire(0):
        self.logger.warning('Already publishing')
        return 'Already publishing'

    # register our own logging handler for returning logs afterwards
    logStream = StringIO()
    logHandler = logging.StreamHandler(logStream)
    self.logger.addHandler(logHandler)
    # be sure to remove the handler!
    try:
        # execute queue
        self.execute()
    except BaseException:
        # Equivalent to the bare `except:` (which is lint-flagged):
        # cleanup must also run for KeyboardInterrupt/SystemExit since
        # we re-raise immediately afterwards.
        self.logger.removeHandler(logHandler)
        if self.config.locking_enabled():
            self.get_lock_object().release()
        # re-raise exception
        raise

    # get logs
    self.logger.removeHandler(logHandler)
    logStream.seek(0)
    log = logStream.read()
    del logStream
    del logHandler

    # unlock
    if self.config.locking_enabled():
        self.get_lock_object().release()

    event.notify(QueueExecutedEvent(portal, log))
    return log
def test_default_realms_config(self):
    """With no ZCML overrides the realm list starts empty and realms
    can be added through the config API."""
    portal = self.providing_stub([IPloneSiteRoot, IAttributeAnnotatable])
    portal.portal_url.getPortalObject.return_value = portal

    config = IConfig(portal)
    self.assertTrue(config)
    # no ZCML override in place, so realms are editable
    self.assertTrue(config.is_update_realms_possible())
    self.assertEquals(len(config.getRealms()), 0)

    config.appendRealm(Realm(1, 'http://site', 'foo', 'pw'))
    self.assertEquals(len(config.getRealms()), 1)
def send_to_receiver(self):
    """Publish the current settings to every configured realm."""
    payload = json.dumps(decode_for_json(self.get_settings_data()))
    config = IConfig(api.portal.get())
    for realm in config.getRealms():
        sendJsonToRealm(payload, realm,
                        'global_statusmessage_config_receiver')
def is_blacklisted(self, context=None, path=None):
    """Check whether the adapted context, the given `context` or the
    given `path` is blacklisted.

    A blacklist entry may end with `*`, which matches only within the
    same path segment: `/a/b*` matches `/a/bc` but not `/a/b/c`,
    while `/a/b/*` matches children too.

    @param context: object, catalog brain or path string (optional)
    @param path: path string (optional)
    @return: True if blacklisted, False otherwise
    """
    if context and path:
        raise ValueError(
            'Only one of `context` and `path` can be checked at once.')
    elif not context and not path:
        context = self.context
    elif not path and type(context) in (str, unicode):
        # a plain string was passed as `context`: treat it as a path
        path = context
        context = None

    if not path and isinstance(context, CatalogBrains.AbstractCatalogBrain):
        # context is a brain
        path = context.getPath()

    if not path:
        path = '/'.join(context.getPhysicalPath())

    # normalize: strip whitespace and a trailing slash
    path = path.strip()
    if path.endswith('/'):
        path = path[:-1]

    # check the path against every configured blacklist entry
    config = IConfig(self.portal)
    for blocked_path in config.getPathBlacklist():
        blocked_path = blocked_path.strip()
        if path == blocked_path:
            return True

        if blocked_path.endswith('*') and \
                path.startswith(blocked_path[:-1]):
            if path == blocked_path[:-1]:
                return True
            # length guard: a lone '*' entry would otherwise raise
            # IndexError on blocked_path[-2]
            elif len(blocked_path) >= 2 and blocked_path[-2] != '/' and \
                    path[len(blocked_path) - 1] == '/':
                # `/a/b*` must not match the child `/a/b/c`
                # NOTE(review): this returns early instead of checking
                # the remaining blacklist entries — confirm intended.
                return False
            else:
                return True

    return False
def test_overridden_realms_config(self):
    """A realm registered through the ``publisher:override-realm`` ZCML
    directive is listed, but the realm configuration becomes read-only.
    """
    self.layer.load_zcml_string(
        '\n'.join((
            '<configure xmlns:publisher="http://namespaces.' + \
            'zope.org/ftw.publisher">',
            ' <publisher:override-realm',
            ' url="http://localhost:9090/site"',
            ' username="******"',
            ' password="******" />',
            '</configure>'
            )))

    portal = self.providing_stub([IPloneSiteRoot, IAttributeAnnotatable])
    portal.portal_url.getPortalObject.return_value = portal

    config = IConfig(portal)
    self.assertTrue(config)
    # ZCML-defined realms cannot be changed through the web.
    self.assertFalse(config.is_update_realms_possible())
    self.assertEquals(len(config.getRealms()), 1)

    # attempting to modify an overridden realm config must fail
    with self.assertRaises(AttributeError):
        config.appendRealm(Realm(1, 'http://site', 'foo', 'pw'))
def handleAdd(self, action):
    """This handler handles a click on the "Add Realm"-Button.
    If no errors occured, it adds a new Realm to the Config.

    @param action: ActionInfo object provided by z3c.form
    @return: None (form is shown) or Response-redirect
    """
    data, errors = self.extractData()
    config = IConfig(self.context)
    if not errors:
        # Realms may be frozen by a ZCML override; adding is then a bug.
        assert config.is_update_realms_possible()
        # url + username has to be unique
        for realm in config.getRealms():
            if realm.url == data['url'] and \
                    realm.username == data['username']:
                self.statusMessage(
                    'This URL / Username combination already exists!',
                    'error')
                return
        realm = Realm(
            active=1 if data['active'] else 0,
            url=data['url'],
            username=data['username'],
            password=data['password'])
        config.appendRealm(realm)
        self.statusMessage('Added realm successfully')
        return self.request.RESPONSE.redirect('./@@publisher-config')
def test_item_can_be_added_to_blacklist(self):
    """Appending a path stores exactly one new blacklist entry."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    # The blacklist is empty by default.
    self.assertEqual(0, len(config.getPathBlacklist()))

    # Items can be added to the blacklist.
    config.appendPathToBlacklist('hans')
    entries = config.getPathBlacklist()
    self.assertIn('hans', entries)
    self.assertEqual(1, len(entries))
def download(self, REQUEST=None, RESPONSE=None):
    """Download the saved data.

    When exactly one realm is configured and the parent is published,
    the saved form input is fetched from the remote site; otherwise
    the plain local download is served.
    """
    url_tool = getToolByName(self, 'portal_url')
    config = IConfig(url_tool.getPortalObject())
    pub_state = getMultiAdapter((self, REQUEST), IPublisherContextState)
    realms = config.getRealms()
    download_format = getattr(self, 'DownloadFormat', 'csv')

    if len(realms) == 0 or not pub_state.is_parent_published():
        # no remote site involved: serve the local data directly
        if download_format == 'tsv':
            return self.download_tsv(REQUEST, RESPONSE)
        else:
            assert download_format == 'csv', 'Unknown download format'
            return self.download_csv(REQUEST, RESPONSE)

    elif len(realms) == 1:
        # fetch the saved input from the single remote realm
        data = {'uid': self.UID(), 'download_format': download_format}
        return_data = sendRequestToRealm(data, realms[0],
                                         'formgen_get_saved_data')
        filename = self.id
        if filename.find('.') < 0:
            filename = '%s.%s' % (filename, download_format)
        header_value = contentDispositionHeader(
            'attachment', self.getCharset(), filename=filename)
        RESPONSE.setHeader("Content-Disposition", header_value)
        sep_type = download_format == 'csv' and 'comma' or 'tab'
        RESPONSE.setHeader("Content-Type", 'text/%s-separated-values;'
                           'charset=%s' % (sep_type, self.getCharset()))
        return return_data

    else:
        # more than one realm: we cannot know which one to fetch from
        messages = IStatusMessage(self.request)
        # NOTE(review): `self.request` / `self.context` look like
        # browser-view attributes on what appears to be a content class
        # (the request is passed in as REQUEST) — confirm these resolve
        # correctly via acquisition.
        messages.add(_(u"couldn't determine correct realm to fetch from."),
                     type=u"error")
        return RESPONSE.redirect(self.context.absolute_url())
def is_blacklisted(self, context=None, path=None):
    """Check whether the adapted context, the given `context` or the
    given `path` is blacklisted.

    A blacklist entry may end with `*`, which matches only within the
    same path segment: `/a/b*` matches `/a/bc` but not `/a/b/c`,
    while `/a/b/*` matches children too.

    @param context: object, catalog brain or path string (optional)
    @param path: path string (optional)
    @return: True if blacklisted, False otherwise
    """
    if context and path:
        raise ValueError('Only one of `context` and `path` can be checked at once.')
    elif not context and not path:
        context = self.context
    elif not path and type(context) in (str, unicode):
        # a plain string was passed as `context`: treat it as a path
        path = context
        context = None

    if not path and isinstance(context, CatalogBrains.AbstractCatalogBrain):
        # context is a brain
        path = context.getPath()

    if not path:
        path = '/'.join(context.getPhysicalPath())

    # normalize: strip whitespace and a trailing slash
    path = path.strip()
    if path.endswith('/'):
        path = path[:-1]

    # check the path against every configured blacklist entry
    config = IConfig(self.portal)
    for blocked_path in config.getPathBlacklist():
        blocked_path = blocked_path.strip()
        if path == blocked_path:
            return True

        if blocked_path.endswith('*') and \
                path.startswith(blocked_path[:-1]):
            if path == blocked_path[:-1]:
                return True
            # length guard: a lone '*' entry would otherwise raise
            # IndexError on blocked_path[-2]
            elif len(blocked_path) >= 2 and blocked_path[-2] != '/' and \
                    path[len(blocked_path) - 1] == '/':
                # `/a/b*` must not match the child `/a/b/c`
                # NOTE(review): this returns early instead of checking
                # the remaining blacklist entries — confirm intended.
                return False
            else:
                return True

    return False
class PathBlacklistView(BrowserView):
    """Configlet view managing the publisher path blacklist: renders
    the blacklist table and add form, and handles entry deletion."""

    def __init__(self, *args, **kwargs):
        super(PathBlacklistView, self).__init__(*args, **kwargs)
        # resolve portal and config once per request
        self.portal = self.context.portal_url.getPortalObject()
        self.config = IConfig(self.portal)

    def __call__(self, *args, **kwargs):
        """Handle an optional `delete` request parameter, otherwise
        render the view template."""
        delete = self.request.get('delete', None)
        if delete:
            if self.config.removePathFromBlacklist(delete):
                msg = _(u'info_path_removed',
                        default=u'Removed path ${path} from blacklist',
                        mapping={'path': delete})
                IStatusMessage(self.request).addStatusMessage(msg, type='info')
            return self.request.RESPONSE.redirect('./@@publisher-config-blacklist')
        return super(PathBlacklistView, self).__call__(*args, **kwargs)

    def render_table(self):
        """Render the blacklist entries with ftw.tablegenerator."""
        generator = getUtility(ITableGenerator, 'ftw.tablegenerator')
        return generator.generate(self._table_rows(), self._table_columns())

    def render_add_form(self):
        """Render the z3c.form for adding a new blacklist path."""
        z2.switch_on(self)
        form = AddPathForm(self.context, self.request)
        return form()

    def _table_rows(self):
        # NOTE(review): the path is interpolated into HTML without
        # escaping — confirm blacklist entries are admin-controlled.
        for path in self.config.getPathBlacklist():
            yield {
                'Path': path,
                '': '<a href="./@@publisher-config-blacklist?delete=%s">Delete</a>' % \
                    path,
                }

    def _table_columns(self):
        # column headings for the generated table
        return ('Path', '')
def test_blacklist_does_not_affect_parents(self):
    """Blacklisting a deep path must leave its ancestors publishable."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    config.appendPathToBlacklist('/foo/bar/one/two/three')

    blacklist = IPathBlacklist(self.portal)
    # ancestors of the blacklisted path stay publishable
    for ancestor in ('/foo/bar', '/foo/bar/one/two'):
        self.assertFalse(blacklist.is_blacklisted(ancestor))
    # only the exact path itself is blocked
    self.assertTrue(blacklist.is_blacklisted('/foo/bar/one/two/three'))
def test_blacklisting_intermediate_wildcards_does_not_affect_parents(self):
    """A `*` in the middle of an entry is not treated as a wildcard."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    config.appendPathToBlacklist('/demo/one*two')

    blacklist = IPathBlacklist(self.portal)
    # neither the parent, a same-segment sibling nor a child matches
    for candidate in ('/demo/one', '/demo/oneXtwo', '/demo/one/two'):
        self.assertFalse(blacklist.is_blacklisted(candidate))
class PathBlacklistView(BrowserView):
    """Configlet view managing the publisher path blacklist: renders
    the blacklist table and add form, and handles entry deletion."""

    def __init__(self, *args, **kwargs):
        super(PathBlacklistView, self).__init__(*args, **kwargs)
        # resolve portal and config once per request
        self.portal = self.context.portal_url.getPortalObject()
        self.config = IConfig(self.portal)

    def __call__(self, *args, **kwargs):
        """Handle an optional `delete` request parameter, otherwise
        render the view template."""
        delete = self.request.get('delete', None)
        if delete:
            if self.config.removePathFromBlacklist(delete):
                msg = _(u'info_path_removed',
                        default=u'Removed path ${path} from blacklist',
                        mapping={'path': delete})
                IStatusMessage(self.request).addStatusMessage(msg, type='info')
            return self.request.RESPONSE.redirect(
                './@@publisher-config-blacklist')
        return super(PathBlacklistView, self).__call__(*args, **kwargs)

    def render_table(self):
        """Render the blacklist entries with ftw.tablegenerator."""
        generator = getUtility(ITableGenerator, 'ftw.tablegenerator')
        return generator.generate(self._table_rows(), self._table_columns())

    def render_add_form(self):
        """Render the z3c.form for adding a new blacklist path."""
        z2.switch_on(self)
        form = AddPathForm(self.context, self.request)
        return form()

    def _table_rows(self):
        # NOTE(review): the path is interpolated into HTML without
        # escaping — confirm blacklist entries are admin-controlled.
        for path in self.config.getPathBlacklist():
            yield {
                'Path': path,
                '': '<a href="./@@publisher-config-blacklist?delete=%s">Delete</a>' % \
                    path,
                }

    def _table_columns(self):
        # column headings for the generated table
        return ('Path', '')
def test_portal_can_be_blacklisted(self):
    """The portal object itself can be put on the blacklist."""
    config = IConfig(self.portal)
    config.setPathBlacklist(PersistentList())
    blacklist = IPathBlacklist(self.portal)

    # The portal is not blacklisted by default.
    self.assertFalse(blacklist.is_blacklisted(self.portal))

    # Blacklisting the portal's own path blocks the portal object.
    portal_path = '/'.join(self.portal.getPhysicalPath())
    config.appendPathToBlacklist(portal_path)
    self.assertTrue(blacklist.is_blacklisted(self.portal))
def test_default_realms_config(self):
    """With no ZCML overrides the realm list starts empty and realms
    can be added through the config API."""
    portal = self.providing_stub([IPloneSiteRoot, IAttributeAnnotatable])
    self.expect(portal.portal_url.getPortalObject()).result(portal)
    self.replay()

    config = IConfig(portal)
    self.assertTrue(config)
    # no ZCML override in place, so realms are editable
    self.assertTrue(config.is_update_realms_possible())
    self.assertEquals(len(config.getRealms()), 0)

    config.appendRealm(Realm(1, 'http://site', 'foo', 'pw'))
    self.assertEquals(len(config.getRealms()), 1)
def execute_single_job(self, job):
    """Execute a single job without going through the __call__ view.

    The job is removed from whichever queue list currently holds it,
    executed, its json file is moved to the executed folder and the
    job is appended to the executed-jobs list.

    @param job: the publisher job to execute
    @return: result of appending the job to the executed list
    """
    self.logger = getLogger()
    self.error_logger = getErrorLogger()

    portal = self.context.portal_url.getPortalObject()
    self.config = IConfig(portal)
    self.queue = IQueue(portal)

    # remove job from queue (it may be pending or already executed)
    if job in self.queue.getJobs():
        self.queue.removeJob(job)
    elif job in self.queue.get_executed_jobs():
        self.queue.remove_executed_job(job)

    # execute it
    self.executeJob(job)

    # move json file
    job.move_jsonfile_to(self.config.get_executed_folder())

    # add to executed list
    return self.queue.append_executed_job(job)
class PublisherConfigletView(BrowserView):
    """Base view for the publisher configlet, providing realm lookup
    and status message helpers."""

    def __init__(self, *args, **kwargs):
        super(PublisherConfigletView, self).__init__(*args, **kwargs)
        # resolve config and queue once per request
        self.config = IConfig(self.context)
        self.queue = IQueue(self.context)

    def makeRealmId(self, realm):
        """Return a stable id for a realm, derived from url + username."""
        # NOTE: uses the legacy `md5` module (Python 2 only)
        return md5.md5('%s-%s' % (realm.url, realm.username)).hexdigest()

    def getRealmById(self, id):
        """Return the realm whose computed id matches `id`, or None."""
        for realm in self.config.getRealms():
            if self.makeRealmId(realm)==id:
                return realm
        return None

    def statusMessage(self, message, type='info'):
        """Add a status message of the given type to the request."""
        IStatusMessage(self.request).addStatusMessage(
            message, type=type)
class PublisherConfigletView(BrowserView):
    """Base view for the publisher configlet, providing realm lookup
    and status message helpers."""

    def __init__(self, *args, **kwargs):
        super(PublisherConfigletView, self).__init__(*args, **kwargs)
        # resolve config and queue once per request
        self.config = IConfig(self.context)
        self.queue = IQueue(self.context)

    def makeRealmId(self, realm):
        """Return a stable id for a realm, derived from url + username."""
        # NOTE: uses the legacy `md5` module (Python 2 only)
        return md5.md5('%s-%s' % (realm.url, realm.username)).hexdigest()

    def getRealmById(self, id):
        """Return the realm whose computed id matches `id`, or None."""
        for realm in self.config.getRealms():
            if self.makeRealmId(realm)==id:
                return realm
        return None

    def statusMessage(self, message, type='info'):
        """Add a status message of the given type to the request."""
        IStatusMessage(self.request).addStatusMessage(
            message, type=type)
def __init__(self, *args, **kwargs):
    """Set up the view and look up the config / queue adapters on the
    context once, for use by the form handlers.
    """
    super(PublisherConfigletView, self).__init__(*args, **kwargs)
    context = self.context
    self.config = IConfig(context)
    self.queue = IQueue(context)
def get_realm_by_id(self, id):
    """Return the configured realm whose generated id matches *id*,
    or None when no realm matches.
    """
    # Realms are stored on the portal, not on the local context.
    portal = self.context.portal_url.getPortalObject()
    realms = IConfig(portal).getRealms()
    matches = (realm for realm in realms
               if self.make_realm_id(realm) == id)
    return next(matches, None)
def __init__(self, *args, **kwargs):
    """Initialize the view, caching the portal and its publisher config."""
    super(PathBlacklistView, self).__init__(*args, **kwargs)
    portal = self.context.portal_url.getPortalObject()
    self.portal = portal
    self.config = IConfig(portal)
def __init__(self, *args, **kwargs):
    """Initialize the view; cache the portal and its publisher config."""
    super(PathBlacklistView, self).__init__(*args, **kwargs)
    # The blacklist configuration lives on the Plone site root, not on
    # the local context, so resolve the portal once here.
    self.portal = self.context.portal_url.getPortalObject()
    self.config = IConfig(self.portal)
def __init__(self, *args, **kwargs):
    """Initialize the configlet view; adapt the context to config/queue."""
    super(PublisherConfigletView, self).__init__(*args, **kwargs)
    # Looked up once here; the form handlers below use these adapters.
    self.config = IConfig(self.context)
    self.queue = IQueue(self.context)
class ExecuteQueue(BrowserView):
    """Executes the Queue and sends all Jobs to the target realms.
    """

    def execute_single_job(self, job):
        """ Executes a single job without calling the view.

        The job is removed from whichever list currently holds it
        (pending or executed), executed, then archived in the executed
        list.
        """
        self.logger = getLogger()
        self.error_logger = getErrorLogger()
        portal = self.context.portal_url.getPortalObject()
        self.config = IConfig(portal)
        self.queue = IQueue(portal)

        # remove job from queue (or from the executed list on a re-run)
        if job in self.queue.getJobs():
            self.queue.removeJob(job)
        elif job in self.queue.get_executed_jobs():
            self.queue.remove_executed_job(job)

        # execute it
        self.executeJob(job)

        # move json file to the executed-jobs folder
        job.move_jsonfile_to(self.config.get_executed_folder())

        # add to executed list
        return self.queue.append_executed_job(job)

    def __call__(self):
        """ Handles logging purposes and calls execute() method.

        Returns the captured log output as a string, or a short status
        string when publishing is disabled / already running.
        """
        # get config and queue
        self.config = IConfig(self.context)
        portal = self.context.portal_url.getPortalObject()
        self.queue = IQueue(portal)
        event.notify(BeforeQueueExecutionEvent(portal, self.queue))

        # prepare logger
        self.logger = getLogger()
        self.error_logger = getErrorLogger()

        # is it allowed to publish?
        if not self.config.publishing_enabled():
            self.logger.warning('PUBLISHING IS DISABLED')
            return 'PUBLISHING IS DISABLED'

        if self.config.locking_enabled():
            self.logger.info('LOCKING IS ENABLED')
        else:
            self.logger.info('LOCKING IS DISABLED')

        # lock - check for locking flag; acquire(0) is non-blocking, so a
        # concurrent run bails out instead of waiting.
        if self.config.locking_enabled(
        ) and not self.get_lock_object().acquire(0):
            self.logger.warning('Already publishing')
            return 'Already publishing'

        # register our own logging handler for returning logs afterwards
        logStream = StringIO()
        logHandler = logging.StreamHandler(logStream)
        self.logger.addHandler(logHandler)
        # be sure to remove the handler!
        try:
            # execute queue
            self.execute()
        except Exception:
            # clean up handler and lock before propagating the failure
            self.logger.removeHandler(logHandler)
            if self.config.locking_enabled():
                self.get_lock_object().release()
            # re-raise exception
            raise

        # get logs
        self.logger.removeHandler(logHandler)
        logStream.seek(0)
        log = logStream.read()
        del logStream
        del logHandler

        # unlock
        if self.config.locking_enabled():
            self.get_lock_object().release()

        event.notify(QueueExecutedEvent(portal, log))
        return log

    def get_lock_object(self):
        """Return the class-wide lock guarding queue execution, creating
        it lazily on first use.
        """
        # Stored on the class so all instances share the same lock.
        if getattr(self.__class__, '_lock', None) is None:
            self.__class__._lock = RLock()
        return self.__class__._lock

    def getActiveRealms(self):
        """
        @return: a list of active Realms
        @rtype: list
        """
        # Cached per view instance for the duration of one execution.
        if '_activeRealms' not in dir(self):
            self._activeRealms = [
                r for r in self.config.getRealms() if r.active
            ]
        return self._activeRealms

    def execute(self):
        """ Executes the jobs from the queue.
        @return: None
        """
        jobs = self.queue.countJobs()
        self.queue.move_to_worker_queue()
        self.logger.info('Executing Queue: %i of %i objects to %i realms' % (
            jobs,
            self.queue.countJobs(),
            len(self.getActiveRealms()),
        ))
        while len(self.queue.get_worker_queue()):
            job = self.queue.popJob()
            # Skip jobs whose json payload has disappeared.
            if not job.json_file_exists():
                continue
            try:
                # execute job
                self.executeJob(job)
            except (ConflictError, Retry):
                # let Zope handle retries
                raise
            except URLError:
                raise
            except ReceiverTimeoutError:
                raise
            except Exception:
                # print the exception to the publisher error log and
                # remember it on the job; execution continues with the
                # next job.
                exc = ''.join(traceback.format_exception(*sys.exc_info()))
                self.error_logger.error(exc)
                job.executed_exception = exc
            # Archive the job (successful or failed) and persist after
            # each job.
            job.move_jsonfile_to(self.config.get_executed_folder())
            self.queue.append_executed_job(job)
            transaction.commit()

    def executeJob(self, job):
        """ Executes a Job: sends the job to all available realms.
        @param job: Job object to execute
        @type job: Job
        """
        objTitle = job.objectTitle
        if isinstance(objTitle, unicode):
            objTitle = objTitle.encode('utf8')

        # is the object blacklisted?
        if IPathBlacklist(self.context).is_blacklisted(job.objectPath):
            self.logger.error('blacklisted: "%s" on "%s" (at %s | UID %s)' % (
                job.action,
                objTitle,
                job.objectPath,
                job.objectUID,
            ))
            self.error_logger.error(
                'blacklisted: "%s" on "%s" (at %s | UID %s)' % (
                    job.action,
                    objTitle,
                    job.objectPath,
                    job.objectUID,
                ))
            return False

        # get data from chache file
        state = None
        json = job.getData()
        self.logger.info('-' * 100)
        self.logger.info('executing "%s" on "%s" (at %s | UID %s)' % (
            job.action,
            objTitle,
            job.objectPath,
            job.objectUID,
        ))
        self.logger.info('... request data length: %i' % len(json))
        state_entries = {'date': datetime.now()}
        for realm in self.getActiveRealms():
            self.logger.info('... to realm %s' % (realm.url, ))
            # send data to each realm
            state = sendJsonToRealm(json, realm, 'publisher.receive')
            if isinstance(state, states.ErrorState):
                self.logger.error('... got result: %s' % state.toString())
                self.error_logger.error(
                    'executing "%s" on "%s" (at %s | UID %s)' % (
                        job.action,
                        objTitle,
                        job.objectPath,
                        job.objectUID,
                    ))
                self.error_logger.error('... got result: %s' % state.toString())
            else:
                self.logger.info('... got result: %s' % state.toString())
            state_entries[realm] = state
        job.executed_with_states(state_entries)

        # fire AfterPushEvent
        obj = uuidToObject(job.objectUID)
        # NOTE(review): ``state`` here is the state of the LAST realm
        # only; the event does not carry per-realm states.
        if state is not None:
            event.notify(AfterPushEvent(obj, state, job))
class ExecuteQueue(BrowserView):
    """Executes the Queue and sends all Jobs to the target realms.
    """

    def execute_single_job(self, job):
        """ Executes a single job without calling the view.

        The job is removed from whichever list currently holds it
        (pending or executed), executed, then archived in the executed
        list.
        """
        self.logger = getLogger()
        self.error_logger = getErrorLogger()
        portal = self.context.portal_url.getPortalObject()
        self.config = IConfig(portal)
        self.queue = IQueue(portal)

        # remove job from queue (or from the executed list on a re-run)
        if job in self.queue.getJobs():
            self.queue.removeJob(job)
        elif job in self.queue.get_executed_jobs():
            self.queue.remove_executed_job(job)

        # execute it
        self.executeJob(job)

        # move json file to the executed-jobs folder
        job.move_jsonfile_to(self.config.get_executed_folder())

        # add to executed list
        return self.queue.append_executed_job(job)

    def __call__(self):
        """ Handles logging purposes and calls execute() method.

        Returns the captured log output as a string, or a short status
        string when publishing is disabled / already running.
        """
        # get config and queue
        self.config = IConfig(self.context)
        portal = self.context.portal_url.getPortalObject()
        self.queue = IQueue(portal)
        event.notify(BeforeQueueExecutionEvent(portal, self.queue))

        # prepare logger
        self.logger = getLogger()
        self.error_logger = getErrorLogger()

        # is it allowed to publish?
        if not self.config.publishing_enabled():
            self.logger.warning('PUBLISHING IS DISABLED')
            return 'PUBLISHING IS DISABLED'

        if self.config.locking_enabled():
            self.logger.info('LOCKING IS ENABLED')
        else:
            self.logger.info('LOCKING IS DISABLED')

        # lock - check for locking flag; acquire(0) is non-blocking, so a
        # concurrent run bails out instead of waiting.
        if self.config.locking_enabled() and not self.get_lock_object().acquire(0):
            self.logger.warning('Already publishing')
            return 'Already publishing'

        # register our own logging handler for returning logs afterwards
        logStream = StringIO()
        logHandler = logging.StreamHandler(logStream)
        self.logger.addHandler(logHandler)
        # be sure to remove the handler!
        try:
            # execute queue
            self.execute()
        # Was a bare ``except:``; narrowed to ``Exception`` so that
        # SystemExit / KeyboardInterrupt are not intercepted here.
        except Exception:
            self.logger.removeHandler(logHandler)
            if self.config.locking_enabled():
                self.get_lock_object().release()
            # re-raise exception
            raise

        # get logs
        self.logger.removeHandler(logHandler)
        logStream.seek(0)
        log = logStream.read()
        del logStream
        del logHandler

        # unlock
        if self.config.locking_enabled():
            self.get_lock_object().release()

        event.notify(QueueExecutedEvent(portal, log))
        return log

    def get_lock_object(self):
        """Return the class-wide lock guarding queue execution, creating
        it lazily on first use.  Stored on the class so all instances
        share the same lock.
        """
        # ``is None`` instead of ``== None``: identity test for the sentinel.
        if getattr(self.__class__, '_lock', None) is None:
            self.__class__._lock = RLock()
        return self.__class__._lock

    def getActiveRealms(self):
        """
        @return: a list of active Realms
        @rtype: list
        """
        # Cached per view instance for the duration of one execution.
        if '_activeRealms' not in dir(self):
            self._activeRealms = [r for r in self.config.getRealms()
                                  if r.active]
        return self._activeRealms

    def execute(self):
        """ Executes the jobs from the queue.
        @return: None
        """
        jobs = self.queue.countJobs()
        self.queue.move_to_worker_queue()
        self.logger.info('Executing Queue: %i of %i objects to %i realms' % (
            jobs,
            self.queue.countJobs(),
            len(self.getActiveRealms()),
        ))
        while len(self.queue.get_worker_queue()):
            job = self.queue.popJob()
            # Skip jobs whose json payload has disappeared.
            if not job.json_file_exists():
                continue
            try:
                # execute job
                self.executeJob(job)
            except (ConflictError, Retry):
                # let Zope handle retries
                raise
            except URLError:
                raise
            except ReceiverTimeoutError:
                raise
            # Was a bare ``except:``; catching ``Exception`` keeps the
            # best-effort per-job error handling without swallowing
            # SystemExit / KeyboardInterrupt.
            except Exception:
                # print the exception to the publisher error log and
                # remember it on the job; execution continues.
                exc = ''.join(traceback.format_exception(*sys.exc_info()))
                self.error_logger.error(exc)
                job.executed_exception = exc
            # Archive the job (successful or failed) and persist after
            # each job.
            job.move_jsonfile_to(self.config.get_executed_folder())
            self.queue.append_executed_job(job)
            transaction.commit()

    def executeJob(self, job):
        """ Executes a Job: sends the job to all available realms.
        @param job: Job object to execute
        @type job: Job
        """
        objTitle = job.objectTitle
        if isinstance(objTitle, unicode):
            objTitle = objTitle.encode('utf8')

        # is the object blacklisted?
        if IPathBlacklist(self.context).is_blacklisted(job.objectPath):
            self.logger.error('blacklisted: "%s" on "%s" (at %s | UID %s)' % (
                job.action,
                objTitle,
                job.objectPath,
                job.objectUID,
            ))
            self.error_logger.error(
                'blacklisted: "%s" on "%s" (at %s | UID %s)' % (
                    job.action,
                    objTitle,
                    job.objectPath,
                    job.objectUID,
                ))
            return False

        # get data from chache file
        state = None
        json = job.getData()
        self.logger.info('-' * 100)
        self.logger.info('executing "%s" on "%s" (at %s | UID %s)' % (
            job.action,
            objTitle,
            job.objectPath,
            job.objectUID,
        ))
        self.logger.info('... request data length: %i' % len(json))
        state_entries = {'date': datetime.now()}
        for realm in self.getActiveRealms():
            self.logger.info('... to realm %s' % (
                realm.url,
            ))
            # send data to each realm
            state = sendJsonToRealm(json, realm, 'publisher.receive')
            if isinstance(state, states.ErrorState):
                self.logger.error('... got result: %s' % state.toString())
                self.error_logger.error(
                    'executing "%s" on "%s" (at %s | UID %s)' % (
                        job.action,
                        objTitle,
                        job.objectPath,
                        job.objectUID,
                    ))
                self.error_logger.error('... got result: %s' % state.toString())
            else:
                self.logger.info('... got result: %s' % state.toString())
            state_entries[realm] = state
        job.executed_with_states(state_entries)

        # fire AfterPushEvent
        reference_catalog = getToolByName(self.context, 'reference_catalog')
        obj = reference_catalog.lookupObject(job.objectUID)
        # NOTE(review): ``state`` here is the state of the LAST realm
        # only; the event does not carry per-realm states.
        if state is not None:
            event.notify(AfterPushEvent(obj, state, job))
def getRealmById(self, id):
    """Return the configured realm whose generated id matches *id*,
    or None when no realm matches.
    """
    realms = IConfig(self.context).getRealms()
    matches = (realm for realm in realms if self.makeRealmId(realm) == id)
    return next(matches, None)
def download(self, response, delimiter=""):
    """This patch combines the data from the sender installation and the
    receiver installation to one csv / tsv.
    We assume that the form config is the same on both sides.

    The concept is to first execute the standard implementation, which
    sets the response headers and streams the local data.
    If the context is considered public, the second step is to get the
    data from the remote realms and append it to the response body (with
    response.write). This will combine the data of the all sites in a
    streamed http response.

    This implementation also works in local development with two plone
    sites, assuming that the receiver side does not have any realms
    configured.
    """
    site = getSite()
    realms = IConfig(site).getRealms()
    # No realms configured: nothing to combine, local behavior only.
    if not realms:
        return

    if site.REQUEST.get('is_publisher', None):
        # Prevent endless loop if sender and receiver are running on the
        # same machine
        self._old_download(response, delimiter)
        return

    # Only combine when the context's parent is published on the
    # receiver side.
    pub_state = getMultiAdapter((get_context(self), response),
                                IPublisherContextState)
    if not pub_state.is_parent_published():
        return

    # Build the path of the remote @@data view relative to the site root.
    site_path = '/'.join(site.getPhysicalPath())
    context_path = '/'.join(get_context(self).getPhysicalPath())
    relative_path = os.path.relpath(context_path, site_path)
    view_path = '/'.join(
        (relative_path, '@@actions', self.__name__, '@@data'))

    is_xlsx = getattr(self, 'DownloadFormat', 'tsv') == 'xlsx'
    is_csv = getattr(self, 'DownloadFormat', 'tsv') == 'csv'
    if is_csv and len(delimiter) == 0:
        delimiter = ','

    # For csv/tsv the local rows are streamed first; xlsx needs the
    # local workbook combined with the remote one instead.
    if not is_xlsx:
        self._old_download(response, delimiter)

    for realm in realms:
        try:
            remote_response = sendRequestToRealm(
                getSite().REQUEST.form.copy(), realm,
                view_path.lstrip('/') + '?is_publisher=1',
                return_response=True)
        except HTTPError:
            remote_response = False  # Nothing to combine

        if remote_response and remote_response.code != 200:
            raise ValueError(
                'Bad response from remote realm ({} {}): {!r}..'.format(
                    remote_response.code,
                    remote_response.msg,
                    remote_response.read(100),
                ))

        # NOTE(review): when the request raised HTTPError,
        # ``remote_response`` is False here and the branches below still
        # run (``combine_excel(..., False, ...)`` / ``False.read()``) —
        # verify this path is intended to be handled by combine_excel
        # or should be skipped.
        try:
            if is_xlsx:
                use_title_row = getattr(self, "UseColumnNames", False)
                local_excel = self.get_saved_form_input_as_xlsx(use_title_row)
                combine_excel(response, local_excel, remote_response,
                              use_title_row)
            else:
                response.write(remote_response.read())
        except ImportError:
            raise Exception(
                'Was not able to combine excel, since openpyxl is missing')
        finally:
            if remote_response:
                remote_response.close()