def handleUpload(self, action):
    """Import an uploaded RI&E file and start a new survey session from it.

    Validates the form, parses the uploaded file, locates the matching
    survey, rebuilds the session tree and answers, then redirects to the
    first identification question.  Raises WidgetActionExecutionError on
    the "file" widget for any validation failure.  (User-facing messages
    are intentionally in Dutch.)
    """
    (data, errors) = self.extractData()
    if errors:
        # z3c.form has already attached error messages; nothing to do.
        return
    input = self.parseInput(data["file"].data)
    if input is None:
        # Upload could not be parsed as a session file.
        raise WidgetActionExecutionError(
            "file",
            Invalid(_("error_invalid_session_file",
                      default=u"Geen valide RI&E bestand.")))
    survey = self.findSurvey(input)
    if survey is None:
        # The imported data references a survey not present on this site.
        raise WidgetActionExecutionError(
            "file",
            Invalid(_("error_unknown_survey",
                      default=u"De gebruikte vragenlijst bestaat niet op deze site.")))
    session = SessionManager.start(
        attr_unicode(input, "rienaam", u"RI&E import"), survey)
    self.updateCompany(input, session)
    (profile, keuzemap) = self.buildProfile(input, survey, session)
    BuildSurveyTree(survey, profile, session)
    self.updateAnswers(input, keuzemap, survey, session)
    question = FindFirstQuestion(dbsession=session)
    if question is None:
        # Nothing left to ask: survey diverged too much from the import.
        # Doom the transaction so the partially-built session is not saved.
        transaction.get().doom()
        raise WidgetActionExecutionError(
            "file",
            Invalid(u"Deze RI&E is helaas teveel veranderd om te kunnen gebruiken."))
    self.request.response.redirect(
        QuestionURL(survey, question, phase="identification"))
def checkNonASCIITransactionMetadata(self):
    # Verify the database stores and retrieves non-ASCII text
    # in transaction metadata.
    ugly_string = ''.join(chr(c) for c in range(256))
    if not isinstance(ugly_string, bytes):
        # Py3: str of all code points 0..255; latin-1 maps them 1:1 to bytes,
        # which is what the storage layer is expected to hand back.
        check_string = ugly_string.encode("latin-1")
    else:
        # Py2: chr() already produced a byte string.
        check_string = ugly_string
    db = DB(self._storage)
    try:
        c1 = db.open()
        r1 = c1.root()
        r1['alpha'] = 1
        transaction.get().setUser(ugly_string)
        transaction.commit()
        r1['alpha'] = 2
        transaction.get().note(ugly_string)
        transaction.commit()
        # undoInfo() is newest-first: info[0] is the note() commit,
        # info[1] the setUser() commit.  setUser prepends the path,
        # hence the b'/ ' prefix on user_name.
        info = self._storage.undoInfo()
        self.assertEqual(info[0]['description'], check_string)
        self.assertEqual(info[1]['user_name'], b'/ ' + check_string)
    finally:
        db.close()
def render(self):
    """Rewrite the statistics_surveys table from getSurveysInfo().

    Deletes all rows, re-inserts current survey info, commits, and returns
    a pretty-printed report of what was written.
    """
    dbtable_surveys = 'statistics_surveys'
    info_surveys = self.getSurveysInfo()
    # write to db
    session = Session()
    session.execute('''DELETE FROM %s;''' % dbtable_surveys)

    def clean(value):
        # Normalise text values to UTF-8 encoded byte strings (Python 2:
        # basestring covers str and unicode).
        if isinstance(value, basestring):
            return safe_unicode(value).strip().encode('utf-8')
        return value

    def pg_format(value):
        # Render a value as a PostgreSQL literal.
        # NOTE(review): values are interpolated without quoting/escaping —
        # a single quote inside a survey title would break the statement.
        # Acceptable only because the data comes from getSurveysInfo(),
        # not from user input; parameterised queries would be safer.
        if value is None:
            return 'NULL'
        if isinstance(value, datetime):
            return "TIMESTAMP '%s'" % value.isoformat()
        return "'%s'" % value

    for line in info_surveys:
        insert = '''INSERT INTO %s VALUES %s;''' % \
            (dbtable_surveys,
             '(%s)' % ', '.join(map(pg_format, map(clean, line))))
        session.execute(insert)
    # Tell zope.sqlalchemy the session has raw-SQL changes, then commit.
    datamanager.mark_changed(session)
    transaction.get().commit()
    from pprint import pformat
    return "Written:\n" + pformat(info_surveys)
def view_user(context, request):
    """Display/edit a user object (Python 2 code — old except syntax).

    On POST with write permission: validates the deform form, applies
    fullname/email/password changes, renames the object if the userid
    changed, notes the edit on the transaction and redirects back to
    where the visitor came from.
    """
    # Remember the referrer once, so we can redirect back after saving.
    if request.referrer and 'came_from' not in request.session:
        request.session['came_from'] = request.referrer
    editable = has_permission(WRITE, context, request)
    schema = UserSchema().bind(context=context)
    form = deform.Form(schema, buttons=(_('Save changes'),))
    if editable and request.method == 'POST':
        try:
            # Capture the name *before* edits for the transaction note.
            note = 'Edited user: %s.' % context.fullname
            data = form.validate(request.params.items())
            context.fullname = data.get('fullname', context.fullname)
            context.email = data.get('email', context.email)
            if data['password']:
                note += ' Changed password.'
                context.set_password(data['password'])
            if data['userid'] != context.__name__:
                # Rename: move the object under its new id in the folder.
                folder = context.__parent__
                del folder[context.__name__]
                folder[data['userid']] = context
            redirect_to = request.session.pop(
                'came_from', request.application_url)
            transaction.get().note(note)
            return HTTPFound(redirect_to)
        # deform idiom: the ValidationFailure *is* the re-renderable form;
        # rebinding `form` here is deliberate.
        except deform.ValidationFailure, form:
            rendered = HTML(form.render())
def _handleSave(self, action, msg='Saved form data'):
    """Validate and persist submitted form data, grouped by fieldset.

    Returns False when only validating (errors, definition context, or
    already saved); True after a successful save.  Python 2 code:
    ``filter`` returning a list is relied upon.
    """
    self.save_attempt = True
    data, errors = self.extractData()
    if errors or IFormDefinition.providedBy(self.context) or self.saved:
        return False  # just validate if errors, or if context if defn
    # NOTE: the guard above already returned when self.saved is true, so
    # this condition is always true here; kept for safety/readability.
    if not self.saved:
        result = {}  # submitted data. k: group name; v: dict of name/value
        group_keys = []
        for group in self.groups:
            groupdata = {}
            form_group_data = group.extractData()[0]
            for name, _field in group.fields.items():
                group_keys.append(name)
                fieldname = _field.field.__name__
                default = getattr(_field.field, 'default', None)
                field_data = form_group_data.get(name, default)
                if ICollection.providedBy(_field.field):
                    if isinstance(_field.field.value_type, DictRow):
                        # Drop data-grid rows whose cells are all empty.
                        is_nonempty_row = lambda v: any(v.values())
                        field_data = filter(is_nonempty_row, field_data)
                groupdata[fieldname] = field_data
            result[group.__name__] = groupdata
        # filter default fieldset values, ignore group values in data dict:
        result[''] = dict([(k, v) for k, v in data.items()
                           if k not in group_keys])
        self._saveResult(result)
    self.saved = True
    history_log(self.context, message=msg, set_modified=True)
    notify(ObjectModifiedEvent(self.context))
    transaction.get().note(msg)
    self._status.addStatusMessage('Saved form data', type='info')
    return True
def checkPackWithGCOnDestinationAfterRestore(self):
    # After copying transactions to the destination storage and packing
    # it, garbage-collected objects must be gone while the root survives.
    raises = self.assertRaises
    db = DB(self._storage)
    conn = db.open()
    root = conn.root()
    # Build a chain root -> obj1 -> obj2, then unlink it again.
    root.obj = obj1 = MinPO(1)
    txn = transaction.get()
    txn.note("root -> obj")
    txn.commit()
    root.obj.obj = obj2 = MinPO(2)
    txn = transaction.get()
    txn.note("root -> obj -> obj")
    txn.commit()
    del root.obj
    txn = transaction.get()
    txn.note("root -X->")
    txn.commit()
    # Now copy the transactions to the destination
    self._dst.copyTransactionsFrom(self._storage)
    # Now pack the destination.
    snooze()
    self._dst.pack(time.time(), referencesf)
    # And check to see that the root object exists, but not the other
    # objects.
    data, serial = self._dst.load(root._p_oid, "")
    raises(KeyError, self._dst.load, obj1._p_oid, "")
    raises(KeyError, self._dst.load, obj2._p_oid, "")
def update_list_mailtos(context, new_fqdn):
    """
    http://www.openplans.org/projects/opencore/lists/opencore-dev/archive/2009/03/1238095821932/forum_view#1240922987323
    """
    # Walk every mailing list in the catalog and re-register lists whose
    # mailto domain differs from new_fqdn, committing in batches of 400.
    catalog = getToolByName(context, 'portal_catalog')
    lookup_utility = getUtility(IListLookup, context=context)
    i = 0; changed = False
    for brain in catalog.unrestrictedSearchResults(portal_type=
                                                   'Open Mailing List'):
        i += 1
        try:
            ml = brain.getObject()
        except AttributeError:
            # ignore catalog ghosts
            continue
        mailto, old_fqdn = ml.mailto.split('@')
        if old_fqdn == new_fqdn:
            continue
        # NOTE(review): only the local part is stored here — presumably
        # updateList()/the mailto property re-appends the site FQDN;
        # confirm against IListLookup.updateList before reusing.
        ml.mailto = mailto
        lookup_utility.updateList(ml)
        changed = True
        if changed and i % 400 == 0:
            # Batch commit so a long migration does not hold one huge txn.
            transaction.get().note('Batch commit of mailing list FQDN update')
            transaction.commit()
            changed = False
    transaction.get().note('Final commit of mailing list FQDN update')
    transaction.commit()
def handleTraceback(object):
    """Persist a SiteErrorLog traceback via the @@logbook view.

    Best-effort: runs under a lock, in its own transaction, and swallows
    (but logs) any failure.  Python 2 code (``except Exception, e``).
    """
    context = object.context
    entry_url = object.entry_url
    if entry_url is None:
        return
    LOGGER.info("handle traceback [%s]" % entry_url)
    try:
        cleanup_lock.acquire()
        # we don't want to produce any errors here, thus, we'll be nice and die
        # silently if an error occurs here
        try:
            transaction.begin()
            # get our logbook view to use the api
            logbook = context.unrestrictedTraverse('@@logbook')
            # get the generated error url from Products.SiteErrorLog
            err_id = urllib.splitvalue(entry_url)[1]
            # save error
            logbook.save_error(err_id, context=aq_parent(context))
            transaction.get().note('collective.logbook traceback [%s]' % entry_url)
            transaction.commit()
        finally:
            # Always release the lock, even when saving fails.
            cleanup_lock.release()
    # only warning
    except Exception, e:
        LOGGER.warning("An error occured while handling the traceback")
        LOGGER.warning("%s" % e)
        LOGGER.exception(e)
def testFailedJoin(self):
    # When a join is issued while the transaction is in COMMITFAILED, the
    # session is never closed and the session id stays in _SESSION_STATE,
    # which means the session won't be joined in the future either. This
    # causes the session to stay open forever, potentially accumulating
    # data, but never issuing a commit.
    dummy = DummyDataManager(key='dummy.first')
    transaction.get().join(dummy)
    try:
        transaction.commit()
    except DummyException:
        # Commit raised an error, we are now in COMMITFAILED
        pass
    self.assertEqual(transaction.get().status, ZopeStatus.COMMITFAILED)
    session = Session()
    # try to interact with the session while the transaction is still
    # in COMMITFAILED
    self.assertRaises(TransactionFailedError, session.query(User).all)
    transaction.abort()
    # start a new transaction everything should be ok now
    transaction.begin()
    session = Session()
    self.assertEqual([], session.query(User).all())
    session.add(User(id=1, firstname='udo', lastname='juergens'))
    # abort transaction, session should be closed without commit
    transaction.abort()
    self.assertEqual([], session.query(User).all())
def test_request_not_closed_when_tm_middleware_active(self):
    # With repoze.tm active, WSGIPublisher must defer closing the request
    # until the transaction commits or aborts (via the registered
    # _request_closer_for_repoze_tm synchronizer).
    import transaction
    from ZPublisher import WSGIPublisher
    environ = self._makeEnviron()
    environ['repoze.tm.active'] = 1
    start_response = DummyCallable()
    _request = DummyRequest()
    _request._closed = False

    def _close():
        # Stand-in for Request.close() so we can observe when it happens.
        _request._closed = True
    _request.close = _close

    def _request_factory(stdin, environ, response):
        return _request

    _publish = DummyCallable()
    _publish._result = DummyResponse()
    app_iter = self._callFUT(environ, start_response, _publish,
                             _request_factory=_request_factory)
    # Request stays open until the transaction is finished.
    self.assertFalse(_request._closed)
    txn = transaction.get()
    self.assertTrue(txn in WSGIPublisher._request_closer_for_repoze_tm.requests)
    txn.commit()
    self.assertTrue(_request._closed)
    self.assertFalse(txn in WSGIPublisher._request_closer_for_repoze_tm.requests)
    # try again, but this time raise an exception and abort
    _request._closed = False
    _publish._raise = Exception('oops')
    self.assertRaises(Exception, self._callFUT, environ, start_response,
                      _publish, _request_factory=_request_factory)
    self.assertFalse(_request._closed)
    txn = transaction.get()
    self.assertTrue(txn in WSGIPublisher._request_closer_for_repoze_tm.requests)
    txn.abort()
    self.assertFalse(txn in WSGIPublisher._request_closer_for_repoze_tm.requests)
    self.assertTrue(_request._closed)
def checkPackVersionReachable(self):
    # Pack must not discard data that is only reachable through a version,
    # including after an abortVersion that is subsequently undone.
    db = DB(self._storage)
    cn = db.open()
    root = cn.root()
    names = "a", "b", "c"
    for name in names:
        root[name] = MinPO(name)
        transaction.commit()
    # Give each named object a version-local revision.
    for name in names:
        cn2 = db.open(version=name)
        rt2 = cn2.root()
        obj = rt2[name]
        obj.value = MinPO("version")
        transaction.commit()
        cn2.close()
    root["d"] = MinPO("d")
    transaction.commit()
    snooze()
    self._storage.pack(time.time(), referencesf)
    cn.sync()
    # make sure all the non-version data is there
    for name, obj in root.items():
        self.assertEqual(name, obj.value)
    # make sure all the version-data is there,
    # and create a new revision in the version
    for name in names:
        cn2 = db.open(version=name)
        rt2 = cn2.root()
        obj = rt2[name].value
        self.assertEqual(obj.value, "version")
        obj.value = "still version"
        transaction.commit()
        cn2.close()
    db.abortVersion("b")
    txn = transaction.get()
    txn.note("abort version b")
    txn.commit()
    t = time.time()
    snooze()
    # Undo the abortVersion, then pack up to just before the undo.
    L = db.undoInfo()
    db.undo(L[0]["id"])
    txn = transaction.get()
    txn.note("undo abort")
    txn.commit()
    self._storage.pack(t, referencesf)
    # The version revision must have survived the pack.
    cn2 = db.open(version="b")
    rt2 = cn2.root()
    self.assertEqual(rt2["b"].value.value, "still version")
def install_package(app, module, init_func, raise_exc=False, log_exc=True):
    """Installs a Python package like a product."""
    # When persistent install is disabled (doInstall() false) a transient
    # FactoryDispatcher.Product is used and nothing is committed.
    from App.ProductContext import ProductContext
    try:
        do_install = doInstall()
        name = module.__name__
        if do_install:
            product = App.Product.initializeProduct(module, name,
                                                    module.__path__[0], app)
        else:
            product = FactoryDispatcher.Product(name)
            app = None
        product.package_name = name
        if init_func is not None:
            # Give the package's initialize() the same context a real
            # product would get.
            newContext = ProductContext(product, app, module)
            init_func(newContext)
        package_initialized(module, init_func)
        if do_install:
            transaction.get().note('Installed package %s' % module.__name__)
            transaction.commit()
    except Exception:
        if log_exc:
            LOG.error("Couldn't install %s" % module.__name__,
                      exc_info=True)
        transaction.abort()
        if raise_exc:
            raise
def sql_create_all(context):
    """Add all missing SQL tables and indices. """
    session = Session()
    # Commit first so the DDL below starts from a clean transaction state.
    transaction.get().commit()
    # checkfirst=True: only create tables/indices that do not exist yet.
    model.metadata.create_all(session.bind, checkfirst=True)
    # Tell zope.sqlalchemy this session did work outside the ORM.
    datamanager.mark_changed(session)
def checkNonASCIITransactionMetadata(self):
    # Verify the database stores and retrieves non-ASCII text
    # in transaction metadata.
    ugly_string = ''.join(chr(c) for c in range(256))
    if isinstance(ugly_string, bytes):
        # Always text. Use latin 1 because it can decode any arbitrary
        # bytes.
        ugly_string = ugly_string.decode('latin-1')
    # The storage layer is defined to take bytes (implicitly in
    # older ZODB releases, explicitly in ZODB 5.something), but historically
    # it can accept either text or bytes. However, it always returns bytes
    # NOTE(review): expectation encodes as UTF-8 (not latin-1), i.e. this
    # storage is expected to round-trip text metadata via UTF-8 — confirm
    # against the storage under test.
    check_string = ugly_string.encode("utf-8")
    db = DB(self._storage)
    try:
        c1 = db.open()
        r1 = c1.root()
        r1['alpha'] = 1
        transaction.get().setUser(ugly_string)
        transaction.commit()
        r1['alpha'] = 2
        transaction.get().note(ugly_string)
        transaction.commit()
        # undoInfo() is newest-first: info[0] is the note() commit,
        # info[1] the setUser() commit (setUser prefixes the path: b'/ ').
        info = self._storage.undoInfo()
        self.assertEqual(info[0]['description'], check_string)
        self.assertEqual(info[1]['user_name'], b'/ ' + check_string)
    finally:
        db.close()
def flush_backend(self): """Take all transactions that are dirty or marked for removal writes them out / removes them out if possible. When this is done, it does a zopedb transaction commit, if you're sharing a zopedb thread with this you'll want to be sure your data is in a state you're comfortable having commited """ # if we can write to the backend if self.can_write(): dirty_set_copy = self.dirty_transaction_set.copy() try: self.__advance_all_dirty_transaction_state_machine(True) # save, and let all dirty transactions change thier state # with the knowledge that a save just took place try: self.save() except BoKeepBackendException, e: # call close, which also triggers # __set_all_transactions_to_reset_and_advance() self.close('called close() because save failed ' + \ str(e)) else: for dirty_trans_id in self.dirty_transaction_set.iterkeys(): self.dirty_transaction_set[dirty_trans_id] = \ BackendDataStateMachine.LAST_ACT_SAVE self._p_changed = True transaction.get().commit() self.__advance_all_dirty_transaction_state_machine()
def queue_msg(msg, queue=None):
    """Queue a rabbitmq message in the given queue.

    The message is handed to a transaction-aware data manager joined to
    the current transaction, so it is only sent on commit.
    """
    manager = MessagesDataManager()
    transaction.get().join(manager)
    manager.add(queue, msg)
def initialize(context):
    """Initializer called when used as a Zope 2 product."""
    # Record the current EEA_KGS_VERSION (from the environment) in each
    # Plone site's annotations, committing only if something changed.
    root = Zope2.app()
    sites = root.objectValues("Plone Site")
    version = os.environ.get("EEA_KGS_VERSION", "")
    if not version:
        return
    changed = False
    for site in sites:
        anno = queryAdapter(site, IAnnotations)
        if not anno:
            continue
        if not anno.get("EEA_KGS_VERSION", None):
            anno["EEA_KGS_VERSION"] = OOBTree()
            changed = True
        if not anno["EEA_KGS_VERSION"].get(version, None):
            # First time this version is seen on this site: timestamp it.
            anno["EEA_KGS_VERSION"][version] = datetime.now()
            changed = True
    if changed:
        transaction.get().note('eea.design: updating EEA_KGS_VERSION')
        try:
            transaction.commit()
        except Exception as err:
            # A concurrent instance may have written the same key; that
            # is fine — abort and carry on.
            logger.warn("EEA_KGS_VERSION already updated elsewhere: %s", err)
            transaction.abort()
        else:
            logger.info("EEA_KGS_VERSION updated to: %s", version)
def run_plone_migrations(context):
    """ Runs the migrations that are registered w/ Plone's
    portal_migrations tool.  Python 2 code (``raise E, msg``).
    """
    migtool = getToolByName(context, 'portal_migration')
    if not migtool.needUpgrading():
        return
    inst_version = migtool.getInstanceVersion()
    if 'svn' in inst_version:
        # it's an unreleased version, bump down the version number and
        # use forced upgrade
        inst_version = inst_version.split()[0]
        from Products.CMFPlone.MigrationTool import _upgradePaths
        # Walk the upgrade graph backwards to the predecessor version.
        for vfrom, vto in _upgradePaths.items():
            if vto[0] == inst_version:
                inst_version = vfrom
                break
        # Fake a POST with force_instance_version so the tool upgrades
        # from the downgraded version number.
        req = context.REQUEST
        req.environ['REQUEST_METHOD'] = 'POST'
        req.form = {'force_instance_version': inst_version}
        req.force_instance_version = inst_version
        result = migtool.upgrade(REQUEST=req)
    else:
        result = migtool.upgrade()
    if not migtool.needUpgrading():
        transaction.get().note('Plone migrations run')
        transaction.commit()
    else:
        raise RuntimeError, "Plone migrations failed"
def __init__(self, wd, db, head, name):
    """Start an AcidFS session bound to the current transaction.

    wd: working directory; db: path to the git repository; head: branch
    name or 'HEAD'; name: session name.  Joins the current transaction
    as a data manager and loads the tree for the head commit (or an
    empty tree for a brand-new head).
    """
    self.wd = wd
    self.db = db
    self.name = name
    self.lock_file = os.path.join(db, 'acidfs.lock')
    transaction.get().join(self)
    # Read the current branch from the repo's HEAD file; the first 16
    # characters are the "ref: refs/heads/" prefix.  Use a context
    # manager so the file handle is not leaked until GC.
    with open(os.path.join(db, 'HEAD')) as headfile:
        curhead = headfile.read().strip()[16:]
    if head == curhead:
        head = 'HEAD'
    if head == 'HEAD':
        self.headref = os.path.join(db, 'refs', 'heads', curhead)
    else:
        self.headref = os.path.join(db, 'refs', 'heads', head)
    self.head = head
    if os.path.exists(self.headref):
        # Existing head, get head revision
        self.prev_commit = _check_output(
            ['git', 'rev-list', '--max-count=1', head], cwd=db).strip()
        self.tree = _TreeNode.read(db, self.prev_commit)
    else:
        # New head, no commits yet
        self.tree = _TreeNode(db)  # empty tree
        self.prev_commit = None
def render(self):
    """Render the object view; handle CANCEL/ROLLBACK form actions.

    Rollback is two-step: first request shows a confirmation template,
    the confirmed request performs the rollback and notes it on the
    transaction.
    """
    self._started = time.time()
    pruneTruncations()
    self.obj = self.selectObjectToView()
    # Not using IObjectHistory(self.obj) because LP#1185175
    self.history = ZodbObjectHistory(self.obj)
    self.latest = True
    if self.request.get('tid'):
        # A specific historical transaction id was requested.
        self.state = ZodbObjectState(self.obj,
                                     p64(int(self.request['tid'], 0)),
                                     _history=self.history)
        self.latest = False
    else:
        self.state = ZodbObjectState(self.obj, _history=self.history)
    if 'CANCEL' in self.request:
        self._redirectToSelf()
        return ''
    if 'ROLLBACK' in self.request:
        rtid = p64(int(self.request['rtid'], 0))
        self.requestedState = self._tidToTimestamp(rtid)
        if self.request.get('confirmed') == '1':
            self.history.rollback(rtid)
            transaction.get().note(u'Rollback to old state %s'
                                   % self.requestedState)
            self.made_changes = True
            self._redirectToSelf()
            return ''
        # will show confirmation prompt
        return self.confirmation_template()
    return self.template()
def checkMultipleUndoInOneTransaction(self):
    # Verify that it's possible to perform multiple undo
    # operations within a transaction.  If ZODB performs the undo
    # operations in a nondeterministic order, this test will often
    # fail.
    conn = self._db.open()
    try:
        root = conn.root()
        # Add transactions that set root["state"] to (0..5)
        for state_num in range(6):
            transaction.begin()
            root['state'] = state_num
            transaction.get().note('root["state"] = %d' % state_num)
            transaction.commit()
        # Undo all but the first. Note that no work is actually
        # performed yet.
        transaction.begin()
        log = self._db.undoLog()
        self._db.undoMultiple([log[i]['id'] for i in range(5)])
        transaction.get().note('undo states 1 through 5')
        # Now attempt all those undo operations.
        transaction.commit()
        # Sanity check: we should be back to the first state.
        self.assertEqual(root['state'], 0)
    finally:
        transaction.abort()
        conn.close()
def create_dangling_ref(db):
    """Build a history where pack() leaves object ``c`` unreferenced,
    then re-reference it — producing a dangling reference scenario."""
    rt = db.open().root()
    rt[1] = o1 = P()
    transaction.get().note(u"create o1")
    transaction.commit()
    rt[2] = o2 = P()
    transaction.get().note(u"create o2")
    transaction.commit()
    c = o1.child = P()
    transaction.get().note(u"set child on o1")
    transaction.commit()
    # Replace the child so ``c`` is no longer reachable from o1.
    o1.child = P()
    transaction.get().note(u"replace child on o1")
    transaction.commit()
    # Let the clock advance so the pack time is strictly later.
    time.sleep(2)
    # The pack should remove the reference to c, because it is no
    # longer referenced from o1. But the object still exists and has
    # an oid, so a new commit of it won't create a new object.
    db.pack()
    print(repr(c._p_oid))
    o2.child = c
    transaction.get().note(u"set child on o2")
    transaction.commit()
def batch_upgrade(request):
    """Upgrade a batch of items by uuid, one nested savepoint per item.

    Each item is updated inside its own nested transaction; failures
    (exceptions or validation errors) roll back just that item.  Returns
    {'results': [(item_type, uuid, update, error), ...]}.
    """
    request.datastore = 'database'
    # Tag the whole transaction so downstream code knows it's an upgrade.
    transaction.get().setExtendedInfo('upgrade', True)
    batch = request.json['batch']
    root = request.root
    storage = request.registry[STORAGE].write
    session = storage.DBSession()
    results = []
    for uuid in batch:
        item_type = None
        update = False
        error = False
        # Per-item savepoint: rollback affects only this item.
        sp = session.begin_nested()
        try:
            item = find_resource(root, uuid)
            item_type = item.type_info.item_type
            update, errors = update_item(storage, item)
        except Exception:
            logger.exception('Error updating: /%s/%s', item_type, uuid)
            sp.rollback()
            error = True
        else:
            if errors:
                # NOTE: the comprehension's loop variable shadows the
                # ``error`` flag name; the flag is reassigned right after.
                errortext = [
                    '%s: %s' % ('/'.join(error.path) or '<root>',
                                error.message)
                    for error in errors]
                logger.error(
                    'Validation failure: /%s/%s\n%s',
                    item_type, uuid, '\n'.join(errortext))
                sp.rollback()
                error = True
            else:
                sp.commit()
        results.append((item_type, uuid, update, error))
    return {'results': results}
def calculate(self):
    """Move related delivery in 'calculating' state by activity

    Activity to update causality state is delayed until all related
    simulation movement are reindexed.

    This method should be only called by
    simulation_movement_causality_interaction_workflow.
    """
    delivery = self.getDeliveryValue()
    if delivery is not None:
        delivery = delivery.getRootDeliveryValue()
        tv = getTransactionalVariable()
        path = self.getPath()
        delivery_path = delivery.getPath()
        # One accumulator per (delivery, transaction): collect the paths
        # of all movements touched in this transaction.
        key = 'SimulationMovement.calculate', delivery_path
        try:
            tv[key].append(path)
        except KeyError:
            # First movement for this delivery in this transaction:
            # create the list and register the before-commit hook once.
            tv[key] = [path]

            def before_commit():
                method_id_list = ('immediateReindexObject',
                                  'recursiveImmediateReindexObject')
                tag = delivery_path + '_calculate'
                # Delay Delivery_calculate until all collected movements
                # have been reindexed.
                delivery.activate(tag=tag).Delivery_calculate(activate_kw=
                    {'after_path_and_method_id': (tv[key],
                                                  method_id_list)})
                tv[key] = None  # disallow further calls to 'calculate'

            transaction.get().addBeforeCommitHook(before_commit)
def __call__(self, batch=1000, dryrun=False):
    """ find all btree-based folder below the context, potentially migrate
    them & provide some logging and statistics doing so """
    log = self.mklog()
    log('migrating btree-based folders from %r:' % self.context)
    real = timer()          # real time
    lap = timer()           # real lap time (for intermediate commits)
    cpu = timer(clock)      # cpu time
    processed = 0

    def checkPoint():
        # Savepoint + transaction note every `batch` migrated folders;
        # reads `processed` from the enclosing scope at call time.
        msg = 'intermediate commit '\
              '(%d objects processed, last batch in %s)...'
        log(msg % (processed, next(lap)))
        trx = get()
        trx.note(u'migrated %d btree-folders' % processed)
        trx.savepoint()

    cpi = checkpointIterator(checkPoint, batch)
    for path, obj in findObjects(self.context):
        if isinstance(obj, BTreeFolder):
            if self.migrate(obj):
                processed += 1
                next(cpi)
            self.postprocess(obj)
    checkPoint()  # commit last batch
    if dryrun:
        get().abort()  # abort on test-run...
    msg = 'processed %d object(s) in %s (%s cpu time).'
    msg = msg % (processed, next(real), next(cpu))
    log(msg)
    logger.info(msg)
def handle_photo_upload(context, form):
    """Attach an uploaded photo to `context`, or delete the existing one.

    Dooms the transaction and raises Invalid when the upload is not a
    valid image, so the partially-created content is never committed.
    """
    upload = form.get("photo", None)
    if upload is not None and upload.file is not None:
        request = get_current_request()
        userid = authenticated_userid(request)
        upload_file = upload.file
        # The two form libraries expose the mimetype differently.
        if hasattr(upload, "type"):
            upload_type = upload.type  # FieldStorage
        else:
            upload_type = upload.mimetype  # Formish File object
        assert upload_type
        photo = create_content(
            ICommunityFile,
            title="Photo of " + context.title,
            stream=upload_file,
            mimetype=upload_type,
            filename=basename_of_filepath(upload.filename),
            creator=userid,
        )
        if not photo.is_image:
            # Prevent this transaction from ever committing.
            transaction.get().doom()
            raise Invalid({"photo": "Uploaded file is not a valid image."})
        # Replace any existing photo.
        if "photo" in context:
            del context["photo"]
        alsoProvides(photo, IPhoto)
        context["photo"] = photo
        check_upload_size(context, photo, "photo")
    # Handle delete photo (ignore if photo also uploaded)
    elif form.get("photo_delete", False) or (
            upload and upload.metadata.get("remove", False)):
        if "photo" in context:
            del context["photo"]
def upgrade(app, args):
    """Run all registered upgrade steps on the (single) Plone site,
    recook CSS/JS resources, commit, and exit the process."""
    # Display all messages on stderr
    logger.setLevel(logging.DEBUG)
    logger.handlers[0].setLevel(logging.DEBUG)
    existing = app.objectValues('Plone Site')
    # Old-style conditional expression (py2 `and/or` idiom).
    site = existing and existing[0] or None
    if site is None:
        logger.error("No Plone site found in the database.")
        sys.exit(1)
    _, site = _setup(app, site)
    from intranett.policy.config import config
    logger.info("Starting the upgrade.\n\n")
    setup = site.portal_setup
    config.run_all_upgrades(setup)
    logger.info("Ran upgrade steps.")
    # Recook resources, as some CSS/JS files might have changed.
    # TODO: We could try to determine if this is needed in some way
    site.portal_javascripts.cookResources()
    site.portal_css.cookResources()
    logger.info("Resources recooked.")
    transaction.get().note('Upgraded profiles and recooked resources.')
    transaction.get().commit()
    sys.exit(0)
def _bootstrap(self):
    """Bootstrap the erp5_property_sheets business template and schedule
    its full installation for just before commit."""
    bt_name = 'erp5_property_sheets'
    from Products.ERP5.ERP5Site import ERP5Generator
    ERP5Generator.bootstrap(self, bt_name, 'PropertySheetTemplateItem', (
        'BaseType',
        'BusinessTemplate',
        'Folder',
        'SimpleItem',
        'Version',
        'Comment',
        # the following ones are required to upgrade an existing site
        'Reference',
        'BaseCategory',
        'SQLIdGenerator',
    ))

    def install():
        from ZPublisher.BaseRequest import RequestContainer
        from Products.ERP5Type.Globals import get_request
        portal = self.getPortalObject()
        # BusinessTemplate.install needs a request
        # Wrap the portal in a RequestContainer so acquisition can find
        # REQUEST during installation.
        template_tool = portal.aq_base.__of__(portal.aq_parent.__of__(
            RequestContainer(REQUEST=get_request()))).portal_templates
        if template_tool.getInstalledBusinessTemplate(bt_name) is None:
            from Products.ERP5.ERP5Site import getBootstrapBusinessTemplateUrl
            url = getBootstrapBusinessTemplateUrl(bt_name)
            template_tool.download(url).install()

    # Run install() with unrestricted privileges right before commit.
    transaction.get().addBeforeCommitHook(unrestricted_apply, (install,))
def install_products(app):
    # Install a list of products into the basic folder class, so
    # that all folders know about top-level objects, aka products
    folder_permissions = get_folder_permissions()
    meta_types = []
    done = {}
    debug_mode = getConfiguration().debug_mode
    transaction.get().note('Prior to product installs')
    transaction.commit()
    products = get_products()
    for priority, product_name, index, product_dir in products:
        # For each product, we will import it and try to call the
        # intialize() method in the product __init__ module. If
        # the method doesnt exist, we put the old-style information
        # together and do a default initialization.
        # Skip duplicates (the same product may appear under several
        # product directories); ``in`` replaces the deprecated
        # dict.has_key(), which was removed in Python 3.
        if product_name in done:
            continue
        done[product_name] = 1
        install_product(app, product_dir, product_name, meta_types,
                        folder_permissions, raise_exc=debug_mode)
    # Delayed install of packages-as-products
    for module, init_func in tuple(get_packages_to_initialize()):
        install_package(app, module, init_func, raise_exc=debug_mode)
    Products.meta_types = Products.meta_types + tuple(meta_types)
    InitializeClass(Folder.Folder)
def window_close(*args):
    # Flush pending BoKeep transactions to the backend plugin, then
    # commit the ZODB transaction so the flushed state is persisted,
    # close the book set, and quit the GUI main loop.
    book.get_backend_plugin().flush_backend()
    transaction.get().commit()
    bookset.close()
    # should change guistate (default shell persistent storage)
    # to be on this specific trans id now
    main_quit()
def install_standards(app):
    """Install the replaceable standard objects (DTML methods, page
    templates, images) into the application root, once.

    Fixes resource handling: the source files are now opened in ``with``
    blocks so the handles are closed promptly instead of leaking, and
    images are opened in binary mode.
    """
    # Check to see if we've already done this before
    if getattr(app, '_standard_objects_have_been_added', 0):
        return
    # Install the replaceable standard objects
    from App.Common import package_home
    from App.special_dtml import DTMLFile
    from Products.PageTemplates.PageTemplateFile import PageTemplateFile
    std_dir = os.path.join(package_home(globals()), 'standard')
    wrote = False
    for fn in os.listdir(std_dir):
        base, ext = os.path.splitext(fn)
        if ext == '.dtml':
            if hasattr(app, base):
                continue  # an object with this id already exists
            ob = DTMLFile(base, std_dir)
            with open(ob.raw) as f:
                app.manage_addProduct['OFSP'].manage_addDTMLMethod(
                    id=base, file=f)
        elif ext in ('.pt', '.zpt'):
            if hasattr(app, base):
                continue
            ob = PageTemplateFile(fn, std_dir, __name__=fn)
            with open(ob.filename) as f:
                app.manage_addProduct['PageTemplates'].manage_addPageTemplate(
                    id=base, title='', text=f)
        elif ext in ('.ico', '.gif', '.png'):
            if hasattr(app, fn):
                continue
            # Binary mode: image data must not go through text decoding.
            with open(os.path.join(std_dir, fn), 'rb') as f:
                app.manage_addProduct['OFSP'].manage_addImage(
                    id=fn, title='', file=f)
        else:
            continue
        wrote = True
    if wrote:
        app._standard_objects_have_been_added = 1
        transaction.get().note('Installed standard objects')
        transaction.commit()
def onObjectRemovedEvent(obj, event): """Subscriber for ``ObjectRemovedEvent``.""" # if we're on delete_confirmation, only trigger if submitted and not cancelled if obj.REQUEST.getURL().endswith('delete_confirmation') and \ (obj.REQUEST.form.get('form.button.Cancel', None) == 'Cancel' or not obj.REQUEST.form.get('form.submitted')): return currentTransaction = transaction.get() # check if we already set the hook if len([x for x in currentTransaction.getAfterCommitHooks() if x[0] == onItemRemoval_hook and obj in x[1]]) == 0: # we wait for transaction completion as removal can be aborted currentTransaction.addAfterCommitHook(onItemRemoval_hook, args=(obj, ))
def test_cropped_when_too_long_even_without_description(self):
    """Overflowing the 65535-character transaction note must preserve
    the existing note prefix and end with a '...' crop marker."""
    profileid = 'my.package:default'
    transaction.get().note('Some notes..')
    note = TransactionNote()
    # Floor division keeps the range() bound an int on Python 3 as well
    # (plain `/` would yield a float there and raise TypeError); the
    # result is identical on Python 2.  65535 is the description limit.
    for destination in range(1, (65535 // len(profileid)) + 2):
        note.add_upgrade(profileid, (str(destination),), '')
    note.set_transaction_note()
    result = transaction.get().description
    expected_start = 'Some notes..\nmy.package:default -> 1\n'
    self.assertTrue(
        result.startswith(expected_start),
        ('Expected transaction note to start with "%s",'
         ' but it started with "%s"') % (
            expected_start, result[:50]))
    self.assertTrue(
        result.endswith('...'),
        'Expected transaction note to be cropped, ending with "..." '
        'but it ends with "%s"' % result[-30:])
def duplicate(self, conn, abort_it):
    """Export root['test'] and re-import it as root['dup'] in one
    transaction; commit or abort depending on `abort_it`."""
    transaction.begin()
    transaction.get().note(u'duplication')
    root = conn.root()
    ob = root['test']
    assert len(ob) > 10, 'Insufficient test data'
    try:
        import tempfile
        with tempfile.TemporaryFile(prefix="DUP") as f:
            # Round-trip the object through export/import.
            ob._p_jar.exportFile(ob._p_oid, f)
            assert f.tell() > 0, 'Did not export correctly'
            f.seek(0)
            new_ob = ob._p_jar.importFile(f)
            self.assertEqual(new_ob, ob)
            root['dup'] = new_ob
        if abort_it:
            transaction.abort()
        else:
            transaction.commit()
    except:
        # Make sure a failed duplication never leaves a half-done txn.
        transaction.abort()
        raise
def AfterTransitionEventHandler(instance, event):
    """Workflow after-transition handler for analysis categories.

    Blocks the 'deactivate' transition when the category still contains
    Analysis Services: shows a portal message, aborts the transaction
    and raises WorkflowException.  (Unused portal_workflow /
    portal_catalog / reference-catalog lookups were removed — they did
    nothing.)
    """
    pu = getToolByName(instance, 'plone_utils')
    # creation doesn't have a 'transition'
    if not event.transition:
        return
    if event.transition.id == "deactivate":
        # A instance cannot be deactivated if it contains services
        bsc = getToolByName(instance, 'bika_setup_catalog')
        ars = bsc(portal_type='AnalysisService',
                  getCategoryUID=instance.UID())
        if ars:
            message = _(
                "Category cannot be deactivated because it contains Analysis Services"
            )
            pu.addPortalMessage(message, 'error')
            # Undo the state change and signal failure to the workflow.
            transaction.get().abort()
            raise WorkflowException
def test_commit_metadata_user_path_is_blank(factory, tmp):
    # pyramid_tm calls setUser with '' for path
    # A blank path must not corrupt the author recorded in git.
    fs = factory()
    tx = transaction.get()
    tx.note("A test commit.")
    tx.setUser('Fred', '')
    tx.setExtendedInfo('email', '*****@*****.**')
    fs.open('foo', 'wb').write(b'Howdy!')
    transaction.commit()
    # Inspect the resulting git commit metadata directly.
    output = _check_output(['git', 'log'], cwd=tmp)
    assert b'Author: Fred <*****@*****.**>' in output
    assert b'A test commit.' in output
def test_should_be_able_to_use_session_across_transactions(self):
    # Documents that only documents with pending changes join the
    # current transaction as resources, and that committed state is
    # retrievable via fresh MongoDocument lookups.
    self.doc['name'] = 'Saruman'
    doc2 = MongoDocument(self.session, colname)
    # doc2 not part of this transaction since no data
    self.assertEqual(transaction.get()._resources, [self.doc])
    transaction.commit()
    doc2['name'] = 'Gandalf'
    # doc not part of this transaction since no changes
    self.assertEqual(transaction.get()._resources, [doc2])
    transaction.commit()
    self.doc['profession'] = 'wizard'
    doc2['profession'] = 'wizardToo'
    # both should be part of this transaction
    self.assertEqual(transaction.get()._resources, [self.doc, doc2])
    doc3 = MongoDocument(self.session, colname,
                         retrieve={'name': 'Saruman'})
    doc4 = MongoDocument(self.session, colname,
                         retrieve={'name': 'Gandalf'})
    self.assertEqual(self.doc.committed, doc3.committed)
    self.assertEqual(doc2.committed, doc4.committed)
def zodb_transaction():
    """Yield the current transaction; commit on success, abort on error.

    Generator-style context manager body — presumably wrapped with
    ``@contextmanager`` at the definition site (not visible here);
    confirm before reuse.  If the body already finished the transaction
    (a different transaction is current afterwards), an error path
    raises InvalidTransactionError instead of double-aborting.
    """
    try:
        txn = transaction.get()
        yield txn
    except Exception:
        if txn is not transaction.get():
            # The body replaced the transaction: nothing safe to abort.
            raise InvalidTransactionError(
                "could not abort transaction, was already aborted/committed within 'with' body"
            )
        try:
            txn.abort()
        # catch any internal exceptions that happen during abort
        except Exception:
            pass
        # Re-raise the original exception from the body.
        raise
    else:
        try:
            if txn is transaction.get():
                txn.commit()
        # catch any internal exceptions that happen during commit
        except Exception:
            pass
def render(self):
    """Render the object view with debug timing marks; handle the
    CANCEL/ROLLBACK form actions (two-step confirmed rollback)."""
    self.reset_mark(getFullRequestUrl(self.request))
    pruneTruncations()
    self.obj = self.selectObjectToView()
    self.debug_mark('- loading object history')
    # Not using IObjectHistory(self.obj) because LP: #1185175
    self.history = getObjectHistory(self.obj)
    self.latest = True
    self.debug_mark('- loading object state')
    if self.request.get('tid'):
        # A specific historical transaction id was requested.
        self.state = ZodbObjectState(self.obj,
                                     p64(int(self.request['tid'], 0)),
                                     _history=self.history)
        self.latest = False
    else:
        self.state = ZodbObjectState(self.obj, _history=self.history)
    if 'CANCEL' in self.request:
        self._redirectToSelf()
        return ''
    if 'ROLLBACK' in self.request:
        rtid = p64(int(self.request['rtid'], 0))
        self.requestedState = self._tidToTimestamp(rtid)
        if self.request.get('confirmed') == '1':
            self.history.rollback(rtid)
            transaction.get().note(u'Rollback to old state %s'
                                   % self.requestedState)
            self.made_changes = True
            self._redirectToSelf()
            return ''
        # will show confirmation prompt
        return self.confirmation_template()
    self.debug_mark('- rendering')
    try:
        return self.template()
    finally:
        self.debug_mark('- done (%s)' % formatTime(self.time_elapsed()))
def transaction_note(note):
    """Attach a human-readable note to the current transaction.

    Unicode notes are encoded to UTF-8 first; a note that would push the
    transaction description past the backend limit is logged and dropped
    instead of written.
    """
    txn = transaction.get()
    if isinstance(note, unicode):
        # Convert unicode to a regular string for the backend write IO.
        # UTF-8 is the only reasonable choice, as using unicode means
        # that Latin-1 is probably not enough.
        note = note.encode('utf-8', 'replace')
    if len(txn.description) + len(note) < 65533:
        txn.note(str(note))
    else:
        log('Transaction note too large omitting %s' % str(note))
def update_transaction_note(*args):
    """Maintain a "(Saving N new i18n messages)" counter in the current
    transaction's description: seed it at 0 if absent, then bump it by one."""
    import transaction, re

    def render_label(count):
        return "(Saving %d new i18n messages)" % count

    def bump(match):
        return render_label(int(match.group('count')) + 1)

    pattern = re.compile(r'\(Saving (?P<count>\d+) new i18n messages\)')
    txn = transaction.get()
    if pattern.search(txn.description) is None:
        # First save within this transaction: add the zero-count label.
        txn.note(render_label(0))
    txn.description = pattern.sub(bump, txn.description)
def install_package(app, module, init_func, raise_exc=False, log_exc=True):
    """Installs a Python package like a product."""
    try:
        product = App.Product.initializeProduct(module, module.__name__,
                                                module.__path__[0], app)
        product.package_name = module.__name__
        if init_func is not None:
            # Hand the package's initializer a product context, mirroring
            # what classic Zope products receive.
            newContext = ProductContext(product, app, module)
            init_func(newContext)
        if not doInstall():
            # Installation disabled -- discard any registration writes.
            # TODO confirm doInstall() semantics (e.g. read-only client).
            transaction.abort()
        else:
            transaction.get().note('Installed package %s' % module.__name__)
            transaction.commit()
    # NOTE(review): bare except also traps SystemExit/KeyboardInterrupt;
    # presumably intentional so the transaction is always aborted -- confirm
    # before narrowing to Exception.
    except:
        if log_exc:
            LOG.error("Couldn't install %s" % module.__name__,
                      exc_info=True)
        transaction.abort()
        if raise_exc:
            raise
def checkPackWithGCOnDestinationAfterRestore(self):
    # After copying all transactions to the destination storage, packing
    # the destination with GC must remove objects that became unreachable
    # (obj1/obj2) while keeping the root loadable.
    raises = self.assertRaises
    closing = self._closing
    db = closing(DB(self._storage))
    conn = closing(db.open())
    root = conn.root()
    root.obj = obj1 = MinPO(1)
    txn = transaction.get()
    txn.note(u'root -> obj')
    txn.commit()
    root.obj.obj = obj2 = MinPO(2)
    txn = transaction.get()
    txn.note(u'root -> obj -> obj')
    txn.commit()
    # Unlink the chain from the root; obj1 and obj2 become garbage.
    del root.obj
    txn = transaction.get()
    txn.note(u'root -X->')
    txn.commit()
    # Now copy the transactions to the destination
    self._dst.copyTransactionsFrom(self._storage)
    # If the source storage is a history-free storage, all
    # of the transactions are now marked as packed in the
    # destination storage.  To trigger a pack, we have to
    # add another transaction to the destination that is
    # not packed.
    db2 = closing(DB(self._dst))
    conn2 = closing(db2.open())
    conn2.root().extra = 0
    txn = transaction.get()
    txn.note(u'root.extra = 0')
    txn.commit()
    # Now pack the destination.
    snooze()
    self._dst.pack(time.time(), referencesf)
    # And check to see that the root object exists, but not the other
    # objects.
    _data, _serial = self._dst.load(root._p_oid, '')
    raises(KeyError, self._dst.load, obj1._p_oid, '')
    raises(KeyError, self._dst.load, obj2._p_oid, '')
def checkPackUnlinkedFromRoot(self):
    # Pack at a time before the object was linked must not prevent a later
    # undo of the unlink from restoring the object intact.
    eq = self.assertEqual
    db = DB(self._storage)
    conn = db.open()
    root = conn.root()
    txn = transaction.get()
    txn.note('root')
    txn.commit()
    now = packtime = time.time()
    # Busy-wait until the clock advances so the pack time strictly follows
    # the first commit.
    while packtime <= now:
        packtime = time.time()
    obj = C()
    obj.value = 7
    root['obj'] = obj
    txn = transaction.get()
    txn.note('root -> o1')
    txn.commit()
    del root['obj']
    txn = transaction.get()
    txn.note('root -x-> o1')
    txn.commit()
    self._storage.pack(packtime, referencesf)
    log = self._storage.undoLog()
    # Undo the most recent transaction (the unlink), restoring root['obj'].
    tid = log[0]['id']
    db.undo(tid)
    txn = transaction.get()
    txn.note('undo root -x-> o1')
    txn.commit()
    conn.sync()
    eq(root['obj'].value, 7)
def createUtils(root_folder, connection=None, dummy_db=None):
    """Ensure the AdmUtilReports utility exists on the site, then commit.

    When the connection argument is given it is closed afterwards; the
    dummy_db argument is unused here.
    """
    reports_util = ensureUtility(
        root_folder, IAdmUtilReports, 'AdmUtilReports', AdmUtilReports,
        name='AdmUtilReports', copy_to_zlog=False)
    if isinstance(reports_util, AdmUtilReports):
        # A brand-new utility was created: fill in Dublin Core metadata
        # and record the event with the supervisor utility.
        logger.info(u"bootstrap: Ensure named AdmUtilReports")
        metadata = IWriteZopeDublinCore(reports_util)
        metadata.title = u"Reports Utility"
        metadata.created = datetime.utcnow()
        reports_util.ikName = metadata.title
        reports_util.__post_init__()
        site_manager = root_folder.getSiteManager()
        supervisors = [
            registration for registration in site_manager.registeredUtilities()
            if registration.provided.isOrExtends(IAdmUtilSupervisor)]
        supervisor = supervisors[0].component
        supervisor.appendEventHistory(
            u" bootstrap: made IAdmUtilReports-Utility")
    transaction.get().commit()
    if connection is not None:
        connection.close()
def _testrun(self):
    # Worker-thread body: repeatedly insert keys into the shared tree,
    # retrying on ZODB conflicts, until the stop event is set.
    cn = self.db.open()
    # Keep trying to load the shared tree: a conflict or a missing root
    # entry just means another thread is still setting up.
    while not self.stop.isSet():
        try:
            tree = cn.root()["tree"]
            break
        except (ConflictError, KeyError):
            transaction.abort()
    key = self.startnum
    while not self.stop.isSet():
        try:
            tree[key] = self.threadnum
            transaction.get().note("add key %s" % key)
            transaction.commit()
            self.commitdict[self] = 1
            if self.sleep:
                time.sleep(self.sleep)
        # BUG FIX: the original used the Python-2-only "except E, msg:"
        # syntax (a SyntaxError on Python 3); the exception value was
        # never used, so drop the binding entirely.
        except (ReadConflictError, ConflictError):
            transaction.abort()
        else:
            self.added_keys.append(key)
        key += self.step
def catalog_object(self, obj, uid=None, idxs=None, update_metadata=1,
                   pghandler=None):
    # Catalog obj under uid (derived from getPhysicalPath when omitted).
    # When self.threshold is set, create a savepoint every `threshold`
    # calls made within a single transaction to bound memory use.
    if uid is None:
        try:
            # Grab the bound method first so a missing attribute raises
            # before we build the path.
            uid = obj.getPhysicalPath
        except AttributeError:
            raise CatalogError(
                "A cataloged object must support the 'getPhysicalPath' "
                "method if no unique id is provided when cataloging")
        else:
            uid = '/'.join(uid())
    elif not isinstance(uid, str):
        raise CatalogError('The object unique id must be a string.')
    self._catalog.catalogObject(obj, uid, None, idxs,
                                update_metadata=update_metadata)
    # None passed in to catalogObject as third argument indicates
    # that we shouldn't try to commit subtransactions within any
    # indexing code.  We throw away the result of the call to
    # catalogObject (which is a word count), because it's
    # worthless to us here.
    if self.threshold is not None:
        # figure out whether or not to commit a subtransaction.
        t = id(transaction.get())
        if t != self._v_transaction:
            self._v_total = 0
        self._v_transaction = t
        self._v_total = self._v_total + 1
        # increment the _v_total counter for this thread only and get
        # a reference to the current transaction.
        # the _v_total counter is zeroed if we notice that we're in
        # a different transaction than the last one that came by.
        # self.threshold represents the number of times that
        # catalog_object needs to be called in order for the catalog
        # to commit a subtransaction.  The semantics here mean that
        # we should commit a subtransaction if our threshhold is
        # exceeded within the boundaries of the current transaction.
        if self._v_total > self.threshold:
            transaction.savepoint(optimistic=True)
            self._p_jar.cacheGC()
            self._v_total = 0
            if pghandler:
                pghandler.info('committing subtransaction')
def install_products(app):
    # Install a list of products into the basic folder class, so
    # that all folders know about top-level objects, aka products
    folder_permissions = get_folder_permissions()
    meta_types = []
    done = {}
    debug_mode = App.config.getConfiguration().debug_mode
    # Checkpoint so a failed install can roll back to a known state.
    transaction.get().note('Prior to product installs')
    transaction.commit()
    products = get_products()
    for priority, product_name, index, product_dir in products:
        # For each product, we will import it and try to call the
        # intialize() method in the product __init__ module. If
        # the method doesnt exist, we put the old-style information
        # together and do a default initialization.
        #
        # BUG FIX: dict.has_key() is Python-2-only; the `in` operator is
        # equivalent and works on both Python 2 and 3.
        if product_name in done:
            continue
        done[product_name] = 1
        install_product(app, product_dir, product_name, meta_types,
                        folder_permissions, raise_exc=debug_mode)
    # Delayed install of packages-as-products
    for module, init_func in getattr(Products, '_packages_to_initialize', []):
        install_package(app, module, init_func, raise_exc=debug_mode)
    if hasattr(Products, '_packages_to_initialize'):
        del Products._packages_to_initialize
    Products.meta_types = Products.meta_types + tuple(meta_types)
    InitializeClass(Folder.Folder)
def batch_upgrade(request):
    """Upgrade a batch of items by uuid, isolating each in a nested savepoint.

    Returns {'results': [(item_type, uuid, updated, errored), ...]} with one
    entry per uuid in request.json['batch'].
    """
    request.datastore = 'database'
    # Tag the transaction so downstream machinery knows this is an upgrade.
    transaction.get().setExtendedInfo('upgrade', True)
    batch = request.json['batch']
    root = request.root
    storage = request.registry[STORAGE].write
    session = storage.DBSession()
    results = []
    for uuid in batch:
        item_type = None
        update = False
        error = False
        # Nested savepoint: a bad item rolls back alone, not the whole batch.
        sp = session.begin_nested()
        try:
            item = find_resource(root, uuid)
            item_type = item.type_info.item_type
            update, errors = update_item(storage, item)
        except Exception as e:
            logger.error('Error %s updating: /%s/%s' % (e, item_type, uuid))
            sp.rollback()
            error = True
        else:
            if errors:
                # redmine 5161 sometimes error.path has an int
                # ('' path elements render as '<root>' via `str(x) or ...`).
                errortext = [
                    '%s: %s' % ('/'.join([str(x) or '<root>'
                                          for x in error.path]),
                                error.message)
                    for error in errors
                ]
                logger.error('Validation failure: /%s/%s\n%s', item_type,
                             uuid, '\n'.join(errortext))
                sp.rollback()
                error = True
            else:
                sp.commit()
        results.append((item_type, uuid, update, error))
    return {'results': results}
def createUtils(root_folder, connection=None, dummy_db=None):
    """Bootstrap helper: ensure the AdmUtilSnmpd utility exists, refresh its
    MRTG data, and commit.

    connection, when given, is closed afterwards; dummy_db is unused here.
    """
    madeAdmUtilSnmpd = ensureUtility(
        root_folder, IAdmUtilSnmpd, 'AdmUtilSnmpd', AdmUtilSnmpd,
        name='AdmUtilSnmpd', copy_to_zlog=False)
    if isinstance(madeAdmUtilSnmpd, AdmUtilSnmpd):
        # ensureUtility created a fresh utility: initialize its metadata
        # and log the event with the supervisor utility.
        logger.info(u"bootstrap: Ensure named AdmUtilSnmpd")
        dcore = IWriteZopeDublinCore(madeAdmUtilSnmpd)
        dcore.title = u"Snmpd Utility"
        dcore.created = datetime.utcnow()
        madeAdmUtilSnmpd.ikName = dcore.title
        madeAdmUtilSnmpd.__post_init__()
        sitem = root_folder.getSiteManager()
        utils = [util for util in sitem.registeredUtilities()
                 if util.provided.isOrExtends(IAdmUtilSupervisor)]
        instAdmUtilSupervisor = utils[0].component
        instAdmUtilSupervisor.appendEventHistory(
            u" bootstrap: made IAdmUtilSnmpd-Utility")
        dbgOut = getNewMrtgData(madeAdmUtilSnmpd)
        if dbgOut:
            instAdmUtilSupervisor.appendEventHistory(dbgOut)
    else:
        # Utility already existed: look it up and update its MRTG data.
        # search in global component registry
        sitem = root_folder.getSiteManager()
        utils = [util for util in sitem.registeredUtilities()
                 if util.provided.isOrExtends(IAdmUtilSnmpd)]
        instAdmUtilSnmpd = utils[0].component
        utils = [util for util in sitem.registeredUtilities()
                 if util.provided.isOrExtends(IAdmUtilSupervisor)]
        instAdmUtilSupervisor = utils[0].component
        dbgOut = updateMrtgData(instAdmUtilSnmpd)
        if dbgOut:
            instAdmUtilSupervisor.appendEventHistory(dbgOut)
    transaction.get().commit()
    if connection is not None:
        connection.close()
def createUtils(root_folder, connection=None, dummy_db=None):
    """Ensure the AdmUtilCategories utility exists, pre-populated with a
    default set of host groups, then commit.

    connection, when given, is closed afterwards; dummy_db is unused here.
    """
    madeAdmUtilCategories = ensureUtility(
        root_folder, IAdmUtilCategories, 'AdmUtilCategories',
        AdmUtilCategories, name='AdmUtilCategories', copy_to_zlog=False,
        asObject=True)
    if isinstance(madeAdmUtilCategories, AdmUtilCategories):
        # Freshly created: set metadata and seed the default host groups.
        logger.info(u"bootstrap: Ensure named AdmUtilCategories")
        dcore = IWriteZopeDublinCore(madeAdmUtilCategories)
        dcore.title = u"Categories Utility"
        dcore.created = datetime.utcnow()
        madeAdmUtilCategories.ikName = dcore.title
        for strHostGroup in [
                u'DNS-Server',
                u'File-Server',
                u'Miscellaneous-Server',
                u'SMTP-Server',
                u'Terminal-Server',
                u'Utility-Server',
                u'Workstation',
                ]:
            newHostGroup = AdmUtilCatHostGroup()
            # IDIOM FIX: plain attribute assignment invokes __setattr__
            # identically; the explicit newHostGroup.__setattr__(...) call
            # in the original was equivalent but unidiomatic.
            newHostGroup.ikName = strHostGroup
            dcore = IWriteZopeDublinCore(newHostGroup)
            dcore.created = datetime.utcnow()
            dcore.title = strHostGroup
            madeAdmUtilCategories[newHostGroup.objectID] = newHostGroup
        madeAdmUtilCategories.__post_init__()
        sitem = root_folder.getSiteManager()
        utils = [util for util in sitem.registeredUtilities()
                 if util.provided.isOrExtends(IAdmUtilSupervisor)]
        instAdmUtilSupervisor = utils[0].component
        instAdmUtilSupervisor.appendEventHistory(
            u" bootstrap: made IAdmUtilCategories-Utility")
    transaction.get().commit()
    if connection is not None:
        connection.close()
def signup(self):
    """Handle a signup form post: validate, store the subscriber, fire the
    notification email event, then redirect to the registration page.

    On validation or processing failure an error response is returned via
    signup_error; on a DNS/mail failure the transaction is aborted so no
    partial subscriber record is kept.
    """
    err_msg = partial(signup_error, self.request, self.index)
    # Run every signup validator over the request and fold their messages
    # into one error string.
    errors = reduce(err_reducer,
                    (name(self.request) for name in VALIDATORS_SIGNUP), '')
    if errors:
        return err_msg(errors)
    try:
        subscribers = self.context.get('subscribers')
        self.validate(subscribers)
        user = api.user.get_current()
        uid = user.getId()
        props = dict(
            title=user.getProperty('fullname', uid),
            id=uid,
            userid=uid,
            email=user.getProperty('email', ''),
            reimbursed=bool(self.request.get('reimbursed')),
            role=self.request.get('role'),
            visa=bool(self.request.get('visa')),
            role_other=self.request.get('role_other', ''),
            request_data_deletion=(
                self.request.get('request_data_deletion') is not None),
        )
        views.add_subscriber(subscribers, **props)
        notify(SendNewSubscriberEmailEvent(self.context))
    except socket.gaierror:
        # Make sure the transaction gets aborted.
        transaction.get().abort()
        return err_msg('Cannot send email!')
    except Exception as e:
        # BUG FIX: e.message only exists on Python 2 exceptions and raises
        # AttributeError on Python 3; getattr with a fallback preserves the
        # original behavior (message when present, else the exception).
        return err_msg(getattr(e, 'message', None) or e)
    return self.response.redirect(self.context.absolute_url() + '/register')
def mkstore(*args, **kw):
    """Build an AcidFS store rooted at tmp, tag the current transaction with
    test user metadata, and configure a git identity in the repository.

    BUG FIX: the original left the process chdir'ed into tmp if either
    `git config` call failed; the try/finally now always restores cwd.
    """
    store = AcidFS(tmp, *args, **kw)
    tx = transaction.get()
    tx.setUser('Test User')
    tx.setExtendedInfo('email', '*****@*****.**')
    os.chdir(tmp)
    try:
        subprocess.check_call(['git', 'config', 'user.name', 'Test User'])
        subprocess.check_call(
            ['git', 'config', 'user.email', '*****@*****.**'])
    finally:
        # Always return to the original working directory, even on failure.
        os.chdir(cwd)
    return store
def test_archive_many(self): p, f = self.page, self.wiki # set up some children p.create('A') p.create('B') f.B.create('B1') f.B.create('B2') # and a sibling p.create('TestPage2') f.TestPage2.reparent(REQUEST=self.request) # B2 has a parent outside the archived tree f.B2.addParent('TestPage2') # make sure they all have a _p_jar transaction.get().savepoint() p.archive() self.assertEqual(set(pageIds(f)), set(['TestPage2', 'B2'])) self.assertEqual(set(pageIds(p.archiveFolder())), set(['TestPage', 'A', 'B', 'B1'])) # B2's parents list still refers to the archived page self.assertEqual(set(f.B2.getParents()), set(['B', 'TestPage2'])) # but normal cleanup will take care of that f.B2.ensureValidParents() self.assertEqual(f.B2.getParents(), ['TestPage2'])
def migrate_multiform_add_catalogs(app):
    """Touch every multi-record form on each configured site so that its
    search catalog is created and indexed, then commit per site with
    descriptive transaction notes."""
    for site_name in SITENAMES:
        site = app[site_name]
        setSite(site)
        brains = site.portal_catalog.unrestrictedSearchResults(
            {'portal_type': FORMTYPE})
        for brain in brains:
            form = brain._unrestrictedGetObject()
            # Firing ObjectModifiedEvent forces catalog creation + indexing.
            notify(ObjectModifiedEvent(form))
            assert form.catalog is not None
        txn = transaction.get()
        txn.note('/'.join(site.getPhysicalPath()))
        txn.note('Added form search catalogs to all multi-record forms.')
        txn.commit()
def reinstall_product(app, product_name):
    """Reinstall a single product by name and refresh Products.meta_types."""
    folder_permissions = get_folder_permissions()
    meta_types = []
    # Checkpoint before touching any product registration state.
    transaction.get().note('Prior to product reinstall')
    transaction.commit()
    for product_dir in Products.__path__:
        product_names = sorted(os.listdir(product_dir))
        if product_name not in product_names:
            continue
        removeProductMetaTypes(product_name)
        install_product(app, product_dir, product_name, meta_types,
                        folder_permissions, raise_exc=1, log_exc=0)
        break
    Products.meta_types = Products.meta_types + tuple(meta_types)
    InitializeClass(Folder.Folder)
def sendSurveyInviteAll(self, send_to_all=False, use_transactions=False):
    """Send survey Invites to all respondents"""
    # Returns the number of invitation emails sent.  Skips users who have
    # already responded and, unless send_to_all, users already emailed.
    number_sent = 0
    if use_transactions:
        # Discard any pending changes before the mail-out loop.
        transaction.abort()
    respondents = self.acl_users.getUsers()
    already_completed = self.getRespondents()
    for respondent in respondents:
        if use_transactions:
            # NOTE(review): transaction.get() is a no-op here -- it fetches
            # the current transaction object and discards it.  Probably
            # meant transaction.commit() (or a savepoint) to flush per
            # respondent; confirm intent before changing.
            transaction.get()
        email_address = respondent.getId()
        respondent_details = self.getAuthenticatedRespondent(email_address)
        if email_address in already_completed:
            # don't send out an invite if already responded
            continue
        if not send_to_all:
            # don't send an email if one already sent
            if respondent_details['email_sent']:
                continue
        self.sendSurveyInvite(email_address)
        number_sent += 1
    # return number of invites sent
    return number_sent
def install_inituser(self): app = self.getApp() # Install the initial user. if hasattr(app, 'acl_users'): users = app.acl_users if hasattr(users, '_createInitialUser'): app.acl_users._createInitialUser() self.commit('Created initial user') users = aq_base(users) migrated = getattr(users, '_ofs_migrated', False) if not migrated: klass = users.__class__ from OFS.userfolder import UserFolder if klass is UserFolder: # zope.deferredimport does a thourough job, so the class # looks like it's from the new location already. And we # don't want to migrate any custom user folders here. users.__class__ = UserFolder users._ofs_migrated = True users._p_changed = True app._p_changed = True transaction.get().note('Migrated user folder') transaction.commit()
def checkUndoLogMetadata(self):
    # test that the metadata is correct in the undo log
    # Note/user/extension data set on the transaction before any object
    # change must appear on the matching undoLog record.
    t = transaction.get()
    t.note(u't1')
    t.setExtendedInfo('k2', 'this is transaction metadata')
    t.setUser(u'u3', path=u'p3')
    db = DB(self._storage)
    conn = db.open()
    try:
        root = conn.root()
        o1 = C()
        root['obj'] = o1
        txn = transaction.get()
        txn.commit()
        l = self._storage.undoLog()
        self.assertEqual(len(l), 2)
        d = l[0]
        # Text metadata comes back from the storage layer as bytes.
        self.assertEqual(d['description'], b't1')
        self.assertEqual(d['k2'], 'this is transaction metadata')
        # user_name is "<path> <user>" joined by the transaction machinery.
        self.assertEqual(d['user_name'], b'p3 u3')
    finally:
        conn.close()
        db.close()
def _makeTransactionNote(self, obj, msg=''):
    #TODO Why not aq_parent()?
    # Attach a human-readable note about a modification to obj to the
    # current transaction; a default message is built from obj's path.
    relative_path = '/'.join(getToolByName(self, 'portal_url') \
                             .getRelativeContentPath(obj)[:-1])
    if not msg:
        msg = relative_path + '/' + obj.title_or_id() \
              + ' has been modified.'
    if isinstance(msg, UnicodeType):
        # Convert unicode to a regular string for the backend write IO.
        # UTF-8 is the only reasonable choice, as using unicode means
        # that Latin-1 is probably not enough.
        msg = msg.encode('utf-8')
    # Only note when the transaction has no description yet, so the first
    # (most specific) message wins.
    if not transaction.get().description:
        transaction_note(msg)