def ensure_referential_integrity(obj, event):
    """Doom the transaction if a removed/moved object is still referenced.

    Subscriber for object-move events: reconstructs the path the object
    had before the move and raises IntegrityError if references to it
    exist.
    """
    if event.oldParent is None:
        # The object was not in the hierarchy before so it didn't have a path
        # and can't be referenced currently.
        return
    # 1. Compute the old path to the container that an object was removed from
    old_parent_path = zope.traversing.api.getPath(event.oldParent)
    if old_parent_path == '/':
        # The object that was removed was located in the root folder. We
        # remove the slash to avoid ending up with a double-slash in the next
        # step.
        old_parent_path = ''
    # 2. Compute the path to the object that was removed
    old_path = (old_parent_path + '/' + event.oldName)
    # 3. If our obj is not the moved object then we're a sublocation and need
    # to reconstruct the further path to the sublocation.
    suffix = ''
    suffix_obj = obj
    while suffix_obj is not event.object:
        suffix = '/%s%s' % (suffix_obj.__name__, suffix)
        suffix_obj = suffix_obj.__parent__
    old_path = old_path + suffix
    manager = zope.component.getUtility(
        gocept.reference.interfaces.IReferenceManager)
    if manager.is_referenced(old_path):
        # Abort the move: committing would leave dangling references.
        transaction.doom()
        raise gocept.reference.interfaces.IntegrityError(event.object, obj)
def put(self, host_id=None, *a, **kw):
    """Update an existing Host, geolocating its address via GeoIP.

    Dooms the transaction and re-renders the form when validation or
    GeoIP lookup fails; otherwise stores location data and redirects.
    """
    h = DBSession.query(Host).filter_by(id=host_id).first()
    if not h:
        raise HTTPNotFound
    errors = _h.get_validation_errors()
    h.user = request.identity['user']
    h.group = _h.get_group_by_name(kw.get('group_name', None))
    h.address = unicode(kw['address'].strip())
    h.online_status = DBSession.query(Status)\
        .filter_by(label='Offline').first()
    res = self._get_geoip_data(h.address)
    if not res:
        # BUG FIX: '%' binds tighter than '+', so the original
        # concatenation applied the format only to the last fragment
        # (which has no '%s') and raised TypeError at runtime; format
        # the fully assembled message instead.
        errors['host_address'] = ("The host '%s' could not be "
                                  "identified via GeoIP. Please "
                                  "ensure the hostname resolves" % h.address)
    if errors:
        # Validation failed: drop all attribute changes made above.
        transaction.doom()
        return dict(errors=errors, host=h)
    h.city = unicode(res.get('city', None))
    h.region_name = unicode(res.get('region_name', None))
    h.longitude = res.get('longitude', None)
    h.latitude = res.get('latitude', None)
    h.country_name = unicode(res.get('country_name', None))
    h.country_code = unicode(res.get('country_code', None))
    h.country_code3 = unicode(res.get('country_code3', None))
    h.postal_code = res.get('postal_code', None)
    flash(_("%s updated successfully!" % kw['address']), 'info')
    redirect(url('/dashboard'))
def add(self, obj):
    """Add *obj* via the base form, translating duplicate-id errors.

    On DuplicateIDError the transaction is doomed so no partial state is
    committed, and the error is re-raised as a form action error.
    """
    try:
        return super(AddForm, self).add(obj)
    except zope.container.interfaces.DuplicateIDError as e:
        transaction.doom()
        raise z3c.form.interfaces.ActionExecutionError(
            zope.interface.Invalid(_(e.args[0])))
def assign_project(self, site_id, *a, **kw):
    """Assign a project to a site, requiring a sync path per project."""
    errors = _h.get_validation_errors()
    s = DBSession.query(Site).filter_by(id=site_id).first()
    p = DBSession.query(Project).filter_by(id=kw.get('project_id', None))\
        .first()
    all_p = DBSession.query(Project).all()
    # BUG FIX: 404-check before protecting; the old order passed None to
    # _h.protect_obj when the site or project did not exist.
    if not s:
        raise HTTPNotFound
    if not p:
        raise HTTPNotFound
    _h.protect_obj(s)
    _h.protect_obj(p)
    all_projects = [x for x in all_p if x not in s.projects]
    if errors:
        # Validation failed: abort and re-render the form.
        transaction.doom()
        return dict(errors=errors, site=s, all_projects=all_projects)
    _s_label = s.label
    s.projects.append(p)
    if kw.get('sync_path', None):
        self._add_site_sync_path(s.id, p.id, kw['sync_path'])
    else:
        # A sync path is mandatory; abort the append made above.
        transaction.doom()
        flash(_('Site sync path required for each project.'), 'warning')
        redirect(url("/site/%s/edit#mirrored_projects" % _s_label))
    transaction.commit()
    redirect(url("/site/%s/edit#projects" % _s_label))
def reindex_cataloged(self, commit_interval=100, idxs=None, start=0,
                      end=-1, query=None, doom=True):
    """Reindex all cataloged content in Solr.

    Catalog query parameters may be passed via *query* or the request
    form; results are batched, with an intermediate Solr commit every
    *commit_interval* items.  With ``doom=True`` the ZODB transaction is
    doomed so only Solr is modified.
    """
    query = query or {}
    # Whitelist of request-form keys that may refine the catalog query.
    for key, value in self.request.form.items():
        if key in ['UID', 'path', 'created', 'modified', 'portal_type',
                   'object_provides', 'sort_on', 'sort_order']:
            query[key] = value
    if 'sort_on' not in query:
        query['sort_on'] = 'path'
    catalog = getToolByName(self.context, 'portal_catalog')
    items = catalog.unrestrictedSearchResults(**query)
    # Values may arrive as strings from the request; fall back to
    # defaults if they are not numeric.
    try:
        start = int(start)
        end = int(end)
        commit_interval = int(commit_interval)
    except ValueError:
        start = 0
        end = -1
        commit_interval = 100
    processed = 0
    real = timer()
    lap = timer()
    cpu = timer(clock)
    if doom:
        transaction.doom()
    zodb_conn = self.context._p_jar

    def commit():
        # Commit to Solr only; also shrink the ZODB cache between batches.
        conn = self.manager.connection
        conn.commit(extract_after_commit=False)
        zodb_conn.cacheGC()
        self.log(
            'Intermediate commit (%d items processed, last batch in %s)',
            processed, lap.next())

    cpi = checkpoint_iterator(commit, interval=commit_interval)
    self.log('Reindexing Solr...')
    site = getSite()
    # NOTE(review): with the default end=-1 the slice items[start:-1]
    # excludes the last catalog result -- confirm this is intended.
    for item in items[start:end]:
        path = item.getPath()
        obj = site.unrestrictedTraverse(path, None)
        if obj is None:
            logger.warning("Object at path %s doesn't exist", path)
            continue
        handler = getMultiAdapter((obj, self.manager), ISolrIndexHandler)
        handler.add(idxs)
        processed += 1
        cpi.next()
    commit()
    self.log('Solr index rebuilt.')
    self.log(
        'Processed %d items in %s (%s cpu time).',
        processed, real.next(), cpu.next())
def post(self, *a, **kw):
    """Create a new Project from the submitted form data."""
    errors = _h.get_validation_errors()
    group = _h.get_group_by_name(kw.get('group_name', None))
    protocol = _h.get_protocol_by_name(kw.get('sync_protocol', None))
    all_protocols = DBSession.query(SyncProtocol).all()
    if not group:
        errors['group'] = 'Group does not exist!'
    if not protocol:
        errors['sync_protocol'] = 'Sync Protocol does not exist!'
    p = Project()
    # Normalize the label: spaces become underscores, lower-cased.
    p.label = unicode(re.sub(' ', '_', kw['label']).lower())
    _label = p.label
    p.display_name = unicode(kw.get('display_name', None))
    p.desc = kw.get('desc', None)
    p.url = unicode(kw.get('url', None))
    p.user = request.identity['user']
    p.sync_base_path = unicode(kw.get('sync_base_path', None))
    p.sync_flags = unicode(kw.get('sync_flags', None))
    p.group = group
    p.sync_protocol = protocol
    if len(errors) > 0:
        # Validation failed: doom so nothing is persisted, re-render form.
        transaction.doom()
        return dict(page="project", errors=errors, project=p,
                    all_protocols=all_protocols)
    DBSession.add(p)
    transaction.commit()
    flash(_("%s created successfully!" % kw['display_name']), 'info')
    redirect(url('/project/%s/edit' % _label))
def update_model(self, form, model):
    """
    Update model from form.

    Returns True on success; on failure the transaction is doomed and
    False is returned.

    :param form: Form instance
    :param model: Model instance
    """
    try:
        with self.session.begin_nested():
            form.populate_obj(model)
            self._on_model_change(form, model, False)
    except Exception as ex:
        if not self.handle_view_exception(ex):
            flash(gettext('Failed to update record. %(error)s',
                          error=str(ex)), 'error')
            log.exception('Failed to update record.')
        # Abort everything done in this request's transaction.
        transaction.doom()
        return False
    else:
        self.after_model_change(form, model, False)
    return True
def create_model(self, form):
    """
    Create model from form.

    Returns the new model instance on success; on failure the
    transaction is doomed and False is returned.

    :param form: Form instance
    """
    try:
        with self.session.begin_nested():
            model = self.model()
            form.populate_obj(model)
            self.session.add(model)
            self._on_model_change(form, model, True)
            self.session.flush()
    except Exception as ex:
        if not self.handle_view_exception(ex):
            flash(gettext('Failed to create record. %(error)s',
                          error=str(ex)), 'error')
            log.exception('Failed to create record.')
        # Abort everything done in this request's transaction.
        transaction.doom()
        return False
    else:
        self.after_model_change(form, model, True)
    return model
def main(): app = setup_app() parser = setup_option_parser() parser.add_option("-n", dest="dry_run", action="store_true", default=False) (options, args) = parser.parse_args() print SEPARATOR print SEPARATOR print "Date: {}".format(datetime.now().isoformat()) setup_plone(app, options) if options.dry_run: transaction.doom() print "DRY-RUN" fixer = FixReferenceNumbers(options) fixer.fix_reference_numbers() if not options.dry_run: import pdb; pdb.set_trace() transaction.commit() print "Done." print SEPARATOR print SEPARATOR
def put_collection(self, session):
    """
    Update one or multiple objects. Each object MUST contain its
    original pk value.

    Dooms the transaction and returns an error response if any update
    fails; otherwise returns an info response with the update count.
    """
    data_list = self.submitted_collection_data
    # Update all members in data list
    count = 0
    query = session.query(self.model)
    try:
        for data in data_list:
            pk_value = data.pop('id')
            update_query = query.filter(self.model.id == pk_value)
            update_query.update(data)
            count += 1
    except Exception:
        # BUG FIX: narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt.
        LOG.exception('Error updating object(s) during PUT request')
        transaction.doom()
        return error_response(_("Object(s) update failed"))
    if not count:
        return error_response(_("No object(s) updated"))
    msg = _("Object(s) updated successfully")
    # TODO: Check support for rowcount
    # http://docs.sqlalchemy.org/en/latest/core/connections.html
    # sqlalchemy.engine.ResultProxy.rowcount
    return info_response(msg, data={'count': count})
def test_abort_transaction_if_doomed(app):
    # Writes made inside a doomed transaction must not survive the request.
    with app.test_request_context():
        db['answer'] = 42
        transaction.doom()
    with app.test_request_context():
        assert 'answer' not in db
def handleCut(self, action):
    """Handle the 'cut' action: stash the selected items on the clipboard.

    If any selected object cannot be moved, the transaction is doomed
    and a status message is shown instead.
    """
    if not len(self.selectedItems):
        self.status = self.cutNoItemsMessage
        return
    items = []
    append = items.append
    for obj in self.selectedItems:
        mover = IObjectMover(obj)
        __name__ = api.getName(obj)
        if not mover.moveable():
            # BUG FIX: the previous code referenced an undefined local
            # ``name`` (NameError) and duplicated ${name} in the first
            # message variant; a single message with the object's name
            # is sufficient.
            m = {"name": __name__}
            self.status = _("Object '${name}' cannot be moved",
                            mapping=m)
            transaction.doom()
            return
        append(api.joinPath(api.getPath(self.context), __name__))
    self.status = self.cutItemsSelected
    # store the requested operation in the principal annotations:
    self.clipboard.clearContents()
    self.clipboard.addItems('cut', items)
def preview_wikipage_view(context, request, WikiPage=WikiPage, tz=None):
    """Render a preview of a historical version of a wiki page.

    Builds a throwaway WikiPage reverted to the requested version and
    dooms the transaction so it can never be persisted.
    """
    version_num = int(request.params['version_num'])
    repo = find_repo(context)
    for version in repo.history(context.docid):
        if version.version_num == version_num:
            break
    else:
        raise NotFound("No such version: %d" % version_num)
    page = WikiPage()
    page.__parent__ = context.__parent__
    page.revert(version)
    is_front_page = (context.__name__ == 'front_page')
    if is_front_page:
        community = find_interface(context, ICommunity)
        page_title = '%s Community Wiki Page' % community.title
    else:
        page_title = page.title
    profiles = find_profiles(context)
    author = profiles[version.user]
    # Extra paranoia, probably not strictly necessary. I just want to make
    # extra special sure that the temp WikiPage object we create above
    # doesn't accidentally get attached to the persistent object graph.
    transaction.doom()
    return {
        'date': format_local_date(version.archive_time, tz),
        'author': author.title,
        'title': page_title,
        'body': page.cook(request),
    }
def connection_raw_execute(self, connection, raw_cursor, statement, params): """See `TimeoutTracer`""" # Only perform timeout handling on LaunchpadDatabase # connections. if not isinstance(connection._database, LaunchpadDatabase): return # If we are outside of a request, don't do timeout adjustment. try: if self.get_remaining_time() is None: return super(LaunchpadTimeoutTracer, self).connection_raw_execute(connection, raw_cursor, statement, params) except (RequestExpired, TimeoutError): # XXX: This code does not belong here - see bug=636804. # Robert Collins 20100913. OpStats.stats['timeouts'] += 1 # XXX bug=636801 Robert Colins 20100914 This is duplicated # from the statement tracer, because the tracers are not # arranged in a stack rather a queue: the done-code in the # statement tracer never runs. action = getattr(connection, '_lp_statement_action', None) if action is not None: # action may be None if the tracer was installed after # the statement was submitted. action.finish() info = sys.exc_info() transaction.doom() try: reraise(info[0], info[1], tb=info[2]) finally: info = None
def main():
    # Deliberate guard: this migration is disabled on purpose.  Remove the
    # raise only if you really intend to renumber references while moving.
    raise Exception(
        "this script will assign new reference numbers while moving, "
        "most certainly you don't want that to happen..."
    )
    app = setup_app()
    parser = setup_option_parser()
    parser.add_option("-n", dest="dry_run", action="store_true", default=False)
    (options, args) = parser.parse_args()
    print SEPARATOR
    print SEPARATOR
    print "Date: {}".format(datetime.now().isoformat())
    setup_plone(app, options)
    if options.dry_run:
        # Doomed transactions can never be committed.
        transaction.doom()
        print "DRY-RUN"
    rename_repositories(options)
    print "Done."
    print SEPARATOR
    print SEPARATOR
def dump_oggbundle_schemas():
    """Dump JSON Schemas for the OGGBundle exchange format to the filesystem.
    """
    # Read-only helper: doom the transaction so nothing gets committed.
    transaction.doom()
    return OGGBundleJSONSchemaDumpWriter().dump()
def main():
    """Run the retention-period fixer for the configured profile."""
    app = setup_app()
    parser = setup_option_parser()
    parser.add_option("-n", dest="dry_run", action="store_true", default=False)
    parser.add_option("-p", dest="profile",
                      help="profile that contains the repository excel file.")
    options, args = parser.parse_args()
    if not options.profile:
        logger.error("the profile (-p) argument is required.")
        return
    if ":" not in options.profile:
        logger.error("invalid profile id: '{}', missing ':'"
                     .format(options.profile))
        return
    if options.dry_run:
        logger.warn('transaction doomed because we are in dry-mode.')
        # Doomed transactions can never be committed.
        transaction.doom()
    plone = setup_plone(app, options)
    RetentionPeriodFixer(plone, options).run()
    if options.dry_run:
        logger.warn('skipping commit because we are in dry-mode.')
    else:
        transaction.commit()
    logger.info('done.')
def test_add_processing_note(self):
    """A dry-run processing note is written and retrievable as an Event."""
    # Doom up-front so nothing this test creates is ever committed.
    transaction.doom()
    dstore = LegacyDatastore()
    cruise = Cruise()
    cruise.ExpoCode = 'EXPO'
    lsesh().add(cruise)
    tempdir = mkdtemp()
    try:
        # Minimal UOW manifest for ProcessingReadme.
        with open(os.path.join(tempdir, 'uow.json'), 'w') as fff:
            fff.write("""\
{
"expocode": "EXPO",
"alias": "ALIAS",
"data_types_summary": "SUMMARY",
"params": "PARAMS",
"q_infos": []
}
""")
        readme = unicode(ProcessingReadme(tempdir))
        note_id = dstore.add_processing_note(
            readme, 'EXPO', 'title', 'summary', [123], dryrun=True)
        event = lsesh().query(Event).get(note_id)
        self.assertEqual(event.Note[0], '=')
    finally:
        rmtree(tempdir)
def saveSO(self, **kw):
    """Save SO number/remark changes on TAGItems, logging history rows.

    Form keys look like ``<field>_<id>_<id>...``; the prefix selects the
    attribute and the ids select the TAGItems to update.
    """
    #DBSession.begin(subtransactions = True)
    try:
        # Map form-field prefixes to TAGItem attribute names.
        _config = {"so": "soNo",
                   "soRemark": "soRemark",
                   }
        for k, v in kw.items():
            key = k.split("_")[0]
            ids = k.split("_")[1:]
            rs = DBSession.query(TAGItem).filter(
                TAGItem.id.in_([int(id) for id in ids if id])).all()
            if rs:
                for r in rs:
                    oldValue = getattr(r, _config[key])
                    setattr(r, _config[key], str(v.strip()))
                    r.soDate = dt.now()
                    # Record an audit-trail entry for the change.
                    his = TAGHistory(item=r)
                    actionType = "modify" if oldValue else "Add"
                    his.actionKind = actionType
                    his.actionContent = "Change <%s> from [%s] to [%s]." % (
                        key, oldValue, str(v.strip()))
                    his.actionUser = request.identity["user"]
                    DBSession.add(his)
        DBSession.flush()
        return {"flag": "OK"}
    except:
        # NOTE(review): bare except also swallows SystemExit and
        # KeyboardInterrupt -- consider narrowing to Exception.
        #DBSession.rollback()
        transaction.doom()
        traceback.print_exc()
        return {"flag": "error"}
def put(self, project_id=None, *a, **kw):
    """Update an existing Project from submitted form values."""
    errors = _h.get_validation_errors()
    p = DBSession.query(Project).filter_by(id=project_id).first()
    if not p:
        raise HTTPNotFound
    # Reject a label change that would collide with another project.
    if kw['label'] != p.label:
        other_p = DBSession.query(Project).filter_by(label=kw['label'])\
            .first()
        if other_p:
            errors['label'] = "%s already exists, use another label." % \
                other_p.label
    group = _h.get_group_by_name(kw.get('group_name', None))
    protocol = _h.get_protocol_by_name(kw.get('sync_protocol', None))
    all_protocols = DBSession.query(SyncProtocol).all()
    _h.protect_obj_modify(p)
    p.display_name = unicode(kw['display_name'])
    p.desc = kw['desc']
    p.url = unicode(kw['url'])
    p.sync_base_path = unicode(kw.get('sync_base_path', None))
    p.sync_flags = unicode(kw.get('sync_flags', None))
    p.sync_protocol = protocol
    p.group = group
    if len(errors) > 0:
        # Validation failed: doom so the attribute changes above are dropped.
        transaction.doom()
        return dict(errors=errors, project=p, all_protocols=all_protocols)
    p.label = unicode(re.sub(' ', '_', kw['label']).lower())
    _label = p.label
    transaction.commit()
    flash(_("%s updated successfully!" % kw['display_name']), 'info')
    redirect(url('/project/%s/edit' % _label))
def put(self, product_label=None, *a, **kw):
    """Update an existing Product from submitted form values."""
    p = DBSession.query(Product).filter_by(label=product_label).first()
    # BUG FIX: check for a missing product *before* dereferencing it;
    # the old code read p.project first and raised AttributeError when
    # the query returned None.
    if not p:
        raise HTTPNotFound
    project = p.project
    if not project:
        raise HTTPNotFound
    _project_label = project.label
    _h.protect_obj_modify(project)
    _h.protect_obj_modify(p.project)
    errors = _h.get_validation_errors()
    # Reject a label change that would collide with another product.
    if kw.get('label', None) != p.label:
        _existing_p = _h.get_product_by_name(kw.get('label', None))
        if _existing_p:
            errors['label'] = "%s already exists!" % kw.get('label', None)
    p.label = unicode(re.sub(' ', '_', kw['label']).lower())
    p.display_name = unicode(kw['display_name'])
    p.desc = kw['desc']
    p.project = project
    if errors:
        # Validation failed: doom so the attribute changes are dropped.
        transaction.doom()
        return dict(project=p.project, errors=errors, product=p)
    transaction.commit()
    flash(_("%s updated successfully!" % kw['display_name']), 'info')
    redirect(url("/project/%s/edit#products" % _project_label))
def connection_raw_execute(self, connection, raw_cursor, statement, params): """See `TimeoutTracer`""" # Only perform timeout handling on LaunchpadDatabase # connections. if not isinstance(connection._database, LaunchpadDatabase): return # If we are outside of a request, don't do timeout adjustment. try: if self.get_remaining_time() is None: return super(LaunchpadTimeoutTracer, self).connection_raw_execute( connection, raw_cursor, statement, params) except (RequestExpired, TimeoutError): # XXX: This code does not belong here - see bug=636804. # Robert Collins 20100913. OpStats.stats['timeouts'] += 1 # XXX bug=636801 Robert Colins 20100914 This is duplicated # from the statement tracer, because the tracers are not # arranged in a stack rather a queue: the done-code in the # statement tracer never runs. action = getattr(connection, '_lp_statement_action', None) if action is not None: # action may be None if the tracer was installed after # the statement was submitted. action.finish() info = sys.exc_info() transaction.doom() try: raise info[0], info[1], info[2] finally: info = None
def preview_wikipage_view(context, request, WikiPage=WikiPage):
    """Render a preview of a historical version of a wiki page.

    Builds a throwaway WikiPage reverted to the requested version and
    dooms the transaction so it can never be persisted.
    """
    version_num = int(request.params['version_num'])
    repo = find_repo(context)
    for version in repo.history(context.docid):
        if version.version_num == version_num:
            break
    else:
        raise NotFound("No such version: %d" % version_num)
    page = WikiPage()
    page.__parent__ = context.__parent__
    page.revert(version)
    is_front_page = (context.__name__ == 'front_page')
    if is_front_page:
        community = find_interface(context, ICommunity)
        page_title = '%s Community Wiki Page' % community.title
    else:
        page_title = page.title
    profiles = find_profiles(context)
    author = profiles[version.user]
    # Extra paranoia, probably not strictly necessary. I just want to make
    # extra special sure that the temp WikiPage object we create above
    # doesn't accidentally get attached to the persistent object graph.
    transaction.doom()
    return {
        'date': format_local_date(version.archive_time),
        'author': author.title,
        'title': page_title,
        'body': page.cook(request),
    }
def put(self, site_label=None, *a, **kw):
    """Update an existing Site from submitted form values."""
    errors = _h.get_validation_errors()
    s = DBSession.query(Site).filter_by(label=site_label).first()
    group = _h.get_group_by_name(kw.get('group_name', None))
    all_p = DBSession.query(Project).all()
    # BUG FIX: 404-check before protecting; the old order passed None
    # to _h.protect_obj when the site did not exist.
    if not s:
        raise HTTPNotFound
    _h.protect_obj(s)
    if not group:
        errors['group'] = 'Group does not exist!'
    # Normalize the label: spaces become underscores, lower-cased.
    s.label = unicode(re.sub(' ', '_', kw['label']).lower())
    s.display_name = unicode(kw.get('display_name', None))
    s.desc = kw.get('desc', None)
    s.url = unicode(kw.get('url', None))
    s.contact_name = unicode(kw.get('contact_name', None))
    s.contact_email = unicode(kw.get('contact_email', None))
    s.sync_base_path = unicode(kw.get('sync_base_path', None))
    s.user = request.identity['user']
    s.group = group
    if len(errors) > 0:
        all_projects = [x for x in all_p if x not in s.projects]
        all_hosts = [x for x in request.identity['user'].hosts \
                     if x not in s.hosts]
        # Validation failed: doom so the attribute changes are dropped.
        transaction.doom()
        return dict(page='site', errors=errors, site=s,
                    all_projects=all_projects, all_hosts=all_hosts)
    transaction.commit()
    flash(_("%s updated successfully!" % kw['display_name']), 'info')
    redirect(url('/site/%s/edit' % kw['label']))
def get_delete(self, *args, **kw):
    '''This is the code that creates a confirm_delete page

    The delete is performed in a doomed transaction so it is only
    simulated; the session's pending deletes are listed as dependencies.
    '''
    if not self.allow_delete:
        abort(403)
    pks = self.provider.get_primary_fields(self.model)
    kw, d = {}, {}
    for i, pk in enumerate(pks):
        kw[pk] = args[i]
    for i, arg in enumerate(args):
        d[pks[i]] = arg
    obj = self.provider.delete(self.model, d)
    # Render every cascaded delete (other than obj itself) as an HTML
    # definition list grouped by type.
    deps = u'<dl>'
    for k, g in groupby(sorted(o for o in DBSession.deleted if o != obj),
                        lambda x: type(x)):
        deps += u'<dt>' + unicode(k.__name__) + u'</dt>'
        deps += u'<dd>' + u', '.join(sorted(unicode(o) for o in g)) + u'</dd>'
    deps += u'</dl>'
    # Doom so the simulated delete can never actually be committed.
    transaction.doom()
    #obj = self.edit_filler.__provider__.get_obj(self.model, params=kw, fields=self.edit_filler.__fields__)
    pklist = u'/'.join(map(lambda x: unicode(getattr(obj, x)), pks))
    return dict(obj=obj, model=self.model.__name__, deps=deps,
                pk_count=len(pks), pklist=pklist)
def update(self):
    """Process the Update action once and cache the resulting status."""
    if self.update_status is not None:
        # We've been called before. Just return the status we previously
        # computed.
        return self.update_status
    status = ''
    if Update in self.request:
        try:
            changed = applyWidgetsChanges(self, self.schema,
                                          target=self.data,
                                          names=self.fieldNames)
        except WidgetsError as errors:  # pragma: no cover
            self.errors = errors
            status = _("An error occurred.")
            # Abort any partial widget changes.
            transaction.doom()
        else:
            if changed:
                status = self.setData(self.data)
            # Re-render the widgets from the (possibly updated) data,
            # ignoring sticky request values.
            # NOTE(review): placement of this call inside the success
            # branch was reconstructed from mangled formatting -- confirm
            # against the original source.
            setUpWidgets(self, self.schema, IInputWidget, initial=self.data,
                         ignoreStickyValues=True, names=self.fieldNames)
    self.update_status = status
    return status
def handleBuild(self, action): """\ Build the thing """ # validate all groups. errors = extractError(self) if errors: self.status = _(u"Unable to build exposure due to input error; " "please review the form and make the appropriate changes, " "update each subsection using the provided button, and try " "again.") return wh = zope.component.getAdapter(self.context, IExposureWizard) try: moldExposure(self.context, self.request, wh.structure) except ProcessingError, e: # XXX almost certainly a bad idea but this fixes # tracker item 3610. import transaction transaction.doom() self._doomed = True status = IStatusMessage(self.request) status.addStatusMessage(_(str(e)), 'error') # maybe the subscriber to this can do the rollback? raise ActionExecutionError(e)
def save(self, **kw):
    """Create a ReceiveItemHeader plus a ReceiveItem per posted item id."""
    try:
        save_items = []
        itemIDs = kw.get('itemIDs').split('|')
        reHeader = ReceiveItemHeader()
        reHeader.no = 'REC%s' % dt.now().strftime('%Y%m%d')
        reHeader.remark = kw.get('remark', '')
        reHeader.warehouseID = kw.get('warehouseID')
        reHeader.issuedBy = request.identity["user"]
        reHeader.lastModifyBy = request.identity["user"]
        reHeader.createTime = kw.get('receivedDate', dt.now())
        for itemID in itemIDs:
            _r = ReceiveItem()
            _r.qty = int(kw.get('qty-%s' % itemID))
            _r.internalPO = kw.get('internalPO-%s' % itemID)
            _r.header = reHeader
            _r.itemID = itemID
            _r.warehouseID = int(kw.get('warehouseID'))
            _r.issuedBy = request.identity["user"]
            _r.lastModifyBy = request.identity["user"]
            _r.createTime = reHeader.createTime
            save_items.append(_r)
        DBSession.add_all(save_items)
        DBSession.flush()
        # Append the generated primary key to the receipt number.
        reHeader.no = '%s%05d' % (reHeader.no, reHeader.id)
    except:
        # NOTE(review): bare except also swallows SystemExit and
        # KeyboardInterrupt -- consider narrowing to Exception.
        transaction.doom()
        traceback.print_exc()
        flash("The service is not avaiable now!", "warn")
    else:
        flash("Confirmed successfully!")
    # NOTE(review): on failure reHeader.id may be unset/stale here since
    # the transaction is doomed -- confirm the redirect target.
    redirect("/receive/view?recid=%s" % reHeader.id)
def save_order(self, **kw):
    """Create or update an OrchestraOrder from inline 'fabric_set' rows."""
    try:
        kw = extract_inline_list('fabric_set', **kw)
        if kw.has_key('fabric_set'):
            # Flatten the inline rows back into the flat kw dict:
            # list/tuple values are comma-joined; repeated keys are
            # accumulated ('fabric_ids' comma-separated, others pipe-
            # separated).
            for i in kw['fabric_set']:
                for k, v in i.iteritems():
                    if type(v) == list or type(v) == tuple:
                        v = ','.join(map(str, v))
                    if not kw.has_key(k):
                        kw[k] = v
                    elif k == 'fabric_ids':
                        kw[k] = '%s,%s' % (kw[k], v)
                    else:
                        kw[k] = '%s|%s' % (kw[k], v)
            del kw['fabric_set']
        else:
            kw['fabric_ids'] = None
            kw['composition_percents'] = None
            kw['composition_ids'] = None
        if kw.get('id', None):
            order = OrchestraOrder.get(kw['id'])
            order.update(**kw)
            flash("Order update success!")
        else:
            # Drop a present-but-falsy id before creating.
            if kw.has_key('id'):
                del kw['id']
            order = OrchestraOrder.create(**kw)
            flash("Order create success!")
        order.attachment = None
        DBSession.flush()
    except Exception, e:
        log.exception(str(e))
        transaction.doom()
        flash("Error occor on the server side!", 'warn')
    redirect('/orchestra/%s/index' % kw['team'])
def update(self):
    """Apply widget changes to the (possibly adapted) content object."""
    if self.update_status is not None:
        # We've been called before. Just return the status we previously
        # computed.
        return self.update_status
    status = ''
    content = self.adapted
    if Update in self.request:
        changed = False
        try:
            changed = applyWidgetsChanges(self, self.schema, target=content,
                                          names=self.fieldNames)
            # We should not generate events when an adapter is used.
            # That's the adapter's job.
            if changed and self.context is self.adapted:
                description = Attributes(self.schema, *self.fieldNames)
                notify(ObjectModifiedEvent(content, description))
        except WidgetsError, errors:
            self.errors = errors
            status = _("An error occurred.")
            # Abort any partial widget changes.
            transaction.doom()
        else:
            setUpEditWidgets(self, self.schema, source=self.adapted,
                             ignoreStickyValues=True, names=self.fieldNames)
            if changed:
                self.changed()
                formatter = self.request.locale.dates.getFormatter(
                    'dateTime', 'medium')
                status = _("Updated on ${date_time}",
                           mapping={'date_time':
                                    formatter.format(datetime.utcnow())})
    # NOTE(review): this chunk appears truncated -- ``status`` is computed
    # but ``self.update_status`` is never assigned or returned here;
    # confirm against the full source file.
def save_vendor(self, **kw):
    """Create or update a CabelasVendor and its shipto/billto infos."""
    try:
        kw = extract_inline_list('vendor_obj', 'shipto_set', 'user_set',
                                 'billto_set', **kw)
        if kw.has_key('_create'):
            vendor = CabelasVendor.create(**kw['vendor_obj'])
            DBSession.flush()
            for type in ['shipto', 'billto']:
                type_set = '%s_set' % type
                if kw.has_key(type_set):
                    for i in kw[type_set]:
                        vendor_info = CabelasVendorInfo.create(
                            vendor_id=vendor.id, type=type, **i)
        else:
            vendor = CabelasVendor.get(kw['vendor_obj']['id'])
            # Index existing infos by id so we can detect which ones were
            # not re-submitted and must be disabled.
            vendor_info_map = {}
            for i in vendor.vendor_infos:
                vendor_info_map.update({str(i.id): i})
            vendor.update(**kw['vendor_obj'])
            for type in ['shipto', 'billto']:
                type_set = '%s_set' % type
                if kw.has_key(type_set):
                    for i in kw[type_set]:
                        if i.has_key('id'):
                            vendor_info = CabelasVendorInfo.get(i['id'])
                            vendor_info.update(**i)
                            del vendor_info_map[str(vendor_info.id)]
                        else:
                            vendor_info = CabelasVendorInfo.create(
                                vendor_id=vendor.id, type=type, **i)
            # Infos not present in the submission are disabled.
            for k, v in vendor_info_map.iteritems():
                v.disable()
        flash("The vendor saved successful!")
    except Exception, e:
        flash("Error occor on the server side!", 'warn')
        log.exception(str(e))
        transaction.doom()
def ajaxShareFile(self, **kw):
    """Mark a FileObject as shared and notify BBB AE users by email.

    Returns a dict with ``flag`` 0 on success, 1 (plus ``msg``) on error.
    """
    _id = kw.get('id', None) or None
    if not _id:
        return {'flag': 1, 'msg': 'No ID provided!'}
    obj = qry(FileObject).get(_id)
    if not obj:
        return {'flag': 1, 'msg': 'The record does not exist!'}
    try:
        obj.share = 'Y'
        obj.updateTime = dt.now()
        obj.updateById = request.identity["user"].user_id
        # send email to BBB
        item = qry(Item).get(obj.referto)
        subject = "[BBB] A file of job[%s] is shared with you" % item.jobNo
        to = self._get_email_users('AE', 'BBB')
        # BUG FIX: this list literal was corrupted in the source
        # ('"Dear User:"******"A file...'), which evaluated string
        # exponentiation and raised TypeError; restored as separate
        # message lines.
        content = [
            "Dear User:",
            "",
            "A file of job[%s] is shared with you ,please check the below URL to check the job's detail." % item.jobNo,
            "%s/logic/detail?id=%s" % (config.get('website_url', ''), item.id),
            "",
            "Thanks",
        ]
        self._sendEmail(subject, to, content)
        return {'flag': 0}
    except:
        transaction.doom()
        traceback.print_exc()
        return {'flag': 1, 'msg': 'Error occur on the server side!'}
def saveNew(self, **kw):
    """Create a new DBA item, saving its uploaded image to disk."""
    item_code = kw.get('item_code', '').strip()
    item = DBAItem.get_by_code(item_code)
    if item:
        # BUG FIX: the original flash used "%s" without supplying the
        # item code argument, so the placeholder appeared verbatim.
        flash("%s already in the system!" % item_code)
        redirect("/%s/index?item_code=%s" % (self.url, item_code))
    else:
        try:
            file_dir = os.path.join(config.get('public_dir'), 'images/dba')
            image = kw.get('image')
            # Timestamped filename keeps re-uploads from clobbering.
            new_filename = '%s_%s' % (
                item_code, datetime.now().strftime("%Y%m%d%H%M%S"))
            save_file = os.path.join(file_dir,
                                     '%s%s' % (new_filename, '.jpg'))
            with open(save_file, 'wb') as f:
                f.write(image.file.read())
            obj = self.dbObj()
            obj.item_code = item_code
            obj.category_id = kw.get('category_id').strip()
            obj.type_id = kw.get('type_id').strip()
            obj.flatted_size = kw.get('flatted_size').strip()
            obj.image = new_filename
            DBSession.add(obj)
        except:
            transaction.doom()
            flash("The service is not avaiable now.", "warn")
            redirect("/%s/add" % self.url)
        else:
            flash("Save the new DBA item successfully!")
            redirect("/%s/index?item_code=%s" % (self.url, item_code))
def get_delete(self, *args, **kw):
    '''This is the code that creates a confirm_delete page

    The delete operation will be simulated to be able to display all
    related objects that would be deleted too.
    '''
    if not self.allow_delete:
        abort(403)
    pks = self.provider.get_primary_fields(self.model)
    kw, d = {}, {}
    for i, pk in enumerate(pks):
        kw[pk] = args[i]
    for i, arg in enumerate(args):
        d[pks[i]] = arg
    obj = self.provider.delete(self.model, d)
    # Render every cascaded delete (other than obj itself) as an HTML
    # definition list grouped by type.
    deps = u'<dl>'
    for k, g in groupby(sorted(o for o in DBSession.deleted if o != obj),
                        lambda x: type(x)):
        deps += u'<dt>' + unicode(k.__name__) + u's' + u'</dt>'
        deps += u'<dd>' + u', '.join(sorted(unicode(o) for o in g)) + u'</dd>'
    deps += u'</dl>'
    # Doom so the simulated delete can never actually be committed.
    transaction.doom()
    #obj = self.edit_filler.__provider__.get_obj(self.model, params=kw, fields=self.edit_filler.__fields__)
    pklist = u'/'.join(map(lambda x: unicode(getattr(obj, x)), pks))
    return dict(obj=obj, model=self.model.__name__, deps=deps,
                pk_count=len(pks), pklist=pklist)
def action_delete(self, ids):
    """Bulk-delete the records whose primary keys are in *ids*.

    Uses a single bulk DELETE when fast_mass_delete is enabled,
    otherwise deletes row by row via delete_model.
    """
    try:
        query = get_query_for_ids(self.get_query(), self.model, ids)
        if self.fast_mass_delete:
            count = query.delete(synchronize_session=False)
        else:
            count = 0
            for m in query.all():
                if self.delete_model(m):
                    count += 1
        self.session.flush()
        flash(ngettext('Record was successfully deleted.',
                       '%(count)s records were successfully deleted.',
                       count, count=count))
    except Exception as ex:
        # Unhandled view exceptions propagate; handled ones doom the
        # transaction and show an error flash.
        if not self.handle_view_exception(ex):
            raise
        transaction.doom()
        flash(gettext('Failed to delete records. %(error)s',
                      error=str(ex)), 'error')
def save_update(self, **kw): h = getOr404(BBYJobHeader, kw["id"], "/bbycasepack/index") option = self._get_final_option(h) if not option : flash("No final option for this SKU, please update the mockup info!") redirect("index") try: log = [] _f = lambda n : kw.get(n, None) or None for component in option.components: component.factory_id = _f("factory_id_%d" % component.id) old_ids = [str(cp.id) for cp in component.casepack_details] new_ids = [k[k.rindex("_") + 1:] for (k, v) in self._filterAndSorted("qty_%d_" % component.id, kw)] new_ids, delete_ids, update_ids = self._split(old_ids, new_ids) #handle delete for id in delete_ids : dbobj = DBSession.query(BBYCasepackDetail).get(id) dbobj.active = 1 log.append("Delete detail whose 'Required Ready Date' is '%s'." % Date2Text(dbobj.required_date)) #handle new for id in new_ids: qty = _f("qty_%d_%s" % (component.id, id)) required_date = _f("required_date_%d_%s" % (component.id, id)) ship_to_id = _f("ship_to_id_%d_%s" % (component.id, id)) attention = _f("attention_%d_%s" % (component.id, id)) remark = _f("remark_%d_%s" % (component.id, id)) if not any([qty, required_date, ship_to_id, attention, remark]) : continue DBSession.add(BBYCasepackDetail( component = component, qty = qty, required_date = required_date, ship_to_id = ship_to_id, attention = attention, remark = remark, )) log.append("Add new detail whose 'Required Ready Date' is '%s'." 
% required_date) #handle update for id in update_ids: d = DBSession.query(BBYCasepackDetail).get(id) d.qty = _f("qty_%d_%s" % (component.id, id)) d.required_date = _f("required_date_%d_%s" % (component.id, id)) d.ship_to_id = _f("ship_to_id_%d_%s" % (component.id, id)) d.attention = _f("attention_%d_%s" % (component.id, id)) d.remark = _f("remark_%d_%s" % (component.id, id)) if log : DBSession.add(BBYLog(job_id = h.id, action_type = "UPDATE", remark = "[Casepack] " + " ".join(log))) flash("Save the update successfully!") except Exception, e: log.exception(str(e)) transaction.doom() flash("The service is not avaiable now,please try it laster!")
def _check_duplicate_item(self, folder, name):
    """Return True (and doom the transaction) if *name* exists in *folder*."""
    if name not in folder:
        return False
    # A volume with this name already exists: abort and flag the form.
    transaction.doom()
    self.errors = (DuplicateVolumeWarning(),)
    self.status = _('There were errors')
    self.form_reset = False
    return True
def out_of_uow_flush(self, entity=None):
    """Add and flush *entity* outside the unit of work.

    Integrity violations doom the transaction and are re-raised as
    DalIntegrityError; on success the entity is returned.
    """
    DBSession.add(entity)
    try:
        DBSession.flush()
    except IntegrityError:
        # Make sure the broken transaction cannot be committed later.
        transaction.doom()
        raise DalIntegrityError
    else:
        return entity
def update(self):
    """Render the result step without persisting any of its changes."""
    if self._completeable():
        # Otherwise the user has clicked on this step in navigation before
        # entering data.
        self._update_persons()
    # Make sure that changes are not yet persisted:
    transaction.doom()
    super(Result, self).update()
def transactional_wrap(self):
    """Call the wrapped method with a session, dooming on a 500 response."""
    session = DBSESSION()
    result = method(self, session=session)
    # When an error response is returned rollback transaction
    is_response = isinstance(result, response.Response)
    if is_response and result.status_int == 500:
        transaction.doom()
    return result
def ask_before_adding_author_twice(self, author):
    """Doom and request confirmation when *author* already exists.

    Returns True when a confirmation round-trip is needed, False when
    the add may proceed (new author, or duplicate already confirmed).
    """
    needs_confirmation = author.exists and not self.confirmed_duplicate
    if not needs_confirmation:
        return False
    # Block this submit: show the warning and a confirmation checkbox.
    transaction.doom()
    self.need_confirmation_checkbox = True
    self.errors = (DuplicateAuthorWarning(),)
    self.status = _('There were errors')
    self.form_reset = False
    return True
def dump_schemas():
    """Dump JSON Schemas of common GEVER content types to the filesystem.

    The dumps are JSON Schema representations of the schemas and their
    fields; the writer's result is returned to the caller.
    """
    # This is a read-only operation: doom up front so nothing can be
    # committed as a side effect of the dump.
    transaction.doom()
    return JSONSchemaDumpWriter().dump()
def moveContent(self, add_to, object_path):
    """Move the object at *object_path* into the container named by *add_to*.

    A rejected move (``ValueError``) dooms the transaction; the rendered
    view is returned either way.
    """
    target = self.getAddContext(add_to)
    traverser = zope.traversing.interfaces.ITraverser(self.context)
    moved = traverser.traverse(object_path)
    try:
        self.context.moveObject(
            moved, target, insert=self.expanded(target))
    except ValueError:
        # The move was refused: make sure no half-done state is committed.
        transaction.doom()
    return self()
def _register_key(self, key, count=None):
    """Register *key* with the reference manager.

    *count* defaults to this instance's ensured usage count.  An
    ``IntegrityError`` from the manager dooms the transaction and
    propagates to the caller.
    """
    # Presumably validates that the key resolves to a target -- TODO
    # confirm against gocept.reference.reference.lookup's contract.
    gocept.reference.reference.lookup(key)
    usage = self._ensured_usage_count if count is None else count
    manager = gocept.reference.reference.get_manager()
    try:
        manager.register_reference(key, usage)
    except gocept.reference.interfaces.IntegrityError:
        # _register is called after data structures have been changed, so
        # the only safe reaction is to doom the whole transaction.
        transaction.doom()
        raise
def ask_before_adding_author_twice(self, author):
    """Return True when the form must ask the user to confirm a duplicate.

    Dooms the transaction and marks the form with a warning in that case.
    Skipped entirely when the hdok author lookup feature is active, when
    the duplicate was already confirmed, or when no matching author exists.
    """
    if FEATURE_TOGGLES.find('author_lookup_in_hdok'):
        return False
    if self.confirmed_duplicate:
        return False
    if not author.exists(author.firstname, author.lastname):
        return False
    # Duplicate detected and not yet confirmed: block the commit and
    # present the confirmation checkbox.
    transaction.doom()
    self.need_confirmation_checkbox = True
    self.errors = (DuplicateAuthorWarning(),)
    self.status = _('There were errors')
    self.form_reset = False
    return True
def _handle_conflict(self):
    """Handle an add conflict: doom, then either raise or render an error view.

    When the request carries ``redirect=false`` a ``ConflictError`` is
    raised; otherwise the 'checkin-conflict-error' view is rendered.
    """
    transaction.doom()
    wants_redirect = self.request.form.get('redirect', '').lower() != 'false'
    if not wants_redirect:
        raise zeit.cms.repository.interfaces.ConflictError(
            self.context.uniqueId,
            _('There was a conflict while adding ${name}',
              mapping=dict(name=self.context.uniqueId)))
    error_view = zope.component.getMultiAdapter(
        (self.context, self.request),
        zope.browser.interfaces.IBrowserView,
        name='checkin-conflict-error')
    return error_view()
def create_session(context, request):
    """Create a kaltura session.

    Dooms the transaction when the Kaltura backend reports an error.
    NOTE(review): the parsed result is only returned on the error path in
    the original layout reconstruction; on success the function falls
    through -- confirm intended return value against the caller.
    """
    template_api = TemplateAPI(context, request)
    info = template_api.kaltura_info
    config = KalturaConfiguration(info['partner_id'],
                                  info['sub_partner_id'])
    kaltura_client = KalturaClient(config)
    session_user = KalturaSessionUser(info['local_user'])
    raw_session = kaltura_client.startSession(
        session_user, info['admin_secret'], admin=2)
    parsed = xmltodict(raw_session)
    if parsed['error']:
        # Do not commit anything when the backend reported an error.
        transaction.doom()
        return parsed
def test_doom(self):
    """A doomed-then-aborted transaction must never reach the mailer."""
    import transaction
    stub_mailer = _makeMailerStub()
    delivery = DirectMailDelivery(stub_mailer)
    sender, recipients = fromaddr_toaddrs()
    msg = sample_message()
    message_id = delivery.send(sender, recipients, msg)
    self.assertEqual(message_id, '<*****@*****.**>')
    # Nothing is sent before the transaction commits.
    self.assertEqual(stub_mailer.sent_messages, [])
    transaction.doom()
    transaction.abort()
    # A subsequent commit (of the fresh transaction) still sends nothing.
    transaction.commit()
    self.assertEqual(stub_mailer.sent_messages, [])
def _register(self, instance):
    """Register *instance*'s reference target with the reference manager."""
    if not self.needs_registration(instance):
        return
    key = get_storage(instance).get(self.__name__)
    if key is None:
        # The reference source is being put under integrity ensurance,
        # but this particular reference was never assigned a target.
        return
    try:
        lookup(key)
    except gocept.reference.interfaces.LookupError:
        # _register runs after data structures have been changed, so a
        # failing lookup must doom the transaction before propagating.
        transaction.doom()
        raise
    get_manager().register_reference(key)
def handle_database_integrity_errors(exc, request):
    """Generic error handler for database IntegrityErrors.

    The exception message (with its '(IntegrityError)' marker stripped)
    is included in the response details.
    """
    # Mark the current transaction to be aborted at the end of the request.
    transaction.doom()
    # Clean the driver noise out of the exception message.
    cleaned = exc.message.replace('(IntegrityError)', '').strip()
    data = {
        'code': 'DATA_INTEGRITY_ERROR',
        'details': cleaned,
    }
    return error_response(error.CODES.get(data['code']), data=data)
def ClassFactory(jar, module, name, _silly=('__doc__', ), _globals={}):
    """Resolve attribute *name* from *module*, creating a stub on failure.

    Used as a ZODB class factory: when the real module cannot be imported
    (or lacks the attribute), a replacement module/object is synthesized
    via ``create_module`` so persistent objects can still load, and the
    transaction is doomed (or aborted on old ``transaction`` packages) so
    the stand-in is never committed.  ``jar`` is unused but part of the
    factory signature.  ``_globals={}`` is kept for interface
    compatibility; it is only passed through, never mutated here.
    """
    try:
        m = __import__(module, _globals, _globals, _silly)
        return getattr(m, name)
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception is the widest sensible net here.
        # Lazy logging args instead of eager '%' formatting.
        logger.info('%s', module)
    # Import (or attribute lookup) failed: create the stand-in modules.
    realmodule, obj = create_module(module, name, _globals=_globals,
                                    _silly=_silly)
    # Don't want to save this object: doom if supported, else abort.
    import transaction
    try:
        transaction.doom()
    except AttributeError:
        transaction.abort()
    return obj
def put_member(self):
    """Update current object data.

    Runs the submitted member data through the object's update query.
    ``colander.Invalid`` propagates to the caller; any other failure is
    logged, dooms the transaction and yields an error response.  Returns
    the updated object, or an error response when nothing was updated.
    """
    query = self.object.query()
    try:
        count = query.update(self.submitted_member_data)
    except colander.Invalid:
        # Validation errors are handled by the framework layer above.
        raise
    except Exception:
        # Was a bare ``except:``, which also caught SystemExit and
        # KeyboardInterrupt; Exception keeps the intended catch-all
        # behaviour without swallowing interpreter-control exceptions.
        LOG.exception('Error updating object during PUT request')
        transaction.doom()
        return error_response(_("Object update failed"))
    if not count:
        return error_response(_("No object was updated"))
    return self.object
def applyChanges(self, data):
    """Apply form changes; warn when the user edits his own principal.

    Wraps the superclass ``applyChanges``: a ``ValueError`` from it dooms
    the transaction and is re-raised as a form action execution error.
    When the logged-in user changed his own login name or password, a
    flash message asks him to re-login.  Returns the changes mapping.
    """
    current_principal_id = (
        self.request.interaction.participations[0].principal.id)
    auth = zope.component.getUtility(
        zope.authentication.interfaces.IAuthentication)
    # Look up the current principal's login name; exactly one
    # authenticator plugin is expected to answer.
    current_login_names = [
        getattr(plugin.principalInfo(current_principal_id), 'login', None)
        for key, plugin in auth.getAuthenticatorPlugins()
    ]
    assert len(current_login_names) == 1
    # Removed the dead ``current_login_name = None`` pre-assignment; the
    # name is unconditionally bound right here.
    current_login_name = current_login_names[0]
    old_login_name = self.context.login
    editing_own_data = (old_login_name == current_login_name)
    try:
        changes = super(EditForm, self).applyChanges(data)
    except ValueError as e:
        # Invalid data must never be committed.
        transaction.doom()
        raise z3c.form.interfaces.ActionExecutionError(
            zope.interface.Invalid(_(e.args[0])))
    if not editing_own_data:
        # User is not editing his own principal's data:
        return changes
    changed_field_names = []
    changed_field_names.extend(
        changes.get(icemac.addressbook.principals.interfaces.IPrincipal, []))
    changed_field_names.extend(
        changes.get(
            icemac.addressbook.principals.interfaces.IPasswordFields, []))
    if 'login' in changed_field_names:
        self.send_flash(_('You changed the login name, please re-login.'))
    if ('password' in changed_field_names
            and 'password_repetition' in changed_field_names
            and data['password']):
        self.send_flash(_('You changed the password, please re-login.'))
    return changes
def surgery_command(portal_catalog, args, formatter):
    """Run a catalog healthcheck and perform surgeries when unhealthy.

    Commits only when surgery succeeded, the post-surgery healthcheck is
    clean, and ``--dryrun`` is not active.  In every other outcome the
    transaction is doomed (sometimes redundantly, as belt-and-braces
    against an accidental commit elsewhere in the process).
    """
    if args.dryrun:
        formatter.info('Performing dryrun!')
        formatter.info('')
        # A doomed transaction can never be committed, guaranteeing the
        # dryrun has no persistent effect.
        transaction.doom()

    result = _run_healthcheck(portal_catalog, formatter)
    if result.is_healthy():
        transaction.doom()  # extra paranoia, prevent erroneous commit
        formatter.info('Catalog is healthy, no surgery is needed.')
        return

    formatter.info('Performing surgery:')
    scheduler = SurgeryScheduler(result, catalog=portal_catalog)
    scheduler.perform_surgeries()
    scheduler.write_result(formatter)
    if not scheduler.is_successful():
        # Surgery failed; nothing is committed (implicit abort upstream).
        return

    # Flush the indexing queue before re-checking health.
    processQueue()
    formatter.info('Performing post-surgery healthcheck:')
    post_result = _run_healthcheck(portal_catalog, formatter)
    if not post_result.is_healthy():
        transaction.doom()  # extra paranoia, prevent erroneous commit
        formatter.info('Not all health problems could be fixed, aborting.')
        return

    if args.dryrun:
        formatter.info('Surgery would have been successful, but was aborted '
                       'due to dryrun!')
    else:
        transaction.commit()
        formatter.info('Surgery was successful, known health problems could '
                       'be fixed!')
def doomIt(event):
    """Doom the transaction if needed.

    The transaction is doomed when lockdown is enabled and none of the
    activated commit conditions are met.

    See: https://zodb.readthedocs.io/en/latest/transactions.html#dooming-a-transaction
    """
    request = event.request
    published = request.PARENTS[0]
    # Prefer the meta_type of the acquisition-unwrapped object, falling
    # back to the wrapped one (same eager-fallback semantics as before).
    unwrapped = getattr(published, 'aq_base', None)
    fallback_meta_type = getattr(published, 'meta_type', None)
    meta_type = getattr(unwrapped, 'meta_type', fallback_meta_type)
    if meta_type in _blacklisted_meta_types or not ILayer.providedBy(request):
        return
    if not _get_setting('enabled', False):
        # Lockdown is switched off entirely.
        return
    status_message = (_get_setting('status_message', None) or u'').strip()
    if status_message and not api.user.is_anonymous():
        api.portal.show_message(status_message, request=request, type='warn')
    # Check whether one of the activated conditions allows this commit.
    activated = _get_setting('activated', set())
    try:
        if CommitChecker(request, activated).can_commit():
            return
    except Exception:
        # If there is any error, ignore and doom -- better to be safe.
        logger.warn('Error checking conditions, dooming the '
                    'transaction: {}'.format(traceback.format_exc()))
    transaction.doom()