def getCriterionList(self, **kw): """ Returns the list of criteria which are defined by the Predicate. Each criterion is returned in a TempBase instance intended to be displayed in a ListBox. XXX - It would be better to return criteria in a Criterion class instance """ # We do not create PersistentMappings first time we *see* Predicate_view. # Instead, we create them first time we modify Predicate document. if not self.getCriterionPropertyList(): return [] if getattr(aq_base(self), '_identity_criterion', None) is None: self._identity_criterion = PersistentMapping() self._range_criterion = PersistentMapping() criterion_dict = {} for p in self.getCriterionPropertyList(): criterion_dict[p] = newTempBase(self, 'new_%s' % p) criterion_dict[p].identity = self._identity_criterion.get(p, None) criterion_dict[p].uid = 'new_%s' % p criterion_dict[p].property = p criterion_dict[p].min = self._range_criterion.get(p, (None, None))[0] criterion_dict[p].max = self._range_criterion.get(p, (None, None))[1] criterion_list = criterion_dict.values() criterion_list.sort() return criterion_list
def edit(self, **kwd):
  """The edit method is overriden so that any time a
  criterion_property_list property is defined, a list of criteria is
  created to match the provided criterion_property_list.
  """
  # Lazily create the persistent criterion storages.
  if getattr(aq_base(self), '_identity_criterion', None) is None:
    self._identity_criterion = PersistentMapping()
    self._range_criterion = PersistentMapping()
  if 'criterion_property_list' in kwd:
    criterion_property_list = kwd['criterion_property_list']
    identity_criterion = PersistentMapping()
    range_criterion = PersistentMapping()
    # Keep only the criteria whose property is still in the new
    # criterion_property_list; the rest is dropped.
    for criterion in self._identity_criterion.iterkeys():
      if criterion in criterion_property_list:
        identity_criterion[criterion] = self._identity_criterion[criterion]
    for criterion in self._range_criterion.iterkeys():
      if criterion in criterion_property_list:
        range_criterion[criterion] = self._range_criterion[criterion]
    self._identity_criterion = identity_criterion
    self._range_criterion = range_criterion
  kwd['reindex_object'] = 1
  return self._edit(**kwd)
def setCriterion(self, property, identity=None, min=None, max=None, **kw): """ This methods sets parameters of a criterion. There is at most one criterion per property. Defined parameters are identity -- if not None, allows for testing identity of the property with the provided value min -- if not None, allows for testing that the property is greater than min max -- if not None, allows for testing that the property is greater than max """ # XXX 'min' and 'max' are built-in functions. if getattr(aq_base(self), '_identity_criterion', None) is None: self._identity_criterion = PersistentMapping() self._range_criterion = PersistentMapping() if identity is not None : self._identity_criterion[property] = identity if min == '': min = None if max == '': max = None if min is None and max is None: try: del self._range_criterion[property] except KeyError: pass else: self._range_criterion[property] = (min, max) self.reindexObject()
def __init__(self, id, title='', pdf_file=''):
  """Initialise the PDF form: cell mappings plus the File content."""
  # Information about every known cell, including cells that are not
  # related to this form.
  self.all_cells = PersistentMapping()
  # Only the cells that belong to this pdf form.
  self.cells = PersistentMapping()
  # Delegate to the File constructor, which sets the file content.
  File.__init__(self, id, title, pdf_file)
def _setCellRange(self, *args, **kw):
  """Set a new range for a matrix

  Each value for each axis is assigned an integer id.
  If the number of axis changes, everything is reset.
  Otherwise, ids are never changed, so that cells never need to be
  renamed: this means no sort is garanteed, and there can be holes.
  """
  base_id = kw.get('base_id', 'cell')
  # Get (initialize if necessary) index for considered matrix (base_id).
  try:
    index = aq_base(self).index
  except AttributeError:
    index = self.index = PersistentMapping()
  to_delete = []
  try:
    index = index[base_id]
    if len(args) != len(index):
      # The number of axis changes so we'll delete all existing cells and
      # renumber everything from 1.
      # NOTE: index.clear() returns None, so to_delete becomes the tuple
      # (INFINITE_SET, None).  Because the index was just cleared, every
      # axis below takes the KeyError branch, so 'append' is never called
      # on this tuple; and the deletion loop only ever reads
      # to_delete[0] because INFINITE_SET matches any id and breaks.
      to_delete = INFINITE_SET, index.clear()
  except KeyError:
    index[base_id] = index = PersistentMapping()
  # For each axis ...
  for i, axis in enumerate(args):
    # ... collect old axis keys and allocate ids for new ones.
    axis = set(axis)
    last_id = -1
    try:
      id_dict = index[i]
    except KeyError:
      index[i] = id_dict = PersistentMapping()
    else:
      delete = set()
      to_delete.append(delete)
      for k, v in id_dict.items():
        try:
          axis.remove(k)
          if last_id < v:
            last_id = v
        except KeyError:
          # Key disappeared from this axis: remember its id so that the
          # corresponding cells can be removed below.
          delete.add(v)
          del id_dict[k]
    # At this point, last_id contains the greatest id.
    for k in sorted(axis):
      last_id += 1
      id_dict[k] = last_id
  # Remove old cells if any.
  if any(to_delete):
    prefix = base_id + '_'
    prefix_len = len(prefix)
    for cell_id in list(self.objectIds()):
      if cell_id.startswith(prefix):
        for i, j in enumerate(cell_id[prefix_len:].split('_')):
          if int(j) in to_delete[i]:
            self._delObject(cell_id)
            break
def setPredicateCategoryList(self, category_list):
  """This method updates a Predicate by implementing an AND operation
  on all predicates (or categories) provided in category_list.
  Categories behave as a special kind of predicate which only acts on
  category membership.

  WARNING: this method does not take into account scripts at this point.
  """
  category_tool = aq_inner(self.portal_categories)
  base_category_id_list = category_tool.objectIds()
  membership_criterion_category_list = []
  membership_criterion_base_category_list = []
  multimembership_criterion_base_category_list = []
  test_method_id_list = []
  criterion_property_list = []
  # reset criterions
  self._identity_criterion = PersistentMapping()
  self._range_criterion = PersistentMapping()
  for c in category_list:
    bc = c.split('/')[0]
    if bc in base_category_id_list:
      # This is a category
      membership_criterion_category_list.append(c)
      membership_criterion_base_category_list.append(bc)
    else:
      # Not a base category: try to resolve it as a predicate and merge
      # all its criteria into this one.
      predicate_value = category_tool.resolveCategory(c)
      if predicate_value is not None:
        criterion_property_list.extend(
          predicate_value.getCriterionPropertyList())
        membership_criterion_category_list.extend(
          predicate_value.getMembershipCriterionCategoryList())
        membership_criterion_base_category_list.extend(
          predicate_value.getMembershipCriterionBaseCategoryList())
        multimembership_criterion_base_category_list.extend(
          predicate_value.getMultimembershipCriterionBaseCategoryList())
        test_method_id_list += list(
          predicate_value.getTestMethodIdList() or [])
        for p in predicate_value.getCriterionList():
          self.setCriterion(p.property, identity=p.identity, min=p.min,
                            max=p.max)
  self.setCriterionPropertyList(criterion_property_list)
  self._setMembershipCriterionCategoryList(
    membership_criterion_category_list)
  self._setMembershipCriterionBaseCategoryList(
    membership_criterion_base_category_list)
  self._setMultimembershipCriterionBaseCategoryList(
    multimembership_criterion_base_category_list)
  self._setTestMethodIdList(test_method_id_list)
  self.reindexObject()
def AddNewLocalVariableDict(self):
  """Create an empty Local Variable container for a Data Notebook.

  The returned mapping has two persistent sub-mappings:
  'variables' and 'imports'.
  """
  container = PersistentMapping()
  container['variables'] = PersistentMapping()
  container['imports'] = PersistentMapping()
  return container
def generateNewId(self, id_group=None, default=None, method=_marker,
                  id_generator=None):
  """Generate the next id in the sequence of ids of a particular group"""
  if id_group in (None, 'None'):
    raise ValueError, '%s is not a valid id_group' % (repr(id_group), )
  # for compatibilty with sql data, must not use id_group as a list
  if not isinstance(id_group, str):
    id_group = repr(id_group)
    warnings.warn('id_group must be a string, other types '
                  'are deprecated.', DeprecationWarning)
  if id_generator is None:
    id_generator = 'document'
  if method is not _marker:
    warnings.warn("Use of 'method' argument is deprecated",
                  DeprecationWarning)
  try:
    # use _getLatestGeneratorValue here for that the technical level
    # must not call the method
    last_generator = self._getLatestGeneratorValue(id_generator)
    new_id = last_generator.generateNewId(id_group=id_group, \
                                          default=default)
  except KeyError:
    # XXX backward compatiblity
    if self.getTypeInfo():
      LOG('generateNewId', ERROR, 'while generating id')
      raise
    else:
      # Compatibility code below, in case the last version of erp5_core
      # is not installed yet
      warnings.warn("You are using an old version of erp5_core to generate"
                    "ids.\nPlease update erp5_core business template to "
                    "use new id generators", DeprecationWarning)
      dict_ids = getattr(aq_base(self), 'dict_ids', None)
      if dict_ids is None:
        dict_ids = self.dict_ids = PersistentMapping()
      new_id = None
      # Getting the last id
      if default is None:
        default = 0
      # 'marker' distinguishes "no id yet for this group" from any stored
      # value (including 0).
      marker = []
      new_id = dict_ids.get(id_group, marker)
      if method is _marker:
        if new_id is marker:
          new_id = default
        else:
          new_id = new_id + 1
      else:
        if new_id is marker:
          new_id = default
        new_id = method(new_id)
      # Store the new value
      dict_ids[id_group] = new_id
  return new_id
def newObject(self, object=None, xml=None, simulate=False, # pylint: disable=redefined-builtin
              reset_local_roles=True, reset_workflow=True):
  """modify the object with datas from the xml (action section)"""
  args = {}
  if simulate:
    return
  # Retrieve the list of users with a role and delete default roles
  if reset_local_roles:
    user_role_list = [x[0] for x in object.get_local_roles()]
    object.manage_delLocalRoles(user_role_list)
  if getattr(object, 'workflow_history', None) is not None and reset_workflow:
    object.workflow_history = PersistentMapping()
  if xml.prefix == 'xupdate':
    xml = xml[0]
  for subnode in xml.xpath('*'):
    # get only Element nodes (not Comments or Processing instructions)
    if subnode.xpath('name()') not in NOT_EDITABLE_PROPERTY_LIST:
      keyword_type = self.getPropertyType(subnode)
      # This is the case where the property is a list
      keyword = subnode.xpath('name()')
      args[keyword] = self.convertXmlValue(subnode, keyword_type)
    elif subnode.xpath('local-name()') in ADDABLE_PROPERTY_LIST\
        + (XML_OBJECT_TAG,):
      self.addNode(object=object, xml=subnode, force=True)
  # We should first edit the object
  args = self.getFormatedArgs(args=args)
  # edit the object with a dictionnary of arguments,
  # like {"telephone_number":"02-5648"}
  self.editDocument(object=object, **args)
  if getattr(object, 'manage_afterEdit', None) is not None:
    object.manage_afterEdit()
  self.afterNewObject(object)
def __init__(self, *args, **kw):
  """Set up OAuth test fixtures: signature methods, consumer, tokens."""
  # Registry of supported signature methods, keyed by method name.
  self.signature_methods = PersistentMapping()
  for signature_method in (OAuthSignatureMethod_PLAINTEXT(),
                           OAuthSignatureMethod_HMAC_SHA1()):
    self.add_signature_method(signature_method)
  # Static test consumer and token pair.
  self.consumer = OAuthConsumer(CONSUMER_KEY, CONSUMER_SECRET)
  self.my_request_token = OAuthToken('requestkey', 'requestsecret')
  self.my_access_token = OAuthToken('accesskey', 'accesssecret')
  self.nonce = 'nonce'
  self.verifier = VERIFIER
def setLastGeneratedId(self, new_id, id_group=None):
  """Force the last id of a group.

  This is usefull in order to reset a sequence of ids.
  """
  # Make sure the persistent storage exists even when id_group is not
  # usable (same side effect as before).
  if getattr(aq_base(self), 'dict_ids', None) is None:
    self.dict_ids = PersistentMapping()
  # Ignore missing or bogus string 'None' groups.
  if id_group is None or id_group == 'None':
    return
  self.dict_ids[id_group] = new_id
def _setEncodedPassword(
    self,
    value,
    format='default', # pylint: disable=redefined-builtin
  ):
  """Store an encoded password under the given format key."""
  current = getattr(aq_base(self), 'password', None)
  # Upgrade a missing or legacy single-string password attribute to a
  # mapping keyed by encoding format.
  if current is None or isinstance(current, basestring):
    self.password = PersistentMapping()
  self.password[format] = value
def getDictLengthIdsItems(self):
  """Return a copy of dict_length_ids.

  This is a workaround to access the persistent mapping content from a
  ZSQL method, so that initial tuples can be inserted in the database
  at creation.
  """
  dict_length_ids = getattr(self, 'dict_length_ids', None)
  if dict_length_ids is None:
    # First access: create the persistent storage.
    dict_length_ids = self.dict_length_ids = PersistentMapping()
  return dict_length_ids.items()
def _getResponseHeaderRuleDictForModification(self): """ Retrieve persistent rule dict storage. Use only when a modification is requested, to avoid creating useless subobjects. """ try: return self._response_header_rule_dict except AttributeError: self._response_header_rule_dict = rule_dict = PersistentMapping() return rule_dict
def getLastGeneratedId(self, id_group=None, default=None):
  """Get the last id generated for the given group."""
  warnings.warn('getLastGeneratedId is deprecated', DeprecationWarning)
  # Keep the historical side effect of creating the storage on first use.
  if getattr(aq_base(self), 'dict_ids', None) is None:
    self.dict_ids = PersistentMapping()
  # Missing or bogus string 'None' group: nothing to look up.
  if id_group is None or id_group == 'None':
    return None
  return self.dict_ids.get(id_group, default)
def addVariable(self, id, text, REQUEST=None):
  """Add a variable expression."""
  if self.var_exprs is None:
    self.var_exprs = PersistentMapping()
  # An empty text means "no expression" for this variable.
  expr = Expression(str(text)) if text else None
  self.var_exprs[id] = expr
  if REQUEST is not None:
    return self.manage_variables(REQUEST, 'Variable added.')
def _checkDataStructureMigration(self, id_generator):
  """First, simulate previous data structure which is using
  PersistentMapping as the storage, then migrate to OOBTree.
  Then, migrate the id generator again from OOBTree to OOBTree
  just to be sure.
  """
  id_generator_reference = id_generator.getReference()
  reference_portal_type_dict = {
    'test_sql_non_continuous_increasing': 'SQL Non Continuous ' \
                                          'Increasing Id Generator',
    'test_zodb_continuous_increasing': 'ZODB Continuous ' \
                                       'Increasing Id Generator'}
  # BUGFIX: only a missing reference should raise ValueError.  The
  # original bare 'except:' also swallowed assertion failures from the
  # portal type check and reported them as an invalid reference.
  try:
    portal_type = reference_portal_type_dict[id_generator_reference]
  except KeyError:
    raise ValueError("reference is not valid: %s" % id_generator_reference)
  self.assertEqual(id_generator.getPortalType(), portal_type)
  self._setLastIdDict(id_generator, PersistentMapping()) # simulate previous
  last_id_dict = self._getLastIdDict(id_generator)
  # setUp the data for migration test
  self._setUpLastMaxIdDict(id_generator_reference)
  # test migration: PersistentMapping to OOBTree
  self.assertTrue(isinstance(last_id_dict, PersistentMapping))
  self._assertIdGeneratorLastMaxIdDict(id_generator)
  id_generator.rebuildGeneratorIdDict() # migrate the dict
  self._assertIdGeneratorLastMaxIdDict(id_generator)
  # test migration: OOBTree to OOBTree.
  # This changes nothing, just to be sure.
  last_id_dict = self._getLastIdDict(id_generator)
  self.assertTrue(isinstance(last_id_dict, OOBTree))
  self._assertIdGeneratorLastMaxIdDict(id_generator)
  id_generator.rebuildGeneratorIdDict() # migrate the dict
  self._assertIdGeneratorLastMaxIdDict(id_generator)
  # test migration: SQL to OOBTree
  if id_generator.getPortalType() == \
      'SQL Non Continuous Increasing Id Generator':
    self._setLastIdDict(id_generator, OOBTree()) # set empty one
    last_id_dict = self._getLastIdDict(id_generator)
    # BUGFIX: the original 'assert(len(last_id_dict), 0)' asserted a
    # non-empty 2-tuple, which is always true and never checked anything.
    self.assertEqual(len(last_id_dict), 0) # 0 because it is empty
    self.assertTrue(isinstance(last_id_dict, OOBTree))
    # migrate the dict totally from sql table in this case
    id_generator.rebuildGeneratorIdDict()
    self._assertIdGeneratorLastMaxIdDict(id_generator)
def setVariables(self, ids=(), REQUEST=None):
  """Set values for Variables set by this state.

  'ids' is unused; it is kept only for signature compatibility.
  BUGFIX: the default was the mutable '[]' (shared across calls); it is
  now an immutable tuple, which is backward-compatible since the value
  is never read.
  """
  if self.var_exprs is None:
    self.var_exprs = PersistentMapping()
  ve = self.var_exprs
  if REQUEST is not None:
    for id in ve.keys():
      fname = 'varexpr_%s' % id
      # A missing form field raises KeyError, as before.
      val = REQUEST[fname]
      # An empty value clears the expression for this variable.
      expr = Expression(str(val)) if val else None
      ve[id] = expr
    return self.manage_variables(REQUEST, 'Variables changed.')
def WorkflowTool_setStatusOf(self, wf_id, ob, status):
  """Append an entry to the workflow history.

  o Invoked by workflow definitions.
  """
  wfh = None
  has_history = 0
  if getattr(aq_base(ob), 'workflow_history', None) is not None:
    history = ob.workflow_history
    if history is not None:
      has_history = 1
      wfh = history.get(wf_id, None)
      # Migrate a legacy (tuple/list) history to WorkflowHistoryList
      # in place.
      if wfh is not None and not isinstance(wfh, WorkflowHistoryList):
        wfh = WorkflowHistoryList(list(wfh))
        ob.workflow_history[wf_id] = wfh
  if wfh is None:
    wfh = WorkflowHistoryList()
    if not has_history:
      ob.workflow_history = PersistentMapping()
    ob.workflow_history[wf_id] = wfh
  wfh.append(status)
def _updateWorkflowHistory(self, document, status_dict): """ Change the state of the object. """ # Create history attributes if needed if getattr(aq_base(document), 'workflow_history', None) is None: document.workflow_history = PersistentMapping() # XXX this _p_changed is apparently not necessary document._p_changed = 1 # Add an entry for the workflow in the history workflow_key = self._generateHistoryKey() if not document.workflow_history.has_key(workflow_key): document.workflow_history[workflow_key] = () # Update history document.workflow_history[workflow_key] += (status_dict, ) # XXX this _p_changed marks the document modified, but the # only the PersistentMapping is modified document._p_changed = 1 # XXX this _p_changed is apparently not necessary document.workflow_history._p_changed = 1
def setTranslationDomain(self, prop_name, domain):
  """Set a translation domain for given property."""
  try:
    property_domain_dict = aq_base(self)._property_domain_dict
  except AttributeError:
    # First use: lazily create the storage.
    self._property_domain_dict = property_domain_dict = PersistentMapping()
  else:
    # BBB: If domain dict is not a stand-alone peristent object, changes made
    # to it won't be persistently stored. It used to work because the whole
    # dict was replaced, hence triggering a change on self. But this creates
    # an inconvenient API. For the sake of keeping BT diffs quiet, don't cast
    # that dict into a PersistentMapping.
    if not isinstance(property_domain_dict, Persistent):
      self._p_changed = 1
  property_domain_dict[prop_name] = TranslationInformation(prop_name, domain)
  # Reset accessor cache
  self.getPortalObject().portal_types.\
    resetDynamicDocumentsOnceAtTransactionBoundary()
def recordProperty(self, id):
  """Records the current value of a property.

  id -- ID of the property
  """
  for property_info in self.getPropertyMap():
    if property_info['id'] == id:
      # List-typed properties must be read with the list accessor.
      if property_info['type'] in list_types:
        value = self.getPropertyList(id)
      else:
        value = self.getProperty(id)
      break
  else:
    # Not in the property map: base categories are multi-valued.
    if id in self.getBaseCategoryList():
      value = self.getPropertyList(id)
    else:
      # should be local property
      value = self.getProperty(id)
  try:
    self._getRecordedPropertyDict()[id] = value
  except AttributeError:
    # No record storage yet: create it with this first entry.
    self._recorded_property_dict = PersistentMapping({id: value})
def setStatusOf(self, wf_id, ob, status):
  """Append an entry to the workflow history.

  o Invoked by workflow definitions.
  """
  from Products.ERP5Type.Workflow import WorkflowHistoryList as NewWorkflowHistoryList
  wfh = None
  has_history = 0
  if getattr(aq_base(ob), 'workflow_history', None) is not None:
    history = ob.workflow_history
    if history is not None:
      has_history = 1
      wfh = history.get(wf_id, None)
      # Migrate a legacy history container to NewWorkflowHistoryList
      # in place.
      if wfh is not None and not isinstance(wfh, NewWorkflowHistoryList):
        wfh = NewWorkflowHistoryList(wfh)
        ob.workflow_history[wf_id] = wfh
  if wfh is None:
    wfh = NewWorkflowHistoryList()
    if not has_history:
      ob.workflow_history = PersistentMapping()
    ob.workflow_history[wf_id] = wfh
  wfh.append(status)
def dumpDictLengthIdsItems(self):
  """Store persistently data from SQL table portal_ids."""
  portal_catalog = getattr(self, 'portal_catalog').getSQLCatalog()
  query = getattr(portal_catalog, 'z_portal_ids_dump')
  dict_length_ids = getattr(aq_base(self), 'dict_length_ids', None)
  if dict_length_ids is None:
    dict_length_ids = self.dict_length_ids = PersistentMapping()
  for row in query().dictionaries():
    group = row['id_group']
    sql_last_id = row['last_id']
    length = dict_length_ids.get(group)
    if length is None:
      # New group: start a fresh Length counter at the SQL value.
      dict_length_ids[group] = Length(sql_last_id)
      continue
    zodb_value = length()
    if zodb_value < sql_last_id:
      # SQL is ahead: catch the ZODB counter up.
      length.set(sql_last_id)
    elif zodb_value > sql_last_id:
      LOG('IdTool', WARNING, 'ZODB value (%r) for group %r is higher ' \
          'than SQL value (%r). Keeping ZODB value untouched.' % \
          (length, group, sql_last_id))
def test(self, context, tested_base_category_list=None,
         strict_membership=0, isMemberOf=None, **kw):
  """A Predicate can be tested on a given context.

  Parameters can passed in order to ignore some conditions.

  - tested_base_category_list: this is the list of category that we do
    want to test. For example, we might want to test only the
    destination or the source of a predicate.
  - if strict_membership is specified, we should make sure that we are
    strictly a member of tested categories
  - isMemberOf can be a function caching results for
    CategoryTool.isMemberOf: it is always called with given 'context'
    and 'strict_membership' values, and different categories.
  """
  self = self.asPredicate()
  if self is None:
    # asPredicate returned None, so this predicate never applies.
    # But if we reach this it is because catalog is not up to date.
    return False
  result = 1
  if getattr(aq_base(self), '_identity_criterion', None) is None:
    self._identity_criterion = PersistentMapping()
    self._range_criterion = PersistentMapping()
#    LOG('PREDICATE TEST', 0,
#        'testing %s on context of %s' % \
#        (self.getRelativeUrl(), context.getRelativeUrl()))
  # Identity criteria: a stored list/tuple means "one of these values".
  for property, value in self._identity_criterion.iteritems():
    if isinstance(value, (list, tuple)):
      result = context.getProperty(property) in value
    else:
      result = context.getProperty(property) == value
#      LOG('predicate test', 0,
#          '%s after prop %s : %s == %s' % \
#          (result, property, context.getProperty(property), value))
    if not result:
      return result
  # Range criteria: min is inclusive (>=), max is exclusive (<).
  for property, (min, max) in self._range_criterion.iteritems():
    value = context.getProperty(property)
    if min is not None:
      result = value >= min
#        LOG('predicate test', 0,
#            '%s after prop %s : %s >= %s' % \
#            (result, property, value, min))
      if not result:
        return result
    if max is not None:
      result = value < max
#        LOG('predicate test', 0,
#            '%s after prop %s : %s < %s' % \
#            (result, property, value, max))
      if not result:
        return result
  multimembership_criterion_base_category_list = \
    self.getMultimembershipCriterionBaseCategoryList()
  membership_criterion_base_category_list = \
    self.getMembershipCriterionBaseCategoryList()
  tested_base_category = {}
#    LOG('predicate test', 0,
#        'categories will be tested in multi %s single %s as %s' % \
#        (multimembership_criterion_base_category_list,
#         membership_criterion_base_category_list,
#         self.getMembershipCriterionCategoryList()))
  # Test category memberships. Enable the read-only transaction cache
  # because this part is strictly read-only, and context.isMemberOf
  # is very expensive when the category list has many items.
  if isMemberOf is None:
    isMemberOf = context._getCategoryTool().isMemberOf
  with readOnlyTransactionCache():
    for c in self.getMembershipCriterionCategoryList():
      bc = c.split('/', 1)[0]
      if tested_base_category_list is None or bc in tested_base_category_list:
        if bc in multimembership_criterion_base_category_list:
          # Multi-membership: every category of this base category must
          # match (AND), so fail fast.
          if not isMemberOf(context, c, strict_membership=strict_membership):
            return 0
        elif bc in membership_criterion_base_category_list and \
             not tested_base_category.get(bc):
          # Single membership: one match per base category suffices (OR).
          tested_base_category[bc] = \
            isMemberOf(context, c, strict_membership=strict_membership)
  if 0 in tested_base_category.itervalues():
    return 0
  # Test method calls
  test_method_id_list = self.getTestMethodIdList()
  if test_method_id_list is not None :
    for test_method_id in test_method_id_list :
      if test_method_id is not None:
        method = getattr(context, test_method_id)
        try:
          result = method(self)
        except TypeError:
          # Expected argcount is 1 for bound methods (self is implicit)
          # and 0 for plain functions/scripts, hence the isinstance trick.
          if method.func_code.co_argcount != isinstance(method, MethodType):
            raise
          # backward compatibilty with script that takes no argument
          warn('Predicate %s uses an old-style method (%s) that does not'
               ' take the predicate as argument' % (
               self.getRelativeUrl(), method.__name__),
               DeprecationWarning)
          result = method()
#          LOG('predicate test', 0,
#              '%s after method %s ' % (result, test_method_id))
        if not result:
          return result
  test_tales_expression = self.getTestTalesExpression()
  if test_tales_expression != 'python: True':
    expression = Expression(test_tales_expression)
    from Products.ERP5Type.Utils import createExpressionContext
    # evaluate a tales expression with the tested value as context
    result = expression(createExpressionContext(context))
  return result
def manage_afterClone(self, item): """ Add self to the workflow. (Called when the object is cloned.) """ #LOG("After Clone ",0, "id:%s containes:%s" % (str(item.id), str(container.id))) # Change uid attribute so that Catalog thinks object was not yet catalogued self_base = aq_base(self) #LOG("After Clone ",0, "self:%s item:%s" % (repr(self), repr(item))) #LOG("After Clone ",0, "self:%s item:%s" % (repr(self), repr(self.getPortalObject().objectIds()))) portal = self.getPortalObject() self_base.uid = portal.portal_catalog.newUid() # Give the Owner local role to the current user, zope only does this if no # local role has been defined on the object, which breaks ERP5Security if getattr(self_base, '__ac_local_roles__', None) is not None: user=getSecurityManager().getUser() if user is not None: userid=user.getId() if userid is not None: #remove previous owners local_role_dict = self.__ac_local_roles__ removable_role_key_list = [] for key, value in local_role_dict.items(): if 'Owner' in value: value.remove('Owner') if len(value) == 0: removable_role_key_list.append(key) # there is no need to keep emptied keys after cloning, it makes # unstable local roles -- if object is cloned it can be different when # after being just added for key in removable_role_key_list: local_role_dict.pop(key) #add new owner l=local_role_dict.setdefault(userid, []) l.append('Owner') # Clear the workflow history # XXX This need to be tested again if getattr(self_base, 'workflow_history', _marker) is not _marker: self_base.workflow_history = PersistentMapping() # Pass - need to find a way to pass calls... 
self.notifyWorkflowCreated() # Add info about copy to edit workflow REQUEST = get_request() pw = portal.portal_workflow if 'edit_workflow' in pw.getChainFor(self)\ and (REQUEST is None or not REQUEST.get('is_business_template_installation', 0)): if REQUEST is not None and REQUEST.get('__cp', None): copied_item_list = _cb_decode(REQUEST['__cp'])[1] # Guess source item for c_item in copied_item_list: if c_item[-1] in item.getId(): source_item = '/'.join(c_item) break else: source_item = '/'.join(copied_item_list[0]) try: pw.doActionFor(self, 'edit_action', wf_id='edit_workflow', comment=translateString('Object copied from ${source_item}', mapping=(dict(source_item=source_item)))) except WorkflowException: pass else: try: pw.doActionFor(self, 'edit_action', wf_id='edit_workflow', comment=translateString('Object copied as ${item_id}', mapping=(dict(item_id=item.getId())))) except WorkflowException: pass self.__recurse('manage_afterClone', item) # Call a type based method to reset so properties if necessary script = self._getTypeBasedMethod('afterClone') if script is not None and callable(script): script()
def solve(self, simulation_movement):
  """Split a simulation movement and accumulate"""
  movement_quantity = simulation_movement.getQuantity()
  delivery_quantity = simulation_movement.getDeliveryQuantity()
  new_movement_quantity = delivery_quantity * \
    simulation_movement.getDeliveryRatio()
  applied_rule = simulation_movement.getParentValue()
  rule = applied_rule.getSpecialiseValue()
  # When accounting, the debit price is expressed by a minus quantity.
  # Thus, we must take into account the both minus and plus quantity.
  if ((movement_quantity < new_movement_quantity <= 0) or
      (movement_quantity > new_movement_quantity >= 0)):
    # Find a free '<id>_split_<n>' id under the applied rule.
    split_index = 0
    new_id = "%s_split_%s" % (simulation_movement.getId(), split_index)
    while getattr(aq_base(applied_rule), new_id, None) is not None:
      split_index += 1
      new_id = "%s_split_%s" % (simulation_movement.getId(), split_index)
    # Adopt different dates for deferred movements
    from erp5.component.module.MovementCollectionDiff import _getPropertyAndCategoryList
    movement_dict = _getPropertyAndCategoryList(simulation_movement)
    # new properties: the split movement carries the remaining quantity
    movement_dict.update(portal_type="Simulation Movement", id=new_id,
                         quantity=movement_quantity - new_movement_quantity,
                         activate_kw=self.activate_kw, delivery=None,
                         **self.additional_parameters)
    new_movement = applied_rule.newContent(**movement_dict)
    # Dirty code until IPropertyRecordable is revised.
    # Merge original simulation movement recorded property to new one.
    recorded_property_dict = simulation_movement._getRecordedPropertyDict(None)
    if recorded_property_dict:
      new_movement_recorded_property_dict = \
        new_movement._getRecordedPropertyDict(None)
      if new_movement_recorded_property_dict is None:
        new_movement_recorded_property_dict = \
          new_movement._recorded_property_dict = PersistentMapping()
      new_movement_recorded_property_dict.update(recorded_property_dict)
    # record zero quantity property, because this was originally zero.
    # without this, splitanddefer after accept decision does not work
    # properly.
    current_quantity = new_movement.getQuantity()
    new_movement.setQuantity(0)
    new_movement.recordProperty('quantity')
    new_movement.setQuantity(current_quantity)
    start_date = getattr(self, 'start_date', None)
    if start_date is not None:
      new_movement.recordProperty('start_date')
      new_movement.edit(start_date=start_date)
    stop_date = getattr(self, 'stop_date', None)
    if stop_date is not None:
      new_movement.recordProperty('stop_date')
      new_movement.edit(stop_date=stop_date)
    new_movement.expand(activate_kw=self.additional_parameters)
    # adopt new quantity on original simulation movement
    simulation_movement.edit(quantity=new_movement_quantity)
    simulation_movement.setDefaultActivateParameterDict(self.activate_kw)
    simulation_movement.expand(activate_kw=self.additional_parameters)
def _solveBySplitting(self, activate_kw=None):
  """contains all the logic to split.

  This method is convenient in case another solver needs it.
  """
  solver_dict = {}
  new_movement_list = []
  configuration_dict = self.getConfigurationPropertyDict()
  # Group the simulation movements by the delivery movement they target.
  delivery_dict = {}
  for simulation_movement in self.getDeliveryValueList():
    delivery_dict.setdefault(simulation_movement.getDeliveryValue(),
                             []).append(simulation_movement)
  for movement, simulation_movement_list in delivery_dict.iteritems():
    decision_quantity = movement.getQuantity()
    delivery_solver = self.getParentValue().newContent(
      portal_type=configuration_dict['delivery_solver'],
      temp_object=True)
    delivery_solver.setDeliveryValueList(simulation_movement_list)
    # Update the quantity using delivery solver algorithm
    split_list = delivery_solver.setTotalQuantity(decision_quantity,
                                                  activate_kw=activate_kw)
    # Create split movements
    for (simulation_movement, split_quantity) in split_list:
      # Find a free '<id>_split_<n>' id under the applied rule.
      split_index = 0
      simulation_id = simulation_movement.getId().split("_split_")[0]
      new_id = "%s_split_%s" % (simulation_id, split_index)
      applied_rule = simulation_movement.getParentValue()
      while getattr(aq_base(applied_rule), new_id, None) is not None:
        split_index += 1
        new_id = "%s_split_%s" % (simulation_id, split_index)
      # Copy at same level
      kw = _getPropertyAndCategoryList(simulation_movement)
      kw.update(delivery=None, quantity=split_quantity)
      new_movement = applied_rule.newContent(
        new_id, simulation_movement.getPortalType(),
        activate_kw=activate_kw, **kw)
      new_movement_list.append(new_movement)
      # Dirty code until IPropertyRecordable is revised.
      # Merge original simulation movement recorded property to new one.
      recorded_property_dict = \
        simulation_movement._getRecordedPropertyDict(None)
      if recorded_property_dict:
        new_movement_recorded_property_dict = \
          new_movement._getRecordedPropertyDict(None)
        if new_movement_recorded_property_dict is None:
          new_movement_recorded_property_dict = \
            new_movement._recorded_property_dict = PersistentMapping()
        new_movement_recorded_property_dict.update(recorded_property_dict)
      # record zero quantity property, because this was originally zero.
      # without this, splitanddefer after accept decision does not work
      # properly.
      current_quantity = new_movement.getQuantity()
      new_movement.setQuantity(0)
      new_movement.recordProperty('quantity')
      new_movement.setQuantity(current_quantity)
      start_date = configuration_dict.get('start_date', None)
      if start_date is not None:
        new_movement.recordProperty('start_date')
        new_movement.setStartDate(start_date)
      stop_date = configuration_dict.get('stop_date', None)
      if stop_date is not None:
        new_movement.recordProperty('stop_date')
        new_movement.setStopDate(stop_date)
      if activate_kw:
        new_movement.setDefaultActivateParameterDict({})
      simulation_movement.expand(activate_kw=activate_kw)
      new_movement.expand(activate_kw=activate_kw)
  # Finish solving
  if self.getPortalObject().portal_workflow.isTransitionPossible(
      self, 'succeed'):
    self.succeed()
  solver_dict["new_movement_list"] = new_movement_list
  return solver_dict
def updateMovementCollection(rule, context, *args, **kw):
  """Wrapper around the original updateMovementCollection.

  Calls the original implementation first, then, for each simulation
  movement of `context`, looks up the matching old simulation
  movement(s) and transfers their delivery link, delivery_ratio and
  divergent (recorded) properties onto the new movement, creating extra
  sibling movements when one key matched several old movements (split).

  NOTE(review): this function reads names from an enclosing scope that
  is not visible in this chunk (orig_updateMovementCollection, old_dict,
  order_dict, get_matching_key, simulation_tool,
  get_original_property_dict, delivery_set) -- their exact semantics
  must be confirmed against the surrounding code.
  """
  orig_updateMovementCollection(rule, context, *args, **kw)
  new_parent = context.getParentValue()
  for sm in context.getMovementList():
    delivery = sm.getDelivery()
    if delivery:
      # Movement bound to a delivery: candidates are indexed by the
      # delivery's relative URL.
      sm_dict = old_dict.pop(delivery)
    else:
      sm_dict = order_dict[new_parent]
    order_dict[sm] = sm_dict
    k = get_matching_key(sm)
    # Consume all old movements that share this movement's key; empty
    # tuple when nothing matches.
    sm_list = sm_dict.pop(k, ())
    if len(sm_list) > 1:
      # Heuristic to find matching old simulation movements for the
      # currently expanded applied rule. We first try to preserve same
      # tree structure (new & old parent SM match), then we look for an
      # old possible parent that is in the same branch.
      try:
        old_parent = old_dict[new_parent]
      except KeyError:
        old_parent = simulation_tool
      best_dict = {}
      for old_sm in sm_list:
        parent = old_sm.getParentValue().getParentValue()
        if parent is old_parent:
          # Exact structural match -- bucketed under the None key.
          parent = None
        elif not (parent.aq_inContextOf(old_parent) or
                  old_parent.aq_inContextOf(parent)):
          # Not in the same branch: discard this candidate.
          continue
        best_dict.setdefault(parent, []).append(old_sm)
      try:
        best_sm_list = best_dict[None]
      except KeyError:
        # No exact parent match: exactly one candidate branch is
        # expected -- the single-element unpacking raises otherwise.
        best_sm_list, = best_dict.values()
      if len(best_sm_list) < len(sm_list):
        # Put rejected candidates back for other new movements.
        sm_dict[k] = list(set(sm_list).difference(best_sm_list))
        sm_list = best_sm_list
      if len(sm_list) > 1:
        # Template of the new movement's raw attributes, used below to
        # clone sibling movements for the extra old matches.
        kw = sm.__dict__.copy()
    # We may have several old matching SM, e.g. in case of split.
    for old_sm in sm_list:
      movement = old_sm.getDeliveryValue()
      if sm is None:
        # Previous iteration consumed `sm`: clone a sibling from the
        # template saved above.
        sm = context.newContent(portal_type=rule.movement_type)
        sm.__dict__ = dict(kw, **sm.__dict__)
        order_dict[sm] = sm_dict
      if delivery:
        assert movement.getRelativeUrl() == delivery
      elif movement is not None:
        sm._setDeliveryValue(movement)
        delivery_set.add(sm.getExplanationValue())
      # Copy delivery_ratio when the old movement has one recorded.
      try:
        sm.delivery_ratio = old_sm.aq_base.delivery_ratio
      except AttributeError:
        pass
      recorded_property_dict = {}
      edit_kw = {}
      kw['quantity'] = 0
      for tester in rule._getUpdatingTesterList():
        old = get_original_property_dict(tester, old_sm, sm, movement)
        if old is not None:
          new = tester.getUpdatablePropertyDict(sm, movement)
          if old != new:
            # The old movement diverged for this tester: restore the
            # original values on the new movement...
            edit_kw.update(old)
            if 'quantity' in new and old_sm is not sm_list[-1]:
              # ...except quantity on non-last splits, where the delta
              # is carried over to the next cloned sibling via kw.
              quantity = new.pop('quantity')
              kw['quantity'] = quantity - old.pop('quantity')
              if new != old or sm.quantity != quantity:
                raise NotImplementedError # quantity_unit/efficiency ?
            else:
              # ...and keep the divergence recorded on the new SM.
              recorded_property_dict.update(new)
      if recorded_property_dict:
        sm._recorded_property_dict = PersistentMapping(
          recorded_property_dict)
      sm._edit(**edit_kw)
      old_dict[sm] = old_sm
      sm = None
def generateNewIdList(self, id_group=None, id_count=1, default=None,
                      store=_marker, id_generator=None, poison=False):
  """Generate a list of the next `id_count` ids for a given id group.

  id_group -- name of the id sequence; must be a string (other types
              are deprecated and converted with repr()).
  id_count -- how many consecutive ids to reserve.
  default -- first value of the sequence when it does not exist yet.
  store -- deprecated, kept for backward compatibility only.
  id_generator -- reference of the id generator to delegate to
                  ('uid' when not given).
  poison -- forwarded as-is to the id generator.

  Returns a list of `id_count` consecutive integer ids.
  Raises ValueError when id_group is None (or the string 'None').
  """
  if id_group in (None, 'None'):
    raise ValueError('%r is not a valid id_group' % id_group)
  # for compatibility with sql data, must not use id_group as a list
  if not isinstance(id_group, str):
    id_group = repr(id_group)
    warnings.warn('id_group must be a string, other types '
                  'are deprecated.', DeprecationWarning)
  if id_generator is None:
    id_generator = 'uid'
  if store is not _marker:
    warnings.warn("Use of 'store' argument is deprecated.",
                  DeprecationWarning)
  try:
    # Use _getLatestGeneratorValue so that the latest installed id
    # generator document handles the request instead of this tool.
    last_generator = self._getLatestGeneratorValue(id_generator)
    new_id_list = last_generator.generateNewIdList(id_group=id_group,
                      id_count=id_count, default=default, poison=poison)
  except (KeyError, ValueError):
    # XXX backward compatibility
    if self.getTypeInfo():
      # The tool is properly installed: this is a real failure.
      LOG('generateNewIdList', ERROR, 'while generating id')
      raise
    else:
      # Compatibility code below, in case the last version of erp5_core
      # is not installed yet
      warnings.warn("You are using an old version of erp5_core to generate"
                    "ids.\nPlease update erp5_core business template to "
                    "use new id generators", DeprecationWarning)
      if default is None:
        default = 1
      # XXX It's temporary, a New API will be implemented soon
      #     and this code will be changed
      portal = self.getPortalObject()
      try:
        query = portal.IdTool_zGenerateId
        commit = portal.IdTool_zCommit
      except AttributeError:
        portal_catalog = portal.portal_catalog.getSQLCatalog()
        query = portal_catalog.z_portal_ids_generate_id
        commit = portal_catalog.z_portal_ids_commit
      try:
        result = query(id_group=id_group, id_count=id_count, default=default)
      finally:
        commit()
      new_id = result[0]['LAST_INSERT_ID()']
      if store:
        # NOTE(review): `store` defaults to the module-level `_marker`
        # sentinel; whether this branch runs by default depends on the
        # sentinel's truth value -- confirm against the module header.
        if getattr(aq_base(self), 'dict_length_ids', None) is None:
          # Length objects are stored in a persistent mapping: there is
          # one Length object per id_group.
          self.dict_length_ids = PersistentMapping()
        if self.dict_length_ids.get(id_group) is None:
          self.dict_length_ids[id_group] = Length(new_id)
        self.dict_length_ids[id_group].set(new_id)
      # list(range(...)) behaves identically on Python 2 (where range()
      # already returns a list) and Python 3, so no six.PY2 branch is
      # needed here.
      new_id_list = list(range(new_id - id_count, new_id))
  return new_id_list