def refreshWorklistCache(self):
  """
    Refresh the worklist cache table.
    - delete everything from that table
      - if it fails, create the table
    - insert new lines
      - if it fails, recreate the table and retry
  """
  # Contrary to WorkflowTool_listActions, related keys are NOT supported.
  # ZSQL method is looked up leniently: absence means the erp5_worklist_sql
  # business template is not installed, in which case we silently do nothing.
  Base_zInsertIntoWorklistTable = getattr(self, 'Base_zInsertIntoWorklistTable', None)
  if Base_zInsertIntoWorklistTable is not None:
    # XXX: Code below is duplicated from WorkflowTool_listActions
    info = self._getOAI(None)
    worklist_dict = {}
    wf_ids = self.objectIds()
    for wf_id in wf_ids:
      wf = self.getWorkflowById(wf_id)
      if wf is not None:
        # check_guard=False: guards are evaluated at display time, not here
        a = wf.getWorklistVariableMatchDict(info, check_guard=False)
        if a is not None:
          worklist_dict[wf_id] = a
    # End of duplicated code
    if len(worklist_dict):
      Base_zClearWorklistTable = getattr(self, 'Base_zClearWorklistTable', None)
      if Base_zClearWorklistTable is None:
        LOG('WorkflowTool', WARNING, 'Base_zClearWorklistTable cannot be found. ' \
            'Falling back to former refresh method. Please update ' \
            'erp5_worklist_sql business template.')
        self.Base_zCreateWorklistTable()
      else:
        try:
          self.Base_zClearWorklistTable()
        except ProgrammingError, error_value:
          # 1146 = table does not exist (MySQL); anything else is a real error
          if error_value[0] != 1146:
            raise
          self.Base_zCreateWorklistTable()
      portal_catalog = self.getPortalObject().portal_catalog
      search_result = portal_catalog.unrestrictedSearchResults
      sql_catalog = portal_catalog.getSQLCatalog()
      # All columns that exist in the worklist cache table, including the
      # aggregated count column.
      table_column_id_set = ImmutableSet(
          [COUNT_COLUMN_TITLE] + self.Base_getWorklistTableColumnIDList())
      # Security-related catalog columns that must be part of the GROUP BY so
      # that cached counts can later be filtered per user.
      security_column_id_list = list(
        sql_catalog.getSQLCatalogSecurityUidGroupsColumnsDict().values()) + \
        [x[1] for x in sql_catalog.getSQLCatalogRoleKeysList()] + \
        [x[1] for x in sql_catalog.getSQLCatalogLocalRoleKeysList()]
      security_column_id_set = set(security_column_id_list)
      # Duplicated security columns would silently corrupt the grouping.
      assert len(security_column_id_set) == len(security_column_id_list), (
        security_column_id_set, security_column_id_list)
      del security_column_id_list
      security_column_id_set.difference_update(
        self._getWorklistIgnoredSecurityColumnSet())
      for security_column_id in security_column_id_set:
        assert security_column_id in table_column_id_set
      # NOTE: worklist_metadata is not used by this method.
      (worklist_list_grouped_by_condition, worklist_metadata) = \
        groupWorklistListByCondition(
          worklist_dict=worklist_dict,
          sql_catalog=sql_catalog)
      assert COUNT_COLUMN_TITLE in table_column_id_set
      for grouped_worklist_dict in worklist_list_grouped_by_condition:
        # Generate the query for this worklist_list
        (total_criterion_id_list, query) = \
          getWorklistListQuery(
            getQuery=SimpleQuery,
            grouped_worklist_dict=grouped_worklist_dict,
          )
        for criterion_id in total_criterion_id_list:
          assert criterion_id in table_column_id_set
        for security_column_id in security_column_id_set:
          assert security_column_id not in total_criterion_id_list
          total_criterion_id_list.append(security_column_id)
        group_by = total_criterion_id_list
        assert COUNT_COLUMN_TITLE not in total_criterion_id_list
        # Select every grouping criterion plus the aggregated row count.
        select_dict = dict.fromkeys(total_criterion_id_list)
        select_dict[COUNT_COLUMN_TITLE] = 'count(*)'
        search_result_kw = {'select_dict': select_dict,
                            'group_by': group_by,
                            'query': query,
                            'limit': None,
                           }
        #LOG('refreshWorklistCache', WARNING, 'Using query: %s' % \
        #    (search_result(src__=1, **search_result_kw), ))
        catalog_brain_result = search_result(**search_result_kw)
        # Transpose catalog rows into one value list per table column, as
        # expected by the multi-row insert ZSQL method.
        value_column_dict = {x: [] for x in table_column_id_set}
        for catalog_brain_line in catalog_brain_result.dictionaries():
          for column_id, value in catalog_brain_line.iteritems():
            if column_id in value_column_dict:
              value_column_dict[column_id].append(value)
        if len(value_column_dict[COUNT_COLUMN_TITLE]):
          try:
            Base_zInsertIntoWorklistTable(**value_column_dict)
          except (ProgrammingError, OperationalError), error_value:
            # OperationalError 1054 = unknown column; any other operational
            # error is re-raised. ProgrammingError (e.g. missing table) and
            # unknown-column errors trigger a table rebuild + one retry.
            if isinstance(error_value, OperationalError) and error_value[0] != 1054:
              raise
            LOG('WorkflowTool', WARNING, 'Insertion in worklist cache table ' \
                'failed. Recreating table and retrying.', error=True)
            self.Base_zCreateWorklistTable()
            Base_zInsertIntoWorklistTable(**value_column_dict)
def _createContent(self, xml=None, object=None, object_id=None, sub_object=None,
                   reset_local_roles=0, reset_workflow=0, simulate=0, **kw):
    """
    Create a product on the integration site from a SyncML XML node.

    xml -- lxml element describing the product; child tags are either
           'category' elements (variation definitions) or plain properties
    object -- the integration site document providing category mapping and
              the product_module (TODO confirm exact type against callers)

    Returns the newly created product object.
    Other parameters are part of the conduit interface and unused here.
    """
    # Fixed typo in log tag (was "_createConten").
    LOG("TioSafeNodeConduit._createContent", 300,
        "xml = %s" % (etree.tostring(xml, pretty_print=1), ))
    # If a namespace is present, tags look like '{ns}tag': split on '}' and
    # keep the last part; otherwise keep the whole tag (index 0).
    index = 0
    if xml.nsmap not in [None, {}]:
      index = -1
    new_id = None
    # Plain properties to pass to the product constructor.
    keyword = {}
    # Maps base_category -> list of variations to create after the product.
    variation_dict = {}
    # browse the xml
    for node in xml:
      # Work on real tags only; lxml comments have a non-string tag.
      if type(node.tag) is not str:
        continue
      tag = node.tag.split('}')[index]
      if tag == 'category':
        # Retrieve through the mapping the base category and the variation.
        mapping = object.getMappingFromCategory(node.text.encode("utf-8"))
        base_category, variation = mapping.split('/', 1)
        category_params = {
            'document': object,
            'base_category': base_category,
            'variation': variation,
        }
        # Only record the variation if it exists on the integration site.
        if self.checkCategoryExistency(**category_params):
          variation_dict.setdefault(base_category, []).append(variation)
      else:
        keyword[tag] = node.text.encode('utf-8')
    # Create the product once the whole XML has been browsed.
    new_id = object.product_module.createProduct(**keyword)
    # XXX-AUREL : this must be changed to use gid definition instead
    if not new_id:
      new_id = object.IntegrationSite_lastProductID()[0].getId()
    # Create the full variations in the integration site.
    if variation_dict:
      for base_category, variation_list in variation_dict.items():
        for variation in variation_list:
          object.product_module.createProductCategory(
              id_product=new_id,
              base_category=base_category,
              variation=variation,
          )
    return object.product_module(id=new_id)[0]
def __call__(self, force=False):
    """
    Create or update the Plone content item described by self.data.

    force -- when True, update the item even if it is already published.

    Raises Exception when no data was provided or the parent container
    cannot be found. Returns None (content is created/updated in place).
    """
    if not self.data.data:
        raise Exception("No data provided.")

    parent = self.parent
    if not parent:
        raise Exception("Cannot find parent object for %s" % self.path)

    _id = self.getId()

    if not self.exists:
        # People have no title or description.
        if self.portal_type in ('agsci_person', ):
            item = createContentInContainer(parent, self.portal_type, id=_id,
                                            checkConstraints=False)
        else:
            item = createContentInContainer(parent, self.portal_type, id=_id,
                                            title=self.data.title,
                                            description=self.data.description,
                                            checkConstraints=False)
        # Remember the source UID on the new item
        setattr(item, ATTRIBUTE_NAME, self.UID)
    else:
        # If the item exists, and it's published, no further changes
        if self.review_state in ['published', ] and not force:
            return
        item = self.context

    # Set subject (tags)
    if self.data.subject:
        item.setSubject(list(self.data.subject))

    # Set HTML
    html = self.html
    if html:
        item.text = RichTextValue(raw=html,
                                  mimeType=u'text/html',
                                  outputMimeType='text/x-html-safe')

    # Set File field (renamed local to avoid shadowing the `file` builtin)
    file_value = self.file
    if file_value:
        item.file = file_value

    # Set Lead Image or Image field
    image = self.image
    if image:
        item.image = image

    # Unset full width field if image is too small, or is portrait.
    # Was a bare `except:`; narrowed to Exception so KeyboardInterrupt /
    # SystemExit are no longer swallowed.
    try:
        (w, h) = image.getImageSize()
    except Exception:
        pass
    else:
        if w < h or w < 600:
            item.image_full_width = False

    # Set field values
    # Map current field name 'field' to old 'data_field' from feed.
    fields = self.fields
    field_names = self.field_names
    for field_name in field_names:
        field = fields.get(field_name)
        if self.map_fields:
            data_field = self.fields_mapping.get(field_name, field_name)
        else:
            data_field = field_name

        # Skip fields if we're only importing specific fields
        if self.include_fields and field_name not in self.include_fields:
            continue

        if field_name not in self.exclude_fields:
            value = getattr(self.data, data_field, None)
            # Falsy values are skipped, except booleans (False is meaningful)
            if value or isinstance(value, (bool, )):
                value = self.transform_value(field=field,
                                             field_name=data_field,
                                             value=value)
                setattr(item, field_name, value)
                if self.debug:
                    LOG(self.__class__.__name__, INFO,
                        "%s: Setting %s to %r" %
                        (item.absolute_url(), field_name, value))

    # Set collection criteria
    if self.portal_type in ('Collection', 'Newsletter'):
        if self.data.collection_criteria:
            item.setQuery(self.data.collection_criteria)
        if self.data.collection_sort_field:
            item.setSort_on(self.data.collection_sort_field)
        if self.data.collection_sort_reversed:
            item.setSort_reversed(True)

    # Set default page
    if self.data.default_page:
        default_page_id = safe_unicode(self.data.default_page).encode('utf-8')
        self.context.setDefaultPage(default_page_id)
    else:
        # Set layout if no default page
        layout = self.data.layout
        if layout in self.valid_layouts:
            item.setLayout(layout)

    # Set dates
    effective = self.data.effective_date
    expires = self.data.expiration_date
    if effective:
        item.setEffectiveDate(DateTime(effective))
    if expires:
        item.setExpirationDate(DateTime(expires))

    # If event, set start and end
    if self.portal_type in ('Event', ):
        start_date = localize(DateTime(self.data.start_date))
        end_date = localize(DateTime(self.data.end_date))
        acc = IEventAccessor(item)
        acc.start = start_date
        acc.end = end_date

    # Set references
    modifiedContent(item, None)

    # Reindex
    item.reindexObject()
def generateNewIdList(self, id_group=None, id_count=1, default=None,
                      store=_marker, id_generator=None, poison=False):
  """
    Generate a list of next ids in the sequence of ids of a particular group

    id_group -- name of the id sequence (must be a string; other types are
                deprecated and converted with repr())
    id_count -- number of consecutive ids to reserve
    default -- initial value when the sequence does not exist yet
    store -- deprecated argument
    id_generator -- reference of the generator to use ('uid' by default)
    poison -- passed through to the id generator
  """
  if id_group in (None, 'None'):
    raise ValueError('%r is not a valid id_group' % id_group)
  # for compatibilty with sql data, must not use id_group as a list
  if not isinstance(id_group, str):
    id_group = repr(id_group)
    warnings.warn('id_group must be a string, other types '
                  'are deprecated.', DeprecationWarning)
  if id_generator is None:
    id_generator = 'uid'
  if store is not _marker:
    warnings.warn("Use of 'store' argument is deprecated.",
                  DeprecationWarning)
  try:
    #use _getLatestGeneratorValue here for that the technical level
    #must not call the method
    last_generator = self._getLatestGeneratorValue(id_generator)
    new_id_list = last_generator.generateNewIdList(id_group=id_group,
                      id_count=id_count, default=default,
                      poison=poison)
  except (KeyError, ValueError):
    # XXX backward compatiblity
    if self.getTypeInfo():
      LOG('generateNewIdList', ERROR, 'while generating id')
      raise
    else:
      # Compatibility code below, in case the last version of erp5_core
      # is not installed yet
      warnings.warn("You are using an old version of erp5_core to generate"
                    "ids.\nPlease update erp5_core business template to "
                    "use new id generators", DeprecationWarning)
      new_id = None
      if default is None:
        default = 1
      # XXX It's temporary, a New API will be implemented soon
      #     the code will be change
      portal = self.getPortalObject()
      try:
        query = portal.IdTool_zGenerateId
        commit = portal.IdTool_zCommit
      except AttributeError:
        # Fall back on the catalog-level ZSQL methods when the skin
        # scripts are not available.
        portal_catalog = portal.portal_catalog.getSQLCatalog()
        query = portal_catalog.z_portal_ids_generate_id
        commit = portal_catalog.z_portal_ids_commit
      try:
        result = query(id_group=id_group, id_count=id_count, default=default)
      finally:
        # Always commit so the reserved range is not rolled back.
        commit()
      new_id = result[0]['LAST_INSERT_ID()']
      # NOTE(review): _marker is a truthy sentinel, so this branch stores
      # lengths unless the caller explicitly passed a falsy `store` — this
      # looks like deliberate legacy behavior, confirm before changing.
      if store:
        if getattr(aq_base(self), 'dict_length_ids', None) is None:
          # Length objects are stored in a persistent mapping: there is one
          # Length object per id_group.
          self.dict_length_ids = PersistentMapping()
        if self.dict_length_ids.get(id_group) is None:
          self.dict_length_ids[id_group] = Length(new_id)
        self.dict_length_ids[id_group].set(new_id)
      # new_id is the end of the reserved range (exclusive); return the
      # id_count ids preceding it.
      if six.PY2:
        new_id_list = range(new_id - id_count, new_id)
      else:
        new_id_list = list(range(new_id - id_count, new_id))
  return new_id_list
def createAllPropertySheetsFromFilesystem(self, erase_existing=False,
                                          REQUEST=None):
  """
  Create Property Sheets in portal_property_sheets from _all_ filesystem
  Property Sheets

  Returns the list of PropertySheet names which failed being imported.
  """
  from Products.ERP5Type import PropertySheet
  failed_import = []
  # Walk every attribute of the filesystem PropertySheet package.
  for name, klass in PropertySheet.__dict__.iteritems():
    # A string value means the Property Sheet has either already been
    # migrated or is not available (perhaps defined in a bt5 not installed
    # yet?); private names are skipped as well.
    if name.startswith('_') or isinstance(klass, basestring):
      continue
    if name in self.objectIds():
      if not erase_existing:
        continue
      self.portal_property_sheets.deleteContent(name)
    LOG("Tool.PropertySheetTool", INFO,
        "Creating %s in portal_property_sheets" % repr(name))
    try:
      PropertySheetDocument.importFromFilesystemDefinition(self, klass)
    except BadRequest:
      if name not in KNOWN_BROKEN_PROPERTY_SHEET_DICT:
        raise
      # Don't fail, this property sheet is known to have been broken in the
      # past, this site might be upgrading from such broken version.
      LOG('PropertySheetTool', WARNING,
          'Failed to import %s with error:' % (name, ), error=True)
      failed_import.append(name)
  if REQUEST is None:
    return failed_import
  # Called through the web: redirect with a translated status message.
  portal = self.getPortalObject()
  base_message = 'Property Sheets successfully imported from ' \
                 'filesystem to ZODB.'
  mapping = {}
  if failed_import:
    base_message += ' These property sheets failed to be imported: ' \
        '$failed_import . You must update the following business ' \
        'templates to have fixed version of these property sheets: ' \
        '$business_templates'
    mapping['failed_import'] = ', '.join(failed_import)
    mapping['business_templates'] = ', '.join(
        {KNOWN_BROKEN_PROPERTY_SHEET_DICT[x] for x in failed_import})
  message = portal.Base_translateString(base_message, mapping=mapping)
  return self.Base_redirect(
      'view', keep_items={'portal_status_message': message})
def runPersonSync(self):
  """
  Test synchronization of persons between the ERP5 site and the Oxatis
  plugin: initial sync, plugin-side modifications, both-side modifications,
  conflict generation and conflict resolution by merge.
  """
  INITIAL_PERSON_TITLE = ["Simple person",
                          "Person shipping to another person",
                          "Person into an organisation",
                          "Person with shipping into org"]
  #
  # Initial cleanup & settings
  #
  # Delete person & organisations
  org_ids = [x for x in self.portal.organisation_module.objectIds()
             if x != self.default_source_id]
  self.portal.organisation_module.manage_delObjects(org_ids)
  person_ids = [x for x in self.portal.person_module.objectIds()
                if x != self.default_node_id]
  self.portal.person_module.manage_delObjects(person_ids)
  # Validate some persons
  for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
    if person.getTitle() in INITIAL_PERSON_TITLE:
      if person.getValidationState() != "validated":
        person.validate()
    else:
      if person.getValidationState() == "validated":
        person.invalidate()
  self.tic()
  # Check initial data
  self.assertEqual(len(self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person")), 6)
  self.assertEqual(len(self.oxatis.person_module.getObjectList()), 4)
  self.assertEqual(len(self.oxatis.delivered_person_module.getObjectList()), 2)
  self.assertEqual(len(self.oxatis.delivered_organisation_module.getObjectList()), 1)
  self.assertEqual(len(self.oxatis.organisation_module.getObjectList()), 1)
  original_person_module_length = len(self.portal.person_module.contentValues())
  self.assertEqual(len(self.portal.organisation_module.contentValues()), 1)
  # store person that will be synced
  # person_dict maps a first name / company name to a (node_type, path)
  # tuple used to check the synchronized documents later on.
  person_dict = {}
  for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
    person_dict[person.getFirstname()] = ('person', person.getPath())
    if person.getCompany(None):
      person_dict[person.getCompany()] = ('organisation', person.getPath())
    if person.getShippingcompany(None):
      person_dict[person.getShippingcompany()] = ('delivered_organisation', person.getPath())
    if person.getShippingfirstname(None) and \
       person.getShippingfirstname() != person.getFirstname():
      person_dict[person.getShippingfirstname()] = ('delivered_person', person.getPath())
  #
  # Do & Check initial synchronization
  #
  self._runAndCheckNodeSynchronization()
  self.checkSaleTradeConditionRelation(self.getPortalObject().person_module)
  self.checkSaleTradeConditionRelation(self.getPortalObject().organisation_module)
  self.assertEqual(len(self.portal.person_module.contentValues()), original_person_module_length+6)
  self.assertEqual(len(self.portal.organisation_module.contentValues()), 2)
  for person in self.portal.person_module.contentValues():
    if person.getId() != self.default_node_id:
      self.assertEqual(person.getValidationState(), 'validated')
      # NOTE(review): if the first name is missing from person_dict this
      # unpacking raises TypeError on None — presumably never expected here.
      node_type, test_person = person_dict.get(person.getFirstName(), None)
      if node_type == "person":
        test_person = self.portal.restrictedTraverse(test_person)
        self.assertNotEqual(test_person, None)
        self.assertEqual(test_person.getLastname(), person.getLastName())
        self.assertEqual(test_person.getEmail(), person.getDefaultEmailText())
        if not person.getSubordinationValue(None):
          # Check phones
          self.assertEqual(test_person.getBillingphone(), person.getDefaultTelephoneText())
          self.assertEqual(test_person.getBillingcellphone(), person.getMobileTelephoneText())
          self.assertEqual(test_person.getBillingfax(), person.getDefaultFaxText())
          # Check default address
          default_address = person.get("default_address", None)
          self.assertNotEqual(default_address, None)
          self.assertEqual(test_person.getBillingaddress(), default_address.getStreetAddress())
          self.assertEqual(test_person.getBillingzipcode(), default_address.getZipCode())
          self.assertEqual(test_person.getBillingcity(), default_address.getCity())
      elif node_type == "delivered_person":
        test_person = self.portal.restrictedTraverse(test_person)
        self.assertNotEqual(test_person, None)
        self.assertEqual(test_person.getShippinglastname(), person.getLastName())
        self.assertEqual(test_person.getEmail(), person.getDefaultEmailText())
        if not person.getSubordinationValue(None):
          # Check phones
          self.assertEqual(test_person.getShippingphone(), person.getDefaultTelephoneText())
          # Check default address
          default_address = person.get("default_address", None)
          self.assertNotEqual(default_address, None)
          self.assertEqual(test_person.getShippingaddress(), default_address.getStreetAddress())
          self.assertEqual(test_person.getShippingzipcode(), default_address.getZipCode())
          self.assertEqual(test_person.getShippingcity(), default_address.getCity())
      elif node_type == "organisation":
        test_person = self.portal.restrictedTraverse(test_person)
        self.assertNotEqual(test_person, None)
        # Check phones
        self.assertEqual(test_person.getBillingphone(), person.getDefaultTelephoneText())
        self.assertEqual(test_person.getBillingcellphone(), person.getMobileTelephoneText())
        self.assertEqual(test_person.getBillingfax(), person.getDefaultFaxText())
        # Check default address
        default_address = person.get("default_address", None)
        self.assertNotEqual(default_address, None)
        self.assertEqual(test_person.getBillingaddress(), default_address.getStreetAddress())
        self.assertEqual(test_person.getBillingzipcode(), default_address.getZipCode())
        self.assertEqual(test_person.getBillingcity(), default_address.getCity())
      elif node_type == "delivered_organisation":
        test_person = self.portal.restrictedTraverse(test_person)
        self.assertNotEqual(test_person, None)
        # Check phones
        self.assertEqual(test_person.getShippingphone(), person.getDefaultTelephoneText())
        # Check default address
        default_address = person.get("default_address", None)
        self.assertNotEqual(default_address, None)
        self.assertEqual(test_person.getShippingaddress(), default_address.getStreetAddress())
        self.assertEqual(test_person.getShippingzipcode(), default_address.getZipCode())
        self.assertEqual(test_person.getShippingcity(), default_address.getCity())
      else:
        raise ValueError, 'bad type'
  #
  # Modify persons on the plugin side
  #
  # mapping_dict keeps the original values so they can be restored in the
  # finally block below.
  mapping_dict = {}
  for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
    if person.getTitle() == 'Simple person':
      # Change basic informations
      mapping_dict[person.getId()] = {"billingaddress":person.getBillingaddress(),
                                      "billingzipcode":person.getBillingzipcode(),
                                      "billingcity":person.getBillingcity(),
                                      "billingphone":person.getBillingphone(),
                                      "billingcellphone":person.getBillingcellphone(),
                                      "billingfax":person.getBillingfax(),
                                      "company":person.getCompany()}
      person.edit(billingaddress="10 rue jaune",
                  billingzipcode="59000",
                  billingcity="Lille",
                  billingphone="",
                  billingcellphone="1111111",
                  billingfax="2222222",
                  company="SNCF")
    elif person.getTitle() == 'Person shipping to another person':
      # Change shipping person
      mapping_dict[person.getId()] = {"shippingfirstname":person.getShippingfirstname(),
                                      "shippinglastname":person.getShippinglastname(),}
      person.edit(shippingfirstname="Chew",
                  shippinglastname="Baccâl")
    elif person.getTitle() == 'Person into an organisation':
      # Change company name
      mapping_dict[person.getId()] = {"company":person.getCompany(),}
      person.edit(company="Etoile Noire")
    elif person.getTitle() == "Person with shipping into org":
      # change shipping company address
      mapping_dict[person.getId()] = {"shippingaddress":person.getShippingaddress(),
                                      "shippingzipcode":person.getShippingzipcode(),
                                      "shippingcity":person.getShippingcity(),
                                      "shippingphone":person.getShippingphone(),}
      person.edit(shippingaddress="101 rue de la forêt",
                  shippingzipcode="59000",
                  shippingcity="Lîlle",
                  shippingphone="",)
  # Validate remaining persons
  for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
    if person.getValidationState() != "validated":
      person.validate()
  try:
    self.assertEqual(len(mapping_dict), 4)
    self._runAndCheckNodeSynchronization(reset=False)
    self.checkSaleTradeConditionRelation(self.getPortalObject().person_module, excluded_title_list='luke skywalker')
    self.checkSaleTradeConditionRelation(self.getPortalObject().organisation_module)
    self.assertEqual(len(self.portal.person_module.contentValues()), original_person_module_length+10)
    self.assertEqual(len(self.portal.organisation_module.contentValues()), 5)
    #
    # Modify person on both side
    #
    for person in self.portal.person_module.searchFolder(validation_state="validated"):
      if person.getTitle() == "test-Aurélien Calonne":
        # remove company link
        person.default_career.setSubordination("")
        # define phone and adress
        person.setDefaultTelephoneText("454545445")
        person.setDefaultAddressStreetAddress("10 Route 66")
        person.setDefaultAddressZipCode("534322")
        person.setDefaultAddressCity("Paris")
        person.setDefaultAddressRegion("france")
    for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
      if person.getTitle() == "Simple person":
        person.edit(billingcellphone="0129834765")
    #
    # Modify the organisation with multiple addresses on both sides
    #
    for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
      if person.getTitle() == "Organisation witth two address":
        mapping_dict[person.getId()] = {"billingaddress":person.getBillingaddress(),
                                        "shippingaddress":person.getShippingaddress(),
                                        }
        person.edit(billingaddress="10 rue saint Andrêt",
                    shippingaddress="1 rue saint Médart",)
    self._runAndCheckNodeSynchronization(reset=False)
    self.checkSaleTradeConditionRelation(self.getPortalObject().person_module, excluded_title_list='luke skywalker')
    self.checkSaleTradeConditionRelation(self.getPortalObject().organisation_module)
    #
    # Generates conflict on all properties
    #
    for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
      if person.getTitle() == "Organisation witth two address":
        person.edit(billingaddress="billing conflict",
                    billingzipcode="0000",
                    billingcity="ConflictTown",
                    billingphone="0000",
                    billingcellphone="00000",
                    billingfax="000000",
                    shippingaddress="shipping conflict",
                    shippingzipcode="0000",
                    shippingcity="ConflictTown",
                    shippingphone="0000",
                    )
    for org in self.portal.organisation_module.searchFolder(validation_state="validated"):
      if org.getTitle() == "WEE":
        org.setDefaultTelephoneText("9999")
        org.setMobileTelephoneText("9999")
        org.setDefaultFaxText("9999")
        org.setDefaultAddressStreetAddress("address conflit")
        org.setDefaultAddressZipCode("9999")
        org.setDefaultAddressCity("ConflitVille")
    # 7 conflicts expected on organisation_module, still unresolved (True)
    self._runAndCheckNodeSynchronization(reset=False,
                                         conflict_dict={'organisation_module' : (7,0, True)},
                                         )
    self.checkSaleTradeConditionRelation(self.getPortalObject().person_module, excluded_title_list='luke skywalker')
    self.checkSaleTradeConditionRelation(self.getPortalObject().organisation_module, excluded_title_list='SNCF')
    # Fix all conflicts & run sync again
    for conflict in self.oxatis.organisation_module.getSourceSectionValue().getConflictList():
      if conflict.getParentValue().getValidationState() == "conflict":
        LOG("changing %s to resolved" %(conflict.getParentValue().getPath(),), 300, "")
        conflict.getParentValue().resolveConflictWithMerge()
    self._runAndCheckNodeSynchronization(reset=False,
                                         conflict_dict={'organisation_module' : (7,0, False)})
    self.checkSaleTradeConditionRelation(self.getPortalObject().person_module, excluded_title_list='luke skywalker')
    self.checkSaleTradeConditionRelation(self.getPortalObject().organisation_module, excluded_title_list='SNCF')
  finally:
    # Reset data on person
    for person in self.portal.oxatis_test_module.contentValues(portal_type="Oxatis Test Person"):
      mapping = mapping_dict.get(person.getId(), None)
      if mapping is not None:
        person.edit(**mapping)
def loadClass(cls):
  """
  Load a ghost portal type class from the ZODB definition.

  - mro before load: erp5.portal_type.XXX, GhostBaseMetaClass instance, *TAIL
  - mro after: erp5.portal_type.XXX, *new_bases_fetched_from_ZODB
  """
  __traceback_info__ = cls.__name__
  # Do not load the class again if it has already been loaded
  if not cls.__isghost__:
    return
  # cls might be a subclass of a portal type class
  # we need to find the right class to change
  for klass in cls.__mro__:
    # XXX hardcoded, this doesnt look too good
    if klass.__module__ == "erp5.portal_type":
      break
  else:
    raise AttributeError("Could not find a portal type class in"
                         " class hierarchy")
  portal_type = klass.__name__
  from Products.ERP5.ERP5Site import getSite
  site = getSite()
  # Serialize class (re)generation across threads.
  with aq_method_lock:
    try:
      class_definition = generatePortalTypeClass(site, portal_type)
    except AttributeError:
      # The Base Type object is missing or broken: degrade to a broken
      # class with no categories, constraints or interfaces.
      LOG("ERP5Type.Dynamic", WARNING,
          "Could not access Portal Type Object for type %r" % portal_type,
          error=True)
      base_tuple = (ERP5BaseBroken, )
      portal_type_category_list = []
      attribute_dict = dict(_categories=[], constraints=[])
      interface_list = []
    else:
      base_tuple, portal_type_category_list, \
        interface_list, attribute_dict = class_definition
    # Flip the ghost flag and swap in the real bases fetched from the ZODB.
    klass.__isghost__ = False
    klass.__bases__ = base_tuple
    klass.resetAcquisition()
    for key, value in attribute_dict.iteritems():
      setattr(klass, key, value)
    if getattr(klass.__setstate__, 'im_func', None) is \
        persistent_migration.__setstate__:
      # optimization to reduce overhead of compatibility code
      klass.__setstate__ = persistent_migration.Base__setstate__
    for interface in interface_list:
      classImplements(klass, interface)
    # skip this during the early Base Type / Types Tool generation
    # because they dont have accessors, and will mess up
    # workflow methods. We KNOW that we will re-load this type anyway
    if len(base_tuple) > 1:
      klass.generatePortalTypeAccessors(site, portal_type_category_list)
    # need to set %s__roles__ for generated methods
    cls.setupSecurity()
def generatePortalTypeClass(site, portal_type_name):
  """
  Given a portal type, look up in Types Tool the corresponding
  Base Type object holding the definition of this portal type,
  and computes __bases__ and __dict__ for the class that will
  be created to represent this portal type

  Returns tuple with 4 items:
    - base_tuple: a tuple of classes to be used as __bases__
    - base_category_list: categories defined on the portal type
        (and portal type only: this excludes property sheets)
    - interface_list: list of zope interfaces the portal type implements
    - attribute dictionary: any additional attributes to put on the class
  """
  # LOG("ERP5Type.dynamic", INFO, "Loading portal type " + portal_type_name)
  global core_portal_type_class_dict
  portal_type_category_list = []
  attribute_dict = dict(portal_type=portal_type_name,
                        _categories=[],
                        constraints=[])
  if portal_type_name in core_portal_type_class_dict:
    if not core_portal_type_class_dict[portal_type_name]['generating']:
      # Loading the (full) outer portal type class
      core_portal_type_class_dict[portal_type_name]['generating'] = True
    else:
      # Loading the inner portal type class without any mixin,
      # interface or Property Sheet
      klass = _importClass(document_class_registry.get(
        core_portal_type_class_dict[portal_type_name]['type_class']))
      # LOG("ERP5Type.dynamic", INFO,
      #     "Loaded portal type %s (INNER)" % portal_type_name)
      # Don't do anything else, just allow to load fully the outer
      # portal type class
      return ((klass,), [], [], attribute_dict)
  # Do not use __getitem__ (or _getOb) because portal_type may exist in a
  # type provider other than Types Tool.
  portal_type = getattr(site.portal_types, portal_type_name, None)
  type_class = None
  if portal_type is not None:
    # type_class has a compatibility getter that should return
    # something even if the field is not set (i.e. Base Type object
    # was not migrated yet). It only works if factory_method_id is set.
    type_class = portal_type.getTypeClass()
    # The Tools used to have 'Folder' or None as type_class instead of
    # 'NAME Tool', so make sure the type_class is correct
    #
    # NOTE: under discussion so might be removed later on
    if portal_type_name.endswith('Tool') and type_class in ('Folder', None):
      type_class = portal_type_name.replace(' ', '')
    mixin_list = portal_type.getTypeMixinList()
    interface_list = portal_type.getTypeInterfaceList()
    portal_type_category_list = portal_type.getTypeBaseCategoryList()
    attribute_dict['_categories'] = portal_type_category_list[:]
  else:
    LOG("ERP5Type.dynamic", WARNING,
        "Cannot find a portal type definition for '%s', trying to guess..."
        % portal_type_name)
  # But if neither factory_init_method_id nor type_class are set on
  # the portal type, we have to try to guess, for compatibility.
  # Moreover, some tools, such as 'Activity Tool', don't have any
  # portal type
  if type_class is None:
    if portal_type_name in core_portal_type_class_dict:
      # Only happen when portal_types is empty (e.g. when creating a
      # new ERP5Site)
      type_class = core_portal_type_class_dict[portal_type_name]['type_class']
    else:
      # Try to figure out a coresponding document class from the
      # document side. This can happen when calling newTempAmount for
      # instance:
      #  Amount has no corresponding Base Type and will never have one
      #  But the semantic of newTempXXX requires us to create an
      #  object using the Amount Document, so we promptly do it:
      type_class = portal_type_name.replace(' ', '')
    mixin_list = []
    interface_list = []
  if type_class is None:
    raise AttributeError('Document class is not defined on Portal Type ' + \
                         portal_type_name)
  klass = None
  if '.' in type_class:
    # A dotted name is already a full class path.
    type_class_path = type_class
  else:
    type_class_path = None
  # Skip any document within ERP5Type Product as it is needed for
  # bootstrapping anyway
  type_class_namespace = document_class_registry.get(type_class, '')
  if not (type_class_namespace.startswith('Products.ERP5Type') or
          portal_type_name in core_portal_type_class_dict):
    import erp5.component.document
    module_fullname = 'erp5.component.document.' + type_class
    module_loader = erp5.component.document.find_module(module_fullname)
    if module_loader is not None:
      try:
        module = module_loader.load_module(module_fullname)
      except ImportError, e:
        LOG("ERP5Type.dynamic", WARNING,
            "Could not load Component module '%s': %s" % (module_fullname, e))
      else:
        try:
          klass = getattr(module, type_class)
        except AttributeError:
          LOG("ERP5Type.dynamic", WARNING,
              "Could not get class '%s' in Component module '%s'" % \
              (type_class, module_fullname))
  if klass is None:
    type_class_path = document_class_registry.get(type_class)
    if type_class_path is None:
      raise AttributeError('Document class %s has not been registered:'
                           ' cannot import it as base of Portal Type %s'
                           % (type_class, portal_type_name))
  # NOTE(review): the visible portion of this function ends here — klass /
  # type_class_path are computed but never returned in this chunk, so the
  # function presumably continues in a part of the file not shown here;
  # confirm against the upstream source before relying on this boundary.
'BookletReference', 'BookReference', 'ConferenceReference', 'InbookReference', 'IncollectionReference', 'InproceedingsReference', 'ManualReference', 'MastersthesisReference', 'MiscReference', 'PhdthesisReference', 'PreprintReference', 'ProceedingsReference', 'TechreportReference', 'UnpublishedReference', 'WebpublishedReference', ) ZOPE_TEXTINDEXES = ( 'TextIndex', 'ZCTextIndex', 'TextIndexNG2', 'TextIndexNG3', ) if USE_EXTERNAL_STORAGE: try: import Products.ExternalStorage except ImportError: LOG('CMFBibliographyAT', PROBLEM, 'ExternalStorage N/A, falling back to AnnotationStorage') USE_EXTERNAL_STORAGE = False
'soap_wsdl': 'SOAPWSDLConnection', 'sftp' : "SFTPConnection", 'sql' : "SQLConnection", } for handler_id, module_id in handler_module_dict.iteritems(): # Ignore non-functionnal plugins. # This is done to avoid adding strict dependencies. # Code relying on the presence of a plugin will fail upon # WebServiceTool.connect . try: module = __import__( 'erp5.component.module.%s' % (module_id, ), globals(), {}, [module_id]) except ImportError: LOG('WebServiceTool', WARNING, 'Unable to import module %r. %r transport will not be available.' % \ (module_id, handler_id), error=True) else: registerConnectionPlugin(handler_id, getattr(module, module_id)) class WebServiceTool(BaseTool): """ This tool can do all kinds of web services in all kinds of protocols. """ id = 'portal_web_services' title = 'Web Service Tool' meta_type = 'ERP5 Web Service Tool' portal_type = 'Web Service Tool' allowed_content_types = ()
def synchronizeDynamicModules(context, force=False):
  """
  Allow resetting all classes to ghost state, most likely done after
  adding and removing mixins on the fly

  Most of the time, this reset is only hypothetic:
  * with force=False, the reset is only done if another node resetted
    the classes since the last reset on this node.
  * with force=True, forcefully reset the classes on the current node
    and send out an invalidation to other nodes
  """
  portal = context.getPortalObject()

  global last_sync
  if force:
    # hard invalidation to force sync between nodes
    portal.newCacheCookie('dynamic_classes')
    last_sync = portal.getCacheCookie('dynamic_classes')
  else:
    cookie = portal.getCacheCookie('dynamic_classes')
    if cookie == last_sync:
      # up to date, nothing to do
      return
    last_sync = cookie

  import erp5

  with aq_method_lock:
    # Thanks to TransactionalResource, the '_bootstrapped' global variable
    # is updated in a transactional way. Without it, it would be required to
    # restart the instance if anything went wrong.
    # XXX: In fact, TransactionalResource does not solve anything here, because
    #      portal cookie is unlikely to change and this function will return
    #      immediately, forcing the user to restart.
    #      This may not be so bad after all: it enables the user to do easily
    #      some changes that are required for the migration.
    if portal.id not in _bootstrapped and \
       TransactionalResource.registerOnce(__name__, 'bootstrap', portal.id):
      migrate = False
      from Products.ERP5Type.Tool.PropertySheetTool import PropertySheetTool
      from Products.ERP5Type.Tool.TypesTool import TypesTool
      from Products.ERP5Type.Tool.ComponentTool import ComponentTool
      try:
        for tool_class in TypesTool, PropertySheetTool, ComponentTool:
          # if the instance has no property sheet tool, or incomplete
          # property sheets, we need to import some data to bootstrap
          # (only likely to happen on the first run ever)
          tool_id = tool_class.id
          tool = getattr(portal, tool_id, None)
          if tool is None:
            tool = tool_class()
            portal._setObject(tool_id, tool, set_owner=False,
                              suppress_events=True)
            tool = getattr(portal, tool_id)
          elif tool._isBootstrapRequired():
            migrate = True
          else:
            continue
          tool._bootstrap()
          # swap in the dynamically generated portal-type class
          tool.__class__ = getattr(erp5.portal_type, tool.portal_type)
        if migrate:
          portal.migrateToPortalTypeClass()
          portal.portal_skins.changeSkin(None)
          # only mark the portal as bootstrapped once the transaction
          # actually commits
          TransactionalResource(tpc_finish=lambda txn:
                                _bootstrapped.add(portal.id))
          LOG('ERP5Site', INFO, 'Transition successful, please update your'
              ' business templates')
        else:
          _bootstrapped.add(portal.id)
      except:
        # Required because the exception may be silently dropped by the caller.
        transaction.doom()
        LOG('ERP5Site', PANIC, "Automatic migration of type and"
            " property sheet tool failed", error=sys.exc_info())
        raise

    LOG("ERP5Type.dynamic", 0, "Resetting dynamic classes")
    try:
      for class_name, klass in inspect.getmembers(erp5.portal_type,
                                                  inspect.isclass):
        klass.restoreGhostState()

      # Clear accessor holders of ZODB Property Sheets and Portal Types
      erp5.accessor_holder.clear()
      erp5.accessor_holder.property_sheet.clear()

      for name in erp5.accessor_holder.portal_type.__dict__.keys():
        if name[0] != '_':
          delattr(erp5.accessor_holder.portal_type, name)
    except Exception:
      # Allow easier debugging when the code is wrong as this
      # exception is catched later and re-raised as a BadRequest
      import traceback; traceback.print_exc()
      raise

    # It's okay for classes to keep references to old methods - maybe.
    # but we absolutely positively need to clear the workflow chains
    # stored in WorkflowMethod objects: our generation of workflow
    # methods adds/registers/wraps existing methods, but does not
    # remove old chains. Do it now.
    resetRegisteredWorkflowMethod()

    # Some method generations are based on portal methods, and portal
    # methods cache results. So it is safer to invalidate the cache.
    cache_tool = getattr(portal, 'portal_caches', None)
    if cache_tool is not None:
      cache_tool.clearCache()
def send(self, text, recipient, sender):
  """Send an SMS through the Mobyt HTTP gateway.

  text -- message body.
  recipient -- relative URL of the recipient document; its default mobile
    telephone is used as the destination number.
  sender -- relative URL of the sender document; used either for its title
    (title mode) or its default mobile number.

  Returns a one-element list containing the gateway-side message id on
  success, or None in simulation mode. Raises SMSGatewayError when the
  gateway answers "KO", ValueError on bad configuration or an
  unrecognized gateway answer.
  """
  traverse = self.getPortalObject().restrictedTraverse
  # Check message type
  message_type = self.getProperty('mobyt_message_type', 'MULTITEXT')
  if message_type not in ('TEXT', 'MULTITEXT', 'WAPPUSH', 'UCS2', 'MULTIUCS2'):
    raise ValueError("Type of message in not allowed")
  # Check message quality
  quality = self.getProperty('mobyt_quality', 'n')
  # Allow sender personalization and status of SMS
  assert quality in ('n', 'l', 'll'), "Unknown quality : '%s'" % quality
  # Recipient: resolve document, then map its phone URL to a gateway number
  recipient = self._transformPhoneUrlToGatewayNumber(
      traverse(recipient).getDefaultMobileTelephoneValue().asURL())
  base_url = self.api_url + "/send.php"
  # Common params
  params = {
      "user": self.getGatewayUser(),
      "pass": self.getGatewayPassword(),
      "rcpt": recipient,
      "data": text,
      "qty": quality,
      "return_id": 1,
  }
  if self.isTitleMode():
    params['sender'] = traverse(
        sender).getDefaultMobileTelephoneValue().getTitle()
  else:
    params['sender'] = self._transformPhoneUrlToGatewayNumber(
        traverse(sender).getDefaultMobileTelephoneValue().asURL()
        ) or self.getDefaultSender()
  # Define type of message.
  # BUG FIX: this used to compare against the lowercase string "text",
  # which can never match any of the (uppercase) allowed types, so even
  # plain TEXT messages were forced through the 'operation' branch and
  # its quality assertion. Compare against 'TEXT' as intended.
  if message_type != "TEXT":
    assert quality == 'n', "This type of message require top level messsage quality"
    params['operation'] = message_type
  # Send message (or test)
  if self.isSimulationMode():
    LOG("MobytGateway", INFO, params)
    result = {'status': "Test"}
  else:
    params = urllib.urlencode(params)
    page = urllib.urlopen(base_url, params)
    result = self._fetchSendResponseAsDict(page)
  # Check result and return
  if result['status'] == "OK":
    # return message id (gateway side)
    return [result.get('status_info', "")]
  elif result['status'] == "KO":
    # we get an error when calling the gateway
    raise SMSGatewayError(
        urllib.unquote(
            result.get('status_info', "Impossible to send the SMS")))
  elif result['status'] == "Test":
    # just a test, no message id
    return None
  else:
    raise ValueError("Unknown result", 0, result)
def __call__(self, instance, *args, **kw): assert not 'validation_state' in kw, "validation_state parameter is not supported" assert not 'simulation_state' in kw, "simulation_state parameter is not supported" if self._warning: LOG("ERP5Type", WARNING, "Deprecated Getter Id: %s" % self._id) return instance._getRelatedValueList(self._key, *args, **kw)
def _getWorklistActionList(): worklist_dict = {} for wf in self.objectValues(): if wf is not None: a = wf.getWorklistVariableMatchDict(info) if a is not None: worklist_dict[wf.getId()] = a if not worklist_dict: return () is_anonymous = portal.portal_membership.isAnonymousUser() portal_catalog = portal.portal_catalog sql_catalog = portal_catalog.getSQLCatalog() catalog_security_uid_groups_columns_dict = \ sql_catalog.getSQLCatalogSecurityUidGroupsColumnsDict() getSecurityUidDictAndRoleColumnDict = \ portal_catalog.getSecurityUidDictAndRoleColumnDict search_result_ = getattr(self, "Base_getCountFromWorklistTable", None) use_cache = search_result_ is not None if use_cache: ignored_security_column_id_set = self._getWorklistIgnoredSecurityColumnSet( ) ignored_security_uid_parameter_set = { x for x, y in catalog_security_uid_groups_columns_dict.iteritems() if y in ignored_security_column_id_set } _getSecurityUidDictAndRoleColumnDict = getSecurityUidDictAndRoleColumnDict def getSecurityUidDictAndRoleColumnDict(**kw): security_uid_dict, role_column_dict, local_role_column_dict = \ _getSecurityUidDictAndRoleColumnDict(**kw) for ignored_security_column_id in ignored_security_column_id_set: role_column_dict.pop(ignored_security_column_id, None) local_role_column_dict.pop(ignored_security_column_id, None) for ignored_security_uid_parameter in \ ignored_security_uid_parameter_set: security_uid_dict.pop(ignored_security_uid_parameter) return security_uid_dict, role_column_dict, local_role_column_dict count_column_expression = 'sum(`%s`)' % (COUNT_COLUMN_TITLE, ) # Prevent catalog from trying to join getQuery = SimpleQuery # BBB def search_result(select_dict, group_by, query, limit, src__): select_item_list = [] for alias, expression in select_dict.iteritems(): if expression is None: expression = alias select_item_list.append('%s AS %s' % (expression, alias)) return search_result_( select_expression=','.join(select_item_list), group_by_expression=','.join(group_by), query=query, 
limit=limit, src__=src__, ) else: search_result = portal_catalog.unrestrictedSearchResults count_column_expression = 'count(*)' # Let catalog join as needed getQuery = lambda comparison_operator=None, **kw: AutoQuery( operator=comparison_operator, **kw) worklist_result_dict = {} # Get a list of dict of WorklistVariableMatchDict grouped by compatible # conditions (worklist_list_grouped_by_condition, worklist_metadata) = \ groupWorklistListByCondition( worklist_dict=worklist_dict, sql_catalog=sql_catalog, getSecurityUidDictAndRoleColumnDict=\ getSecurityUidDictAndRoleColumnDict, catalog_security_uid_groups_columns_dict=\ catalog_security_uid_groups_columns_dict, ) if src__: action_list = [] for grouped_worklist_dict in worklist_list_grouped_by_condition: # Generate the query for this worklist_list (total_criterion_id_list, query) = \ getWorklistListQuery( getQuery=getQuery, grouped_worklist_dict=grouped_worklist_dict, ) group_by = total_criterion_id_list assert COUNT_COLUMN_TITLE not in total_criterion_id_list select_dict = dict.fromkeys(total_criterion_id_list) select_dict[COUNT_COLUMN_TITLE] = count_column_expression catalog_brain_result = [] try: catalog_brain_result = search_result( select_dict=select_dict, group_by=group_by, query=query, limit=None, src__=src__) except Unauthorized: if not is_anonymous: raise LOG('WorkflowTool.listActions', WARNING, 'Exception while computing worklists: %s' % grouped_worklist_dict.keys(), error=True) continue except ProgrammingError, error_value: # 1146 = table does not exist if not use_cache or error_value[0] != 1146: raise try: self.Base_zCreateWorklistTable() except ProgrammingError, error_value: # 1050 = table exists (alarm run just a bit too late) if error_value[0] != 1050: raise
def testPortalTypeViewRecursivly(test_class, validator, module_id,
                                 business_template_info,
                                 business_template_info_list,
                                 portal_type_list, portal_type_path_dict,
                                 base_path, tested_portal_type_list):
  '''
  This function go on all portal_type recursivly if the portal_type
  could contain other portal_types and make a test for all view that
  have action
  '''
  # iteration over all allowed portal_types inside the module/portal_type
  for portal_type in portal_type_list:
    portal_path = portal_type_path_dict[portal_type]
    if portal_type not in tested_portal_type_list:
      # this portal type haven't been tested yet
      backuped_module_id = module_id
      backuped_business_template_info = business_template_info
      if not business_template_info.actions.has_key(portal_type):
        # search in other bt :
        business_template_info = None
        for bt_info in business_template_info_list:
          if bt_info.actions.has_key(portal_type):
            business_template_info = bt_info
            break
        if not business_template_info:
          LOG("Can't find the action :", 0, portal_type)
          break
        # create the object in portal_trash module
        module_id = 'portal_trash'
      # generate one test method per visible object_view/object_list action
      # of this portal type, across every business template defining it
      for business_template_info in business_template_info_list:
        if portal_type not in business_template_info.actions:
          continue
        for action_information in business_template_info.actions[portal_type]:
          if (action_information['category'] in ('object_view', 'object_list')
              and action_information['visible'] == 1
              and action_information['action'].startswith('string:${object_url}/')
              and len(action_information['action'].split('/')) == 2):
            view_name = action_information['action'].split('/')[-1].split('?')[0]
            method = makeTestMethod(validator,
                                    module_id,
                                    portal_path,
                                    view_name,
                                    business_template_info.title)
            method_name = ('test_%s_%s_%s' %
                           (business_template_info.title,
                            str(portal_type).replace(' ', '_'),  # can be unicode
                            view_name))
            method.__name__ = method_name
            setattr(test_class, method_name, method)
      module_id = backuped_module_id
      # Remember this portal type as already tested, so a portal type that
      # appears in several business templates is not tested more than once.
      tested_portal_type_list.append(portal_type)
    # recurse into the allowed content types of this portal type
    new_portal_type_list = []
    for tmp_business_template_info in business_template_info_list:
      new_portal_type_list.extend(
        tmp_business_template_info.allowed_content_types.get(portal_type, ()))
    new_portal_type_path_dict = {}
    if base_path != '':
      next_base_path = '%s/%s' % (base_path, portal_type)
    # Module portal_type not to have been added to the path because
    # this portal type object already existing
    elif 'Module' not in portal_type:
      next_base_path = portal_type
    else:
      next_base_path = ''
    for pt in new_portal_type_list:
      if next_base_path != '' and 'Module' not in pt:
        new_portal_type_path_dict[pt] = '%s/%s' % (next_base_path, pt)
      else:
        new_portal_type_path_dict[pt] = pt
    testPortalTypeViewRecursivly(test_class=test_class,
                                 validator=validator,
                                 module_id=module_id,
                                 business_template_info=backuped_business_template_info,
                                 business_template_info_list=business_template_info_list,
                                 portal_type_list=new_portal_type_list,
                                 portal_type_path_dict=new_portal_type_path_dict,
                                 base_path=next_base_path,
                                 tested_portal_type_list=tested_portal_type_list)
def createObjectOfType(self, root, pt):
  """Recursively create one sample object of type `pt` inside `root`.

  Returns the list of every object created: the object itself plus the
  sample sub-objects created for each of its allowed content types.
  Creation is skipped entirely when the type is not addable in `root`.
  """
  # Set return list
  rv = []
  # Get the id of the portal_type
  portal_type = pt.getId()
  # Check if type is allowed
  allowed_content_types = [x.getId() for x in root.getAllowedTypes()]
  # Override for root folder (only structures)
  if root.portal_type == 'Folder':
    allowed_content_types = [
      'atlas_category_level_1', 'atlas_county', 'directory'
    ]
  if portal_type in allowed_content_types:
    # Build default field values from every schema of the type,
    # skipping computed (Method) fields.
    kwargs = {}
    for s in iterSchemataForType(portal_type):
      for (name, field) in getAllSchemaFieldsAndDescriptions(s):
        if not isinstance(field, Method):
          kwargs[name] = self.getDefaultForFieldType(field)
    # Set the id
    kwargs['id'] = 'X_%s_X' % portal_type
    # Debug output
    if self.debug:
      msg = "Creating %s in %s" % (portal_type, root.portal_type)
      LOG('API Sample Generator', INFO, msg)
    # Create a dummy object with default values
    try:
      o = createContentInContainer(root, portal_type, **kwargs)
    except WorkflowException:
      # For some reason, we're getting a workflow exception on article videos?
      return rv
    # Append to return list
    rv.append(o)
    # Get the allowed object types
    _allowed_content_types = pt.allowed_content_types
    # Override for category level 2 (no products!)
    if portal_type == 'atlas_category_level_2':
      _allowed_content_types = ['atlas_category_level_3']
    # Create sub-objects
    for _pt_id in _allowed_content_types:
      # Prevent recursion... Don't create a type inside itself.
      if _pt_id == portal_type:
        continue
      try:
        _o = self.createObjectOfType(o, self.portal_types[_pt_id])
        rv.extend(_o)
      except RuntimeError:
        # Skip if something bombs out from recursive calls
        pass
      except TypeError:
        # Skip if something bombs out with a TypeError
        pass
  return rv
def log(message, severity=DEBUG):
    """Log *message* to the Zope event log under the 'PortalTransforms'
    subsystem, at the given severity (DEBUG by default)."""
    LOG('PortalTransforms', severity, message)
context = kw.get('context') if context is None: return msgid translation_service = getattr(context, domain, None) if translation_service is not None: if isinstance(translation_service, TranslationsTool): return translation_service.translate(domain, msgid, *args, **kw) return msgid def initialize(context): """ """ setGlobalTranslationService(GlobalTranslationService()) LOG('naayaHotfix', DEBUG, 'Patch for Localizer and other stuff') #patch for TextIndexNG2. import sys from Products.TextIndexNG2.converters import doc, ppt, ps, ooffice, pdf, xls from Products.TextIndexNG2.converters.doc import wvConf_file from Products.TextIndexNG2.Registry import ConverterRegistry from Products.TextIndexNG2.converters.stripogram import html2text #patch converters/doc.py def doc_convert(self, doc): """Convert WinWord document to raw text""" tmp_name = self.saveFile(doc) if sys.platform == 'win32': return self.execute('antiword -m UTF-8.txt "%s"' % tmp_name)
def prepareContents(self, registry, register_subdirs=0):
    """Scan this skin directory and build ({id: object}, object-metadata)
    for its contents.

    Subdirectories become DirectoryView objects (registered on the fly
    when register_subdirs is true); files become the type resolved from
    the types file by entry name, then by base name, then by extension.
    A file whose constructor raises becomes a BadFile placeholder
    carrying the traceback.
    """
    # Creates objects for each file.
    fp = expandpath(self.filepath)
    data = {}
    objects = []
    types = self._readTypesFile()
    for entry in _filtered_listdir(fp):
        if not self._isAllowableFilename(entry):
            continue
        e_filepath = path.join(self.filepath, entry)
        e_fp = expandpath(e_filepath)
        if path.isdir(e_fp):
            # Add a subdirectory only if it was previously registered,
            # unless register_subdirs is set.
            info = registry.getDirectoryInfo(e_filepath)
            if info is None and register_subdirs:
                # Register unknown subdirs
                registry.registerDirectoryByPath(e_fp)
                info = registry.getDirectoryInfo(e_filepath)
            if info is not None:
                mt = types.get(entry)
                t = None
                if mt is not None:
                    t = registry.getTypeByMetaType(mt)
                if t is None:
                    t = DirectoryView
                ob = t(entry, e_filepath)
                ob_id = ob.getId()
                data[ob_id] = ob
                objects.append({'id': ob_id, 'meta_type': ob.meta_type})
        else:
            # split "name.ext"; entries without a dot have an empty ext
            pos = rfind(entry, '.')
            if pos >= 0:
                name = entry[:pos]
                ext = path.normcase(entry[pos + 1:])
            else:
                name = entry
                ext = ''
            if not name or name == 'REQUEST':
                # Not an allowable id.
                continue
            mo = bad_id(name)
            if mo is not None and mo != -1:  # Both re and regex formats
                # Not an allowable id.
                continue
            # resolve the object type: full entry name, then bare name,
            # then file extension
            t = None
            mt = types.get(entry, None)
            if mt is None:
                mt = types.get(name, None)
            if mt is not None:
                t = registry.getTypeByMetaType(mt)
            if t is None:
                t = registry.getTypeByExtension(ext)
            if t is not None:
                properties = self._readProperties(e_fp + '.properties')
                try:
                    ob = t(name, e_filepath, fullname=entry,
                           properties=properties)
                except:
                    import traceback
                    typ, val, tb = exc_info()
                    try:
                        exc_lines = traceback.format_exception(typ, val, tb)
                        LOG('DirectoryView', ERROR, join(exc_lines, '\n'))
                        ob = BadFile(name, e_filepath,
                                     exc_str=join(exc_lines, '\r\n'),
                                     fullname=entry)
                    finally:
                        tb = None  # Avoid leaking frame!
                ob_id = ob.getId()
                data[ob_id] = ob
                objects.append({'id': ob_id, 'meta_type': ob.meta_type})
    return data, tuple(objects)
def _index_object(self, documentId, obj, threshold=None, attr=''):
    """Extract the text of *attr* from *obj*, convert/decode/normalize/split
    it, and return the lexicon word-id list to index (None when no usable
    text could be obtained).
    """
    encoding = self.default_encoding
    source = mimetype = None

    # This is to support foreign file formats that
    # are stored as "File" objects when searching
    # through PrincipiaSearchSource
    if hasattr(obj, 'txng_get'):
        # Check if the object has a method txng_get()
        result = obj.txng_get([attr])
        if result is None:
            return None
        source, mimetype, encoding = result
    elif obj.meta_type in ('File', 'Portal File', 'Naaya File') and \
         attr in ('PrincipiaSearchSource', 'SearchableText'):
        source = getattr(obj, attr, None)
        if source and not self.use_converters:
            if callable(source):
                source = source()
        else:
            # no usable attribute text, or converters are enabled: hand the
            # raw object data to the converter path below.
            # NOTE(review): else-pairing with the outer `if` assumed here
            # (matches upstream TextIndexNG2) — TODO confirm.
            source = str(obj)
        mimetype = obj.content_type
    elif obj.meta_type == 'ExtFile' and \
         attr in ('PrincipiaSearchSource', 'SearchableText'):
        source = obj.index_html()
        mimetype = obj.getContentType()
    elif obj.meta_type in ('ZMSFile', ):
        # attribute name carries a language suffix, e.g. 'file_en'
        lang = attr[attr.rfind('_') + 1:]
        req = {'lang': lang}
        file = obj.getObjProperty('file', req)
        source = ''
        mimetype = None
        if file:
            source = file.getData()
            mimetype = file.getContentType()
    elif obj.meta_type in ('TTWObject', ) and attr not in ('SearchableText', ):
        field = obj.get(attr)
        source = str(field)
        if field.meta_type in ('ZMSFile', 'File'):
            mimetype = field.getContentType()
        else:
            mimetype = None
    else:
        # default behaviour: try to obtain the source from
        # the attribute or method call return value
        try:
            source = getattr(obj, attr)
            if callable(source):
                source = source()
            if not isinstance(source, unicode):
                source = str(source)
        except (AttributeError, TypeError):
            return None

    # If enabled, we try to find a valid document converter
    # and convert the data to get a hopefully text only representation
    # of the data.
    if self.use_converters:
        if mimetype is None or mimetype == 'application/octet-stream':
            mimetype, encoding = guess_content_type(obj.getId(), source)
            if not encoding:
                encoding = self.default_encoding
        try:
            converter = ConverterRegistry.get(mimetype)
        except RegistryException:
            LOG('textindexng', ERROR,
                '%s could not be converted because no converter could be found for %s'
                % (obj.absolute_url(1), mimetype))
            return None
        if converter:
            # prefer the newer convert2() API; fall back to convert()
            try:
                source, encoding = converter.convert2(source, encoding,
                                                      mimetype)
            except:
                try:
                    source = converter.convert(source)
                except:
                    LOG('textindexng', ERROR,
                        '%s could not be converted'
                        % obj.absolute_url(1), error=sys.exc_info())
                    return None

    if obj.meta_type == 'Portal File':
        source += ' ' + obj.SearchableText()

    # Now we try to get a valid encoding. For unicode strings
    # we have to perform no action. For string objects we check
    # if the document has an attibute (not a method) '<index>_encoding'.
    # As fallback we also check for the presence of an attribute
    # 'document_encoding'. Checking for the two attributes allows
    # us to define different encodings for different attributes
    # on an object. This is useful when an object stores multiple texts
    # as attributes within the same instance (e.g. for multilingual
    # versions of a text but with different encodings).
    # If no encoding is specified as object attribute, we will use
    # Python's default encoding.
    # After getting the encoding, we convert the data to unicode.
    if isinstance(source, str):
        if encoding is None:
            try:
                encoding = self.default_encoding
            except:
                encoding = self.default_encoding = 'iso-8859-15'
        for k in ['document_encoding', attr + '_encoding']:
            enc = getattr(obj, k, None)
            if enc is not None:
                encoding = enc
        if encoding == 'ascii':
            encoding = 'iso-8859-15'
        try:
            source = unicode(source, encoding, 'strict')
        except UnicodeDecodeError:
            LOG('textindexng', WARNING,
                'UnicodeDecodeError raised from %s - ignoring unknown unicode characters'
                % obj.absolute_url(1))
            source = unicode(source, encoding, 'ignore')
    elif isinstance(source, unicode):
        pass
    else:
        raise TXNGError, "unknown object type"

    source = source.strip()
    if not source:
        return None

    # Normalization: apply translation table to data
    if self.use_normalizer:
        source = NormalizerRegistry.get(self.use_normalizer).process(source)

    # Split the text into a list of words
    SP = SplitterRegistry.get(self.use_splitter)
    _source = source
    words = SP(casefolding=self.splitter_casefolding,
               separator=self.splitter_separators,
               maxlen=self.splitter_max_len,
               singlechar=self.splitter_single_chars).split(_source)

    # remove stopwords from data
    if self.use_stopwords:
        words = self.use_stopwords.process(words)

    # We pass the list of words to the corresponding lexicon
    # and obtain a list of wordIds. The "old" TextIndex iterated
    # over every single words (overhead).
    return self._lexicon.getWordIdList(words)
def getTemplateField(self, cache=True):
  """
  Return template field of the proxy field.

  With cache=True (default) the resolved field is read from / written to
  the per-field cache, except when the field is not yet persisted or when
  form_id/field_id are TALES expressions (then caching is disabled for
  this call). Returns None (after logging a warning) when no target field
  can be resolved.
  """
  if cache is True:
    tales = self.tales
    if self._p_oid is None or tales['field_id'] or tales['form_id']:
      # unsaved field or dynamic form/field ids: result is not cacheable
      cache = False
    else:
      try:
        return self._getTemplateFieldCache()
      except KeyError:
        pass
  portal = self.getPortalObject()
  portal_skins = portal.portal_skins
  form = self.aq_parent
  object = form.aq_parent
  form_id = self.get_value('form_id')
  proxy_field = None
  form_id_with_skin_folder_name_flag = False
  if '/' in form_id:
    # If a / is in the form_id, it means that skin_folder is explicitly
    # defined. If so, prevent acquisition to get the form.
    form_id_with_skin_folder_name_flag = True
    proxy_form = aq_base(portal_skins).unrestrictedTraverse(form_id, None)
    if proxy_form is not None:
      # re-traverse with acquisition so the form is properly wrapped
      proxy_form = portal_skins.unrestrictedTraverse(form_id)
  else:
    proxy_form = getattr(object, form_id, None)
  if (proxy_form is not None):
    field_id = self.get_value('field_id')
    proxy_field = proxy_form._getOb(field_id, None)
    if proxy_field is None:
      if form_id_with_skin_folder_name_flag is False:
        # Try to get the field from another field library with a lower
        # priority.
        # This should return no field if the skin folder name is defined in
        # form_id.
        skin_info = SKINDATA.get(get_ident())
        if skin_info is not None:
          skin_selection_name, ignore, resolve = skin_info
          selection_dict = portal_skins._getSelections()
          candidate_folder_id_list = selection_dict[
            skin_selection_name].split(',')
          # walk the skin selection in priority order, first match wins
          for candidate_folder_id in candidate_folder_id_list:
            candidate_folder = portal_skins._getOb(candidate_folder_id, None)
            if candidate_folder is not None:
              proxy_form = candidate_folder._getOb(form_id, None)
              if proxy_form is not None:
                proxy_field = proxy_form._getOb(field_id, None)
                if proxy_field is not None:
                  break
  if proxy_field is None:
    LOG('ProxyField', WARNING,
        'Could not get a field from a proxy field %s in %s' % \
        (self.id, object.id))
  if cache is True:
    self._setTemplateFieldCache(proxy_field)
  return proxy_field
def prepareContents(self, registry, register_subdirs=0):
    """Scan this skin directory and build ({id: object}, object-metadata)
    for its contents (minimal-filepath variant with FSMetadata support).

    Subdirectories become DirectoryView objects (registered on the fly
    when register_subdirs is true); files become the type resolved from
    the types file by entry name, then by base name, then by extension.
    Filesystem .metadata drives properties, security permissions and
    proxy roles; a failing constructor yields a BadFile placeholder.
    """
    # Creates objects for each file.
    data = {}
    objects = []
    types = self._readTypesFile()
    for entry in _filtered_listdir(self._filepath, ignore=self.ignore):
        if not self._isAllowableFilename(entry):
            continue
        entry_minimal_fp = '/'.join((self._minimal_fp, entry))
        entry_filepath = path.join(self._filepath, entry)
        if path.isdir(entry_filepath):
            # Add a subdirectory only if it was previously registered,
            # unless register_subdirs is set.
            info = registry.getDirectoryInfo(entry_minimal_fp)
            if info is None and register_subdirs:
                # Register unknown subdirs
                registry.registerDirectoryByPath(entry_filepath)
                info = registry.getDirectoryInfo(entry_minimal_fp)
            if info is not None:
                mt = types.get(entry)
                t = None
                if mt is not None:
                    t = registry.getTypeByMetaType(mt)
                if t is None:
                    t = DirectoryView
                ob = t(entry, entry_minimal_fp)
                ob_id = ob.getId()
                data[ob_id] = ob
                objects.append({'id': ob_id, 'meta_type': ob.meta_type})
        else:
            # split "name.ext"; entries without a dot have an empty ext
            pos = entry.rfind('.')
            if pos >= 0:
                name = entry[:pos]
                ext = path.normcase(entry[pos + 1:])
            else:
                name = entry
                ext = ''
            if not name or name == 'REQUEST':
                # Not an allowable id.
                continue
            mo = bad_id(name)
            if mo is not None and mo != -1:  # Both re and regex formats
                # Not an allowable id.
                continue
            # resolve the object type: full entry name, then bare name,
            # then file extension
            t = None
            mt = types.get(entry, None)
            if mt is None:
                mt = types.get(name, None)
            if mt is not None:
                t = registry.getTypeByMetaType(mt)
            if t is None:
                t = registry.getTypeByExtension(ext)
            if t is not None:
                metadata = FSMetadata(entry_filepath)
                metadata.read()
                try:
                    ob = t(name, entry_minimal_fp, fullname=entry,
                           properties=metadata.getProperties())
                except:
                    import traceback
                    typ, val, tb = exc_info()
                    try:
                        exc_lines = traceback.format_exception(typ, val, tb)
                        LOG('DirectoryView', ERROR, '\n'.join(exc_lines))
                        ob = BadFile(name, entry_minimal_fp,
                                     exc_str='\r\n'.join(exc_lines),
                                     fullname=entry)
                    finally:
                        tb = None  # Avoid leaking frame!
                # FS-based security
                permissions = metadata.getSecurity()
                if permissions is not None:
                    for name in permissions.keys():
                        acquire, roles = permissions[name]
                        try:
                            ob.manage_permission(name, roles, acquire)
                        except ValueError:
                            LOG('DirectoryView', ERROR,
                                'Error setting permissions',
                                error=exc_info())
                # only DTML Methods and Python Scripts can have proxy roles
                if hasattr(ob, '_proxy_roles'):
                    try:
                        ob._proxy_roles = tuple(metadata.getProxyRoles())
                    except:
                        LOG('DirectoryView', ERROR,
                            'Error setting proxy role', error=exc_info())
                ob_id = ob.getId()
                data[ob_id] = ob
                objects.append({'id': ob_id, 'meta_type': ob.meta_type})
    return data, tuple(objects)
def Type(self):
    """ Deprecated. Use Title(). """
    # BBB alias kept for backward compatibility: logs a deprecation
    # warning, then delegates to Title().
    LOG('CMFCore.TypesTool', WARNING,
        'TypeInformation.Type() is deprecated, use Title().')
    return self.Title()
def _validate_after_path_and_method_id(self, activity_tool, message, value): if not (isinstance(value, (tuple, list)) and len(value) == 2): LOG('CMFActivity', WARNING, 'unable to recognize value for after_path_and_method_id: %r' % (value,)) return [] return self._validate(activity_tool, path=value[0], method_id=value[1])
def _processSearchValue(self, search_value, default_logical_operator,
                        comparison_operator):
  """
    Change search_value into a list of values, one or more logical
    operators, and a comparison operator.

    If no default_logical_operator is given, 'or' is used.

    search_value
      basestring
      int
      dict
      list or tuple
        Non-empty
        Composed of homogeneous items

    Returns: 3-tuple
      dict:
        key (string)
          Comparison operator
        value (list of anything)
          List of values applying to this operator.
      string:
        Logical operator applied to all elements of returned dict.
      bool:
        True if logical operators were searched for in values, False
        otherwise. Useful to give different meanings to in-value
        operators and others.
  """
  # An empty-string comparison_operator means "no operator, and do not
  # parse operators out of the values either".
  if comparison_operator == '':
    comparison_operator = None
    get_operator_from_value = False
  else:
    get_operator_from_value = self.get_operator_from_value
  logical_operator = None
  if default_logical_operator is None:
    default_logical_operator = 'or'
  parsed = False
  if isinstance(search_value, dict):
    actual_value = search_value['query']
    if search_value.get('key') not in (None, self.__class__.__name__):
      LOG(self.__class__.__name__, 100,
          '"key" dict entry does not match current class: %r' % \
          (search_value, ))
    if 'type' in search_value:
      assert 'operator' not in search_value, search_value
      assert 'range' not in search_value, search_value
    else:
      # comparison_operator parameter collides with dict's 'operator' key.
      # Fail loudly.
      assert comparison_operator is None
      value_operator = search_value.get('operator')
      value_range = search_value.get('range')
      if value_range is not None:
        if value_operator is not None:
          LOG('SearchKey', 100,
              '"range" and "operator" are mutualy exclusive, ignoring '\
              'operator: %r' % (search_value, ))
        # map the legacy 'range' spelling onto a comparison operator
        if value_range in operator_list:
          comparison_operator = value_range
        elif value_range in single_operator_dict:
          comparison_operator = single_operator_dict[value_range]
        elif value_range in dual_operator_dict:
          # dual operators (e.g. min/max pairs) need exactly two values
          if not isinstance(actual_value, (tuple, list)):
            raise TypeError(
              'Operator %r requires value to be a tuple/list. (%r)'
              % (value_range, search_value))
          if len(actual_value) != 2:
            raise TypeError(
              'Operator %r requires value to have a length of 2. len(%r) = %s (%r)'
              % (value_range, actual_value, len(actual_value), search_value))
          comparison_operator = dual_operator_dict[value_range]
          logical_operator = 'and'
        else:
          raise ValueError('Unknown "range" value in %r' % search_value)
      if value_operator is not None:
        if not isinstance(value_operator, basestring):
          raise TypeError(
            'Operator must be of a string type. Got a %r'
            % type(value_operator))
        value_operator = value_operator.lower()
        if not isinstance(actual_value, (tuple, list)):
          raise TypeError(
            'When specifying an operator, query must be a list.'
          )
        # 'in' is sugar for equality OR-ed over the value list
        if value_operator == 'in':
          comparison_operator = '='
          logical_operator = 'or'
        else:
          logical_operator = value_operator
    search_value = actual_value
  if not isinstance(search_value, list_type_list):
    search_value = [search_value]
  if logical_operator is None:
    logical_operator = default_logical_operator
  operator_value_dict = {}
  if comparison_operator is None:
    # No explicit operator: derive one per value (parse it out of string
    # values when allowed, otherwise guess / hardcode).
    getComparisonOperator = self._getComparisonOperator
    guessComparisonOperator = self._guessComparisonOperator
    preprocessValue = self._preprocessValue
    for value in search_value:
      is_dict = isinstance(value, dict)
      if is_dict:
        base_value = value['query']
      else:
        base_value = value
      if isinstance(base_value, basestring):
        if get_operator_from_value:
          parsed = True
          operator, base_value = getComparisonOperator(base_value)
        else:
          operator = guessComparisonOperator(base_value)
      elif base_value is None:
        operator = 'is'
      else:
        # XXX: comparison operator is hardcoded for non-strings.
        operator = '='
      if is_dict:
        value['query'] = base_value
      else:
        value = base_value
      operator_value_dict.setdefault(operator, []).append(
        preprocessValue(value, operator),
      )
  elif isinstance(comparison_operator, (tuple, list)):
    # one operator per value, position-matched
    assert len(comparison_operator) == len(search_value)
    for operator, value in zip(comparison_operator, search_value):
      operator_value_dict.setdefault(operator, []).append(value)
  else:
    operator_value_dict[comparison_operator] = search_value
  return operator_value_dict, logical_operator, parsed
def _initialize(self, db, column_list):
    """Report that a non-empty activity table was upgraded in place.

    Called after a schema upgrade added `column_list` columns to
    `self.sql_table`; existing rows are NOT backfilled, so this only
    logs which columns were left uninitialized.
    """
    LOG('CMFActivity', ERROR,
        "Non-empty %r table upgraded."
        " The following added columns could not be initialized: %s"
        % (self.sql_table, ", ".join(column_list)))
def _getGroupsForPrincipal(user_name, path): security_category_dict = {} # key is the base_category_list, # value is the list of fetched categories security_group_list = [] security_definition_list = () try: # To get the complete list of groups, we try to call the # ERP5Type_getSecurityCategoryMapping which should return a list # of lists of two elements (script, base_category_list) like : # ( # ('script_1', ['base_category_1', 'base_category_2', ...]), # ('script_2', ['base_category_1', 'base_category_3', ...]) # ) # # else, if the script does not exist, falls back to a list containng # only one list : # (('ERP5Type_getSecurityCategoryFromAssignment', # self.getPortalAssignmentBaseCategoryList() ),) mapping_method = getattr( self, 'ERP5Type_getSecurityCategoryMapping', None) if mapping_method is None: security_definition_list = (( 'ERP5Type_getSecurityCategoryFromAssignment', self.getPortalAssignmentBaseCategoryList()), ) else: security_definition_list = mapping_method() # get the person from its reference - no security check needed catalog_result = self.portal_catalog.unrestrictedSearchResults( portal_type="Person", reference=user_name) if len(catalog_result) != 1: # we won't proceed with groups if len(catalog_result) > 1: # configuration is screwed raise ConsistencyError, 'There is more than one Person whose \ login is %s : %s' % ( user_name, repr([r.getObject() for r in catalog_result])) else: # no person is linked to this user login return () person_object = catalog_result[0].getObject() person_id = person_object.getId() # Fetch category values from defined scripts for (method_name, base_category_list) in security_definition_list: base_category_list = tuple(base_category_list) method = getattr(self, method_name) security_category_list = security_category_dict.setdefault( base_category_list, []) try: # The called script may want to distinguish if it is called # from here or from _updateLocalRolesOnSecurityGroups. 
# Currently, passing portal_type='' (instead of 'Person') # is the only way to make the difference. security_category_list.extend( method(base_category_list, user_name, person_object, '')) except ConflictError: raise except: LOG('ERP5GroupManager', WARNING, 'could not get security categories from %s' % (method_name, ), error=sys.exc_info()) # Get group names from category values # XXX try ERP5Type_asSecurityGroupIdList first for compatibility generator_name = 'ERP5Type_asSecurityGroupIdList' group_id_list_generator = getattr(self, generator_name, None) if group_id_list_generator is None: generator_name = ERP5TYPE_SECURITY_GROUP_ID_GENERATION_SCRIPT group_id_list_generator = getattr(self, generator_name) for base_category_list, category_value_list in \ security_category_dict.iteritems(): for category_dict in category_value_list: try: group_id_list = group_id_list_generator( category_order=base_category_list, **category_dict) if isinstance(group_id_list, str): group_id_list = [group_id_list] security_group_list.extend(group_id_list) except ConflictError: raise except: LOG('ERP5GroupManager', WARNING, 'could not get security groups from %s' % generator_name, error=sys.exc_info()) finally: pass return tuple(security_group_list)
def _log(self, severity, summary):
  """Log ``summary`` under this class's name.

  For severities above INFO, the current exception info is attached so the
  traceback appears in the event log; for INFO and below no traceback is
  attached.  Must therefore be called from inside an ``except`` block when
  used with error severities — TODO confirm call sites.
  """
  # Was `severity > INFO and sys.exc_info() or None`: the dated and/or
  # ternary hack, fragile if its middle operand were ever falsy.  A real
  # conditional expression says the same thing safely and readably.
  LOG(self.__class__.__name__, severity, summary,
      error=sys.exc_info() if severity > INFO else None)
def log(self, summary, severity=INFO, detail=''):
  """Write ``summary`` (and optional ``detail``) to the event log.

  The subsystem name combines the class name, the entry id and the remote
  IP address, so each entry can be traced back to its originating session.
  """
  LOG('%s %s (IP: %s)' % (self.__class__.__name__,
                          self.entry_id,
                          self.remote_ip),
      severity, summary, detail)
def __call__(self, field, id, **kw):
  """Evaluate this field value's TALES expression for ``field``/``id``.

  Builds the TALES evaluation context (field, form, here/context,
  container, request, modules, preferences, cell...) in ``kw``, runs the
  expression, and falls back to the field's statically stored value if the
  expression raises.  Returns ``self.returnValue(field, id, value)``.
  """
  REQUEST = kw.get('REQUEST', get_request())
  if REQUEST is not None:
    # Proxyfield stores the "real" field in the request. Look if the
    # corresponding field exists in request, and use it as field in the
    # TALES context
    field = REQUEST.get(
      'field__proxyfield_%s_%s_%s' % (field.id, field._p_oid, id),
      field)
  kw['field'] = field
  form = field.aq_parent
  # XXX (JPS) form for default is wrong apparently in listbox - double check
  obj = getattr(form, 'aq_parent', None)
  if obj is not None:
    container = obj.aq_inner.aq_parent
  else:
    container = None
  kw['form'] = form
  kw['request'] = REQUEST
  kw['here'] = obj
  kw['context'] = obj
  kw['modules'] = SecureModuleImporter
  kw['container'] = container
  try :
    kw['preferences'] = obj.getPortalObject().portal_preferences
  except AttributeError :
    # obj may be None (form with no acquisition parent), or the portal
    # may lack a preference tool; the TALES context simply goes without.
    LOG('ERP5Form', PROBLEM,
        'portal_preferences not put in TALES context (not installed?)')
  # This allows to pass some pointer to the local object
  # through the REQUEST parameter. Not very clean.
  # Used by ListBox to render different items in a list
  if kw.get('cell') is None:
    # NOTE(review): `request` is the REQUEST explicitly passed in kw,
    # whereas `REQUEST` above may come from get_request(); the explicit
    # one takes precedence for cell resolution.
    request = kw.get('REQUEST')
    if request is not None:
      if getattr(request, 'cell', None) is not None:
        kw['cell'] = request.cell
      else:
        # No `cell` attribute: the request object itself plays the
        # role of the cell.
        kw['cell'] = request
      if 'cell_index' not in kw and\
          getattr(request, 'cell_index', None) is not None:
        kw['cell_index'] = request.cell_index
    elif getattr(REQUEST, 'cell', None) is not None:
      kw['cell'] = REQUEST.cell
      if 'cell_index' not in kw and \
          getattr(REQUEST, 'cell_index', None) is not None:
        kw['cell_index'] = REQUEST.cell_index
  # on Zope 2.12, only path expressions can access the CONTEXTS name
  # but ERP5 has many python expressions that try to access CONTEXTS, so
  # we try to keep backward compatibility
  if self.tales_expr._text.startswith("python:"):
    kw['CONTEXTS'] = kw
  try:
    value = self.tales_expr.__of__(field)(**kw)
  except (ConflictError, RuntimeError):
    raise
  except:
    # We add this safety exception to make sure we always get
    # something reasonable rather than generate plenty of errors
    LOG('ERP5Form', PROBLEM,
        'Field.get_value %r [%s], exception on tales_expr: ' % (field, id),
        error=sys.exc_info())
    # field may be ProxyField
    # here we avoid calling field.get_recursive_orig_value
    # on all fields because it can be acquired from another
    # field in context. ie, from a listbox field.
    # So, test condition on meta_type attribute to avoid
    # non desirable side effects.
    if field.meta_type == 'ProxyField':
      value = field.get_recursive_orig_value(id)
    else:
      value = field.get_orig_value(id)
  return self.returnValue(field, id, value)