def handleReclassify(self, action):
     """Form action handler: reclassify the context object.

     If the current user has the required permission the attribution is
     stored directly; otherwise a 'quotationtool.reclassify' workflow
     process is started so the change can be reviewed.  In both cases a
     user notation is appended to the workflow history, an
     ObjectModifiedEvent is fired and the browser is redirected.
     """
     self.updateAttributionSubForm()
     data, errors = self.extractData()
     if errors:
         self.status = self.formErrorsMessage
         return
     history = IWorkflowHistory(self.context)
     # principal may be absent from the request (e.g. in tests)
     principal = getattr(self.request, 'principal', None)
     if self.checkPermission():
         # store input data to context (attribution.getContent() returns it)
         self.attribution.storeToWorkItem()
     else:
         # store input data to persistent attribution
         pattr = PersistentAttribution()
         # NOTE(review): attr_errors is ignored here — sub-form errors do
         # not abort the reclassification; confirm this is intentional.
         attr_data, attr_errors = self.attribution.extractData()
         categories = []
         for catset in attr_data.values():
             for cat in catset:
                 categories.append(cat.__name__)
         pattr.set(categories)
         # start workflow process
         self.process_started = True
         pd = zope.component.getUtility(IProcessDefinition,
                                        name='quotationtool.reclassify')
         context = ReclassificationContext(removeAllProxies(self.context))
         proc = pd(context)
         proc.start(getattr(principal, 'id', u"Unknown"),
                    datetime.datetime.now(), data['workflow-message'],
                    removeAllProxies(history), removeAllProxies(pattr))
     # write history
     history.append(
         UserNotation(getattr(principal, 'id', u"Unknown"),
                      data['workflow-message']))
     zope.event.notify(ObjectModifiedEvent(self.context))
     # redirect to next url
     self.request.response.redirect(self.nextURL())
示例#2
0
 def test_pstsubaction_fields(self):
     """Test value when pstaction_fields modified.

     Remove 'plan' from pstaction_fields and assert that
     pstsubaction_fields takes the same values once the modification
     event has been handled.
     """
     # use assertEqual/assertNotEqual: the -Equals forms are deprecated
     # aliases (removed in Python 3.12)
     self.assertEqual(self.pst.pstsubaction_fields,
                      self.pst.pstaction_fields)
     # remove 'plan' value
     self.pst.pstaction_fields = [
         'IDublinCore.title', 'description_rich', 'reference_number',
         'categories', 'result_indicator', 'planned_end_date',
         'planned_begin_date', 'effective_begin_date', 'effective_end_date',
         'progress', 'health_indicator', 'health_indicator_details',
         'representative_responsible', 'manager', 'responsible',
         'extra_concerned_people', 'IAnalyticBudget.projection',
         'IAnalyticBudget.analytic_budget', 'budget', 'budget_comments',
         'ISustainableDevelopmentGoals.sdgs', 'observation', 'comments'
     ]
     self.assertNotEqual(self.pst.pstsubaction_fields,
                         self.pst.pstaction_fields)
     # the subscriber syncs pstsubaction_fields on modification
     notify(
         ObjectModifiedEvent(self.pst,
                             Attributes(Interface, 'pstaction_fields')))
     self.assertEqual(self.pst.pstsubaction_fields,
                      self.pst.pstaction_fields)
示例#3
0
    def to_field_value(self, value):
        """field value is an Object type, that provides field.schema

        Converts the widget value (a mapping of sub-field names to raw
        widget values) into a schema-providing object: each
        non-readonly sub-field present in *value* is converted through
        its own widget/converter pair and written via its data manager.
        Notifies an ObjectModifiedEvent listing the changed attributes.
        """
        if value is NO_VALUE:
            return self.field.missing_value

        # try to get the original object, or if there's no chance an empty one
        obj = self.widget.get_object(value)
        obj = self.adapted_obj(obj)

        names = []  # sub-field names that were actually (re)written
        registry = self.widget.request.registry
        for name, field in getFieldsInOrder(self.field.schema):
            if not field.readonly:
                try:
                    newval_raw = value[name]
                except KeyError:
                    # sub-field was not submitted; leave it untouched
                    continue

                widget = registry.getMultiAdapter((field, self.widget.request),
                                                  IFieldWidget)
                converter = registry.getMultiAdapter((field, widget),
                                                     IDataConverter)
                newval = converter.to_field_value(newval_raw)

                dman = registry.getMultiAdapter((obj, field), IDataManager)
                oldval = dman.query()
                # nested Object fields are always written, since their
                # equality check may not reflect sub-field changes
                if (oldval != newval) or IObject.providedBy(field):
                    dman.set(newval)
                    names.append(name)

        if names:
            # announce exactly which attributes of the schema changed
            registry.notify(
                ObjectModifiedEvent(obj, Attributes(self.field.schema,
                                                    *names)))

        return obj
示例#4
0
        def _check_parent_modified(parent, parent_modified, annex):
            """Assert that editing and removing *annex* bump the parent's
            modification date and that the catalog brain reflects it.

            Closure helper: relies on ``self``, ``item``, ``catalog`` and
            ``notify`` from the enclosing test scope.

            NOTE(review): the "date was updated" checks compare against
            item.modified() rather than parent.modified() — confirm this
            is intentional.
            """
            parent_uid = parent.UID()
            # modification date was updated
            self.assertNotEqual(parent_modified, item.modified())
            parent_modified = parent.modified()
            self.assertEqual(
                catalog(UID=parent_uid)[0].modified, parent_modified)

            # edit the annex
            notify(ObjectModifiedEvent(annex))
            # modification date was updated
            self.assertNotEqual(parent_modified, item.modified())
            parent_modified = parent.modified()
            self.assertEqual(
                catalog(UID=parent_uid)[0].modified, parent_modified)

            # remove an annex
            self.portal.restrictedTraverse('@@delete_givenuid')(annex.UID())
            # modification date was updated
            self.assertNotEqual(parent_modified, item.modified())
            parent_modified = parent.modified()
            self.assertEqual(
                catalog(UID=parent_uid)[0].modified, parent_modified)
示例#5
0
 def applyChanges(self, data):
     """Apply form *data* to the content object and notify observers.

     Returns the ``changes`` dictionary produced by z3c.form's
     ``applyChanges``; if it is empty, nothing was written and no
     event is sent.
     """
     content = self.getContent()
     changes = applyChanges(self, content, data)
     # ``changes`` is a dictionary; if empty, there were no changes
     if changes:
         # Construct change-descriptions for the object-modified event:
         # one Attributes descriptor per touched interface.  (The old
         # special-casing of IAdmUtilEvent / IEventIfSuperclass was a
         # pair of pass-only dead branches and has been removed, along
         # with commented-out debug prints.)
         descriptions = []
         for interface, attrs in changes.items():
             names = attrs.keys()
             descriptions.append(Attributes(interface, *names))
         # Send out a detailed object-modified event
         zope.event.notify(ObjectModifiedEvent(content, *descriptions))
     return changes
示例#6
0
 def set_file(self, stream, content_type=None, content_encoding=None):
     """Store *stream*'s data into the blob and update the content type.

     If *content_type* is None the type/encoding are guessed from the
     file name and blob contents via the IMimeTypeClassifier utility;
     otherwise the provided values are used verbatim.  Fires an
     ObjectModifiedEvent unless this storage belongs to an image.
     """
     # copy the stream into the blob in fixed-size chunks
     with self._file.open('w') as descriptor:
         data = stream.read(CHUNK_SIZE)
         while data:
             descriptor.write(data)
             data = stream.read(CHUNK_SIZE)
     if content_type is None:
         # Detect content-type
         identifier = get_file_name(stream, default=self.id)
         # the blob file may already be committed or still uncommitted
         blob_filename = self._file._p_blob_uncommitted or \
             self._file._p_blob_committed
         self._content_type, self._content_encoding = getUtility(
             IMimeTypeClassifier).guess_type(id=identifier,
                                             filename=blob_filename,
                                             default=DEFAULT_MIMETYPE)
     else:
         # Set provided values
         self._content_type = content_type
         self._content_encoding = content_encoding
     if self._content_type == 'text/plain':
         # force an explicit charset for plain text
         self._content_type = 'text/plain; charset=utf-8'
     if not interfaces.IImage.providedBy(aq_parent(self)):
         # If we are not a storage of an image, trigger an event.
         notify(ObjectModifiedEvent(self))
示例#7
0
def breakRelations(event):
    """Break relations on any object pointing to us.

    That is, store the object path on the broken relation.  Requires
    both the relation catalog and the intid utility to be available;
    silently does nothing otherwise.
    """
    obj = event.object
    if not IHasIncomingRelations.providedBy(obj):
        return
    catalog = component.queryUtility(ICatalog)
    intids = component.queryUtility(IIntIds)
    if catalog is None or intids is None:
        return

    # find all relations that point to us
    try:
        obj_id = intids.getId(obj)
    except KeyError:
        # our intid was unregistered already
        return
    # reuse obj_id: the original looked the intid up a second time,
    # duplicating work and bypassing the KeyError guard above
    rels = list(catalog.findRelations({'to_id': obj_id}))
    for rel in rels:
        rel.broken(rel.to_path)
        # we also need to update the relations for these objects
        notify(ObjectModifiedEvent(rel.from_object))
示例#8
0
    def __iter__(self):
        """Transmogrifier section: update Dexterity objects from items.

        For every pipeline item that resolves to an existing
        IDexterityContent object, optionally set its UUID and update
        every schema field, then fire ObjectModifiedEvent.  All items
        are passed on unchanged.
        """
        for item in self.previous:
            pathkey = self.pathkey(*item.keys())[0]
            # not enough info
            if not pathkey:
                yield item
                continue

            path = item[pathkey]
            # Skip the Plone site object itself
            if not path:
                yield item
                continue

            obj = self.context.unrestrictedTraverse(
                safe_unicode(path).lstrip('/'), None)

            if not IDexterityContent.providedBy(obj):
                # Path doesn't exist
                # obj can not only be None, but also the value of an attribute,
                # which is returned by traversal.
                yield item
                continue

            uuid = item.get('plone.uuid')
            if uuid is not None:
                IMutableUUID(obj).set(str(uuid))

            # For all fields in the schema, update in roughly the same way
            # z3c.form.widget.py would
            for schemata in iterSchemata(obj):
                for name, field in getFieldsInOrder(schemata):
                    self.update_field(obj, field, item)

            notify(ObjectModifiedEvent(obj))
            yield item
示例#9
0
 def handle_migrate_plans(self, action):
     """Migrate legacy plan documents into per-user plan folders.

     Walks year folders under the legacy *plans_path*, skips a fixed
     list of user ids, creates a 'plan' object per user/year, converts
     the legacy HTML body to plain text and submits it through the
     workflow.  (Python 2 code: uses str.decode.)
     """
     logger.info('moving plans ...')
     plans_path = '/acerca-de/estructura-interna/secretaria-academica/informes/planes'
     plans = api.content.get(path=plans_path)
     transforms = api.portal.get_tool('portal_transforms')
     for folder in plans.getFolderContents():
         # folder ids are years; contained plan ids are user ids
         year = folder.id
         for plan in folder.getObject().getFolderContents():
             userid = plan.id
             # NOTE(review): these user ids are *skipped*; the inverted
             # condition left commented out below suggests the list was
             # toggled for partial runs — confirm intent before reuse.
             if userid in [
                     'natig', 'dolivero', 'aortiz', 'gruiz', 'pablo',
                     'rolando', 'maperez'
             ]:
                 # if userid not in ['rolando', ]:
                 continue
             planfolder = self.getplanfolder(userid)
             logger.info('Plan Folder {0}'.format(planfolder))
             obj = api.content.create(
                 type='plan',
                 title='Plan de trabajo {0}'.format(year),
                 container=planfolder,
                 id=year)
             # pre-decoded fallback, used when the raw HTML is not valid
             # UTF-8 and the first convertTo raises UnicodeDecodeError
             text = plan.getObject().getText().decode('utf-8', 'ignore')
             try:
                 stream = transforms.convertTo('text/plain',
                                               plan.getObject().getText(),
                                               mimetype='text/html')
             except UnicodeDecodeError:
                 stream = transforms.convertTo('text/plain',
                                               text,
                                               mimetype='text/html')
             obj.text = stream.getData().strip().decode('utf-8', 'ignore')
             obj.reindexObject()
             notify(ObjectModifiedEvent(obj))
             api.content.transition(obj=obj, transition='submit_send')
             logger.info('{0}-{1}'.format(plan.id, folder.id))
示例#10
0
    def test_old_factory_not_unregistered_if_not_created_by_dexterity(self):
        """Changing an FTI's factory must not unregister a factory
        utility that Dexterity did not create itself (reg1.info is None,
        i.e. no Dexterity registration info).
        """
        portal_type = u"testtype"
        fti = DexterityFTI(portal_type)

        # Mock the lookup of the site and the site manager at the site root
        dummy_site = self.create_dummy()
        self.mock_utility(dummy_site, ISiteRoot)

        site_manager_mock = Mock(
            wraps=PersistentComponents(bases=(getGlobalSiteManager(),)))
        from zope.component.hooks import getSiteManager
        self.patch_global(getSiteManager, return_value=site_manager_mock)

        # Pretend like we have a utility registered

        reg1 = self.create_dummy()
        reg1.provided = IFactory
        reg1.name = 'old-factory'
        reg1.info = None

        site_manager_mock.registeredUtilities = Mock(return_value=[reg1])

        # simulate the FTI modification that the event handler reacts to
        fti.factory = 'new-factory'
        ftiModified(
            fti,
            ObjectModifiedEvent(
                fti,
                DexterityFTIModificationDescription('factory', 'old-factory')
            )
        )

        # This should not be removed, since we didn't create it
        self.assertFalse(site_manager_mock.unregisterUtility.called)
        # A new one may still be created, however
        self.assertEqual(
            site_manager_mock.registerUtility.call_args[0][2], 'new-factory')
示例#11
0
File: data.py  Project: scmos/scmos.cover
    def set(self, data):
        """Persist tile *data*, purging cached image scales and keeping
        a per-image modification-time stamp up to date.
        """
        # when setting data, we need to purge scales/image data...
        # XXX hack?
        try:
            scale_key = self.key.replace('.data.', '.scale.')
            del self.annotations[scale_key]
        except KeyError:
            pass

        for k, v in data.items():
            if INamedImage.providedBy(v):
                mtime_key = '{0}_mtime'.format(k)
                # new or changed image: purge the tile cache and stamp a
                # fresh modification time; otherwise carry the old stamp
                # over (None if there never was one)
                if (self.key not in self.annotations or
                    k not in self.annotations[self.key] or
                    (self.key in self.annotations and
                     data[k] != self.annotations[self.key][k])):
                    # set modification time of the image
                    notify(Purge(self.tile))
                    data[mtime_key] = time.time()
                else:
                    data[mtime_key] = self.annotations[self.key].get(mtime_key, None)

        self.annotations[self.key] = PersistentDict(data)
        notify(ObjectModifiedEvent(self.context))
示例#12
0
 def saveDiscussions(self):
     """Create/update discussion records from ``self.data`` and delete
     the ones no longer present.

     Existing records (identified by ``object_id``) are updated in
     place; records without an id are newly created.  Any stored record
     whose key is absent afterwards is deleted.  Matching lifecycle
     events (modified/created/removed) are fired for each case.
     """
     session = Session()
     new_record_keys = []
     domain_model = removeSecurityProxy(self.context.domain_model)
     for record in self.data:
         discussion_text = record.get("body_text", "")
         object_id = record.get("object_id", None)
         if object_id:
             # update the existing record in place
             current_record = removeSecurityProxy(
                 self.context.get(getItemKey(object_id))
             )
             current_record.body_text = discussion_text
             session.add(current_record)
             session.flush()
             notify(ObjectModifiedEvent(current_record))
             new_record_keys.append(stringKey(current_record))
         else:
             # create a brand-new record attached to the scheduled item
             new_record = domain_model(
                 body_text = discussion_text,
                 language = get_default_language()
             )
             new_record.scheduled_item = removeSecurityProxy(
                 self.context.__parent__
             )
             session.add(new_record)
             session.flush()
             notify(ObjectCreatedEvent(new_record))
             new_record_keys.append(stringKey(new_record))
     records_to_delete = [
         removeSecurityProxy(self.context.get(key))
         for key in self.context.keys() if key not in new_record_keys
     ]
     # explicit loops instead of map(): map() is lazy in Python 3, so
     # the deletions and events would silently never run.  Ordering is
     # preserved: all deletes first, then all removal events.
     for deleted in records_to_delete:
         session.delete(deleted)
     for deleted in records_to_delete:
         notify(ObjectRemovedEvent(deleted))
 def test_update_iteration(self, mock_datetime):
     """Changing an iteration's estimate on a later (mocked) day must
     add a new timeline sample while keeping the earlier one.
     """
     # day 1: create the iteration
     mock_datetime.now.return_value = datetime(2012, 12, 28, 8, 0, 0)
     self.project.invokeFactory('Iteration',
                                'iteration-1',
                                title=u"Iteration 1",
                                start=date(2012, 12, 28),
                                end=date(2012, 12, 31),
                                estimate=Decimal("1.00"))
     iteration_1 = self.project['iteration-1']
     # day 2: change the estimate and fire the modification event
     mock_datetime.now.return_value = datetime(2012, 12, 29, 8, 0, 0)
     iteration_1.estimate = Decimal("2.0")
     notify(ObjectModifiedEvent(iteration_1))
     # NOTE(review): estimates appear scaled in the timeline
     # (1.00 -> 8.00, 2.0 -> 16.00), presumably hours per day — confirm.
     self.assertTimelineEqual(iteration_1, datetime(2012, 12, 28),
                              datetime(2012, 12, 30), timedelta(days=1),
                              [(datetime(2012, 12, 28), {
                                  'estimate': Decimal("8.00"),
                                  'todo': Decimal("0.00"),
                                  'done': Decimal("0.00")
                              }),
                               (datetime(2012, 12, 29), {
                                   'estimate': Decimal("16.00"),
                                   'todo': Decimal("0.00"),
                                   'done': Decimal("0.00")
                               })])
示例#14
0
    def test_containing_subdossier(self):
        """The containing_subdossier index must follow title changes of
        a subdossier once an ObjectModifiedEvent for the title fires.
        """
        self.login(self.regular_user)

        self.subdossier.reindexObject()
        self.subdocument.reindexObject()

        # use assertEqual: assertEquals is a deprecated alias (removed
        # in Python 3.12)
        self.assertEqual(
            '',
            obj2brain(self.subdossier).containing_subdossier,
            )

        self.assertEqual(
            '2016',
            obj2brain(self.subdocument).containing_subdossier,
            )

        # Check if the subscribers catch editing the title of a subdossier
        IOpenGeverBase(self.subdossier).title = u'Subd\xf6ssier CHANGED'

        self.subdossier.reindexObject()
        self.subdocument.reindexObject()

        notify(ObjectModifiedEvent(
            self.subdossier,
            Attributes(Interface, 'IOpenGeverBase.title'),
            ))

        self.assertEqual(
            u'',
            obj2brain(self.subdossier).containing_subdossier,
            )

        self.assertEqual(
            'Subd\xc3\xb6ssier CHANGED',
            obj2brain(self.subdocument).containing_subdossier,
            )
示例#15
0
    def handleSave(self, action):
        """Save the tile configuration and redirect to the layout view.

        The extracted form data is handed to the
        ITilesConfigurationScreen adapter; an ObjectModifiedEvent is
        fired for the tile and a status message is shown.  (Dead
        commented-out dataManager code removed.)
        """
        data, errors = self.extractData()
        if errors:
            self.status = self.formErrorsMessage
            return

        typeName = self.tileType.__name__

        # Traverse to a new tile in the context, with no data
        tile = self.context.restrictedTraverse('@@%s/%s' % (
            typeName,
            self.tileId,
        ))

        tile_conf_adapter = getMultiAdapter((self.context, self.request, tile),
                                            ITilesConfigurationScreen)

        tile_conf_adapter.set_configuration(data)

        # Look up the URL - We need to redirect to the layout view, since
        # that is the only way from where a user would access the
        # configuration
        contextURL = absoluteURL(tile.context, self.request)

        layoutURL = '%s/layoutedit' % contextURL

        notify(ObjectModifiedEvent(tile))

        # Show a confirmation message before redirecting
        IStatusMessage(self.request).addStatusMessage(_(
            u"Tile configuration saved.", ),
                                                      type=u'info')

        self.request.response.redirect(layoutURL)
示例#16
0
 def handle_update(self, action, data):
     """Update a group sitting from form *data* and return an XML
     response describing the change.
     """
     session = Session()
     self.template_data = []
     # fetch the sitting to update (the previous code needlessly
     # instantiated an empty GroupSitting that was immediately
     # overwritten by this query result)
     sitting = session.query(domain.GroupSitting).get(data["ids"])
     # stored dates are naive (tzinfo stripped)
     sitting.start_date = data["start_date"].replace(tzinfo=None)
     sitting.end_date = data["end_date"].replace(tzinfo=None)
     if "language" in data:
         sitting.language = data["language"]
     if "venue" in data:
         sitting.venue_id = data["venue"]
     sitting.short_name = data.get("short_name", None)
     sitting.activity_type = data.get("activity_type", None)
     sitting.meeting_type = data.get("meeting_type", None)
     sitting.convocation_type = data.get("convocation_type", None)
     # set extra data needed by template
     session.flush()
     notify(ObjectModifiedEvent(sitting))
     self.template_data.append({"group_sitting_id": sitting.group_sitting_id,
                                 "action": "inserted",
                                 "ids": data["ids"]})
     session.flush()
     self.request.response.setHeader('Content-type', 'text/xml')
     return self.xml_template()
示例#17
0
    def handleSave(self, action):
        """Persist the submitted tile data, merged over the stored
        values, then redirect to the tile URL.
        """
        data, errors = self.extractData()
        if errors:
            self.status = self.formErrorsMessage
            return

        tile = self.getTile()

        # Merge the submitted values over the existing ones so fields
        # that were not part of this form submission are not lost.
        dataManager = ITileDataManager(tile)
        merged = dataManager.get()
        for key in data:
            merged[key] = data[key]
        dataManager.set(merged)

        # announce the modification and confirm to the user
        notify(ObjectModifiedEvent(tile))
        api.portal.show_message(_(u'Tile saved'), self.request, type='info')

        # Resolve the URL only after the data is stored, so transient
        # tiles are accounted for correctly.
        tileURL = absoluteURL(tile, self.request)
        self.request.response.redirect(tileURL)
示例#18
0
    def test_reindex_due_date_on_container_modification(self):
        """
        When modifying a contentype scheduled with a ScheduleConfig
        Task due date should be updated and reindexed.
        """
        task_container = self.task_container
        task = self.task
        old_due_date = task.due_date

        # set an additional delay of 42 days on the task config
        CalculationDefaultDelay.calculate_delay = Mock(return_value=42)
        msg = "The task due date should not have changed"
        # pass msg (it was computed but never used before); use
        # assertEqual — assertEquals is a deprecated alias (removed in
        # Python 3.12)
        self.assertEqual(task.due_date, old_due_date, msg)

        # simulate modification
        notify(ObjectModifiedEvent(task_container))

        catalog = api.portal.get_tool('portal_catalog')
        msg = 'catalog should not find anything with old due date'
        task_brain = catalog(due_date=old_due_date, UID=task.UID())
        self.assertFalse(task_brain, msg)
        msg = 'new due date should have been reindexed'
        task_brain = catalog(due_date=task.due_date, UID=task.UID())
        self.assertTrue(task_brain, msg)
示例#19
0
    def test_containing_dossier(self):
        """The containing_dossier index of contained objects must follow
        a title change of the main dossier.
        """
        # assertEqual instead of the deprecated assertEquals alias
        # (removed in Python 3.12)
        self.assertEqual(
            obj2brain(self.subdossier).containing_dossier,
            'Testd\xc3\xb6ssier XY')

        self.assertEqual(
            obj2brain(self.document).containing_dossier,
            'Testd\xc3\xb6ssier XY')

        # check subscriber for catch editing maindossier titel
        IOpenGeverBase(self.dossier).title = u"Testd\xf6ssier CHANGED"
        self.dossier.reindexObject()
        notify(
            ObjectModifiedEvent(self.dossier,
                                Attributes(Interface, 'IOpenGeverBase.title')))

        self.assertEqual(
            obj2brain(self.subdossier).containing_dossier,
            'Testd\xc3\xb6ssier CHANGED')
        self.assertEqual(
            obj2brain(self.document).containing_dossier,
            'Testd\xc3\xb6ssier CHANGED')

        transaction.commit()
示例#20
0
    def test_task_ending_on_container_modification(self):
        """
        When modifying a contentype scheduled with a ScheduleConfig
        Task should ended automatically depending on end conditions
        and ending_states.
        """
        task_container = self.task_container
        task = self.task

        # put the task container on 'published' state to match 'ending states'
        api.content.transition(task_container, transition='publish')
        # reopen the task to be sure it was not closed before the container
        # modification
        if api.content.get_state(task) == 'closed':
            api.content.transition(task, 'back_in_realized')
        msg = "The task should not be closed yet ! (for the sake of the test)"
        # use assertNotEqual/assertEqual: the -Equals forms are
        # deprecated aliases (removed in Python 3.12)
        self.assertNotEqual(api.content.get_state(task), 'closed', msg)

        # simulate modification
        notify(ObjectModifiedEvent(task_container))

        # the task should have been ended
        msg = "The task should have been ended"
        self.assertEqual(api.content.get_state(task), 'closed', msg)
    def test_integration_task_events(self):
        """ Trigger every event of a task at least one times
        and check the journalentries.
        """

        dossier = create(Builder('dossier'))

        # Add-Event
        task = create(Builder('task').within(dossier))

        # check_entry=-2: presumably indexes into the journal entries so
        # the task-added entry is checked rather than the latest one —
        # confirm against check_annotation's implementation
        self.check_annotation(
            dossier,
            action_type='Task added',
            action_title='Task added: %s' % task.title_or_id(),
            check_entry=-2,
        )

        # Modified-Event
        notify(ObjectModifiedEvent(task))
        self.check_annotation(
            dossier,
            action_type='Task modified',
            action_title='Task modified: %s' % task.title_or_id(),
        )
示例#22
0
    def __iter__(self):
        """Transmogrifier section: load a file from disk into a
        Dexterity file field.

        The file referenced by ``item[self.key]`` is read completely and
        stored on the field named by ``self.field``; items whose path
        does not resolve are passed through untouched.
        """
        for item in self.previous:
            filename = resolvePackageReferenceOrFile(item[self.key])
            # use a context manager so the handle is always closed (the
            # previous code leaked it).  The old `if not file_` guard was
            # dead code: an open file object is always truthy.
            with open(filename, 'r') as file_:
                keys = item.keys()
                pathkey = self.pathkey(*keys)[0]
                typekey = self.typekey(*keys)[0]

                # Get the file object by path
                path = item[pathkey]
                obj = self.context.unrestrictedTraverse(path.lstrip('/'), None)
                if obj is None:  # path doesn't exist
                    yield item
                    continue

                # Set file field
                fti = getUtility(IDexterityFTI, name=item[typekey])
                schema = fti.lookupSchema()
                field = getFields(schema)[self.field]

                # Don't pass the file descriptor but only the file's data as
                # a string, because else the source files get removed!
                filedata = file_.read()
                filename = file_.name[file_.name.rfind('/') + 1:].decode('utf-8')
                fileobj = field._type(filedata, filename=filename)

                field.set(field.interface(obj), fileobj)

            # Fire ObjectModifiedEvent so that digitally_available gets set
            notify(ObjectModifiedEvent(obj))
            yield item
    def test_checksum_is_updated_before_storing_version(self, browser):
        """A document's bumblebee checksum must be refreshed during
        checkin, before the new version is stored.
        """
        content = bumblebee_asset('example.docx').bytes()
        document = create(
            Builder('document').within(self.dossier).attach_file_containing(
                content, u'example.docx').checked_out())

        # replace the file while the document stays checked out
        document.update_file('foo',
                             content_type='text/plain',
                             filename=u'foo.txt')
        notify(ObjectModifiedEvent(document))
        transaction.commit()

        # checksum has not been updated
        self.assertEqual(DOCX_CHECKSUM,
                         IBumblebeeDocument(document).get_checksum())

        manager = getMultiAdapter((document, self.portal.REQUEST),
                                  ICheckinCheckoutManager)
        manager.checkin()

        # checksum has been updated
        self.assertEqual(TXT_CHECKSUM,
                         IBumblebeeDocument(document).get_checksum())

        versioner = Versioner(document)
        history = versioner.get_history_metadata()
        self.assertEqual(2, history.getLength(countPurged=False))

        # version 0 predates the file replacement
        version_0 = versioner.retrieve(0)
        self.assertEqual(DOCX_CHECKSUM,
                         IBumblebeeDocument(version_0).get_checksum())

        # document checksum should be updated before storing the version
        version_1 = versioner.retrieve(1)
        self.assertEqual(TXT_CHECKSUM,
                         IBumblebeeDocument(version_1).get_checksum())
示例#24
0
    def addFile(self, context, file_upload, is_attachment=False):
        """Create an Image/File (optionally *Attachment) object in
        *context* from a Zope FileUpload and return it.

        The content id is derived from the upload's file name,
        normalized and made unique within the container.  Fires an
        ObjectModifiedEvent for the created attachment.
        """
        # XXX: since the file is intercepted by tramline, the file size
        # here may be wrong — is this validation still effective?
        self.validateAddAttachment(file_upload)

        def findUniqueId(id):
            # Return *id*, or id with a numeric infix inserted before
            # the extension, so that it does not collide
            # (case-insensitively) with existing content ids.

            contextIds = [i.lower() for i in context.objectIds()]

            if id.lower() not in contextIds:
                return id

            dotDelimited = id.split('.')

            ext = dotDelimited[-1]
            name = '.'.join(dotDelimited[:-1])

            idx = 0
            while (name.lower() + '.' + str(idx) + '.' +
                   ext.lower()) in contextIds:
                idx += 1

            return (name + '.' + str(idx) + '.' + ext)

        def _add(file, id, filename):
            """Create the content object and store the payload."""
            # choose Image vs File by file extension
            suffix = os.path.splitext(filename)[-1].lower()
            if suffix in ['.png', '.jpg', '.jpeg', '.gif', '.bmp', '.tif',\
            '.tiff', '.dib', '.jpe', '.jfif',]:
                type_name = 'Image'
                mutator = 'setImage'
            else:
                type_name = 'File'
                mutator = 'setFile'
            if is_attachment:
                type_name += 'Attachment'
            new_id = context.invokeFactory(type_name, id)
            attachment = getattr(context, new_id)
            attachment.setTitle(filename)

            getattr(attachment, mutator)(file)

            attachment.unmarkCreationFlag()
            if shasattr(attachment, 'at_post_create_script'):
                attachment.at_post_create_script()

            attachment.reindexObject()

            return attachment

        if file_upload and isinstance(file_upload, FileUpload):

            # Make sure we have a unique file name
            fileName = file_upload.filename

            file_id = ''

            if fileName:
                # strip any path components (posix, windows, drive)
                fileName = fileName.split('/')[-1]
                fileName = fileName.split('\\')[-1]
                fileName = fileName.split(':')[-1]

                plone_utils = getToolByName(context, 'plone_utils')
                file_id = fileName
                # avoid ids starting with '_'
                if file_id[0] == '_':
                    file_id = '-' + file_id

                # TODO should be commented
                # normalize the file name (transliterated to pinyin)
                file_id = plone_utils.normalizeString(file_id)

            file_id = findUniqueId(file_id)

            attachment = _add(file_upload, file_id, fileName)
            event.notify(ObjectModifiedEvent(attachment))
            return attachment
示例#25
0
 def edit(self, **kwargs):
     """Set each keyword argument as an attribute on this object, then
     fire an ObjectModifiedEvent.

     NOTE(review): attributes are written to ``self`` but the event is
     sent for ``self.context`` — confirm that is the intended target.
     """
     for attribute in kwargs:
         setattr(self, attribute, kwargs[attribute])
     notify(ObjectModifiedEvent(self.context))
示例#26
0
    def test_composed(self):
        """End-to-end check of ComposedForm against a definition with a
        grid group and a fieldset group: dynamic schemas are loaded via
        ObjectModifiedEvent handlers, the grid schema is wrapped, and
        all fields appear (prefixed) in the correct form groups.
        """
        request, library, definition, group_a, group_b = self._fixtures()
        # modify schema (via XML of definition, group) entries, trigger
        # load of dynamic schema via event handlers from uu.dynamicschema:
        definition.entry_schema = DEFINITION_SCHEMA
        notify(ObjectModifiedEvent(definition))
        assert 'title' in getFieldNamesInOrder(definition.schema)
        group_a.entry_schema = GROUP_A_GRID_SCHEMA
        notify(ObjectModifiedEvent(group_a))
        assert 'name' in getFieldNamesInOrder(group_a.schema)
        group_b.entry_schema = GROUP_B_FIELDSET_SCHEMA
        notify(ObjectModifiedEvent(group_b))
        assert 'feedback' in getFieldNamesInOrder(group_b.schema)

        from uu.formlibrary.forms import ComposedForm
        # assumed: ComposedForm can get out of date when the schema of the
        # adapted item changes.  composed.schema and
        # composed.additionalSchemata reflect the schema of the definition
        # and its contained groups AT THE TIME OF CONSTRUCTION/ADAPTATION
        # -- if this becomes a problem, adjust the property implementation
        # to be a true proxy at a later date, and adjust this test
        # accordingly.
        composed = ComposedForm(definition, request)
        assert len(composed.additionalSchemata) == 2
        composed.update()

        # group_a is a grid, which has its schema wrapped by ComposedForm
        # construction -- the wrapper is referenced, we want to get it:
        from uu.formlibrary.forms import is_grid_wrapper_schema
        schemas = [t[1] for t in composed.group_schemas]
        wrapper = [s for s in schemas if is_grid_wrapper_schema(s)][0]
        # 'data' is field name for wrapped datagrid as list of DictRow objects
        assert 'data' in wrapper
        assert isinstance(wrapper['data'], zope.schema.List)
        assert isinstance(wrapper['data'].value_type, DictRow)
        column_schema = wrapper['data'].value_type.schema
        assert column_schema == group_a.schema

        # with regard to wrapping, serializations for the wrapper are NOT
        # stored or available in the uu.dynamicschema.schema.generated module
        # as they are throw-away and temporary for the scope of one view
        # transaction.  Every time you adapt a definition with ComposedForm,
        # a new wrapper schema will be created.
        # However, it should be noted that the wrapped schema providing the
        # field group's columns is persisted in the schema saver:
        from uu.dynamicschema.interfaces import ISchemaSaver
        saver = queryUtility(ISchemaSaver)
        assert saver is not None
        group_signature = saver.signature(group_a.schema)
        # consequence of schema mod above; group_a.schema saved, serialized:
        assert group_signature in saver.keys()
        group_schema_identifier = 'I%s' % group_signature
        from uu.dynamicschema.schema import generated
        # the dynamic schema module exposes the saved schema by signature
        dynamic = getattr(generated, group_schema_identifier, None)
        assert dynamic is not None
        assert dynamic is group_a.schema

        wrapper_signature = saver.signature(wrapper)
        assert wrapper_signature not in saver.keys()  # throw-away not stored

        # default fieldset fields:
        composed_schema_fields = [f.field for f in composed.fields.values()]
        assert definition.schema in [
            field.interface for field in composed_schema_fields
        ]
        for name, field in getFieldsInOrder(definition.schema):
            assert name in composed.fields  # keys
            assert field in composed_schema_fields

        # each field group
        for group in (group_a, group_b):
            schema = group.schema
            if group.group_usage == 'grid':
                schema = wrapper  # shortcut, we only have one grid in tests...
                self.assertEqual(schema['data'].required, False)
            formgroup = [
                g for g in composed.groups if g.__name__ == group.getId()
            ][0]
            assert schema in [
                field.field.interface for field in formgroup.fields.values()
            ]
            for name, field in getFieldsInOrder(schema):
                fullname = '.'.join((composed.getPrefix(schema), name))
                assert fullname in formgroup.fields  # prefixed name in keys
                assert field in [f.field for f in formgroup.fields.values()]
 def processFinished(self, process, history, object_, finish):
     """Workflow callback: on 'accept', copy the proposed attribution
     onto the item and announce the modification; any other outcome is
     ignored.
     """
     if finish != 'accept':
         return
     attribution = interfaces.IAttribution(self.item)
     attribution.set(object_.get())
     zope.event.notify(ObjectModifiedEvent(self.item))
示例#28
0
 def testModifyObjectMakesDirty(self):
     """Firing ObjectModifiedEvent for a contained module must mark the
     survey as dirty."""
     survey = self.create()
     survey.invokeFactory("euphorie.module", "module")
     # start from a clean state so the event is what sets the flag
     clearDirty(survey)
     notify(ObjectModifiedEvent(survey["module"]))
     self.assertEqual(isDirty(survey), True)
示例#29
0
def ical_import(container,
                ics_resource,
                event_type,
                sync_strategy=base.SYNC_KEEP_NEWER):
    """Import all VEVENT components of an iCalendar resource into *container*.

    :param container: Folderish content object in which events are created
        or updated.
    :param ics_resource: Raw iCalendar (ics) data, parsable by
        ``icalendar.Calendar.from_ical``.
    :param event_type: Portal type used when creating new event objects.
    :param sync_strategy: One of the ``base.SYNC_*`` constants.
        ``SYNC_NONE`` never matches existing events; ``SYNC_KEEP_MINE``
        skips events whose UID already exists; ``SYNC_KEEP_NEWER`` only
        updates when the incoming LAST-MODIFIED is newer; any other value
        updates unconditionally.
    :returns: ``{'count': <number of events created or updated>}``.
    """
    cal = icalendar.Calendar.from_ical(ics_resource)
    events = cal.walk('VEVENT')

    cat = getToolByName(container, 'portal_catalog')
    container_path = '/'.join(container.getPhysicalPath())

    def _get_by_sync_uid(uid):
        # Match only direct children of the container (depth 1).
        return cat(sync_uid=uid, path={'query': container_path, 'depth': 1})

    def _get_prop(prop, item, default=None):
        # Decoded component-property access with a fallback default.
        # NOTE(review): safe_unicode presumably passes non-string values
        # (datetimes, durations) through unchanged — confirm.
        ret = default
        if prop in item:
            ret = safe_unicode(item.decoded(prop))
        return ret

    def _from_list(ical, prop):
        """For EXDATE and RDATE recurrence component properties, the dates can
        be defined within one EXDATE/RDATE line or for each date an individual
        line.
        In the latter case, icalendar creates a list.
        This method handles this case.

        TODO: component property parameters like TZID are not used here.
        """
        val = ical[prop] if prop in ical else []
        if not isinstance(val, list):
            val = [val]

        # Zip multiple lines into one, since jquery.recurrenceinput.js does
        # not support multiple lines here
        # https://github.com/collective/jquery.recurrenceinput.js/issues/15
        ret = ''
        for item in val:
            ret = '%s,' % ret if ret else ret  # comma-separate multiple values
            ret = '%s%s' % (ret, item.to_ical())
        return '%s:%s' % (prop, ret) if ret else None

    count = 0
    for item in events:
        start = _get_prop('DTSTART', item)
        end = _get_prop('DTEND', item)
        if not end:
            # No DTEND: derive the end from DURATION when available.
            duration = _get_prop('DURATION', item)
            if duration:
                end = start + duration
            # else: whole day or open end

        whole_day = False
        open_end = False
        if is_date(start) and (is_date(end) or end is None):
            # All day / whole day events
            # End must be same type as start (RFC5545, 3.8.2.2)
            whole_day = True
            if end is None:
                end = start
            if start < end:
                # RFC5545 doesn't define clearly, if all day events should have
                # a end date one day after the start day at 0:00.
                # Internally, we handle all day events with start=0:00,
                # end=:23:59:59, so we subtract one day here.
                end = end - datetime.timedelta(days=1)
            start = base.dt_start_of_day(date_to_datetime(start))
            end = base.dt_end_of_day(date_to_datetime(end))
        elif is_datetime(start) and end is None:
            # Open end event, see RFC 5545, 3.6.1
            open_end = True
            end = base.dt_end_of_day(date_to_datetime(start))
        assert (is_datetime(start))
        assert (is_datetime(end))

        # Set timezone, if not already set
        tz = base.default_timezone(container, as_tzinfo=True)
        if not getattr(start, 'tzinfo', False):
            start = tz.localize(start)
        if not getattr(end, 'tzinfo', False):
            end = tz.localize(end)

        title = _get_prop('SUMMARY', item)
        description = _get_prop('DESCRIPTION', item)
        location = _get_prop('LOCATION', item)

        url = _get_prop('URL', item)

        # Collapse RRULE + per-line RDATE/EXDATE into one multi-line
        # recurrence string.
        rrule = _get_prop('RRULE', item)
        rrule = 'RRULE:%s' % rrule.to_ical() if rrule else ''
        rdates = _from_list(item, 'RDATE')
        exdates = _from_list(item, 'EXDATE')
        rrule = '\n'.join([it for it in [rrule, rdates, exdates] if it])

        # TODO: attendee-lists are not decoded properly and contain only
        # vCalAddress values
        attendees = item.get('ATTENDEE', ())

        contact = _get_prop('CONTACT', item)
        categories = item.get('CATEGORIES', ())
        if getattr(categories, '__iter__', False):
            categories = tuple([safe_unicode(it) for it in categories])

        ext_modified = utc(_get_prop('LAST-MODIFIED', item))

        content = None
        new_content_id = None
        existing_event = None
        sync_uid = _get_prop('UID', item)
        if sync_uid and sync_strategy is not base.SYNC_NONE:
            existing_event = _get_by_sync_uid(sync_uid)
        if existing_event:
            if sync_strategy == base.SYNC_KEEP_MINE:
                # On conflict, keep mine
                continue

            exist_event = existing_event[0].getObject()
            acc = IEventAccessor(exist_event)

            if sync_strategy == base.SYNC_KEEP_NEWER and\
                    (not ext_modified or acc.last_modified > ext_modified):
                # Update only if modified date was passed in and it is not
                # older than the current modified date.  The client is not
                # expected to update the "last-modified" property, it is the
                # job of the server (calendar store) to keep it up to date.
                # This makes sure the client did the change on an up-to-date
                # version of the object.  See
                # http://tools.ietf.org/search/rfc5545#section-3.8.7.3
                continue

            # Else: update
            content = exist_event
        else:
            # No match (or SYNC_NONE): create a new event with a random
            # temporary id; it is renamed from the title further below.
            new_content_id = str(random.randint(0, 99999999))
            container.invokeFactory(event_type,
                                    id=new_content_id,
                                    title=title,
                                    description=description)
            content = container[new_content_id]

        assert (content)  # At this point, a content must be available.

        # Write all parsed properties through the event accessor.
        event = IEventAccessor(content)
        event.title = title
        event.description = description
        event.start = start
        event.end = end
        event.whole_day = whole_day
        event.open_end = open_end
        event.location = location
        event.event_url = url
        event.recurrence = rrule
        event.attendees = attendees
        event.contact_name = contact
        event.subjects = categories
        if sync_uid and sync_strategy is not base.SYNC_NONE:
            # Set the external sync_uid for sync strategies other than
            # SYNC_NONE.
            event.sync_uid = sync_uid
        notify(ObjectModifiedEvent(content))

        # Use commits instead of savepoints to avoid "FileStorageError:
        # description too long" on large imports.
        transaction.get().commit()  # Commit before rename

        if new_content_id and new_content_id in container:
            # Rename with new id from title, if processForm didn't do it.
            chooser = INameChooser(container)
            new_id = chooser.chooseName(title, content)
            content.aq_parent.manage_renameObject(new_content_id, new_id)

        # Do this at the end, otherwise it's overwritten
        if ext_modified:
            event.last_modified = ext_modified

        count += 1

    return {'count': count}
示例#30
0
    def test_parcel_indexing_on_boundlicences(self):
        """parcelInfosIndex of licences bound together (licence, inspection,
        ticket) must track parcel creation, deletion and modification on the
        source licence.
        """
        licence = self.licence
        inspection = self._create_test_licence('Inspection')
        ticket = self._create_test_licence('Ticket')
        inspection.setBound_licences([licence])
        inspection.setUse_bound_licence_infos(True)
        notify(ObjectModifiedEvent(inspection))
        ticket.setBound_inspection(inspection)
        ticket.setUse_bound_inspection_infos(True)
        notify(ObjectModifiedEvent(ticket))
        catalog = api.portal.get_tool('portal_catalog')

        def get_brains():
            # Re-query the catalog so freshly reindexed values are visible.
            return (catalog(UID=licence.UID())[0],
                    catalog(UID=inspection.UID())[0],
                    catalog(UID=ticket.UID())[0])

        def assert_indexed_everywhere(capakey):
            # The capakey must appear in all three bound licences' indexes.
            for brain in get_brains():
                self.assertIn(capakey, brain.parcelInfosIndex)

        # so far, the index should be empty as this licence contains no parcel
        for brain in get_brains():
            self.assertFalse(brain.parcelInfosIndex)

        # add a parcel1, the index should now contain this parcel reference
        parcel_1 = api.content.create(container=licence,
                                      type='Parcel',
                                      id='parcel1',
                                      division=u'A',
                                      section=u'B',
                                      radical=u'6',
                                      exposant=u'D')
        assert_indexed_everywhere(parcel_1.get_capakey())

        # add a parcel2, the index should now contain the two parcel references
        parcel_2 = api.content.create(container=licence,
                                      type='Parcel',
                                      id='parcel2',
                                      division=u'AA',
                                      section=u'B',
                                      radical=u'69',
                                      exposant=u'E')
        assert_indexed_everywhere(parcel_1.get_capakey())
        assert_indexed_everywhere(parcel_2.get_capakey())

        # we remove parcel1, parcel2 capakey should be the only remaining
        # on the index
        api.content.delete(parcel_1)
        for brain in get_brains():
            self.assertNotIn(parcel_1.get_capakey(), brain.parcelInfosIndex)
        assert_indexed_everywhere(parcel_2.get_capakey())

        # modify parcel2 capakey, the index should be updated on the licence
        old_capakey = parcel_2.get_capakey()
        parcel_2.puissance = u'69'
        self.assertNotEqual(old_capakey, parcel_2.get_capakey())
        notify(ObjectModifiedEvent(parcel_2))
        assert_indexed_everywhere(parcel_2.get_capakey())